diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 000000000..2b684ded0 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,20 @@ +Please fill the following sections in as completely as possible. Please include all CMake output under `Additional information` when reporting build issues. + +**Description of the issue** +... + +**Platform** +| Field | Value | +| ------------: | :------------------ | +| HPC Center: | | +| System name: | | +| OS name/ver: | | +| Compiler ver: | | +| CMake ver: | | + + +**Steps to Reproduce** +... + +**Additional information** +... diff --git a/.gitignore b/.gitignore index 3c74fb441..bf6a22fc6 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ *.sw[a-z] *.patch _build +*.pt diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 000000000..e2c25ef5e --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,13 @@ +# .readthedocs.yml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +version: 2 + +sphinx: + configuration: doc/rtd/conf.py + +python: + version: 3.7 + install: + - requirements: doc/rtd/requirements.txt diff --git a/.travis.yml b/.travis.yml index 36f4ffc3a..909717b1d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,23 +9,32 @@ os: - linux - osx +osx_image: + - xcode12.2 + env: global: - BUILD_TYPE=Debug - TECA_DIR=/travis_teca_dir - TECA_PYTHON_VERSION=3 - - TECA_DATA_REVISION=49 - matrix: - - DOCKER_IMAGE=ubuntu IMAGE_VERSION=18.04 IMAGE_NAME=ubuntu_18_04 - - DOCKER_IMAGE=fedora IMAGE_VERSION=28 IMAGE_NAME=fedora_28 + - TECA_DATA_REVISION=101 + jobs: + - DOCKER_IMAGE=ubuntu IMAGE_VERSION=20.04 IMAGE_NAME=ubuntu_20_04 REQUIRE_NETCDF_MPI=TRUE + - DOCKER_IMAGE=ubuntu IMAGE_VERSION=20.04 IMAGE_NAME=ubuntu_20_04 REQUIRE_NETCDF_MPI=FALSE + - DOCKER_IMAGE=fedora IMAGE_VERSION=32 IMAGE_NAME=fedora_32 REQUIRE_NETCDF_MPI=TRUE + - DOCKER_IMAGE=fedora IMAGE_VERSION=32 
IMAGE_NAME=fedora_32 REQUIRE_NETCDF_MPI=FALSE - NO_DOCKER=TRUE -matrix: +jobs: exclude: - os: osx - env: DOCKER_IMAGE=ubuntu IMAGE_VERSION=18.04 IMAGE_NAME=ubuntu_18_04 + env: DOCKER_IMAGE=ubuntu IMAGE_VERSION=20.04 IMAGE_NAME=ubuntu_20_04 REQUIRE_NETCDF_MPI=TRUE + - os: osx + env: DOCKER_IMAGE=ubuntu IMAGE_VERSION=20.04 IMAGE_NAME=ubuntu_20_04 REQUIRE_NETCDF_MPI=FALSE + - os: osx + env: DOCKER_IMAGE=fedora IMAGE_VERSION=32 IMAGE_NAME=fedora_32 REQUIRE_NETCDF_MPI=TRUE - os: osx - env: DOCKER_IMAGE=fedora IMAGE_VERSION=28 IMAGE_NAME=fedora_28 + env: DOCKER_IMAGE=fedora IMAGE_VERSION=32 IMAGE_NAME=fedora_32 REQUIRE_NETCDF_MPI=FALSE - os: linux env: NO_DOCKER=TRUE @@ -48,6 +57,7 @@ install: docker exec teca_${DOCKER_IMAGE}_${IMAGE_VERSION} /bin/bash -c "export TECA_PYTHON_VERSION=${TECA_PYTHON_VERSION} && export TECA_DATA_REVISION=${TECA_DATA_REVISION} && + export REQUIRE_NETCDF_MPI=${REQUIRE_NETCDF_MPI} && ${TECA_DIR}/test/travis_ci/install_${IMAGE_NAME}.sh"; fi @@ -64,5 +74,6 @@ script: export BUILD_TYPE=${BUILD_TYPE} && export DOCKER_IMAGE=${DOCKER_IMAGE} && export IMAGE_VERSION=${IMAGE_VERSION} && + export REQUIRE_NETCDF_MPI=${REQUIRE_NETCDF_MPI} && ${TECA_DIR}/test/travis_ci/ctest_linux.sh"; fi diff --git a/CMake/FindMPI4Py.cmake b/CMake/FindMPI4Py.cmake index 2ac89bd9b..6dbfbb282 100644 --- a/CMake/FindMPI4Py.cmake +++ b/CMake/FindMPI4Py.cmake @@ -4,22 +4,22 @@ # Check if mpi4py is installed and configure c-api includes # # This module defines -# MPI4PY_FOUND, set TRUE if mpi4py and c-api are available -# MPI4PY_INCLUDE_DIR, where to find c-api headers -# MPI4PY_VERSION, mpi4py release version +# MPI4Py_FOUND, set TRUE if mpi4py and c-api are available +# MPI4Py_INCLUDE_DIR, where to find c-api headers +# MPI4Py_VERSION, mpi4py release version set(_TMP_PY_OUTPUT) set(_TMP_PY_RETURN) exec_program("${PYTHON_EXECUTABLE}" ARGS "-c 'import mpi4py; print(mpi4py.get_include())'" OUTPUT_VARIABLE _TMP_PY_OUTPUT RETURN_VALUE _TMP_PY_RETURN) -set(MPI4PY_INCLUDE_FOUND 
FALSE) +set(MPI4Py_INCLUDE_FOUND FALSE) if(NOT _TMP_PY_RETURN AND EXISTS "${_TMP_PY_OUTPUT}") - set(MPI4PY_INCLUDE_FOUND TRUE) + set(MPI4Py_INCLUDE_FOUND TRUE) else() set(_TMP_PY_OUTPUT) endif() -set(MPI4PY_INCLUDE_DIR "${_TMP_PY_OUTPUT}" CACHE PATH +set(MPI4Py_INCLUDE_DIR "${_TMP_PY_OUTPUT}" CACHE PATH "mpi4py include directories") set(_TMP_PY_OUTPUT) @@ -28,22 +28,22 @@ exec_program("${PYTHON_EXECUTABLE}" ARGS "-c 'import mpi4py; print(mpi4py.__version__)'" OUTPUT_VARIABLE _TMP_PY_OUTPUT RETURN_VALUE _TMP_PY_RETURN) -set(MPI4PY_VERSION_FOUND FALSE) +set(MPI4Py_VERSION_FOUND FALSE) if(NOT _TMP_PY_RETURN) - set(MPI4PY_VERSION_FOUND TRUE) + set(MPI4Py_VERSION_FOUND TRUE) else() set(_TMP_PY_OUTPUT) endif() -set(MPI4PY_VERSION "${_TMP_PY_OUTPUT}" CACHE STRING +set(MPI4Py_VERSION "${_TMP_PY_OUTPUT}" CACHE STRING "mpi4py version string") if (NOT ${QUIET}) - message(STATUS "MPI4PY_INCLUDE_DIR=${MPI4PY_INCLUDE_DIR}") - message(STATUS "MPI4PY_VERSION=${MPI4PY_VERSION}") + message(STATUS "MPI4Py_INCLUDE_DIR=${MPI4Py_INCLUDE_DIR}") + message(STATUS "MPI4Py_VERSION=${MPI4Py_VERSION}") endif() -mark_as_advanced(MPI4PY_INCLUDE_DIR MPI4PY_VERSION) +mark_as_advanced(MPI4Py_INCLUDE_DIR MPI4Py_VERSION) include(FindPackageHandleStandardArgs) -find_package_handle_standard_args(MPI4PY DEFAULT_MSG - MPI4PY_INCLUDE_FOUND MPI4PY_VERSION_FOUND) +find_package_handle_standard_args(MPI4Py DEFAULT_MSG + MPI4Py_INCLUDE_FOUND MPI4Py_VERSION_FOUND) diff --git a/CMake/FindMatplotlib.cmake b/CMake/FindMatplotlib.cmake new file mode 100644 index 000000000..143210d48 --- /dev/null +++ b/CMake/FindMatplotlib.cmake @@ -0,0 +1,25 @@ +#***************************************************************************** +# FindMatplotlib +# +# Check if matplotlib is installed and configure c-api includes +# +# This module defines +# Matplotlib_FOUND, set TRUE if matplotlib and c-api are available +# Matplotlib_VERSION, matplotlib release version + +set(_TMP_PY_OUTPUT) +set(_TMP_PY_RETURN) 
+exec_program("${PYTHON_EXECUTABLE}" + ARGS "-c 'import matplotlib; print(matplotlib.__version__)'" + OUTPUT_VARIABLE _TMP_PY_OUTPUT + RETURN_VALUE _TMP_PY_RETURN) +set(Matplotlib_VERSION_FOUND FALSE) +if(NOT _TMP_PY_RETURN) + set(Matplotlib_VERSION_FOUND TRUE) +else() + set(_TMP_PY_OUTPUT) +endif() +set(Matplotlib_VERSION "${_TMP_PY_OUTPUT}") + +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(Matplotlib DEFAULT_MSG Matplotlib_VERSION_FOUND) diff --git a/CMake/FindNetCDF.cmake b/CMake/FindNetCDF.cmake index beccd3ff9..7f7a9957b 100644 --- a/CMake/FindNetCDF.cmake +++ b/CMake/FindNetCDF.cmake @@ -32,133 +32,96 @@ # target_link_libraries (uses_everthing ${NETCDF_LIBRARIES}) # target_link_libraries (only_uses_f90 ${NETCDF_F90_LIBRARIES}) + #search starting from user editable cache var if (NETCDF_INCLUDE_DIR AND NETCDF_LIBRARY) # Already in cache, be silent set (NETCDF_FIND_QUIETLY TRUE) -endif () - -# find the library -# first look where the user told us -if (NETCDF_DIR) - find_library (NETCDF_LIBRARY NAMES netcdf - PATHS "${NETCDF_DIR}/lib" "${NETCDF_DIR}/lib64" - NO_DEFAULT_PATH) -endif() - -# next look in LD_LIBRARY_PATH for libraries -find_library (NETCDF_LIBRARY NAMES netcdf - PATHS ENV LD_LIBRARY_PATH NO_DEFAULT_PATH) - -# finally CMake can look -find_library (NETCDF_LIBRARY NAMES netcdf) - -mark_as_advanced (NETCDF_LIBRARY) -set (NETCDF_C_LIBRARIES ${NETCDF_LIBRARY}) - -# find the header -# first look where the user told us -if (NETCDF_DIR) - find_path (NETCDF_INCLUDE_DIR netcdf.h - PATHS "${NETCDF_DIR}/include" NO_DEFAULT_PATH) endif() -# then look relative to library dir -get_filename_component(NETCDF_LIBRARY_DIR - ${NETCDF_LIBRARY} DIRECTORY) - -find_path (NETCDF_INCLUDE_DIR netcdf.h - PATHS "${NETCDF_LIBRARY_DIR}/../include" - NO_DEFAULT_PATH) - -# finally CMake can look -find_path (NETCDF_INCLUDE_DIR netcdf.h) - -mark_as_advanced (NETCDF_INCLUDE_DIR) -set (NETCDF_C_INCLUDE_DIRS ${NETCDF_INCLUDE_DIR}) - - -#start finding requested 
language components -set (NetCDF_libs "") -set (NetCDF_includes "${NETCDF_INCLUDE_DIR}") - -get_filename_component (NetCDF_lib_dirs "${NETCDF_LIBRARY}" PATH) -set (NETCDF_HAS_INTERFACES "YES") # will be set to NO if we're missing any interfaces - -macro (NetCDF_check_interface lang header libs) - if (NETCDF_${lang}) - # find the library +# find the library +# use package config, this works well on systems that make +# use of modules to manage installs not in the standard +# locations that cmake knows about +find_package(PkgConfig REQUIRED) +pkg_check_modules(NC_TMP netcdf QUIET) +if (NC_TMP_FOUND AND NC_TMP_LINK_LIBRARIES AND NC_TMP_LIBRARY_DIRS AND NC_TMP_INCLUDE_DIRS) + set(NETCDF_LIBRARY_DIR ${NC_TMP_LIBRARY_DIRS}) + set(NETCDF_LIBRARY ${NC_TMP_LINK_LIBRARIES}) + set(NETCDF_INCLUDE_DIR ${NC_TMP_INCLUDE_DIRS}) +else() + # package config failed, use cmake # first look where the user told us if (NETCDF_DIR) - find_library (NETCDF_${lang}_LIBRARY NAMES netcdf + find_library(NETCDF_LIBRARY NAMES netcdf PATHS "${NETCDF_DIR}/lib" "${NETCDF_DIR}/lib64" NO_DEFAULT_PATH) endif() # next look in LD_LIBRARY_PATH for libraries - find_library (NETCDF_${lang}_LIBRARY NAMES netcdf + find_library(NETCDF_LIBRARY NAMES netcdf PATHS ENV LD_LIBRARY_PATH NO_DEFAULT_PATH) # finally CMake can look - find_library (NETCDF_${lang}_LIBRARY NAMES netcdf) + find_library(NETCDF_LIBRARY NAMES netcdf) + + message(STATUS ${NETCDF_LIBRARY}) +endif() - # find the header +# if we can find the library it is found now +# record what we have +mark_as_advanced (NETCDF_LIBRARY) +set (NETCDF_C_LIBRARIES ${NETCDF_LIBRARY}) + +# find the header +# package config failed, use cmake +if (NOT NC_TMP_FOUND OR NOT NC_TMP_LINK_LIBRARIES OR NOT NC_TMP_LIBRARY_DIRS OR NOT NC_TMP_INCLUDE_DIRS) # first look where the user told us if (NETCDF_DIR) - find_path (NETCDF_${lang}_INCLUDE_DIR netcdf.h + find_path (NETCDF_INCLUDE_DIR netcdf.h PATHS "${NETCDF_DIR}/include" NO_DEFAULT_PATH) endif() # then look relative to 
library dir - get_filename_component(NETCDF_${lang}_LIBRARY_DIR - ${NETCDF_${lang}_LIBRARY} DIRECTORY CACHE) + get_filename_component(NETCDF_LIBRARY_DIR + ${NETCDF_LIBRARY} DIRECTORY) - find_path (NETCDF_${lang}_INCLUDE_DIR netcdf.h - PATHS "${NETCDF_${lang}_LIBRARY_DIR}/../include" + find_path (NETCDF_INCLUDE_DIR netcdf.h + PATHS "${NETCDF_LIBRARY_DIR}/../include" NO_DEFAULT_PATH) - # finally CMake can look - find_path (NETCDF_${lang}_INCLUDE_DIR netcdf.h) - - #export to internal varS that rest of project can use directly - mark_as_advanced (NETCDF_${lang}_INCLUDE_DIR NETCDF_${lang}_LIBRARY) - - set (NETCDF_${lang}_LIBRARIES ${NETCDF_${lang}_LIBRARY}) - set (NETCDF_${lang}_INCLUDE_DIRS ${NETCDF_${lang}_INCLUDE_DIR}) - - if (NETCDF_${lang}_INCLUDE_DIR AND NETCDF_${lang}_LIBRARY) - list (APPEND NetCDF_libs ${NETCDF_${lang}_LIBRARY}) - list (APPEND NetCDF_includes ${NETCDF_${lang}_INCLUDE_DIR}) - else () - set (NETCDF_HAS_INTERFACES "NO") - message (STATUS "Failed to find NetCDF interface for ${lang}") - endif () - endif () -endmacro (NetCDF_check_interface) - -list (FIND NetCDF_FIND_COMPONENTS "CXX" _nextcomp) -if (_nextcomp GREATER -1) - set (NETCDF_CXX 1) -endif () -list (FIND NetCDF_FIND_COMPONENTS "F77" _nextcomp) -if (_nextcomp GREATER -1) - set (NETCDF_F77 1) -endif () -list (FIND NetCDF_FIND_COMPONENTS "F90" _nextcomp) -if (_nextcomp GREATER -1) - set (NETCDF_F90 1) -endif () -NetCDF_check_interface (CXX netcdfcpp.h netcdf_c++) -NetCDF_check_interface (F77 netcdf.inc netcdff) -NetCDF_check_interface (F90 netcdf.mod netcdff) + # CMake can look + find_path(NETCDF_INCLUDE_DIR netcdf.h) +endif() + +# look for header file that indicates MPI support +set(NETCDF_IS_PARALLEL FALSE) +find_file(NETCDF_PAR_INCLUDE_DIR netcdf_par.h + PATHS ${NETCDF_INCLUDE_DIR} NO_DEFAULT_PATH) +if (NETCDF_PAR_INCLUDE_DIR) + set(NETCDF_IS_PARALLEL TRUE) +endif() + +# if we can find the headers they are found now +# record what we have +mark_as_advanced(NETCDF_INCLUDE_DIR) 
+mark_as_advanced(NETCDF_IS_PARALLEL) +mark_as_advanced(NETCDF_PAR_INCLUDE_DIR) +set(NETCDF_C_INCLUDE_DIRS ${NETCDF_INCLUDE_DIR}) + +#start finding requested language components +set (NetCDF_libs "") +set (NetCDF_includes "${NETCDF_INCLUDE_DIR}") + +get_filename_component (NetCDF_lib_dirs "${NETCDF_LIBRARY}" PATH) #export accumulated results to internal varS that rest of project can depend on -list (APPEND NetCDF_libs "${NETCDF_C_LIBRARIES}") -set (NETCDF_LIBRARIES ${NetCDF_libs}) -set (NETCDF_INCLUDE_DIRS ${NetCDF_includes}) +list(APPEND NetCDF_libs "${NETCDF_C_LIBRARIES}") +set(NETCDF_LIBRARIES ${NetCDF_libs}) +set(NETCDF_INCLUDE_DIRS ${NetCDF_includes}) # handle the QUIETLY and REQUIRED arguments and set NETCDF_FOUND to TRUE if # all listed variables are TRUE include (FindPackageHandleStandardArgs) find_package_handle_standard_args (NetCDF - DEFAULT_MSG NETCDF_LIBRARIES NETCDF_INCLUDE_DIRS NETCDF_HAS_INTERFACES) + DEFAULT_MSG NETCDF_LIBRARIES NETCDF_INCLUDE_DIRS) diff --git a/CMake/FindNumpy.cmake b/CMake/FindNumPy.cmake similarity index 58% rename from CMake/FindNumpy.cmake rename to CMake/FindNumPy.cmake index 5e2419b90..a10ebaa78 100644 --- a/CMake/FindNumpy.cmake +++ b/CMake/FindNumPy.cmake @@ -4,22 +4,22 @@ # Check if numpy is installed and configure c-api includes # # This module defines -# NUMPY_FOUND, set TRUE if numpy and c-api are available -# NUMPY_INCLUDE_DIR, where to find c-api headers -# NUMPY_VERSION, numpy release version +# NumPy_FOUND, set TRUE if numpy and c-api are available +# NumPy_INCLUDE_DIR, where to find c-api headers +# NumPy_VERSION, numpy release version set(_TMP_PY_OUTPUT) set(_TMP_PY_RETURN) exec_program("${PYTHON_EXECUTABLE}" ARGS "-c 'import numpy; print(numpy.get_include())'" OUTPUT_VARIABLE _TMP_PY_OUTPUT RETURN_VALUE _TMP_PY_RETURN) -set(NUMPY_INCLUDE_FOUND FALSE) +set(NumPy_INCLUDE_FOUND FALSE) if(NOT _TMP_PY_RETURN AND EXISTS "${_TMP_PY_OUTPUT}") - set(NUMPY_INCLUDE_FOUND TRUE) + set(NumPy_INCLUDE_FOUND TRUE) else() 
set(_TMP_PY_OUTPUT) endif() -set(NUMPY_INCLUDE_DIR "${_TMP_PY_OUTPUT}") +set(NumPy_INCLUDE_DIR "${_TMP_PY_OUTPUT}") set(_TMP_PY_OUTPUT) set(_TMP_PY_RETURN) @@ -27,15 +27,15 @@ exec_program("${PYTHON_EXECUTABLE}" ARGS "-c 'import numpy; print(numpy.version.version)'" OUTPUT_VARIABLE _TMP_PY_OUTPUT RETURN_VALUE _TMP_PY_RETURN) -set(NUMPY_VERSION_FOUND FALSE) +set(NumPy_VERSION_FOUND FALSE) if(NOT _TMP_PY_RETURN) - set(NUMPY_VERSION_FOUND TRUE) + set(NumPy_VERSION_FOUND TRUE) else() set(_TMP_PY_OUTPUT) endif() -set(NUMPY_VERSION "${_TMP_PY_OUTPUT}") +set(NumPy_VERSION "${_TMP_PY_OUTPUT}") -#set(NUMPY_INCLUDE_DIR "${_TMP_PY_OUTPUT}" CACHE PATH "Numpy C API headers") -#mark_as_advanced(NUMPY_INCLUDE_DIR) +#set(NumPy_INCLUDE_DIR "${_TMP_PY_OUTPUT}" CACHE PATH "Numpy C API headers") +#mark_as_advanced(NumPy_INCLUDE_DIR) include(FindPackageHandleStandardArgs) -find_package_handle_standard_args(NUMPY DEFAULT_MSG NUMPY_INCLUDE_FOUND NUMPY_VERSION_FOUND) +find_package_handle_standard_args(NumPy DEFAULT_MSG NumPy_INCLUDE_FOUND NumPy_VERSION_FOUND) diff --git a/CMake/FindPyTorch.cmake b/CMake/FindPyTorch.cmake new file mode 100644 index 000000000..a1ea6ff57 --- /dev/null +++ b/CMake/FindPyTorch.cmake @@ -0,0 +1,25 @@ +#***************************************************************************** +# FindPyTorch +# +# Check if torch is installed and configure c-api includes +# +# This module defines +# PyTorch_FOUND, set TRUE if torch and c-api are available +# PyTorch_VERSION, torch release version + +set(_TMP_PY_OUTPUT) +set(_TMP_PY_RETURN) +exec_program("${PYTHON_EXECUTABLE}" + ARGS "-c 'import torch; print(torch.__version__)'" + OUTPUT_VARIABLE _TMP_PY_OUTPUT + RETURN_VALUE _TMP_PY_RETURN) +set(PyTorch_VERSION_FOUND FALSE) +if(NOT _TMP_PY_RETURN) + set(PyTorch_VERSION_FOUND TRUE) +else() + set(_TMP_PY_OUTPUT) +endif() +set(PyTorch_VERSION "${_TMP_PY_OUTPUT}") + +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(PyTorch DEFAULT_MSG 
PyTorch_VERSION_FOUND) diff --git a/CMake/teca_app.cmake b/CMake/teca_app.cmake index 70ddee83b..fbaa0e923 100644 --- a/CMake/teca_app.cmake +++ b/CMake/teca_app.cmake @@ -5,20 +5,34 @@ # ) function (teca_add_app app_name) set(opt_args) - set(val_args FEATURES) - set(array_args SOURCES LIBS) + set(val_args) + set(array_args SOURCES LIBS FEATURES) cmake_parse_arguments(APP "${opt_args}" "${val_args}" "${array_args}" ${ARGN}) - if (APP_FEATURES) + set(APP_ENABLED ON) + if (DEFINED APP_FEATURES) + foreach(feature ${APP_FEATURES}) + if (NOT feature) + set(APP_ENABLED OFF) + endif() + endforeach() + endif() + if (APP_ENABLED) + message(STATUS "command line application ${app_name} -- enabled") if (NOT APP_SOURCES) set(APP_SOURCES "${app_name}.cpp") endif() + if (TECA_HAS_BOOST) + list(APPEND APP_SOURCES teca_app_util.cxx) + endif() add_executable(${app_name} ${APP_SOURCES}) if (APP_LIBS) target_link_libraries(${app_name} - teca_core teca_data teca_io teca_alg + teca_system teca_core teca_data teca_io teca_alg ${APP_LIBS}) endif() install(TARGETS ${app_name} RUNTIME DESTINATION ${BIN_PREFIX}) + else() + message(STATUS "command line application ${app_name} -- disabled") endif() endfunction() diff --git a/CMake/teca_python.cmake b/CMake/teca_python.cmake index 1d676187d..6ee3f7397 100644 --- a/CMake/teca_python.cmake +++ b/CMake/teca_python.cmake @@ -35,3 +35,34 @@ function(teca_py_install_apps) # TODO compile the sources endif() endfunction() + +# teca_add_python_app(name +# SOURCES -- optional, source files to comile +# FEATURES -- optional, boolean condition decribing feature dependencies +# ) +function (teca_add_python_app app_name) + if (TECA_HAS_PYTHON) + set(opt_args) + set(val_args) + set(array_args SOURCES FEATURES) + cmake_parse_arguments(APP + "${opt_args}" "${val_args}" "${array_args}" ${ARGN}) + set(APP_ENABLED ON) + if (DEFINED APP_FEATURES) + foreach(feature ${APP_FEATURES}) + if (NOT feature) + set(APP_ENABLED OFF) + endif() + endforeach() + endif() + if 
(APP_ENABLED) + message(STATUS "command line application ${app_name} -- enabled") + if (NOT APP_SOURCES) + set(APP_SOURCES "${app_name}.in") + endif() + teca_py_install_apps(${APP_SOURCES}) + else() + message(STATUS "command line application ${app_name} -- disabled") + endif() + endif() +endfunction() diff --git a/CMake/teca_test.cmake b/CMake/teca_test.cmake index 31aa3d817..375c3524f 100644 --- a/CMake/teca_test.cmake +++ b/CMake/teca_test.cmake @@ -1,5 +1,5 @@ # teca_add_test(name -# EXEC_NAME -- optional, name of the copiled test +# EXEC_NAME -- optional, name of the compiled test # SOURCES -- optional, source files to comile # LIBS -- optional, libraries to link to the compiled test # COMMAND -- required, test command diff --git a/CMakeLists.txt b/CMakeLists.txt index 6d24e70b6..c1ecb1fbb 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -40,21 +40,24 @@ if (MSVC) CACHE STRING "" FORCE) else() if (NOT CMAKE_CXX_FLAGS) - set(tmp "-fPIC -std=c++11 -Wall -Wextra") + set(tmp "-fPIC -std=c++17 -Wall -Wextra") if ((APPLE) AND ("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang")) set(tmp "${tmp} -stdlib=libc++") endif() if ("${CMAKE_BUILD_TYPE}" MATCHES "Release") - set(tmp "${tmp} -march=native -mtune=native") + set(tmp "${tmp} -march=native -mtune=native -fno-trapping-math -fno-math-errno") + if (NOT "${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang") + set(tmp "${tmp} -fno-signaling-nans") + endif() endif() set(CMAKE_CXX_FLAGS "${tmp}" CACHE STRING "TECA build defaults" FORCE) endif() if (NOT CMAKE_Fortran_FLAGS) - set(tmp "-Wall -Wextra -Wno-conversion") + set(tmp "-Wall -Wextra -Wno-conversion -Wno-compare-reals -fno-math-errno") if (CMAKE_BUILD_TYPE STREQUAL "Release") - set(tmp "${tmp} -march=native -mtune=native") + set(tmp "${tmp} -march=native -mtune=native -fno-trapping-math -fno-signaling-nans -fno-math-errno") endif() set(CMAKE_Fortran_FLAGS "${tmp}" CACHE STRING "TECA build defaults" @@ -78,6 +81,21 @@ endif() include(teca_interface_library) +# these control behavior 
of the build. most of the time you want to error +# out if these are not found. for those times when you don't set the corresponding +# REQUIRE variable to FALSE +set(REQUIRE_MPI TRUE CACHE BOOL "Forces build failure when MPI is missing") +set(REQUIRE_NETCDF TRUE CACHE BOOL "Forces build failure when NetCDF is missing") +set(REQUIRE_NETCDF_MPI TRUE CACHE BOOL "Forces build failure when NetCDF_MPI is missing") +set(REQUIRE_UDUNITS TRUE CACHE BOOL "Forces build failure when udunits2 is missing") +set(REQUIRE_BOOST TRUE CACHE BOOL "Forces build failure when boost is missing") +set(REQUIRE_OPENSSL ${REQUIRE_NETCDF} CACHE BOOL "Forces build failure when OpenSSL is missing") +set(REQUIRE_PYTHON TRUE CACHE BOOL "Forces build failure when Python is missing") +set(REQUIRE_LIBXLSXWRITER FALSE CACHE BOOL "Forces build failure when libxlsxwriter is missing") +set(REQUIRE_PARAVIEW FALSE CACHE BOOL "Forces build failure when ParaView is missing") +set(REQUIRE_VTK FALSE CACHE BOOL "Forces build failure when VTK is missing") +set(REQUIRE_TECA_DATA ${BUILD_TESTING} CACHE BOOL "Forces build failure when TECA_data is missing") + # locate dependencies # configure for MPI if (ENABLE_CRAY_MPICH) @@ -114,6 +132,17 @@ if (NETCDF_FOUND AND ((DEFINED TECA_HAS_NETCDF AND TECA_HAS_NETCDF) OR (NOT DEFINED TECA_HAS_NETCDF))) message(STATUS "NetCDF features -- enabled") set(tmp ON) + teca_interface_library(NetCDF SYSTEM + INCLUDES ${NETCDF_INCLUDE_DIRS} + LIBRARIES ${NETCDF_LIBRARIES}) + if (NOT NETCDF_IS_PARALLEL) + message(STATUS "Check NetcCDF for MPI support -- not found") + if (REQUIRE_NETCDF_MPI) + message(FATAL_ERROR "NetCDF MPI support -- required but not found.") + endif() + else() + message(STATUS "Check NetCDF for MPI support -- enabled") + endif() elseif (REQUIRE_NETCDF) message(FATAL_ERROR "NetCDF features -- required but not found. 
set NETCDF_DIR to enable.") else() @@ -121,6 +150,7 @@ else() message(WARNING "NetCDF is required for CF-2 I/O") endif() set(TECA_HAS_NETCDF ${tmp} CACHE BOOL "NetCDF features") +set(TECA_HAS_NETCDF_MPI ${NETCDF_IS_PARALLEL} CACHE BOOL "NetCDF MPI support") # configure for libxlsxwriter set(tmp OFF) @@ -129,7 +159,7 @@ if (LIBXLSXWRITER_FOUND AND ((DEFINED TECA_HAS_LIBXLSXWRITER AND TECA_HAS_LIBXLS OR (NOT DEFINED TECA_HAS_LIBXLSXWRITER))) message(STATUS "libxlsxwriter features -- enabled") set(tmp ON) -elseif (REQUIRE_LIBXLSXWRITE) +elseif (REQUIRE_LIBXLSXWRITER) message(STATUS "libxlsxwriter features -- required but not found. set LIBXLSXWRITER_DIR to enable.") else() message(STATUS "libxlsxwriter features -- not found. set LIBXLSXWRITER_DIR to enable.") @@ -211,15 +241,18 @@ set_property(CACHE TECA_PYTHON_VERSION PROPERTY STRINGS 2 3) set(tmp OFF) find_package(PythonInterp ${TECA_PYTHON_VERSION}) if(PYTHONINTERP_FOUND) - if (TECA_PYTHON_VERSION EQUAL 3) - set(py_lib_ver ${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}m) - else() - set(py_lib_ver ${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}) - endif() find_program(PYTHON_CONFIG_EXECUTABLE python${PYTHON_VERSION_MAJOR}-config) if (NOT PYTHON_CONFIG_EXECUTABLE) message(SEND_ERROR "python${PYTHON_VERSION_MAJOR}-config executable is required.") endif() + if (TECA_PYTHON_VERSION EQUAL 3) + set(PYTHON3_ABI_FLAGS) + execute_process(COMMAND ${PYTHON_CONFIG_EXECUTABLE} --abiflags + OUTPUT_VARIABLE PYTHON3_ABI_FLAGS OUTPUT_STRIP_TRAILING_WHITESPACE) + set(py_lib_ver ${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}${PYTHON3_ABI_FLAGS}) + else() + set(py_lib_ver ${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}) + endif() execute_process(COMMAND ${PYTHON_CONFIG_EXECUTABLE} --prefix OUTPUT_VARIABLE python_prefix OUTPUT_STRIP_TRAILING_WHITESPACE) set(PYTHON_INCLUDE_DIR ${python_prefix}/include/python${py_lib_ver}) @@ -234,7 +267,9 @@ if(PYTHONINTERP_FOUND) endif() endif() find_package(PythonLibs ${TECA_PYTHON_VERSION}) 
-find_package(Numpy) +find_package(NumPy) +find_package(PyTorch) +find_package(Matplotlib) if (TECA_HAS_MPI) find_package(MPI4Py) endif() @@ -244,21 +279,24 @@ if (swig_cmd) else() message(FATAL_ERROR "Found SWIG: FALSE") endif() -if (PYTHONINTERP_FOUND AND PYTHONLIBS_FOUND AND NUMPY_FOUND AND swig_cmd - AND ((TECA_HAS_MPI AND MPI4PY_FOUND) OR (NOT TECA_HAS_MPI)) +if (PYTHONINTERP_FOUND AND PYTHONLIBS_FOUND AND NumPy_FOUND AND swig_cmd + AND ((TECA_HAS_MPI AND MPI4Py_FOUND) OR (NOT TECA_HAS_MPI)) AND ((DEFINED TECA_HAS_PYTHON AND TECA_HAS_PYTHON) OR (NOT DEFINED TECA_HAS_PYTHON))) message(STATUS "Python ${TECA_PYTHON_VERSION} features -- enabled") set(tmp ON) teca_interface_library(PYTHON SYSTEM - INCLUDES ${PYTHON_INCLUDE_PATH} ${MPI4PY_INCLUDE_DIR} ${NUMPY_INCLUDE_DIR} + INCLUDES ${PYTHON_INCLUDE_PATH} ${MPI4Py_INCLUDE_DIR} ${NumPy_INCLUDE_DIR} LIBRARIES ${PYTHON_LIBRARIES}) elseif (REQUIRE_PYTHON) message(FATAL_ERROR "Python ${TECA_PYTHON_VERSION} features -- required but not found") else() message(STATUS "Python ${TECA_PYTHON_VERSION} features -- not found. Requires Python, SWIG, numpy, and mpi4py if compiling with MPI") endif() -set(TECA_HAS_PYTHON ${tmp} CACHE BOOL "Python binding") +set(TECA_HAS_PYTHON ${tmp} CACHE BOOL "TECA Python bindings") +set(TECA_HAS_NUMPY ${NumPy_FOUND} CACHE BOOL "Python NumPy module") +set(TECA_HAS_PYTORCH ${PyTorch_FOUND} CACHE BOOL "Python torch module") +set(TECA_HAS_MATPLOTLIB ${Matplotlib_FOUND} CACHE BOOL "Python matplotlib module") # silence the annoying cmake warnings about REQUIRE_ when # the required package X is actually found. 
@@ -308,6 +346,8 @@ set(TECA_DATA_ROOT "/path/to/TECA_data" if (EXISTS "${TECA_DATA_ROOT}") set(tmp ON) message(STATUS "TECA_data -- available") +elseif (REQUIRE_TECA_DATA) + message(FATAL_ERROR "TECA_data -- required but not found") else() message(STATUS "TECA_data -- not available") endif() @@ -333,16 +373,41 @@ add_subdirectory(paraview) # enable regression tests set(BUILD_TESTING OFF CACHE BOOL "Enable tests") if (BUILD_TESTING) + # these are dsiabled by default because they are slow and threaded version + # provides sufficient coverage. + set(TECA_SERIAL_TESTS OFF CACHE BOOL "Enables serial tests of threaded codes.") + include(CTest) + include(ProcessorCount) configure_file( "${CMAKE_CURRENT_SOURCE_DIR}/CTestCustom.cmake.in" "${CMAKE_CURRENT_BINARY_DIR}/CTestCustom.cmake" @ONLY) - set(TECA_TEST_CORES 4 CACHE STRING - "Number of cores for use in parallel tests") - math(EXPR HALF_CORES "${TECA_TEST_CORES}/2") - if (HALF_CORES LESS 1) - message(FATAL_ERROR "Parallel test require at lest 2 cores") + + # figure out how many cores we can use for parallel tests + set(TECA_TEST_CORES 0 CACHE STRING + "Max number of cores for use in parallel tests") + if (TECA_TEST_CORES LESS 1) + ProcessorCount(LOGICAL_CORES) + if (LOGICAL_CORES EQUAL 0) + set(LOGICAL_CORES 4) + endif() + else() + math(EXPR LOGICAL_CORES "${TECA_TEST_CORES}*2") + endif() + math(EXPR PHYSICAL_CORES "${LOGICAL_CORES}/2") + if (PHYSICAL_CORES LESS 3) + set(TEST_CORES 2) + set(HALF_TEST_CORES 2) + set(TWICE_TEST_CORES 4) + else() + set(TEST_CORES ${PHYSICAL_CORES}) + math(EXPR HALF_TEST_CORES "${TEST_CORES}/2") + set(TWICE_TEST_CORES ${LOGICAL_CORES}) endif() + message(STATUS "regression testing -- enabled (${TEST_CORES} cores).") + add_subdirectory(test) +else() + message(STATUS "regression testing -- disbaled") endif() diff --git a/CTestCustom.cmake.in b/CTestCustom.cmake.in index 3e583b0da..1f8c1336a 100644 --- a/CTestCustom.cmake.in +++ b/CTestCustom.cmake.in @@ -1,2 +1,3 @@ 
set(CTEST_CUSTOM_WARNING_MATCH ${CTEST_CUSTOM_WARNING_MATCH} "{standard input}:[0-9][0-9]*: [wW]arning: ") +set(CTEST_CUSTOM_MAXIMUM_PASSED_TEST_OUTPUT_SIZE 8192) diff --git a/README.md b/README.md index 482e0042d..8e0485626 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,28 @@ -# The TECA, Toolkit for Extreme Climate Analaysis -![Storm Tracks Generated by TECA](doc/images/tracks_crop_2.gif) + + + +PyPI version + -TECA(Toolkit for Extreme Climate Analysis) is a collection of climate analysis algorithms geared toward extreme event detection and tracking implemented in a scalable parallel framework. The core is written in modern c++ and uses MPI+thread for parallelism. The framework supports a number of parallel design patterns including distributed data parallelism and map-reduce. Python bindings make the high performance c++ code easy to use. TECA has been used up to 750k cores. +## The Toolkit for Extreme Climate Analysis +TECA is a collection of climate analysis algorithms geared toward extreme event detection and tracking implemented in a scalable parallel framework. The code has been successfully deployed and run at massive scales on current DOE supercomputers. TECA's core is written in modern C++ and exploits MPI + X parallelism where X is one of threads, OpenMP, or GPUs. The framework supports a number of parallel design patterns including distributed data parallelism and map-reduce. While modern C++ delivers the highest performance, Python bindings make the code approachable and easy to use. 
-[![Build Status](https://travis-ci.com/LBL-EESA/TECA.svg?token=zV3LhFtYvjcvo67W2uji&branch=master)](https://travis-ci.com/LBL-EESA/TECA) [![Documentation Status](https://readthedocs.org/projects/teca/badge/?version=latest)](https://teca.readthedocs.io/en/latest/?badge=latest) [![DOI](doc/images/teca_doi_badge.svg)]( https://doi.org/10.20358/C8C651) +### Documentation +The [TECA User's Guide](https://teca.readthedocs.io/en/latest/) is the authorotative source for documentation on topics such as [installing TECA](https://teca.readthedocs.io/en/latest/installation.html), running TECA's [command line applications](https://teca.readthedocs.io/en/latest/applications.html), and [Python development](https://teca.readthedocs.io/en/latest/python.html). -# Documentation -For more information please see the [TECA User's Guide](https://teca.readthedocs.io/en/latest/). +### Tutorials +The [TECA tutorials](https://sourceforge.net/p/teca/TECA_tutorials) subversion repository contains slides from previous tutorials. -#Copyright Notice# +### Examples +The [TECA examples](https://github.com/LBL-EESA/TECA_examples) repository contains batch scripts and codes illustrating the use of TECA at scale. + +### Python +The [TECA Python package]() is available on PyPi or by installing from sources. For more information see the [TECA User's Guide](https://teca.readthedocs.io/en/latest/) sections on [installing TECA](https://teca.readthedocs.io/en/latest/installation.html) and [Python development](https://teca.readthedocs.io/en/latest/python.html). + +### CI and Testing +For the latest regression suite results see the [TECA CDash project site](https://cdash.nersc.gov/index.php?project=TECA). + +## Copyright Notice TECA, Copyright (c) 2015, The Regents of the University of California, through Lawrence Berkeley National Laboratory (subject to receipt of any required approvals from the U.S. Dept. of Energy). All rights reserved. 
If you have questions about your rights to use or distribute this software, please contact Berkeley Lab's Innovation & Partnerships Office at IPO@lbl.gov. diff --git a/alg/CMakeLists.txt b/alg/CMakeLists.txt index be76eb139..48038978d 100644 --- a/alg/CMakeLists.txt +++ b/alg/CMakeLists.txt @@ -19,20 +19,18 @@ set(teca_alg_cxx_srcs teca_2d_component_area.cxx teca_component_area_filter.cxx teca_component_statistics.cxx - teca_dataset_capture.cxx - teca_dataset_source.cxx teca_derived_quantity.cxx teca_descriptive_statistics.cxx teca_evaluate_expression.cxx + teca_face_to_cell_centering.cxx teca_geography.cxx + teca_integrated_vapor_transport.cxx teca_l2_norm.cxx teca_latitude_damper.cxx teca_laplacian.cxx teca_mask.cxx teca_normalize_coordinates.cxx teca_parser.cxx - teca_programmable_algorithm.cxx - teca_programmable_reduce.cxx teca_table_calendar.cxx teca_table_reduce.cxx teca_table_region_mask.cxx @@ -44,19 +42,14 @@ set(teca_alg_cxx_srcs teca_tc_wind_radii.cxx teca_tc_trajectory.cxx teca_temporal_average.cxx + teca_valid_value_mask.cxx teca_variant_array_operand.cxx + teca_vertical_coordinate_transform.cxx + teca_vertical_reduction.cxx teca_vorticity.cxx teca_dataset_diff.cxx ) -teca_py_install(${LIB_PREFIX} - teca_tc_stats.py - teca_tc_activity.py - teca_python_algorithm.py - teca_tc_wind_radii_stats.py - teca_tc_trajectory_scalars.py - ) - set(teca_alg_f90_srcs) set(teca_alg_f90_generics @@ -99,6 +92,10 @@ target_include_directories(teca_alg $ ) +teca_py_install(${LIB_PREFIX} + teca_deeplab_ar_detect_internals.py + ) + install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} DESTINATION ${INCLUDE_PREFIX} FILES_MATCHING PATTERN "*.h") diff --git a/alg/teca_2d_component_area.cxx b/alg/teca_2d_component_area.cxx index 84dc37662..285d40f52 100644 --- a/alg/teca_2d_component_area.cxx +++ b/alg/teca_2d_component_area.cxx @@ -109,7 +109,7 @@ void component_area(unsigned long nlon, unsigned long nlat, // -------------------------------------------------------------------------- 
teca_2d_component_area::teca_2d_component_area() : - component_variable(""), contiguous_component_ids(0) + component_variable(""), contiguous_component_ids(0), background_id(-1) { this->set_number_of_input_connections(1); this->set_number_of_output_ports(1); @@ -133,6 +133,8 @@ void teca_2d_component_area::get_properties_description( TECA_POPTS_GET(int, prefix, contiguous_component_ids, "when the region label ids start at 0 and are consecutive " "this flag enables use of an optimization (0)") + TECA_POPTS_GET(long, prefix, background_id, + "the label id that corresponds to the background (-1)") ; global_opts.add(opts); @@ -144,6 +146,7 @@ void teca_2d_component_area::set_properties(const std::string &prefix, { TECA_POPTS_SET(opts, std::string, prefix, component_variable) TECA_POPTS_SET(opts, int, prefix, contiguous_component_ids) + TECA_POPTS_SET(opts, long, prefix, background_id) } #endif @@ -285,6 +288,20 @@ const_p_teca_dataset teca_2d_component_area::execute( teca_metadata &out_metadata = out_mesh->get_metadata(); + // get the background_id, and pass it through + long bg_id = this->background_id; + if (this->background_id == -1) + { + if (in_metadata.get("background_id", bg_id)) + { + TECA_ERROR("Metadata is missing the key \"background_id\". " + "One should specify it via the \"background_id\" algorithm " + "property") + return nullptr; + } + } + out_metadata.set("background_id", bg_id); + // calculate area of components NESTED_TEMPLATE_DISPATCH_FP(const teca_variant_array_impl, xc.get(), diff --git a/alg/teca_2d_component_area.h b/alg/teca_2d_component_area.h index 2b7f766e0..5a9d57e6a 100644 --- a/alg/teca_2d_component_area.h +++ b/alg/teca_2d_component_area.h @@ -12,24 +12,38 @@ TECA_SHARED_OBJECT_FORWARD_DECL(teca_2d_component_area) /// an algorithm that computes the area of labeled regions /** -Given a set of labels on a Cartesian mesh, the algorithm computes -the area of each region. 
Regions are identified by assigning a -unique integer value to each mesh point that belongs in the -region. The component_variable property names the variable containing -the region labels. - -if the region labels start at 0 and are contiguous then an -optimization can be used. Set contiguous_component_ids property -to enable the optimization. - -the input dataset is passed through and the results of the -calculations are stored in the output dataset metadata in -keys named: - - number_of_components - component_ids - component_area - +Given a set of labels on a Cartesian mesh, the algorithm computes the area of +each region. Regions are identified by assigning a unique integer value to each +mesh point that belongs in the region. The component_variable property names +the variable containing the region labels. + +if the region labels start at 0 and are contiguous then an optimization can be +used. Set contiguous_component_ids property to enable the optimization. Note that +TECA's connected component labeler assigns the background (i.e. cells not inside +the segmentation) the label 0. One can identify the background region and area +via this label. When processing data generated outside of TECA it might be +necessary to supply the background label. Use -2 if there is no background. + +the input dataset is passed through and the results of the calculations are +stored in the output dataset metadata in the following keys: + + number_of_components - number of component ids for which area was + computed. Note that this can include a background + component i.e. for cells outside of the segmentation. + + component_ids - a vector containing the label of each component. This is + always starts with 0, where the label 0 identifies cells + out side of the segmentation, and ranges up to + number_of_components - 1, where the labels from 1 up to + number_of_components - 1 identify connected regions of + cells inside the segmentation. 
+ + component_area - a vector containing the area for the corresponding entry + in the component_ids array. + + background_id - the label used for cells outside of the segmentation, + i.e. the background. This can be used to skip processing + of the background when desirable. */ class teca_2d_component_area : public teca_algorithm { @@ -39,19 +53,24 @@ class teca_2d_component_area : public teca_algorithm TECA_ALGORITHM_CLASS_NAME(teca_2d_component_area) ~teca_2d_component_area(); - // report/initialize to/from Boost program options - // objects. + // report/initialize to/from Boost program options objects. TECA_GET_ALGORITHM_PROPERTIES_DESCRIPTION() TECA_SET_ALGORITHM_PROPERTIES() // set the name of the input array TECA_ALGORITHM_PROPERTY(std::string, component_variable) - // set this only if you know for certain that label ids - // are contiguous and start at 0. this enables use of a - // faster implementation. + // set this only if you know for certain that label ids are contiguous and + // start at 0. this enables use of a faster implementation. TECA_ALGORITHM_PROPERTY(int, contiguous_component_ids) + // set this to override the component label used for background. By default + // this is set to -1 to indicate that the value should be obtained from the + // metadata key `background_id`. Note that TECA's connected component + // labeler uses the id 0 for the background and passes this in a metadata + // key and as a result no action is required. 
+ TECA_ALGORITHM_PROPERTY(long, background_id) + protected: teca_2d_component_area(); @@ -75,6 +94,7 @@ class teca_2d_component_area : public teca_algorithm private: std::string component_variable; int contiguous_component_ids; + long background_id; }; #endif diff --git a/alg/teca_bayesian_ar_detect.cxx b/alg/teca_bayesian_ar_detect.cxx index 4285f4c87..8a7705a09 100644 --- a/alg/teca_bayesian_ar_detect.cxx +++ b/alg/teca_bayesian_ar_detect.cxx @@ -2,6 +2,7 @@ #include "teca_mesh.h" #include "teca_array_collection.h" +#include "teca_array_attributes.h" #include "teca_variant_array.h" #include "teca_metadata.h" #include "teca_cartesian_mesh.h" @@ -31,6 +32,7 @@ #include #endif +namespace { // This routine appends the contents of dataset_0.get_metadata.get(property_name) // onto that from dataset_0 and overwrites the contents `property_name' in the @@ -44,7 +46,7 @@ // dataset_1 : (p_teca_dataset) the RHS dataset in the reduction // // mesh_out : (p_teca_cartesian_mesh) the output of the reduction -void property_reduce(std::string property_name, +/*void property_reduce(std::string property_name, p_teca_dataset dataset_0, p_teca_dataset dataset_1, p_teca_cartesian_mesh mesh_out) { @@ -63,12 +65,7 @@ void property_reduce(std::string property_name, // Overwrite the concatenated property vector in the output dataset mesh_out->get_metadata().set(property_name, property_vector); -} - - - - -namespace { +}*/ // drive the pipeline execution once for each parameter table row // injects the parameter values into the upstream requests @@ -166,19 +163,27 @@ class parameter_table_reduction ~parameter_table_reduction() = default; + // finalize callback // completes the reduction by scaling by the number of parameter table rows - int finalize(p_teca_cartesian_mesh &out_mesh) + p_teca_dataset operator()(const const_p_teca_dataset &ds) { - p_teca_variant_array ar_prob = + p_teca_cartesian_mesh out_mesh = + std::dynamic_pointer_cast(ds->new_instance()); + + 
out_mesh->shallow_copy(std::const_pointer_cast(ds)); + + p_teca_variant_array ar_prob_in = out_mesh->get_point_arrays()->get(this->probability_array_name); - if (!ar_prob) + if (!ar_prob_in) { TECA_ERROR("finalize failed, proability array \"" << this->probability_array_name << "\" not found") - return -1; + return nullptr; } + p_teca_variant_array ar_prob = ar_prob_in->new_copy(); + unsigned long n_vals = ar_prob->size(); TEMPLATE_DISPATCH_FP(teca_variant_array_impl, @@ -192,10 +197,13 @@ class parameter_table_reduction p_ar_prob[i] /= num_params; ) - return 0; + out_mesh->get_point_arrays()->set( + this->probability_array_name, ar_prob); + + return out_mesh; } - // this reducion computes the probability from each parameter table run + // this reduction computes the probability from each parameter table run // if the inputs have the probability array this is used, if not the // array is computed from the filtered connected components. after the // reduction runs, the result will need to be normalized. @@ -263,8 +271,19 @@ class parameter_table_reduction prob = prob_0; wvcc = wvcc_1; - // append ar count and param table row + // append ar count + // don't count the background label as an ar detection. + // TECA by convention uses label 0 for cells out side of + // the segmentation (i.e. the background), and by + // convention this is stored in the first entry. int val = 0; + if (md_1.get("component_ids", &val, 1)) + { + TECA_ERROR("mesh 1 is missing component_ids") + return nullptr; + } + int wvcc_bg = val == 0 ? 
1 : 0; + if (md_1.get("number_of_components", val)) { TECA_ERROR("mesh 1 is missing number_of_components") @@ -272,8 +291,9 @@ class parameter_table_reduction } n_wvcc_out = n_wvcc_0->new_copy(); - n_wvcc_out->append(val); + n_wvcc_out->append(val - wvcc_bg); + // append param table row if (md_1.get("parameter_table_row", val)) { TECA_ERROR("mesh 1 is missing parameter_table_row") @@ -290,20 +310,32 @@ class parameter_table_reduction prob = prob_1; wvcc = wvcc_0; - // append ar count and param table row + // append ar count + // don't count the background label as an ar detection. + // TECA by convention uses label 0 for cells out side of + // the segmentation (i.e. the background), and by + // convention this is stored in the first entry. int val = 0; + if (md_0.get("component_ids", &val, 1)) + { + TECA_ERROR("mesh 1 is missing component_ids") + return nullptr; + } + int wvcc_bg = val == 0 ? 1 : 0; + if (md_0.get("number_of_components", val)) { - TECA_ERROR("mesh 1 is missing number_of_components") + TECA_ERROR("mesh 0 is missing number_of_components") return nullptr; } n_wvcc_out = n_wvcc_1->new_copy(); - n_wvcc_out->append(val); + n_wvcc_out->append(val - wvcc_bg); + // append param table row if (md_0.get("parameter_table_row", val)) { - TECA_ERROR("mesh 1 is missing parameter_table_row") + TECA_ERROR("mesh 0 is missing parameter_table_row") return nullptr; } @@ -372,7 +404,7 @@ class parameter_table_reduction ) ) - // append ar count and param table row + // append ar count int vals[2]; if (md_0.get("number_of_components", vals[0]) || md_1.get("number_of_components", vals[1])) @@ -381,8 +413,30 @@ class parameter_table_reduction return nullptr; } + // don't count the background label as an ar detection. + // TECA by convention uses label 0 for cells out side of + // the segmentation (i.e. the background), and by + // convention this is stored in the first entry. 
+ int val = 0; + if (md_0.get("component_ids", &val, 1)) + { + TECA_ERROR("mesh 1 is missing component_ids") + return nullptr; + } + int wvcc_bg = val == 0 ? 1 : 0; + vals[0] -= wvcc_bg; + + if (md_1.get("component_ids", &val, 1)) + { + TECA_ERROR("mesh 1 is missing component_ids") + return nullptr; + } + wvcc_bg = val == 0 ? 1 : 0; + vals[1] -= wvcc_bg; + n_wvcc_out = teca_int_array::New(vals, 2); + // append param table row if (md_0.get("parameter_table_row", vals[0]) || md_1.get("parameter_table_row", vals[1])) { @@ -413,7 +467,7 @@ class parameter_table_reduction if (prob) { - // probability has already been comnputed, pass it through + // probability has already been computed, pass it through prob_out = prob; n_wvcc_out = n_wvcc; pt_row_out = pt_row; @@ -423,7 +477,7 @@ class parameter_table_reduction // compute the probability from the connected components p_teca_variant_array wvcc = mesh->get_point_arrays()->get(this->component_array_name); - if (wvcc) + if (!wvcc) { TECA_ERROR("pipeline error, component array \"" << this->component_array_name << "\" is not present") @@ -452,21 +506,38 @@ class parameter_table_reduction ) ) - // get ar counts and parameter table rows from metadata and - // pass into the infomration arrays + // get ar counts from metadata and pass into the information + // arrays + // don't count the background label as an ar detection. + // TECA by convention uses label 0 for cells out side of + // the segmentation (i.e. the background), and by + // convention this is stored in the first entry. int val = 0; + if (md.get("component_ids", &val, 1)) + { + TECA_ERROR("mesh 1 is missing component_ids") + return nullptr; + } + int wvcc_bg = val == 0 ? 
1 : 0; + if (md.get("number_of_components", val)) { TECA_ERROR("metadata missing number_of_components") return nullptr; } + + val -= wvcc_bg; + n_wvcc_out = teca_int_array::New(&val, 1); + // get parameter table rows from metadata and pass into the + // information arrays if (md.get("parameter_table_row", val)) { TECA_ERROR("meatdata missing parameter_table_row") return nullptr; } + pt_row_out = teca_int_array::New(&val,1); } } @@ -486,22 +557,6 @@ class parameter_table_reduction else if (dataset_1) mesh_out->copy_metadata(dataset_1); - // TODO -- this essentally copies the parameter table into the metadata - // instead could we access the parameter table directly. and use the - // information arrays ar_count, and parameter_table_row? - - // Do property reduction on AR detector parameters and output that - // are stored in the metadata. This operation overwrites the metadata - // in mesh_out with the combined metadata from the LHS and RHS datasets. - /*property_reduce("low_threshold_value", dataset_0, dataset_1, mesh_out); - property_reduce("high_threshold_value", dataset_0, dataset_1, mesh_out); - property_reduce("low_area_threshold_km", dataset_0, dataset_1, mesh_out); - property_reduce("high_area_threshold_km", dataset_0, dataset_1, mesh_out); - property_reduce("gaussian_filter_center_lat", dataset_0, dataset_1, mesh_out); - property_reduce("gaussian_filter_hwhm", dataset_0, dataset_1, mesh_out); - property_reduce("number_of_components", dataset_0, dataset_1, mesh_out); - property_reduce("component_area", dataset_0, dataset_1, mesh_out);*/ - mesh_out->get_point_arrays()->append(this->probability_array_name, prob_out); mesh_out->get_information_arrays()->append("ar_count", n_wvcc_out); @@ -552,7 +607,7 @@ void teca_bayesian_ar_detect::internals_t::clear() // -------------------------------------------------------------------------- teca_bayesian_ar_detect::teca_bayesian_ar_detect() : min_component_area_variable("min_component_area"), - 
min_water_vapor_variable("min_water_vapor"), + min_ivt_variable("min_water_vapor"), hwhm_latitude_variable("hwhm_latitude"), thread_pool_size(1), verbose(0), internals(new internals_t) { @@ -575,12 +630,12 @@ void teca_bayesian_ar_detect::get_properties_description( + (prefix.empty()?"teca_bayesian_ar_detect":prefix)); opts.add_options() - TECA_POPTS_GET(std::string, prefix, water_vapor_variable, + TECA_POPTS_GET(std::string, prefix, ivt_variable, "name of the water vapor variable (\"\")") TECA_POPTS_GET(std::string, prefix, min_component_area_variable, "name of the column in the parameter table containing the " "component area threshold (\"min_component_area\")") - TECA_POPTS_GET(std::string, prefix, min_water_vapor_variable, + TECA_POPTS_GET(std::string, prefix, min_ivt_variable, "name of the column in the parameter table containing the " "water vapor threshold (\"min_water_vapor\")") TECA_POPTS_GET(std::string, prefix, hwhm_latitude_variable, @@ -600,9 +655,9 @@ void teca_bayesian_ar_detect::get_properties_description( void teca_bayesian_ar_detect::set_properties(const std::string &prefix, variables_map &opts) { - TECA_POPTS_SET(opts, std::string, prefix, water_vapor_variable) + TECA_POPTS_SET(opts, std::string, prefix, ivt_variable) TECA_POPTS_SET(opts, std::string, prefix, min_component_area_variable) - TECA_POPTS_SET(opts, std::string, prefix, min_water_vapor_variable) + TECA_POPTS_SET(opts, std::string, prefix, min_ivt_variable) TECA_POPTS_SET(opts, std::string, prefix, hwhm_latitude_variable) TECA_POPTS_SET(opts, int, prefix, thread_pool_size) TECA_POPTS_SET(opts, int, prefix, verbose) @@ -642,8 +697,7 @@ unsigned int teca_bayesian_ar_detect::get_thread_pool_size() const noexcept } // -------------------------------------------------------------------------- -teca_metadata -teca_bayesian_ar_detect::teca_bayesian_ar_detect::get_output_metadata( +teca_metadata teca_bayesian_ar_detect::get_output_metadata( unsigned int port, const std::vector &input_md) { 
#ifdef TECA_DEBUG @@ -652,107 +706,140 @@ teca_bayesian_ar_detect::teca_bayesian_ar_detect::get_output_metadata( #endif (void)port; - // this algorithm processes Cartesian mesh based data. It will fetch a - // timestep and loop over a set of parameters accumulating the result. we - // report the variable that we compute, for each timestep from the - // parameter tables. - teca_metadata md(input_md[0]); - md.set("variables", std::string("ar_probability")); - - // if we already have the parameter table bail out here - // else we will read and distribute it - if (this->internals->parameter_table) - return md; - - // execute the pipeline that retruns table of parameters - const_p_teca_dataset parameter_data; + // if don't already have the parameter table read and distribute it + if (!this->internals->parameter_table) + { + // execute the pipeline that retruns table of parameters + const_p_teca_dataset parameter_data; - p_teca_programmable_algorithm capture_parameter_data - = teca_programmable_algorithm::New(); + p_teca_programmable_algorithm capture_parameter_data + = teca_programmable_algorithm::New(); - capture_parameter_data->set_name("capture_parameter_data"); - capture_parameter_data->set_input_connection(this->internals->parameter_pipeline_port); + capture_parameter_data->set_name("capture_parameter_data"); + capture_parameter_data->set_input_connection(this->internals->parameter_pipeline_port); - capture_parameter_data->set_execute_callback( - [¶meter_data] (unsigned int, const std::vector &in_data, - const teca_metadata &) -> const_p_teca_dataset - { - parameter_data = in_data[0]; - return nullptr; - }); + capture_parameter_data->set_execute_callback( + [¶meter_data] (unsigned int, const std::vector &in_data, + const teca_metadata &) -> const_p_teca_dataset + { + parameter_data = in_data[0]; + return nullptr; + }); - capture_parameter_data->update(); + capture_parameter_data->update(); - int rank = 0; + int rank = 0; #if defined(TECA_HAS_MPI) - MPI_Comm comm = 
this->get_communicator(); - int is_init = 0; - MPI_Initialized(&is_init); - if (is_init) - MPI_Comm_rank(comm, &rank); + MPI_Comm comm = this->get_communicator(); + int is_init = 0; + MPI_Initialized(&is_init); + if (is_init) + MPI_Comm_rank(comm, &rank); #endif - // validate the table - if (rank == 0) - { - // did the pipeline run successfully - const_p_teca_table parameter_table = - std::dynamic_pointer_cast(parameter_data); - - if (!parameter_table) - { - TECA_ERROR("metadata pipeline failure") - } - else if (!parameter_table->has_column(this->min_water_vapor_variable)) - { - TECA_ERROR("metadata missing percentile column \"" - << this->min_water_vapor_variable << "\"") - } - else if (!parameter_table->get_column(this->min_component_area_variable)) + // validate the table + if (rank == 0) { - TECA_ERROR("metadata missing area column \"" - << this->min_component_area_variable << "\"") - } - else if (!parameter_table->get_column(this->hwhm_latitude_variable)) - { - TECA_ERROR("metadata missing hwhm column \"" - << this->hwhm_latitude_variable << "\"") + // did the pipeline run successfully + const_p_teca_table parameter_table = + std::dynamic_pointer_cast(parameter_data); + + if (!parameter_table) + { + TECA_ERROR("metadata pipeline failure") + } + else if (!parameter_table->has_column(this->min_ivt_variable)) + { + TECA_ERROR("metadata missing percentile column \"" + << this->min_ivt_variable << "\"") + } + else if (!parameter_table->get_column(this->min_component_area_variable)) + { + TECA_ERROR("metadata missing area column \"" + << this->min_component_area_variable << "\"") + } + else if (!parameter_table->get_column(this->hwhm_latitude_variable)) + { + TECA_ERROR("metadata missing hwhm column \"" + << this->hwhm_latitude_variable << "\"") + } + else + { + this->internals->parameter_table = parameter_table; + } } - else + + // distribute the table to all processes +#if defined(TECA_HAS_MPI) + if (is_init) { - this->internals->parameter_table = parameter_table; 
+ teca_binary_stream bs; + if (this->internals->parameter_table && (rank == 0)) + this->internals->parameter_table->to_stream(bs); + bs.broadcast(comm); + if (bs && (rank != 0)) + { + p_teca_table tmp = teca_table::New(); + tmp->from_stream(bs); + this->internals->parameter_table = tmp; + } } - } +#endif - // distribute the table to all processes -#if defined(TECA_HAS_MPI) - if (is_init) - { - teca_binary_stream bs; - if (this->internals->parameter_table && (rank == 0)) - this->internals->parameter_table->to_stream(bs); - bs.broadcast(comm); - if (bs && (rank != 0)) + // some already reported error ocurred, bail out here + if (!this->internals->parameter_table) + return teca_metadata(); + + // check that we have at least one set of parameters + unsigned long num_params = + this->internals->parameter_table->get_number_of_rows(); + + if (num_params < 1) { - p_teca_table tmp = teca_table::New(); - tmp->from_stream(bs); - this->internals->parameter_table = tmp; + TECA_ERROR("Invalid parameter table, must have at least one row") + return teca_metadata(); } } -#endif - // some already reported error ocurred, bail out here - if (!this->internals->parameter_table) - return teca_metadata(); + // this algorithm processes Cartesian mesh based data. It will fetch a + // timestep and loop over a set of parameters accumulating the result. we + // report the variable that we compute, for each timestep from the + // parameter tables. 
+ teca_metadata md(input_md[0]); + md.append("variables", std::string("ar_probability")); + + // add attributes to enable CF I/O + teca_metadata atts; + md.get("attributes", atts); + teca_array_attributes prob_atts( + teca_variant_array_code::get(), + teca_array_attributes::point_centering, + 0, "unitless", "posterior AR flag", + "the posterior probability of the presence of an atmospheric river"); + + atts.set("ar_probability", (teca_metadata)prob_atts); - // check that we have at least one set of parameters unsigned long num_params = this->internals->parameter_table->get_number_of_rows(); - if (num_params < 1) - { - TECA_ERROR("Invalid parameter table, must have at least one row") - return teca_metadata(); - } + teca_array_attributes count_atts( + teca_variant_array_code::get(), + teca_array_attributes::no_centering, + num_params, "detections", "number of AR detections", + "number of detections for the parameter table row at the same index in " + "parameter_table_row"); + + atts.set("ar_count", (teca_metadata)count_atts); + + teca_array_attributes row_atts( + teca_variant_array_code::get(), + teca_array_attributes::no_centering, + num_params, "row index", "parameter_table_row", + "the parameter table row corresponding to the value at the same index " + "in ar_count"); + + atts.set("parameter_table_row", (teca_metadata)row_atts); + + md.set("attributes", atts); return md; } @@ -773,7 +860,7 @@ std::vector teca_bayesian_ar_detect::get_upstream_request( std::vector up_reqs; // get the name of the array to request - if (this->water_vapor_variable.empty()) + if (this->ivt_variable.empty()) { TECA_ERROR("A water vapor variable was not specified") return up_reqs; @@ -785,10 +872,12 @@ std::vector teca_bayesian_ar_detect::get_upstream_request( std::set arrays; if (req.has("arrays")) req.get("arrays", arrays); - arrays.insert(this->water_vapor_variable); + arrays.insert(this->ivt_variable); // remove what we produce arrays.erase("ar_probability"); + 
arrays.erase("ar_count"); + arrays.erase("parameter_table_row"); req.set("arrays", arrays); @@ -870,7 +959,7 @@ const_p_teca_dataset teca_bayesian_ar_detect::execute( // and always pass the input mesh down stream ::parameter_table_request_generator request_gen(parameter_table_size, this->internals->parameter_table->get_column(this->hwhm_latitude_variable), - this->internals->parameter_table->get_column(this->min_water_vapor_variable), + this->internals->parameter_table->get_column(this->min_ivt_variable), this->internals->parameter_table->get_column(this->min_component_area_variable)); teca_metadata exec_md; @@ -910,12 +999,12 @@ const_p_teca_dataset teca_bayesian_ar_detect::execute( p_teca_latitude_damper damp = teca_latitude_damper::New(); damp->set_communicator(MPI_COMM_SELF); damp->set_input_connection(dss->get_output_port()); - damp->set_damped_variables({this->water_vapor_variable}); + damp->set_damped_variables({this->ivt_variable}); p_teca_binary_segmentation seg = teca_binary_segmentation::New(); seg->set_communicator(MPI_COMM_SELF); seg->set_input_connection(damp->get_output_port()); - seg->set_threshold_variable(this->water_vapor_variable); + seg->set_threshold_variable(this->ivt_variable); seg->set_segmentation_variable("wv_seg"); seg->set_threshold_by_percentile(); @@ -957,6 +1046,8 @@ const_p_teca_dataset teca_bayesian_ar_detect::execute( pr->set_communicator(MPI_COMM_SELF); pr->set_input_connection(pa->get_output_port()); pr->set_reduce_callback(reduce); + pr->set_finalize_callback(reduce); + pr->set_stream_size(2); pr->set_verbose(0); pr->set_data_request_queue(this->internals->queue); @@ -974,29 +1065,17 @@ const_p_teca_dataset teca_bayesian_ar_detect::execute( std::dynamic_pointer_cast( std::const_pointer_cast(dc->get_dataset())); - if (!out_mesh || reduce.finalize(out_mesh)) + if (!out_mesh) { TECA_ERROR("Pipeline execution failed") return nullptr; } - // extract a copy of the output attributes - teca_metadata &out_md = out_mesh->get_metadata(); 
- - teca_metadata attributes; - out_md.get("attributes", attributes); - - // insert the metadata for the ar_probability variable - teca_metadata ar_probability_metadata, ar_probability_atts; - ar_probability_atts.set("long_name", std::string("posterior AR flag")); - ar_probability_atts.set("units", std::string("probability")); - // add metadata for the ar_probability variable - attributes.set("ar_probability", ar_probability_atts); - - // overwrite the outgoing metadata with the new attributes variable - out_md.set("attributes", attributes); + // pass arrays through + out_mesh->shallow_append_arrays(in_mesh); // reset the pipeline control keys + teca_metadata &out_md = out_mesh->get_metadata(); out_md.set("index_request_key", index_request_key); out_md.set(index_request_key, index); out_md.set("time", time); diff --git a/alg/teca_bayesian_ar_detect.h b/alg/teca_bayesian_ar_detect.h index 5046b2825..c2805cf99 100644 --- a/alg/teca_bayesian_ar_detect.h +++ b/alg/teca_bayesian_ar_detect.h @@ -10,8 +10,31 @@ TECA_SHARED_OBJECT_FORWARD_DECL(teca_bayesian_ar_detect) -/// +/// CASCADE BARD atmospheric river detector /** +Given a point wise IVT (integrated vapor transport) field and a training +parameter table computes the point wise probability of an atmospheric river +using the CASCADE BARD algorithm. + +Required inputs: + + 1. IVT (integrated vapor transport) array on a Cartesian nesh. + 2. a compatible parameter table. columns of which are : min IVT, + component area, HWHM lattitude + +The names of the input varibale and columns can be specified at run time +through algorithm properties. + +Produces: + + A Cartesian mesh with probability of an AR stored in the point centered + array named "ar_probability". The diagnostic quantites "ar_count" amd + "parameter_table_row" are stored in information arrays. + +For more information see: + +Detection of Atmospheric Rivers with Inline Uncertainty Quantification: TECA-BARD v1.0 +O'Brien, T. A et al. 
Geoscientific Model Development, 2020 */ class teca_bayesian_ar_detect : public teca_algorithm { @@ -27,10 +50,10 @@ class teca_bayesian_ar_detect : public teca_algorithm TECA_SET_ALGORITHM_PROPERTIES() // set the name of the input array - TECA_ALGORITHM_PROPERTY(std::string, water_vapor_variable) + TECA_ALGORITHM_PROPERTY(std::string, ivt_variable) // set the names of columns in the parameter table. - TECA_ALGORITHM_PROPERTY(std::string, min_water_vapor_variable) + TECA_ALGORITHM_PROPERTY(std::string, min_ivt_variable) TECA_ALGORITHM_PROPERTY(std::string, min_component_area_variable) TECA_ALGORITHM_PROPERTY(std::string, hwhm_latitude_variable) @@ -71,9 +94,9 @@ class teca_bayesian_ar_detect : public teca_algorithm void set_modified() override; private: - std::string water_vapor_variable; + std::string ivt_variable; std::string min_component_area_variable; - std::string min_water_vapor_variable; + std::string min_ivt_variable; std::string hwhm_latitude_variable; int thread_pool_size; int verbose; diff --git a/alg/teca_bayesian_ar_detect_parameters.cxx b/alg/teca_bayesian_ar_detect_parameters.cxx index 7d0a3e04e..ca58d5c85 100644 --- a/alg/teca_bayesian_ar_detect_parameters.cxx +++ b/alg/teca_bayesian_ar_detect_parameters.cxx @@ -13,885 +13,889 @@ using parameter_t = double; parameter_t quantile_array[] = { - 81.0685551284554, 95.02868052282781, 94.1434367480948, 89.62792282077365, - 82.11626055995964, 97.70583327525188, 89.0799920348471, 80.18483683106903, - 95.15455089031937, 97.27239861046512, 96.61769538124005, 81.35449437902199, - 94.6681046584129, 88.39297127741123, 97.35926288969456, 94.9571584065397, - 81.2487697261177, 88.32261539830697, 88.44194524391638, 80.05079144466077, - 98.75857256938978, 93.81358046517933, 97.66102522539491, 80.10435878440249, - 95.49643716672726, 89.9958846675944, 94.80156576896808, 92.38507239456384, - 83.2486844150599, 94.62705651173474, 93.81031235088693, 93.09947557000667, - 92.39483172638656, 90.60434296399913, 
94.78836518587121, 98.44878186517563, - 87.62629043706525, 94.17145853062942, 94.08248833511446, 94.44448786028707, - 80.07583075394285, 88.51171049678426, 98.93590284021971, 97.93472691302239, - 97.1641435208505, 87.81751514431377, 93.1577381078067, 80.11447159820204, - 80.3207330947046, 97.72476596752449, 90.53602977042246, 97.00915756036576, - 97.12339128148967, 94.96859937711531, 80.35329419089304, 88.37143226646509, - 95.92896990145469, 98.93029377645856, 80.20937514779553, 93.64797584115837, - 90.40901317178316, 92.12830519375194, 81.09217869078832, 93.38536703922588, - 93.65678786067193, 93.36179791801815, 82.03652137433059, 93.27152463823431, - 94.55342503111537, 88.99213927743311, 88.42144726890379, 80.09995688920858, - 87.77439073193915, 90.53527944529084, 80.61302211841071, 94.63908234416323, - 88.65307846993484, 83.04194925930013, 80.30937743239942, 93.89695229406051, - 80.25581989298539, 80.09914690368886, 93.08499256061285, 94.05924848023346, - 88.73377791214419, 94.73942215816054, 89.72021892166208, 94.94212002544967, - 96.96221058505533, 89.77716045355895, 96.98613118029847, 89.31470082284366, - 90.81802573870955, 98.38951647718672, 88.5991804403819, 93.9188225883629, - 94.02644578430404, 98.12317333629775, 97.25604902885581, 80.1539341403874, - 90.75095334691619, 80.60076823268841, 94.53535935885047, 89.9835653097416, - 88.80967141625479, 96.87616871485083, 81.40807614907285, 96.07638463266697, - 90.45631259153834, 92.14495063830131, 95.02947563928292, 90.09485919960412, - 80.2885132671427, 93.99711865310671, 92.4036223938414, 80.0434282781593, - 80.90328878029338, 91.74076382700966, 80.39500462053988, 97.06117799746237, - 94.57799877180008, 80.3836535103009, 98.81402587558796, 80.01829398084185, - 81.28666606939679, 97.77326056225742, 90.79995995987379, 93.62244297771767, - 93.17235327620901, 93.2255045661605, 93.47914655232154, 93.12523687586209, - 92.59493219627605, 93.48994699485186, 89.90061134461823, 93.67013083486245, - 93.11595970520476, 
93.2692072954135, 93.11761814636971, 92.68507237838652, - 89.92505337322469, 93.55740862008835, 93.51909139591164, 93.47795122808931, - 93.44024905083873, 93.4413141519191, 90.0104837473299, 92.93728644926006, - 93.55401927337734, 90.01817030515579, 93.52827963096317, 93.62663173977292, - 93.11871660926809, 93.54125988834744, 93.45512682665212, 90.01889634781286, - 93.43731809420748, 93.48465962857271, 92.9620436477162, 93.59042825947635, - 89.90587647704268, 93.56929962725728, 93.31822436786574, 93.5455933666205, - 93.48835230436863, 90.01484503797369, 93.4346234221442, 93.00330508796856, - 92.733949120356, 93.62932869300509, 93.53313480605794, 93.39756998636915, - 92.58055200929687, 93.51202660911339, 93.57299625156034, 93.44370099471789, - 93.34189600595936, 93.49977395009094, 93.21306714208933, 93.19183440128913, - 93.33127999072575, 93.23671688343822, 92.73061384700868, 93.12793657010845, - 93.09611127506099, 93.55099554496012, 93.54683391793995, 93.37559134497923, - 93.2588652654513, 93.10715386898161, 89.91164813301887, 92.67265410086124, - 93.54890119451656, 93.1338315521397, 93.11330264244728, 92.89658116789937, - 93.58142276913264, 93.56367591704925, 93.38382604126186, 91.23485154769477, - 92.08579289404842, 93.58478239345965, 93.53445715299561, 93.94113108649701, - 92.87865078896193, 93.43154886144153, 89.82560646769683, 89.93181653053315, - 93.42204631423941, 93.24815447584614, 90.12632036551922, 93.46326205468924, - 92.6303509549966, 93.55369042807712, 93.48665346180682, 93.7279774322528, - 93.56011769227052, 93.54899840245727, 93.92396046399205, 93.1119469974125, - 93.22931614389644, 93.04107155174823, 93.19581047305445, 92.71257157995115, - 93.11706920661638, 92.99491127124875, 93.46888180991097, 89.91237099849322, - 89.92477194436123, 93.3472525051506, 93.42507682351162, 92.67742522196144, - 93.48859203300599, 90.01028181306397, 92.77686205903129, 93.6330180620268, - 91.77259414916108, 92.82948251544275, 93.09862837588366, 92.60104427085273, - 
89.94584052069042, 93.54869581414246, 93.48979841910531, 93.11448960949994, - 93.59676893770144, 93.53782119561333, 93.46952521289684, 92.95487650417984, - 92.75801604980177, 93.67978463936598, 93.50918816070505, 92.59340587262807, - 93.14590004876358, 92.92046355463043, 93.12675720857015, 89.8628035963283, - 92.57665393439102, 80.63122577568717, 92.5343953510779, 91.43690935400414, - 92.64185837491713, 80.11574704079727, 92.5522476844849, 90.1520825228069, - 81.85715589349925, 93.09148643197283, 80.0480652383071, 92.6507726944138, - 91.99083929700397, 91.56368561251631, 92.78002973721684, 80.02653393158965, - 93.07711311208044, 91.88919945259137, 80.24214347356458, 82.04681427272014, - 92.31168121014447, 80.1130139400957, 92.5883458457424, 80.18367624327058, - 91.93582029669525, 90.35673473568198, 82.30145287952578, 90.47083072430213, - 80.44482641390776, 82.04017038805846, 80.11788758390443, 80.11381171953924, - 93.09037166431945, 80.41701489728482, 92.22797929172916, 80.04082297518647, - 81.23076755739322, 92.17806494367656, 92.85645963192395, 80.22102805938619, - 92.62960838809087, 80.09595507372318, 80.10000614366115, 81.02674782164745, - 80.05870811158343, 92.73895563522284, 93.04118784940152, 93.65976170918073, - 92.60191458315339, 92.46995662384202, 91.76465011726296, 80.02383868555714, - 80.1263818841329, 90.04479090490544, 80.36344743553508, 90.41841492067498, - 92.55008583395966, 90.74515851685516, 80.0212526375857, 90.07454529577836, - 92.04520524463784, 80.03127902293663, 90.69875913552127, 90.59522753159087, - 81.8949836752415, 92.00263719597665, 92.62329127736662, 80.61802169573875, - 80.27578624994854, 92.70448769708682, 92.38776049452264, 92.76050980862503, - 80.13113622414555, 80.78475697107709, 80.00475383686374, 90.26348559176736, - 90.46484968395725, 92.77536281235051, 82.60349247004571, 93.01512839195057, - 80.16394746901283, 93.29953067057323, 80.05713578602919, 81.77933718002758, - 92.2853128815352, 80.32777664501089, 80.25619456716142, 
81.1433272190947, - 90.56469331714491, 92.62192547600586, 92.56458582879587, 80.13466794277969, - 92.44024731485048, 92.50268974142466, 93.07530965968768, 92.34495208766765, - 92.3082802377836, 90.35267314672151, 80.35489052742543, 81.8568974129581, - 80.11769857079034, 92.94695175606542, 92.79947727950737, 92.66773832478698, - 92.28658327784788, 92.61961770344809, 92.28496454830727, 92.62768039452558, - 92.4909839458329, 92.68796628895906, 92.23509650288153, 80.03863449118208, - 80.03489359048038, 80.02526078675837, 80.08582003634099, 80.06145648197763, - 92.44975371823041, 93.5928230665974, 80.01256541576419, 80.20961433286098, - 92.07643212791173, 90.08023312454455, 80.49768621473669, 91.96806983437254, - 80.2542701520305, 92.98006092969922, 93.4267515123444, 81.34652783569388, - 91.58851866534143, 92.18597294961017, 94.26411597246906, 92.19242901689381, - 96.0922992317457, 92.12804530962526, 92.16054470282114, 92.11409888277701, - 92.2140500695622, 92.15721087501464, 94.49002991805415, 96.1155789808302, - 92.13077624468345, 91.86868418567106, 92.06056480279507, 96.33101119861408, - 96.29266138280876, 92.13982243868551, 96.32686666756776, 94.44035007799118, - 94.71596532369199, 92.02884110858636, 96.43287004285344, 92.12954403211911, - 96.15291075730099, 96.35165875881914, 96.42408315585652, 92.13775885974644, - 96.28448140227346, 92.21844561829724, 92.13530125615075, 92.08810087130279, - 92.15342078155984, 92.16077932216213, 92.1081512191267, 94.65917834161101, - 92.49394591203746, 92.1672779687036, 96.26893841091653, 96.33392266197525, - 96.32949833220096, 92.11298426165146, 96.33011008467714, 94.50197732022558, - 96.30898812250214, 92.22746848076805, 94.55162745792603, 92.26609909339906, - 96.23182696024696, 91.55810931923615, 96.27048464546593, 92.05015628101796, - 92.07669816720744, 91.78200785011367, 94.54496682104873, 92.0361302997422, - 92.12434943105441, 91.75663476946345, 92.21927870356555, 96.33119623362245, - 96.33256706207008, 91.47489270238381, 
92.43343973640845, 92.22214078654362, - 94.56666767924688, 92.21609946823759, 96.32073160486601, 92.83857773458446, - 91.69209476917236, 92.18528456573966, 92.10796810419855, 96.2043524265151, - 92.12389267809675, 94.64160813351687, 91.82456135642789, 96.37187803127108, - 92.40210742671447, 96.17335546023035, 92.04060436738911, 96.09352672358793, - 94.56317172936231, 96.31630146076395, 96.28726893309383, 91.85936229504725, - 96.37091126050753, 92.21409873005942, 96.2871313971717, 94.63391966570174, - 92.41895409875296, 94.24514548491663, 92.92668825567938, 91.71104605984695, - 96.2907659146643, 92.2284285697302, 92.22532471055686, 91.96964368057186, - 94.52722402729945, 92.03023952671221, 96.15991736191518, 92.2135561649041, - 92.18242047781322, 92.27197440062677, 91.87354039004258, 92.24615657461892, - 91.89113861390281, 91.84530428034515, 96.20779308903393, 92.64255629872609, - 91.60718035410733, 92.1232448450148, 92.11793072704279, 94.59355418143456, - 92.16547365507162, 96.15085173707824, 94.53128418459725, 95.97287212598314, - 94.64761761110722, 96.15271570226354, 96.38503321157246, 92.20045274175722, - 92.1397537816331, 91.44824404434316, 92.00193365899233, 96.24042271701168, - 96.1805520792421, 96.34525091869374, 96.2586952965483, 96.32796432313361, - 85.37040190025358, 90.32579108381744, 95.52835967177269, 90.68073786493066, - 89.94347264758133, 88.78183682216924, 91.23396702486615, 90.27959190726654, - 95.83770017924166, 90.53539380827206, 89.3723453222757, 95.45607059913256, - 95.39992764027257, 96.33451281280665, 95.47962599791788, 91.16683429401108, - 95.33619219445198, 95.44765911852554, 95.07010038374733, 95.51016996088451, - 96.34338550966703, 95.52150434478546, 90.35645635756721, 90.68264554550034, - 90.51883935493194, 95.35642730277503, 95.50224296639333, 95.33059491401195, - 90.11806228091883, 85.38943873163566, 95.56624120443135, 94.95281566511645, - 89.87915246761817, 85.6818294261346, 91.13078723290944, 96.19854377634172, - 95.37385394596471, 
95.72178757012792, 85.52261387970526, 95.50042568162243, - 85.34921383784356, 96.33135601529393, 95.49979582414275, 90.83046497474, - 95.73485909685373, 95.42906304289914, 94.37650452764218, 90.95966314234512, - 95.50473306347384, 95.59506315691571, 90.29754773123517, 91.05668056721593, - 95.62823674682821, 89.40703553243846, 96.32302369169537, 95.7474834478683, - 95.30982052286956, 95.298891667603, 90.1263023725596, 95.37497595574965, - 90.90556037471382, 95.3972091503426, 95.53683133110786, 95.49960745093794, - 94.37785774801533, 89.8752248746269, 91.02925120362814, 90.51710647611849, - 89.9895876102595, 91.09265132073924, 89.8946907653918, 95.84557809658757, - 95.42101367223776, 96.21329843345859, 90.44043023531277, 96.28635002208806, - 95.45517944744056, 95.42466643876634, 96.23881122891893, 95.36960212077086, - 95.6075769232754, 95.551226258924, 90.27833575016523, 95.35513742033545, - 90.36821524279162, 95.65189829474541, 90.1075651501522, 95.5970995784215, - 95.43000417049217, 89.76434468382477, 96.37779888289569, 90.13787867726086, - 95.77988003986368, 91.08485916356867, 89.7633098782393, 95.44348921676615, - 96.05657067144597, 89.46114891367336, 95.54521321881347, 90.32058945134207, - 95.63521258382256, 94.57146913539106, 90.51153899798582, 90.79138183797194, - 95.54036938463211, 95.53331190822372, 85.413422747038, 95.14051369978482, - 95.40698806509987, 91.11252167225594, 96.41004218252331, 90.40029656179559, - 95.58879523589118, 95.43112923322519, 90.71230199584284, 94.30095516557272, - 95.54854969800076, 96.25424117235787, 90.64655791573347, 95.49843329654146, - 95.44805364535269, 90.61662442310147, 95.50102179668339, 95.56151290841245, - 95.41877313192991, 95.58060340326855, 85.41408183020694, 90.63084968404387, - 98.92151240960004, 98.24887209109883, 97.47144655327384, 98.15244126427774, - 98.08259611683133, 98.27014846444109, 91.31511364403157, 93.04088504403703, - 97.80682287708288, 97.53441565185675, 86.93106218199496, 82.80014989999734, - 
92.7912580843689, 87.01646730846522, 97.4880954914916, 97.4340644298671, - 98.15412883995651, 98.44907845324686, 98.1941702772595, 92.84495187135323, - 80.78037144274197, 98.34628508739249, 92.83280112337961, 87.05373956940568, - 92.85496868936207, 97.98630075408771, 98.22052373536155, 98.17839184460762, - 93.41571659769396, 98.93109051431934, 92.31243770863506, 93.3053734451769, - 94.4838081185497, 97.42610420323503, 93.09102439018814, 98.38623589889477, - 97.62273708632334, 98.25342501776147, 97.85106389292754, 98.97020353339353, - 93.31533320418251, 98.2230972826201, 81.21730567814164, 97.76561972274824, - 97.88291018348512, 93.04354217637515, 97.50564259898992, 93.45548580674571, - 98.2520454134884, 98.07567697469395, 97.47477391864876, 98.07387633089974, - 93.44966435604553, 98.47340315744319, 98.36656733713215, 93.27928194582512, - 97.84381192218031, 97.30921807381927, 94.59151544349918, 93.42853708629944, - 87.62969960044002, 98.37950200545261, 80.03204114756299, 80.24104835833923, - 92.7183092840457, 89.36599872352751, 92.79230949789054, 98.88731122473502, - 98.041561260307, 97.74124285606291, 98.06722524004155, 98.97070999536555, - 98.29143762976209, 93.17555692528096, 97.8842381776009, 98.22197670609391, - 97.61612050520672, 93.2283361786624, 98.39276785650874, 97.40794531718794, - 89.60279108811136, 86.98666107829584, 98.09491879940028, 92.80575347550317, - 98.22482821448199, 98.13758106394343, 92.88696080278052, 97.85017456842155, - 92.89422931243591, 98.15464220861769, 92.44437227741787, 93.27880077993368, - 93.01215626989587, 98.11217789448918, 98.04000828094625, 92.73569168082399, - 98.048976942072, 97.87224856275566, 97.77864237286174, 98.2381497517272, - 93.19834882902275, 92.82987881125663, 93.5381396485154, 98.22669828104927, - 96.60417563891495, 98.00660232614932, 98.96150368366241, 98.15113708357734, - 89.09639017275389, 98.1615148573908, 98.00921710003732, 89.38158661257555, - 92.82627101873162, 98.34303275118955, 98.05908839617459, 
98.77402486627173, - 92.8137958439152, 92.91805386611207, 93.15564517932815, 98.01845311685413, - 97.66222493620128, 98.14545595654384, 98.13006878731862, 95.89947569691192, - 97.78112311001411, 98.17973581526502, 97.41286022486247, 93.14641301768879, - 89.01314219461683, 98.17354692471343, 90.6783590817948, 93.14416618073227, - 89.56704992243108, 89.71564928433912, 87.67034099564856, 89.21308214511477, - 91.61166398295147, 90.54232232830603, 89.27771868200128, 88.82030424931087, - 89.28975177048314, 87.26327569660775, 87.70354941749548, 89.97815205525687, - 90.15108914624655, 88.11396860818046, 93.10563262358477, 87.49861586130054, - 90.85606157298221, 93.09520742005391, 87.96627879378242, 89.2627004898397, - 89.92880783508254, 87.64702116534427, 89.9356519330834, 88.76394962171513, - 90.15418690850778, 89.92290403032547, 89.75089822538503, 88.59658583142014, - 90.7777611460887, 88.81760587319793, 88.20079200852268, 90.60689249586082, - 88.25096388396902, 87.8650290681222, 93.45742742390499, 87.1619820948291, - 88.99096469816618, 87.60672383339971, 90.4726126331481, 87.5238751484116, - 87.54374326530055, 83.11413277276698, 89.27814208777095, 92.85061944840113, - 88.22862075108206, 88.28372505585281, 87.72988195973201, 90.56423820008284, - 87.64368105110051, 89.57811270867452, 90.6580792829842, 90.53828183024419, - 89.88783568119646, 88.40835897036573, 89.2124813475154, 89.46782652687433, - 89.97495081192392, 88.94189290476345, 87.61567761170457, 87.08357656078293, - 89.08127328641416, 88.46255140837489, 88.6028628416403, 87.79588747289051, - 89.21223344226858, 89.54211316319515, 90.36936609574914, 90.4351505492097, - 90.22529346890548, 88.01802051646517, 93.32174952332892, 88.65954988085868, - 90.318430504031, 88.69822288302085, 90.25603504699058, 98.48403045732432, - 87.81166365436836, 88.37785575921991, 90.35395055961384, 90.9273872901596, - 89.13694479085324, 89.40610653774961, 82.99509038092033, 89.26776560040668, - 93.0227786909515, 91.52354837705529, 
89.78943016236146, 89.64972445278177, - 90.13935605982336, 92.99550193065038, 83.0924904107769, 90.91072488469493, - 87.69007659741176, 88.2947227807446, 89.10943217545709, 93.27319554502262, - 91.83407560368748, 90.65239464533806, 86.65124800424165, 90.53056567901088, - 83.43176841773618, 90.73630537488383, 90.2627552570245, 89.8778619979181, - 89.3650626527145, 90.29494283485579, 88.62665693994747, 90.4073246813736, - 82.97985519610266, 88.76350783715552, 88.58603490840254, 83.05232583121524, - 90.69514684320464, 93.18433146662619, 89.4414508408465, 88.33410520141358, - 89.90096938929567, 88.7730678392098, 90.06213872831223, 90.28746792701696, - 90.20810175250215, 88.41604001772573, 90.77944776946126, 87.54147669043203, - 92.02227217888948, 93.60485909660024, 93.23604970742305, 93.83575940607905, - 93.93286362398369, 90.52680262378851, 91.88473955501823, 94.03034470194139, - 92.28304581560425, 91.76713982511593, 93.92603421382756, 93.49179081671926, - 93.56448438195798, 92.1213759922243, 90.42057904668697, 92.2814146931995, - 93.46915160943678, 92.56253776587397, 91.70834478560911, 93.48682460228585, - 92.2474904136331, 92.73839982129249, 93.9739045206421, 91.87811740130685, - 90.97343289532081, 93.54752235815431, 91.07025885257286, 92.48381103511272, - 91.07384249180663, 92.50850476891036, 93.50067538551934, 93.94394336946468, - 93.41956123983562, 93.48503594455435, 92.14347967380017, 93.56796748568257, - 94.07332858805468, 91.6964438288818, 92.72828690669385, 90.1689186229028, - 93.86508677124726, 93.4727436741526, 95.57343532956104, 91.0031764321414, - 93.53637389779774, 91.45144202412283, 92.79964341227263, 90.88004368688767, - 92.03077048139912, 91.65291911135319, 92.35727731013654, 90.32323589099104, - 90.08782584306259, 91.98150132110652, 90.30459048027211, 93.55012148585298, - 90.34141509597924, 93.50688579697135, 91.32300615140633, 91.61911064093171, - 91.84722031291048, 93.50248554472134, 92.175405630648, 93.98272508780892, - 93.6153271334305, 
92.09549470078112, 92.3646401860563, 90.36201745216042, - 92.91813064476332, 94.00875474934803, 90.2094047663066, 93.19433242762335, - 91.53911408359693, 92.54279613864699, 93.19254151354161, 92.57588176355986, - 94.59507526753936, 93.55834853510233, 91.61814335414316, 93.51877317198827, - 91.21081984494334, 93.79621974987683, 95.54143258973996, 93.41462924401563, - 94.06251859977004, 93.17704932843648, 93.54864536481163, 93.54614749332134, - 94.00211853216321, 93.9730095072521, 93.24923650885222, 90.11717193225698, - 92.6959316236152, 93.44991029486368, 93.17452808773996, 92.90906696659717, - 93.56466758383243, 92.29973854911802, 91.89578024513465, 92.27493685513622, - 91.43685349291553, 93.80472350522139, 93.57779928588155, 93.49876888167924, - 93.49178623396904, 93.4792681512799, 93.28789192156239, 91.55324566663255, - 90.35669589821957, 95.61913845813721, 90.2917619505285, 93.82345715590795, - 91.42779827264035, 92.30216548226218, 91.75582068681744, 91.54032291172824, - 93.46319531374003, 92.7340698771428, 92.50781597548551, 92.28505037190706, - 93.61568710436087, 91.8422706088649, 94.6285754750658, 92.31809409197464, - 94.04843009063896, 93.5682621641641, 93.51371186509319, 90.17850019545465 + 96.80345154841372, 96.27922469451215, 91.59660258379544, 96.69980903923916, + 95.43146266470053, 92.08840676612955, 96.93707229502259, 98.96020224527088, + 92.55573342513593, 93.2273644008184, 82.85474659840109, 98.75931137224634, + 95.73040210662207, 98.4406407264504, 95.7519270554092, 95.27687102081748, + 96.7985917507294, 98.79002464143997, 96.53163126890392, 83.30941008019472, + 96.9694821404558, 93.06001835782293, 96.31444546738913, 92.29508703692227, + 84.20421802015734, 95.37967886051851, 96.72253436251013, 83.5969424768726, + 95.35946402996436, 95.47214698270187, 85.65404084455744, 95.21046770695736, + 97.102205902178, 81.15721987588117, 96.6191698782706, 95.55761182694164, + 92.35123409738605, 95.17817973166765, 98.05774020178373, 91.3042529561893, + 
95.30241202301393, 82.35307891001227, 93.00850483140121, 84.03325690762259, + 97.21199405881151, 94.17699168218239, 96.42285993365064, 84.36132077311235, + 97.02347254985052, 95.50813538165869, 96.47987284234672, 96.69569607053643, + 95.5605380520094, 82.70556594015092, 82.81946491178032, 98.11770992430444, + 97.27795524118531, 96.64489887704583, 97.11071641017294, 96.65565263754283, + 95.08575267633248, 95.80850759512208, 92.22360366266427, 95.2358384104976, + 97.05103273511074, 95.24199089075076, 82.28133471502277, 91.77993420317512, + 98.49688453273431, 97.16304863066438, 92.14300487187727, 97.10314533919767, + 97.30370140251658, 91.77047442199836, 83.08518851719903, 90.34882920852647, + 96.60946700536375, 92.20611342556771, 95.7717144088268, 80.49951882485026, + 96.88599970659821, 94.98823854306384, 86.43280697671182, 94.99131635587152, + 97.83063070426407, 82.55802999053884, 96.39463955401332, 81.7692423137813, + 81.37929044097672, 96.88439672038544, 83.62286916276247, 83.46650169161953, + 90.87096312815922, 82.67016992522593, 92.13432358191285, 84.62549635912188, + 82.8489902843626, 83.22319245854747, 95.10078442662818, 92.76363780929259, + 94.91338925050867, 83.25656534364641, 96.40211310805674, 94.9486778167881, + 82.69704118079045, 96.3074225952992, 92.67131432007844, 96.36302028680865, + 95.58986968480305, 82.31691274664831, 95.200819291442, 95.36922101298713, + 96.76303141832123, 82.84527985839634, 95.74208096007423, 84.32487212260591, + 82.39732757301131, 95.22913961669343, 95.19836463789204, 95.59094619364488, + 95.19200809440018, 96.5287083538506, 96.39349416658109, 82.9450300860394, + 95.31564371613466, 95.22988318793718, 83.66035913638108, 96.53146722722146, + 94.24970437281213, 91.04123726409755, 90.51706272806666, 91.9822030134276, + 93.16348826374254, 93.78435226923334, 91.04639181061972, 91.12918340856557, + 91.19632277751089, 91.17621668246183, 91.1603858325578, 91.00547029155423, + 93.72990777613343, 95.01562986577092, 91.11211933569963, 
93.46801827428872, + 93.56326899147473, 90.97427025978676, 93.53630007903774, 93.99236752553945, + 90.89356759997627, 93.09011810436544, 93.08534544867535, 92.94807064140987, + 93.80867209255206, 93.05748549927469, 93.97977281230129, 90.7133691644792, + 91.22683970065756, 94.10279224511098, 90.83595150930157, 93.08791991125773, + 93.07879322076566, 91.0591163534394, 91.058977645417, 91.0530059996385, + 91.22039140114725, 91.60956957169857, 93.0393322117572, 91.05701092077457, + 94.0519903655626, 91.8985356474571, 91.32138688174265, 91.03454472082261, + 91.20803307012649, 93.7029154588968, 91.18262906496582, 90.63121045443482, + 91.13932478302192, 91.02682335781894, 95.08548437451219, 90.8348355493102, + 90.9827463149113, 93.07898530576239, 91.19787503083813, 91.06940683527195, + 91.17064958456692, 91.11684837170404, 91.09707850006359, 93.10981287123751, + 94.33735691551745, 90.99793014579504, 93.15190093039149, 93.97650341388349, + 94.24971020356057, 92.80388591152327, 90.89232413428915, 93.0772203893626, + 91.04304532673324, 90.8730384065544, 90.80787426322196, 93.58676852568455, + 93.55021240572083, 93.0635325579455, 91.00615928311612, 91.14588585953499, + 91.14184967861208, 93.18027680962541, 90.66977800645512, 94.0213110893482, + 91.01059721944532, 93.50183061979975, 90.97315208175807, 91.05688432662818, + 90.63581354103096, 94.21773316240512, 90.69249177522359, 90.74410584983278, + 91.24987117631173, 94.04371200633199, 90.77411728929802, 94.35576436703738, + 93.92032062661214, 93.50733750490542, 91.03805840968076, 91.09905373016433, + 91.03805410456717, 94.08078406674134, 93.50295858860734, 90.99941421301104, + 91.09392893006472, 91.01220173008002, 93.26434208716006, 91.03085668913626, + 93.06982190059666, 93.44654885824679, 94.02852674416647, 91.11594613354373, + 90.6432030799417, 91.01443866015131, 91.16282670425174, 91.08744771036655, + 92.92102966062987, 91.0809189697851, 91.14310699774806, 93.06376741944914, + 93.88590323741299, 91.1758052618241, 
91.04923279259617, 91.2510386000364, + 91.17114158557845, 93.9004763763306, 90.98116362206984, 91.1571488718631, + 93.34313931387493, 91.11716498883108, 91.12559219795718, 90.97940621712968, + 91.42827488777525, 90.6034898709894, 80.20527940110892, 80.39296585293233, + 94.98115557387912, 92.42796912742783, 95.12931287184462, 80.7094028926409, + 90.7715316983666, 91.03995780702878, 80.29793000330415, 96.21370098355176, + 94.1273359688334, 96.21249250296276, 83.27519217944287, 90.3122318881175, + 93.86488684725961, 96.36273839315493, 80.15287093665125, 91.81306440162017, + 80.34384742568118, 95.85611419833558, 90.50990593106566, 95.17055012032401, + 94.12719931859165, 91.49018212242915, 90.46589496889067, 95.9263193094749, + 82.61943584069445, 80.52679305194721, 91.66200500419612, 80.65988771567153, + 96.20014803398962, 80.12964270957038, 80.09578402335985, 96.36358711327414, + 80.11360578464529, 96.28120114072163, 80.5088228302393, 93.48779255349517, + 80.22587603890987, 90.24766462602395, 80.72925660065276, 80.60902836670564, + 80.17132710429134, 96.30040217887479, 80.47985907987174, 91.61868507533474, + 96.26356353033539, 94.60750923835268, 95.1882500277905, 81.61949744089864, + 96.3798711202135, 97.65457506304082, 80.23463773839981, 96.42053150131619, + 90.80666240611413, 80.1032303866727, 95.90108596914665, 80.0927932585597, + 80.76964604931062, 96.40039996902104, 95.12980316012785, 97.79441680877746, + 91.60376714280403, 96.04462361822276, 80.68118317954394, 96.38730301199485, + 81.86295183374969, 90.89911798532593, 95.49499265240844, 93.62555461538238, + 95.39021727844258, 96.87062492258316, 80.3233820766824, 89.98623982382398, + 91.64476869873134, 80.0967294744194, 80.74531320733442, 94.28164767937571, + 84.60124182398222, 80.05335323362213, 80.0391725079532, 90.39699498064697, + 90.42108909452607, 91.40922914584854, 90.37158344053053, 80.52323813893378, + 91.60355401843417, 93.89919833961598, 80.06776856516615, 80.91729954679401, + 94.31453536335535, 
80.56512507893993, 80.03139310871511, 80.04320219550864, + 96.24131595017597, 90.90390939159961, 81.28703292976918, 90.59596229175885, + 90.63756960813642, 80.27229033212346, 95.88693824969947, 90.4541107971266, + 93.69588101876131, 80.34690204572709, 90.57028696012259, 94.2534351116661, + 80.74075554482143, 94.12737678439281, 80.20644529103745, 96.63134211754803, + 80.86483769757709, 80.0848523264624, 96.14018198956165, 91.67871667527348, + 91.71474891686827, 81.42657708834699, 91.55115493924303, 90.21660056804768, + 91.34069305960138, 94.9133832909585, 96.10913671306301, 94.96462254161668, + 91.01161360605583, 80.21603897374796, 95.37414623951446, 84.53438290059087, + 94.99051222284415, 92.6928052173879, 94.8142248374923, 94.759603475945, + 94.76626414935755, 94.67319920352148, 94.77070231689518, 94.74287461225556, + 94.71638620629311, 94.6581744911858, 94.39332787234618, 94.6065083745691, + 94.75374915222004, 94.64390216603348, 94.39545407823992, 95.0366656183174, + 92.33321727321045, 94.79726262886743, 95.12423148112721, 92.77138420573046, + 94.97800371986962, 94.79384200127745, 94.78333588739449, 94.8039480008853, + 94.64133668042864, 94.71695461466389, 94.73371412222788, 94.5573549095423, + 94.39557446303002, 95.06506082525799, 94.82550885188576, 94.70384543042398, + 94.31631419559285, 95.13460914147369, 95.17055768259635, 94.89712566470352, + 94.76555364732617, 94.75147292995169, 94.64125566791354, 94.78165823005723, + 94.89590931848718, 94.79335656074205, 94.85807738697602, 94.99206940179106, + 95.01917621296123, 95.07085587353806, 94.80688878531379, 94.92691317450752, + 95.14995227991889, 94.80062086165607, 94.29969206036422, 94.5413184657779, + 94.70135776641867, 94.97516999380935, 94.29086546533043, 94.70125670834203, + 94.85337016710756, 92.54856242820497, 94.6412306105043, 94.76226165386721, + 94.93134764182643, 94.72674008402949, 94.69601361136392, 94.70163430660699, + 94.95526664575996, 94.71200746042273, 94.58368199148222, 94.6218203383258, + 
95.11852890654086, 94.6446579530233, 94.92641274669909, 94.81576419518784, + 94.61143417408987, 94.74307588247429, 94.72967371279101, 94.74677020873125, + 94.75942077090427, 94.99194751360956, 94.97694013826384, 94.99930608698368, + 94.64846376679125, 94.9406489299337, 92.74416213709216, 92.5434110371824, + 94.80319092612983, 91.80284435905125, 95.06291356077648, 94.77422788885204, + 94.68375366498685, 94.65049438913356, 94.81793267050216, 94.85902049347078, + 94.71531544698321, 94.69649274082134, 94.75278285198115, 94.36354269714094, + 94.74013650550086, 94.65769845082022, 94.88446974387632, 94.70226672937332, + 94.98098715700907, 95.06035237115569, 94.83513140767356, 94.78687858688424, + 94.82364280281972, 94.98156332456016, 94.95236844041092, 94.31763621107598, + 94.63369292202945, 94.6333654158428, 94.77546336832935, 94.75603779539192, + 94.70334139765266, 94.57519504763525, 94.83247775141332, 94.79229911100421, + 94.6373958061255, 94.71427807928625, 95.0126576090965, 94.79065799564428, + 94.76405329446752, 94.51840627055758, 94.56107794258455, 94.8982745125375, + 95.04489182816229, 94.59577991113105, 92.52267421867815, 92.52643179336259, + 95.32276397996006, 95.23171801116025, 95.34529295706575, 94.77288017753648, + 95.42919115867325, 95.70432551025864, 95.20673242015117, 95.14938152779516, + 95.1320424811712, 95.2303693071069, 95.33094996442765, 95.12483744604867, + 89.6794900402412, 95.24776516896635, 95.48611674799388, 95.38331137266275, + 95.6885732130318, 95.35937523705589, 95.09940192348682, 95.34015045279706, + 95.11555084501518, 95.13547555284508, 95.19856177606826, 95.46354569229081, + 95.21573195645853, 92.69878012002005, 95.21895884002898, 95.1005080709308, + 94.92280733591944, 95.12315107864256, 94.82865658762844, 94.9160534056014, + 95.10237555131954, 95.406038831142, 95.39304588678979, 95.29789998192662, + 95.28311544786501, 95.19395410987815, 95.1903573884708, 95.37541950306577, + 95.21655580677671, 95.25970951069293, 95.69380013823213, 
95.10892532431463, + 95.14744143432375, 95.48644333466842, 95.20437931317713, 95.09775237133249, + 95.31439892107284, 94.97553746985278, 95.33696457126626, 95.75755749495566, + 95.14683968641148, 95.09723118906471, 95.37862085331686, 95.26739595439997, + 95.67262595617612, 95.17252001748449, 95.21363434442223, 95.38646161970186, + 95.2326588031972, 95.1725037333094, 95.21872910347518, 95.36202787471215, + 95.2504018335747, 95.42605317413788, 95.05802301727854, 95.84984231915332, + 90.04192716297445, 95.19905572350459, 95.37823617083741, 95.12942291538553, + 95.22052793534776, 95.12378435853243, 94.84515225317945, 95.35989523343964, + 95.09803670111177, 94.87385213477683, 95.0674645047807, 95.54789909216043, + 95.26253097776019, 95.08453692884451, 95.21943791751595, 95.09537250163977, + 95.12420012469111, 91.07764570513585, 95.36563478846047, 95.12403222541236, + 94.80261103827611, 95.2113266751065, 95.25106304061259, 95.4311420353313, + 95.05039897780776, 95.2955176056433, 95.14846914931366, 95.12812254838488, + 95.41857491049039, 95.18296043958497, 95.37306893046711, 95.14425317782718, + 95.32469153077477, 95.23079068221966, 95.10184554562103, 95.07905461058482, + 95.1280415391405, 95.3008025004623, 95.15837303922815, 95.09841835054873, + 95.15348755098208, 95.43201962621896, 95.20595349847696, 95.44635978120505, + 95.42741430634672, 95.08722417685506, 95.15095752671941, 95.20135319828687, + 95.18267786451102, 95.14185348665684, 95.27947683603449, 95.11317536764517, + 95.24877964898502, 95.72190646694206, 95.24877415706511, 95.30447278545857, + 95.36730289485014, 95.34741763228658, 95.11197702656621, 95.25921424129129, + 90.94991168164175, 92.2205799324065, 92.33758932736306, 94.19023783244788, + 95.6979847834488, 91.62933595655971, 92.04077477834541, 90.82495403721975, + 97.96817273641206, 90.57066241552502, 94.25581148569151, 91.02031379993231, + 95.45133680412498, 91.37418182125528, 91.43797702484221, 89.93866275149503, + 89.63359766834544, 91.02192775454934, 
91.49139169416678, 98.15786420539008, + 92.0513172335752, 97.82366937067783, 90.40372433249605, 90.96030103848825, + 91.19497017950098, 91.09186996273198, 90.78202444116303, 92.84631091850825, + 98.02412681571352, 97.9614487731187, 90.88694968047245, 91.34854639193537, + 93.45809089499066, 95.27954096932052, 90.88896197129606, 90.54506599791847, + 95.26711480673971, 97.6817699631803, 91.22125783287458, 90.96893134191903, + 94.9149971168975, 91.07269233309529, 95.99531860669411, 98.20532552786416, + 92.91065253668359, 95.3059565557538, 94.84889893592529, 90.80161421121467, + 90.03043240911582, 98.20618639800477, 89.34100563608925, 91.11280673396003, + 90.90823365345115, 91.11660604272141, 91.17566721463088, 91.29181247963707, + 95.56965077929965, 91.61494901000002, 91.61631455038437, 91.25960771432999, + 91.02115703253332, 93.72231037158821, 90.9239041425797, 95.92335422127618, + 91.3045312918902, 89.0954231264218, 93.61854877071872, 94.65362952718922, + 90.88165510829333, 91.13302864520644, 96.54412166345972, 98.12175441202584, + 95.1845359671911, 91.0572398981482, 98.31028872656657, 98.19644283422117, + 98.23198029791627, 98.0347526323175, 90.18968397871214, 90.48182474989771, + 91.63545241799741, 91.35080789095034, 91.33090711307857, 91.05051367910201, + 95.10448766407215, 90.93041499888626, 91.1728822599967, 90.95498647727125, + 94.46316048703494, 91.3089400624783, 90.81102557465637, 93.76139528091852, + 92.30175447144767, 91.02495794277414, 97.32973872874211, 91.6920367090711, + 90.83610139062081, 96.43420166746438, 91.22983584112868, 90.9815034951672, + 93.18703515463537, 90.78615801217002, 90.99950331882573, 90.94403756651384, + 90.37487628982981, 91.1142450739575, 91.00965004903104, 93.36942742095012, + 92.06766706929766, 97.03843684623942, 90.90226572121345, 91.20761067085016, + 96.26829467334983, 95.45641880746258, 91.95160988024516, 92.17607868593964, + 90.87844474347229, 97.94658530088381, 90.93628228219718, 96.49747506128674, + 92.69856123068033, 
95.4336702404451, 89.97693486631897, 91.1152939444235, + 95.43964160923056, 85.2361633450459, 94.69734637927156, 90.7806507279973, + 89.17283091863864, 88.27097483721997, 88.65177209234196, 96.90277491128369, + 89.24548673315897, 88.43879158581112, 97.091434524036, 87.76617171836237, + 89.21119495804622, 88.78842124003536, 88.86253078116113, 88.62647620551407, + 90.88125896295745, 89.05703245708185, 97.1628279461647, 97.13872968974688, + 88.21095422026532, 89.07108202455485, 88.78020001431058, 88.74413674595635, + 88.98392567457543, 96.85820427918212, 89.03633726170942, 88.92753691342273, + 90.13781051118815, 88.0908869433565, 89.48572604661065, 89.56247041559148, + 88.9352417681237, 89.65242272530863, 88.44531717893622, 88.4285177241406, + 82.37840271161247, 95.65726784241335, 88.8127610179762, 89.17365726287382, + 89.93265496914692, 88.58959688555727, 88.71390509835017, 88.50114464299182, + 89.5836638100497, 88.12318420320322, 88.66857869613209, 89.17910017002238, + 88.42180124247422, 88.8998920216409, 89.41325665745391, 88.70825353664826, + 89.6136159134135, 88.80074690388537, 88.8097011817449, 90.35448501732036, + 88.47501996337692, 88.18274796968815, 88.7011485768023, 88.78502799344379, + 88.53191174195675, 88.45810473415715, 88.47957690426038, 90.0239658321718, + 89.69184326824652, 89.73718444662995, 88.4510215593043, 88.38850948795456, + 89.39278076397291, 88.38163688171564, 97.07190896149343, 89.74406181408555, + 87.99216914645783, 88.60343633583963, 88.91005377246954, 95.55027507772868, + 89.62342573469287, 88.5552325581103, 88.76146931827375, 88.27157677921387, + 89.44358640444541, 88.43768300231424, 88.97981917360497, 88.89890837079597, + 89.75620737882379, 89.43318567138087, 88.78434644809138, 88.40517967569416, + 88.70599334964959, 88.53787945783753, 89.15709769468093, 88.77333862586812, + 89.46118441031241, 90.33816616117241, 88.30104313395017, 88.96591657796047, + 88.85863187666682, 88.12460567130329, 88.87643196274784, 88.54186065013245, + 
90.17574478106988, 89.45741824990363, 97.10918981962303, 88.39437369003899, + 88.67321060540395, 97.13300257776568, 89.08273980761192, 88.60258713130885, + 89.008940718446, 88.1006256548003, 88.52928123625179, 88.44014466052764, + 88.464322911038, 88.57126657541916, 87.6944778693011, 89.88604253859987, + 89.11069438292502, 89.48388131206659, 88.61797320586619, 97.13138215084113, + 88.95984561417511, 97.20948235538543, 88.53760181429666, 88.43575516266021, + 88.14587557687634, 89.29842771459256, 88.76830235767646, 89.06953484713067, + 88.34823073221227, 88.06951517467826, 88.7800303063479, 95.16639868931391, + 95.46121409251181, 95.70011399692817, 90.26321446790861, 95.70157033218513, + 91.95205969937949, 95.4891971274081, 95.10965944120265, 95.56571529490162, + 94.9832349305582, 90.52623000060403, 95.39602147446666, 92.52117996162862, + 90.3657472017902, 95.54292415251659, 95.15207823223481, 95.9517708825965, + 90.29758190935117, 92.01333304352684, 95.47045835269914, 90.70911116973367, + 95.48756146110414, 95.03276382676098, 91.55659612882813, 95.56875667265778, + 95.0044522849133, 95.61300031143702, 95.14750724072998, 95.42661813718266, + 90.63081953574199, 95.20002992812103, 90.95232699019803, 95.50846806330291, + 95.0114114401192, 95.55015721318301, 94.8815210047874, 95.5613982542475, + 95.73303616753148, 95.44183724441957, 95.7411942025147, 95.01168617799316, + 95.01259434127117, 95.49876373982829, 92.01618096964211, 91.37845675434994, + 91.85258474718374, 95.57339428292778, 90.60531779869659, 95.126124234372, + 94.11557300673141, 90.88104122924321, 95.58708815889631, 94.95341718860548, + 95.69603620956296, 95.0116553140283, 92.93531392331577, 95.02837267683847, + 95.47937531459648, 94.84825879212234, 95.44046678517174, 95.11388034338712, + 95.4247231876968, 95.7272239965474, 91.86585353138871, 91.96101251668672, + 95.1366714716923, 94.9852492035653, 91.92957701385534, 91.74979751798568, + 95.68545411736106, 96.02837279971853, 92.02154351861198, 
95.49313143165041, + 95.14220025221086, 95.42625963684429, 90.36807953194709, 95.00227448089107, + 94.99358448850882, 94.94046516987015, 91.94216376113343, 95.48312156082784, + 90.73321978476247, 90.55280442330212, 95.90374026230862, 95.088307177406, + 93.9384291415678, 95.37317584677757, 95.4440224666434, 95.06728263555569, + 95.52790310961215, 95.12480175566638, 95.13558116988102, 92.0683819402827, + 95.01968107974096, 95.09276059022281, 95.42279705443656, 95.01938750188378, + 95.92138795435258, 91.84154093568405, 90.66686006613826, 95.63252160979941, + 91.84659949301097, 95.50565261075182, 95.64160592277122, 92.7362796392683, + 94.9882162887968, 91.4160477583137, 95.19124543762287, 92.48863848938178, + 95.05783953310534, 95.4210831637341, 95.87265980093838, 94.97543413073106, + 94.9353509469532, 95.88218518068821, 95.90639359445771, 95.9048755171048, + 95.06798720936303, 90.7127709790184, 95.00629491662899, 91.7838481778658, + 95.78110588276742, 95.481471089059, 94.8205654985448, 95.44023952620269, + 95.52405591449299, 95.9165607492542, 95.51428581011746, 89.97734897774056 }; parameter_t filter_lat_width_array[] = { - 15.419010722522767, 7.945894091148224, 9.28812701856356, 8.334363215413543, - 15.649986429215318, 9.464124603886033, 9.880541935366269, - 18.515140548163004, 7.796571353932571, 5.974555396737934, - 11.292993415521437, 16.478169051763544, 10.22676707174472, - 8.066787728838205, 5.435152872316131, 8.073328313734585, 16.548072837561413, - 8.746634165643746, 9.217122533136072, 15.159164624397524, 6.358580307591242, - 7.804500041400465, 9.440187400959072, 18.317441174431714, 10.20761553806539, - 8.597021348664148, 7.433140651253726, 10.871106465647923, - 15.900620983568253, 10.053429944383073, 7.756921749844513, - 8.455992205491958, 10.780052231919885, 8.937109155724084, - 10.043791435069721, 9.070365534519699, 8.373184983215006, 8.292311795234257, - 9.73808937846011, 9.064787971016115, 17.91107070817465, 7.826419771677172, - 8.546168163285676, 
9.75087226262883, 10.269686925972229, 8.317947176218839, - 9.598374838534887, 18.36956623750001, 15.19391124278378, 9.430679345963473, - 8.107471126889955, 10.860828442806445, 10.672996040688624, 8.01467656220956, - 18.73966937209375, 9.978652045727662, 6.5914062020251, 8.81087298924242, - 17.87851556450381, 7.179185745799628, 8.680303450168395, 10.616471327010434, - 16.9561885094164, 10.680256738017064, 9.884262221690458, 9.076867758924623, - 16.524565816597075, 9.31868844503866, 9.588136919707269, 8.244010362545382, - 9.285225052288192, 17.510877916471152, 10.779978288193245, - 8.484963755946229, 14.904857429196756, 9.927681064373767, 8.927886733668672, - 15.15494038862947, 19.743161001889423, 8.918373804449129, - 15.501903622343306, 17.948258211353682, 8.2924992974137, 8.987103683745964, - 7.799868336344918, 7.312600990044221, 7.128253575451017, 7.7980299478845, - 10.801818560831286, 8.000442078018507, 10.043329212736634, - 10.324545127148141, 9.215601661403756, 9.589863827453964, 8.393397128366214, - 9.037406086902353, 8.321646203790877, 9.150341413759286, 10.39467014185226, - 18.118477575876742, 8.79166467237152, 18.031842407418658, 10.00278224467492, - 6.9273381939257135, 9.229647430886084, 10.217546328255883, - 16.324322994424318, 5.6226583692008845, 8.099062158132417, - 10.479225605831484, 7.806707444388995, 8.460282280026039, - 18.347468004670127, 8.454887828566763, 10.658219365687446, - 15.62481734631385, 18.658002496250845, 10.75433054264357, - 17.976621668227654, 6.161123992444061, 10.429902821197283, - 18.629728423772207, 7.783347408238426, 17.90519869407977, - 16.104562528184573, 5.234353846018936, 11.011249626213042, - 9.581181766628445, 15.60480199303698, 16.085402003889474, - 14.768887507766337, 15.519225052550244, 17.281885037696785, - 14.811860512388272, 19.135300644555926, 17.74567260551033, - 15.388707776585079, 16.152205051761246, 15.37229602028516, - 16.140791253289684, 19.810928167958142, 15.070686928706905, - 14.85913780109927, 
17.281702090054644, 15.290355447846633, - 15.283697620610837, 19.68209848512328, 15.575032824674095, - 14.596777559783636, 19.5878249345401, 17.861986091399274, - 18.361878616466388, 15.517514529389095, 15.056160983608596, - 15.107542508977481, 19.712167811413483, 17.36906142752109, - 17.22993044716108, 15.113570066296145, 14.974604703600395, - 19.310440326338558, 14.949508266495938, 15.331210613586222, - 14.940413443802578, 15.016097706372125, 19.957867876782448, - 17.38164790476245, 15.081451234676228, 15.253453170108083, - 18.15449390040568, 18.4848328006835, 17.314740844006085, 17.201653790523444, - 14.724283530636864, 18.0460775389187, 17.38771131463053, 15.430900858795749, - 15.352054217811029, 17.53398437556827, 13.74401881045934, 14.98712652201096, - 15.685797235364692, 15.475945774833562, 15.45899368422629, - 15.578942009610024, 14.883925008097208, 18.315276295829843, - 15.33622640425396, 15.690541107868702, 15.393317469315132, - 19.831844537093254, 16.09215273544563, 18.256888568426533, - 15.625514301495818, 15.54733479001389, 16.669462523051433, - 17.892285863942814, 18.191751726290455, 14.791622772015517, - 17.730398544793697, 17.152770997754125, 18.642818696433636, - 18.041131344589466, 18.386142522902958, 15.501886888053592, - 17.25697047820887, 19.048567636869056, 19.678108935274953, - 17.393473957985574, 16.079748437093517, 18.993159123886606, - 14.71851471938689, 17.227433000205618, 17.947357369429056, - 17.150823057367084, 18.671139164454498, 18.127234011104573, - 15.28550978129422, 18.753352686209652, 15.348123628271905, - 15.912314363308283, 15.149926255330028, 15.836976845718329, - 16.935625932027246, 15.529398990930764, 15.67418476597375, - 17.319422627956374, 19.78777018730411, 19.718074690412884, - 17.06965176986506, 17.343641987150825, 17.117174608261976, - 18.56329300168334, 19.57741018879232, 15.639400175537878, - 15.032340274106275, 18.89894356131891, 17.575113578557872, - 15.556425030209754, 16.620100893467676, 19.61335877811249, - 
15.120254723297196, 17.71649452640627, 15.590778151671065, - 14.74252140631305, 14.930819778854287, 17.16968769863352, - 18.428934421258784, 17.864693148419384, 18.15399031285955, - 15.235909969823847, 16.195389380267837, 16.376420629337886, - 16.38385817842141, 15.486409913523133, 18.903342373643582, - 5.368611089856738, 16.99156962272187, 5.0775095262118395, 5.209183702778802, - 5.351003064941193, 18.48386346265716, 5.386314205451327, 6.519045533839835, - 17.104325475566256, 5.117445257152129, 19.779841671461476, 5.35816341734823, - 5.234291726093385, 5.065022659731545, 5.591361771820639, 18.750852676148558, - 5.153019547136194, 5.004170118292563, 18.628010409803856, 16.69757322886089, - 5.444956080456233, 16.908539981864894, 5.400931600557144, - 17.164957311952286, 5.033284922539085, 5.709639189791266, - 16.277584438293907, 5.366024326278939, 15.071976362684607, - 15.854009597225073, 17.29559407972014, 19.475339742790354, - 5.001590451042035, 16.647439493201514, 5.692387143476015, - 16.993659277035174, 16.50400263628906, 5.5348071027470995, - 5.532370512356129, 18.277414136092023, 5.028147736298328, - 18.507231641407827, 16.89969202574388, 17.733902576490703, - 18.088632371527602, 5.490205261008242, 5.560298840326876, 5.294125587763357, - 5.356620153180443, 5.010315785725573, 5.59189989429083, 18.600127383598718, - 16.263387458967284, 5.645179249719708, 17.20535054037683, 5.811956736254496, - 5.015648678226811, 5.642696593262534, 18.082057389994695, 6.872589502383253, - 5.212075468365899, 17.834502731919134, 6.370912406778791, 6.49585802934854, - 16.367188098558458, 5.188232982395661, 5.297882565799137, 18.679055988234, - 19.297524009806242, 5.502719647512959, 5.19272688101649, 5.647184211231395, - 18.405934891697854, 18.189558834721147, 18.015194394688724, - 5.376104761535567, 6.519083873284315, 5.012695131985648, 15.825606925280931, - 5.227732500129015, 16.568051568200783, 5.288403967323914, - 19.078263330905667, 16.349815081799417, 5.064614562672014, - 
16.955952236980323, 19.201875131252176, 16.711234059643754, - 6.677469465025525, 5.02387800779974, 5.194123684887386, 18.631983951661464, - 5.524980582676639, 5.717294922046711, 5.001999938010705, 5.585444814448566, - 5.215356956774261, 5.816003973597202, 16.90607357944617, 16.401017772588727, - 19.02530510802166, 5.203550743949205, 5.336743925879582, 5.240190992427436, - 5.0155673149288, 5.1357358435388845, 5.32674538101513, 5.007393685022532, - 5.206902763105229, 5.093021767316005, 5.162332703203443, 18.412651341062624, - 18.885508038904383, 18.04732805518533, 18.167696830315407, - 18.731966116154126, 5.112638151071066, 5.461089090391409, - 18.446778630178592, 17.813308159842435, 5.047253396451572, - 5.394546696905506, 17.498647610593704, 5.209319605371675, 18.09223472229339, - 5.033288906548985, 5.562315478774721, 16.964812540233567, - 21.181656290513818, 23.658929091444854, 21.610280577687774, - 22.722895432604037, 24.18029432654648, 22.44254664566738, 23.44021667206903, - 22.21249529294068, 23.539759014949915, 22.58875991625438, - 21.729423422587754, 23.9835848172916, 22.490324991273546, - 23.483435494483885, 22.49714639251783, 22.72742322550141, - 22.477994959760824, 22.550294178961135, 22.55386962730486, - 21.10982141142696, 22.15831816095625, 22.461694579433793, - 23.176176669322988, 22.35221694271669, 24.118327727001976, - 22.976240008411153, 22.84987057599771, 22.401527701480493, - 22.634362514012885, 23.427962979221604, 22.30922119457483, - 22.702640732665177, 22.391088957454823, 22.50917542814275, - 22.90876009046108, 22.757353491421057, 23.59632175234768, - 23.709094168421572, 22.53747173904774, 22.487255926844902, - 22.923569043453597, 22.39161855732371, 22.688097331847967, - 22.11834738446578, 24.309998828583126, 23.446261647793808, - 22.31675902507183, 23.167813500688382, 22.872392215554196, - 22.386134737282962, 22.674628650582722, 22.50672507764247, - 22.320538653388176, 22.63191061907869, 22.059828205935247, - 22.56768911561264, 
22.144269122818482, 22.796056488901833, - 23.206405071155096, 22.478108080285242, 22.891263867462623, - 22.431928402036284, 23.375458905473888, 23.402640447406135, - 20.78121666130043, 23.31465323252404, 22.82201972119585, 23.06981248011736, - 22.172198002146438, 23.245571698171318, 22.641865362161937, - 22.37579681917811, 22.324110212190515, 23.50194649375325, - 23.028269676965465, 22.378614942300075, 23.236650205864382, - 24.011466591905865, 22.517846170316666, 23.826578848552227, - 21.850810616858702, 22.582686633335317, 22.706784407618553, - 22.5096368198717, 22.41105266221511, 23.49703580577535, 22.664888388343172, - 23.46073814905937, 22.172255903075637, 21.521326477130387, - 22.13515325187245, 22.237962794553606, 22.559084982384583, - 23.188835077596952, 23.161941241294407, 23.279681046701054, - 22.343014957805554, 22.38480759552343, 22.96956647701912, - 22.684876623502042, 23.199905547835026, 23.21481127835044, - 22.07812730407325, 23.240576899262138, 23.556389699351996, - 23.657716287503597, 22.372266929649886, 22.48655847407428, - 22.02597495421892, 22.454150283725806, 22.729629903297337, - 20.554422430317587, 22.527096291287148, 24.153747718937932, - 22.162187035885747, 23.42328159354725, 22.754972692561488, - 23.156754647493614, 24.26800749016336, 23.140415260148558, - 22.169706203295643, 22.719024710549014, 23.778415480469327, - 22.561215925804508, 23.86221129363362, 22.73675702525203, - 22.748414386613295, 22.861059342710856, 12.463470106489263, - 10.82532647100599, 11.27870595161571, 13.544679618874538, 9.278289116919655, - 12.38493064908284, 13.857188866474086, 10.3757506624864, 12.351642161634462, - 12.104725357447572, 13.868938087920345, 12.430128102410356, - 12.117195677167972, 11.379063874727144, 12.92309908433021, - 14.297576769788424, 11.71249506366217, 12.517394713120185, - 11.758073502027225, 12.69761000781451, 10.306480076703368, - 12.402265704704009, 10.611468829660769, 12.488690876635806, - 11.172444042950044, 11.617216837995223, 
12.309542568263723, - 11.619596361449565, 10.610177412751575, 11.312657490417926, - 11.644880145448525, 11.969707401762133, 9.215310043508811, - 11.751302053903853, 13.96384508844276, 10.902154760119323, - 11.746168059389133, 10.83923870417941, 13.319251806347095, - 10.933463587122189, 11.387605311181094, 11.017909340289899, - 11.553220163993807, 13.537339353820236, 11.277031359049943, - 12.315148036769967, 11.745918413051117, 13.659979138742415, - 12.370199515933496, 11.274344311011106, 10.509026341396051, - 14.035326839029787, 11.351877273784005, 13.770602641594675, - 10.877951372503919, 12.060683167393115, 12.367976504166588, - 12.455342442857537, 9.674968332862829, 12.338802911850859, - 12.915292587868596, 12.391070368127492, 11.330662432240208, - 10.622604735343188, 13.125169838924426, 9.200621851237015, - 13.96938259854791, 10.510142489803293, 10.816395122562557, - 14.45951829957016, 10.499079480367906, 12.663089704072927, - 12.26366096311006, 11.434162192002422, 10.422156156508809, - 10.733067437268911, 12.318637469770366, 11.75525451943981, - 10.919716758140204, 12.478643286203795, 12.926228884937643, - 12.787898163133185, 10.45792182150034, 12.321350754557903, - 10.85881170057891, 11.24952383210622, 10.489270764456059, - 11.352537035433377, 12.260445823910185, 9.36294368755117, 11.22852569890115, - 9.57066252331841, 12.051330928535686, 12.644033276791887, 9.890886849563518, - 12.34250786504996, 11.360968200928077, 13.554949887159744, - 12.977338601327201, 10.538363055290782, 11.31816831887301, - 12.991396586424088, 12.26358011728771, 13.646093046103571, - 12.910369812561859, 11.551329605864101, 11.522692901524492, - 11.745373727148099, 11.811961406192898, 13.951695914613628, - 11.1031522024879, 10.297917113535073, 12.491331188034945, - 12.144710639849757, 12.226038273907692, 13.030447137867267, - 11.492891557975078, 10.629228009616124, 11.904492266610704, - 11.714737325225784, 11.93163571946579, 13.477881494769973, - 12.343188034749325, 12.756857937883824, 
11.78319514102213, - 11.505826160048844, 12.75115845715631, 11.524588130790523, - 9.136868306283434, 9.211248426972631, 6.191275275052417, 10.272261469419249, - 9.544238632679379, 9.254511900554293, 13.034227837355697, - 12.525504251866385, 6.483966241610135, 7.221328356861286, - 11.520448968515819, 18.074351232287118, 9.53441966034041, - 11.495195333173012, 5.3100776444855695, 5.194861267603233, - 7.033809007197352, 9.037126014719114, 10.282196998658863, - 13.150977904085797, 23.83462343617719, 6.731564241795451, - 13.039472341127045, 11.080277771136045, 11.361423979083405, - 5.319212706468123, 6.693267242305254, 10.643463934340463, - 12.654016463864998, 10.258278709641615, 8.746575267558082, - 12.750057247400356, 10.123485238397393, 5.363901411655057, - 13.936146161272687, 6.871960257687521, 6.974751364247505, 8.47615357957265, - 7.831823340414082, 9.789890087904856, 9.762457086779781, 7.562497626337717, - 16.299983149162244, 6.267379474515059, 6.381354425375111, 13.83962577126589, - 6.521866867753972, 11.944966914244002, 6.195351625549881, 9.763347009153103, - 5.33510867156783, 7.613508134098987, 10.555226371037987, 7.192366328585493, - 6.5564765376608545, 13.346217249441633, 5.746978750971891, 5.11427595556348, - 10.531675055537788, 12.236539481924996, 12.700048999369935, - 9.785243461429463, 23.25124703392759, 23.536569714035384, 12.79757621632071, - 13.881622380160143, 12.271718203720845, 8.198845676722971, - 10.755356493622504, 5.065558215016845, 7.998101410060703, 9.803673371481297, - 8.452194923973327, 12.190298074852041, 6.494895666202307, 8.327745661026125, - 5.505562902916464, 10.553531874432059, 6.366055129470032, 5.529813259818205, - 9.73520300096234, 11.721859724249038, 10.165391335159443, - 12.262706440817642, 11.259997453246335, 10.88794626298965, - 10.576810418862191, 5.394397344608999, 12.48230134109551, - 10.152528454407866, 8.188357465582675, 9.969641088992478, - 12.935565849560058, 8.171207735895214, 7.858836662095703, - 12.428218593887866, 
6.366297398435167, 5.854024605134151, 5.838421271101309, - 7.972994604118179, 8.76872974574427, 10.97213402556917, 13.294584898217046, - 7.75298720280076, 7.395501513010659, 8.249417582580161, 5.6820810114550575, - 8.059949241652992, 15.31615626400321, 6.043042638453688, 5.311449804210279, - 10.320084393939862, 9.183713470776892, 10.139382730690404, - 6.072540312748972, 8.018195551290239, 12.522708012469181, - 12.331904039650784, 10.288329517448854, 6.5208603436531005, - 5.264603030925666, 5.7764646309424865, 6.343783308467976, - 10.561484399647902, 5.595503097458902, 6.769156569370173, 5.148777976467049, - 11.732110880996213, 8.885917649687993, 10.52169177171989, - 10.625248340936432, 6.069824910638557, 12.287376082868652, - 11.479328031028006, 10.413250833158504, 9.43579290545488, 8.387485580443984, - 9.588507019291987, 9.709430341555255, 9.82453390387428, 11.661504720072248, - 9.339910099072956, 10.102311916800682, 10.932155590518946, - 11.005551119584897, 11.246576048791406, 6.1037916200949125, - 9.480168229762413, 11.38939095035154, 5.433422112488711, 9.726092311971524, - 11.497616908952361, 10.616825155562976, 10.487041295917368, - 10.991733454342313, 10.456386717458514, 11.666401035741067, - 10.560189634345255, 10.361587121529585, 10.544570409524828, - 10.19861753722904, 11.046863005903191, 13.646036274740514, - 9.940736978863686, 10.719447354537632, 11.702916740193432, - 5.854025569204502, 9.34868943582054, 11.187412190780117, 10.348117871083609, - 9.152106876413493, 9.584866143293274, 10.33251646907299, 15.133848913703003, - 12.140774485786647, 10.942402723212513, 13.269320219053466, - 13.80750500035172, 11.132788568884175, 12.066499620123974, - 10.333182423702016, 9.474497933310285, 10.687816421445502, - 9.976755850723627, 10.619792171119053, 8.922752887205103, - 11.446117878949833, 9.029908372594978, 11.122636539537863, - 10.391223702216516, 10.362171319367057, 11.06323985106155, - 11.207480490704665, 13.765693285223225, 10.386934925168644, - 
11.134832600520422, 11.551772325842403, 10.318803118100785, - 10.624833766154227, 10.064787428266728, 11.597362349509678, - 10.85737380127465, 5.8184571274283465, 11.28369653582293, - 10.991578884969337, 10.553435061129983, 11.35978091538491, - 5.792532762042417, 10.648730748007292, 10.203462111617823, - 11.035324187257775, 9.697247281609755, 8.598369992261038, 9.869306689396797, - 14.855541552338938, 11.07750467007462, 6.28019007684159, 8.862965030095213, - 10.8542711971783, 9.926280042760244, 11.378506079028575, 5.1543985096006875, - 13.734862767524326, 10.416798316038701, 10.595798009192205, - 14.14709924870744, 9.210122210191905, 5.860048871204227, 8.830695672971292, - 10.104401768541914, 13.231731257952797, 11.918650511646865, - 13.863628333277644, 10.86892044635675, 10.906606972106749, - 11.15226322815876, 11.167206630351659, 8.677715664540713, - 10.318797854215328, 10.227818016565944, 14.309965709484324, - 8.502990701242405, 13.29791261523864, 14.093956321789546, 9.160193281250024, - 5.814624498109951, 11.207233309623357, 11.44689036569995, - 11.267735580364578, 10.706568600078128, 11.116312360148454, - 11.076365099261556, 11.174372102405336, 9.071104121693349, - 8.943905089914594, 10.70111983000116, 22.514800960739738, - 23.225508590335007, 22.69473014598215, 21.336641602571394, - 21.379975522633483, 18.476547033353924, 22.667561105493316, - 22.291885495080894, 22.23563500236327, 22.00249691573366, - 22.249008306304884, 22.8034036201396, 22.425562169167453, - 22.465244831474873, 18.206109007511984, 21.39441974187727, - 22.25155329736722, 20.989806516065663, 22.756875657152428, - 22.768449812779473, 23.35105378984995, 21.836920363502333, - 21.646738108256827, 22.786970554413042, 21.491971277530784, - 22.62483378919519, 23.532794846154353, 22.668918978500574, - 11.491701907062119, 20.54796765982601, 21.492013289853364, - 21.400528960862932, 22.311834806667196, 22.457086595602497, - 23.039434319993525, 22.339844692106404, 22.273517069524246, - 
23.31448707036848, 21.897282395669926, 16.765423102071285, - 21.442094856144053, 22.561338432430244, 21.614093061474204, - 20.37406402891535, 22.489663215878295, 23.372860779969855, - 21.697388772405343, 23.376354096146645, 11.287428160965128, - 22.124260442421786, 22.37613696803427, 17.917101750462102, - 16.38182824621954, 17.692428529903157, 21.38449866705268, - 21.441250792459538, 18.000693381721526, 22.541077386090688, - 23.0014774897038, 22.854031589840034, 22.31472912932323, 22.06633818194661, - 22.635324162046953, 21.52709687726082, 22.873629378722026, - 22.478414203128857, 22.03554269877927, 17.1043488790075, 22.423061068619383, - 22.28138718186319, 16.74483325498512, 22.866263366488347, - 22.775116462749327, 21.35881789659324, 22.54252161992916, - 20.883593406557804, 22.38347613889588, 22.649996569460587, - 22.376289069413488, 22.61594340042184, 21.66495600326067, - 20.784003440804632, 22.894643705715247, 23.05871264807649, - 22.152301847660553, 22.126383845005485, 21.803641633169953, - 22.720481476292004, 22.36195751659776, 21.493590915381567, - 22.552431613803254, 16.211823334521128, 21.653210925567866, - 22.731337899857305, 22.380100174112915, 22.4552609316123, - 22.362623439516987, 21.631174532509707, 22.479679971029093, - 21.70406446417098, 22.934521939434408, 21.54449432801571, 21.95652018780491, - 22.603147157681413, 22.674138129626353, 22.683069618251213, - 21.946313008260937, 22.219461399445986, 18.01104964483748, - 22.34641246066283, 17.12176764820574, 21.82431455001509, 22.57587458264864, - 22.240112095507005, 10.67902299571295, 22.23445269351583, - 22.920198259718383, 21.786989473025105, 22.131114188499946, - 23.02832732971822, 22.643398806711996, 10.80919147200087, - 21.715649473211297, 21.749519310368264, 21.91146512159262, - 22.44509566633109, 22.52797601529341, 16.76764274160579 + 19.204499279681958, 7.825704972962308, 12.286103976076303, + 17.779552238877983, 16.29226092166684, 11.751078469092661, + 18.115094083656242, 18.13434319439805, 
16.756327647962188, + 15.469344066572718, 15.066505149133171, 14.634625615813809, + 17.566115295413443, 13.060891076928751, 17.881810013691492, + 14.233187882683858, 19.67208835155829, 16.38575762163243, 16.76501316712692, + 14.79537515947774, 18.518410842374628, 14.2078757960791, 8.25223819230165, + 14.854364907303996, 14.74763684083352, 15.694524849741743, + 17.900187507712843, 14.266397662925783, 14.80884156037613, + 15.600048443574305, 19.23604943282066, 17.55164053330642, + 18.761927821219643, 16.26354423926527, 16.456314110960182, + 14.891219033449667, 10.984766739201994, 16.313289442058892, + 20.148787730498114, 11.835530924879038, 15.244140498393914, + 14.667044512058991, 16.3884343901779, 14.13575888277301, 20.910311974164603, + 13.08085904646877, 9.740338242685215, 13.679872902877188, 19.12469409583348, + 16.76698247205012, 11.03989615602445, 17.17432293255299, 15.725822833594284, + 15.048304591804486, 14.625481361048768, 21.237776901696346, + 20.11976992239287, 17.52504568946729, 19.574825832965963, 11.58504735697679, + 16.02323088749383, 17.03544600628022, 10.75776827278878, 17.17457862975068, + 19.398787429576764, 14.704303058135915, 14.670974668143339, + 13.217406057637842, 13.279084590064917, 21.01486090657505, + 16.827981533566604, 9.11290336036212, 20.40867648708484, 13.245312993133851, + 15.25129082923877, 9.45860888367181, 18.923459342650986, 17.495539443434907, + 16.89918208804644, 24.120827847131125, 19.0417303053895, 16.11916689608104, + 9.74375756928749, 15.894351313857872, 10.519623673845002, + 15.142577529002148, 7.596872462953234, 17.28851813109173, 16.18944631082875, + 22.831339925579393, 14.06468056215377, 19.586657569750873, + 10.570264342281353, 19.408871252160548, 16.841922008973853, + 14.003379004019672, 20.3344939368085, 14.53654838043746, 16.136271617015574, + 17.421411152157845, 16.562905349349105, 14.729849951811119, + 9.071606517208323, 17.454754740839522, 16.360282713152404, + 7.581449114032146, 11.863995388879426, 
8.268928596800222, 16.3650431587956, + 14.726172444716848, 15.906537377159855, 16.72598354181906, + 16.826622057238023, 14.295491867916994, 14.885418625117563, + 13.992374822927744, 14.898503863838146, 13.967128347157129, + 15.737289415412652, 17.324614864785154, 16.957215468753372, + 16.451541334779723, 8.886532114106199, 14.214232187717744, + 15.429803212711757, 15.174782119455445, 21.420840872601467, + 8.92488926715901, 21.93704639145968, 21.099950831365202, 20.945196998400228, + 20.11628175288464, 15.293801612426451, 17.119983650215225, + 20.89896424432956, 21.211355999585138, 21.180238260110887, + 20.376602206577054, 20.851057054743023, 20.194814137851694, + 18.123919069821447, 17.63642275838325, 21.194715400263878, + 18.383885610624148, 18.961394384403846, 21.001012618326197, + 15.235446826911595, 17.98310021642838, 19.818954783808213, + 15.36806314862414, 15.419137776488432, 15.471480544676966, + 19.207368244284268, 15.581970288072316, 18.061790056071587, 20.389805106794, + 21.022596838275554, 18.38507521193285, 19.977935444544134, + 15.481546874803147, 15.514986339953921, 21.069158272205797, + 21.27787585253528, 21.142579969226226, 20.581432271858564, + 20.14352579345678, 15.545680839410098, 20.451513031716786, + 18.72243762790236, 19.997677049780123, 20.532934001818706, + 20.88836569191504, 20.50262924559535, 17.684221065463323, + 20.276270561105896, 20.310475545356635, 20.807929868609513, + 20.918591476292665, 17.88224048905984, 19.80955007846212, 20.48067115941285, + 15.45567603773857, 21.75082136469898, 21.002577416648858, + 20.983020613294837, 21.32766350996894, 21.013888356048792, + 15.313749860046297, 21.04925102342365, 20.918488586172774, + 15.599559416308072, 17.749133387384255, 21.722115658663466, + 23.348119299459857, 20.555399556673557, 15.469953032416349, + 21.421725846118512, 19.951614950746055, 19.961508172182203, + 16.750596427321025, 15.237233596012848, 15.563168194331567, + 21.188745571257037, 21.211923129896668, 20.76858474761717, + 
15.95319498072165, 19.152013148156833, 17.977652098385626, + 20.53446616427803, 18.527478811916332, 20.34091091461571, 20.80405889836139, + 21.098281345699906, 18.159542801390316, 20.798385286765967, + 20.299066572640832, 21.561787367984557, 18.613194104466594, + 20.08123904681963, 20.842438055863653, 18.044253084605835, + 18.167775306079363, 21.091012977619908, 21.01500144978408, + 20.663159209756405, 18.491074613198354, 15.34477270039659, + 20.75752182765177, 20.04040435052706, 20.934420919978283, + 16.604566508825965, 21.01352788799631, 15.527158300134317, + 18.67351302070839, 18.29299044247, 20.800498593473566, 20.510815273511973, + 20.752657135007045, 20.465786428144344, 21.300886086914357, + 15.756861160672857, 20.87969352443668, 20.541285198744866, + 15.431378461735752, 17.060766857133217, 20.77418179299061, + 21.018805174419725, 21.643330173765712, 20.913341247153753, + 17.03662362641699, 20.97441217981848, 20.80652551796623, 17.081451506025978, + 21.260591279475094, 21.258770676129448, 20.790381386530576, + 9.438041357925234, 9.615152206383684, 21.064168963393072, + 23.435097212061784, 10.646605332559602, 8.134690140276339, + 11.391338703381331, 24.26016143403166, 9.180255402053323, 8.806927391382391, + 21.507555901776346, 7.137437985806258, 12.04732499826259, 7.334479623934577, + 24.469209658972893, 10.435065434411573, 12.331143501209729, + 9.301984088534972, 20.948761625638667, 8.130808228570285, + 17.216754456264926, 11.512610831284388, 10.032923496605564, + 11.23240885416377, 8.301555015495726, 8.833353869787675, 9.36900571842137, + 6.619997986470112, 16.879694590261323, 21.544909313284, 8.743827502476234, + 24.710868558837685, 7.27440868189191, 23.074762947113737, + 20.635212339070254, 7.226269534288007, 22.56640842945307, 7.166387135119871, + 23.63677995807928, 7.868883186935571, 21.155388595551248, 10.21344522392583, + 22.573482744993758, 22.341701347343772, 21.604638045163206, + 9.762284409849341, 23.366452912042796, 8.698588282307918, 
9.382571185858616, + 12.684924871846837, 10.961744090389196, 15.83426012354714, + 6.985733050634871, 6.9914887542560455, 23.656436973914914, + 7.221377818490197, 9.691580026759127, 22.73322631245704, 9.28134750720564, + 23.28451287849016, 22.686091832787213, 9.68549327777749, 12.753329529156797, + 7.9612689886090005, 9.055226239411917, 7.116525433833591, 24.13526090516401, + 7.901205032726953, 16.2598943788247, 9.511352577324297, 11.813974954667454, + 9.07839364656535, 9.565862624906881, 7.8224961560519874, 21.286420914578024, + 10.523532857268329, 9.506610493043317, 23.100276745063645, + 20.705071877803896, 9.058518390068176, 20.56456737234557, + 20.688288265269815, 23.069684633860245, 10.393095232047997, + 9.997856348991098, 9.426672012402719, 10.018508814949962, + 17.905503720835906, 9.626635268629487, 9.061807300637133, + 23.091179143455726, 24.457722731647237, 12.454903941507876, + 23.44582791396889, 19.614548515631707, 22.684141516274604, + 7.4011792065504025, 10.143801741249746, 24.977718252499407, + 9.274626181485564, 10.050894760815378, 24.940646491640884, + 11.455711802138532, 10.069923641093483, 9.297230975053951, + 20.772450950732292, 9.968744168438711, 12.168115523891268, + 24.608242476470302, 8.642231214632842, 23.204931211347933, 6.60770791545531, + 24.601325336742487, 23.237757245284126, 7.067525045361201, 8.5503900469632, + 8.887104030123952, 16.927849916110425, 8.600113742034656, 10.06453881453217, + 8.904210550006857, 10.510560234425471, 7.0751165195940615, 8.88605875499069, + 9.277407195696455, 21.34276669843286, 11.663900059589249, 21.39164485108212, + 24.036949526164317, 23.739163800977348, 22.566056356548813, + 21.935491173203232, 21.937052052923196, 22.344589895844052, + 23.4938069446202, 22.50646617711909, 22.618363286909197, 22.90167141747278, + 23.91797233628538, 22.34536781313484, 22.461384012616445, 22.31911303737243, + 23.649040284865578, 23.43861043973548, 23.202608564472495, + 22.492050620946234, 22.332119529806015, 
24.023906067825827, + 23.183939251915337, 23.25767378065108, 22.467156006147178, + 22.614348563934993, 23.5602860279914, 23.305950946188652, 22.64759937618327, + 22.73391796830175, 23.310467099465594, 22.06965376287572, 24.52455033745146, + 22.503473803591344, 24.805394387090363, 22.384839255966632, + 22.378572923483482, 22.124890422107597, 22.055133837769784, + 23.201195089893194, 22.871190586742575, 22.511772754945806, + 24.23469928358639, 22.501362292787217, 22.40399744228507, + 23.436186021546828, 23.607155922153204, 23.497505778756583, + 22.343971701867655, 23.21730476734626, 22.54550707642612, + 22.378677095722523, 21.936679044240172, 22.62990215361003, + 23.288258861925584, 23.13361747541459, 21.81029866188003, + 22.513983760433316, 22.806503678993295, 23.982385077252374, + 22.83973433141982, 23.022263793799016, 21.959098042495263, 22.2784134610121, + 22.35601765420137, 22.37276838488249, 23.226198164891876, + 22.513459079556664, 22.73824938508952, 22.468237449510656, + 23.068044342653995, 22.800803935065304, 22.552416981035844, + 23.084800844598345, 22.840285925661867, 22.538569696915975, + 22.619903011568667, 22.38196560434696, 22.17863292877017, + 23.222021376894954, 23.676690220018596, 23.36253655323253, 22.6889375926422, + 23.414059658808295, 23.389655936968516, 23.834551942338734, + 22.63933201414909, 8.250833140533826, 22.045668816140797, + 22.531910580655758, 22.41629250478916, 22.392499439542693, + 22.309536342846236, 22.71729201551749, 22.390155362407533, + 22.481221107526558, 22.360751868377264, 22.867112547574436, + 22.397118180193697, 22.590750481590813, 24.35714370577744, + 22.493066117634893, 23.395697997300477, 22.79162842759924, + 22.732334961122415, 22.36044854982739, 23.066177599461682, + 23.50010419622283, 21.624224861474755, 24.762401969330014, + 22.51357132699523, 22.81000221280477, 21.955028314914202, + 22.398007391961343, 22.488881082184015, 24.428567761899323, + 22.543081735101918, 22.92686286432637, 22.339760463917084, + 
22.273488190557238, 21.785138864413348, 22.673874284678472, + 22.436452560486686, 24.50145487521601, 22.735503136922986, + 24.36588648639478, 23.304528886327144, 22.798063782150095, + 24.12628322188187, 23.651034832787317, 19.798345870787887, + 19.682359839902677, 19.716485721125995, 19.205826022858503, + 19.291734614472258, 19.081965885840024, 19.73776329970652, + 19.598920992875193, 18.74973811574392, 19.27580322925649, + 19.089422397436284, 19.694129257463118, 14.483118542336332, + 19.334974301548577, 19.82157954091101, 19.663629810863867, + 18.96291407738529, 19.215358269694498, 19.49262201101331, 19.45569267552717, + 19.57857058160781, 19.571708942488424, 19.208136473596134, + 19.566544811688175, 19.110215297288374, 20.1267107729145, 19.53418995590648, + 19.503060912394325, 18.3380545415412, 19.53870078131527, 18.729561060152577, + 17.537431876122486, 19.72930888867831, 19.37478697514308, + 19.394881612170217, 19.639168717535398, 19.390800989276176, + 18.946760931266514, 19.372654846080234, 19.169547504261182, + 19.759192920171017, 19.258126805451862, 18.915062093387345, + 19.33873778499118, 18.581065423201107, 19.635517188364588, + 19.21888480100841, 19.50057778460336, 18.948119453135774, 20.82793558092706, + 19.188775795370304, 19.103817067249533, 19.919146825425997, + 19.505715444981977, 19.889593193087784, 20.12841708849739, + 18.442668652801668, 20.203326108602674, 18.4368784913877, 19.36301152794451, + 19.507758251980345, 19.532850566582447, 19.267347488755135, + 19.56114054113684, 19.004998814704184, 19.628901069621083, + 19.491194438767245, 18.68723267802397, 13.31627274634327, + 19.342156978844105, 19.445767216554852, 19.550108320436625, + 19.74125222061138, 19.199500604737068, 18.06439646653765, 19.14989212189482, + 19.42011623044132, 19.45174085643436, 18.564909940179255, + 19.935692563138954, 18.45428745811353, 18.790918124800687, + 19.32912017870096, 19.394646483373048, 20.035503393786456, + 14.460147093090253, 19.742519383876974, 
19.63759240467293, + 18.330961255740945, 19.03318552993774, 19.058762456462535, + 19.46889442781051, 19.1112876797079, 19.282260690363593, 19.32841293205424, + 19.406808640982693, 18.261607547162402, 19.171333414025536, + 19.774437250415325, 19.683760456272235, 19.323724678276076, + 19.24367210635008, 19.430348504541005, 19.71395601448766, 19.6816772628461, + 19.077332102426457, 19.34710760062413, 19.503358918400696, + 20.04601605981984, 19.366757425826314, 19.056328531463492, + 19.319733843729082, 19.53980702974508, 19.27165037408421, 19.57843769877787, + 19.544033307973603, 19.771357837349235, 19.62589387524786, + 18.931546094525956, 19.314292322345885, 19.619012255143794, + 19.148216210213306, 19.739408753100733, 19.266208199592395, + 19.44718789785668, 19.55443050864124, 19.44328262674957, 20.220809146688403, + 23.702131326690292, 22.333845711156386, 10.521937848402835, + 24.53290750944977, 24.41751252931328, 19.550877326328898, 18.57736059836286, + 22.57616737810481, 7.955747715294422, 23.128008940479848, 16.44951406391817, + 23.608487718316997, 22.041467874160638, 23.19761477349296, + 23.495140739948457, 21.22806737335094, 18.140077513959742, + 23.140533879423142, 23.348500169187574, 6.939759558609603, + 17.49298818385118, 7.247869297902375, 18.9265476994018, 21.111820490240678, + 24.585482258779948, 23.585901608448168, 21.133105117906354, + 21.505262171294458, 7.372949217837711, 13.450844728289189, + 23.247315634748734, 22.418288016298156, 23.61923366806372, + 20.71964484007924, 24.829877247617713, 18.998705248424493, + 23.281630252606753, 20.754072816698287, 23.431512851911936, + 23.41150101851736, 20.704102727569325, 23.227905620703215, + 24.175263935290605, 16.06375772673838, 12.108290658789432, + 22.62592267597617, 24.653589393883095, 24.878040122398378, + 17.39695655871855, 9.282527489020634, 16.128157215693175, + 20.843969841094243, 24.488031800854834, 23.33778728913009, + 24.102004753858395, 24.39194184257901, 21.862254402983204, + 23.397023857935185, 
22.893577792060793, 24.61546095181768, + 23.859163444936446, 23.756838951348612, 22.992655604778893, + 24.779647757746808, 23.479653902652572, 16.37088799302653, + 13.01176654168165, 18.91354176744748, 20.2811494220215, 24.389554709511323, + 24.271208569182527, 7.951454675607477, 22.412755307006893, + 17.930714660200668, 16.12765971569283, 6.024999652738743, 12.55673808347259, + 6.909893562434789, 19.044401503812264, 21.349529028871903, + 21.992993853082826, 24.118008617495484, 23.647939055680574, + 24.26557522453679, 20.529013324277294, 24.400052583649625, + 22.67099360355961, 14.358346505522281, 23.40762435716892, + 23.279656964901434, 24.498789231297998, 23.56922114310072, + 24.63536841271129, 23.71023424182505, 13.245028578786922, 21.68124384543992, + 21.482159011467886, 18.169928405790653, 23.929825316752233, + 22.55189214707218, 22.30312895820909, 21.78375778067532, 18.41995910104335, + 23.819849589362427, 15.031668076429881, 22.827023379775916, + 23.528924619539108, 20.397945696064046, 23.571110364550847, + 24.79998572720377, 21.95621734391462, 21.36544888219936, 17.482264213966157, + 24.849666967121472, 16.273865426024447, 23.353274747740624, + 24.095425928735875, 10.288192851871699, 21.853914843843388, + 11.764936637099026, 10.217775235289025, 23.45224009113538, + 17.24966979567136, 23.992709859596378, 23.151733500646387, + 5.254443749444262, 16.253950514579763, 16.884917520661034, + 16.209951146361476, 13.825354456481453, 12.171599861028762, + 16.787930397523425, 17.132833064674937, 14.338977176252317, + 14.307477364222889, 13.39081339080065, 16.913894914261977, + 14.80281269149029, 12.33389048308558, 12.3968246031585, 15.782046965812738, + 13.05819905462471, 14.385016400520167, 17.891637262183746, + 13.685711671263961, 13.289876636077603, 12.5282128739723, + 15.180776899217026, 16.57875987634636, 14.75861214244057, + 16.825730629726905, 16.170188930106356, 17.656658455560475, + 12.082587640865084, 17.427806928736853, 16.926164276876708, + 
16.397386000832533, 17.18256791384945, 14.284440988171038, + 14.284756733084796, 16.50626208083299, 15.012029060348969, + 15.440027286001596, 13.601402492172106, 18.0562658951761, + 12.605858480514101, 15.07995102953427, 14.060307385383068, 18.5934482514754, + 11.837511090221518, 15.223484214080836, 17.34148242403894, + 13.976041526220584, 16.165733235475255, 12.949172255451256, + 12.408262461968212, 19.13785692036357, 15.747790662953626, + 15.858861110699127, 17.240012992558235, 12.134819079379156, + 13.463349343576672, 15.328101604816625, 13.066023990655038, + 13.83048164981047, 14.571555830414642, 14.120530394043314, + 16.903901666051244, 17.661463144915928, 12.735955534685978, + 12.607647008737862, 14.028127042182646, 18.521304602521056, + 14.130663892429657, 14.624979676280837, 17.40353744061781, + 11.96338773477267, 12.406440616101712, 15.968940589216098, + 16.11781297501448, 12.352029608584361, 14.212449993369528, + 15.159096636548597, 11.409957803527012, 17.018797107912928, 11.984615572516, + 15.871301726392204, 15.97305366315602, 12.888923004485827, + 16.605365149268714, 16.084097003125432, 14.231383361169149, + 15.372417857428275, 11.84536943809626, 16.597554059443887, + 15.365550475162935, 18.105843089823136, 16.400058172720176, + 14.442445043268044, 16.115014306143728, 16.60546776322827, + 12.611865236839733, 15.876452397929112, 12.434011791867412, + 12.776681731482645, 16.669170671562636, 18.097951510600886, + 13.40564498592188, 17.633670123717753, 18.477280998069904, + 16.82527330868748, 13.504894273674926, 16.223803910771693, + 11.862441714285614, 13.731063410733412, 12.04573998087055, + 14.222290095815548, 14.219297106172982, 14.19873471269983, + 13.19218198387858, 16.11730992655149, 14.903866247759563, + 15.690287319870713, 14.05388060459569, 16.08437005970719, + 13.571206167937639, 11.737698688971076, 14.380621209924458, + 14.598802430474445, 12.32443697645895, 15.255002992605554, + 16.778908945245593, 14.156405371304018, 13.22946221296463, + 
16.021236496710884, 15.235343154257336, 23.715110802761668, + 19.672584529374213, 9.882820378691436, 21.215495943089355, + 11.082598409997379, 24.732284208888114, 24.41150393046609, + 24.89758867255309, 21.93228937724912, 18.111317093781363, + 19.473882033562198, 18.220375263790316, 17.89366015395346, + 23.225023518506163, 24.59672778000672, 23.920487361960024, 9.96291600106326, + 10.62440980702323, 24.621476215271063, 17.899241933236457, + 24.57419822428383, 19.557238485091542, 16.98864660485015, 24.6891275338551, + 24.9235315102456, 22.136346945305206, 23.924853255239828, + 22.543351607096472, 9.892183081321612, 24.383285932976968, + 18.26167769613482, 23.920229214536036, 24.762670563410534, + 24.942751032994593, 24.87261362680492, 21.764649834009806, + 23.275034169193667, 23.614454017232998, 19.343547022013148, + 20.267410434050497, 24.757666508738968, 23.03047179835153, + 20.128154726366418, 16.225384732063876, 11.123192477189615, + 24.846238444237443, 10.102462593860798, 24.49999389576462, + 23.51398096616971, 18.515588380710053, 24.6212822874228, 24.868985682843974, + 24.702275795597043, 24.985328264503963, 18.84938795968693, + 24.946792571601677, 24.66111165186878, 19.055487439025164, + 23.12152760221229, 24.21384599637307, 24.170353349386552, + 21.812511456542953, 17.692642676139943, 10.888276678263809, + 24.34971824171208, 24.436570554509114, 10.737402772096514, + 17.727924120430757, 22.305862743186214, 24.009360874735304, + 11.104624151034336, 23.038610835997325, 24.04213835623956, + 23.209604917250072, 10.122918138678727, 24.96075518120497, + 24.87855109638933, 24.979211940702005, 17.60873020706211, 22.8014229025403, + 18.20439244260273, 10.409394427738384, 24.524932234212798, + 24.32478334428573, 24.864483877128368, 21.78012930197587, 22.44108125837416, + 24.814110182286786, 23.957415978710632, 24.56850067329278, + 24.122910346396893, 18.643166445356304, 24.87118294186844, + 24.43516083638862, 23.026312185051076, 24.959060606009544, + 
24.594907388811194, 17.747774863147697, 19.10070620711319, + 21.892823788174518, 11.022207204659267, 24.955716595322034, + 24.463898479472796, 19.17725398117624, 24.77581046882243, + 17.293414206544888, 24.7348213600107, 18.006537921587963, + 24.525599512907615, 24.057660604182505, 22.1033462171131, + 24.972098672307947, 24.95213408896655, 23.49328020082815, 24.99078753115913, + 24.168659500195687, 20.16820398472823, 10.115779901121025, + 24.681206404540866, 18.012726180291, 23.561839243213072, 23.152182100101083, + 19.15069152985934, 19.01109724649357, 24.897059058200156, 24.53680325352205, + 24.91248790778304, 18.157591807468282 }; parameter_t min_area_kmsq_array[] = { - 4940915.201272564, 1975562.58262701, 2394962.382811432, 3743249.699596414, - 4702626.403200475, 914102.0950559329, 4327562.61015992, 4777188.735208742, - 1942634.826715886, 1141784.38490075, 1269771.8433168482, 4972835.72773043, - 2174264.011905834, 4507354.669377337, 1135016.8789213076, 2024458.921767253, - 4926789.474863471, 4426138.122010929, 4586229.487831621, 4999108.999676197, - 492682.07737351366, 2386888.1741949995, 977597.2941862016, - 4895962.395973537, 1852761.832637609, 3907494.0053622876, 2040648.872094789, - 2947798.4897774113, 4969713.976210027, 2183311.473846587, - 2436928.2703292044, 2617098.232534163, 2911472.5519592687, - 3509794.719063762, 2122744.56552086, 612901.0190617777, 4854093.245077397, - 2174468.433326961, 2320247.0298564225, 2168836.205612817, 4789414.793395264, - 4422764.348234517, 463497.32582870557, 857643.0324065803, - 1131754.0504224533, 4673801.700813819, 2658932.142741898, 4765720.675553703, - 4993651.653971156, 913526.8221721092, 3466809.384367344, 1200932.0848862077, - 1123409.622354156, 1980373.604536936, 4818256.5681162635, 4567587.45588333, - 1702855.7437490344, 474054.14020832436, 4822671.87665571, - 2434674.1410302706, 3536771.7913092417, 3129774.444626063, - 4851187.833288547, 2663062.761885399, 2450441.7889437065, - 2481861.1430794126, 
4655030.162997784, 2637254.266547621, 2229873.743850122, - 4310493.078262166, 4608420.084383819, 4753088.629998042, 4895067.396371177, - 3879201.8809410967, 4969730.101700092, 2219591.0335806953, - 4435178.572383282, 4747807.013361398, 4497861.967781114, 2390130.8934793477, - 4968497.359064061, 4850051.626845021, 2559198.161192425, 2251839.34382282, - 4562671.879690477, 2157221.4068935425, 3807927.429485304, 2020218.099107836, - 1182041.3472377441, 3806630.7673484697, 1195512.0882155958, - 3960071.8927713907, 3290416.9804646564, 677551.635839314, 4371840.438975264, - 2396039.1816747645, 2456180.5819690977, 789706.2229083268, - 1121037.0387459956, 4790008.025774367, 3433064.5200298135, - 4656126.193100479, 2191347.1096479306, 4046628.5234471727, - 4325086.260874277, 1199892.9845023428, 4873725.004871375, - 1680422.6431590498, 3569674.3427296034, 2957489.5042692227, - 1967013.1608978854, 3865335.7280786587, 4850649.839897816, - 2281952.368166068, 2878989.515174885, 4929990.612248927, 4650552.25713, - 3114912.5907337754, 4722720.535993821, 1149727.6675022428, - 2090771.7457056127, 4757095.139964891, 490900.71278951934, - 4526940.731298506, 4950269.844340095, 980260.3386679805, 3251350.1148642125, - 2497458.534896222, 1712314.0060227078, 1716046.6855513272, - 1684183.2618578395, 1731090.7914685737, 1874501.9728702805, - 1654942.9063410682, 2228824.572795006, 1577920.7167429745, - 1727917.7690095846, 1683392.467426384, 1718575.9029768305, - 1775454.3419756072, 2181641.841420227, 1662321.71575164, 1666315.678894715, - 1672434.726944937, 1679225.6173688804, 1678627.6607742412, - 2205337.7881139214, 1768258.5939636428, 1655945.1298632184, - 2191970.01206161, 1613277.993862674, 1541219.5601135716, 1736798.1333197725, - 1665071.5703598477, 1675460.2007918179, 2180911.7368279365, - 1678203.9883909542, 1643933.0683319971, 1776257.7813744657, - 1646397.9990742125, 2234356.884553406, 1648262.417974642, - 1697030.0161524704, 1662698.2947628784, 1673508.6068936351, - 
2163198.068154489, 1683151.079616502, 1750502.4638748274, - 1795240.7814923278, 1581970.6471755242, 1602199.4423560288, - 1702619.14678788, 1787746.041444159, 1673156.1126915547, 1582423.8337859714, - 1678160.8007745985, 1662484.246904078, 1672174.1959301753, - 1689361.9430033967, 1775122.072642982, 1693155.1166074318, - 1704046.075062882, 1805974.0723686959, 1730744.7765922612, - 1732558.771556781, 1657757.923486172, 1607898.437921861, 1665280.79661071, - 1669337.8789749339, 1746257.2951677202, 2188781.247236596, - 1800561.6627478232, 1576519.457712976, 1730209.8894577408, - 1732526.3031558623, 1776100.1285120193, 1593132.5343642102, - 1620764.9245265296, 1685279.355007294, 2164537.7313852413, - 1969718.151617631, 1564587.9366214261, 1594980.12114609, 1481855.7104624503, - 1773653.6633990845, 1686738.336797384, 2242493.930273068, - 2186196.4085832257, 1675925.1037212198, 1711018.015152035, - 2304922.986285965, 1675252.0218983595, 1868144.1560442431, - 1595283.176207459, 1665986.8121514216, 1522030.6171584304, - 1620119.430969758, 1668539.328124671, 1453421.4457135787, 1729299.627551086, - 1712418.5052698872, 1739573.4975648946, 1759821.5913737966, - 1810010.7949366334, 1729077.1875956203, 1741211.7211304805, - 1670981.289948398, 2241266.484298503, 2196707.0711220126, 1701544.440220376, - 1694381.6934202767, 1856376.7080818047, 1619639.8226138125, - 2163794.754596537, 1799967.6798696795, 1622773.525279753, - 2012473.7783980258, 1740558.2315271408, 1734942.9547791604, - 1841765.4914219459, 2198590.0913957977, 1660883.7146069102, - 1621532.0505540334, 1733878.710069621, 1637329.4304964954, - 1694235.6611423504, 1668789.7315343865, 1699730.101706234, - 1739644.681178664, 1570476.1738924035, 1669566.3452747695, - 1811998.286818575, 1707418.0604932692, 1782940.9256124075, - 1725123.8231920719, 2235593.5701743574, 3831222.2518792995, - 4970977.813356514, 3749761.2764986944, 4437586.737755572, 3605609.355257352, - 4895476.460033613, 3859304.0397104947, 4743596.837527586, 
4948711.020963075, - 3440348.0883543384, 4477795.77034487, 3787123.1742300875, - 3990329.9762895997, 4121034.6233636225, 3707383.835667133, - 4873257.818997461, 3569624.670224335, 4156502.106126852, 4873750.095747413, - 4997883.478761243, 3906506.034539927, 4917080.3272856185, - 3835548.1295734756, 4924459.539032188, 3920527.609717655, 4843529.846141554, - 4929296.483234894, 4846413.676941523, 4899887.021403913, 4963201.8608024735, - 4964267.35732455, 4620881.561446789, 3552346.498978198, 4854249.608080046, - 3921366.7413309426, 4963311.308810963, 4995453.503278628, 3873947.353736287, - 3574165.4987771735, 4925075.472846994, 3809176.644986009, 4887106.262611834, - 4956450.488690604, 4987225.180128601, 4933514.636786396, 3702714.8298484846, - 3507806.098616758, 3281235.924480191, 3828275.178209844, 3821355.967676246, - 3945664.895484974, 4851633.806281473, 4811953.970946599, 4879130.687128103, - 4815122.128844351, 4812727.305299973, 3781909.944194331, 4432149.859824059, - 4810563.812477586, 4800146.321283585, 3957687.124827178, 4528554.787601869, - 4521756.826183347, 4580956.615146454, 4901496.933750901, 3910614.8503375477, - 3683368.296136975, 4811173.823117851, 4615716.417334701, 3712656.88795664, - 3821955.408744505, 3676350.7381979623, 4945058.034769859, 4950940.077363415, - 4853536.513989868, 4956908.684708815, 4667832.782960879, 3672076.3621930187, - 4975479.941019756, 3636007.320848885, 4952606.623148496, 3466512.9946105317, - 4675200.136680188, 4980379.729464548, 3851580.840660558, 4838281.401043028, - 4657478.942496542, 4790923.5655406965, 4620579.183273244, - 3666029.0706781577, 3708662.4446599022, 4911481.849688082, - 3821838.2165186997, 3497706.97377718, 3486600.1360198627, 3897318.335907939, - 3755049.252295232, 4714799.699858914, 4958045.286061251, 4957464.651028324, - 4493889.562957668, 3558040.8433990725, 3605592.624198978, - 3806792.8727228097, 3832342.6636218284, 3708264.4217823143, - 3787997.465577595, 3757071.0630298248, 3763920.8564911275, - 
3652326.5630927826, 3961663.284851798, 4801837.721925363, 4690894.941411187, - 4947385.670841951, 4836421.356067937, 4917146.500854882, 3870296.0017687012, - 3169169.5976521648, 4943439.888264846, 4978545.1988340095, - 3842571.570779224, 4766668.12467633, 4911585.921894618, 4001921.39982145, - 4949445.653428632, 3534160.7391764177, 3375892.610451248, - 4826531.7324095955, 1351059.545033687, 1284074.5343243754, - 1037895.745661074, 1269634.1724650632, 626269.8255775039, - 1301517.8262481107, 1300587.1414787236, 1311680.091958314, - 1264695.7174920943, 1276818.4767300743, 955735.2904939697, - 627840.4816115997, 1298027.953381, 1341760.049947671, 1320711.6679697244, - 614685.9965727943, 613990.1223337643, 1278297.233973629, 613849.0062687204, - 1019602.2476383204, 984775.6763699885, 1323264.4661876895, - 590346.9033057999, 1306365.6665604536, 615617.6329789448, 605294.5599004775, - 567266.1399915857, 1304664.2124399566, 625346.9530833194, 1309800.641153945, - 1245264.8348379661, 1319130.9935989932, 1268099.6908233128, - 1301644.343526807, 1318005.6336406171, 964982.5617610374, - 1249195.2390624627, 1303405.3561251888, 624244.0704566081, 610818.576241511, - 615325.506539928, 1306853.870863813, 614132.3882996654, 959899.6170241311, - 576540.1050361553, 1303734.1230753516, 949914.4792430103, - 1309280.5995075188, 646562.7131180285, 1340557.083031508, 619848.3242041203, - 1296868.5982923768, 1323046.9515677483, 1314897.5182419387, - 954387.8703215031, 1345027.4547590483, 1315224.154181625, 1341224.300109372, - 1308841.685671632, 611337.140164553, 615787.390961553, 1371006.5613823484, - 1238156.3837690686, 1316467.3031855947, 973822.037610693, 1313988.520890941, - 617799.179884144, 1160654.6588762458, 1341015.676947195, 1275990.7064130914, - 1300765.6349284183, 634538.2208666857, 1301480.5320284145, - 880465.8642279101, 1336985.471889298, 603631.9054304892, 1252185.0679192548, - 619299.4291189188, 1299761.898298126, 627053.0692383595, 939114.4596820766, - 
604294.4129876157, 621684.2400536234, 1321384.8651638117, 604687.0496887681, - 1292938.468442314, 627347.5305193714, 912220.5616509254, 1223206.495885331, - 1016920.5218396728, 1193608.288077898, 1331771.1790555536, - 613208.9001305092, 1290889.7677546947, 1286338.338433489, 1321054.91537742, - 989659.7686285044, 1327672.442199076, 637561.3898267675, 1282514.4057014424, - 1292341.2200271732, 1278186.3262754928, 1322099.5199502467, - 1302705.4265251944, 1343632.888538275, 1350051.0451540658, - 635147.3709388634, 1199923.3756604805, 1347557.5773965227, - 1303131.7285653243, 1292357.702506334, 963466.0699411187, - 1266762.1219300749, 614528.0037371499, 952066.0727403805, 684107.9458411625, - 968815.4759116026, 644938.9377081349, 576594.8294733905, 1299692.813648791, - 1278197.9359972996, 1397266.04280915, 1318928.2928271522, 630774.4698020907, - 618812.540488198, 588624.2001168852, 628541.8497560846, 616253.292160023, - 4689250.765081556, 3284942.287345501, 1561990.161928688, 3091641.3926415, - 3302481.0257419716, 3557457.52229104, 2862988.557847251, 3318802.100701138, - 1477700.1197878101, 3132843.216972138, 3427483.9319452755, - 1571467.9029723846, 1611644.498596465, 1241394.0206111001, - 1563286.9202149666, 2904242.75253835, 1620335.9696889226, - 1590652.0489967468, 1681887.6697319658, 1525280.1728402758, - 1299145.1796238741, 1574730.6494901662, 3237998.5673291306, - 3100673.689426757, 3102005.550351644, 1625475.0168763383, - 1566615.1273576163, 1570748.8797822252, 3248918.4577439707, - 4792021.142763014, 1544311.8318805122, 1718490.983338527, 3268522.114529251, - 4762836.903657066, 2939225.318989128, 1344301.1887688315, 1611286.878399616, - 1521172.1649806, 4414990.786549249, 1557740.9687521262, 4812344.823148734, - 1339423.750349598, 1565347.9634640855, 3013267.006237754, - 1479213.9779787199, 1595138.5162332128, 1849587.585505803, - 2972734.423224385, 1563725.7777786942, 1518957.7741376492, - 3254286.7804285116, 2916046.9529287694, 1478848.044864928, - 
3435768.422822814, 1331712.5236708235, 1481590.3617936207, - 1624831.5050286965, 1615097.556296404, 3272038.1473084697, - 1609048.3883827175, 3017167.083584512, 1610827.3372839384, - 1548811.883290082, 1547122.8498816034, 1908145.1445577869, - 3321264.615466915, 2918621.5763906534, 3200825.8029115577, - 3412552.890074792, 2928738.8129830565, 3345394.985438163, - 1424513.8450512919, 1600925.9242994504, 1307034.0265005068, - 3238142.235623713, 1260595.1267942414, 1579232.7336311725, - 1593192.428349913, 1357587.8954091922, 1605723.1140237206, - 1471198.3707562294, 1566200.8381495439, 3334863.8710536375, - 1607443.469080856, 3087971.779359372, 1516584.080168956, 3370748.4463407267, - 1520925.6959302998, 1599770.7170870933, 3314029.3621261897, - 1327098.8928692618, 3271136.80844848, 1464015.463514628, 2987551.5688846116, - 3425388.3548945994, 1592790.3414637956, 1428801.706571422, - 3614724.523775022, 1485415.178692693, 3235208.67200566, 1509608.6319913617, - 1842595.0311935437, 3009874.7863896103, 3043092.069369858, - 1585071.9082778925, 1561587.8289395447, 4752185.429826702, - 1659454.3724410296, 1605682.9272412737, 2985306.5438876115, - 1307695.781340092, 3225076.673688169, 1568651.00013881, 1587768.3300755802, - 3170154.9046139135, 1942913.8360560243, 1554600.5997717918, - 1337326.6411501726, 3016838.209043043, 1566736.8825323756, - 1584460.0944470197, 3135231.5569204856, 1558717.636257998, - 1550523.8310978536, 1605872.4237679564, 1550916.869053492, - 4929149.158505527, 3090898.152774852, 326707.33278878423, 586582.2510686897, - 1040273.5216145609, 663456.1589854839, 628763.864136211, 631710.3409146703, - 2974969.2050759257, 2350994.1246176623, 927292.1778378924, - 906186.2849635344, 4815510.665413519, 4073054.5139100985, - 2582453.5261143036, 3791717.691912095, 1076084.5137618154, - 1050992.9359825936, 659805.0864546692, 524889.4076381266, 684610.2585634164, - 2482190.163780894, 3147675.778215055, 590779.3593612695, 2520614.1999391406, - 4284065.219542968, 
2350013.6682946547, 697094.3632107817, 595999.7268945806, - 613977.9639972697, 2349616.7092046617, 348422.6741657892, - 2832447.8123704637, 2251882.2903341125, 2073292.3277191636, - 1075406.2332272863, 2268795.9083214365, 569607.0887846396, - 1035879.0282849715, 580047.4222238179, 817887.362492032, 368301.63884114136, - 2402304.3602120853, 620301.273599366, 3898149.857796321, 967233.4192427912, - 851851.2144765641, 2259799.2695220974, 1046448.682971065, - 2279694.5933771436, 589521.8901331043, 640096.2828763435, - 1001827.4259398374, 663235.5720201755, 2275511.827574643, 532635.2106719125, - 573747.1033250515, 2188882.7924725777, 811367.3035765929, - 1094404.4385784809, 1962694.5775451271, 2353297.596668562, - 3956323.036473479, 551000.2042095973, 3041620.48586888, 2716259.4635958024, - 2538286.6804631515, 3462846.4979719142, 2448933.061526958, - 425575.3746669285, 718005.6558192926, 942463.4782404585, 696237.7555338832, - 303928.94774235255, 591186.6780862442, 2293511.760762214, 850401.336267145, - 599740.9120990313, 978140.4713298326, 2405056.7897379845, 569659.0553957296, - 1058424.2152493247, 3474199.2589658867, 4059954.8716884423, - 684125.6117095214, 2441921.904455878, 607919.7475941422, 696109.5012248884, - 2479297.6525335447, 848056.3582391874, 2414800.3380516106, - 677345.9445704689, 3211300.5777462213, 2531125.0256861052, - 2363668.5401442866, 609977.8691778653, 644745.8546982565, - 2500365.7517017713, 842894.2572151578, 919599.8905766604, 935440.6412322823, - 569092.1355945703, 2686811.057083088, 2262491.9955936116, - 2120139.8799883197, 644776.6744655148, 1198670.6744998174, 632320.965975916, - 410924.61303119943, 615620.4295453876, 3315813.3264064267, - 659323.9535972808, 754323.1823059556, 3971811.7410230758, 2576445.18437659, - 604936.9859450994, 639970.59282819, 390654.3385651133, 2427295.443240825, - 2482344.9964499986, 2073237.7269530422, 784518.5356063345, 931854.605283118, - 638607.6187520134, 656036.8439098079, 1536638.2977769098, 
971336.0860198564, - 657168.5925461971, 1047243.895324621, 2304245.791687425, 4046209.787567569, - 664924.925969654, 3556790.353202328, 2437183.015834753, 3853235.04146538, - 3923230.273160704, 4853915.752843315, 4019160.699485915, 3291329.647577719, - 3750760.68544669, 4310584.367353609, 4437631.401279061, 3966330.936666597, - 4791354.598704534, 4765128.609572224, 3853677.8510748516, 3726826.541438013, - 4666150.101034684, 2465417.0410843664, 4896563.910163089, - 3497868.1959586274, 2447767.361294713, 4651598.540896691, 4125003.566955331, - 3729395.921537789, 4765489.762319003, 3772815.925596426, 4506164.589281788, - 3760977.7339049596, 3712549.742073562, 3766444.9026440643, - 4596468.929871444, 3583226.0442232355, 4336138.415232669, - 3973410.9882445605, 3513849.1791014383, 4712651.793984984, 4647971.43685264, - 2419374.226331439, 4918719.422385973, 4326482.427693322, 4859350.7288916735, - 3714646.6885621673, 4781317.30605502, 4681310.863050464, 4882613.687658898, - 4018779.705926313, 2727368.5369471456, 3943267.8199042543, - 3880567.9269544436, 4790351.067798976, 3533906.3144794083, - 4920785.383121936, 4209923.168735281, 3730557.6470249994, 3620716.485843543, - 3792902.55111396, 4419569.525745951, 4086921.4133198457, 4421199.656758823, - 3824350.030476529, 4499683.668795835, 4814625.2026897, 4917461.848420044, - 4260917.267774875, 3892140.5652639293, 4611520.115512024, 4504279.598920873, - 4171080.379652097, 3876706.119385136, 3844419.803950264, 3583741.8439605073, - 3774164.9696325343, 4537462.535603455, 2445965.412728363, 4441156.388383883, - 3717402.667994187, 4460745.615674531, 3740556.182272691, 613112.7284013999, - 4606558.9385569915, 4390058.944194221, 3699055.432641474, 3446291.449309029, - 4008159.3842546693, 4266508.159241776, 4932860.021254259, 4141059.806999889, - 2563648.352683565, 3290270.3867326076, 3792633.859515982, - 3831820.2961091297, 3777663.3773713727, 2404130.7923515565, - 4947932.125297193, 3521752.3435497344, 4694321.155487998, - 
3920923.9421647675, 4078946.2879539775, 2410981.5846927688, - 3248388.1723704683, 3560282.1490704515, 4273095.149068147, - 3563137.5891453493, 4980074.978088616, 3580256.672719476, 3723038.973895004, - 3812678.5545700337, 4181470.9306162866, 3765521.873407997, - 4419723.101730548, 3675922.8759539295, 4993062.691075134, 4308157.825317868, - 3899232.739929042, 4997588.5488776695, 3666551.321411249, 2478151.998356148, - 4049492.7785645127, 4540903.374589877, 3868763.2431374504, - 4391210.975772967, 3791872.78231563, 3655313.377662583, 3714030.791712675, - 4363071.112453449, 3632413.469446986, 4744545.436295112, 1404996.113977791, - 1106109.485264826, 1219207.303832193, 1155939.0479725935, - 1143390.2905779013, 1677331.7938221171, 1411431.3155341486, - 1119564.0559917681, 1426570.3878291594, 1431306.95657745, - 1141223.4393986792, 1169337.433995499, 1156817.9267457044, - 1391528.8643628042, 1778363.7813163626, 1345027.6938821492, - 1195939.8132745437, 1379083.0789655799, 1490030.5725780134, - 1170132.410903005, 1315732.1770196105, 1287278.153909237, - 1120273.2154276406, 1454377.123040126, 1659927.5940422665, - 1162022.7139142272, 1505063.6104642947, 1331134.753776843, - 1770086.978863814, 1420176.641153229, 1218055.4135871914, - 1133335.8368891685, 1211439.848723043, 1181410.1298349183, - 1291667.9347102898, 1153809.4966411195, 1068379.4426398717, - 1409470.5604184398, 1311294.627850216, 1773978.9143506521, - 1144815.2477131763, 1196972.5438849765, 771664.0970781846, - 1675200.961991336, 1166380.8725417485, 1460118.2801933468, - 1274763.8338136175, 1562428.0141500179, 1585254.552442627, - 1479464.9775765003, 1384690.3394020826, 1818171.7514018524, - 1963861.6828574243, 1495466.7143062851, 1718942.2488255645, - 1210948.2857268758, 1791661.7899116885, 1174217.8370630948, - 1486933.1250917627, 1509578.246318606, 1448027.305898901, - 1221259.7286411684, 1413189.2367192695, 1117288.4761559078, - 1112067.4599088135, 1372906.3084364105, 1360663.7641749687, - 
1850576.6054628254, 1254662.0753329017, 1090764.5145491953, - 1823466.1663869221, 1189204.0634293328, 1507555.2621036393, - 1338439.041162795, 1249557.5416157367, 1414884.3899800181, - 978313.1742562433, 1141991.708387128, 1521510.8132824332, - 1149801.8136752073, 1554901.7603753367, 1140516.1649379502, - 795945.4885424556, 1160576.6618303407, 1076290.2413557677, - 1237727.503396124, 1184723.3012085543, 1161913.821666479, - 1109205.8718052292, 1125451.412579776, 1235722.6701151712, - 1938054.6767112724, 1298264.4478216292, 1166477.2857812278, - 1206334.2590484642, 1244338.7177162326, 1172407.298988676, - 1375615.2357155238, 1476764.072691892, 1376182.97314813, 1518032.5852638334, - 1144754.776572194, 1180414.77050371, 1175750.6115790065, 1187202.3127192338, - 1166610.4152340514, 1185949.8499817597, 1479151.8944341356, - 1814233.2176572597, 762476.1904101417, 1861633.4322943261, - 1148633.633942193, 1531397.4237451074, 1432681.1223998081, - 1692738.3026251197, 1501352.9583170419, 1168394.941887952, - 1293728.3720530744, 1354531.9582979905, 1362809.0080365152, - 1173049.9616134292, 1633897.5706188746, 976392.8055188047, - 1434359.9074569116, 1079273.4275430855, 1162003.4387536473, - 1188222.557990482, 1769560.9917410044 + 1076305.0548095063, 1274014.8681179238, 3028889.5806715763, + 1143890.6663075557, 1659464.2173418677, 2699309.902115875, + 1076335.503355923, 352635.7308373666, 2603161.810479186, 2347024.8785794047, + 4856283.026145798, 450234.2485880845, 1516993.3339504765, 566623.3418402866, + 1471532.449356915, 1645001.4676731045, 1075935.6489972214, + 429293.08453827247, 1239360.876342897, 4545591.372180139, + 1047240.8228514001, 2331527.0005564964, 1283031.9941565436, + 2713498.5618218705, 4866154.12984875, 1678297.5923604432, + 1118418.9036161539, 4895744.230385261, 1649300.6826094848, + 1625748.1243856796, 4239039.253812563, 1694581.2540947634, + 989692.4485468884, 4992739.9098980175, 1222170.2383768365, + 1653668.4281977327, 2660549.036851366, 
1709769.7229916735, + 645251.4045816506, 2964370.5853894297, 1720580.9280216645, + 4859641.034803553, 2447540.8820611746, 4726821.906400854, 908791.2611985459, + 1994480.2665682032, 1286981.9765388693, 4840620.895066006, + 1014046.4571606299, 1607303.0998420676, 1291627.5741840743, + 1184435.9424577875, 1620873.733484054, 4524563.989567181, 4793624.05568069, + 601627.4395613252, 908781.6081437265, 1180751.5117265838, 978673.1118017595, + 1181653.4406096204, 1782534.0795208176, 1510934.0008232747, + 2630140.9553146185, 1699392.4037567868, 1003518.4613242559, + 1732441.0706005928, 4943808.7801692, 3006613.428378637, 517539.24606278306, + 921313.5629166593, 2767639.87209481, 1089948.4912755515, 878016.2002900518, + 2899032.975813972, 4570527.250315333, 3125141.1886388096, + 1168494.2661755455, 2682607.6613684827, 1519077.1318776747, + 4343212.787991921, 1058824.488228389, 1761537.4227918861, 4319864.625480922, + 1743736.384092503, 790072.7327184592, 4870824.202902482, 1209573.607617632, + 4969403.914490354, 4985441.955928911, 971473.9941969224, 4909890.219451535, + 4887207.237153354, 2980364.918567962, 4783344.829589818, 2830105.1299794693, + 4845149.958065653, 4535174.807024328, 4957464.06745129, 1761868.1004463378, + 2583014.3628261504, 1796079.588419741, 4891218.129736502, + 1267132.0487377872, 1742982.1684557865, 4564515.630172227, + 1233630.5224242643, 2454500.2014846015, 1260795.932583197, + 1597359.266254315, 4892005.080369922, 1728942.8873556408, + 1675766.4976067757, 1059325.6803339042, 4784010.510256609, + 1554400.0780338089, 4815771.932933818, 4972126.50917909, 1727371.1659421583, + 1677815.1820078525, 1552392.565583682, 1710188.4861345305, + 1249472.7177338991, 1274997.4560268659, 4983364.981181158, + 1656001.8858500635, 1733147.0821107791, 4854703.598235259, + 1247300.534541018, 1306016.117465376, 1808756.9638211022, + 1887241.2900656867, 1678863.2325617168, 1550287.5138464859, + 1372925.3516452264, 1816615.169425408, 1764267.0431933922, + 
1783697.619506455, 1751494.455442991, 1818711.8713380175, + 1736358.0490799753, 1419527.5474453426, 1090186.0922483127, + 1811312.201465615, 1493285.0888632995, 1382687.7140229957, + 1821641.863630833, 1464659.4764333384, 1332415.7939280125, + 1839242.9759779212, 1541731.0569890523, 1560248.020625762, + 1569156.598945593, 1347818.8748081583, 1571010.5222268999, + 1343537.6452545812, 1877711.7652465466, 1766894.4525640432, + 1308053.84820394, 1853163.6390213596, 1550247.4640364815, + 1577192.8184346703, 1797450.3024368323, 1810260.1771596528, + 1814637.506914313, 1794544.7317371294, 1716041.90133594, 1592546.2762295837, + 1721775.972219937, 1337074.9851257284, 1686524.1816460257, + 1696276.8930557421, 1806256.278313767, 1810725.1468939104, + 1463631.336656084, 1859225.419392687, 1837718.4445689456, + 1818994.6903499917, 1831856.8501384712, 1065838.3875525293, + 1837999.5704106153, 1759611.8976574556, 1555443.7908134104, + 1762931.775326877, 1812706.3947476614, 1818533.8033644937, + 1809069.4665877197, 1804610.948996662, 1551482.6201327576, + 1309134.0288852188, 1821797.3163407429, 1551544.115458692, + 1337547.6727680198, 1314176.913315001, 1602611.256448784, + 1775996.8620366987, 1568053.8711802377, 1784776.525894697, + 1808062.0621886621, 1872933.01928909, 1425257.4900129952, + 1461229.3496091622, 1580439.413321149, 1801164.285060062, + 1773013.7134522886, 1808494.0751061696, 1491985.8815982637, + 1842461.8296703454, 1325007.4551078854, 1747211.2839785009, + 1526570.8714208321, 1755604.6829286618, 1836788.284565884, + 1913515.442566913, 1279476.4601446753, 1901676.9652631918, + 1852830.503155544, 1751481.5066980536, 1293452.486888936, + 1781172.7829757235, 1283155.5106739437, 1352037.8037197227, + 1483580.1515504823, 1820235.6662718486, 1817795.5855417356, + 1761392.5633265332, 1319857.2067011152, 1465265.4816825266, + 1756719.5062617818, 1778288.6944567624, 1825828.196046042, + 1530549.1278166254, 1818913.3304301354, 1565415.05556374, 1409475.529776979, + 
1290642.1272211876, 1822917.8358831364, 1924871.4995842548, + 1849126.8746678887, 1827645.910552194, 1810051.267026682, + 1583535.3026479802, 1824908.1763596458, 1839612.3922563866, + 1592471.217966584, 1347500.3249572073, 1806056.420932641, + 1794086.3311892925, 1775856.3899944006, 1813767.73834381, + 1350190.3452361438, 1756149.5448689228, 1811403.2815803736, + 1416794.963846566, 1792657.3048866538, 1795122.4056472098, + 1842290.1411235104, 3555785.8895800197, 3907323.0998425744, + 4953433.703732318, 4893102.7342441, 2074495.1342526379, 3108683.0502333078, + 1975415.5054387737, 4881077.832985298, 3805022.4957191567, + 3635940.2089367486, 4991983.969634204, 1672299.130429524, + 2353517.3989988565, 1677036.9093593264, 4996047.8760925885, + 4030509.3310254253, 2429792.898139142, 1525058.97696747, 4863000.24384818, + 3286400.7650854783, 4899036.245445326, 1688144.1631034582, + 3959977.789934429, 1939067.623011414, 2513785.590667748, 3427265.9925740315, + 3802976.7136378014, 1833864.200825193, 4936692.443715439, 4847093.039768627, + 3439341.2680034535, 4580894.742151254, 1675632.6473680916, + 4942669.202017214, 4949911.253849516, 1539303.8882766082, 4907781.608923349, + 1599304.2270174765, 4853052.551850043, 2690312.249896324, 4977738.667160398, + 4014842.9344734782, 4772052.829148649, 4946134.073506928, 4891400.517924834, + 1563948.5470666385, 4963494.690518004, 3455916.7904207623, + 1594353.1838845664, 2136069.7399701015, 1960868.440712003, + 4946791.960446664, 1578186.0240469824, 1039332.9467361683, + 4534361.285623916, 1502628.7652763487, 3848962.32309536, 4964243.514092966, + 1746584.2283781914, 4920333.5190154705, 4992536.373555388, + 1510542.165557836, 1932077.6418148156, 947131.6275476817, + 3383765.5498580476, 1721174.9939949135, 4610120.999424338, + 1576570.6760384953, 4929844.41561427, 3794198.3896968192, + 1823897.6604270763, 2733661.3742502932, 1936627.8445350653, + 1330548.8220227514, 4928764.248920986, 4153386.8617182737, + 3474415.774746538, 
4870869.032826135, 4979556.155861513, 2449924.3625269146, + 4971324.14496658, 4932477.57595642, 4989682.924902397, 4011920.4521225784, + 3998012.5411575586, 3452645.843464266, 3929289.9909262042, + 4879367.427849928, 3516330.47709839, 2603032.638057142, 4991557.342060963, + 4551954.161889161, 2272920.8670223383, 4965070.996917123, 4991829.95014887, + 4995617.005788702, 1663557.2846390805, 3800571.051961765, 4969769.300611475, + 3793917.8430706714, 3921097.8238390475, 4898314.714649072, + 1673146.2519148286, 3964490.1800356554, 2596747.2888924517, + 4946307.4772146335, 3927408.530694202, 2294459.3685813467, + 4666642.880495793, 2520052.575286024, 4882161.477727743, 1502038.0040220905, + 4952704.900322055, 4913678.4923746, 1658358.8273468106, 3396237.847982675, + 3372055.81845571, 4938288.583101874, 3429284.172309591, 3996549.3249369375, + 3473218.28398501, 2099960.7856415785, 1677519.5747223774, + 2080140.7312280592, 3683582.822791484, 4816445.490530359, + 1840800.9995570239, 4934726.684701213, 696274.1780544003, 977114.9065655936, + 704248.7546918886, 723053.1991520516, 715689.59386606, 723339.2526195339, + 733208.7694177118, 715740.2233475356, 723449.0884138979, 727075.2781786362, + 778277.0110347356, 735683.34847786, 711807.212610304, 734267.5315522886, + 771165.2035156756, 664428.4818977948, 1014027.6889774237, 702046.6978629386, + 662129.1919203124, 932504.2541218471, 677953.801200007, 714769.491370017, + 707680.4145405131, 705826.3544060204, 738714.8581226947, 694215.605582292, + 718088.5338125431, 750849.5172228182, 763949.6816692011, 687409.3561525978, + 709395.7123275797, 722853.2059985066, 805593.8974431758, 667673.6924251335, + 667089.15905645, 687333.1127375924, 715708.531916488, 713569.722485599, + 725753.0522763369, 708111.4954312729, 718670.2139866629, 704960.1323882153, + 694491.4600189694, 695174.5495501509, 671718.6348034905, 690626.4089082523, + 731323.3818467105, 675987.0714277946, 663648.748687941, 724316.9271271799, + 770106.5190920832, 
757390.2255635171, 736800.5055284495, 680145.9229020884, + 778837.4612824832, 724487.4514228847, 700653.8538034341, 999377.0647286978, + 737021.6185337914, 702902.5853517273, 695081.120616649, 714525.957410045, + 723062.9989590938, 720068.8482184309, 671428.9058199461, 718488.9051946047, + 737621.6003383707, 736091.6616031877, 652433.6810583007, 732439.475198412, + 693276.7483382376, 707444.7726414837, 727943.5411774678, 714928.4948566569, + 715346.4409983024, 711292.1345171069, 730548.2917073125, 680393.1785631913, + 670762.8858943809, 670639.7702194201, 729723.8469780909, 670314.8076410824, + 965565.5899580735, 1001641.6948736723, 701996.7146784923, + 1413894.1708499803, 685550.1077210506, 708728.3667463367, 721931.9262027553, + 730997.1201023782, 710100.5878968987, 695549.6951397416, 718203.3526349481, + 720793.1054254624, 713180.6723524305, 775818.1573013286, 713594.5043765646, + 732791.7965504144, 679515.9205107052, 722744.8465232642, 679314.870643217, + 657786.2806592482, 698732.2409326098, 707951.4828380437, 698407.2581215146, + 670155.6089908336, 686954.0230547003, 780107.3769542516, 726863.1625839146, + 736532.1167107861, 715211.3118874102, 725345.2555966857, 722722.4713552416, + 733783.7703545495, 697946.6001312633, 702977.84375674, 730652.9024893688, + 718297.2715621571, 676319.0311615359, 709795.1635597708, 708038.2171009785, + 741683.532303051, 755061.4922990712, 725032.241889262, 662772.0580008812, + 735160.7020832581, 982727.5450049061, 1003581.1543877283, + 1299920.2201135424, 1339951.105516739, 1332448.0760660938, + 1523418.3154743144, 1300037.1025224805, 1236220.7764432216, + 1347236.9480274583, 1373076.7836874195, 1416332.6223184196, + 1381915.9136489546, 1326617.7074006395, 1376536.4055884148, + 2761125.5783517254, 1355030.9980485872, 1310513.1324472467, + 1296330.9070995112, 1245277.0284094333, 1322332.437505249, + 1362621.405657087, 1325752.532325614, 1376644.861136477, 1371626.88216043, + 1372802.3840331968, 1307422.9771560102, 
1386702.9674253848, + 2153690.2042459655, 1353388.3149362174, 1360644.423043591, + 1405002.0291002342, 1370639.2924524955, 1498121.7204119752, + 1466801.7052036051, 1356043.979751104, 1324558.518027296, 1311380.487861534, + 1337546.218595399, 1346411.808889208, 1344912.0132030714, 1375694.620029273, + 1327133.8887974764, 1341301.6483222218, 1377984.8433633156, + 1243051.5748613412, 1429292.8855670423, 1358366.7845356686, + 1301178.6592229917, 1377286.2491162852, 1387341.3896237707, + 1324674.4870377716, 1416104.6936010437, 1337702.1462054132, + 1218939.574410686, 1353198.3064663962, 1378270.70864763, 1292141.7794373415, + 1340715.766947458, 1250518.0710822192, 1354772.6978197037, + 1397172.1491910706, 1322694.4439692998, 1332590.632346089, + 1354134.2784779884, 1338868.5735632374, 1305791.114841357, + 1375199.019312912, 1276990.9041953152, 1396163.179490825, + 1223348.7902917971, 2793693.6845021546, 1328545.7155840301, + 1307919.0821507273, 1365073.038835003, 1418930.4045920202, + 1364200.7223998422, 1485981.1677670307, 1324196.9902965955, + 1373237.3177468537, 1476194.8609728194, 1370899.950229343, + 1255921.3935113552, 1367078.385226059, 1415796.3093844508, + 1341172.3503384867, 1374349.4193466504, 1403069.7883081166, + 2442503.193849505, 1292309.5685637202, 1377443.1834685872, + 1493784.687339048, 1363070.4863121132, 1363866.49544906, 1291232.94746457, + 1433998.1200484072, 1343061.2470559785, 1358515.8587958848, + 1349212.1776381286, 1298334.2340974568, 1340092.3078014923, + 1336368.3059179292, 1360320.2477715542, 1354635.8157955513, + 1370088.428422539, 1380066.1733179132, 1375265.3038217907, + 1385093.0049005975, 1358879.8389531537, 1385127.1826908549, + 1375974.524985133, 1386994.1501215154, 1311887.3207843162, + 1385461.5062333816, 1309668.5877177285, 1288284.9730499145, + 1382867.5595568847, 1354601.5554576218, 1352921.453232807, + 1352838.7191582005, 1378908.7730626997, 1366162.022948226, + 1381309.1148536922, 1337120.383762266, 1221114.6768310044, + 
1344009.4287095892, 1323962.6771482073, 1334944.6128577075, + 1304930.1822050002, 1374105.1606070376, 1316295.8754650452, + 2649581.9545928636, 2179689.9117643903, 2335530.0436406727, + 1699047.8735097337, 1164546.3731061926, 2375361.7013814757, + 2284271.5588358263, 2579727.978517542, 620006.5201369199, 2517399.144199749, + 1482205.8661101079, 2687715.3864692934, 1242290.9074079688, + 2466198.8975493023, 2470718.781803293, 3014229.0207799873, + 3210725.4354550764, 2610235.429485809, 2511486.743598464, 568857.8298705424, + 2027352.9820988036, 632731.7183452081, 2849621.872288334, 2275191.922675545, + 2321175.556050338, 2589826.902997423, 2737344.622994596, 2027979.9750475977, + 647946.767387552, 586137.3189868254, 2703946.352563406, 2500586.002419654, + 1938084.675170558, 1401370.1888256874, 2634402.766296398, + 2853552.7802412934, 1262493.4049824611, 643132.1111910554, + 2444917.4635189585, 2661042.222843053, 1377554.3115871882, + 2684629.6589713776, 1104610.4344842907, 436802.24987278297, + 2318104.892047383, 1326565.5556660495, 1400590.8770790016, + 2579960.243184823, 2882950.526230626, 467685.81039780786, 3148399.970257821, + 2538893.457180376, 2658925.854323829, 2508794.278378937, 2548108.4077355615, + 2428612.3715507598, 1138820.7121792897, 2401571.447181527, + 2410236.630640134, 2433674.04131285, 2560113.5685924026, 1783667.2606925385, + 2527719.8497590655, 1138399.7924135262, 2283031.3056275803, + 3338848.0537902447, 1729231.8717282931, 1379843.2364798114, + 2638789.087566091, 2463599.9409068604, 956599.9543763845, 607832.5142533574, + 1358536.3028137025, 2581482.1587207587, 417477.92586919543, + 570084.2111378344, 463056.77476512815, 696182.5761471054, + 3183038.8652245044, 2670885.402251232, 2447266.64780388, 2421279.008888041, + 2535671.000674254, 2587202.4848562954, 1247413.2516027016, + 2590232.487066666, 2567640.599735932, 3108299.521308468, 1548813.202480768, + 2523026.6163216154, 2689502.654408403, 1777667.3859755446, + 2204375.8585431348, 
2539098.8191832625, 762645.930906595, 2497170.150488049, + 2710725.9577662516, 1188711.237674666, 2517949.239777222, 2626295.339610661, + 2152081.7055486413, 2718579.2009385503, 2694533.027028522, + 2450452.251244611, 2861770.9470257857, 2527148.787660251, + 2608968.3085201285, 2098766.9761144957, 2292912.638615212, + 868656.4336724265, 2576657.4184989184, 2464414.126744297, 919295.2904499834, + 1250346.4748481442, 2022132.9730077488, 2174700.2956683584, + 2662999.8364258115, 720016.5393023381, 2690523.305557801, + 1087520.4245356778, 2189668.5952124028, 1251126.840052398, + 3099009.0569644934, 2489605.0596307334, 1217656.5578085722, + 4733352.68383007, 1323285.9658512305, 2745860.198616557, 3374564.8068751036, + 3709464.2174411695, 3679177.637952521, 1001178.801886166, 3391827.735284281, + 3738678.700878601, 899759.5586875157, 3723038.2356516854, + 3350526.9035543273, 3607219.099166995, 3641503.8319191295, + 3685015.794694415, 3018020.9628151716, 3655278.17550592, 867588.3127390018, + 893558.4593238952, 3703003.50071019, 3411547.1962072346, 3698774.202713801, + 3627097.4627352147, 3594064.4434281997, 948037.1623909043, + 3576964.3699986273, 3317980.7307798485, 3055285.2464326336, + 3879853.998857058, 3282946.387323138, 3173030.1209533378, 3684965.06695011, + 3162278.6555179274, 3727339.3384585963, 3755433.9472270524, + 4069871.4839231954, 1367817.9696956722, 3606733.240720957, + 3513946.480780839, 3059871.6341985767, 3650823.1372403908, + 3581146.202727878, 3651790.9797169305, 3297594.167233534, + 3849784.7046917696, 3622831.2258461844, 3437703.966895848, + 3662007.990644223, 3311167.904769838, 3532404.937223719, 3645047.6268227673, + 3236984.697961253, 3620076.247635019, 3680555.33042345, 3062042.051286948, + 3798199.437823972, 3742776.7714004973, 3612318.441736371, 3721703.467915258, + 3886109.921663008, 3715057.9425746715, 3689694.703535878, + 3199587.2469107024, 3199356.043724468, 3523735.460677081, 3780241.051955793, + 3661142.402988302, 3082942.0224348684, 
3674198.9428353477, + 933523.6233204609, 3153777.803309859, 3800508.217741793, 3791293.557364432, + 3599647.2568181716, 1359853.6804258646, 3474461.7942054314, + 3698498.0864352877, 3662059.2456929893, 3681518.5117378687, + 3280695.6721412116, 3796080.8992671217, 3580546.591457012, + 3656114.442955281, 3607834.23308205, 3343863.6846270226, 3665511.3814997114, + 3642059.4970669444, 3817509.4870599145, 3717414.088093791, + 3618436.965203852, 3652400.468692094, 3557235.3635695404, + 3165567.6552376514, 3753521.266886627, 3633524.645801672, + 3698611.6878310656, 3814676.999815326, 3592309.611143057, 3689282.427033135, + 3287897.4329801584, 3320058.350873581, 898926.5784512607, 3716526.600791848, + 3444925.4020958897, 883535.8610939828, 3227797.7090184162, + 3848982.970155043, 3547392.10647767, 3699434.9488856075, 3640255.674052903, + 3774800.6867642542, 3826552.3117183754, 3674731.269173022, + 3904831.8125856454, 3524399.511219751, 3404698.7432854553, + 3114189.170807802, 3665046.806957261, 870945.2234842947, 3633157.1612423393, + 870074.7230220184, 3689900.6316646123, 3514995.3848967203, + 3721760.6638671258, 3582512.357151696, 3605770.9044256615, + 3423934.807487879, 3733827.3663384076, 3841425.908724158, + 3588268.0028364193, 1420167.3140684778, 732016.046670862, 644031.4040257381, + 1808526.7144252888, 649892.0520929829, 1468141.616057523, 698397.2180529382, + 790876.9259626543, 652540.5596913998, 816942.4892338095, 1426030.4422868658, + 713171.8804068126, 1088753.231439714, 1503436.7992094944, 687308.4915954405, + 689131.0606743827, 598777.2620181814, 1811753.7904548445, + 1437538.9779876224, 698914.1441359145, 1383596.353475203, 707541.476599569, + 803890.0314525742, 1242901.905492209, 694187.1261189261, 778840.0056708413, + 687194.5986824844, 782436.9498263982, 653049.342819279, 1739825.2010588024, + 736564.9293025669, 1345544.5097697873, 711317.7728734241, 713223.7361286349, + 645849.9228666323, 841705.4573081025, 678832.705252929, 655183.476252939, + 
715478.3827023609, 656888.3011520593, 764417.6911018988, 714129.6846601445, + 682880.1016793543, 1238514.5196713135, 1249610.3530250655, + 1476829.8551104246, 646985.9300449472, 1764367.0703120632, + 698613.5778207763, 829103.5364863037, 1408505.4565960327, 677098.4707271804, + 780478.6501300593, 636092.0438097507, 728416.9323471448, 1020915.4495321931, + 826751.1197668758, 700742.6761342587, 851551.3991516606, 696091.9712322927, + 781070.4899256596, 719198.5886951704, 688149.8529926995, 1256269.912330564, + 1465168.2600015292, 768889.2314932836, 733445.1366547707, + 1450483.5845273691, 1253385.895342979, 675033.0895082733, 555861.4539995224, + 1431010.6572317148, 707469.0090936286, 781259.112693656, 738566.2038128461, + 1801369.52219819, 787289.9667405515, 780462.9505890833, 723330.1933672263, + 1226067.8389819565, 665050.7158546642, 1387711.3350473302, + 1695530.4410641755, 601406.1969416084, 725047.4350843392, 976132.4640761268, + 741789.8012351829, 689671.1229565318, 722311.8243149592, 702121.7721747903, + 688224.7856890128, 788610.905786269, 1222231.1134322172, 753651.2305186274, + 767738.6759731061, 714353.7255615595, 752789.8286728447, 593672.2627815048, + 1241823.211722479, 1424598.2478901816, 645780.5968008332, + 1503660.7073566155, 645157.0208090902, 653381.1095077095, + 1015897.2164154407, 775954.4488272294, 1233929.155833334, 742386.3628787783, + 1234783.2488348281, 751594.8455553701, 708510.9302051875, 625113.0825702887, + 729554.8265901917, 782652.4008618937, 586350.6155539325, 583143.532711089, + 604502.8857466846, 744167.0813692673, 1716136.7522328242, 771124.3110175928, + 1239790.0197478037, 615232.9507233064, 693196.6279718701, 834635.2751549219, + 733415.4252643404, 696706.9400633188, 585368.92673763, 671923.3064182912, + 1555671.7049957465 }; - + struct teca_bayesian_ar_detect_parameters::internals_t { diff --git a/alg/teca_binary_segmentation.cxx b/alg/teca_binary_segmentation.cxx index 1cdec31e0..894fa5419 100644 --- 
a/alg/teca_binary_segmentation.cxx +++ b/alg/teca_binary_segmentation.cxx @@ -4,10 +4,12 @@ #include "teca_array_collection.h" #include "teca_variant_array.h" #include "teca_metadata.h" +#include "teca_array_attributes.h" #include "teca_cartesian_mesh.h" #include #include +#include #include #include @@ -159,7 +161,6 @@ int teca_binary_segmentation::get_segmentation_variable( return 0; } - // -------------------------------------------------------------------------- int teca_binary_segmentation::get_threshold_variable( std::string &threshold_var) @@ -185,18 +186,43 @@ teca_metadata teca_binary_segmentation::get_output_metadata( #endif (void) port; - - std::string segmentation_var = this->segmentation_variable; - if (segmentation_var.empty()) + if (this->threshold_variable.empty()) { - if (this->threshold_variable.empty()) - segmentation_var = "_segmentation"; - else - segmentation_var = this->threshold_variable + "_segmentation"; + TECA_ERROR("a threshold_variable has not been set") + return teca_metadata(); } + std::string segmentation_var; + this->get_segmentation_variable(segmentation_var); + + // add to the list of available variables teca_metadata md = input_md[0]; md.append("variables", segmentation_var); + + // insert attributes to enable this to be written by the CF writer + teca_metadata attributes; + md.get("attributes", attributes); + + std::ostringstream oss; + oss << "a binary mask non-zero where " << this->low_threshold_value + << (this->threshold_mode == BY_VALUE ? "" : "th percentile") + << " <= " << this->threshold_variable << " <= " + << (this->threshold_mode == BY_VALUE ? 
"" : "th percentile") + << this->high_threshold_value; + + teca_array_attributes default_atts( + teca_variant_array_code::get(), + teca_array_attributes::point_centering, + 0, "unitless", segmentation_var, oss.str()); + + // start with user provided attributes, provide default values + // where user attributes are missing + teca_metadata seg_atts(this->segmentation_variable_attributes); + default_atts.merge_to(seg_atts); + + attributes.set(segmentation_var, seg_atts); + md.set("attributes", attributes); + return md; } @@ -352,18 +378,5 @@ const_p_teca_dataset teca_binary_segmentation::execute( out_metadata.set("low_threshold_value", low); out_metadata.set("high_threshold_value", high); - - // get a copy of the attributes - teca_metadata attributes; - out_metadata.get("attributes", attributes); - - // insert attributes for the segmentation variable - // into the output metadata pointer - attributes.set(segmentation_var.c_str(), - this->get_segmentation_variable_atts()); - - // overwrite the outgoing metadata with the new attributes variable - out_metadata.set("attributes", attributes); - return out_mesh; } diff --git a/alg/teca_binary_segmentation.h b/alg/teca_binary_segmentation.h index b6b95295f..983a497fb 100644 --- a/alg/teca_binary_segmentation.h +++ b/alg/teca_binary_segmentation.h @@ -33,6 +33,9 @@ class teca_binary_segmentation : public teca_algorithm // set the name of the output array to store the resulting segmentation in TECA_ALGORITHM_PROPERTY(std::string, segmentation_variable) + // set extra metadata for the segmentation variable + TECA_ALGORITHM_PROPERTY(teca_metadata, segmentation_variable_attributes) + // set the name of the input array to segment TECA_ALGORITHM_PROPERTY(std::string, threshold_variable) @@ -40,10 +43,6 @@ class teca_binary_segmentation : public teca_algorithm TECA_ALGORITHM_PROPERTY(double, low_threshold_value) TECA_ALGORITHM_PROPERTY(double, high_threshold_value) - - // set the metadata for the segmentation variable - 
TECA_ALGORITHM_PROPERTY(teca_metadata, segmentation_variable_atts) - // Set the threshold mode. In BY_PERCENTILE mode low and high thresholds // define the percentiles (0 to 100) between which data is in the // segmentation. default is BY_VALUE. @@ -73,11 +72,11 @@ class teca_binary_segmentation : public teca_algorithm private: std::string segmentation_variable; + teca_metadata segmentation_variable_attributes; std::string threshold_variable; double low_threshold_value; double high_threshold_value; int threshold_mode; - teca_metadata segmentation_variable_atts; }; #endif diff --git a/alg/teca_cartesian_mesh_source.cxx b/alg/teca_cartesian_mesh_source.cxx index dbdf1ba57..7946d188f 100644 --- a/alg/teca_cartesian_mesh_source.cxx +++ b/alg/teca_cartesian_mesh_source.cxx @@ -147,9 +147,10 @@ void teca_cartesian_mesh_source::clear_cached_metadata() // -------------------------------------------------------------------------- void teca_cartesian_mesh_source::append_field_generator( - const std::string &name, field_generator_callback &callback) + const std::string &name, const teca_array_attributes &atts, + field_generator_callback &callback) { - this->append_field_generator({name, callback}); + this->append_field_generator({name, atts, callback}); } // -------------------------------------------------------------------------- @@ -184,6 +185,12 @@ teca_metadata teca_cartesian_mesh_source::get_output_metadata( this->whole_extents.data(), this->bounds.data(), x_axis, y_axis, z_axis, t_axis); + size_t nx = this->whole_extents[1] - this->whole_extents[0] + 1; + size_t ny = this->whole_extents[3] - this->whole_extents[2] + 1; + size_t nz = this->whole_extents[5] - this->whole_extents[4] + 1; + size_t nt = this->whole_extents[7] - this->whole_extents[6] + 1; + size_t nxyz = nx*ny*nz; + std::string x_ax_var_name = (this->x_axis_variable.empty() ? "x" : this->x_axis_variable); std::string y_ax_var_name = (this->y_axis_variable.empty() ? 
"y" : this->y_axis_variable); std::string z_ax_var_name = (this->z_axis_variable.empty() ? "z" : this->z_axis_variable); @@ -192,17 +199,29 @@ teca_metadata teca_cartesian_mesh_source::get_output_metadata( // construct attributes teca_metadata x_atts; x_atts.set("units", (this->x_axis_units.empty() ? "meters" : this->x_axis_units)); + x_atts.set("type_code", this->coordinate_type_code); + x_atts.set("size", nx); teca_metadata y_atts; y_atts.set("units", (this->y_axis_units.empty() ? "meters" : this->y_axis_units)); + y_atts.set("type_code", this->coordinate_type_code); + y_atts.set("size", ny); teca_metadata z_atts; z_atts.set("units", (this->z_axis_units.empty() ? "meters" : this->z_axis_units)); + z_atts.set("type_code", this->coordinate_type_code); + z_atts.set("size", nz); teca_metadata t_atts; t_atts.set("units", (this->time_units.empty() ? "seconds since 1970-01-01 00:00:00" : this->time_units)); + t_atts.set("calendar", (this->calendar.empty() ? + "standard" : this->calendar)); + + t_atts.set("type_code", this->coordinate_type_code); + t_atts.set("size", nt); + teca_metadata atts; atts.set(x_ax_var_name, x_atts); atts.set(y_ax_var_name, y_atts); @@ -228,8 +247,16 @@ teca_metadata teca_cartesian_mesh_source::get_output_metadata( std::vector::iterator it = this->field_generators.begin(); std::vector::iterator end = this->field_generators.end(); for (; it != end; ++it) + { vars.push_back(it->name); + // correct size + teca_array_attributes var_atts = it->attributes; + var_atts.size = nxyz; + + atts.set(it->name, teca_metadata(var_atts)); + } + this->internals->metadata.set("variables", vars); this->internals->metadata.set("attributes", atts); diff --git a/alg/teca_cartesian_mesh_source.h b/alg/teca_cartesian_mesh_source.h index 8b2cd5f6b..be1e5039a 100644 --- a/alg/teca_cartesian_mesh_source.h +++ b/alg/teca_cartesian_mesh_source.h @@ -2,6 +2,8 @@ #define teca_cartesian_mesh_source_h #include "teca_algorithm.h" +#include "teca_array_attributes.h" + #include 
#include #include @@ -9,7 +11,7 @@ TECA_SHARED_OBJECT_FORWARD_DECL(teca_cartesian_mesh_source) // f(x, y, z, t) -// given coordinat axes x,y,z return the field +// given spatial coordinate axes x,y,z and the time t, return the field using field_generator_callback = std::function; @@ -17,6 +19,7 @@ using field_generator_callback = std::function::get() to get specific type + // codes for C++ POD types NT. + TECA_ALGORITHM_PROPERTY(unsigned int, coordinate_type_code) + TECA_ALGORITHM_PROPERTY(unsigned int, field_type_code) // set/get the global index space extent of the data. the extents are // given by 8 values, 6 spatial plus 2 temporal, in the following order @@ -86,15 +91,15 @@ class teca_cartesian_mesh_source : public teca_algorithm TECA_ALGORITHM_PROPERTY(std::string, calendar) TECA_ALGORITHM_PROPERTY(std::string, time_units) - // set the named callbacks to generate fields on the mesh - // A callback f must have the signature f(x,y,z,t). + // set the named callbacks to generate fields on the mesh. A callback + // function must have the signature f(x,y,z,t). TECA_ALGORITHM_VECTOR_PROPERTY(field_generator_t, field_generator); // set a callback function f(x,y,z,t) that generates a field named name // x,y,z are coordinate axes in variant arrays, t is the double precision // time value. 
void append_field_generator(const std::string &name, - field_generator_callback &callback); + const teca_array_attributes &atts, field_generator_callback &callback); protected: teca_cartesian_mesh_source(); @@ -111,8 +116,8 @@ class teca_cartesian_mesh_source : public teca_algorithm void clear_cached_metadata(); private: - int coordinate_type_code; - int field_type_code; + unsigned int coordinate_type_code; + unsigned int field_type_code; std::string x_axis_variable; std::string y_axis_variable; std::string z_axis_variable; diff --git a/alg/teca_component_area_filter.cxx b/alg/teca_component_area_filter.cxx index df6e25ff4..2441e8d73 100644 --- a/alg/teca_component_area_filter.cxx +++ b/alg/teca_component_area_filter.cxx @@ -58,7 +58,7 @@ void apply_label_map(label_t *labels, const label_t *labels_in, teca_component_area_filter::teca_component_area_filter() : component_variable(""), number_of_components_key("number_of_components"), component_ids_key("component_ids"), component_area_key("component_area"), - mask_value(0), low_area_threshold(std::numeric_limits::lowest()), + mask_value(-1), low_area_threshold(std::numeric_limits::lowest()), high_area_threshold(std::numeric_limits::max()), variable_post_fix(""), contiguous_component_ids(0) { @@ -92,7 +92,7 @@ void teca_component_area_filter::get_properties_description( "(\"component_area\")") TECA_POPTS_GET(int, prefix, mask_value, "components with area outside of the range will be replaced " - "by this label value (0)") + "by this label value (-1)") TECA_POPTS_GET(double, prefix, low_area_threshold, "set the lower end of the range of areas to pass through. " "components smaller than this are masked out. 
(-inf)") @@ -287,6 +287,18 @@ const_p_teca_dataset teca_component_area_filter::execute( // get the output metadata to add results to after the filter is applied teca_metadata &out_metadata = out_mesh->get_metadata(); + long mask_value = this->mask_value; + if (this->mask_value == -1) + { + if (in_metadata.get("background_id", mask_value)) + { + TECA_ERROR("Metadata is missing the key \"background_id\". " + "One should specify it via the \"mask_value\" algorithm " + "property") + return nullptr; + } + } + // apply the filter NESTED_TEMPLATE_DISPATCH_I(teca_variant_array_impl, labels_out.get(), @@ -327,13 +339,13 @@ const_p_teca_dataset teca_component_area_filter::execute( max_id = std::max(max_id, p_ids_in[i]); // allocate the map - std::vector label_map(max_id+1, NT_LABEL(this->mask_value)); + std::vector label_map(max_id+1, NT_LABEL(mask_value)); // construct the map from input label to output label. // removing a lable from the output ammounts to applying // the mask value to the labels ::build_label_map(p_ids_in, p_areas, n_ids_in, - low_val, high_val, NT_LABEL(this->mask_value), + low_val, high_val, NT_LABEL(mask_value), label_map, ids_out, areas_out); // use the map to mask out removed labels @@ -347,7 +359,7 @@ const_p_teca_dataset teca_component_area_filter::execute( // removing a lable from the output ammounts to applying // the mask value to the labels ::build_label_map(p_ids_in, p_areas, n_ids_in, - low_val, high_val, NT_LABEL(this->mask_value), + low_val, high_val, NT_LABEL(mask_value), label_map, ids_out, areas_out); // use the map to mask out removed labels @@ -359,6 +371,7 @@ const_p_teca_dataset teca_component_area_filter::execute( out_metadata.set(this->number_of_components_key + this->variable_post_fix, ids_out.size()); out_metadata.set(this->component_ids_key + this->variable_post_fix, ids_out); out_metadata.set(this->component_area_key + this->variable_post_fix, areas_out); + out_metadata.set("background_id" + this->variable_post_fix, 
mask_value); // pass the threshold values used out_metadata.set("low_area_threshold_km", low_val); diff --git a/alg/teca_component_area_filter.h b/alg/teca_component_area_filter.h index b1e8efce3..f6c8e5a22 100644 --- a/alg/teca_component_area_filter.h +++ b/alg/teca_component_area_filter.h @@ -65,8 +65,12 @@ class teca_component_area_filter : public teca_algorithm // areas TECA_ALGORITHM_PROPERTY(std::string, component_area_key) - // set this to be the default label id for the filtered - // out component areas. The default will be '0' + // set this to be the default label id for the filtered out component + // areas. This will typically correspond to the label used for cells + // outside of the segmentation (i.e. in the background). One can use this + // property to override the mask value. The default mask value is '-1' + // which results in aquiring the mask value from input metadata key + // `background_id`. Use -2 to specify no background label. TECA_ALGORITHM_PROPERTY(long, mask_value) // set the range identifying values to area filter. @@ -75,7 +79,7 @@ class teca_component_area_filter : public teca_algorithm TECA_ALGORITHM_PROPERTY(double, high_area_threshold) // a string to be appended to the name of the output variable. - // setting this to an empty string will result in the damped array + // setting this to an empty string will result in the masked array // replacing the input array in the output. 
default is an empty // string "" TECA_ALGORITHM_PROPERTY(std::string, variable_post_fix) diff --git a/alg/teca_connected_components.cxx b/alg/teca_connected_components.cxx index 288ce828e..ff70ef875 100644 --- a/alg/teca_connected_components.cxx +++ b/alg/teca_connected_components.cxx @@ -5,11 +5,13 @@ #include "teca_variant_array.h" #include "teca_metadata.h" #include "teca_cartesian_mesh.h" +#include "teca_array_attributes.h" #include #include #include #include +#include using std::cerr; using std::endl; @@ -164,7 +166,7 @@ void periodic_labeler(unsigned long i0, unsigned long j0, unsigned long k0, /** given a binary segmentation(segments) and buffer(components), both with dimensions described by the given exent(ext), compute -the componenting. +the labeling. */ template void label(unsigned long *ext, int periodic_in_x, int periodic_in_y, @@ -273,8 +275,27 @@ teca_metadata teca_connected_components::get_output_metadata( component_var = this->segmentation_variable + "_components"; } + // tell the downstream about the variable we produce teca_metadata md = input_md[0]; md.append("variables", component_var); + + // add metadata for CF I/O + teca_metadata atts; + md.get("attributes", atts); + + std::ostringstream oss; + oss << "the connected components of " << this->segmentation_variable; + + teca_array_attributes cc_atts( + teca_variant_array_code::get(), + teca_array_attributes::point_centering, + 0, "unitless", component_var, + oss.str().c_str()); + + atts.set(component_var, (teca_metadata)cc_atts); + + md.set("attributes", atts); + return md; } @@ -417,6 +438,7 @@ const_p_teca_dataset teca_connected_components::execute( teca_metadata &omd = out_mesh->get_metadata(); omd.set("component_ids", component_id); omd.set("number_of_components", num_components); + omd.set("background_id", short(0)); return out_mesh; } diff --git a/alg/teca_connected_components.h b/alg/teca_connected_components.h index 64675ddb6..0f188f7e0 100644 --- a/alg/teca_connected_components.h +++ 
b/alg/teca_connected_components.h @@ -10,7 +10,7 @@ TECA_SHARED_OBJECT_FORWARD_DECL(teca_connected_components) -/// an algorithm that computes connected component componenting +/// an algorithm that computes connected component labeling /** an algorithm that computes connected component labeling for 1D, 2D, and 3D data. The components are computed from a binary segmentation provided on the @@ -21,6 +21,26 @@ component_variable property. the component ids are added to the output dataset metadata in an key named 'component_ids', and the number of components is stored in a key named 'number_of_components'. These keys facilitate further processing as one need not scan the labeled data to get the list of label ids. + +The cells outside of the segmentation (i.e. the background) are always assigned +the label 0. The cells belonging to connected regions inside the segmentation +are labeled starting from 1 up to number_of_components - 1. + +output keys: + + number_of_components - number of component ids found. this will always be + at least 1 long as the cells outside the segmentation + are assigned the label 0. + + component_ids - a vector containing the label of each component. This is + always starts with 0, where the label 0 identifies cells + out side of the segmentation, and ranges up to + number_of_components - 1, where the labels from 1 up to + number_of_components - 1 identify connected regions of + cells inside the segmentation. + + background_id - the label used for cells outside of the segmentation, + i.e. the background. always 0. 
*/ class teca_connected_components : public teca_algorithm { diff --git a/alg/teca_dataset_diff.cxx b/alg/teca_dataset_diff.cxx index 28dd20fb8..3905871cb 100644 --- a/alg/teca_dataset_diff.cxx +++ b/alg/teca_dataset_diff.cxx @@ -2,9 +2,13 @@ #include "teca_table.h" #include "teca_cartesian_mesh.h" +#include "teca_curvilinear_mesh.h" +#include "teca_arakawa_c_grid.h" #include "teca_array_collection.h" #include "teca_metadata.h" #include "teca_file_util.h" +#include "teca_coordinate_util.h" +#include "teca_mpi.h" #include #include @@ -16,15 +20,20 @@ #include #include #include +#include +#include #if defined(TECA_HAS_BOOST) #include #endif +#define TEST_STATUS(_msg) \ + std::cerr << teca_parallel_id() \ + << " teca_dataset_diff :: " _msg << std::endl; // -------------------------------------------------------------------------- teca_dataset_diff::teca_dataset_diff() - : tolerance(1e-6) + : relative_tolerance(1.0e-6), absolute_tolerance(-1.0), verbose(1) { this->set_number_of_input_connections(2); this->set_number_of_output_ports(1); @@ -43,7 +52,9 @@ void teca_dataset_diff::get_properties_description( + (prefix.empty()?"teca_dataset_diff":prefix)); opts.add_options() - TECA_POPTS_GET(double, prefix, tolerance, "relative test tolerance") + TECA_POPTS_GET(double, prefix, relative_tolerance, "relative test tolerance") + TECA_POPTS_GET(double, prefix, absolute_tolerance, "absolute test tolerance") + TECA_POPTS_GET(int, prefix, verbose, "print status messages as the diff runs") ; global_opts.add(opts); @@ -52,10 +63,28 @@ void teca_dataset_diff::get_properties_description( // -------------------------------------------------------------------------- void teca_dataset_diff::set_properties(const std::string &prefix, variables_map &opts) { - TECA_POPTS_SET(opts, double, prefix, tolerance) + TECA_POPTS_SET(opts, double, prefix, relative_tolerance) + TECA_POPTS_SET(opts, double, prefix, absolute_tolerance) + TECA_POPTS_SET(opts, int, prefix, verbose) } #endif +// 
-------------------------------------------------------------------------- +double teca_dataset_diff::get_abs_tol() const +{ + return this->absolute_tolerance <= 0.0 ? + teca_coordinate_util::equal_tt::absTol() : + this->absolute_tolerance; +} + +// -------------------------------------------------------------------------- +double teca_dataset_diff::get_rel_tol() const +{ + return this->relative_tolerance <= 0.0 ? + teca_coordinate_util::equal_tt::relTol() : + this->relative_tolerance; +} + // -------------------------------------------------------------------------- teca_metadata teca_dataset_diff::get_output_metadata( unsigned int port, @@ -63,15 +92,6 @@ teca_metadata teca_dataset_diff::get_output_metadata( { (void) port; -#if defined(TECA_HAS_MPI) - int rank = 0; - - int is_init = 0; - MPI_Initialized(&is_init); - if (is_init) - MPI_Comm_rank(this->get_communicator(), &rank); -#endif - // get input 0 initializer std::string initializer_key; if (input_md[0].get("index_initializer_key", initializer_key)) @@ -88,17 +108,26 @@ teca_metadata teca_dataset_diff::get_output_metadata( return teca_metadata(); } + // if one were to run across all indices, both inputs would need to have + // the same number of them. it is not necessarily an error to have + // different numbers of indices because one could configure the executive + // to run over a mutual subset + /* // get input 1 initializer if (input_md[1].get("index_initializer_key", initializer_key)) { - TECA_ERROR("Input 0 metadata is missing index_initializer_key") + TECA_ERROR("Input 1 metadata is missing index_initializer_key") return teca_metadata(); } - // if one were to run across all indices, both inputs would need to have - // the same number of them. 
it is not necessarily an error to have - // different numbers of indices because one could configure the executive - // to run over a mutual subset + unsigned long n_indices_1 = 0; + if (input_md[1].get(initializer_key, n_indices_1)) + { + TECA_ERROR("Input 0 metadata is missing its intializer \"" + << initializer_key << "\"") + return teca_metadata(); + } + */ // prepare pieline executive metadata to run a test for each input dataset teca_metadata omd(input_md[0]); @@ -168,86 +197,108 @@ const_p_teca_dataset teca_dataset_diff::execute( (void) port; (void) request; + int rank = 0; +#if defined(TECA_HAS_MPI) + int is_init = 0; + MPI_Initialized(&is_init); + if (is_init) + MPI_Comm_rank(this->get_communicator(), &rank); +#endif + + const_p_teca_dataset ds0 = input_data[0]; + const_p_teca_dataset ds1 = input_data[1]; + // after map-reduce phase of a parallel run, only rank 0 // will have data. we can assume that if the first input, // which by convention is the reference dataset, is empty // then the second one should be as well. - if (!input_data[0] && !input_data[1]) + if (!ds0 && !ds1) return nullptr; // We need exactly two non-NULL inputs to compute a difference. - if (!input_data[0]) + if (!ds0) { TECA_ERROR("Input dataset 1 is NULL.") return nullptr; } - if (!input_data[1]) + if (!ds1) { TECA_ERROR("Input dataset 2 is NULL.") return nullptr; } // If one dataset is empty but not the other, the datasets differ. - if (input_data[0]->empty() && !input_data[1]->empty()) + if (ds0->empty() && !ds1->empty()) { TECA_ERROR("dataset 1 is empty, 2 is not.") return nullptr; } - if (!input_data[0]->empty() && input_data[1]->empty()) + if (!ds0->empty() && ds1->empty()) { TECA_ERROR("dataset 2 is empty, 1 is not.") return nullptr; } // If the datasets are both empty, they are "equal." :-/ - if (input_data[0]->empty() && input_data[1]->empty()) - return nullptr; - - // get the inputs. They can be tables or cartesian meshes. 
- const_p_teca_table table1 = - std::dynamic_pointer_cast(input_data[0]); - - const_p_teca_table table2 = - std::dynamic_pointer_cast(input_data[1]); - - const_p_teca_cartesian_mesh mesh1 = - std::dynamic_pointer_cast(input_data[0]); - - const_p_teca_cartesian_mesh mesh2 = - std::dynamic_pointer_cast(input_data[1]); - - // No mixed types! - if (((table1 && !table2) || (!table1 && table2)) || - ((mesh1 && !mesh2) || (!mesh1 && mesh2))) + if (ds0->empty() && ds1->empty()) { - TECA_ERROR("input datasets must have matching types."); + if (rank == 0) + { + TECA_ERROR("Both the reference and test datasets are empty") + } return nullptr; } - if (!table1 && !mesh1) + // compare the inputs. the type of data is inferred from the + // reference mesh. + if (dynamic_cast(ds0.get())) { - TECA_ERROR("input datasets must be teca_tables or teca_cartesian_meshes.") - return nullptr; + if (this->compare_tables( + std::dynamic_pointer_cast(ds0), + std::dynamic_pointer_cast(ds1))) + { + TECA_ERROR("Failed to compare tables."); + return nullptr; + } } - - if (table1) + else if (dynamic_cast(ds0.get())) { - if (this->compare_tables(table1, table2)) + if (this->compare_cartesian_meshes( + std::dynamic_pointer_cast(ds0), + std::dynamic_pointer_cast(ds1))) { - TECA_ERROR("Failed to compare tables."); + TECA_ERROR("Failed to compare cartesian_meshes."); return nullptr; } } - else + else if (dynamic_cast(ds0.get())) { - if (this->compare_cartesian_meshes(mesh1, mesh2)) + if (this->compare_curvilinear_meshes( + std::dynamic_pointer_cast(ds0), + std::dynamic_pointer_cast(ds1))) { - TECA_ERROR("Failed to compare cartesian meshes."); + TECA_ERROR("Failed to compare curvilinear_meshes."); return nullptr; } } + else if (dynamic_cast(ds0.get())) + { + if (this->compare_arakawa_c_grids( + std::dynamic_pointer_cast(ds0), + std::dynamic_pointer_cast(ds1))) + { + TECA_ERROR("Failed to compare arakawa_c_grids."); + return nullptr; + } + } + else + { + TECA_ERROR("Unsupported dataset type \"" + << 
ds0->get_class_name() << "\"") + return nullptr; + } return nullptr; } @@ -257,6 +308,18 @@ int teca_dataset_diff::compare_tables( const_p_teca_table table1, const_p_teca_table table2) { + int rank = 0; +#if defined(TECA_HAS_MPI) + int is_init = 0; + MPI_Initialized(&is_init); + if (is_init) + MPI_Comm_rank(this->get_communicator(), &rank); +#endif + if (this->verbose && (rank == 0)) + { + TEST_STATUS("comparing tables") + } + unsigned int ncols1 = table1->get_number_of_columns(); unsigned int ncols2 = table2->get_number_of_columns(); @@ -291,16 +354,28 @@ // At this point, we know that the tables are both non-empty and the same size, // so we simply compare them one element at a time. + double absTol = this->get_abs_tol(); + double relTol = this->get_rel_tol(); + for (unsigned int col = 0; col < ncols1; ++col) { const_p_teca_variant_array col1 = table1->get_column(col); const_p_teca_variant_array col2 = table2->get_column(col); - if (compare_arrays(col1, col2)) + + const std::string &col_name = table1->get_column_name(col); + + if (this->verbose && (rank == 0)) { - TECA_ERROR("difference in column " << col << " \"" - << table1->get_column_name(col) << "\"") - return -1; + TEST_STATUS(" comparing column \"" << col_name + << "\" absTol=" << max_prec(double) << absTol + << " relTol=" << max_prec(double) << relTol) + } + if (compare_arrays(col1, col2, absTol, relTol)) + { + + TECA_ERROR("difference in column " << col << " \"" << col_name << "\"") + return -1; } } @@ -310,7 +385,8 @@ // -------------------------------------------------------------------------- int teca_dataset_diff::compare_arrays( const_p_teca_variant_array array1, - const_p_teca_variant_array array2) + const_p_teca_variant_array array2, + double absTol, double relTol) { // Arrays of different sizes are different. 
size_t n_elem = array1->size(); @@ -338,25 +414,29 @@ int teca_dataset_diff::compare_arrays( const NT *pa1 = static_cast(array1.get())->get(); const NT *pa2 = a2->get(); + std::string diagnostic; for (size_t i = 0; i < n_elem; ++i) { - // we don't care too much about performance here so - // use double precision for the comparison. - double ref_val = static_cast(pa1[i]); // reference - double comp_val = static_cast(pa2[i]); // computed - - // Compute the relative difference. - double rel_diff = 0.0; - if (ref_val != 0.0) - rel_diff = std::abs(comp_val - ref_val) / std::abs(ref_val); - else if (comp_val != 0.0) - rel_diff = std::abs(comp_val - ref_val) / std::abs(comp_val); - - if (rel_diff > this->tolerance) + if (std::isinf(pa1[i]) && std::isinf(pa2[i])) { - TECA_ERROR("relative difference " << rel_diff << " exceeds tolerance " - << this->tolerance << " in element " << i << ". ref value \"" - << ref_val << "\" is not equal to test value \"" << comp_val << "\"") + // the GFDL TC tracker returns inf for some fields in some cases. + // warn about it so that it may be addressed in other algorithms. + if (this->verbose) + { + TECA_WARNING("Inf detected in element " << i) + } + } + else if (std::isnan(pa1[i]) || std::isnan(pa2[i])) + { + // for the time being, don't allow NaN. + TECA_ERROR("NaN detected in element " << i) + return -1; + } + else if (!teca_coordinate_util::equal(pa1[i], pa2[i], + diagnostic, relTol, absTol)) + { + TECA_ERROR("difference above the prescribed tolerance detected" + " in element " << i << ". " << diagnostic) return -1; } } @@ -365,7 +445,7 @@ int teca_dataset_diff::compare_arrays( return 0; ) // handle arrays of strings - TEMPLATE_DISPATCH_CLASS( + TEMPLATE_DISPATCH_CASE( const teca_variant_array_impl, std::string, array1.get(), if (dynamic_cast(array2.get())) @@ -380,8 +460,8 @@ int teca_dataset_diff::compare_arrays( const std::string &v2 = a2->get(i); if (v1 != v2) { - TECA_ERROR("string element " << i << " not equal. 
ref value \"" << v1 - << "\" is not equal to test value \"" << v2 << "\"") + TECA_ERROR("string element " << i << " not equal. ref value \"" + << v1 << "\" is not equal to test value \"" << v2 << "\"") return -1; } } @@ -402,6 +482,14 @@ int teca_dataset_diff::compare_array_collections( const_p_teca_array_collection reference_arrays, const_p_teca_array_collection data_arrays) { + int rank = 0; +#if defined(TECA_HAS_MPI) + int is_init = 0; + MPI_Initialized(&is_init); + if (is_init) + MPI_Comm_rank(this->get_communicator(), &rank); +#endif + // The data arrays should contain all the data in the reference arrays. for (unsigned int i = 0; i < reference_arrays->size(); ++i) { @@ -415,12 +503,24 @@ int teca_dataset_diff::compare_array_collections( } // Now diff the contents. + double absTol = this->get_abs_tol(); + double relTol = this->get_rel_tol(); + for (unsigned int i = 0; i < reference_arrays->size(); ++i) { const_p_teca_variant_array a1 = reference_arrays->get(i); std::string name = reference_arrays->get_name(i); + const_p_teca_variant_array a2 = data_arrays->get(name); - if (this->compare_arrays(a1, a2)) + + if (this->verbose && (rank == 0)) + { + TEST_STATUS(" comparing array \"" << name + << "\" absTol=" << max_prec(double) << absTol + << " relTol=" << max_prec(double) << relTol) + } + + if (this->compare_arrays(a1, a2, absTol, relTol)) { TECA_ERROR("difference in array " << i << " \"" << name << "\"") return -1; @@ -430,35 +530,17 @@ int teca_dataset_diff::compare_array_collections( } // -------------------------------------------------------------------------- -int teca_dataset_diff::compare_cartesian_meshes( - const_p_teca_cartesian_mesh reference_mesh, - const_p_teca_cartesian_mesh data_mesh) +int teca_dataset_diff::compare_meshes( + const_p_teca_mesh reference_mesh, + const_p_teca_mesh data_mesh) { - // If the meshes are different sizes, the datasets differ. 
- if (reference_mesh->get_x_coordinates()->size() - != data_mesh->get_x_coordinates()->size()) - { - TECA_ERROR("data mesh has " << data_mesh->get_x_coordinates()->size() - << " points in x, whereas reference mesh has " - << reference_mesh->get_x_coordinates()->size() << ".") - return -1; - } - if (reference_mesh->get_y_coordinates()->size() - != data_mesh->get_y_coordinates()->size()) - { - TECA_ERROR("data mesh has " << data_mesh->get_y_coordinates()->size() - << " points in y, whereas reference mesh has " - << reference_mesh->get_y_coordinates()->size() << ".") - return -1; - } - if (reference_mesh->get_z_coordinates()->size() - != data_mesh->get_z_coordinates()->size()) - { - TECA_ERROR("data mesh has " << data_mesh->get_z_coordinates()->size() - << " points in z, whereas reference mesh has " - << reference_mesh->get_z_coordinates()->size() << ".") - return -1; - } + int rank = 0; +#if defined(TECA_HAS_MPI) + int is_init = 0; + MPI_Initialized(&is_init); + if (is_init) + MPI_Comm_rank(this->get_communicator(), &rank); +#endif // If the arrays are different in shape or in content, the datasets differ. const_p_teca_array_collection arrays1, arrays2; @@ -466,6 +548,10 @@ int teca_dataset_diff::compare_cartesian_meshes( // Point arrays. arrays1 = reference_mesh->get_point_arrays(); arrays2 = data_mesh->get_point_arrays(); + if (this->verbose && (rank == 0) && arrays1->size()) + { + TEST_STATUS(" comparing point arrays") + } if (this->compare_array_collections(arrays1, arrays2)) { TECA_ERROR("difference in point arrays") @@ -475,6 +561,10 @@ int teca_dataset_diff::compare_cartesian_meshes( // cell-centered arrays. 
arrays1 = reference_mesh->get_cell_arrays(); arrays2 = data_mesh->get_cell_arrays(); + if (this->verbose && (rank == 0) && arrays1->size()) + { + TEST_STATUS(" comparing cell arrays") + } if (this->compare_array_collections(arrays1, arrays2)) { TECA_ERROR("difference in cell arrays") @@ -482,49 +572,232 @@ } // Edge-centered arrays. - arrays1 = reference_mesh->get_edge_arrays(); - arrays2 = data_mesh->get_edge_arrays(); + arrays1 = reference_mesh->get_x_edge_arrays(); + arrays2 = data_mesh->get_x_edge_arrays(); + if (this->verbose && (rank == 0) && arrays1->size()) + { + TEST_STATUS(" comparing x-edge arrays") + } if (this->compare_array_collections(arrays1, arrays2)) { - TECA_ERROR("difference in edge arrays") + TECA_ERROR("difference in x-edge arrays") + return -1; + } + + arrays1 = reference_mesh->get_y_edge_arrays(); + arrays2 = data_mesh->get_y_edge_arrays(); + if (this->verbose && (rank == 0) && arrays1->size()) + { + TEST_STATUS(" comparing y-edge arrays") + } + if (this->compare_array_collections(arrays1, arrays2)) + { + TECA_ERROR("difference in y-edge arrays") + return -1; + } + + arrays1 = reference_mesh->get_z_edge_arrays(); + arrays2 = data_mesh->get_z_edge_arrays(); + if (this->verbose && (rank == 0) && arrays1->size()) + { + TEST_STATUS(" comparing z-edge arrays") + } + if (this->compare_array_collections(arrays1, arrays2)) + { + TECA_ERROR("difference in z-edge arrays") return -1; } // Face-centered arrays. 
- arrays1 = reference_mesh->get_face_arrays(); - arrays2 = data_mesh->get_face_arrays(); + arrays1 = reference_mesh->get_x_face_arrays(); + arrays2 = data_mesh->get_x_face_arrays(); + if (this->verbose && (rank == 0) && arrays1->size()) + { + TEST_STATUS(" comparing x-face arrays") + } + if (this->compare_array_collections(arrays1, arrays2)) + { + TECA_ERROR("difference in x-face arrays") + return -1; + } + + arrays1 = reference_mesh->get_y_face_arrays(); + arrays2 = data_mesh->get_y_face_arrays(); + if (this->verbose && (rank == 0) && arrays1->size()) + { + TEST_STATUS(" comparing y-face arrays") + } + if (this->compare_array_collections(arrays1, arrays2)) + { + TECA_ERROR("difference in y-face arrays") + return -1; + } + + arrays1 = reference_mesh->get_z_face_arrays(); + arrays2 = data_mesh->get_z_face_arrays(); + if (this->verbose && (rank == 0) && arrays1->size()) + { + TEST_STATUS(" comparing z-face arrays") + } if (this->compare_array_collections(arrays1, arrays2)) { - TECA_ERROR("difference in face arrays") + TECA_ERROR("difference in z-face arrays") return -1; } // Non-geometric arrays. 
arrays1 = reference_mesh->get_information_arrays(); arrays2 = data_mesh->get_information_arrays(); + if (this->verbose && (rank == 0) && arrays1->size()) + { + TEST_STATUS(" comparing information arrays") + } if (this->compare_array_collections(arrays1, arrays2)) { - TECA_ERROR("differrnce in informational arrays") + TECA_ERROR("difference in information arrays") + return -1; + } + + return 0; +} + +// -------------------------------------------------------------------------- +int teca_dataset_diff::compare_cartesian_meshes( + const_p_teca_cartesian_mesh reference_mesh, + const_p_teca_cartesian_mesh data_mesh) +{ + int rank = 0; +#if defined(TECA_HAS_MPI) + int is_init = 0; + MPI_Initialized(&is_init); + if (is_init) + MPI_Comm_rank(this->get_communicator(), &rank); +#endif + + // compare base class elements + if (this->verbose && (rank == 0)) + { + TEST_STATUS("comparing cartesian meshes") + } + if (this->compare_meshes(reference_mesh, data_mesh)) + { + TECA_ERROR("Difference in mesh") + return -1; + } + + // Coordinate arrays. 
+ double absTol = this->get_abs_tol(); + double relTol = this->get_rel_tol(); + + std::string name; + const_p_teca_variant_array coord1 = reference_mesh->get_x_coordinates(); + reference_mesh->get_x_coordinate_variable(name); + if (this->verbose && (rank == 0) && coord1->size()) + { + TEST_STATUS("comparing x-coordinates " << name + << " absTol=" << max_prec(double) << absTol + << " relTol=" << max_prec(double) << relTol) + } + if (this->compare_arrays(coord1, data_mesh->get_x_coordinates(), absTol, relTol)) + { + TECA_ERROR("difference in x coordinates") + return -1; + } + + coord1 = reference_mesh->get_y_coordinates(); + reference_mesh->get_y_coordinate_variable(name); + if (this->verbose && (rank == 0) && coord1->size()) + { + TEST_STATUS("comparing y-coordinates " << name + << " absTol=" << max_prec(double) << absTol + << " relTol=" << max_prec(double) << relTol) + } + if (this->compare_arrays(coord1, data_mesh->get_y_coordinates(), absTol, relTol)) + { + TECA_ERROR("difference in y coordinates") + return -1; + } + + coord1 = reference_mesh->get_z_coordinates(); + reference_mesh->get_z_coordinate_variable(name); + if (this->verbose && (rank == 0) && coord1->size()) + { + TEST_STATUS("comparing z-coordinates " << name + << " absTol=" << max_prec(double) << absTol + << " relTol=" << max_prec(double) << relTol) + } + if (this->compare_arrays(coord1, + data_mesh->get_z_coordinates(), absTol, relTol)) + { + TECA_ERROR("difference in z coordinates") + return -1; + } + + return 0; +} + +// -------------------------------------------------------------------------- +int teca_dataset_diff::compare_curvilinear_meshes( + const_p_teca_curvilinear_mesh reference_mesh, + const_p_teca_curvilinear_mesh data_mesh) +{ + int rank = 0; +#if defined(TECA_HAS_MPI) + int is_init = 0; + MPI_Initialized(&is_init); + if (is_init) + MPI_Comm_rank(this->get_communicator(), &rank); +#endif + + // compare base class elements + if (this->verbose && (rank == 0)) + { + TEST_STATUS("comparing 
curvilinear meshes") + } + if (this->compare_meshes(reference_mesh, data_mesh)) + { + TECA_ERROR("Difference in mesh") return -1; } // Coordinate arrays. + double absTol = this->get_abs_tol(); + double relTol = this->get_rel_tol(); + + if (this->verbose && (rank == 0)) + { + TEST_STATUS("comparing x-coordinates" + << " absTol=" << max_prec(double) << absTol + << " relTol=" << max_prec(double) << relTol) + } if (this->compare_arrays(reference_mesh->get_x_coordinates(), - data_mesh->get_x_coordinates())) + data_mesh->get_x_coordinates(), absTol, relTol)) { TECA_ERROR("difference in x coordinates") return -1; } + if (this->verbose && (rank == 0)) + { + TEST_STATUS("comparing y-coordinates" + << " absTol=" << max_prec(double) << absTol + << " relTol=" << max_prec(double) << relTol) + } if (this->compare_arrays(reference_mesh->get_y_coordinates(), - data_mesh->get_y_coordinates())) + data_mesh->get_y_coordinates(), absTol, relTol)) { TECA_ERROR("difference in y coordinates") return -1; } + if (this->verbose && (rank == 0)) + { + TEST_STATUS("comparing z-coordinates" + << " absTol=" << max_prec(double) << absTol + << " relTol=" << max_prec(double) << relTol) + } if (this->compare_arrays(reference_mesh->get_z_coordinates(), - data_mesh->get_z_coordinates())) + data_mesh->get_z_coordinates(), absTol, relTol)) { TECA_ERROR("difference in z coordinates") return -1; @@ -532,3 +805,138 @@ int teca_dataset_diff::compare_cartesian_meshes( return 0; } + +// -------------------------------------------------------------------------- +int teca_dataset_diff::compare_arakawa_c_grids( + const_p_teca_arakawa_c_grid reference_mesh, + const_p_teca_arakawa_c_grid data_mesh) +{ + int rank = 0; +#if defined(TECA_HAS_MPI) + int is_init = 0; + MPI_Initialized(&is_init); + if (is_init) + MPI_Comm_rank(this->get_communicator(), &rank); +#endif + + // compare base class elements + if (this->verbose && (rank == 0)) + { + TEST_STATUS("comparing arakawa c grids") + } + if 
(this->compare_meshes(reference_mesh, data_mesh)) + { + TECA_ERROR("Difference in mesh") + return -1; + } + + // Coordinate arrays. + double absTol = this->get_abs_tol(); + double relTol = this->get_rel_tol(); + + if (this->verbose && (rank == 0)) + { + TEST_STATUS("comparing m x-coordinates" + << " absTol=" << max_prec(double) << absTol + << " relTol=" << max_prec(double) << relTol) + } + if (this->compare_arrays(reference_mesh->get_m_x_coordinates(), + data_mesh->get_m_x_coordinates(), absTol, relTol)) + { + TECA_ERROR("difference in m_x coordinates") + return -1; + } + + if (this->verbose && (rank == 0)) + { + TEST_STATUS("comparing m y-coordinates" + << " absTol=" << max_prec(double) << absTol + << " relTol=" << max_prec(double) << relTol) + } + if (this->compare_arrays(reference_mesh->get_m_y_coordinates(), + data_mesh->get_m_y_coordinates(), absTol, relTol)) + { + TECA_ERROR("difference in m_y coordinates") + return -1; + } + + if (this->verbose && (rank == 0)) + { + TEST_STATUS("comparing u x-coordinates" + << " absTol=" << max_prec(double) << absTol + << " relTol=" << max_prec(double) << relTol) + } + if (this->compare_arrays(reference_mesh->get_u_x_coordinates(), + data_mesh->get_u_x_coordinates(), absTol, relTol)) + { + TECA_ERROR("difference in u_x coordinates") + return -1; + } + + if (this->verbose && (rank == 0)) + { + TEST_STATUS("comparing u x-coordinates" + << " absTol=" << max_prec(double) << absTol + << " relTol=" << max_prec(double) << relTol) + } + if (this->compare_arrays(reference_mesh->get_u_y_coordinates(), + data_mesh->get_u_y_coordinates(), absTol, relTol)) + { + TECA_ERROR("difference in u_y coordinates") + return -1; + } + + if (this->verbose && (rank == 0)) + { + TEST_STATUS("comparing v x-coordinates" + << " absTol=" << max_prec(double) << absTol + << " relTol=" << max_prec(double) << relTol) + } + if (this->compare_arrays(reference_mesh->get_v_x_coordinates(), + data_mesh->get_v_x_coordinates(), absTol, relTol)) + { + 
TECA_ERROR("difference in v_x coordinates") + return -1; + } + + if (this->verbose && (rank == 0)) + { + TEST_STATUS("comparing v y-coordinates" + << " absTol=" << max_prec(double) << absTol + << " relTol=" << max_prec(double) << relTol) + } + if (this->compare_arrays(reference_mesh->get_v_y_coordinates(), + data_mesh->get_v_y_coordinates(), absTol, relTol)) + { + TECA_ERROR("difference in v_y coordinates") + return -1; + } + + if (this->verbose && (rank == 0)) + { + TEST_STATUS("comparing m z-coordinates" + << " absTol=" << max_prec(double) << absTol + << " relTol=" << max_prec(double) << relTol) + } + if (this->compare_arrays(reference_mesh->get_m_z_coordinates(), + data_mesh->get_m_z_coordinates(), absTol, relTol)) + { + TECA_ERROR("difference in m_z coordinates") + return -1; + } + + if (this->verbose && (rank == 0)) + { + TEST_STATUS("comparing w z-coordinates" + << " absTol=" << max_prec(double) << absTol + << " relTol=" << max_prec(double) << relTol) + } + if (this->compare_arrays(reference_mesh->get_w_z_coordinates(), + data_mesh->get_w_z_coordinates(), absTol, relTol)) + { + TECA_ERROR("difference in w_z coordinates") + return -1; + } + + return 0; +} diff --git a/alg/teca_dataset_diff.h b/alg/teca_dataset_diff.h index 0aa76f64b..57bfffbba 100644 --- a/alg/teca_dataset_diff.h +++ b/alg/teca_dataset_diff.h @@ -4,9 +4,12 @@ #include "teca_shared_object.h" #include "teca_algorithm.h" #include "teca_metadata.h" -#include "teca_table_fwd.h" -#include "teca_cartesian_mesh_fwd.h" -#include "teca_array_collection_fwd.h" +#include "teca_table.h" +#include "teca_mesh.h" +#include "teca_cartesian_mesh.h" +#include "teca_curvilinear_mesh.h" +#include "teca_arakawa_c_grid.h" +#include "teca_array_collection.h" #include #include @@ -38,31 +41,47 @@ class teca_dataset_diff : public teca_algorithm TECA_GET_ALGORITHM_PROPERTIES_DESCRIPTION() TECA_SET_ALGORITHM_PROPERTIES() - // Relative tolerance below which two floating-point quantities are - // considered equal. 
The relative difference for a computed quantity A and - // a reference quantity B is - // - // rel_diff = |A - B| / B, B != 0 - // = |A - B| / A, B == 0, A != 0 - // 0 otherwise - TECA_ALGORITHM_PROPERTY(double, tolerance) + // Relative tolerance below which two floating-point numbers a and b are + // considered equal. if |a - b| <= max(|a|,|b|)*tol then a is equal to b. + // the relative tolerance is used with numbers not close to zero. + TECA_ALGORITHM_PROPERTY(double, relative_tolerance) + // The absolute tolerance below which two floating point numbers a and b are + // considered equal. if |a - b| <= tol then a is equal to b. The absolute + // tolerance is used with numbers close to zero. + TECA_ALGORITHM_PROPERTY(double, absolute_tolerance) + + // if set, information about the test progress is displayed during + // the test. + TECA_ALGORITHM_PROPERTY(int, verbose) protected: teca_dataset_diff(); // Comparison methods. int compare_tables(const_p_teca_table table1, const_p_teca_table table2); + int compare_meshes( + const_p_teca_mesh reference_mesh, + const_p_teca_mesh data_mesh); + int compare_cartesian_meshes( const_p_teca_cartesian_mesh reference_mesh, const_p_teca_cartesian_mesh data_mesh); + int compare_curvilinear_meshes( + const_p_teca_curvilinear_mesh reference_mesh, + const_p_teca_curvilinear_mesh data_mesh); + + int compare_arakawa_c_grids( + const_p_teca_arakawa_c_grid reference_mesh, + const_p_teca_arakawa_c_grid data_mesh); + int compare_array_collections( const_p_teca_array_collection reference_arrays, const_p_teca_array_collection data_arrays); - int compare_arrays( - const_p_teca_variant_array array1, const_p_teca_variant_array array2); + int compare_arrays(const_p_teca_variant_array array1, + const_p_teca_variant_array array2, double absTol, double relTol); // Reporting methods. 
@@ -82,9 +101,13 @@ class teca_dataset_diff : public teca_algorithm const std::vector &input_data, const teca_metadata &request) override; + double get_abs_tol() const; + double get_rel_tol() const; + private: - // Tolerance for equality of field values. - double tolerance; + double relative_tolerance; + double absolute_tolerance; + int verbose; }; #endif diff --git a/alg/teca_deeplab_ar_detect.py b/alg/teca_deeplab_ar_detect.py new file mode 100644 index 000000000..c2a40d467 --- /dev/null +++ b/alg/teca_deeplab_ar_detect.py @@ -0,0 +1,125 @@ +import numpy as np + +class teca_deeplab_ar_detect(teca_pytorch_algorithm): + """ + This algorithm detects Atmospheric Rivers using deep learning techniques + derived from the DeepLabv3+ architecture. Given an input field of + integrated vapor transport (IVT) magnitude, it calculates the probability + of an AR event and stores it in a new scalar field named 'ar_probability'. + """ + def __init__(self): + super().__init__() + + self.set_input_variable("IVT") + + arp_atts = teca_array_attributes( + teca_float_array_code.get(), + teca_array_attributes.point_centering, + 0, 'unitless', 'posterior AR flag', + 'the posterior probability of the presence ' + 'of an atmospheric river', + None) + + self.set_output_variable("ar_probability", arp_atts) + + def set_ivt_variable(self, var): + """ + set the name of the variable containing the integrated vapor + transport(IVT) magnitude field. + """ + self.set_input_variable(var) + + def load_model(self, filename): + """ + Load model from file system. In MPI parallel runs rank 0 + loads the model file and broadcasts it to the other ranks. 
+ """ + event = teca_time_py_event('teca_deeplab_ar_detect::load_model') + + # this creates OpenMP thread pools and imports torch + # it must be called *before* we import torch + self.initialize() + + # import our torch codes only now that torch has been initialized + global teca_deeplab_ar_detect_internals + from teca_deeplab_ar_detect_internals \ + import teca_deeplab_ar_detect_internals + + # create an instance of the model + model = teca_deeplab_ar_detect_internals.DeepLabv3_plus( + n_classes=1, _print=False) + + # load model weights from state on disk + super().load_model(filename, model) + + def get_padding_sizes(self, div, dim): + """ + given a divisor(div) and an input mesh dimension(dim) + returns a tuple of values holding the number of values to + add onto the low and high sides of the mesh to make the mesh + dimension evely divisible by the divisor + """ + # ghost cells in the y direction + target_shape = div * np.ceil(dim / div) + target_shape_diff = target_shape - dim + + pad_low = int(np.ceil(target_shape_diff / 2.0)) + pad_high = int(np.floor(target_shape_diff / 2.0)) + + return pad_low, pad_high + + def preprocess(self, in_array): + """ + resize the array to be a multiple of 64 in the y direction and 128 in + the x direction amd convert to 3 channel (i.e. 
RGB image like) + """ + event = teca_time_py_event('teca_deeplab_ar_detect::preprocess') + + nx_in = in_array.shape[1] + ny_in = in_array.shape[0] + + # get the padding sizes to make the mesh evenly divisible by 64 in the + # x direction and 128 in the y direction + ng_x0, ng_x1 = self.get_padding_sizes(64.0, nx_in) + ng_y0, ng_y1 = self.get_padding_sizes(128.0, ny_in) + + nx_out = ng_x0 + ng_x1 + nx_in + ny_out = ng_y0 + ng_y1 + ny_in + + # allocate a new larger array + out_array = np.zeros((1, 3, ny_out, nx_out), dtype=np.float32) + + # copy the input array into the center + out_array[:, :, ng_y0 : ng_y0 + ny_in, + ng_x0 : ng_x0 + nx_in] = in_array + + # cache the padding info in order to extract the result + self.ng_x0 = ng_x0 + self.ng_y0 = ng_y0 + self.nx_in = nx_in + self.ny_in = ny_in + + return out_array + + def postprocess(self, out_tensor): + """ + convert the tensor to a numpy array and extract the output data from + the padded tensor. padding was added during preprocess. + """ + event = teca_time_py_event('teca_deeplab_ar_detect::postprocess') + + # normalize the output + tmp = torch.sigmoid(out_tensor) + + # move to the CPU if running on a GPU + if self.device != 'cpu': + tmp = tmp.to('cpu') + + # convert from torch tensor to numpy ndarray + out_array = tmp.numpy() + + # extract the valid portion of the result + out_array = out_array[:, :, self.ng_y0 : self.ng_y0 + self.ny_in, + self.ng_x0 : self.ng_x0 + self.nx_in] + + return out_array diff --git a/alg/teca_deeplab_ar_detect_internals.py b/alg/teca_deeplab_ar_detect_internals.py new file mode 100644 index 000000000..83280d0b8 --- /dev/null +++ b/alg/teca_deeplab_ar_detect_internals.py @@ -0,0 +1,308 @@ +import torch +import math + +class teca_deeplab_ar_detect_internals: + + # Implementation of Google's Deeplab-V3-Plus + # source: https://arxiv.org/pdf/1802.02611.pdf + class Bottleneck(torch.nn.Module): + expansion = 4 + + def __init__(self, inplanes, planes, + stride=1, rate=1, downsample=None): + + 
super(teca_deeplab_ar_detect_internals.Bottleneck, + self).__init__() + self.conv1 = torch.nn.Conv2d(inplanes, planes, kernel_size=1, bias=False) + self.bn1 = torch.nn.BatchNorm2d(planes) + self.conv2 = torch.nn.Conv2d(planes, planes, kernel_size=3, + stride=stride, dilation=rate, + padding=rate, bias=False) + self.bn2 = torch.nn.BatchNorm2d(planes) + self.conv3 = torch.nn.Conv2d(planes, planes * 4, kernel_size=1, + bias=False) + self.bn3 = torch.nn.BatchNorm2d(planes * 4) + self.relu = torch.nn.ReLU(inplace=True) + self.downsample = downsample + self.stride = stride + self.rate = rate + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + + class ResNet(torch.nn.Module): + def __init__(self, nInputChannels, block, layers, os=16): + + self.inplanes = 64 + super(teca_deeplab_ar_detect_internals.ResNet, self).__init__() + if os == 16: + strides = [1, 2, 2, 1] + rates = [1, 1, 1, 2] + blocks = [1, 2, 4] + elif os == 8: + strides = [1, 2, 1, 1] + rates = [1, 1, 2, 2] + blocks = [1, 2, 1] + else: + raise NotImplementedError + + # Modules + self.conv1 = torch.nn.Conv2d(nInputChannels, 64, kernel_size=7, + stride=2, padding=3, bias=False) + self.bn1 = torch.nn.BatchNorm2d(64) + self.relu = torch.nn.ReLU(inplace=True) + self.maxpool = torch.nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + + self.layer1 = self._make_layer(block, 64, layers[0], + stride=strides[0], rate=rates[0]) + self.layer2 = self._make_layer(block, 128, layers[1], + stride=strides[1], rate=rates[1]) + self.layer3 = self._make_layer(block, 256, layers[2], + stride=strides[2], rate=rates[2]) + self.layer4 = self._make_MG_unit(block, 512, blocks=blocks, + stride=strides[3], rate=rates[3]) + + 
self._init_weight() + + def _make_layer(self, block, planes, blocks, stride=1, rate=1): + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = torch.nn.Sequential( + torch.nn.Conv2d(self.inplanes, planes * block.expansion, + kernel_size=1, stride=stride, bias=False), + torch.nn.BatchNorm2d(planes * block.expansion), + ) + + layers = [] + layers.append( + block(self.inplanes, planes, stride, rate, downsample) + ) + self.inplanes = planes * block.expansion + for i in range(1, blocks): + layers.append(block(self.inplanes, planes)) + + return torch.nn.Sequential(*layers) + + def _make_MG_unit(self, block, planes, + blocks=[1, 2, 4], stride=1, rate=1): + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = torch.nn.Sequential( + torch.nn.Conv2d(self.inplanes, planes * block.expansion, + kernel_size=1, stride=stride, bias=False), + torch.nn.BatchNorm2d(planes * block.expansion), + ) + + layers = [] + layers.append( + block(self.inplanes, planes, stride, + rate=blocks[0]*rate, downsample=downsample) + ) + self.inplanes = planes * block.expansion + for i in range(1, len(blocks)): + layers.append( + block(self.inplanes, planes, + stride=1, rate=blocks[i]*rate) + ) + + return torch.nn.Sequential(*layers) + + def forward(self, input): + x = self.conv1(input) + x = self.bn1(x) + x = self.relu(x) + x = self.maxpool(x) + + x = self.layer1(x) + low_level_feat = x + x = self.layer2(x) + x = self.layer3(x) + x = self.layer4(x) + return x, low_level_feat + + def _init_weight(self): + for m in self.modules(): + if isinstance(m, torch.nn.Conv2d): + torch.torch.nn.init.kaiming_normal_(m.weight) + elif isinstance(m, torch.nn.BatchNorm2d): + m.weight.data.fill_(1) + m.bias.data.zero_() + + class ASPP_module(torch.nn.Module): + def __init__(self, inplanes, planes, rate): + + super(teca_deeplab_ar_detect_internals.ASPP_module, + self).__init__() + if rate == 1: + kernel_size = 1 + padding = 0 + else: + 
kernel_size = 3 + padding = rate + self.atrous_convolution = torch.nn.Conv2d( + inplanes, planes, kernel_size=kernel_size, + stride=1, padding=padding, dilation=rate, bias=False + ) + self.bn = torch.nn.BatchNorm2d(planes) + self.relu = torch.nn.ReLU() + + self._init_weight() + + def forward(self, x): + x = self.atrous_convolution(x) + x = self.bn(x) + + return self.relu(x) + + def _init_weight(self): + for m in self.modules(): + if isinstance(m, torch.nn.Conv2d): + torch.torch.nn.init.kaiming_normal_(m.weight) + elif isinstance(m, torch.nn.BatchNorm2d): + m.weight.data.fill_(1) + m.bias.data.zero_() + + class DeepLabv3_plus(torch.nn.Module): + def __init__(self, nInputChannels=3, n_classes=21, os=16, + _print=True): + + if _print: + sys.stdout.write("Constructing DeepLabv3+ model...\n") + sys.stdout.write("Number of classes: {}\n".format(n_classes)) + sys.stdout.write("Output stride: {}\n".format(os)) + sys.stdout.write( + "Number of Input Channels: {}\n".format(nInputChannels) + ) + super(teca_deeplab_ar_detect_internals.DeepLabv3_plus, + self).__init__() + + self.resnet_features = teca_deeplab_ar_detect_internals.ResNet( + nInputChannels, + teca_deeplab_ar_detect_internals.Bottleneck, + [3, 4, 23, 3], os + ) + + # ASPP + if os == 16: + rates = [1, 6, 12, 18] + elif os == 8: + rates = [1, 12, 24, 36] + else: + raise NotImplementedError + + self.aspp1 = teca_deeplab_ar_detect_internals.ASPP_module( + 2048, 256, rate=rates[0]) + self.aspp2 = teca_deeplab_ar_detect_internals.ASPP_module( + 2048, 256, rate=rates[1]) + self.aspp3 = teca_deeplab_ar_detect_internals.ASPP_module( + 2048, 256, rate=rates[2]) + self.aspp4 = teca_deeplab_ar_detect_internals.ASPP_module( + 2048, 256, rate=rates[3]) + + self.relu = torch.nn.ReLU() + + self.global_avg_pool = torch.nn.Sequential( + torch.nn.AdaptiveAvgPool2d((1, 1)), + torch.nn.Conv2d(2048, 256, 1, stride=1, bias=False), + torch.nn.BatchNorm2d(256), + torch.nn.ReLU() + ) + + self.conv1 = torch.nn.Conv2d(1280, 256, 1, 
bias=False) + self.bn1 = torch.nn.BatchNorm2d(256) + + # adopt [1x1, 48] for channel reduction. + self.conv2 = torch.nn.Conv2d(256, 48, 1, bias=False) + self.bn2 = torch.nn.BatchNorm2d(48) + + self.last_conv = torch.nn.Sequential( + torch.nn.Conv2d( + 304, 256, kernel_size=3, stride=1, + padding=1, bias=False + ), + torch.nn.BatchNorm2d(256), + torch.nn.ReLU(), + torch.nn.Conv2d( + 256, 256, kernel_size=3, stride=1, + padding=1, bias=False + ), + torch.nn.BatchNorm2d(256), + torch.nn.ReLU(), + torch.nn.Conv2d(256, n_classes, kernel_size=1, stride=1)) + + def forward(self, input): + x, low_level_features = self.resnet_features(input) + x1 = self.aspp1(x) + x2 = self.aspp2(x) + x3 = self.aspp3(x) + x4 = self.aspp4(x) + x5 = self.global_avg_pool(x) + + x5 = torch.nn.functional.interpolate( + x5, size=x4.size()[2:], mode='bilinear', + align_corners=True + ) + + x = torch.cat((x1, x2, x3, x4, x5), dim=1) + + x = self.conv1(x) + x = self.bn1(x) + x = self.relu(x) + + x = torch.nn.functional.interpolate( + x, + size=( + int(math.ceil(input.size()[-2]/4)), + int(math.ceil(input.size()[-1]/4)) + ), + mode='bilinear', + align_corners=True + ) + + low_level_features = self.conv2(low_level_features) + low_level_features = self.bn2(low_level_features) + low_level_features = self.relu(low_level_features) + + x = torch.cat((x, low_level_features), dim=1) + x = self.last_conv(x) + + x = torch.nn.functional.interpolate( + x, size=input.size()[2:], mode='bilinear', + align_corners=True + ) + + return x + + def freeze_bn(self): + for m in self.modules(): + if isinstance(m, torch.nn.BatchNorm2d): + m.eval() + + def __init_weight(self): + for m in self.modules(): + if isinstance(m, torch.nn.Conv2d): + torch.torch.nn.init.kaiming_normal_(m.weight) + elif isinstance(m, torch.nn.BatchNorm2d): + m.weight.data.fill_(1) + m.bias.data.zero_() + + diff --git a/alg/teca_descriptive_statistics.cxx b/alg/teca_descriptive_statistics.cxx index 80482ba1a..2c7794f0a 100644 --- 
a/alg/teca_descriptive_statistics.cxx +++ b/alg/teca_descriptive_statistics.cxx @@ -5,6 +5,7 @@ #include "teca_array_collection.h" #include "teca_variant_array.h" #include "teca_metadata.h" +#include "teca_array_attributes.h" #include #include @@ -209,19 +210,24 @@ const_p_teca_dataset teca_descriptive_statistics::execute( (void)port; // get the input mesh - const_p_teca_cartesian_mesh in_mesh - = std::dynamic_pointer_cast(input_data[0]); + const_p_teca_mesh in_mesh + = std::dynamic_pointer_cast(input_data[0]); if (!in_mesh) { TECA_ERROR("dataset is not a teca_cartesian_mesh") return nullptr; } + // dependent variables + std::vector dep_var_names; + this->get_dependent_variables(request, dep_var_names); + size_t n_dep_vars = dep_var_names.size(); // set up the output p_teca_table table = teca_table::New(); table->declare_columns("step", long(), "time", double()); + // pass calendaring metadata std::string calendar; in_mesh->get_calendar(calendar); table->set_calendar(calendar); @@ -238,12 +244,69 @@ const_p_teca_dataset teca_descriptive_statistics::execute( in_mesh->get_time(time); table << time; - // dependent variables - std::vector dep_var_names; - this->get_dependent_variables(request, dep_var_names); + // pass attributes and add the new variables we consrtuct. this is for + // NetCDF output. 
+ teca_metadata atrs; + if (in_mesh->get_metadata().get("attributes", atrs) == 0) + { + for (size_t i = 0; i < n_dep_vars; ++i) + { + const std::string &dep_var_name = dep_var_names[i]; + + teca_metadata var_atts; + if (atrs.get(dep_var_names[i], var_atts) == 0) + { + teca_array_attributes out_atts(var_atts); + + std::string out_var_name = "min_" + dep_var_name; + out_atts.long_name = out_var_name; + out_atts.description = "minimum of " + dep_var_name; + atrs.set(out_var_name, (teca_metadata)out_atts); + + out_var_name = "max_" + dep_var_name; + out_atts.long_name = out_var_name; + out_atts.description = "maximum of " + dep_var_name; + atrs.set(out_var_name, (teca_metadata)out_atts); + + out_var_name = "avg_" + dep_var_name; + out_atts.long_name = out_var_name; + out_atts.description = "average of " + dep_var_name; + atrs.set(out_var_name, (teca_metadata)out_atts); + + out_var_name = "var_" + dep_var_name; + out_atts.long_name = out_var_name; + out_atts.description = "variance of " + dep_var_name; + atrs.set(out_var_name, (teca_metadata)out_atts); + + out_var_name = "med_" + dep_var_name; + out_atts.long_name = out_var_name; + out_atts.description = "median of " + dep_var_name; + atrs.set(out_var_name, (teca_metadata)out_atts); + + out_var_name = "low_q_" + dep_var_name; + out_atts.long_name = out_var_name; + out_atts.description = "lower quartile of " + dep_var_name; + atrs.set(out_var_name, (teca_metadata)out_atts); + + out_var_name = "up_q_" + dep_var_name; + out_atts.long_name = out_var_name; + out_atts.description = "upper quartile of " + dep_var_name; + atrs.set(out_var_name, (teca_metadata)out_atts); + } + } + + teca_array_attributes step_atts; + step_atts.long_name = "time step"; + step_atts.description = "input dataset time step"; + step_atts.units = "unitless"; + atrs.set("step", (teca_metadata)step_atts); + + table->get_metadata().set("attributes", atrs); + + atrs.to_stream(std::cerr); + } // for each variable - size_t n_dep_vars = dep_var_names.size(); 
for (size_t i = 0; i < n_dep_vars; ++i) { const std::string &dep_var_name = dep_var_names[i]; @@ -274,10 +337,10 @@ const_p_teca_dataset teca_descriptive_statistics::execute( // add to output table table->declare_columns( - "min " + dep_var_name, NT(), "max " + dep_var_name, NT(), - "avg " + dep_var_name, NT(), "var " + dep_var_name, NT(), - "low_q " + dep_var_name, NT(), "med " + dep_var_name, NT(), - "up_q " + dep_var_name, NT()); + "min_" + dep_var_name, NT(), "max_" + dep_var_name, NT(), + "avg_" + dep_var_name, NT(), "var_" + dep_var_name, NT(), + "low_q_" + dep_var_name, NT(), "med_" + dep_var_name, NT(), + "up_q_" + dep_var_name, NT()); table << mn << mx << av << vr << lq << med << uq; ) diff --git a/alg/teca_face_to_cell_centering.cxx b/alg/teca_face_to_cell_centering.cxx new file mode 100644 index 000000000..993df96c4 --- /dev/null +++ b/alg/teca_face_to_cell_centering.cxx @@ -0,0 +1,336 @@ +#include "teca_face_to_cell_centering.h" + +#include "teca_arakawa_c_grid.h" +#include "teca_array_collection.h" +#include "teca_variant_array.h" +#include "teca_metadata.h" +#include "teca_array_attributes.h" + +#include +#include +#include +#include + +#if defined(TECA_HAS_BOOST) +#include +#endif + +using std::string; +using std::vector; +using std::cerr; +using std::endl; +using std::cos; + +//#define TECA_DEBUG + +namespace { + +template +void x_face_to_cell(unsigned long nx, unsigned long ny, + unsigned long nz, unsigned long nxy, const num_t *fc, num_t *cc) +{ + unsigned long nxf = nx + 1; + unsigned long nxyf = nxf*ny; + + for (unsigned long k = 0; k < nz; ++k) + { + unsigned long knxy = k*nxy; + unsigned long knxyf = k*nxyf; + + for (unsigned long j = 0; j < ny; ++j) + { + unsigned long jnx = j*nx; + unsigned long jnxf = j*nxf; + + const num_t *src = fc + knxyf + jnxf; + num_t *dest = cc + knxy + jnx; + + for (unsigned long i = 0; i < nx; ++i) + { + dest[i] = 0.5*(src[i] + src[i+1]); + } + } + } +} + +template +void y_face_to_cell(unsigned long nx, unsigned 
long ny, + unsigned long nz, unsigned long nxy, const num_t *fc, num_t *cc) +{ + unsigned long nyf = ny + 1; + unsigned long nxyf = nx*nyf; + + for (unsigned long k = 0; k < nz; ++k) + { + unsigned long knxy = k*nxy; + unsigned long knxyf = k*nxyf; + + for (unsigned long j = 0; j < ny; ++j) + { + unsigned long jnx = j*nx; + unsigned long j1nx = (j + 1)*nx; + + const num_t *src = fc + knxyf + jnx; + const num_t *src1 = fc + knxyf + j1nx; + + num_t *dest = cc + knxy + jnx; + + for (unsigned long i = 0; i < nx; ++i) + { + dest[i] = 0.5*(src[i] + src1[i]); + } + } + } +} + +template +void z_face_to_cell(unsigned long nx, unsigned long ny, + unsigned long nz, unsigned long nxy, const num_t *fc, num_t *cc) +{ + for (unsigned long k = 0; k < nz; ++k) + { + unsigned long knxy = k*nxy; + unsigned long k1nxy = (k + 1)*nxy; + + for (unsigned long j = 0; j < ny; ++j) + { + unsigned long jnx = j*nx; + + const num_t *src = fc + knxy + jnx; + const num_t *src1 = fc + k1nxy + jnx; + + num_t *dest = cc + knxy + jnx; + + for (unsigned long i = 0; i < nx; ++i) + { + dest[i] = 0.5*(src[i] + src1[i]); + } + } + } +} + +}; + + +// -------------------------------------------------------------------------- +teca_face_to_cell_centering::teca_face_to_cell_centering() +{ + this->set_number_of_input_connections(1); + this->set_number_of_output_ports(1); +} + +// -------------------------------------------------------------------------- +teca_face_to_cell_centering::~teca_face_to_cell_centering() +{} + +#if defined(TECA_HAS_BOOST) +// -------------------------------------------------------------------------- +void teca_face_to_cell_centering::get_properties_description( + const string &prefix, options_description &global_opts) +{ + (void)prefix; + (void)global_opts; + /*options_description opts("Options for " + + (prefix.empty()?"teca_face_to_cell_centering":prefix)); + + opts.add_options() + TECA_POPTS_GET(int, prefix, mode, + "transform mode (mode_wrf_v3)") + ; + + global_opts.add(opts);*/ 
+} + +// -------------------------------------------------------------------------- +void teca_face_to_cell_centering::set_properties( + const string &prefix, variables_map &opts) +{ + (void)prefix; + (void)opts; + //TECA_POPTS_SET(opts, int, prefix, mode) +} +#endif + +// -------------------------------------------------------------------------- +teca_metadata teca_face_to_cell_centering::get_output_metadata( + unsigned int port, + const std::vector &input_md) +{ +#ifdef TECA_DEBUG + cerr << teca_parallel_id() + << "teca_face_to_cell_centering::get_output_metadata" << endl; +#endif + (void)port; + + teca_metadata out_md(input_md[0]); + + // get array attributes + teca_metadata atrs; + if (out_md.get("attributes", atrs)) + { + TECA_ERROR("failed to get array attributes") + return teca_metadata(); + } + + // get the list of array names + std::vector arrays; + if (out_md.get("variables", arrays)) + { + TECA_ERROR("failed to get array names") + return teca_metadata(); + } + + size_t n_arrays = arrays.size(); + for (size_t i = 0; i < n_arrays; ++i) + { + // get the name of the ith array + const std::string &array_name = arrays[i]; + + // get the array's attributes + teca_metadata array_atrs; + if (atrs.get(array_name, array_atrs)) + { + TECA_ERROR("failed to get the attributes for array " + << i << " \"" << array_name << "\"") + return teca_metadata(); + } + + // get the array's centering + int centering = teca_array_attributes::invalid_value; + if (array_atrs.get("centering", centering)) + { + TECA_ERROR("failed to get the centering for array " + << i << " \"" << array_name << "\"") + return teca_metadata(); + } + + // if this is a face centered array change to cell centering + if ((centering == teca_array_attributes::x_face_centering) + || (centering == teca_array_attributes::y_face_centering) + || (centering == teca_array_attributes::z_face_centering)) + { + array_atrs.set("centering", + int(teca_array_attributes::cell_centering)); + } + + // update the array's 
attributes + atrs.set(array_name, array_atrs); + } + + // update the array attributes collection + out_md.set("attributes", atrs); + + return out_md; +} + +// -------------------------------------------------------------------------- +std::vector +teca_face_to_cell_centering::get_upstream_request( + unsigned int port, + const std::vector &input_md, + const teca_metadata &request) +{ + (void)port; + (void)input_md; + std::vector up_reqs; + up_reqs.push_back(request); + return up_reqs; +} + +// -------------------------------------------------------------------------- +const_p_teca_dataset teca_face_to_cell_centering::execute( + unsigned int port, + const std::vector &input_data, + const teca_metadata &request) +{ +#ifdef TECA_DEBUG + cerr << teca_parallel_id() + << "teca_face_to_cell_centering::execute" << endl; +#endif + (void)port; + (void)request; + + // get the input mesh + const_p_teca_arakawa_c_grid in_mesh + = std::dynamic_pointer_cast(input_data[0]); + + if (!in_mesh) + { + TECA_ERROR("teca_arakawa_c_grid is required") + return nullptr; + } + + // get the mesh dimensions + unsigned long extent[6] = {0}; + in_mesh->get_extent(extent); + + unsigned long nx = extent[1] - extent[0] + 1; + unsigned long ny = extent[3] - extent[2] + 1; + unsigned long nz = extent[5] - extent[4] + 1; + unsigned long nxy = nx*ny; + unsigned long nxyz = nxy*nz; + + // allocate the output mesh + p_teca_arakawa_c_grid out_mesh = teca_arakawa_c_grid::New(); + out_mesh->shallow_copy(std::const_pointer_cast(in_mesh)); + + // convert x-face centering to cell centering + p_teca_array_collection &x_face_arrays = out_mesh->get_x_face_arrays(); + p_teca_array_collection &cell_arrays = out_mesh->get_cell_arrays(); + size_t n_arrays = x_face_arrays->size(); + for (size_t i = 0; i < n_arrays; ++i) + { + std::string &array_name = x_face_arrays->get_name(i); + p_teca_variant_array fc = x_face_arrays->get(i); + p_teca_variant_array cc = fc->new_instance(nxyz); + 
TEMPLATE_DISPATCH(teca_variant_array_impl, + fc.get(), + const NT *pfc = static_cast(fc.get())->get(); + NT *pcc = static_cast(cc.get())->get(); + ::x_face_to_cell(nx, ny, nz, nxy, pfc, pcc); + ) + + x_face_arrays->remove(i); + cell_arrays->append(array_name, cc); + } + + // convert y-face centering to cell centering + p_teca_array_collection &y_face_arrays = out_mesh->get_y_face_arrays(); + n_arrays = y_face_arrays->size(); + for (size_t i = 0; i < n_arrays; ++i) + { + std::string &array_name = y_face_arrays->get_name(i); + p_teca_variant_array fc = y_face_arrays->get(i); + p_teca_variant_array cc = fc->new_instance(nxyz); + TEMPLATE_DISPATCH(teca_variant_array_impl, + fc.get(), + const NT *pfc = static_cast(fc.get())->get(); + NT *pcc = static_cast(cc.get())->get(); + ::y_face_to_cell(nx, ny, nz, nxy, pfc, pcc); + ) + + y_face_arrays->remove(i); + cell_arrays->append(array_name, cc); + } + + // convert z-face centering to cell centering + p_teca_array_collection &z_face_arrays = out_mesh->get_z_face_arrays(); + n_arrays = z_face_arrays->size(); + for (size_t i = 0; i < n_arrays; ++i) + { + std::string &array_name = z_face_arrays->get_name(i); + p_teca_variant_array fc = z_face_arrays->get(i); + p_teca_variant_array cc = fc->new_instance(nxyz); + TEMPLATE_DISPATCH(teca_variant_array_impl, + fc.get(), + const NT *pfc = static_cast(fc.get())->get(); + NT *pcc = static_cast(cc.get())->get(); + ::z_face_to_cell(nx, ny, nz, nxy, pfc, pcc); + ) + + z_face_arrays->remove(i); + cell_arrays->append(array_name, cc); + } + + return out_mesh; +} diff --git a/alg/teca_face_to_cell_centering.h b/alg/teca_face_to_cell_centering.h new file mode 100644 index 000000000..563c9ae1f --- /dev/null +++ b/alg/teca_face_to_cell_centering.h @@ -0,0 +1,49 @@ +#ifndef teca_face_to_cell_centering_h +#define teca_face_to_cell_centering_h + +#include "teca_shared_object.h" +#include "teca_algorithm.h" +#include "teca_metadata.h" + +#include +#include + 
+TECA_SHARED_OBJECT_FORWARD_DECL(teca_face_to_cell_centering) + +/// an algorithm that transforms vertical cooridinates +/** +An algorithm that transforms vertical coordinates of a mesh. +*/ +class teca_face_to_cell_centering : public teca_algorithm +{ +public: + TECA_ALGORITHM_STATIC_NEW(teca_face_to_cell_centering) + TECA_ALGORITHM_DELETE_COPY_ASSIGN(teca_face_to_cell_centering) + TECA_ALGORITHM_CLASS_NAME(teca_face_to_cell_centering) + ~teca_face_to_cell_centering(); + + // report/initialize to/from Boost program options + // objects. + TECA_GET_ALGORITHM_PROPERTIES_DESCRIPTION() + TECA_SET_ALGORITHM_PROPERTIES() + +protected: + teca_face_to_cell_centering(); + +private: + teca_metadata get_output_metadata( + unsigned int port, + const std::vector &input_md) override; + + std::vector get_upstream_request( + unsigned int port, + const std::vector &input_md, + const teca_metadata &request) override; + + const_p_teca_dataset execute( + unsigned int port, + const std::vector &input_data, + const teca_metadata &request) override; +}; + +#endif diff --git a/alg/teca_integrated_vapor_transport.cxx b/alg/teca_integrated_vapor_transport.cxx new file mode 100644 index 000000000..39f2fd5f2 --- /dev/null +++ b/alg/teca_integrated_vapor_transport.cxx @@ -0,0 +1,423 @@ +#include "teca_integrated_vapor_transport.h" + +#include "teca_cartesian_mesh.h" +#include "teca_array_collection.h" +#include "teca_variant_array.h" +#include "teca_metadata.h" +#include "teca_coordinate_util.h" + +#include +#include +#include +#include + +#if defined(TECA_HAS_BOOST) +#include +#endif + +using std::string; +using std::vector; +using std::cerr; +using std::endl; +using std::cos; + +//#define TECA_DEBUG + +namespace { +template +void cartesian_ivt(unsigned long nx, unsigned long ny, + unsigned long nz, const coord_t *plev, const num_t *wind, + const num_t *q, num_t *ivt) +{ + unsigned long nxy = nx*ny; + unsigned long nxyz = nxy*nz; + + // compute the integrand + num_t *f = 
(num_t*)malloc(nxyz*sizeof(num_t)); + for (unsigned long i = 0; i < nxyz; ++i) + f[i] = wind[i]*q[i]; + + // initialize the result + memset(ivt, 0, nxy*sizeof(num_t)); + + // work an x-y slice at a time + unsigned long nzm1 = nz - 1; + for (unsigned long k = 0; k < nzm1; ++k) + { + // dp over the slice + num_t h2 = num_t(0.5) * (plev[k+1] - plev[k]); + + // the current two x-y-planes of data + unsigned long knxy = k*nxy; + num_t *f_k0 = f + knxy; + num_t *f_k1 = f_k0 + nxy; + + // accumulate this plane of data using trapazoid rule + for (unsigned long q = 0; q < nxy; ++q) + { + ivt[q] += h2 * (f_k0[q] + f_k1[q]); + } + } + + // free up the integrand + free(f); + + // check the sign, in this way we can handle both increasing and decreasing + // pressure coordinates + num_t s = plev[1] - plev[0] < num_t(0) ? num_t(-1) : num_t(1); + + // scale by -1/g + num_t m1g = s/num_t(9.80665); + for (unsigned long i = 0; i < nxy; ++i) + ivt[i] *= m1g; +} + +template +void cartesian_ivt(unsigned long nx, unsigned long ny, + unsigned long nz, const coord_t *plev, const num_t *wind, + const char *wind_valid, const num_t *q, const char *q_valid, + num_t *ivt) +{ + unsigned long nxy = nx*ny; + unsigned long nxyz = nxy*nz; + + // compute the mask + char *mask = (char*)malloc(nxyz); + for (unsigned long i = 0; i < nxyz; ++i) + mask[i] = wind_valid[i] && q_valid[i] ? 
1 : 0; + + // compute the integrand + num_t *f = (num_t*)malloc(nxyz*sizeof(num_t)); + for (unsigned long i = 0; i < nxyz; ++i) + f[i] = wind[i]*q[i]; + + // initialize the result + memset(ivt, 0, nxy*sizeof(num_t)); + + // work an x-y slice at a time + unsigned long nzm1 = nz - 1; + for (unsigned long k = 0; k < nzm1; ++k) + { + // dp over the slice + num_t h2 = num_t(0.5) * (plev[k+1] - plev[k]); + + // the current two x-y-planes of data + unsigned long knxy = k*nxy; + num_t *f_k0 = f + knxy; + num_t *f_k1 = f_k0 + nxy; + + char *mask_k0 = mask + knxy; + char *mask_k1 = mask_k0 + nxy; + + // accumulate this plane of data using trapazoid rule + for (unsigned long q = 0; q < nxy; ++q) + { + ivt[q] += ((mask_k0[q] && mask_k1[q]) ? + h2 * (f_k0[q] + f_k1[q]) : num_t(0)); + } + } + + // free up the integrand and mask + free(mask); + free(f); + + // check the sign, in this way we can handle both increasing and decreasing + // pressure coordinates + num_t s = plev[1] - plev[0] < num_t(0) ? num_t(-1) : num_t(1); + + // scale by -1/g + num_t m1g = s/num_t(9.80665); + for (unsigned long i = 0; i < nxy; ++i) + ivt[i] *= m1g; +} +} + +// -------------------------------------------------------------------------- +teca_integrated_vapor_transport::teca_integrated_vapor_transport() : + wind_u_variable("ua"), wind_v_variable("va"), + specific_humidity_variable("hus"), ivt_u_variable("ivt_u"), + ivt_v_variable("ivt_v"), fill_value(1.0e20) +{ + this->set_number_of_input_connections(1); + this->set_number_of_output_ports(1); +} + +// -------------------------------------------------------------------------- +teca_integrated_vapor_transport::~teca_integrated_vapor_transport() +{} + +#if defined(TECA_HAS_BOOST) +// -------------------------------------------------------------------------- +void teca_integrated_vapor_transport::get_properties_description( + const string &prefix, options_description &global_opts) +{ + options_description opts("Options for " + + 
(prefix.empty()?"teca_integrated_vapor_transport":prefix)); + + opts.add_options() + TECA_POPTS_GET(std::string, prefix, wind_u_variable, + "name of the variable containg the lon component of the wind vector (ua)") + TECA_POPTS_GET(std::string, prefix, wind_v_variable, + "name of the variable containg the lat component of the wind vector (va)") + TECA_POPTS_GET(std::string, prefix, specific_humidty_variable, + "name of the variable containg the specific humidity (hus)") + TECA_POPTS_GET(double, prefix, fill_value, + "the value of the NetCDF _FillValue attribute (1e20)") + ; + + global_opts.add(opts); +} + +// -------------------------------------------------------------------------- +void teca_integrated_vapor_transport::set_properties( + const string &prefix, variables_map &opts) +{ + TECA_POPTS_SET(opts, std::string, prefix, wind_u_variable) + TECA_POPTS_SET(opts, std::string, prefix, wind_v_variable) + TECA_POPTS_SET(opts, std::string, prefix, specific_humidity_variable) + TECA_POPTS_SET(opts, double, prefix, fill_value) +} +#endif + +// -------------------------------------------------------------------------- +teca_metadata teca_integrated_vapor_transport::get_output_metadata( + unsigned int port, + const std::vector &input_md) +{ +#ifdef TECA_DEBUG + std::cerr << teca_parallel_id() + << "teca_integrated_vapor_transport::get_output_metadata" << std::endl; +#endif + (void)port; + + // set things up in the first pass, and don't modify in subsequent passes + // due to threading concerns + + if (this->get_number_of_derived_variables() == 0) + { + // the base class will handle dealing with the transformation of + // mesh dimensions and reporting the array we produce, but we have + // to determine the data type and tell the name of the produced array. 
+ const teca_metadata &md = input_md[0]; + + teca_metadata attributes; + if (md.get("attributes", attributes)) + { + TECA_ERROR("Failed to determine output data type " + "because attributes are missing") + return teca_metadata(); + } + + teca_metadata u_atts; + if (attributes.get(this->wind_u_variable, u_atts)) + { + TECA_ERROR("Failed to determine output data type " + "because attributes for \"" << this->wind_u_variable + << "\" are missing") + return teca_metadata(); + } + + int type_code = 0; + if (u_atts.get("type_code", type_code)) + { + TECA_ERROR("Failed to determine output data type " + "because attributes for \"" << this->wind_u_variable + << "\" is missing a \"type_code\"") + return teca_metadata(); + } + + teca_array_attributes ivt_u_atts( + type_code, teca_array_attributes::point_centering, + 0, "kg m^{-1} s^{-1}", "longitudinal integrated vapor transport", + "the longitudinal component of integrated vapor transport", + 1, this->fill_value); + + teca_array_attributes ivt_v_atts( + type_code, teca_array_attributes::point_centering, + 0, "kg m^{-1} s^{-1}", "latitudinal integrated vapor transport", + "the latitudinal component of integrated vapor transport", + 1, this->fill_value); + + // install name and attributes of the output variables in the base class + this->append_derived_variable(this->ivt_u_variable); + this->append_derived_variable(this->ivt_v_variable); + + this->append_derived_variable_attribute(ivt_u_atts); + this->append_derived_variable_attribute(ivt_v_atts); + + } + + if (this->get_number_of_dependent_variables() == 0) + { + // install the names of the input variables in the base class + this->append_dependent_variable(this->wind_u_variable); + this->append_dependent_variable(this->wind_v_variable); + this->append_dependent_variable(this->specific_humidity_variable); + } + + // invoke the base class method, which does the work of transforming + // the mesh and reporting the variables and their attributes. 
+ return teca_vertical_reduction::get_output_metadata(port, input_md); +} + +// -------------------------------------------------------------------------- +std::vector teca_integrated_vapor_transport::get_upstream_request( + unsigned int port, + const std::vector &input_md, + const teca_metadata &request) +{ + // invoke the base class method + return teca_vertical_reduction::get_upstream_request(port, input_md, request); +} + +// -------------------------------------------------------------------------- +const_p_teca_dataset teca_integrated_vapor_transport::execute( + unsigned int port, + const std::vector &input_data, + const teca_metadata &request) +{ +#ifdef TECA_DEBUG + std::cerr << teca_parallel_id() + << "teca_integrated_vapor_transport::execute" << std::endl; +#endif + (void)port; + + // get the input mesh + const_p_teca_cartesian_mesh in_mesh + = std::dynamic_pointer_cast(input_data[0]); + + if (!in_mesh) + { + TECA_ERROR("Failed to compute IVT because a cartesian mesh is required.") + return nullptr; + } + + // get the input dimensions + unsigned long extent[6] = {0}; + if (in_mesh->get_extent(extent)) + { + TECA_ERROR("Failed to compute IVT because mesh extent is missing.") + return nullptr; + } + + unsigned long nx = extent[1] - extent[0] + 1; + unsigned long ny = extent[3] - extent[2] + 1; + unsigned long nz = extent[5] - extent[4] + 1; + + // get the pressure coordinates + const_p_teca_variant_array p = in_mesh->get_z_coordinates(); + if (!p) + { + TECA_ERROR("Failed to compute IVT because pressure coordinates are missing") + return nullptr; + } + + if (p->size() < 2) + { + TECA_ERROR("Failed to compute IVT because z dimensions " + << p->size() << " < 2 as required by the integration method") + return nullptr; + } + + // gather the input arrays + const_p_teca_variant_array wind_u = + in_mesh->get_point_arrays()->get(this->wind_u_variable); + + if (!wind_u) + { + TECA_ERROR("Failed to compute IVT because longitudinal wind \"" + << this->wind_u_variable 
<< "\" is missing") + return nullptr; + } + + const_p_teca_variant_array wind_u_valid = + in_mesh->get_point_arrays()->get(this->wind_u_variable + "_valid"); + + const_p_teca_variant_array wind_v = + in_mesh->get_point_arrays()->get(this->wind_v_variable); + + if (!wind_v) + { + TECA_ERROR("Failed to compute IVT because latitudinal wind \"" + << this->wind_v_variable << "\" is missing") + return nullptr; + } + + const_p_teca_variant_array wind_v_valid = + in_mesh->get_point_arrays()->get(this->wind_v_variable + "_valid"); + + const_p_teca_variant_array q = + in_mesh->get_point_arrays()->get(this->specific_humidity_variable); + + if (!q) + { + TECA_ERROR("Failed to compute IVT because specific humidity \"" + << this->specific_humidity_variable << "\" is missing") + return nullptr; + } + + const_p_teca_variant_array q_valid = + in_mesh->get_point_arrays()->get(this->specific_humidity_variable + "_valid"); + + // the base class will construct the output mesh + p_teca_cartesian_mesh out_mesh + = std::dynamic_pointer_cast( + std::const_pointer_cast( + teca_vertical_reduction::execute(port, input_data, request))); + + if (!out_mesh) + { + TECA_ERROR("Failed to compute IVT because the output mesh was " + "not constructed") + return nullptr; + } + + // allocate the output arrays + unsigned long nxy = nx*ny; + p_teca_variant_array ivt_u = wind_u->new_instance(nxy); + p_teca_variant_array ivt_v = wind_u->new_instance(nxy); + + // store the result + out_mesh->get_point_arrays()->set(this->ivt_u_variable, ivt_u); + out_mesh->get_point_arrays()->set(this->ivt_v_variable, ivt_v); + + // calculate IVT + NESTED_TEMPLATE_DISPATCH_FP(const teca_variant_array_impl, + p.get(), _COORDS, + + const NT_COORDS *p_p = static_cast(p.get())->get(); + + NESTED_TEMPLATE_DISPATCH_FP(teca_variant_array_impl, + ivt_u.get(), _DATA, + + NT_DATA *p_ivt_u = static_cast(ivt_u.get())->get(); + NT_DATA *p_ivt_v = static_cast(ivt_v.get())->get(); + + const NT_DATA *p_wind_u = 
static_cast(wind_u.get())->get(); + const NT_DATA *p_wind_v = static_cast(wind_v.get())->get(); + const NT_DATA *p_q = static_cast(q.get())->get(); + + const char *p_wind_u_valid = nullptr; + const char *p_wind_v_valid = nullptr; + const char *p_q_valid = nullptr; + if (wind_u_valid) + { + using TT_MASK = teca_char_array; + + p_wind_u_valid = dynamic_cast(wind_u_valid.get())->get(); + p_wind_v_valid = dynamic_cast(wind_v_valid.get())->get(); + p_q_valid = dynamic_cast(q_valid.get())->get(); + + ::cartesian_ivt(nx, ny, nz, p_p, p_wind_u, p_wind_u_valid, p_q, p_q_valid, p_ivt_u); + ::cartesian_ivt(nx, ny, nz, p_p, p_wind_v, p_wind_v_valid, p_q, p_q_valid, p_ivt_v); + } + else + { + ::cartesian_ivt(nx, ny, nz, p_p, p_wind_u, p_q, p_ivt_u); + ::cartesian_ivt(nx, ny, nz, p_p, p_wind_v, p_q, p_ivt_v); + } + ) + ) + + return out_mesh; +} diff --git a/alg/teca_integrated_vapor_transport.h b/alg/teca_integrated_vapor_transport.h new file mode 100644 index 000000000..44bd896a4 --- /dev/null +++ b/alg/teca_integrated_vapor_transport.h @@ -0,0 +1,91 @@ +#ifndef teca_integrated_vapor_transport_h +#define teca_integrated_vapor_transport_h + +#include "teca_shared_object.h" +#include "teca_vertical_reduction.h" +#include "teca_metadata.h" + +#include +#include + +TECA_SHARED_OBJECT_FORWARD_DECL(teca_integrated_vapor_transport) + +/// an algorithm that computes integrated vapor transport (IVT) +/** +Compute integrated vaport transport (IVT) from wind vector and +specific humidity. + +IVT = - \frac{1}{g} \int_{p_0}^{p_1} \vec{v} q dp + +where q is the specific humidity, and \vec{v} = (u, v) are the +longitudinal and latitudinal components of wind. + +This calculation is an instance of a vertical reduction where +a 3D mesh is transformed into a 2D one. 
+*/ +class teca_integrated_vapor_transport : public teca_vertical_reduction +{ +public: + TECA_ALGORITHM_STATIC_NEW(teca_integrated_vapor_transport) + TECA_ALGORITHM_DELETE_COPY_ASSIGN(teca_integrated_vapor_transport) + TECA_ALGORITHM_CLASS_NAME(teca_integrated_vapor_transport) + ~teca_integrated_vapor_transport(); + + // report/initialize to/from Boost program options + // objects. + TECA_GET_ALGORITHM_PROPERTIES_DESCRIPTION() + TECA_SET_ALGORITHM_PROPERTIES() + + // set the name of the variable that contains the longitudinal + // component of the wind vector ("ua") + TECA_ALGORITHM_PROPERTY(std::string, wind_u_variable) + + // set the name of the variable that contains the latitudinal + // component of the wind vector ("va") + TECA_ALGORITHM_PROPERTY(std::string, wind_v_variable) + + // set the name of the variable that contains the specific + // humidity ("hus") + TECA_ALGORITHM_PROPERTY(std::string, + specific_humidity_variable) + + // set the name of the variable that contains the longitudinal + // component of the ivt vector ("ivt_u") + TECA_ALGORITHM_PROPERTY(std::string, ivt_u_variable) + + // set the name of the variable that contains the latitudinal + // component of the ivt vector ("ivt_v") + TECA_ALGORITHM_PROPERTY(std::string, ivt_v_variable) + + // set the _FillValue attribute for the output data. 
+ // default 1.0e20 + TECA_ALGORITHM_PROPERTY(double, fill_value) + +protected: + teca_integrated_vapor_transport(); + +private: + teca_metadata get_output_metadata( + unsigned int port, + const std::vector &input_md) override; + + std::vector get_upstream_request( + unsigned int port, + const std::vector &input_md, + const teca_metadata &request) override; + + const_p_teca_dataset execute( + unsigned int port, + const std::vector &input_data, + const teca_metadata &request) override; + +private: + std::string wind_u_variable; + std::string wind_v_variable; + std::string specific_humidity_variable; + std::string ivt_u_variable; + std::string ivt_v_variable; + double fill_value; +}; + +#endif diff --git a/alg/teca_l2_norm.cxx b/alg/teca_l2_norm.cxx index 594bf4809..9d66dbdd0 100644 --- a/alg/teca_l2_norm.cxx +++ b/alg/teca_l2_norm.cxx @@ -4,6 +4,7 @@ #include "teca_array_collection.h" #include "teca_variant_array.h" #include "teca_metadata.h" +#include "teca_array_attributes.h" #include #include @@ -169,6 +170,12 @@ teca_metadata teca_l2_norm::get_output_metadata( #endif (void)port; + if (this->component_0_variable.empty()) + { + TECA_ERROR("The component_0_variable was not set") + return teca_metadata(); + } + // add in the array we will generate teca_metadata out_md(input_md[0]); @@ -178,6 +185,37 @@ teca_metadata teca_l2_norm::get_output_metadata( out_md.append("variables", norm_var); + // insert attributes to enable this to be written by the CF writer + teca_metadata attributes; + out_md.get("attributes", attributes); + + teca_metadata comp_0_atts; + if (attributes.get(this->component_0_variable, comp_0_atts)) + { + TECA_WARNING("Failed to get component 0 \"" << this->component_0_variable + << "\" attrbibutes. Writing the result will not be possible") + } + else + { + // copy the attributes from the input. this will capture the + // data type, size, units, etc. + teca_array_attributes norm_atts(comp_0_atts); + + // update name, long_name, and description. 
+ norm_atts.long_name = norm_var; + + norm_atts.description = + std::string("The L2 norm of (" + this->component_0_variable); + if (!this->component_1_variable.empty()) + norm_atts.description += ", " + this->component_1_variable; + if (!this->component_2_variable.empty()) + norm_atts.description += ", " + this->component_2_variable; + norm_atts.description += ")"; + + attributes.set(norm_var, (teca_metadata)norm_atts); + out_md.set("attributes", attributes); + } + return out_md; } diff --git a/alg/teca_python_algorithm.py b/alg/teca_python_algorithm.py deleted file mode 100644 index c1d15cf51..000000000 --- a/alg/teca_python_algorithm.py +++ /dev/null @@ -1,108 +0,0 @@ - -class teca_python_algorithm(object): - """ - The base class used for writing new algorithms in Python. - Contains plumbing that connects user provided callbacks - to an instance of teca_programmable_algorithm. Users are - expected to override one or more of get_report_callback, - get_request_callback, and/or get_execute_callback. These - methods return a callable with the correct signature, and - use a closure to access class state. - """ - @classmethod - def New(derived_class): - """ factory method returns an instance of the derived type """ - dc = derived_class() - dc.initialize_implementation() - return dc - - def initialize_implementation(self): - """ - Initializes the instance and wires up the plumbing. 
- """ - # call overridable methods to get number of inputs/outputs - n_inputs = self.get_number_of_input_connections() - n_outputs = self.get_number_of_output_ports() - - # call overrides to get implementation for teca execution - # phase implementations - import teca_py - self.impl = teca_py.teca_programmable_algorithm.New() - self.impl.set_number_of_input_connections(n_inputs) - self.impl.set_number_of_output_ports(n_outputs) - self.impl.set_name(self.__class__.__name__) - self.impl.set_report_callback(self.get_report_callback()) - self.impl.set_request_callback(self.get_request_callback()) - self.impl.set_execute_callback(self.get_execute_callback()) - - def __getattr__(self, name): - """ forward stuff to the programmable algorithm """ - - # guard against confusing infinite recursion that - # occurs if impl is not present. one common way - # that this occurs is if the instance was not - # created with the New method - if name == 'impl': - raise RuntimeError('The teca_python_algorithm ' \ - 'was imporperly initialized. Did you use the ' \ - 'factory method, New(), to create this ' \ - 'instance of %s?'%(self.__class__.__name__)) - - # forward to the teca_programmable_algorithm - return self.impl.__getattribute__(name) - - def get_number_of_input_connections(self): - """ Override to change number of inputs """ - return 1 - - def get_number_of_output_ports(self): - """ Override to change number of outputs """ - return 1 - - def get_report_callback(self): - """ - Returns a function with the signature - - report_callback(port, md_in) -> teca_metadata - - The default implementation passes the report down stream. - Override this to customize the behavior of the report - phase of execution. 
- """ - def report_callback(port, md_in): - import teca_py - return teca_py.teca_metadata(md_in[0]) - return report_callback - - def get_request_callback(self): - """ - Returns a function with the signature - - request_callback(port, md_in, req_in) -> [teca_metadata] - - The default implementation passes the request up stream. - Override this to customize the behavior of the request - phase of execution. - """ - def request_callback(port, md_in, req_in): - import teca_py - return [teca_py.teca_metadata(req_in)] - return request_callback - - def get_execute_callback(self): - """ - Returns a function with the signature - - execute_callback(port, data_in, req_in) -> teca_dataset - - The default implementation shallow copies the input dataset. - Override this to customize the behavior of the execute - phase of execution. - """ - def execute_callback(port, data_in, req_in): - import teca_py - if len(data_in): - data_out = data_in[0].new_instance() - data_out.shallow_copy(teca_py.as_non_const_teca_dataset(data_out)) - return data_out - return execute_callback diff --git a/alg/teca_pytorch_algorithm.py b/alg/teca_pytorch_algorithm.py new file mode 100644 index 000000000..47c2d4511 --- /dev/null +++ b/alg/teca_pytorch_algorithm.py @@ -0,0 +1,385 @@ +import os +import sys +from socket import gethostname +import numpy as np + +class teca_pytorch_algorithm(teca_python_algorithm): + """ + A TECA algorithm that provides access to torch. To use this class, derive + a new class from it and from your class: + + 1. call set input_/output_variable. this tells the pytorch_algorithm + which array to process and how to name the result. + + 2. call set_model. this installs your torch model. Use load_state_dict + to load state dict from the file system in parallel. + + 3. override preprocess. The input numpy array is passed in. return the + array to send to torch after applying any preprocessing or transforms. + + 4. override postprocess. the tensor returned from torch is passed. 
return a + numpy array with the correct mesh dimensions + + 5. Optionally override the usual teca_python_algorithm methods as needed. + + """ + def __init__(self): + + self.input_variable = None + self.output_variable = None + self.output_variable_atts = None + + self.model = None + self.model_path = None + self.device = 'cpu' + self.n_threads = -1 + self.n_threads_max = 4 + self.verbose = 0 + self.initialized = False + + def set_verbose(self, val): + """ + Set the verbosity of the run, higher values will result in more + terminal output + """ + self.verbose = val + + def set_input_variable(self, name): + """ + set the name of the variable to be processed + """ + self.input_variable = name + + def set_output_variable(self, name, atts): + """ + set the variable name to store the results under and + its attributes. Attributes are optional and may be None + but are required for the CF writer to write the result + to disk. + """ + self.output_variable = name + self.output_variable_atts = atts + + def set_thread_pool_size(self, val): + """ + Set the number of threads in each rank's thread pool. Setting + to a value of -1 will result in the thread pool being sized + such that each thread is uniquely and exclusively bound to a + specific core accounting for thread pools in other ranks + running on the same node + """ + self.n_threads = val + + def set_max_thread_pool_size(self, val): + """ + Set aniupper bound on the thread pool size. This is applied + during automatic thread pool sizing. + """ + self.n_threads_max = val + + def set_target_device(self, val): + """ + Set the target device. May be one of 'cpu' or 'cuda'. + """ + if val == 'cpu' or val == 'cuda': + self.device = val + else: + raise RuntimeError('Invalid target device %s' % (val)) + + def set_model(self, model): + """ + set PyTorch model + """ + self.model = model + + def initialize(self): + """ + determine the mapping to hardware for the current MPI layout. 
+ if device is cpu then this configures OpenMP such that its + thread pools have 1 thread per physical core. + this also imports torch. this must be called prior to using any + torch api's etc. + """ + event = teca_time_py_event('teca_pytorch_algorithm::initialize') + + if self.initialized: + return + + rank = 0 + n_ranks = 1 + comm = self.get_communicator() + if get_teca_has_mpi(): + rank = comm.Get_rank() + n_ranks = comm.Get_size() + + # tell OpenMP to report on what it does + if self.verbose > 2: + os.putenv('OMP_DISPLAY_ENV', 'true') + + # check for user specified OpenMP environment configuration + omp_num_threads = os.getenv('OMP_NUM_THREADS') + omp_places = os.getenv('OMP_PLACES') + omp_proc_bind = os.getenv('OMP_PROC_BIND') + if omp_num_threads is not None or omp_places is not None \ + or omp_proc_bind is not None: + + # at least one of the OpenMP environment control variables + # was set. we will now bail out and use those settings + if rank == 0: + sys.stderr.write('[0] STATUS: OpenMP environment override ' + 'detected. OMP_NUM_THREADS=%s ' + 'OMP_PROC_BIND=%s OMP_PLACES=%s\n' % ( + str(omp_num_threads), str(omp_proc_bind), + str(omp_places))) + sys.stderr.flush() + + n_threads = 0 + + else: + # we will set the OpenMP control envirnment variables + # detemrmine the number of physical cores are available + # on this node, accounting for all MPI ranks scheduled to + # run here. 
+ try: + # let the user request a specific number of threads + n_threads = self.n_threads + + n_threads, affinity = \ + thread_util.thread_parameters(comm, n_threads, 1, + 0 if self.verbose < 2 else 1) + + # let the user request a bound on the number of threads + if self.n_threads_max > 0: + n_threads = min(n_threads, self.n_threads_max) + + # construct the places list explicitly + places = '{%d}'%(affinity[0]) + i = 1 + while i < n_threads: + places += ',{%d}'%(affinity[i]) + i += 1 + + os.putenv('OMP_NUM_THREADS', '%d'%(n_threads)) + os.putenv('OMP_PROC_BIND', 'true') + os.putenv('OMP_PLACES', places) + + if self.verbose: + sys.stderr.write('[%d] STATUS: %s : %d : OMP_NUM_THREADS=%d' + ' OMP_PROC_BIND=true OMP_PLACES=%s\n' % ( + rank, gethostname(), rank, n_threads, + places)) + sys.stderr.flush() + + except(RuntimeError): + # we failed to detect the number of physical cores per MPI rank + os.putenv('OMP_NUM_THREADS', '1') + n_threads = 1 + + sys.stderr.write('[0] STATUS: Failed to determine the ' + 'number of physical cores available per ' + 'MPI rank. OMP_NUM_THREADS=1\n') + sys.stderr.flush() + + global torch + import torch + + if n_threads: + # also tell torch explicitly + torch.set_num_threads(n_threads) + torch.set_num_interop_threads(n_threads) + + if 'cuda' in self.device: + # check that CUDA is present + if torch.cuda.is_available(): + # get the number of devices and assign them to ranks round + # robin + n_dev = torch.cuda.device_count() + dev_id = rank % n_dev + + if self.device == 'cuda': + # select the GPU that this rank will use. + self.device = 'cuda:%d' % (dev_id) + + if self.verbose: + dev_name = torch.cuda.get_device_name(self.device) + + sys.stderr.write('[%d] STATUS: %s : %d : %d/%d : %s\n' % ( + rank, gethostname(), rank, dev_id, n_dev, + dev_name)) + sys.stderr.flush() + else: + # fall back to OpenMP + if rank == 0: + sys.stderr.write('[%d] WARNING: CUDA was requested but is not' + ' available. 
OpenMP will be used.\n') + sys.stderr.flush() + + self.device = 'cpu' + + self.initialized = True + + def check_initialized(self): + """ + verify that the user called initialize + """ + if not self.initialized: + raise RuntimeError('Not initialized! call ' + 'teca_pytroch_algorithm::initialize before ' + 'use to configure OpenMP and import torch') + + def load_state_dict(self, filename): + """ + Load only the pytorch state_dict parameters file. + """ + event = teca_time_py_event('teca_pytorch_algorithm::load_state_dict') + + self.check_initialized() + + comm = self.get_communicator() + rank = comm.Get_rank() + + sd = None + if rank == 0: + sd = torch.load(filename, map_location=self.device) + + sd = comm.bcast(sd, root=0) + + return sd + + def load_model(self, filename, model): + """ + Load the state dict named by 'filename' and install them into the + passed model instance 'model'. This also moves the model on the current + target device, and puts the model into inference mode. + """ + event = teca_time_py_event('teca_pytorch_algorithm::load_model') + + self.check_initialized() + + # load the model weights from disk + model_state = self.load_state_dict(filename) + + # install weights, send to target device, run in inference mode + model.load_state_dict(model_state) + model.to(self.device) + model.eval() + + self.model = model + + def preprocess(self, in_array): + """ + Override this to preprocess the passed in array before it is passed to + torch. The passed array has the shape of the input/output mesh. the + default implementation does nothing. + """ + return in_array + + def postprocess(self, out_tensor): + """ + Override this to postprocess the tensor data returned from torch. + return the result as a numpy array. the return should be sized + compatibly with the output mesh. The default implementation converts + the tensor to a ndarray. 
+ """ + return out_tensor.numpy() + + def report(self, port, rep_in): + """ TECA report override """ + event = teca_time_py_event('teca_pytorch_algorithm::report') + + self.check_initialized() + + # check for required parameters. + if self.model is None: + raise RuntimeError('A torch model has not been specified') + + if self.input_variable is None: + raise RuntimeError('input_variable has not been specified') + + if self.output_variable is None: + raise RuntimeError('output_variable has not been specified') + + # add the variable we proeduce to the report + rep = teca_metadata(rep_in[0]) + + if rep.has('variables'): + rep.append('variables', self.output_variable) + else: + rep.set('variables', self.output_variable) + + attributes = rep["attributes"] + attributes["ar_probability"] = self.output_variable_atts.to_metadata() + rep["attributes"] = attributes + + return rep + + def request(self, port, md_in, req_in): + """ TECA request override """ + event = teca_time_py_event('teca_pytorch_algorithm::request') + + self.check_initialized() + + req = teca_metadata(req_in) + + arrays = [] + if req.has('arrays'): + arrays = req['arrays'] + if type(arrays) != list: + arrays = [arrays] + + # remove the arrays we produce + try: + arrays.remove(self.output_variable) + except(Exception): + pass + + # add the arrays we need + arrays.append(self.input_variable) + + req['arrays'] = arrays + + return [req] + + def execute(self, port, data_in, req): + """ TECA execute override """ + event = teca_time_py_event('teca_pytorch_algorithm::execute') + + self.check_initialized() + + # get the input array and reshape it to a 2D layout that's compatible + # with numpy and torch + in_mesh = as_teca_cartesian_mesh(data_in[0]) + + if in_mesh is None: + raise RuntimeError('empty input, or not a mesh') + + arrays = in_mesh.get_point_arrays() + in_va = arrays[self.input_variable] + + ext = in_mesh.get_extent() + in_va.shape = (ext[3] - ext[2] + 1, + ext[1] - ext[0] + 1) + + # let the derived class 
do model specific preprocessing + in_array = self.preprocess(in_va) + + # send to torch for processing + in_tensor = torch.from_numpy(in_array).to(self.device) + + with torch.no_grad(): + out_tensor = self.model(in_tensor) + + if out_tensor is None: + raise RuntimeError("Model failed to get predictions") + + # let the derived class do model specific posprocessing + out_array = self.postprocess(out_tensor) + + # build the output + out_mesh = teca_cartesian_mesh.New() + out_mesh.shallow_copy(in_mesh) + + out_va = teca_variant_array.New(out_array) + out_mesh.get_point_arrays().set(self.output_variable, out_va) + + return out_mesh diff --git a/alg/teca_table_calendar.cxx b/alg/teca_table_calendar.cxx index 61692697b..79768f323 100644 --- a/alg/teca_table_calendar.cxx +++ b/alg/teca_table_calendar.cxx @@ -4,6 +4,7 @@ #include "teca_array_collection.h" #include "teca_variant_array.h" #include "teca_metadata.h" +#include "teca_array_attributes.h" #include #include @@ -130,16 +131,14 @@ const_p_teca_dataset teca_table_calendar::execute( // get calendar and unit system std::string units = this->units; - if (units.empty() && - ((in_table->get_time_units(units)) && units.empty())) + if (units.empty() && (in_table->get_time_units(units) || units.empty())) { TECA_ERROR("Units are missing") return nullptr; } std::string calendar = this->calendar; - if (calendar.empty() && - ((in_table->get_calendar(calendar)) && calendar.empty())) + if (calendar.empty() && (in_table->get_calendar(calendar) || calendar.empty())) { TECA_ERROR("Calendar is missing") return nullptr; @@ -158,6 +157,9 @@ const_p_teca_dataset teca_table_calendar::execute( p_teca_table out_table = teca_table::New(); out_table->copy_metadata(in_table); + teca_metadata atrs; + out_table->get_metadata().get("attributes", atrs); + unsigned long n_rows = time->size(); p_teca_variant_array_impl year; @@ -168,6 +170,11 @@ const_p_teca_dataset teca_table_calendar::execute( year = std::static_pointer_cast 
>(out_table->get_column(year_col)); year->reserve(n_rows); + + teca_array_attributes atts; + atts.long_name = "year"; + atts.description = calendar + " calendar year"; + atrs.set("year", (teca_metadata)atts); } p_teca_variant_array_impl month; @@ -178,6 +185,11 @@ const_p_teca_dataset teca_table_calendar::execute( month = std::static_pointer_cast >(out_table->get_column(month_col)); month->reserve(n_rows); + + teca_array_attributes atts; + atts.long_name = "month"; + atts.description = calendar + " calendar month of year"; + atrs.set("month", (teca_metadata)atts); } p_teca_variant_array_impl day; @@ -188,6 +200,11 @@ const_p_teca_dataset teca_table_calendar::execute( day = std::static_pointer_cast >(out_table->get_column(day_col)); day->reserve(n_rows); + + teca_array_attributes atts; + atts.long_name = "day"; + atts.description = calendar + " calendar day of the monnth"; + atrs.set("day", (teca_metadata)atts); } p_teca_variant_array_impl hour; @@ -198,6 +215,11 @@ const_p_teca_dataset teca_table_calendar::execute( hour = std::static_pointer_cast >(out_table->get_column(hour_col)); hour->reserve(n_rows); + + teca_array_attributes atts; + atts.long_name = "hour"; + atts.description = "hour of the day"; + atrs.set("hour", (teca_metadata)atts); } p_teca_variant_array_impl minute; @@ -208,6 +230,11 @@ const_p_teca_dataset teca_table_calendar::execute( minute = std::static_pointer_cast >(out_table->get_column(minute_col)); minute->reserve(n_rows); + + teca_array_attributes atts; + atts.long_name = "minute"; + atts.description = "minute of the hour"; + atrs.set("minute", (teca_metadata)atts); } p_teca_variant_array_impl second; @@ -218,8 +245,15 @@ const_p_teca_dataset teca_table_calendar::execute( second = std::static_pointer_cast >(out_table->get_column(second_col)); second->reserve(n_rows); + + teca_array_attributes atts; + atts.long_name = "second"; + atts.description = "second of the minute"; + atrs.set("second", (teca_metadata)atts); } + 
out_table->get_metadata().set("attributes", atrs); + // make the date computations TEMPLATE_DISPATCH( const teca_variant_array_impl, diff --git a/alg/teca_table_reduce.cxx b/alg/teca_table_reduce.cxx index 720a5cb9c..b611d796c 100644 --- a/alg/teca_table_reduce.cxx +++ b/alg/teca_table_reduce.cxx @@ -48,14 +48,14 @@ teca_metadata teca_table_reduce::initialize_output_metadata( } // -------------------------------------------------------------------------- -p_teca_dataset teca_table_reduce::reduce( - const const_p_teca_dataset &left_ds, +p_teca_dataset teca_table_reduce::reduce(const const_p_teca_dataset &left_ds, const const_p_teca_dataset &right_ds) { #ifdef TECA_DEBUG cerr << teca_parallel_id() << "teca_table_reduce::reduce" << endl; #endif + const_p_teca_table left_table = std::dynamic_pointer_cast(left_ds); @@ -64,10 +64,10 @@ p_teca_dataset teca_table_reduce::reduce( p_teca_table output_table; - bool left = left_table && *left_table; - bool right = right_table && *right_table; + bool have_left = left_table && *left_table; + bool have_right = right_table && *right_table; - if (left && right) + if (have_left && have_right) { output_table = std::dynamic_pointer_cast(left_table->new_copy()); @@ -75,13 +75,13 @@ p_teca_dataset teca_table_reduce::reduce( output_table->concatenate_rows(right_table); } else - if (left) + if (have_left) { output_table = std::dynamic_pointer_cast(left_table->new_copy()); } else - if (right) + if (have_right) { output_table = std::dynamic_pointer_cast(right_table->new_copy()); diff --git a/alg/teca_table_reduce.h b/alg/teca_table_reduce.h index 17c25fcc0..2740494d6 100644 --- a/alg/teca_table_reduce.h +++ b/alg/teca_table_reduce.h @@ -29,17 +29,14 @@ class teca_table_reduce : public teca_index_reduce teca_table_reduce(); // overrides - p_teca_dataset reduce( - const const_p_teca_dataset &left, + p_teca_dataset reduce(const const_p_teca_dataset &left, const const_p_teca_dataset &right) override; std::vector initialize_upstream_request( - 
unsigned int port, - const std::vector &input_md, + unsigned int port, const std::vector &input_md, const teca_metadata &request) override; - teca_metadata initialize_output_metadata( - unsigned int port, + teca_metadata initialize_output_metadata(unsigned int port, const std::vector &input_md) override; }; diff --git a/alg/teca_tc_activity.py b/alg/teca_tc_activity.py index 384297c3f..ed52c472c 100644 --- a/alg/teca_tc_activity.py +++ b/alg/teca_tc_activity.py @@ -1,8 +1,7 @@ import sys -import teca_py import numpy as np -class teca_tc_activity(teca_py.teca_python_algorithm): +class teca_tc_activity(teca_python_algorithm): """ Computes summary statistics, histograms on sorted, classified, TC trajectory output. @@ -51,98 +50,92 @@ def set_color_map(self, color_map): """ self.color_map = color_map - def get_execute_callback(self): + def execute(self, port, data_in, req): """ - return a teca_algorithm::execute function. a closure - is used to gain self. + expects the output of the teca_tc_classify algorithm + generates a handful of histograms, summary statistics, + and plots. returns summary table with counts of annual + storms and their categories. """ - def execute(port, data_in, req): - """ - expects the output of the teca_tc_classify algorithm - generates a handful of histograms, summary statistics, - and plots. returns summary table with counts of annual - storms and their categories. - """ - global plt - global plt_mp - global plt_tick - - import matplotlib.pyplot as plt - import matplotlib.patches as plt_mp - import matplotlib.ticker as plt_tick - - # store matplotlib state we modify - legend_frame_on_orig = plt.rcParams['legend.frameon'] - - # tweak matplotlib slightly - plt.rcParams['figure.max_open_warning'] = 0 - plt.rcParams['legend.frameon'] = 1 - - # get the input table - in_table = teca_py.as_teca_table(data_in[0]) - if in_table is None: - # TODO if this is part of a parallel pipeline then - # only rank 0 should report an error. 
- sys.stderr.write('ERROR: empty input, or not a table\n') - return teca_table.New() - - time_units = in_table.get_time_units() - - # get the columns of raw data - year = in_table.get_column('year').as_array() - region_id = in_table.get_column('region_id').as_array() - region_name = in_table.get_column('region_name') - region_long_name = in_table.get_column('region_long_name') - start_y = in_table.get_column('start_y').as_array() - - ACE = in_table.get_column('ACE').as_array() - PDI = in_table.get_column('PDI').as_array() - - # organize the data by year month etc... - regional_ACE = [] - regional_PDI = [] - - # get unique for use as indices etc - uyear = sorted(set(year)) - n_year = len(uyear) - ureg = sorted(set(zip(region_id, region_name, region_long_name))) + global plt + global plt_mp + global plt_tick + + import matplotlib.pyplot as plt + import matplotlib.patches as plt_mp + import matplotlib.ticker as plt_tick + + # store matplotlib state we modify + legend_frame_on_orig = plt.rcParams['legend.frameon'] + + # tweak matplotlib slightly + plt.rcParams['figure.max_open_warning'] = 0 + plt.rcParams['legend.frameon'] = 1 + + # get the input table + in_table = as_teca_table(data_in[0]) + if in_table is None: + # TODO if this is part of a parallel pipeline then + # only rank 0 should report an error. + sys.stderr.write('ERROR: empty input, or not a table\n') + return teca_table.New() + + time_units = in_table.get_time_units() + + # get the columns of raw data + year = in_table.get_column('year').as_array() + region_id = in_table.get_column('region_id').as_array() + region_name = in_table.get_column('region_name') + region_long_name = in_table.get_column('region_long_name') + start_y = in_table.get_column('start_y').as_array() + + ACE = in_table.get_column('ACE').as_array() + PDI = in_table.get_column('PDI').as_array() + + # organize the data by year month etc... 
+ regional_ACE = [] + regional_PDI = [] + + # get unique for use as indices etc + uyear = sorted(set(year)) + n_year = len(uyear) + ureg = sorted(set(zip(region_id, region_name, region_long_name))) - self.accum_by_year_and_region(uyear, \ - ureg, year, region_id, start_y, ACE, regional_ACE) + self.accum_by_year_and_region(uyear, \ + ureg, year, region_id, start_y, ACE, regional_ACE) - self.accum_by_year_and_region(uyear, \ - ureg, year, region_id, start_y, PDI, regional_PDI) + self.accum_by_year_and_region(uyear, \ + ureg, year, region_id, start_y, PDI, regional_PDI) - # now plot the organized data in various ways - if self.color_map is None: - self.color_map = plt.cm.jet + # now plot the organized data in various ways + if self.color_map is None: + self.color_map = plt.cm.jet - self.plot_individual(uyear, \ - ureg, regional_ACE,'ACE', '$10^4 kn^2$') + self.plot_individual(uyear, \ + ureg, regional_ACE,'ACE', '$10^4 kn^2$') - self.plot_individual(uyear, \ - ureg, regional_PDI, 'PDI', '$m^3 s^{-2}$') + self.plot_individual(uyear, \ + ureg, regional_PDI, 'PDI', '$m^3 s^{-2}$') - self.plot_cumulative(uyear, \ - ureg, regional_ACE, 'ACE', '$10^4 kn^2$') + self.plot_cumulative(uyear, \ + ureg, regional_ACE, 'ACE', '$10^4 kn^2$') - self.plot_cumulative(uyear, \ - ureg, regional_PDI, 'PDI', '$m^3 s^{-2}$') + self.plot_cumulative(uyear, \ + ureg, regional_PDI, 'PDI', '$m^3 s^{-2}$') - if (self.interactive): - plt.show() + if (self.interactive): + plt.show() - # restore matplot lib global state - plt.rcParams['legend.frameon'] = legend_frame_on_orig + # restore matplot lib global state + plt.rcParams['legend.frameon'] = legend_frame_on_orig - # send data downstream - return in_table - return execute + # send data downstream + return in_table @staticmethod def two_digit_year_fmt(x, pos): q = int(x) - q = q - q/100*100 + q = q - q // 100 * 100 return '%02d'%q @staticmethod @@ -186,8 +179,8 @@ def plot_individual(self, uyear, ureg, var, var_name, units): rnms += 
('Southern','Northern','Global') n_plots = n_reg + 1 - n_left = n_plots%n_cols - n_rows = n_plots/n_cols + (1 if n_left else 0) + n_left = n_plots % n_cols + n_rows = n_plots // n_cols + (1 if n_left else 0) wid = 2.5*n_cols ht = 2.0*n_rows reg_t_fig.set_size_inches(wid, ht) @@ -214,7 +207,8 @@ def plot_individual(self, uyear, ureg, var, var_name, units): plt.plot(uyear, var[q::n_reg],'-',color=fill_col[q],linewidth=2) ax.set_xticks(uyear[:] if n_year < 10 else uyear[::2]) - ax.set_xlim([uyear[0], uyear[-1]]) + if len(uyear) > 1: + ax.set_xlim([uyear[0], uyear[-1]]) if self.rel_axes and q < n_reg - 1: ax.set_ylim([0, 1.05*(max_y_reg if q < n_reg - 3 else max_y_hem)]) @@ -263,7 +257,8 @@ def plot_cumulative(self, uyear, ureg, var, var_name, units): q += 1 ax.set_xticks(uyear[:] if n_year < 15 else uyear[::2]) - ax.set_xlim([uyear[0], uyear[-1]]) + if len(uyear) > 1: + ax.set_xlim([uyear[0], uyear[-1]]) plt.grid(True, zorder=0) ylim = ax.get_ylim() @@ -297,7 +292,8 @@ def plot_cumulative(self, uyear, ureg, var, var_name, units): q += 1 ax.set_xticks(uyear[:] if n_year < 15 else uyear[::2]) - ax.set_xlim([uyear[0], uyear[-1]]) + if len(uyear) > 1: + ax.set_xlim([uyear[0], uyear[-1]]) plt.grid(True, zorder=0) ylim = ax.get_ylim() @@ -315,23 +311,3 @@ def plot_cumulative(self, uyear, ureg, var, var_name, units): return -# def write_table(state, uyear, ureg, var, var_name, units, table_out): -# -# n_reg = len(ureg) + 3 # add 2 for n & s hemi, 1 for global -# n_year = len(uyear) -# -# rnms = zip(*ureg)[2] -# rnms += ('Southern','Northern','Global') -# -# -# tab = teca_table.New() -# tab.declare_columns(['Year'] + rnms, -# ['l'] + ['d']*n_reg) -# -# for val in var: -# if -# q = 0 -# while q < n_reg: -# tmp = var[q::n_reg] -# -# plt.plot(uyear, ,'-',color=fill_col[q],linewidth=2) diff --git a/alg/teca_tc_stats.py b/alg/teca_tc_stats.py index 5f474cac3..e7539ca1a 100644 --- a/alg/teca_tc_stats.py +++ b/alg/teca_tc_stats.py @@ -1,8 +1,7 @@ import sys -import teca_py import 
numpy as np -class teca_tc_stats(teca_py.teca_python_algorithm): +class teca_tc_stats(teca_python_algorithm): """ Computes summary statistics, histograms on sorted, classified, TC trajectory output. @@ -44,500 +43,496 @@ def set_rel_axes(self, rel_axes): """ self.rel_axes = rel_axes - def get_execute_callback(self): + def execute(self, port, data_in, req): """ - return a teca_algorithm::execute function. a closure - is used to gain self. + expects the output of the teca_tc_classify algorithm + generates a handful of histograms, summary statistics, + and plots. returns summary table with counts of annual + storms and their categories. """ - def execute(port, data_in, req): - """ - expects the output of the teca_tc_classify algorithm - generates a handful of histograms, summary statistics, - and plots. returns summary table with counts of annual - storms and their categories. - """ - import matplotlib.pyplot as plt - import matplotlib.patches as plt_mp - - # store matplotlib state we modify - legend_frame_on_orig = plt.rcParams['legend.frameon'] - - # tweak matplotlib slightly - plt.rcParams['figure.max_open_warning'] = 0 - plt.rcParams['legend.frameon'] = 1 - - # get the input table - in_table = teca_py.as_teca_table(data_in[0]) - if in_table is None: - # TODO if this is part of a parallel pipeline then - # only rank 0 should report an error. 
- sys.stderr.write('ERROR: empty input, or not a table\n') - return teca_table.New() - - time_units = in_table.get_time_units() - - # get the columns of raw data - year = in_table.get_column('year').as_array() - month = in_table.get_column('month').as_array() - duration = in_table.get_column('duration').as_array() - length = in_table.get_column('length').as_array()/1000.0 - category = in_table.get_column('category').as_array() - region_id = in_table.get_column('region_id').as_array() - region_name = in_table.get_column('region_name') - region_long_name = in_table.get_column('region_long_name') - wind = in_table.get_column('max_surface_wind').as_array() - press = in_table.get_column('min_sea_level_pressure').as_array() - start_y = in_table.get_column('start_y').as_array() - ACE = in_table.get_column('ACE').as_array() - - # organize the data by year month etc... - annual_cat = [] - annual_count = [] - annual_wind = [] - annual_press = [] - annual_dur = [] - annual_len = [] - annual_ACE = [] - by_month = [] - by_region = [] - totals = [] - - # get unique for use as indices etc - uyear = sorted(set(year)) - n_year = len(uyear) - - ureg = sorted(set(zip(region_id, region_name, region_long_name))) - n_reg = len(ureg) + 3 # add 2 for n & s hemi, 1 for global - - for yy in uyear: - yids = np.where(year==yy) - # break these down by year - annual_count.append(len(yids[0])) - annual_cat.append(category[yids]) - annual_wind.append(wind[yids]) - annual_press.append(press[yids]) - annual_dur.append(duration[yids]) - annual_len.append(length[yids]) - annual_ACE.append(ACE[yids]) - - # global totals - tmp = [annual_count[-1]] + import matplotlib.pyplot as plt + import matplotlib.patches as plt_mp + + # store matplotlib state we modify + legend_frame_on_orig = plt.rcParams['legend.frameon'] + + # tweak matplotlib slightly + plt.rcParams['figure.max_open_warning'] = 0 + plt.rcParams['legend.frameon'] = 1 + + # get the input table + in_table = as_teca_table(data_in[0]) + if in_table 
is None: + # TODO if this is part of a parallel pipeline then + # only rank 0 should report an error. + sys.stderr.write('ERROR: empty input, or not a table\n') + return teca_table.New() + + time_units = in_table.get_time_units() + + # get the columns of raw data + year = in_table.get_column('year').as_array() + month = in_table.get_column('month').as_array() + duration = in_table.get_column('duration').as_array() + length = in_table.get_column('length').as_array()/1000.0 + category = in_table.get_column('category').as_array() + region_id = in_table.get_column('region_id').as_array() + region_name = in_table.get_column('region_name') + region_long_name = in_table.get_column('region_long_name') + wind = in_table.get_column('max_surface_wind').as_array() + press = in_table.get_column('min_sea_level_pressure').as_array() + start_y = in_table.get_column('start_y').as_array() + ACE = in_table.get_column('ACE').as_array() + + # organize the data by year month etc... + annual_cat = [] + annual_count = [] + annual_wind = [] + annual_press = [] + annual_dur = [] + annual_len = [] + annual_ACE = [] + by_month = [] + by_region = [] + totals = [] + + # get unique for use as indices etc + uyear = sorted(set(year)) + n_year = len(uyear) + + ureg = sorted(set(zip(region_id, region_name, region_long_name))) + n_reg = len(ureg) + 3 # add 2 for n & s hemi, 1 for global + + for yy in uyear: + yids = np.where(year==yy) + # break these down by year + annual_count.append(len(yids[0])) + annual_cat.append(category[yids]) + annual_wind.append(wind[yids]) + annual_press.append(press[yids]) + annual_dur.append(duration[yids]) + annual_len.append(length[yids]) + annual_ACE.append(ACE[yids]) + + # global totals + tmp = [annual_count[-1]] + for c in np.arange(0,6,1): + cids = np.where(category[yids]==c) + tmp.append(len(cids[0])) + totals.append(tmp) + + # break down by year, month, and category + mm = month[yids] + mnum = np.arange(1,13,1) + monthly = [] + for m in mnum: + mids = 
np.where(mm==m) + mcats = category[yids][mids] + cats = [] for c in np.arange(0,6,1): - cids = np.where(category[yids]==c) - tmp.append(len(cids[0])) - totals.append(tmp) - - # break down by year, month, and category - mm = month[yids] - mnum = np.arange(1,13,1) - monthly = [] - for m in mnum: - mids = np.where(mm==m) - mcats = category[yids][mids] - cats = [] - for c in np.arange(0,6,1): - cids = np.where(mcats==c) - cats.append(len(cids[0])) - monthly.append(cats) - by_month.append(monthly) - # break down by year and region - rr = region_id[yids] - max_reg = np.max(rr) - regional = [] - for r,n,l in ureg: - rids = np.where(rr==r) - rcats = category[yids][rids] - cats = [] - for c in np.arange(0,6,1): - cids = np.where(rcats==c) - cats.append(len(cids[0])) - regional.append(cats) - by_region.append(regional) - # add north and south hemisphere regions - hemi = [] - nhids = np.where(start_y[yids] >= 0.0) - cats = category[yids][nhids] - nhcats = [] + cids = np.where(mcats==c) + cats.append(len(cids[0])) + monthly.append(cats) + by_month.append(monthly) + # break down by year and region + rr = region_id[yids] + max_reg = np.max(rr) + regional = [] + for r,n,l in ureg: + rids = np.where(rr==r) + rcats = category[yids][rids] + cats = [] for c in np.arange(0,6,1): - cids = np.where(cats==c) - nhcats.append(len(cids[0])) - by_region[-1].append(nhcats) - shids = np.where(start_y[yids] < 0.0) - cats = category[yids][shids] - shcats = [] - for c in np.arange(0,6,1): - cids = np.where(cats==c) - shcats.append(len(cids[0])) - by_region[-1].append(shcats) - # global break down - gcats = [] - cats = category[yids] - for c in np.arange(0,6,1): - cids = np.where(cats==c) - gcats.append(len(cids[0])) - by_region[-1].append(gcats) - - # dump annual totals - summary = teca_py.teca_table.New() - summary.declare_columns(['year', 'total', 'cat 0', \ - 'cat 1', 'cat 2', 'cat 3', 'cat 4', 'cat 5'], \ - ['i', 'ul', 'i', 'i', 'i', 'i', 'i', 'i']) - q = 0 - while q < n_year: - summary << 
int(uyear[q]) << int(totals[q][0]) \ - << int(totals[q][1]) << int(totals[q][2]) \ - << int(totals[q][3]) << int(totals[q][4]) \ - << int(totals[q][5]) << int(totals[q][6]) - q += 1 - f = open('%s_summary.csv'%(self.basename),'w') - f.write(str(summary)) - f.close() - - # now plot the organized data in various ways - n_cols = 3 - n_plots = n_year + 1 - n_left = n_plots%n_cols - n_rows = n_plots/n_cols + (1 if n_left else 0) - wid = 2.5*n_cols - ht = 2.0*n_rows - - # use this color map for Saphir-Simpson scale - red_cmap = ['#ffd2a3','#ffa749','#ff7c04', \ - '#ea4f00','#c92500','#a80300'] - - red_cmap_pats = [] - q = 0 - while q < 6: - red_cmap_pats.append( \ - plt_mp.Patch(color=red_cmap[q], label='cat %d'%(q))) - q += 1 - - # plot annual saphir-simpson distribution - page_no = 1 - cat_fig = plt.figure() - cat_fig.set_size_inches(wid, ht) - - max_y = 0 - q = 0 - while q < n_year: - max_y = max(max_y, len(np.where(annual_cat[q]==0)[0])) - q += 1 - - q = 0 - for yy in uyear: - plt.subplot(n_rows, n_cols, q+1) - ax = plt.gca() - ax.grid(zorder=0) - n,bins,pats = plt.hist(annual_cat[q], bins=np.arange(-0.5, 6.0, 1.0), \ - facecolor='steelblue', alpha=0.95, edgecolor='black', \ - linewidth=2, zorder=3) - j = 0 - while j < 6: - pats[j].set_facecolor(red_cmap[j]) - j += 1 - plt.xticks(np.arange(0,6,1)) - if self.rel_axes: - ax.set_ylim([0, max_y*1.05]) - if (q%n_cols == 0): - plt.ylabel('Count', fontweight='normal', fontsize=10) - if (q >= (n_year - n_cols)): - plt.xlabel('Category', fontweight='normal', fontsize=10) - plt.title('%d'%(yy), fontweight='bold', fontsize=11) - plt.grid(True) - - q += 1 - + cids = np.where(rcats==c) + cats.append(len(cids[0])) + regional.append(cats) + by_region.append(regional) + # add north and south hemisphere regions + hemi = [] + nhids = np.where(start_y[yids] >= 0.0) + cats = category[yids][nhids] + nhcats = [] + for c in np.arange(0,6,1): + cids = np.where(cats==c) + nhcats.append(len(cids[0])) + by_region[-1].append(nhcats) + shids = 
np.where(start_y[yids] < 0.0) + cats = category[yids][shids] + shcats = [] + for c in np.arange(0,6,1): + cids = np.where(cats==c) + shcats.append(len(cids[0])) + by_region[-1].append(shcats) + # global break down + gcats = [] + cats = category[yids] + for c in np.arange(0,6,1): + cids = np.where(cats==c) + gcats.append(len(cids[0])) + by_region[-1].append(gcats) + + # dump annual totals + summary = teca_table.New() + summary.set_request_index('table_id', 0) + summary.declare_columns(['year', 'total', 'cat 0', \ + 'cat 1', 'cat 2', 'cat 3', 'cat 4', 'cat 5'], \ + ['i', 'ul', 'i', 'i', 'i', 'i', 'i', 'i']) + q = 0 + while q < n_year: + summary << int(uyear[q]) << int(totals[q][0]) \ + << int(totals[q][1]) << int(totals[q][2]) \ + << int(totals[q][3]) << int(totals[q][4]) \ + << int(totals[q][5]) << int(totals[q][6]) + q += 1 + f = open('%s_summary.csv'%(self.basename),'w') + f.write(str(summary)) + f.close() + + # now plot the organized data in various ways + n_cols = 3 + n_plots = n_year + 1 + n_left = n_plots % n_cols + n_rows = n_plots // n_cols + (1 if n_left else 0) + wid = 2.5*n_cols + ht = 2.0*n_rows + + # use this color map for Saphir-Simpson scale + red_cmap = ['#ffd2a3','#ffa749','#ff7c04', \ + '#ea4f00','#c92500','#a80300'] + + red_cmap_pats = [] + q = 0 + while q < 6: + red_cmap_pats.append( \ + plt_mp.Patch(color=red_cmap[q], label='cat %d'%(q))) + q += 1 + + # plot annual saphir-simpson distribution + page_no = 1 + cat_fig = plt.figure() + cat_fig.set_size_inches(wid, ht) + + max_y = 0 + q = 0 + while q < n_year: + max_y = max(max_y, len(np.where(annual_cat[q]==0)[0])) + q += 1 + + q = 0 + for yy in uyear: plt.subplot(n_rows, n_cols, q+1) ax = plt.gca() ax.grid(zorder=0) - l = plt.legend(handles=red_cmap_pats, loc=2, bbox_to_anchor=(0.0, 1.0)) - plt.axis('off') - - plt.suptitle('Annual Saphir-Simpson Distribution', fontweight='bold') - plt.subplots_adjust(hspace=0.4, top=0.92) - - plt.savefig('%s_annual_saphire_simpson_distribution_%d.png'%( \ - 
self.basename, page_no), dpi=self.dpi) - - # break annual distributions down by month - mos_fig = plt.figure() - mos_fig.set_size_inches(wid, ht) + n,bins,pats = plt.hist(annual_cat[q], bins=np.arange(-0.5, 6.0, 1.0), \ + facecolor='steelblue', alpha=0.95, edgecolor='black', \ + linewidth=2, zorder=3) + j = 0 + while j < 6: + pats[j].set_facecolor(red_cmap[j]) + j += 1 + plt.xticks(np.arange(0,6,1)) + if self.rel_axes: + ax.set_ylim([0, max_y*1.05]) + if (q%n_cols == 0): + plt.ylabel('Count', fontweight='normal', fontsize=10) + if (q >= (n_year - n_cols)): + plt.xlabel('Category', fontweight='normal', fontsize=10) + plt.title('%d'%(yy), fontweight='bold', fontsize=11) + plt.grid(True) + + q += 1 + + plt.subplot(n_rows, n_cols, q+1) + ax = plt.gca() + ax.grid(zorder=0) + l = plt.legend(handles=red_cmap_pats, loc=2, bbox_to_anchor=(0.0, 1.0)) + plt.axis('off') + + plt.suptitle('Annual Saphir-Simpson Distribution', fontweight='bold') + plt.subplots_adjust(hspace=0.4, top=0.92) + + plt.savefig('%s_annual_saphire_simpson_distribution_%d.png'%( \ + self.basename, page_no), dpi=self.dpi) + + # break annual distributions down by month + mos_fig = plt.figure() + mos_fig.set_size_inches(wid, ht) + + max_y = 0 + q = 0 + while q < n_year: + p = 0 + while p < 12: + max_y = max(max_y, sum(by_month[q][p])) + p += 1 + q += 1 - max_y = 0 - q = 0 - while q < n_year: + q = 0 + for yy in uyear: + plt.subplot(n_rows, n_cols, q+1) + ax = plt.gca() + ax.grid(zorder=0) + # build up a stacked bar chart, each category is a layer + # copy that cat for all months into a temp array then add + # it to the plot at the right hight and color. 
+ mcts = by_month[q] + bot = np.zeros((12)) + c = 0 + while c < 6: + tmp = [] p = 0 while p < 12: - max_y = max(max_y, sum(by_month[q][p])) + tmp.append(mcts[p][c]) p += 1 - q += 1 - - q = 0 - for yy in uyear: - plt.subplot(n_rows, n_cols, q+1) - ax = plt.gca() - ax.grid(zorder=0) - # build up a stacked bar chart, each category is a layer - # copy that cat for all months into a temp array then add - # it to the plot at the right hight and color. - mcts = by_month[q] - bot = np.zeros((12)) - c = 0 - while c < 6: - tmp = [] - p = 0 - while p < 12: - tmp.append(mcts[p][c]) - p += 1 - plt.bar(np.arange(1,13,1)-0.375, tmp, width=0.75, bottom=bot, \ - facecolor=red_cmap[c], edgecolor='k', linewidth=1, \ - tick_label=['J','F','M','A','M','J','J','A','S','O','N','D'], \ - zorder=3) - bot += tmp - c += 1 - - plt.xticks(np.arange(1,13,1)) - if self.rel_axes: - ax.set_ylim([0, 1.05*max_y]) - if (q%n_cols == 0): - plt.ylabel('Count', fontweight='normal', fontsize=10) - if (q >= (n_year - n_cols)): - plt.xlabel('Month', fontweight='normal', fontsize=10) - plt.title('%d'%(yy), fontweight='bold', fontsize=11) - plt.grid(True) - - q += 1 - + plt.bar(np.arange(1,13,1)-0.375, tmp, width=0.75, bottom=bot, \ + facecolor=red_cmap[c], edgecolor='k', linewidth=1, \ + tick_label=['J','F','M','A','M','J','J','A','S','O','N','D'], \ + zorder=3) + bot += tmp + c += 1 + + plt.xticks(np.arange(1,13,1)) + if self.rel_axes: + ax.set_ylim([0, 1.05*max_y]) + if (q%n_cols == 0): + plt.ylabel('Count', fontweight='normal', fontsize=10) + if (q >= (n_year - n_cols)): + plt.xlabel('Month', fontweight='normal', fontsize=10) + plt.title('%d'%(yy), fontweight='bold', fontsize=11) + plt.grid(True) + + q += 1 + + plt.subplot(n_rows, n_cols, q+1) + ax = plt.gca() + ax.grid(zorder=0) + l = plt.legend(handles=red_cmap_pats, loc=2, bbox_to_anchor=(0.0, 1.0)) + plt.axis('off') + + plt.suptitle('Monthly Breakdown', fontweight='bold') + plt.subplots_adjust(hspace=0.4, top=0.92) + + 
plt.savefig('%s_monthly_breakdown_%d.png'%( \ + self.basename, page_no), dpi=self.dpi) + + # plot annual counts by region + reg_fig = plt.figure() + reg_fig.set_size_inches(wid, ht) + + rcds = list(zip(*ureg))[1] + rcds += ('NH', 'SH', 'G') + + max_y = 0 + q = 0 + while q < n_year: + j = 0 + while j < n_reg: + max_y = max(max_y, sum(by_region[q][j])) + j += 1 + q += 1 + + q = 0 + for yy in uyear: plt.subplot(n_rows, n_cols, q+1) ax = plt.gca() ax.grid(zorder=0) - l = plt.legend(handles=red_cmap_pats, loc=2, bbox_to_anchor=(0.0, 1.0)) - plt.axis('off') - - plt.suptitle('Monthly Breakdown', fontweight='bold') - plt.subplots_adjust(hspace=0.4, top=0.92) - - plt.savefig('%s_monthly_breakdown_%d.png'%( \ - self.basename, page_no), dpi=self.dpi) - - # plot annual counts by region - reg_fig = plt.figure() - reg_fig.set_size_inches(wid, ht) - - rcds = list(zip(*ureg))[1] - rcds += ('NH', 'SH', 'G') + # build up a stacked bar chart, each category is a layer + # copy that cat for all months into a temp array then add + # it to the plot at the right height and color. 
+ rcnts = by_region[q] + bot = np.zeros((n_reg)) + c = 0 + while c < 6: + tmp = [] + p = 0 + while p < n_reg: + tmp.append(rcnts[p][c]) + p += 1 - max_y = 0 + plt.bar(np.arange(0,n_reg,1)-0.375, tmp, width=0.75, bottom=bot, \ + facecolor=red_cmap[c], edgecolor='k', linewidth=1, \ + tick_label=rcds, \ + zorder=3) + + bot += tmp + c += 1 + + plt.xticks(np.arange(0,n_reg,1), rotation='vertical') + if self.rel_axes: + ax.set_ylim([0, 1.05*max_y]) + if (q%n_cols == 0): + plt.ylabel('Count', fontweight='normal', fontsize=10) + if (q >= (n_year - n_cols)): + plt.xlabel('Region', fontweight='normal', fontsize=10) + plt.title('%d'%(yy), fontweight='bold', fontsize=11) + plt.grid(True) + + q += 1 + + # add the color map legend + plt.subplot(n_rows, n_cols, q+1) + ax = plt.gca() + ax.grid(zorder=0) + l = plt.legend(handles=red_cmap_pats, loc=2, bbox_to_anchor=(0.0, 1.0)) + plt.axis('off') + + plt.suptitle('Regional Breakdown', fontweight='bold') + plt.subplots_adjust(wspace=0.3, hspace=0.6, top=0.92) + + plt.savefig('%s_regional_break_down_%d.png'%( \ + self.basename, page_no), dpi=self.dpi) + + # plot annual distributions + dist_fig = plt.figure() + + wid = n_year*0.65 + dist_fig.set_size_inches(wid, 9.0) + + ax = plt.subplot(5,1,1) + plt.boxplot(annual_wind, labels=uyear) + plt.xlabel('Year') + plt.ylabel('ms^-1') + plt.title('Peak Instantaneous Wind', fontweight='bold') + ax.get_yaxis().set_label_coords(-0.1,0.5) + + ax = plt.subplot(5,1,2) + plt.boxplot(annual_press, labels=uyear) + plt.xlabel('Year') + plt.ylabel('Pa') + plt.title('Min Instantaneous Pressure', fontweight='bold') + ax.get_yaxis().set_label_coords(-0.1,0.5) + + ax = plt.subplot(5,1,3) + plt.boxplot(annual_dur, labels=uyear) + plt.xlabel('Year') + plt.ylabel('%s'%(time_units.split()[0])) + plt.title('Track Duration', fontweight='bold') + ax.get_yaxis().set_label_coords(-0.1,0.5) + + ax = plt.subplot(5,1,4) + plt.boxplot(annual_len, labels=uyear) + plt.xlabel('Year') + plt.ylabel('km') + plt.title('Track 
Length', fontweight='bold') + ax.get_yaxis().set_label_coords(-0.1,0.5) + + ax = plt.subplot(5,1,5) + #plt.axhline(82,color='k',linestyle='--',alpha=0.25) + plt.boxplot(annual_ACE, labels=uyear) + plt.xlabel('Year') + plt.ylabel('10^4 kn^2') + plt.title('ACE', fontweight='bold') + ax.get_yaxis().set_label_coords(-0.1,0.5) + + plt.suptitle('Distributions', fontweight='bold') + plt.subplots_adjust(hspace=0.72, top=0.93) + + plt.savefig('%s_distribution_%d.png'%( \ + self.basename, page_no), dpi=self.dpi) + + # plot region over time + reg_t_fig = plt.figure() + + rnms = list(zip(*ureg))[2] + rnms += ('Northern', 'Southern', 'Global') + + tmp = np.array(uyear) + tmp = tmp - tmp/100*100 + ynms = [] + for t in tmp: + ynms.append('%02d'%t) + + n_plots = n_reg + 1 + n_left = n_plots % n_cols + n_rows = n_plots // n_cols + (1 if n_left else 0) + wid = 2.5*n_cols + ht = 2.0*n_rows + reg_t_fig.set_size_inches(wid, ht) + + reg_by_t = [] + p = 0 + while p < n_reg: + reg = [] q = 0 while q < n_year: - j = 0 - while j < n_reg: - max_y = max(max_y, sum(by_region[q][j])) - j += 1 + reg.append(by_region[q][p]) q += 1 + reg_by_t.append(reg) + p += 1 + + max_y_reg = -1 + max_y_hem = -1 + q = 0 + while q < n_reg: + dat = reg_by_t[q] + p = 0 + while p < n_year: + if q < n_reg-3: + max_y_reg = max(max_y_reg, sum(dat[p])) + elif q < n_reg-1: + max_y_hem = max(max_y_hem, sum(dat[p])) + p += 1 + q += 1 - q = 0 - for yy in uyear: - plt.subplot(n_rows, n_cols, q+1) - ax = plt.gca() - ax.grid(zorder=0) - # build up a stacked bar chart, each category is a layer - # copy that cat for all months into a temp array then add - # it to the plot at the right height and color. 
- rcnts = by_region[q] - bot = np.zeros((n_reg)) - c = 0 - while c < 6: - tmp = [] - p = 0 - while p < n_reg: - tmp.append(rcnts[p][c]) - p += 1 - - plt.bar(np.arange(0,n_reg,1)-0.375, tmp, width=0.75, bottom=bot, \ - facecolor=red_cmap[c], edgecolor='k', linewidth=1, \ - tick_label=rcds, \ - zorder=3) - - bot += tmp - c += 1 - - plt.xticks(np.arange(0,n_reg,1), rotation='vertical') - if self.rel_axes: - ax.set_ylim([0, 1.05*max_y]) - if (q%n_cols == 0): - plt.ylabel('Count', fontweight='normal', fontsize=10) - if (q >= (n_year - n_cols)): - plt.xlabel('Region', fontweight='normal', fontsize=10) - plt.title('%d'%(yy), fontweight='bold', fontsize=11) - plt.grid(True) - - q += 1 + q = 0 + while q < n_reg: + dat = reg_by_t[q] - # add the color map legend plt.subplot(n_rows, n_cols, q+1) ax = plt.gca() ax.grid(zorder=0) - l = plt.legend(handles=red_cmap_pats, loc=2, bbox_to_anchor=(0.0, 1.0)) - plt.axis('off') - - plt.suptitle('Regional Breakdown', fontweight='bold') - plt.subplots_adjust(wspace=0.3, hspace=0.6, top=0.92) - - plt.savefig('%s_regional_break_down_%d.png'%( \ - self.basename, page_no), dpi=self.dpi) - - # plot annual distributions - dist_fig = plt.figure() - - wid = n_year*0.65 - dist_fig.set_size_inches(wid, 9.0) - - ax = plt.subplot(5,1,1) - plt.boxplot(annual_wind, labels=uyear) - plt.xlabel('Year') - plt.ylabel('ms^-1') - plt.title('Peak Instantaneous Wind', fontweight='bold') - ax.get_yaxis().set_label_coords(-0.1,0.5) - - ax = plt.subplot(5,1,2) - plt.boxplot(annual_press, labels=uyear) - plt.xlabel('Year') - plt.ylabel('Pa') - plt.title('Min Instantaneous Pressure', fontweight='bold') - ax.get_yaxis().set_label_coords(-0.1,0.5) - - ax = plt.subplot(5,1,3) - plt.boxplot(annual_dur, labels=uyear) - plt.xlabel('Year') - plt.ylabel('%s'%(time_units.split()[0])) - plt.title('Track Duration', fontweight='bold') - ax.get_yaxis().set_label_coords(-0.1,0.5) - - ax = plt.subplot(5,1,4) - plt.boxplot(annual_len, labels=uyear) - plt.xlabel('Year') - 
plt.ylabel('km') - plt.title('Track Length', fontweight='bold') - ax.get_yaxis().set_label_coords(-0.1,0.5) - - ax = plt.subplot(5,1,5) - #plt.axhline(82,color='k',linestyle='--',alpha=0.25) - plt.boxplot(annual_ACE, labels=uyear) - plt.xlabel('Year') - plt.ylabel('10^4 kn^2') - plt.title('ACE', fontweight='bold') - ax.get_yaxis().set_label_coords(-0.1,0.5) - - plt.suptitle('Distributions', fontweight='bold') - plt.subplots_adjust(hspace=0.72, top=0.93) - - plt.savefig('%s_distribution_%d.png'%( \ - self.basename, page_no), dpi=self.dpi) - - # plot region over time - reg_t_fig = plt.figure() - - rnms = list(zip(*ureg))[2] - rnms += ('Northern', 'Southern', 'Global') - - tmp = np.array(uyear) - tmp = tmp - tmp/100*100 - ynms = [] - for t in tmp: - ynms.append('%02d'%t) - - n_plots = n_reg + 1 - n_left = n_plots%n_cols - n_rows = n_plots/n_cols + (1 if n_left else 0) - wid = 2.5*n_cols - ht = 2.0*n_rows - reg_t_fig.set_size_inches(wid, ht) - - reg_by_t = [] - p = 0 - while p < n_reg: - reg = [] - q = 0 - while q < n_year: - reg.append(by_region[q][p]) - q += 1 - reg_by_t.append(reg) - p += 1 - max_y_reg = -1 - max_y_hem = -1 - q = 0 - while q < n_reg: - dat = reg_by_t[q] + # build up a stacked bar chart, each category is a layer + # copy that cat for all months into a temp array then add + # it to the plot at the right height and color. + bot = np.zeros((n_year)) + c = 0 + while c < 6: + tmp = [] p = 0 while p < n_year: - if q < n_reg-3: - max_y_reg = max(max_y_reg, sum(dat[p])) - elif q < n_reg-1: - max_y_hem = max(max_y_hem, sum(dat[p])) + tmp.append(dat[p][c]) p += 1 - q += 1 - q = 0 - while q < n_reg: - dat = reg_by_t[q] - - plt.subplot(n_rows, n_cols, q+1) - ax = plt.gca() - ax.grid(zorder=0) - - # build up a stacked bar chart, each category is a layer - # copy that cat for all months into a temp array then add - # it to the plot at the right height and color. 
- bot = np.zeros((n_year)) - c = 0 - while c < 6: - tmp = [] - p = 0 - while p < n_year: - tmp.append(dat[p][c]) - p += 1 - - plt.bar(np.arange(0,n_year,1)-0.375, tmp, width=0.75, bottom=bot, \ - facecolor=red_cmap[c], edgecolor='k', linewidth=1, \ - tick_label=ynms, \ - zorder=3) - - bot += tmp - c += 1 - - plt.xticks(np.arange(0,n_year,1), rotation='vertical') - if self.rel_axes and q < n_reg - 1: - ax.set_ylim([0, 1.05*(max_y_reg if q < n_reg - 3 else max_y_hem)]) - if (q%n_cols == 0): - plt.ylabel('Count', fontweight='normal', fontsize=10) - if (q >= (n_reg - n_cols)): - plt.xlabel('Year', fontweight='normal', fontsize=10) - plt.title('%s'%(rnms[q]), fontweight='bold', fontsize=11) - plt.grid(True) + plt.bar(np.arange(0,n_year,1)-0.375, tmp, width=0.75, bottom=bot, \ + facecolor=red_cmap[c], edgecolor='k', linewidth=1, \ + tick_label=ynms, \ + zorder=3) - q += 1 + bot += tmp + c += 1 - plt.suptitle('Regional Trend', fontweight='bold') - plt.subplots_adjust(wspace=0.3, hspace=0.6, top=0.92) + plt.xticks(np.arange(0,n_year,1), rotation='vertical') + if self.rel_axes and q < n_reg - 1: + ax.set_ylim([0, 1.05*(max_y_reg if q < n_reg - 3 else max_y_hem)]) + if (q%n_cols == 0): + plt.ylabel('Count', fontweight='normal', fontsize=10) + if (q >= (n_reg - n_cols)): + plt.xlabel('Year', fontweight='normal', fontsize=10) + plt.title('%s'%(rnms[q]), fontweight='bold', fontsize=11) + plt.grid(True) - # add the color map legend - plt.subplot(n_rows, n_cols, q+1) - ax = plt.gca() - ax.grid(zorder=0) - l = plt.legend(handles=red_cmap_pats, loc=2, bbox_to_anchor=(0.0, 1.0)) - plt.axis('off') + q += 1 + + plt.suptitle('Regional Trend', fontweight='bold') + plt.subplots_adjust(wspace=0.3, hspace=0.6, top=0.92) + + # add the color map legend + plt.subplot(n_rows, n_cols, q+1) + ax = plt.gca() + ax.grid(zorder=0) + l = plt.legend(handles=red_cmap_pats, loc=2, bbox_to_anchor=(0.0, 1.0)) + plt.axis('off') + + plt.savefig('%s_regional_trend_%d.png'%( \ + self.basename, page_no), 
dpi=self.dpi) - plt.savefig('%s_regional_trend_%d.png'%( \ - self.basename, page_no), dpi=self.dpi) + if (self.interactive): + plt.show() - if (self.interactive): - plt.show() + # restore matplotlib global state + plt.rcParams['legend.frameon'] = legend_frame_on_orig - # restore matplotlib global state - plt.rcParams['legend.frameon'] = legend_frame_on_orig + # send data downstream + return summary - # send data downstream - return summary - return execute diff --git a/alg/teca_tc_trajectory_scalars.py b/alg/teca_tc_trajectory_scalars.py index 7499f136e..e0e767e67 100644 --- a/alg/teca_tc_trajectory_scalars.py +++ b/alg/teca_tc_trajectory_scalars.py @@ -1,8 +1,7 @@ import sys -import teca_py import numpy as np -class teca_tc_trajectory_scalars(teca_py.teca_python_algorithm): +class teca_tc_trajectory_scalars(teca_python_algorithm): """ Computes summary statistics, histograms on sorted, classified, TC trajectory output. @@ -58,362 +57,356 @@ def set_plot_peak_radius(self, plot_peak_radius): """ self.plot_peak_radius = plot_peak_radius - def get_execute_callback(self): + def execute(self, port, data_in, req): """ - return a teca_algorithm::execute function. a closure - is used to gain self. + expects the output of the teca_tc_classify algorithm + generates a handful of histograms, summary statistics, + and plots. returns summary table with counts of annual + storms and their categories. """ - def execute(port, data_in, req): - """ - expects the output of the teca_tc_classify algorithm - generates a handful of histograms, summary statistics, - and plots. returns summary table with counts of annual - storms and their categories. 
- """ - #sys.stderr.write('teca_tc_trajectory_scalars::execute\n') - - import matplotlib.pyplot as plt - import matplotlib.patches as plt_mp - import matplotlib.image as plt_img - import matplotlib.gridspec as plt_gridspec - - # store matplotlib state we modify - legend_frame_on_orig = plt.rcParams['legend.frameon'] - - # tweak matplotlib slightly - plt.rcParams['figure.max_open_warning'] = 0 - plt.rcParams['legend.frameon'] = 1 - - # get the input table - in_table = teca_py.as_teca_table(data_in[0]) - if in_table is None: - # TODO if this is part of a parallel pipeline then - # only rank 0 should report an error. - sys.stderr.write('ERROR: empty input, or not a table\n') - return teca_py.teca_table.New() - - # use this color map for Saphir-Simpson scale - red_cmap = ['#ffd2a3','#ffa749','#ff7c04', \ - '#ea4f00','#c92500','#a80300'] - - km_per_deg_lat = 111 - - time_units = in_table.get_time_units() - - time = in_table.get_column('time').as_array() - step = in_table.get_column('step').as_array() - track = in_table.get_column('track_id').as_array() - - lon = in_table.get_column('lon').as_array() - lat = in_table.get_column('lat').as_array() - - year = in_table.get_column('year').as_array() - month = in_table.get_column('month').as_array() - day = in_table.get_column('day').as_array() - hour = in_table.get_column('hour').as_array() - minute = in_table.get_column('minute').as_array() - - wind = in_table.get_column('surface_wind').as_array() - vort = in_table.get_column('850mb_vorticity').as_array() - psl = in_table.get_column('sea_level_pressure').as_array() - temp = in_table.get_column('core_temp').as_array() - have_temp = in_table.get_column('have_core_temp').as_array() - thick = in_table.get_column('thickness').as_array() - have_thick = in_table.get_column('have_thickness').as_array() - speed = in_table.get_column('storm_speed').as_array() - - wind_rad = [] - i = 0 - while i < 5: - col_name = 'wind_radius_%d'%(i) - if in_table.has_column(col_name): - 
wind_rad.append(in_table.get_column(col_name).as_array()) - i += 1 - peak_rad = in_table.get_column('peak_radius').as_array() \ - if in_table.has_column('peak_radius') else None - - # get the list of unique track ids, this is our loop index - utrack = sorted(set(track)) - nutracks = len(utrack) - - # load background image - if (self.tex is None) and self.tex_file: - self.tex = plt_img.imread(self.tex_file) - - for i in utrack: - #sys.stderr.write('processing track %d\n'%(i)) - sys.stderr.write('.') - - fig = plt.figure() - fig.set_size_inches(10,9.75) - - ii = np.where(track == i)[0] - - # get the scalar values for this storm - lon_i = lon[ii] - lat_i = lat[ii] - wind_i = wind[ii] - psl_i = psl[ii] - vort_i = vort[ii] - thick_i = thick[ii] - temp_i = temp[ii] - speed_i = speed[ii]/24.0 - - wind_rad_i = [] - for col in wind_rad: - wind_rad_i.append(col[ii]) - peak_rad_i = peak_rad[ii] if peak_rad is not None else None - - # construct the title - q = ii[0] - r = ii[-1] - - t0 = time[q] - t1 = time[r] - - s0 = step[q] - s1 = step[r] - - Y0 = year[q] - Y1 = year[r] - - M0 = month[q] - M1 = month[r] - - D0 = day[q] - D1 = day[r] - - h0 = hour[q] - h1 = hour[r] - - m0 = minute[q] - m1 = minute[r] - - tt = time[ii] - t0 - - cat = teca_py.teca_tc_saffir_simpson.classify_mps(float(np.max(wind_i))) - - plt.suptitle( \ - 'Track %d, cat %d, steps %d - %d\n%d/%d/%d %d:%d:00 - %d/%d/%d %d:%d:00'%(\ - i, cat, s0, s1, Y0, M0, D0, h0, m0, Y1, M1, D1, h1, m1), \ - fontweight='bold') - - # plot the scalars - gs = plt_gridspec.GridSpec(5, 4) - - plt.subplot2grid((5,4),(0,0),colspan=2,rowspan=2) - # prepare the texture - if self.tex is not None: - ext = [np.min(lon_i), np.max(lon_i), np.min(lat_i), np.max(lat_i)] - if self.axes_equal: - w = ext[1]-ext[0] - h = ext[3]-ext[2] - if w > h: - c = (ext[2] + ext[3])/2.0 - w2 = w/2.0 - ext[2] = c - w2 - ext[3] = c + w2 - else: - c = (ext[0] + ext[1])/2.0 - h2 = h/2.0 - ext[0] = c - h2 - ext[1] = c + h2 - border_size = 0.15 - wrimax = 0 if 
peak_rad_i is None else \ - max(0 if not self.plot_peak_radius else \ - np.max(peak_rad_i), np.max(wind_rad_i[0])) - dlon = max(wrimax, (ext[1] - ext[0])*border_size) - dlat = max(wrimax, (ext[3] - ext[2])*border_size) - ext[0] = max(ext[0] - dlon, 0.0) - ext[1] = min(ext[1] + dlon, 360.0) - ext[2] = max(ext[2] - dlat, -90.0) - ext[3] = min(ext[3] + dlat, 90.0) - i0 = int(self.tex.shape[1]/360.0*ext[0]) - i1 = int(self.tex.shape[1]/360.0*ext[1]) - j0 = int(-((ext[3] + 90.0)/180.0 - 1.0)*self.tex.shape[0]) - j1 = int(-((ext[2] + 90.0)/180.0 - 1.0)*self.tex.shape[0]) - plt.imshow(self.tex[j0:j1, i0:i1], extent=ext, aspect='auto') - - edge_color = '#ffff00' if self.tex is not None else 'b' - - # plot the storm size - if peak_rad_i is None: - plt.plot(lon_i, lat_i, '.', linewidth=2, color=edge_color) - else: - # compute track unit normals - npts = len(ii) - norm_x = np.zeros(npts) - norm_y = np.zeros(npts) - npts -= 1 - q = 1 - while q < npts: - norm_x[q] = lat_i[q+1] - lat_i[q-1] - norm_y[q] = -(lon_i[q+1] - lon_i[q-1]) - nmag = np.sqrt(norm_x[q]**2 + norm_y[q]**2) - norm_x[q] = norm_x[q]/nmag - norm_y[q] = norm_y[q]/nmag - q += 1 - # normal at first and last point on the track - norm_x[0] = lat_i[1] - lat_i[0] - norm_y[0] = -(lon_i[1] - lon_i[0]) - norm_x[0] = norm_x[0]/nmag - norm_y[0] = norm_y[0]/nmag - norm_x[npts] = lat_i[npts] - lat_i[npts-1] - norm_y[npts] = -(lon_i[npts] - lon_i[npts-1]) - norm_x[npts] = norm_x[npts]/nmag - norm_y[npts] = norm_y[npts]/nmag - # for each wind radius, render a polygon of width 2*wind - # centered on the track. 
have to break it into continuous - # segments - nwri = len(wind_rad_i) - q = nwri - 1 - while q >= 0: - self.plot_wind_rad(lon_i, lat_i, norm_x, norm_y, \ - wind_rad_i[q], '-', edge_color if q==0 else red_cmap[q], \ - 2 if q==0 else 1, red_cmap[q], 0.98, q+4) - q -= 1 - # plot the peak radius - if (self.plot_peak_radius): - # peak radius is only valid if one of the other wind radii - # exist, zero out other values - kk = wind_rad_i[0] > 1.0e-6 - q = 1 - while q < nwri: - kk = np.logical_or(kk, wind_rad_i[q] > 1.0e-6) - q += 1 - peak_rad_i[np.logical_not(kk)] = 0.0 - self.plot_wind_rad(lon_i, lat_i, norm_x, norm_y, \ - peak_rad_i, '--', (0,0,0,0.25), 1, 'none', 1.00, nwri+4) - # mark track - marks = wind_rad_i[0] <= 1.0e-6 - q = 1 - while q < nwri: - marks = np.logical_and(marks, np.logical_not(wind_rad_i[q] > 1.0e-6)) - q += 1 - kk = np.where(marks)[0] - - plt.plot(lon_i[kk], lat_i[kk], '.', linewidth=2, \ - color=edge_color,zorder=10) + #sys.stderr.write('teca_tc_trajectory_scalars::execute\n') - plt.plot(lon_i[kk], lat_i[kk], '.', linewidth=1, \ - color='k', zorder=10, markersize=1) - - marks = wind_rad_i[0] > 1.0e-6 + import matplotlib.pyplot as plt + import matplotlib.patches as plt_mp + import matplotlib.image as plt_img + import matplotlib.gridspec as plt_gridspec + + # store matplotlib state we modify + legend_frame_on_orig = plt.rcParams['legend.frameon'] + + # tweak matplotlib slightly + plt.rcParams['figure.max_open_warning'] = 0 + plt.rcParams['legend.frameon'] = 1 + + # get the input table + in_table = as_teca_table(data_in[0]) + if in_table is None: + # TODO if this is part of a parallel pipeline then + # only rank 0 should report an error. 
+ sys.stderr.write('ERROR: empty input, or not a table\n') + return teca_table.New() + + # use this color map for Saphir-Simpson scale + red_cmap = ['#ffd2a3','#ffa749','#ff7c04', \ + '#ea4f00','#c92500','#a80300'] + + km_per_deg_lat = 111 + + time_units = in_table.get_time_units() + + time = in_table.get_column('time').as_array() + step = in_table.get_column('step').as_array() + track = in_table.get_column('track_id').as_array() + + lon = in_table.get_column('lon').as_array() + lat = in_table.get_column('lat').as_array() + + year = in_table.get_column('year').as_array() + month = in_table.get_column('month').as_array() + day = in_table.get_column('day').as_array() + hour = in_table.get_column('hour').as_array() + minute = in_table.get_column('minute').as_array() + + wind = in_table.get_column('surface_wind').as_array() + vort = in_table.get_column('850mb_vorticity').as_array() + psl = in_table.get_column('sea_level_pressure').as_array() + temp = in_table.get_column('core_temp').as_array() + have_temp = in_table.get_column('have_core_temp').as_array() + thick = in_table.get_column('thickness').as_array() + have_thick = in_table.get_column('have_thickness').as_array() + speed = in_table.get_column('storm_speed').as_array() + + wind_rad = [] + i = 0 + while i < 5: + col_name = 'wind_radius_%d'%(i) + if in_table.has_column(col_name): + wind_rad.append(in_table.get_column(col_name).as_array()) + i += 1 + peak_rad = in_table.get_column('peak_radius').as_array() \ + if in_table.has_column('peak_radius') else None + + # get the list of unique track ids, this is our loop index + utrack = sorted(set(track)) + nutracks = len(utrack) + + # load background image + if (self.tex is None) and self.tex_file: + self.tex = plt_img.imread(self.tex_file) + + for i in utrack: + #sys.stderr.write('processing track %d\n'%(i)) + sys.stderr.write('.') + + fig = plt.figure() + fig.set_size_inches(10,9.75) + + ii = np.where(track == i)[0] + + # get the scalar values for this storm + lon_i = 
lon[ii] + lat_i = lat[ii] + wind_i = wind[ii] + psl_i = psl[ii] + vort_i = vort[ii] + thick_i = thick[ii] + temp_i = temp[ii] + speed_i = speed[ii]/24.0 + + wind_rad_i = [] + for col in wind_rad: + wind_rad_i.append(col[ii]) + peak_rad_i = peak_rad[ii] if peak_rad is not None else None + + # construct the title + q = ii[0] + r = ii[-1] + + t0 = time[q] + t1 = time[r] + + s0 = step[q] + s1 = step[r] + + Y0 = year[q] + Y1 = year[r] + + M0 = month[q] + M1 = month[r] + + D0 = day[q] + D1 = day[r] + + h0 = hour[q] + h1 = hour[r] + + m0 = minute[q] + m1 = minute[r] + + tt = time[ii] - t0 + + cat = teca_tc_saffir_simpson.classify_mps(float(np.max(wind_i))) + + plt.suptitle( \ + 'Track %d, cat %d, steps %d - %d\n%d/%d/%d %d:%d:00 - %d/%d/%d %d:%d:00'%(\ + i, cat, s0, s1, Y0, M0, D0, h0, m0, Y1, M1, D1, h1, m1), \ + fontweight='bold') + + # plot the scalars + gs = plt_gridspec.GridSpec(5, 4) + + plt.subplot2grid((5,4),(0,0),colspan=2,rowspan=2) + # prepare the texture + if self.tex is not None: + ext = [np.min(lon_i), np.max(lon_i), np.min(lat_i), np.max(lat_i)] + if self.axes_equal: + w = ext[1]-ext[0] + h = ext[3]-ext[2] + if w > h: + c = (ext[2] + ext[3])/2.0 + w2 = w/2.0 + ext[2] = c - w2 + ext[3] = c + w2 + else: + c = (ext[0] + ext[1])/2.0 + h2 = h/2.0 + ext[0] = c - h2 + ext[1] = c + h2 + border_size = 0.15 + wrimax = 0 if peak_rad_i is None else \ + max(0 if not self.plot_peak_radius else \ + np.max(peak_rad_i), np.max(wind_rad_i[0])) + dlon = max(wrimax, (ext[1] - ext[0])*border_size) + dlat = max(wrimax, (ext[3] - ext[2])*border_size) + ext[0] = max(ext[0] - dlon, 0.0) + ext[1] = min(ext[1] + dlon, 360.0) + ext[2] = max(ext[2] - dlat, -90.0) + ext[3] = min(ext[3] + dlat, 90.0) + i0 = int(self.tex.shape[1]/360.0*ext[0]) + i1 = int(self.tex.shape[1]/360.0*ext[1]) + j0 = int(-((ext[3] + 90.0)/180.0 - 1.0)*self.tex.shape[0]) + j1 = int(-((ext[2] + 90.0)/180.0 - 1.0)*self.tex.shape[0]) + plt.imshow(self.tex[j0:j1, i0:i1], extent=ext, aspect='auto') + + edge_color = 
'#ffff00' if self.tex is not None else 'b' + + # plot the storm size + if peak_rad_i is None: + plt.plot(lon_i, lat_i, '.', linewidth=2, color=edge_color) + else: + # compute track unit normals + npts = len(ii) + norm_x = np.zeros(npts) + norm_y = np.zeros(npts) + npts -= 1 + q = 1 + while q < npts: + norm_x[q] = lat_i[q+1] - lat_i[q-1] + norm_y[q] = -(lon_i[q+1] - lon_i[q-1]) + nmag = np.sqrt(norm_x[q]**2 + norm_y[q]**2) + norm_x[q] = norm_x[q]/nmag + norm_y[q] = norm_y[q]/nmag + q += 1 + # normal at first and last point on the track + norm_x[0] = lat_i[1] - lat_i[0] + norm_y[0] = -(lon_i[1] - lon_i[0]) + norm_x[0] = norm_x[0]/nmag + norm_y[0] = norm_y[0]/nmag + norm_x[npts] = lat_i[npts] - lat_i[npts-1] + norm_y[npts] = -(lon_i[npts] - lon_i[npts-1]) + norm_x[npts] = norm_x[npts]/nmag + norm_y[npts] = norm_y[npts]/nmag + # for each wind radius, render a polygon of width 2*wind + # centered on the track. have to break it into continuous + # segments + nwri = len(wind_rad_i) + q = nwri - 1 + while q >= 0: + self.plot_wind_rad(lon_i, lat_i, norm_x, norm_y, \ + wind_rad_i[q], '-', edge_color if q==0 else red_cmap[q], \ + 2 if q==0 else 1, red_cmap[q], 0.98, q+4) + q -= 1 + # plot the peak radius + if (self.plot_peak_radius): + # peak radius is only valid if one of the other wind radii + # exist, zero out other values + kk = wind_rad_i[0] > 1.0e-6 q = 1 while q < nwri: - marks = np.logical_or(marks, wind_rad_i[q] > 1.0e-6) + kk = np.logical_or(kk, wind_rad_i[q] > 1.0e-6) q += 1 - kk = np.where(marks)[0] - - plt.plot(lon_i[kk], lat_i[kk], '.', linewidth=1, \ - color='k', zorder=10, markersize=2, alpha=0.1) - - # mark track start and end - plt.plot(lon_i[0], lat_i[0], 'o', markersize=6, markeredgewidth=2, \ - color=edge_color, markerfacecolor='g',zorder=10) - - plt.plot(lon_i[-1], lat_i[-1], '^', markersize=6, markeredgewidth=2, \ - color=edge_color, markerfacecolor='r',zorder=10) - - plt.grid(True) - plt.xlabel('deg lon') - plt.ylabel('deg lat') - plt.title('Track', 
fontweight='bold') - - plt.subplot2grid((5,4),(0,2),colspan=2) - plt.plot(tt, psl_i, 'b-', linewidth=2) - plt.grid(True) - plt.xlabel('time (days)') - plt.ylabel('millibars') - plt.title('Sea Level Pressure', fontweight='bold') - plt.xlim([0, tt[-1]]) - - plt.subplot2grid((5,4),(1,2),colspan=2) - plt.plot(tt, wind_i, 'b-', linewidth=2) + peak_rad_i[np.logical_not(kk)] = 0.0 + self.plot_wind_rad(lon_i, lat_i, norm_x, norm_y, \ + peak_rad_i, '--', (0,0,0,0.25), 1, 'none', 1.00, nwri+4) + # mark track + marks = wind_rad_i[0] <= 1.0e-6 + q = 1 + while q < nwri: + marks = np.logical_and(marks, np.logical_not(wind_rad_i[q] > 1.0e-6)) + q += 1 + kk = np.where(marks)[0] + + plt.plot(lon_i[kk], lat_i[kk], '.', linewidth=2, \ + color=edge_color,zorder=10) + + plt.plot(lon_i[kk], lat_i[kk], '.', linewidth=1, \ + color='k', zorder=10, markersize=1) + + marks = wind_rad_i[0] > 1.0e-6 + q = 1 + while q < nwri: + marks = np.logical_or(marks, wind_rad_i[q] > 1.0e-6) + q += 1 + kk = np.where(marks)[0] + + plt.plot(lon_i[kk], lat_i[kk], '.', linewidth=1, \ + color='k', zorder=10, markersize=2, alpha=0.1) + + # mark track start and end + plt.plot(lon_i[0], lat_i[0], 'o', markersize=6, markeredgewidth=2, \ + color=edge_color, markerfacecolor='g',zorder=10) + + plt.plot(lon_i[-1], lat_i[-1], '^', markersize=6, markeredgewidth=2, \ + color=edge_color, markerfacecolor='r',zorder=10) + + plt.grid(True) + plt.xlabel('deg lon') + plt.ylabel('deg lat') + plt.title('Track', fontweight='bold') + + plt.subplot2grid((5,4),(0,2),colspan=2) + plt.plot(tt, psl_i, 'b-', linewidth=2) + plt.grid(True) + plt.xlabel('time (days)') + plt.ylabel('millibars') + plt.title('Sea Level Pressure', fontweight='bold') + plt.xlim([0, tt[-1]]) + + plt.subplot2grid((5,4),(1,2),colspan=2) + plt.plot(tt, wind_i, 'b-', linewidth=2) + plt.grid(True) + plt.xlabel('time (days)') + plt.ylabel('ms^-1') + plt.title('Surface Wind', fontweight='bold') + plt.xlim([0, tt[-1]]) + + plt.subplot2grid((5,4),(2,0),colspan=2) + 
plt.plot(tt, speed_i, 'b-', linewidth=2) + plt.grid(True) + plt.xlabel('time (days)') + plt.ylabel('km d^-1') + plt.title('Propagation Speed', fontweight='bold') + plt.xlim([0, tt[-1]]) + + plt.subplot2grid((5,4),(2,2),colspan=2) + plt.plot(tt, vort_i, 'b-', linewidth=2) + plt.grid(True) + plt.xlabel('time (days)') + plt.ylabel('s^-1') + plt.title('Vorticity', fontweight='bold') + plt.xlim([0, tt[-1]]) + + plt.subplot2grid((5,4),(3,0),colspan=2) + plt.plot(tt, thick_i, 'b-', linewidth=2) + plt.grid(True) + plt.xlabel('time (days)') + plt.ylabel('meters') + plt.title('Thickness', fontweight='bold') + plt.xlim([0, tt[-1]]) + + plt.subplot2grid((5,4),(3,2),colspan=2) + plt.plot(tt, temp_i, 'b-', linewidth=2) + plt.grid(True) + plt.xlabel('time (days)') + plt.ylabel('deg K') + plt.title('Core Temperature', fontweight='bold') + plt.xlim([0, tt[-1]]) + + if peak_rad_i is not None: + plt.subplot2grid((5,4),(4,0),colspan=2) + q = len(wind_rad_i) - 1 + while q >= 0: + wr_i_q = km_per_deg_lat*wind_rad_i[q] + plt.fill_between(tt, 0, wr_i_q, color=red_cmap[q], alpha=0.9, zorder=q+3) + plt.plot(tt, wr_i_q, '-', linewidth=2, color=red_cmap[q], zorder=q+3) + q -= 1 + if (self.plot_peak_radius): + plt.plot(tt, km_per_deg_lat*peak_rad_i, 'k--', linewidth=1, zorder=10) + plt.plot(tt, np.zeros(len(tt)), 'w-', linewidth=2, zorder=10) plt.grid(True) plt.xlabel('time (days)') - plt.ylabel('ms^-1') - plt.title('Surface Wind', fontweight='bold') + plt.ylabel('radius (km)') + plt.title('Storm Size', fontweight='bold') plt.xlim([0, tt[-1]]) - - plt.subplot2grid((5,4),(2,0),colspan=2) - plt.plot(tt, speed_i, 'b-', linewidth=2) - plt.grid(True) - plt.xlabel('time (days)') - plt.ylabel('km d^-1') - plt.title('Propagation Speed', fontweight='bold') - plt.xlim([0, tt[-1]]) - - plt.subplot2grid((5,4),(2,2),colspan=2) - plt.plot(tt, vort_i, 'b-', linewidth=2) - plt.grid(True) - plt.xlabel('time (days)') - plt.ylabel('s^-1') - plt.title('Vorticity', fontweight='bold') - plt.xlim([0, tt[-1]]) - - 
plt.subplot2grid((5,4),(3,0),colspan=2) - plt.plot(tt, thick_i, 'b-', linewidth=2) - plt.grid(True) - plt.xlabel('time (days)') - plt.ylabel('meters') - plt.title('Thickness', fontweight='bold') - plt.xlim([0, tt[-1]]) - - plt.subplot2grid((5,4),(3,2),colspan=2) - plt.plot(tt, temp_i, 'b-', linewidth=2) - plt.grid(True) - plt.xlabel('time (days)') - plt.ylabel('deg K') - plt.title('Core Temperature', fontweight='bold') - plt.xlim([0, tt[-1]]) - - if peak_rad_i is not None: - plt.subplot2grid((5,4),(4,0),colspan=2) - q = len(wind_rad_i) - 1 - while q >= 0: - wr_i_q = km_per_deg_lat*wind_rad_i[q] - plt.fill_between(tt, 0, wr_i_q, color=red_cmap[q], alpha=0.9, zorder=q+3) - plt.plot(tt, wr_i_q, '-', linewidth=2, color=red_cmap[q], zorder=q+3) - q -= 1 - if (self.plot_peak_radius): - plt.plot(tt, km_per_deg_lat*peak_rad_i, 'k--', linewidth=1, zorder=10) - plt.plot(tt, np.zeros(len(tt)), 'w-', linewidth=2, zorder=10) - plt.grid(True) - plt.xlabel('time (days)') - plt.ylabel('radius (km)') - plt.title('Storm Size', fontweight='bold') - plt.xlim([0, tt[-1]]) - plt.ylim(ymin=0) - - plt.subplot2grid((5,4),(4,2)) - red_cmap_pats = [] - q = 0 - while q < 6: - red_cmap_pats.append( \ - plt_mp.Patch(color=red_cmap[q], label='R%d'%(q))) - q += 1 - if (self.plot_peak_radius): - red_cmap_pats.append(plt_mp.Patch(color='k', label='RP')) - l = plt.legend(handles=red_cmap_pats, loc=2, \ - bbox_to_anchor=(-0.1, 1.0), borderaxespad=0.0, \ - frameon=True, ncol=2) - plt.axis('off') - - plt.subplots_adjust(left=0.065, right=0.98, \ - bottom=0.05, top=0.9, wspace=0.6, hspace=0.7) - - plt.savefig('%s_%06d.png'%(self.basename, i), dpi=self.dpi) - if (not self.interactive): - plt.close(fig) - - if (self.interactive): - plt.show() - - out_table = teca_py.teca_table.New() - out_table.shallow_copy(in_table) - return out_table - return execute + plt.ylim(ymin=0) + + plt.subplot2grid((5,4),(4,2)) + red_cmap_pats = [] + q = 0 + while q < 6: + red_cmap_pats.append( \ + 
plt_mp.Patch(color=red_cmap[q], label='R%d'%(q))) + q += 1 + if (self.plot_peak_radius): + red_cmap_pats.append(plt_mp.Patch(color='k', label='RP')) + l = plt.legend(handles=red_cmap_pats, loc=2, \ + bbox_to_anchor=(-0.1, 1.0), borderaxespad=0.0, \ + frameon=True, ncol=2) + plt.axis('off') + + plt.subplots_adjust(left=0.065, right=0.98, \ + bottom=0.05, top=0.9, wspace=0.6, hspace=0.7) + + plt.savefig('%s_%06d.png'%(self.basename, i), dpi=self.dpi) + if (not self.interactive): + plt.close(fig) + + if (self.interactive): + plt.show() + + out_table = teca_table.New() + out_table.shallow_copy(in_table) + return out_table @staticmethod def render_poly(x, y, norm_x, norm_y, rad, edge_style, \ @@ -623,3 +616,4 @@ def plot_wind_rad(self, x, y, norm_x, norm_y, wind_rad, \ p0 = -1 qq += 1 + diff --git a/alg/teca_tc_wind_radii_stats.py b/alg/teca_tc_wind_radii_stats.py index fc66e8c6d..8e31fc78a 100644 --- a/alg/teca_tc_wind_radii_stats.py +++ b/alg/teca_tc_wind_radii_stats.py @@ -1,8 +1,7 @@ import sys -import teca_py import numpy as np -class teca_tc_wind_radii_stats(teca_py.teca_python_algorithm): +class teca_tc_wind_radii_stats(teca_python_algorithm): """ Computes statistics using track wind radii """ @@ -48,170 +47,164 @@ def set_output_prefix(self, output_prefix): """ self.output_prefix = output_prefix - def get_execute_callback(self): + def execute(self, port, data_in, req): """ - return a teca_algorithm::execute function. a closure - is used to gain self. + expects a table with track data containing wind radii computed + along each point of the track. produces statistical plots showing + the global distribution of wind radii. """ - def execute(port, data_in, req): - """ - expects a table with track data containing wind radii computed - along each point of the track. produces statistical plots showing - the global distribution of wind radii. 
- """ - track_table = teca_py.as_teca_table(data_in[0]) - - # plot stats - import matplotlib.pyplot as plt - import matplotlib.patches as plt_mp - from matplotlib.colors import LogNorm - - red_cmap = ['#ffd2a3','#ffa749','#ff7c04', \ - '#ea4f00','#c92500','#a80300'] - - km_per_deg_lat = 111 - km_s_per_m_hr = 3.6 - - fig = plt.figure(figsize=(9.25,6.75),dpi=self.dpi) - - # scatter - plt.subplot('331') - - if not track_table.has_column(self.wind_column): - sys.stderr.write('ERROR: track table missing %s\n'%(self.wind_column)) - sys.exit(-1) - - - year = track_table.get_column('year').as_array() - month = track_table.get_column('month').as_array() - day = track_table.get_column('day').as_array() - - ws = km_s_per_m_hr*track_table.get_column(self.wind_column).as_array() - - wr = [] - nwr = 0 - while track_table.has_column('wind_radius_%d'%(nwr)): - wr.append(km_per_deg_lat*track_table.get_column('wind_radius_%d'%(nwr)).as_array()) - nwr += 1 - - i = 0 - while i < nwr: - wc = teca_py.teca_tc_saffir_simpson.get_upper_bound_kmph(i-1) - wri = wr[i] - ii = np.where(wri > 0.0) - plt.scatter(wri[ii], ws[ii], c=red_cmap[i], alpha=0.25, marker='.', zorder=3+i) - i += 1 - - plt.ylabel('Wind speed (km/hr)', fontweight='normal', fontsize=10) - plt.title('R0 - R5 vs Wind speed', fontweight='bold', fontsize=11) - plt.grid(True) - ax = plt.gca() - ax.set_xlim([0.0, 6.0*km_per_deg_lat]) - - # all - plt.subplot('332') - i = 0 - while i < nwr: - wc = teca_py.teca_tc_saffir_simpson.get_upper_bound_kmph(i-1) - wri = wr[i] - n,bins,pats = plt.hist(wri[np.where(wri > 0.0)], 32, range=[0,6.0*km_per_deg_lat], \ - facecolor=red_cmap[i], alpha=0.95, edgecolor='black', \ - linewidth=2, zorder=3+i) - i += 1 - plt.ylabel('Number', fontweight='normal', fontsize=10) - plt.title('All R0 - R5', fontweight='bold', fontsize=11) - plt.grid(True) - ax = plt.gca() - ax.set_xlim([0.0, 6.0*km_per_deg_lat]) - - # r0 - r5 - i = 0 - while i < nwr: - plt.subplot(333+i) - wc = 
teca_py.teca_tc_saffir_simpson.get_upper_bound_kmph(i-1) - wri = wr[i] - wrii=wri[np.where(wri > 0.0)] - n,bins,pats = plt.hist(wrii, 32, \ - facecolor=red_cmap[i], alpha=1.00, edgecolor='black', \ - linewidth=2, zorder=3) - if ((i % 3) == 1): - plt.ylabel('Number', fontweight='normal', fontsize=10) - if (i >= 3): - plt.xlabel('Radius (km)', fontweight='normal', fontsize=10) - plt.title('R%d (%0.1f km/hr)'%(i,wc), fontweight='bold', fontsize=11) - plt.grid(True) - ax = plt.gca() - try: - ax.set_xlim([np.min(wrii), np.max(wrii)]) - except: - pass - i += 1 - - # legend - plt.subplot('339') - red_cmap_pats = [] - q = 0 - while q < nwr: - red_cmap_pats.append( \ - plt_mp.Patch(color=red_cmap[q], label='R%d'%(q))) - q += 1 - l = plt.legend(handles=red_cmap_pats, loc=2, bbox_to_anchor=(-0.1, 1.0), fancybox=True) - plt.axis('off') - - - plt.suptitle('Wind Radii %s/%d/%d - %s/%d/%d'%(month[0],day[0],year[0], \ - month[-1],day[-1],year[-1]), fontweight='bold', fontsize=12) - plt.subplots_adjust(hspace=0.35, wspace=0.35, top=0.90) - - plt.savefig(self.output_prefix + 'wind_radii_stats.png') - - fig = plt.figure(figsize=(7.5,4.0),dpi=100) - # peak radius - pr = km_per_deg_lat*track_table.get_column('peak_radius').as_array() - # peak radius is only valid if one of the other wind radii - # exist - kk = wr[0] > 1.0e-6 - q = 1 - while q < nwr: - kk = np.logical_or(kk, wr[q] > 1.0e-6) - q += 1 - pr = pr[kk] - - plt.subplot(121) - n,bins,pats = plt.hist(pr[np.where(pr > 0.0)], 24, \ - facecolor='steelblue', alpha=0.95, edgecolor='black', \ + track_table = as_teca_table(data_in[0]) + + # plot stats + import matplotlib.pyplot as plt + import matplotlib.patches as plt_mp + from matplotlib.colors import LogNorm + + red_cmap = ['#ffd2a3','#ffa749','#ff7c04', \ + '#ea4f00','#c92500','#a80300'] + + km_per_deg_lat = 111 + km_s_per_m_hr = 3.6 + + fig = plt.figure(figsize=(9.25,6.75),dpi=self.dpi) + + # scatter + plt.subplot(331) + + if not track_table.has_column(self.wind_column): + 
sys.stderr.write('ERROR: track table missing %s\n'%(self.wind_column)) + sys.exit(-1) + + + year = track_table.get_column('year').as_array() + month = track_table.get_column('month').as_array() + day = track_table.get_column('day').as_array() + + ws = km_s_per_m_hr*track_table.get_column(self.wind_column).as_array() + + wr = [] + nwr = 0 + while track_table.has_column('wind_radius_%d'%(nwr)): + wr.append(km_per_deg_lat*track_table.get_column('wind_radius_%d'%(nwr)).as_array()) + nwr += 1 + + i = 0 + while i < nwr: + wc = teca_tc_saffir_simpson.get_upper_bound_kmph(i-1) + wri = wr[i] + ii = np.where(wri > 0.0) + plt.scatter(wri[ii], ws[ii], c=red_cmap[i], alpha=0.25, marker='.', zorder=3+i) + i += 1 + + plt.ylabel('Wind speed (km/hr)', fontweight='normal', fontsize=10) + plt.title('R0 - R5 vs Wind speed', fontweight='bold', fontsize=11) + plt.grid(True) + ax = plt.gca() + ax.set_xlim([0.0, 6.0*km_per_deg_lat]) + + # all + plt.subplot(332) + i = 0 + while i < nwr: + wc = teca_tc_saffir_simpson.get_upper_bound_kmph(i-1) + wri = wr[i] + n,bins,pats = plt.hist(wri[np.where(wri > 0.0)], 32, range=[0,6.0*km_per_deg_lat], \ + facecolor=red_cmap[i], alpha=0.95, edgecolor='black', \ + linewidth=2, zorder=3+i) + i += 1 + plt.ylabel('Number', fontweight='normal', fontsize=10) + plt.title('All R0 - R5', fontweight='bold', fontsize=11) + plt.grid(True) + ax = plt.gca() + ax.set_xlim([0.0, 6.0*km_per_deg_lat]) + + # r0 - r5 + i = 0 + while i < nwr: + plt.subplot(333+i) + wc = teca_tc_saffir_simpson.get_upper_bound_kmph(i-1) + wri = wr[i] + wrii=wri[np.where(wri > 0.0)] + n,bins,pats = plt.hist(wrii, 32, \ + facecolor=red_cmap[i], alpha=1.00, edgecolor='black', \ linewidth=2, zorder=3) - plt.ylabel('Number', fontweight='normal', fontsize=10) - plt.xlabel('Radius (km)', fontweight='normal', fontsize=10) - plt.title('RP (radius at peak wind)', fontweight='bold', fontsize=11) - plt.grid(True) - ax = plt.gca() - ax.set_xlim([0.0, np.max(pr)]) - - # scatter - plt.subplot('122') - ii = 
np.where(pr > 0.0) - cnts,xe,ye,im = plt.hist2d(pr[ii], ws[ii], bins=24, norm=LogNorm(), zorder=2) - plt.ylabel('Wind speed (km/hr)', fontweight='normal', fontsize=10) - plt.xlabel('Radius (km)', fontweight='normal', fontsize=10) - plt.title('RP vs Wind speed', fontweight='bold', fontsize=11) + if ((i % 3) == 1): + plt.ylabel('Number', fontweight='normal', fontsize=10) + if (i >= 3): + plt.xlabel('Radius (km)', fontweight='normal', fontsize=10) + plt.title('R%d (%0.1f km/hr)'%(i,wc), fontweight='bold', fontsize=11) plt.grid(True) ax = plt.gca() - ax.set_xlim([0.0, np.max(pr)]) - - fig.subplots_adjust(right=0.85) - cbar_ax = fig.add_axes([0.88, 0.35, 0.05, 0.5]) - fig.colorbar(im, cax=cbar_ax) - - plt.suptitle('Wind Radii %s/%d/%d - %s/%d/%d'%(month[0],day[0],year[0], \ - month[-1],day[-1],year[-1]), fontweight='bold', fontsize=12) - plt.subplots_adjust(hspace=0.3, wspace=0.3, top=0.85) - - plt.savefig(self.output_prefix + 'peak_radius_stats.png') - - if self.interactive: - plt.show() - - # send data downstream - return track_table - return execute + try: + ax.set_xlim([np.min(wrii), np.max(wrii)]) + except: + pass + i += 1 + + # legend + plt.subplot(339) + red_cmap_pats = [] + q = 0 + while q < nwr: + red_cmap_pats.append( \ + plt_mp.Patch(color=red_cmap[q], label='R%d'%(q))) + q += 1 + l = plt.legend(handles=red_cmap_pats, loc=2, bbox_to_anchor=(-0.1, 1.0), fancybox=True) + plt.axis('off') + + + plt.suptitle('Wind Radii %s/%d/%d - %s/%d/%d'%(month[0],day[0],year[0], \ + month[-1],day[-1],year[-1]), fontweight='bold', fontsize=12) + plt.subplots_adjust(hspace=0.35, wspace=0.35, top=0.90) + + plt.savefig(self.output_prefix + 'wind_radii_stats.png') + + fig = plt.figure(figsize=(7.5,4.0),dpi=100) + # peak radius + pr = km_per_deg_lat*track_table.get_column('peak_radius').as_array() + # peak radius is only valid if one of the other wind radii + # exist + kk = wr[0] > 1.0e-6 + q = 1 + while q < nwr: + kk = np.logical_or(kk, wr[q] > 1.0e-6) + q += 1 + pr = pr[kk] + + 
plt.subplot(121) + n,bins,pats = plt.hist(pr[np.where(pr > 0.0)], 24, \ + facecolor='steelblue', alpha=0.95, edgecolor='black', \ + linewidth=2, zorder=3) + plt.ylabel('Number', fontweight='normal', fontsize=10) + plt.xlabel('Radius (km)', fontweight='normal', fontsize=10) + plt.title('RP (radius at peak wind)', fontweight='bold', fontsize=11) + plt.grid(True) + ax = plt.gca() + ax.set_xlim([0.0, np.max(pr)]) + + # scatter + plt.subplot(122) + ii = np.where(pr > 0.0) + cnts,xe,ye,im = plt.hist2d(pr[ii], ws[ii], bins=24, norm=LogNorm(), zorder=2) + plt.ylabel('Wind speed (km/hr)', fontweight='normal', fontsize=10) + plt.xlabel('Radius (km)', fontweight='normal', fontsize=10) + plt.title('RP vs Wind speed', fontweight='bold', fontsize=11) + plt.grid(True) + ax = plt.gca() + ax.set_xlim([0.0, np.max(pr)]) + + fig.subplots_adjust(right=0.85) + cbar_ax = fig.add_axes([0.88, 0.35, 0.05, 0.5]) + fig.colorbar(im, cax=cbar_ax) + + plt.suptitle('Wind Radii %s/%d/%d - %s/%d/%d'%(month[0],day[0],year[0], \ + month[-1],day[-1],year[-1]), fontweight='bold', fontsize=12) + plt.subplots_adjust(hspace=0.3, wspace=0.3, top=0.85) + + plt.savefig(self.output_prefix + 'peak_radius_stats.png') + + if self.interactive: + plt.show() + + # send data downstream + return track_table diff --git a/alg/teca_temporal_reduction.py b/alg/teca_temporal_reduction.py new file mode 100644 index 000000000..034d551f2 --- /dev/null +++ b/alg/teca_temporal_reduction.py @@ -0,0 +1,850 @@ +import sys +import numpy as np + + +class teca_temporal_reduction_internals: + class time_point: + """ + A structure holding a floating point time value and its + corresponding year, month day, hour minute and second + """ + def __init__(self, t, units, calendar): + self.t = t + self.units = units + self.calendar = calendar + + self.year, self.month, self.day, \ + self.hour, self.minutes, self.seconds = \ + calendar_util.date(t, self.units, self.calendar) + + def __str__(self): + return '%g (%s, %s) --> %04d-%02d-%02d 
%02d:%02d:%02g' % ( + self.t, self.units, self.calendar, self.year, self.month, + self.day, self.hour, self.minutes, self.seconds) + + class c_struct: + """ + A c like data structure + """ + def __init__(self, **kwds): + self.__dict__.update(kwds) + + def __str__(self): + strg = '' + for k, v in self.__dict__.items(): + strg += k + '=' + str(v) + ', ' + return strg + + class interval_iterator: + class season_iterator: + """ + An iterator over seasons (DJF, MAM, JJA, SON) between 2 + time_point's. A pair of time steps bracketing the current season + are returned at each iteration. Only full seasonal intervals are + processed. If the input data doesn't start or end on a seasonal + boundary it is skipped. + """ + + def __init__(self, t, units, calendar): + """ + t - an array of floating point time values + units - string units of the time values + calendar - string name of the calendar system + """ + self.t = t + self.units = units + + calendar = calendar.lower() + self.calendar = calendar + + # time point's to iterate between + self.t0 = teca_temporal_reduction_internals.time_point( + t[0], units, calendar) + + self.t1 = teca_temporal_reduction_internals.time_point( + t[-1], units, calendar) + + # current time state + self.year, self.month = \ + self.get_first_season(self.t0.year, self.t0.month) + + def get_season_name(self, month): + """ + returns one of DJF,MAM,JJA,SON based on the month passed in + """ + if (month == 12) or ((month >= 1) and (month <= 2)): + return 'DJF' + elif (month >= 3) and (month <= 5): + return 'MAM' + elif (month >= 6) and (month <= 8): + return 'JJA' + elif (month >= 9) and (month <= 11): + return 'SON' + + raise RuntimeError('Invalid month %d' % (month)) + + def get_first_season(self, y, m): + """ + given a year and month, checks that the values fall on + a seasonal boundary. if not, returns the year and month + of the start of the next season. 
+ """ + if (m == 12) or (m == 3) or (m == 6) or (m == 9): + return y, m + else: + return self.get_next_season(y, m) + + def get_season_end(self, year, month): + """ + Given a year and month returns the year month and day + of the end of the season. the input month need not be on + a seasonal boundary. + """ + if (month == 12): + y = year + 1 + m = 2 + elif (month >= 1) and (month <= 2): + y = year + m = 2 + elif (month >= 3) and (month <= 5): + y = year + m = 5 + elif (month >= 6) and (month <= 8): + y = year + m = 8 + elif (month >= 9) and (month <= 11): + y = year + m = 11 + else: + raise RuntimeError('Invalid month %d' % (month)) + + d = self.last_day_of_month(y, m) + + return y, m, d + + def get_next_season(self, year, month): + """ + Given a year and month returns the year and month + of the next season. the input momnth doesn't need to be + on a seasonal boundary. + """ + if (month == 12): + y = year + 1 + m = 3 + elif (month >= 1) and (month <= 2): + y = year + m = 3 + elif (month >= 3) and (month <= 5): + y = year + m = 6 + elif (month >= 6) and (month <= 8): + y = year + m = 9 + elif (month >= 9) and (month <= 11): + y = year + m = 12 + else: + raise RuntimeError('Invalid month %d' % (month)) + + return y, m + + def last_day_of_month(self, year, month): + """ + get the number of days in the month, with logic for + leap years + """ + return \ + calendar_util.days_in_month(self.calendar, + self.units, year, + month) + + def __iter__(self): + return self + + def __next__(self): + """ + return a pair of time steps bracketing the current month. + both returned time steps belong to the current month. 
+ """ + # get the end of the current season + ey, em, ed = self.get_season_end(self.year, self.month) + + # verify that we have data for the current season + if ((ey > self.t1.year) or + ((ey == self.t1.year) and (em > self.t1.month)) or + ((ey == self.t1.year) and (em == self.t1.month) and + (ed > self.t1.day))): + raise StopIteration + + # find the time step of the first day + sy = self.year + sm = self.month + + t0 = '%04d-%02d-01 00:00:00' % (sy, sm) + i0 = coordinate_util.time_step_of(self.t, True, True, + self.calendar, + self.units, t0) + + # find the time step of the last day + t1 = '%04d-%02d-%02d 23:59:59' % (ey, em, ed) + i1 = coordinate_util.time_step_of(self.t, True, True, + self.calendar, + self.units, t1) + + # move to next season + self.year, self.month = \ + self.get_next_season(sy, sm) + + return teca_temporal_reduction_internals.c_struct( + time=self.t[i0], year=sy, month=sm, + day=1, start_index=i0, end_index=i1) + + + class month_iterator: + """ + An iterator over all months between 2 time_point's. A pair + of time steps bracketing the current month are returned at + each iteration. 
+ """ + + def __init__(self, t, units, calendar): + """ + t - an array of floating point time values + units - string units of the time values + calendar - string name of the calendar system + """ + self.t = t + self.units = units + + calendar = calendar.lower() + self.calendar = calendar + + # time point's to iterate between + self.t0 = teca_temporal_reduction_internals.time_point( + t[0], units, calendar) + + self.t1 = teca_temporal_reduction_internals.time_point( + t[-1], units, calendar) + + # current time state + self.year = self.t0.year + self.month = self.t0.month + + def last_day_of_month(self): + """ + get the number of days in the month, with logic for + leap years + """ + return \ + calendar_util.days_in_month(self.calendar, + self.units, self.year, + self.month) + + def __iter__(self): + return self + + def __next__(self): + """ + return a pair of time steps bracketing the current month. + both returned time steps belong to the current month. + """ + # check for more months to process + if (self.year > self.t1.year) or \ + (self.year == self.t1.year) and \ + (self.month > self.t1.month): + raise StopIteration + + # find the time step of the first day + year = self.year + month = self.month + + t0 = '%04d-%02d-01 00:00:00' % (self.year, self.month) + i0 = coordinate_util.time_step_of(self.t, True, True, + self.calendar, + self.units, t0) + + # find the time step of the last day + n_days = self.last_day_of_month() + + t1 = '%04d-%02d-%02d 23:59:59' % \ + (self.year, self.month, n_days) + + i1 = coordinate_util.time_step_of(self.t, True, True, + self.calendar, + self.units, t1) + + # move to next month + self.month += 1 + + # move to next year + if self.month == 13: + self.month = 1 + self.year += 1 + + return teca_temporal_reduction_internals.c_struct( + time=self.t[i0], year=year, month=month, + day=1, start_index=i0, end_index=i1) + + class day_iterator: + """ + An iterator over all days between 2 time_point's. 
A pair + of time steps bracketing the current day are returned at + each iteration. + """ + + def __init__(self, t, units, calendar): + """ + t - an array of floating point time values + units - string units of the time values + calendar - string name of the calendar system + """ + # time values + self.t = t + self.units = units + + calendar = calendar.lower() + self.calendar = calendar + + # time point's to iterate between + self.t0 = teca_temporal_reduction_internals.time_point( + t[0], units, calendar) + + self.t1 = teca_temporal_reduction_internals.time_point( + t[-1], units, calendar) + + # current time state + self.year = self.t0.year + self.month = self.t0.month + self.day = self.t0.day + + def last_day_of_month(self): + """ + get the number of days in the month, with logic for + leap years + """ + return calendar_util.days_in_month( + self.calendar, self.units, self.year, self.month) + + def __iter__(self): + return self + + def __next__(self): + """ + return a pair of time steps bracketing the current month. + both returned time steps belong to the current month. 
+ """ + # check for more days to process + if (self.year > self.t1.year) or \ + ((self.year == self.t1.year) and + (self.month > self.t1.month)) or \ + ((self.year == self.t1.year) and + (self.month == self.t1.month) and + (self.day > self.t1.day)): + raise StopIteration + + # find the time step of the first day + year = self.year + month = self.month + day = self.day + + t0 = '%04d-%02d-%02d 00:00:00' % \ + (self.year, self.month, self.day) + + i0 = coordinate_util.time_step_of(self.t, True, True, + self.calendar, + self.units, t0) + + # find the time step of the last day + t1 = '%04d-%02d-%02d 23:59:59' % \ + (self.year, self.month, self.day) + + i1 = coordinate_util.time_step_of(self.t, True, True, + self.calendar, + self.units, t1) + + # move to next day + n_days = self.last_day_of_month() + self.day += 1 + + # move to next month + if self.day > n_days: + self.month += 1 + self.day = 1 + + # move to next year + if self.month == 13: + self.month = 1 + self.year += 1 + + return teca_temporal_reduction_internals.c_struct( + time=self.t[i0], year=year, month=month, day=day, + start_index=i0, end_index=i1) + + @staticmethod + def New(interval, t, units, calendar): + if interval == 'seasonal': + + return teca_temporal_reduction_internals. \ + interval_iterator.season_iterator(t, units, calendar) + + if interval == 'monthly': + + return teca_temporal_reduction_internals. \ + interval_iterator.month_iterator(t, units, calendar) + + elif interval == 'daily': + + return teca_temporal_reduction_internals. 
\ + interval_iterator.day_iterator(t, units, calendar) + + else: + + raise RuntimeError('Invlid interval %s' % (interval)) + + class reduction_operator: + class average: + def __init__(self): + self.count = None + self.fill_value = None + + def initialize(self, fill_value): + self.fill_value = fill_value + + def update(self, out_array, in_array): + # don't use integer types for this calculation + if in_array.dtype.kind == 'i': + in_array = in_array.astype(np.float32) \ + if in_array.itemsize < 8 else \ + in_array.astype(float64) + + if out_array.dtype.kind == 'i': + out_array = out_array.astype(np.float32) \ + if out_array.itemsize < 8 else \ + out_array.astype(float64) + + # identify the invalid values + if self.fill_value is not None: + out_is_bad = np.isclose(out_array, self.fill_value) + in_is_bad = np.isclose(in_array, self.fill_value) + + # initialize the count the first time through. this needs to + # happen now since before this we don't know where invalid + # values are. + if self.count is None: + if self.fill_value is None: + self.count = 1.0 + else: + self.count = np.where(out_is_bad, np.float32(0.0), + np.float32(1.0)) + + if self.fill_value is not None: + # update the count only where there is valid data + self.count += np.where(in_is_bad, np.float32(0.0), + np.float32(1.0)) + + # accumulate + tmp = np.where(out_is_bad, np.float32(0.0), out_array) \ + + np.where(in_is_bad, np.float32(0.0), in_array) + + else: + # update count + self.count += np.float32(1.0) + + # accumulate + tmp = out_array + in_array + + return tmp + + def finalize(self, out_array): + if self.fill_value is not None: + # finish the average. 
We keep track of the invalid + # values (these will have a zero count) set them to + # the fill value + n = self.count + ii = np.isclose(n, np.float32(0.0)) + n[ii] = np.float32(1.0) + tmp = out_array / n + tmp[ii] = self.fill_value + else: + tmp = out_array / self.count + self.count = None + return tmp + + class minimum: + def __init__(self): + self.fill_value = None + + def initialize(self, fill_value): + self.fill_value = fill_value + + def update(self, out_array, in_array): + tmp = np.minimum(out_array, in_array) + # fix invalid values + if self.fill_value is not None: + out_is_bad = np.isclose(out_array, self.fill_value) + out_is_good = np.logical_not(out_is_bad) + in_is_bad = np.isclose(in_array, self.fill_value) + in_is_good = np.logical_not(in_is_bad) + tmp = np.where(np.logical_and(out_is_bad, in_is_good), in_array, tmp) + tmp = np.where(np.logical_and(in_is_bad, out_is_good), out_array, tmp) + tmp = np.where(np.logical_and(in_is_bad, out_is_bad), self.fill_value, tmp) + return tmp + + def finalize(self, out_array): + return out_array + + class maximum: + def __init__(self): + self.fill_value = None + + def initialize(self, fill_value): + self.fill_value = fill_value + + def update(self, out_array, in_array): + tmp = np.maximum(out_array, in_array) + # fix invalid values + if self.fill_value is not None: + out_is_bad = np.isclose(out_array, self.fill_value) + out_is_good = np.logical_not(out_is_bad) + in_is_bad = np.isclose(in_array, self.fill_value) + in_is_good = np.logical_not(in_is_bad) + tmp = np.where(np.logical_and(out_is_bad, in_is_good), in_array, tmp) + tmp = np.where(np.logical_and(in_is_bad, out_is_good), out_array, tmp) + tmp = np.where(np.logical_and(in_is_bad, out_is_bad), self.fill_value, tmp) + return tmp + + def finalize(self, out_array): + return out_array + + @staticmethod + def New(op_name): + if op_name == 'average': + return teca_temporal_reduction_internals. 
\ + reduction_operator.average() + + elif op_name == 'minimum': + return teca_temporal_reduction_internals. \ + reduction_operator.minimum() + + elif op_name == 'maximum': + return teca_temporal_reduction_internals. \ + reduction_operator.maximum() + + raise RuntimeError('Invalid operator %s' % (op_name)) + + +class teca_temporal_reduction(teca_threaded_python_algorithm): + """ + Reduce a mesh across the time dimensions by a defined increment using + a defined operation. + + time increments: daily, monthly, seasonal + reduction operators: average, min, max + + The output time axis will be defined using the selected increment. + The output data will be accumulated/reduced using the selected + operation. + + The set_use_fill_value method controls how invalid or missing values are + teated. When set to 1, NetCDF CF fill values are detected and handled. + This is the default. If it is known that the dataset has no invalid or + missing values one may set this to 0 for faster processing. By default the + fill value will be obtained from metadata stored in the NetCDF CF file + (_FillValue). One may override this by explicitly calling set_fill_value + method with the desired fill value. + + For minimum and maximum operations, at given grid point only valid values + over the interval are used in the calculation. if there are no valid + values over the interval at the grid point it is set to the fill_value. + + For the averaging operation, during summation missing values are treated + as 0.0 and a per-grid point count of valid values over the interval is + maintained and used in the average. Grid points with no valid values over + the inteval are set to the fill value. 
+ """ + def __init__(self): + self.indices = [] + self.point_arrays = [] + self.interval_name = None + self.operator_name = None + self.use_fill_value = 1 + self.fill_value = None + self.operator = {} + + def set_fill_value(self, fill_value): + """ + set the output fill_value + """ + self.fill_value = fill_value + + def set_use_fill_value(self, use): + """ + set the output fill_value + """ + self.use_fill_value = use + + def set_interval(self, interval): + """ + set the output interval + """ + self.interval_name = interval + + def set_interval_to_seasonal(self): + """ + set the output interval to seasonal. + """ + self.interval_name = 'seasonal' + + def set_interval_to_monthly(self): + """ + set the output interval to monthly. + """ + self.interval_name = 'monthly' + + def set_interval_to_daily(self): + """ + set the output interval to daily. + """ + self.interval_name = 'daily' + + def set_operator(self, operator): + """ + set the reduction operator + """ + self.operator_name = operator + + def set_operator_to_maximum(self): + """ + set the reduction operator to maximum. + """ + self.operator_name = 'maximum' + + def set_operator_to_minimum(self): + """ + set the reduction operator to minimum. + """ + self.operator_name = 'minimum' + + def set_operator_to_average(self): + """ + set the reduction operator to average. 
+ """ + self.operator_name = 'average' + + def set_point_arrays(self, arrays): + """ + Set the list of arrays to reduce + """ + if isinstance(arrays, list): + arrays = list(arrays) + self.point_arrays = arrays + + def report(self, port, md_in): + """ + implements the report phase of pipeline execution + """ + if self.get_verbose() > 0: + try: + rank = self.get_communicator().Get_rank() + except Exception: + rank = 0 + sys.stderr.write('[%d] teca_temporal_reduction::report\n' % (rank)) + + # sanity checks + if self.interval_name is None: + raise RuntimeError('No interval specified') + + if self.operator_name is None: + raise RuntimeError('No operator specified') + + if self.point_arrays is None: + raise RuntimeError('No arrays specified') + + md_out = md_in[0] + + # get the input time axis and metadata + atts = md_out['attributes'] + coords = md_out['coordinates'] + + t = coords['t'] + t_var = coords['t_variable'] + t_atts = atts[t_var] + + try: + cal = t_atts['calendar'] + except KeyError: + cal = 'standard' + sys.stderr.write('Attributes for the time axis %s is missing ' + 'calendar. The "standard" calendar will be ' + 'used'%(t_var)) + + t_units = t_atts['units'] + + # convert the time axis to the specified interval + self.indices = [ii for ii in teca_temporal_reduction_internals. 
+ interval_iterator.New( + self.interval_name, t, t_units, cal)] + + if self.get_verbose() > 1: + sys.stderr.write('indices = [\n') + for ii in self.indices: + sys.stderr.write('\t%s\n' % (str(ii))) + sys.stderr.write(']\n') + + # update the pipeline control keys + initializer_key = md_out['index_initializer_key'] + md_out[initializer_key] = len(self.indices) + + # update the metadata so that modified time axis and reduced variables + # are presented + out_atts = teca_metadata() + out_vars = [] + + for array in self.point_arrays: + # name of the output array + out_vars.append(array) + + # pass the attributes + in_atts = atts[array] + + # convert integer to floating point for averaging operations + if self.operator_name == 'average': + tc = in_atts['type_code'] + if tc == teca_int_array_code.get() \ + or tc == teca_char_array_code.get() \ + or tc == teca_unsigned_int_array_code.get() \ + or tc == teca_unsigned_char_array_code.get(): + tc = teca_float_array_code.get() + elif tc == teca_long_long_array_code.get() \ + or tc == teca_unsigned_long_long_array_code.get(): + tc = teca_double_array_code.get() + in_atts['type_code'] = tc + + # document the transformation + in_atts['description'] = '%s %s of %s' % (self.interval_name, + self.operator_name, + array) + + out_atts[array] = in_atts + + # update time axis + q = 0 + t_out = np.empty(len(self.indices), dtype=np.float64) + for ii in self.indices: + t_out[q] = ii.time + q += 1 + coords['t'] = t_out + md_out['coordinates'] = coords + + out_atts[t_var] = t_atts + + # package it all up and return + md_out['variables'] = out_vars + md_out["attributes"] = out_atts + + return md_out + + def request(self, port, md_in, req_in): + """ + implements the request phase of pipeline execution + """ + if self.get_verbose() > 0: + try: + rnk = self.get_communicator().Get_rank() + except Exception: + rnk = 0 + sys.stderr.write('[%d] teca_temporal_reduction::request\n' % (rnk)) + + md = md_in[0] + + # initialize a new reduction operator, 
for the subsequent + # execute + atrs = md['attributes'] + for array in self.point_arrays: + # get the fill value + fill_value = self.fill_value + if self.use_fill_value and fill_value is None: + array_atrs = atrs[array] + if array_atrs.has('_FillValue'): + fill_value = array_atrs['_FillValue'] + elif array_atrs.has('missing_value'): + fill_value = array_atrs['missing_value'] + else: + raise RuntimeError('Array %s has no fill value. With use_' + 'fill_value arrays must have _FillValue' + ' or missing_value attribute or you ' + 'must set a fill_value explicitly.'%( + array)) + + # create and initialize the operator + op = teca_temporal_reduction_internals. \ + reduction_operator.New(self.operator_name) + + op.initialize(fill_value) + + # save the operator + self.operator[array] = op + + # generate one request for each time step in the interval + up_reqs = [] + + request_key = md['index_request_key'] + req_id = req_in[request_key] + ii = self.indices[req_id] + i = ii.start_index + while i <= ii.end_index: + req = teca_metadata(req_in) + req[request_key] = i + up_reqs.append(req) + i += 1 + + return up_reqs + + def execute(self, port, data_in, req_in, streaming): + """ + implements the execute phase of pipeline execution + """ + + # get the requested index + request_key = req_in['index_request_key'] + req_id = req_in[request_key] + ii = self.indices[req_id] + + if self.get_verbose() > 0: + try: + rank = self.get_communicator().Get_rank() + except Exception: + rank = 0 + sys.stderr.write('[%d] teca_temporal_reduction::execute ' + 'request %d (%d - %d), reducing %d, %d ' + 'remain\n' % (rank, req_id, ii.start_index, + ii.end_index, len(data_in), + streaming)) + + # copy the first mesh + mesh_in = as_teca_cartesian_mesh(data_in.pop()) + mesh_out = teca_cartesian_mesh.New() + mesh_out.copy(mesh_in) + arrays_out = mesh_out.get_point_arrays() + + # accumulate incoming values + while len(data_in): + mesh_in = as_teca_cartesian_mesh(data_in.pop()) + arrays_in = 
mesh_in.get_point_arrays() + for array in self.point_arrays: + arrays_out[array] = \ + self.operator[array].update(arrays_out[array], + arrays_in[array]) + + # when all the data is processed + if not streaming: + # finalize reduction + for array in self.point_arrays: + arrays_out[array] = \ + self.operator[array].finalize(arrays_out[array]) + + # fix time + mesh_out.set_time_step(req_id) + mesh_out.set_time(ii.time) + + return mesh_out diff --git a/alg/teca_valid_value_mask.cxx b/alg/teca_valid_value_mask.cxx new file mode 100644 index 000000000..c9240efec --- /dev/null +++ b/alg/teca_valid_value_mask.cxx @@ -0,0 +1,408 @@ +#include "teca_valid_value_mask.h" + +#include "teca_mesh.h" +#include "teca_array_collection.h" +#include "teca_variant_array.h" +#include "teca_metadata.h" +#include "teca_array_attributes.h" +#include "teca_coordinate_util.h" +#include "teca_mpi.h" + +#include +#include +#include +#include +#include +#include + +#if defined(TECA_HAS_BOOST) +#include +#endif + +namespace +{ +bool is_mask_array(const std::string &array) +{ + size_t n = array.size(); + size_t pos = n - 6; + + if ((n < 6) || (strncmp(array.c_str() + pos, "_valid", 6) != 0)) + return false; + + return true; +} +} + +//#define TECA_DEBUG + +// -------------------------------------------------------------------------- +teca_valid_value_mask::teca_valid_value_mask() : + mask_arrays(), enable_valid_range(0), verbose(0) +{ + this->set_number_of_input_connections(1); + this->set_number_of_output_ports(1); +} + +// -------------------------------------------------------------------------- +teca_valid_value_mask::~teca_valid_value_mask() +{} + +#if defined(TECA_HAS_BOOST) +// -------------------------------------------------------------------------- +void teca_valid_value_mask::get_properties_description( + const std::string &prefix, options_description &global_opts) +{ + options_description opts("Options for " + + (prefix.empty()?"teca_valid_value_mask":prefix)); + + opts.add_options() + 
TECA_POPTS_MULTI_GET(std::vector, + prefix, mask_arrays, + "A list of arrays to compute a mask for.") + TECA_POPTS_GET(int, prefix, enable_valid_range, + "If set non-zero vald_range, valid_min, and valid_max attributes" + " would be used if there is no _FillValue attribute.") + TECA_POPTS_GET(int, prefix, verbose, + "If set then status messages are sent to the terminal.") + ; + + global_opts.add(opts); +} + +// -------------------------------------------------------------------------- +void teca_valid_value_mask::set_properties( + const std::string &prefix, variables_map &opts) +{ + TECA_POPTS_SET(opts, std::vector, prefix, mask_arrays) + TECA_POPTS_SET(opts, int, prefix, enable_valid_range) + TECA_POPTS_SET(opts, int, prefix, verbose) +} +#endif + + +// -------------------------------------------------------------------------- +teca_metadata teca_valid_value_mask::get_output_metadata( + unsigned int port, + const std::vector &input_md) +{ +#ifdef TECA_DEBUG + std::cerr << teca_parallel_id() + << "teca_valid_value_mask::get_output_metadata" << std::endl; +#endif + (void)port; + + // get the list of available variables and their attriibutes + teca_metadata out_md(input_md[0]); + + std::vector variables(this->mask_arrays); + if (variables.empty() && out_md.get("variables", variables)) + { + TECA_ERROR("Failed to get the list of variables") + return teca_metadata(); + } + + teca_metadata attributes; + if (out_md.get("attributes", attributes)) + { + TECA_ERROR("Failed to get the array attributes") + return teca_metadata(); + } + + // for each mask array we might generate, report that it is available and + // supply attributes to enable the CF writer. 
+ size_t n_arrays = variables.size(); + for (size_t i = 0; i < n_arrays; ++i) + { + const std::string &array_name = variables[i]; + + teca_metadata array_atts; + if (attributes.get(array_name, array_atts)) + { + // this could be reported as an error or a warning but unless this + // becomes problematic quietly ignore it + continue; + } + + // get the centering and size from the array + unsigned int centering = 0; + array_atts.get("centering", centering); + + unsigned long size = 0; + array_atts.get("size", size); + + // construct attributes + teca_array_attributes mask_atts( + teca_variant_array_code::get(), + centering, size, "none", "", "valid value mask"); + + std::string mask_name = array_name + "_valid"; + + // update attributes + attributes.set(mask_name, (teca_metadata)mask_atts); + + // add to the list of available variables + out_md.append("variables", mask_name); + } + + // update the output metadata + out_md.set("attributes", attributes); + + return out_md; +} + +// -------------------------------------------------------------------------- +std::vector teca_valid_value_mask::get_upstream_request( + unsigned int port, + const std::vector &input_md, + const teca_metadata &request) +{ +#ifdef TECA_DEBUG + std::cerr << teca_parallel_id() + << "teca_valid_value_mask::get_output_metadata" << std::endl; +#endif + (void)port; + (void)input_md; + + std::vector up_reqs; + + // copy the incoming request to preserve the downstream + // requirements and add the arrays we need + teca_metadata req(request); + + // get the requested arrays. pass up only those that we don't generate. 
+ std::vector arrays; + req.get("arrays", arrays); + + std::set arrays_up; + + int n_arrays = arrays.size(); + for (int i = 0; i < n_arrays; ++i) + { + const std::string &array = arrays[i]; + if (::is_mask_array(array)) + { + // remove _valid and request the base array + arrays_up.insert(array.substr(0, array.size()-6)); + } + else + { + // not ours, pass through + arrays_up.insert(array); + } + } + + // request explcitly named arrays + if (!this->mask_arrays.empty()) + { + arrays_up.insert(this->mask_arrays.begin(), + this->mask_arrays.end()); + } + + // update the request + req.set("arrays", arrays_up); + + up_reqs.push_back(req); + + return up_reqs; +} + +// -------------------------------------------------------------------------- +const_p_teca_dataset teca_valid_value_mask::execute( + unsigned int port, + const std::vector &input_data, + const teca_metadata &request) +{ +#ifdef TECA_DEBUG + std::cerr << teca_parallel_id() << "teca_valid_value_mask::execute" << std::endl; +#endif + (void)port; + + int rank = 0; +#if defined(TECA_HAS_MPI) + MPI_Comm comm = this->get_communicator(); + int is_init = 0; + MPI_Initialized(&is_init); + if (is_init) + MPI_Comm_rank(comm, &rank); +#endif + + // get the input mesh + const_p_teca_mesh in_mesh = + std::dynamic_pointer_cast(input_data[0]); + + if (!in_mesh) + { + TECA_ERROR("Empty input dataset or not a teca_mesh") + return nullptr; + } + + // allocate the output + p_teca_mesh out_mesh = std::static_pointer_cast + (std::const_pointer_cast(in_mesh)->new_shallow_copy()); + + // get the arrays to process + std::vector tmp = this->mask_arrays; + if (tmp.empty()) + { + request.get("arrays", tmp); + } + + std::vector arrays; + int n_arrays = tmp.size(); + for (int i = 0; i < n_arrays; ++i) + { + const std::string &array = tmp[i]; + if (::is_mask_array(array)) + { + arrays.push_back(array.substr(0, array.size()-6)); + } + } + + // get the array attributes, the fill value controls will be found here + teca_metadata &md = 
out_mesh->get_metadata(); + + teca_metadata attributes; + if (md.get("attributes", attributes)) + { + TECA_ERROR("Failed to get the array attributes") + return nullptr; + } + + // for each array generate the mask + n_arrays = arrays.size(); + for (int i = 0; i < n_arrays; ++i) + { + const std::string &array_name = arrays[i]; + + // get the attributes + teca_metadata array_atts; + if (attributes.get(array_name, array_atts)) + { + TECA_ERROR("The mask for array \"" << array_name + << "\" not computed. The array has no attributes") + return nullptr; + } + + + // get the centering + unsigned int centering = 0; + if (array_atts.get("centering", centering)) + { + TECA_ERROR("Mask for array \"" << array_name << "\" not computed." + " Attributes are missing centering metadata") + return nullptr; + } + + p_teca_array_collection arrays = out_mesh->get_arrays(centering); + if (!arrays) + { + TECA_ERROR("Mask for array \"" << array_name << "\" not computed." + " Failed to get the array collection with centering " << centering) + return nullptr; + } + + // get the input array + p_teca_variant_array array = arrays->get(array_name); + if (!array) + { + TECA_ERROR("Mask for array \"" << array_name << "\" not computed." + " No array named \"" << array_name << "\"") + return nullptr; + } + + + TEMPLATE_DISPATCH(teca_variant_array_impl, + array.get(), + + // look for a _FillValue + bool have_fill_value = false; + + NT fill_value = std::numeric_limits::max(); + + have_fill_value = ((array_atts.get("_FillValue", fill_value) == 0) || + (array_atts.get("missing_value", fill_value) == 0)); + + // look for some combination of valid rnage attributes. 
+ bool have_valid_range = false; + bool have_valid_min = false; + bool have_valid_max = false; + + NT valid_range[2]; + valid_range[0] = std::numeric_limits::lowest(); + valid_range[1] = std::numeric_limits::max(); + + if (this->enable_valid_range) + { + have_valid_range = !have_fill_value && + (array_atts.get("valid_range", valid_range, 2) == 0); + + have_valid_min = !have_fill_value && !have_valid_range && + (array_atts.get("valid_min", valid_range[0]) == 0); + + have_valid_max = !have_fill_value && !have_valid_range && + (array_atts.get("valid_max", valid_range[1]) == 0); + } + + // get a pointer to the values + const NT *p_array = static_cast(array.get())->get(); + size_t n_elem = array->size(); + + p_teca_char_array mask; + + if (have_fill_value) + { + // allocate and compute the mask + mask = teca_char_array::New(n_elem); + char *p_mask = mask->get(); + for (size_t i = 0; i < n_elem; ++i) + { + p_mask[i] = teca_coordinate_util::equal(p_array[i], fill_value) ? 0 : 1; + } + + if (this->verbose && (rank == 0)) + { + TECA_STATUS("Mask for array \"" + << array_name << "\" will be generated using _FillValue=" + << fill_value) + } + } + else if (have_valid_min || have_valid_max || have_valid_range) + { + // allocate and compute the mask + mask = teca_char_array::New(n_elem); + char *p_mask = mask->get(); + for (size_t i = 0; i < n_elem; ++i) + { + NT val = p_array[i]; + p_mask[i] = ((val >= valid_range[0]) && (val <= valid_range[1])) ? 1 : 0; + } + + if (this->verbose && (rank == 0)) + { + TECA_STATUS("Mask for array \"" + << array_name << "\" will be generated using valid_range=[" + << valid_range[0] << ", " << valid_range[1] << "]") + } + } + else + { + if (this->verbose && (rank == 0)) + { + TECA_STATUS("Mask array for \"" << array_name + << "\" was requested but could not be computed. Attributes may" + " be missing a _FillValue, missing_value, valid_min, valid_max;" + " or valid_range. 
call enable_valid_range to enable the use of" + " valid_min, valid_max and valid_range attributes.") + } + continue; + } + + // save the mask in the output + std::string mask_name = array_name + "_valid"; + arrays->set(mask_name, mask); + ) + + } + + return out_mesh; +} diff --git a/alg/teca_valid_value_mask.h b/alg/teca_valid_value_mask.h new file mode 100644 index 000000000..ebea5f21a --- /dev/null +++ b/alg/teca_valid_value_mask.h @@ -0,0 +1,100 @@ +#ifndef teca_valid_value_mask_h +#define teca_valid_value_mask_h + +#include "teca_shared_object.h" +#include "teca_algorithm.h" +#include "teca_metadata.h" + +#include +#include + +TECA_SHARED_OBJECT_FORWARD_DECL(teca_valid_value_mask) + +/// an algorithm that computes a mask identifying valid values +/** + * For each requested mask, from its associated input array, compute a mask set to + * 1 where the data is valid and 0 everywhere else. Downstream algorithms then + * may look for the mask array and process the data in such a way as to produce + * valid results in the presence of missing data. + * + * Validity is determined by comparing the array's elements to the fill value as + * specified in the array's attributes _FillValue or missing_value field. If + * neither of these attribute fields are present then no mask is computed. + * + * The masks generated are stored in the output mesh with the same centering as + * the input variable they were generated from, and named using the variable's + * name with the string "_valid" appended. For example if a mask was generated for + * a variable named "V" it will be named "V_valid". + * + * Masks are requested for specific arrays in one of two ways. One may use the + * @ref mask_arrays algorithm property to explicitly name the list of variables to + * compute masks for. Alternatively, a heuristic applied to incoming requests + * determines if masks should be generated. Specifically the string "_valid" is + * looked for at the end of each requested array. 
If it is found then the mask + * for the variable named by removing "_valid" is generated. For example the + * request for "V_valid" would result in the mask being generated for the variable + * "V". +*/ +class teca_valid_value_mask : public teca_algorithm +{ +public: + TECA_ALGORITHM_STATIC_NEW(teca_valid_value_mask) + TECA_ALGORITHM_DELETE_COPY_ASSIGN(teca_valid_value_mask) + TECA_ALGORITHM_CLASS_NAME(teca_valid_value_mask) + ~teca_valid_value_mask(); + + // report/initialize to/from Boost program options + // objects. + TECA_GET_ALGORITHM_PROPERTIES_DESCRIPTION() + TECA_SET_ALGORITHM_PROPERTIES() + + /** @anchor mask_arrays + * @name mask_arrays + * explicitly set a list of input arrays to process. By default + * all arrays are processed. Use this property to compute masks + * for a subset of the arrays, + */ + ///@{ + TECA_ALGORITHM_VECTOR_PROPERTY(std::string, mask_array) + ///@} + + /** @anchor enable_valid_range + * @name enable_valid_range + * enable the use of valid_range, valid_min, valid_max attributes. + * by default this is off. + */ + ///@{ + TECA_ALGORITHM_PROPERTY(int, enable_valid_range) + ///@} + + /** @anchor verbose + * @name verbose + * if set to a non-zero value, rank 0 will send status information to the + * terminal. The default setting of zero results in minimal output. 
+ */ + ///@{ + TECA_ALGORITHM_PROPERTY(int, verbose) + ///@} + +protected: + teca_valid_value_mask(); + +private: + teca_metadata get_output_metadata(unsigned int port, + const std::vector &input_md) override; + + std::vector get_upstream_request( + unsigned int port, const std::vector &input_md, + const teca_metadata &request) override; + + const_p_teca_dataset execute(unsigned int port, + const std::vector &input_data, + const teca_metadata &request) override; + +private: + std::vector mask_arrays; + int enable_valid_range; + int verbose; +}; + +#endif diff --git a/alg/teca_variant_array_operator.h b/alg/teca_variant_array_operator.h index 1818af465..e1c0decaf 100644 --- a/alg/teca_variant_array_operator.h +++ b/alg/teca_variant_array_operator.h @@ -246,7 +246,7 @@ struct resolver switch (*op) { case '!': - r = apply(a1, logical_not()); + r = teca_variant_array_operator::apply(a1, logical_not()); return 0; break; } diff --git a/alg/teca_vertical_coordinate_transform.cxx b/alg/teca_vertical_coordinate_transform.cxx new file mode 100644 index 000000000..6e9b25d72 --- /dev/null +++ b/alg/teca_vertical_coordinate_transform.cxx @@ -0,0 +1,328 @@ +#include "teca_vertical_coordinate_transform.h" + +#include "teca_arakawa_c_grid.h" +#include "teca_curvilinear_mesh.h" +#include "teca_array_collection.h" +#include "teca_variant_array.h" +#include "teca_metadata.h" + +#include +#include +#include +#include + +#if defined(TECA_HAS_BOOST) +#include +#endif + +using std::string; +using std::vector; +using std::cerr; +using std::endl; +using std::cos; + +//#define TECA_DEBUG + +namespace { + +/* unsigned long nx = ext[1] - ext[0] + 1; + unsigned long ny = ext[3] - ext[2] + 1; + unsigned long nz = ext[5] - ext[4] + 1; + unsigned long nxy = nx*ny;*/ + +// the inputs xi, yi are 2D fields, eta is a 1D field +// the outputs xo,yo, and phd are 3D fields +template +void transform_wrf_v3(unsigned long nx, unsigned long ny, + unsigned long nz, unsigned long nxy, const num_t *xi, + const 
num_t *yi, const num_t *eta, const num_t *ps, num_t pt, + num_t* xo, num_t *yo, num_t *ph) +{ + unsigned long nxy_bytes = nxy*sizeof(num_t); + for (unsigned long k = 0; k < nz; ++k) + { + unsigned long knxy = k*nxy; + + // copy x + memcpy(xo + knxy, xi, nxy_bytes); + + // copy y + memcpy(yo + knxy, yi, nxy_bytes); + + // transform z + num_t eta_k = eta[k]; + for (unsigned long j = 0; j < ny; ++j) + { + unsigned long jnx = j*nx; + unsigned long knxy_jnx = knxy + jnx; + + // transform from mass vertical coordinate into hydrostatic dry pressure + // see "A description of Advanced Research WRF Model Version 4" page 8 + // note: this is the WRF ARW 3 coordinate system + const num_t *ps_jnx = ps + jnx; + num_t *ph_knxy_jnx = ph + knxy_jnx; + for (unsigned long i = 0; i < nx; ++i) + ph_knxy_jnx[i] = eta_k*(ps_jnx[i] - pt) + pt; + } + } +} + +}; + + +// -------------------------------------------------------------------------- +teca_vertical_coordinate_transform::teca_vertical_coordinate_transform() : + mode(mode_wrf_v3) +{ + this->set_number_of_input_connections(1); + this->set_number_of_output_ports(1); +} + +// -------------------------------------------------------------------------- +teca_vertical_coordinate_transform::~teca_vertical_coordinate_transform() +{} + +#if defined(TECA_HAS_BOOST) +// -------------------------------------------------------------------------- +void teca_vertical_coordinate_transform::get_properties_description( + const string &prefix, options_description &global_opts) +{ + options_description opts("Options for " + + (prefix.empty()?"teca_vertical_coordinate_transform":prefix)); + + opts.add_options() + TECA_POPTS_GET(int, prefix, mode, + "transform mode (mode_wrf_v3)") + ; + + global_opts.add(opts); +} + +// -------------------------------------------------------------------------- +void teca_vertical_coordinate_transform::set_properties( + const string &prefix, variables_map &opts) +{ + TECA_POPTS_SET(opts, int, prefix, mode) +} +#endif + 
+// -------------------------------------------------------------------------- +teca_metadata teca_vertical_coordinate_transform::get_output_metadata( + unsigned int port, + const std::vector &input_md) +{ +#ifdef TECA_DEBUG + cerr << teca_parallel_id() + << "teca_vertical_coordinate_transform::get_output_metadata" << endl; +#endif + (void)port; + + teca_metadata out_md(input_md[0]); + + // get coordinate metadata + teca_metadata coords; + if (out_md.get("coordinates", coords)) + { + TECA_ERROR("metadata issue, missing coordinate metadata") + return teca_metadata(); + } + + // get array metadata + teca_metadata atrs; + if (out_md.get("attributes", atrs)) + { + TECA_ERROR("failed to get array attributes") + return teca_metadata(); + } + + switch (this->mode) + { + case mode_wrf_v3: + { + // update the z coordinate axes variable names so that down + // stream algorithms correctly identify them + coords.set("m_z_variable", "ZPDM"); + coords.set("w_z_varibale", "ZPDW"); + + // pass metadata for the arrays we generate + teca_metadata ps_atts; + if (atrs.get("PSFC", ps_atts)) + { + TECA_ERROR("failed to get PSFC attributes") + return teca_metadata(); + } + atrs.set("ZPDM", ps_atts); + atrs.set("ZPDW", ps_atts); + break; + } + default: + { + TECA_ERROR("Invlaid mode " << this->mode) + return teca_metadata(); + } + } + + out_md.set("coordinates", coords); + out_md.set("attributes", atrs); + + return out_md; +} + +// -------------------------------------------------------------------------- +std::vector +teca_vertical_coordinate_transform::get_upstream_request( + unsigned int port, + const std::vector &input_md, + const teca_metadata &request) +{ + (void)port; + (void)input_md; + + vector up_reqs; + + // copy the incoming request to preserve the downstream + // requirements and add the arrays we need + teca_metadata req(request); + + std::set arrays; + if (req.has("arrays")) + req.get("arrays", arrays); + + switch (this->mode) + { + case mode_wrf_v3: + { + 
arrays.insert("PSFC"); + arrays.insert("P_TOP"); + break; + } + default: + TECA_ERROR("Invlaid mode " << this->mode) + return up_reqs; + } + + // update the request + req.set("arrays", arrays); + + // if/when bounds based requests are + // implemented the transform from pressure coordinates to + // sigma coordinates would be handled here. + + // send it up + up_reqs.push_back(req); + + return up_reqs; +} + +// -------------------------------------------------------------------------- +const_p_teca_dataset teca_vertical_coordinate_transform::execute( + unsigned int port, + const std::vector &input_data, + const teca_metadata &request) +{ +#ifdef TECA_DEBUG + cerr << teca_parallel_id() + << "teca_vertical_coordinate_transform::execute" << endl; +#endif + (void)port; + (void)request; + + // get the input mesh + const_p_teca_arakawa_c_grid in_mesh + = std::dynamic_pointer_cast(input_data[0]); + + if (!in_mesh) + { + TECA_ERROR("teca_arakawa_c_grid is required") + return nullptr; + } + + // create the output mesh. it is a curvilinear mesh because + // of the fully 3D realization of the coordinate arrays. + // also cell centered values are moved to node centering, i.e. + // this is the dual mesh. 
+ p_teca_curvilinear_mesh out_mesh = teca_curvilinear_mesh::New(); + out_mesh->copy_metadata(in_mesh); + + switch (this->mode) + { + case mode_wrf_v3: + { + // get the input coordinate system + std::string x_coord_name; + std::string y_coord_name; + + in_mesh->get_m_x_coordinate_variable(x_coord_name); + in_mesh->get_m_y_coordinate_variable(y_coord_name); + + const_p_teca_variant_array xi = in_mesh->get_m_x_coordinates(); + const_p_teca_variant_array yi = in_mesh->get_m_y_coordinates(); + const_p_teca_variant_array eta = in_mesh->get_m_z_coordinates(); + + const_p_teca_variant_array pt = in_mesh->get_information_arrays()->get("P_TOP"); + if (!pt) + { + TECA_ERROR("Failed to get P_TOP") + return nullptr; + } + + const_p_teca_variant_array ps = in_mesh->get_cell_arrays()->get("PSFC"); + if (!ps) + { + TECA_ERROR("Failed to get PSFC") + return nullptr; + } + + // get the mesh dimensions + unsigned long extent[6] = {0}; + in_mesh->get_extent(extent); + + unsigned long nx = extent[1] - extent[0] + 1; + unsigned long ny = extent[3] - extent[2] + 1; + unsigned long nz = extent[5] - extent[4] + 1; + unsigned long nxy = nx*ny; + unsigned long nxyz = nxy*nz; + + // allocate the output coordinates + p_teca_variant_array xo = xi->new_instance(nxyz); + p_teca_variant_array yo = xi->new_instance(nxyz); + p_teca_variant_array ph = xi->new_instance(nxyz); + + TEMPLATE_DISPATCH(teca_variant_array_impl, + xo.get(), + + const NT *ppt = dynamic_cast(pt.get())->get(); + const NT *pps = dynamic_cast(ps.get())->get(); + const NT *pxi = dynamic_cast(xi.get())->get(); + const NT *pyi = dynamic_cast(yi.get())->get(); + const NT *peta = dynamic_cast(eta.get())->get(); + + NT *pxo = dynamic_cast(xo.get())->get(); + NT *pyo = dynamic_cast(yo.get())->get(); + NT *pph = dynamic_cast(ph.get())->get(); + + ::transform_wrf_v3(nx, ny, nz, nxy, pxi, pyi, + peta, pps, ppt[0], pxo, pyo, pph); + ) + + // pass coordinates to output + out_mesh->set_x_coordinates(x_coord_name, xo); + 
out_mesh->set_y_coordinates(y_coord_name, yo); + out_mesh->set_z_coordinates("ZPHM", ph); + + // pass arrays to the output + out_mesh->get_point_arrays() = + std::const_pointer_cast + (in_mesh->get_cell_arrays()); + + break; + } + default: + { + TECA_ERROR("Invlaid mode " << this->mode) + return nullptr; + } + } + + return out_mesh; +} diff --git a/alg/teca_vertical_coordinate_transform.h b/alg/teca_vertical_coordinate_transform.h new file mode 100644 index 000000000..1e3b27ba1 --- /dev/null +++ b/alg/teca_vertical_coordinate_transform.h @@ -0,0 +1,57 @@ +#ifndef teca_vertical_coordinate_transform_h +#define teca_vertical_coordinate_transform_h + +#include "teca_shared_object.h" +#include "teca_algorithm.h" +#include "teca_metadata.h" + +#include +#include + +TECA_SHARED_OBJECT_FORWARD_DECL(teca_vertical_coordinate_transform) + +/// an algorithm that transforms vertical coordinates +/** +An algorithm that transforms vertical coordinates of a mesh. +*/ +class teca_vertical_coordinate_transform : public teca_algorithm +{ +public: + TECA_ALGORITHM_STATIC_NEW(teca_vertical_coordinate_transform) + TECA_ALGORITHM_DELETE_COPY_ASSIGN(teca_vertical_coordinate_transform) + TECA_ALGORITHM_CLASS_NAME(teca_vertical_coordinate_transform) + ~teca_vertical_coordinate_transform(); + + // report/initialize to/from Boost program options + // objects. + TECA_GET_ALGORITHM_PROPERTIES_DESCRIPTION() + TECA_SET_ALGORITHM_PROPERTIES() + + + // set the transform mode. 
+ enum { + mode_invalid = 0, + mode_wrf_v3 = 1 + }; + TECA_ALGORITHM_PROPERTY(int, mode) + +protected: + teca_vertical_coordinate_transform(); + +private: + teca_metadata get_output_metadata(unsigned int port, + const std::vector &input_md) override; + + std::vector get_upstream_request( + unsigned int port, const std::vector &input_md, + const teca_metadata &request) override; + + const_p_teca_dataset execute(unsigned int port, + const std::vector &input_data, + const teca_metadata &request) override; + +private: + int mode; +}; + +#endif diff --git a/alg/teca_vertical_reduction.cxx b/alg/teca_vertical_reduction.cxx new file mode 100644 index 000000000..be806acdd --- /dev/null +++ b/alg/teca_vertical_reduction.cxx @@ -0,0 +1,285 @@ +#include "teca_vertical_reduction.h" + +#include "teca_cartesian_mesh.h" +#include "teca_array_collection.h" +#include "teca_variant_array.h" +#include "teca_metadata.h" +#include "teca_coordinate_util.h" + +#include +#include +#include +#include + +#if defined(TECA_HAS_BOOST) +#include +#endif + +// -------------------------------------------------------------------------- +teca_vertical_reduction::teca_vertical_reduction() +{ + this->set_number_of_input_connections(1); + this->set_number_of_output_ports(1); +} + +// -------------------------------------------------------------------------- +teca_vertical_reduction::~teca_vertical_reduction() +{} + +#if defined(TECA_HAS_BOOST) +// -------------------------------------------------------------------------- +void teca_vertical_reduction::get_properties_description( + const std::string &prefix, options_description &global_opts) +{ + options_description opts("Options for " + + (prefix.empty()?"teca_vertical_reduction":prefix)); + + opts.add_options() + TECA_POPTS_GET(std::vector, prefix, dependent_variables, + "list of arrays needed to compute the derived quantity") + TECA_POPTS_GET(std::vector, prefix, derived_variables, + "name of the derived quantity") + ; + + global_opts.add(opts); +} 
+ +// -------------------------------------------------------------------------- +void teca_vertical_reduction::set_properties( + const std::string &prefix, variables_map &opts) +{ + TECA_POPTS_SET(opts, std::vector, prefix, dependent_variables) + TECA_POPTS_SET(opts, std::vector, prefix, derived_variables) +} +#endif + +// -------------------------------------------------------------------------- +teca_metadata teca_vertical_reduction::get_output_metadata( + unsigned int port, + const std::vector &input_md) +{ +#ifdef TECA_DEBUG + std::cerr << teca_parallel_id() + << "teca_vertical_reduction::get_output_metadata" << std::endl; +#endif + (void)port; + + if (this->derived_variables.empty()) + { + TECA_ERROR("A derived variable was not specififed") + return teca_metadata(); + } + + // add in the arrays we will generate and their attributes + teca_metadata out_md(input_md[0]); + + teca_metadata attributes; + out_md.get("attributes", attributes); + + size_t n_derived = this->derived_variables.size(); + for (size_t i = 0; i < n_derived; ++i) + { + out_md.append("variables", this->derived_variables[i]); + + attributes.set(this->derived_variables[i], + (teca_metadata)this->derived_variable_attributes[i]); + } + out_md.set("attributes", attributes); + + // get the input extents + unsigned long whole_extent[6] = {0}; + if (out_md.get("whole_extent", whole_extent, 6)) + { + TECA_ERROR("Metadata is missing whole_whole_extent") + return teca_metadata(); + } + + // set the output extent, with vertical dim reduced + whole_extent[4] = whole_extent[5] = 0; + out_md.set("whole_extent", whole_extent, 6); + + // fix bounds if it is present + double bounds[6] = {0.0}; + if (out_md.get("bounds", bounds, 6) == 0) + { + bounds[4] = bounds[5] = 0.0; + out_md.set("bounds", bounds, 6); + } + + return out_md; +} + +// -------------------------------------------------------------------------- +std::vector teca_vertical_reduction::get_upstream_request( + unsigned int port, + const std::vector 
&input_md, + const teca_metadata &request) +{ + (void)port; + + std::vector up_reqs; + + // copy the incoming request to preserve the downstream + // requirements and add the arrays we need + teca_metadata req(request); + + // transform extent, add back the vertical dimension + const teca_metadata md = input_md[0]; + + // get the whole extent and bounds + double bounds[6] = {0.0}; + unsigned long whole_extent[6] = {0}; + if (teca_coordinate_util::get_cartesian_mesh_extent(md, + whole_extent, bounds)) + { + TECA_ERROR("Failed to get input data set extent") + return up_reqs; + } + + /*bool has_bounds = request.has("bounds"); + bool has_extent = request.has("extent");*/ + + // restore vertical bounds + double bounds_up[6] = {0.0}; + unsigned long extent_up[6] = {0}; + if (request.get("bounds", bounds_up, 6) == 0) + { + bounds_up[4] = bounds[4]; + bounds_up[5] = bounds[5]; + req.set("bounds", bounds_up, 6); + } + + // restore vertical extent + else if (request.get("extent", extent_up, 6) == 0) + { + extent_up[4] = whole_extent[4]; + extent_up[5] = whole_extent[5]; + req.set("extent", extent_up, 6); + } + // no subset requested, request all the data + else + { + req.set("extent", whole_extent); + } + + // get the list of variable available. we need to see if + // the valid value mask is available and if so request it + std::set variables; + if (md.get("variables", variables)) + { + TECA_ERROR("Metadata issue. variables is missing") + return up_reqs; + } + + // add the dependent variables into the requested arrays + std::set arrays; + if (req.has("arrays")) + req.get("arrays", arrays); + + int n_dep_vars = this->dependent_variables.size(); + for (int i = 0; i < n_dep_vars; ++i) + { + const std::string &dep_var = this->dependent_variables[i]; + + // request the array needed for the calculation + arrays.insert(dep_var); + + // request the valid value mask if they are available. 
+ std::string mask_var = dep_var + "_valid"; + if (variables.count(mask_var)) + arrays.insert(mask_var); + } + + // capture the arrays we produce + size_t n_derived = this->derived_variables.size(); + for (size_t i = 0; i < n_derived; ++i) + arrays.erase(this->derived_variables[i]); + + // update the request + req.set("arrays", arrays); + + // send it up + up_reqs.push_back(req); + return up_reqs; +} + +// -------------------------------------------------------------------------- +const_p_teca_dataset teca_vertical_reduction::execute( + unsigned int port, + const std::vector &input_data, + const teca_metadata &request) +{ +#ifdef TECA_DEBUG + std::cerr << teca_parallel_id() + << "teca_vertical_reduction::execute" << std::endl; +#endif + (void)port; + (void)request; + + // get the input mesh + const_p_teca_mesh in_mesh = + std::dynamic_pointer_cast(input_data[0]); + + if (!in_mesh) + { + TECA_ERROR("teca_mesh is required") + return nullptr; + } + + // construct the output + p_teca_mesh out_mesh = + std::dynamic_pointer_cast(in_mesh->new_instance()); + + // copy metadata + out_mesh->copy_metadata(in_mesh); + + // copy the coordinates + // out_mesh->copy_coordinates(in_mesh); + + // fix the metadata + teca_metadata out_md = out_mesh->get_metadata(); + + // fix whole extent + unsigned long whole_extent[6] = {0}; + if (out_md.get("whole_extent", whole_extent, 6) == 0) + { + whole_extent[4] = whole_extent[5] = 0; + out_md.set("whole_extent", whole_extent, 6); + } + + // fix extent + unsigned long extent[6] = {0}; + if (out_md.get("extent", extent, 6) == 0) + { + extent[4] = extent[5] = 0; + out_md.set("extent", extent, 6); + } + + // fix bounds + double bounds[6] = {0}; + if (out_md.get("bounds", bounds, 6) == 0) + { + bounds[4] = bounds[5] = 0.0; + out_md.set("bounds", bounds, 6); + } + + out_mesh->set_metadata(out_md); + + + // fix the z axis + p_teca_cartesian_mesh cart_mesh = + std::dynamic_pointer_cast(out_mesh); + + if (cart_mesh) + { + std::string z_var; + 
cart_mesh->get_z_coordinate_variable(z_var); + + const_p_teca_variant_array in_z = cart_mesh->get_z_coordinates(); + + p_teca_variant_array out_z = in_z->new_instance(1); + cart_mesh->set_z_coordinates(z_var, out_z); + } + + return out_mesh; +} diff --git a/alg/teca_vertical_reduction.h b/alg/teca_vertical_reduction.h new file mode 100644 index 000000000..a47362408 --- /dev/null +++ b/alg/teca_vertical_reduction.h @@ -0,0 +1,67 @@ +#ifndef teca_vertical_reduction_h +#define teca_vertical_reduction_h + +#include "teca_shared_object.h" +#include "teca_algorithm.h" +#include "teca_metadata.h" +#include "teca_array_attributes.h" + +#include +#include + +TECA_SHARED_OBJECT_FORWARD_DECL(teca_vertical_reduction) + +/// base class for vertical reductions +/** +implements common operations associated with computing a vertical +reduction where a 3D dataset is transformed into a 2D dataset +by a reduction along the 3rd spatial dimension. +*/ +class teca_vertical_reduction : public teca_algorithm +{ +public: + TECA_ALGORITHM_STATIC_NEW(teca_vertical_reduction) + TECA_ALGORITHM_DELETE_COPY_ASSIGN(teca_vertical_reduction) + TECA_ALGORITHM_CLASS_NAME(teca_vertical_reduction) + ~teca_vertical_reduction(); + + // report/initialize to/from Boost program options + // objects. 
+ TECA_GET_ALGORITHM_PROPERTIES_DESCRIPTION() + TECA_SET_ALGORITHM_PROPERTIES() + + // set/get the list of variables that are needed to produce + // the derived quantity + TECA_ALGORITHM_VECTOR_PROPERTY(std::string, dependent_variable) + + // set/get the name of the variable that is produced + TECA_ALGORITHM_VECTOR_PROPERTY(std::string, derived_variable) + + // set/get the attributes of the variable that is produced + TECA_ALGORITHM_VECTOR_PROPERTY(teca_array_attributes, + derived_variable_attribute) + +protected: + teca_vertical_reduction(); + + teca_metadata get_output_metadata( + unsigned int port, + const std::vector &input_md) override; + + std::vector get_upstream_request( + unsigned int port, + const std::vector &input_md, + const teca_metadata &request) override; + + const_p_teca_dataset execute( + unsigned int port, + const std::vector &input_data, + const teca_metadata &request) override; + +private: + std::vector dependent_variables; + std::vector derived_variables; + std::vector derived_variable_attributes; +}; + +#endif diff --git a/apps/CMakeLists.txt b/apps/CMakeLists.txt index cb361726a..2d8a1be85 100644 --- a/apps/CMakeLists.txt +++ b/apps/CMakeLists.txt @@ -2,6 +2,7 @@ project(teca_apps) include_directories( ${CMAKE_CURRENT_SOURCE_DIR} + $ $ $ $ @@ -15,27 +16,42 @@ if (TECA_HAS_BOOST) list(APPEND teca_app_link ${Boost_LIBRARIES}) endif() +teca_add_app(teca_bayesian_ar_detect LIBS ${teca_app_link} + FEATURES ${TECA_HAS_BOOST} ${TECA_HAS_NETCDF} ${TECA_HAS_UDUNITS}) + +teca_add_app(teca_integrated_vapor_transport LIBS ${teca_app_link} + FEATURES ${TECA_HAS_BOOST} ${TECA_HAS_NETCDF} ${TECA_HAS_UDUNITS}) + teca_add_app(teca_tc_detect LIBS ${teca_app_link} - FEATURES (TECA_HAS_BOOST AND TECA_HAS_NETCDF AND TECA_HAS_UDUNITS)) + FEATURES ${TECA_HAS_BOOST} ${TECA_HAS_NETCDF} ${TECA_HAS_UDUNITS}) teca_add_app(teca_tc_wind_radii LIBS ${teca_app_link} - FEATURES (TECA_HAS_BOOST AND TECA_HAS_NETCDF AND TECA_HAS_UDUNITS)) + FEATURES ${TECA_HAS_BOOST} 
${TECA_HAS_NETCDF} ${TECA_HAS_UDUNITS}) teca_add_app(teca_tc_trajectory LIBS ${teca_app_link} - FEATURES (TECA_HAS_BOOST AND TECA_HAS_UDUNITS)) + FEATURES ${TECA_HAS_BOOST} ${TECA_HAS_UDUNITS}) teca_add_app(teca_metadata_probe LIBS ${teca_app_link} - FEATURES (TECA_HAS_BOOST AND TECA_HAS_NETCDF AND TECA_HAS_UDUNITS)) - -teca_add_app(teca_bayesian_ar_detect LIBS ${teca_app_link} - FEATURES (TECA_HAS_BOOST AND TECA_HAS_NETCDF AND TECA_HAS_UDUNITS)) - -teca_py_install_apps( - teca_convert_table.in - teca_dataset_metadata.in - teca_event_filter.in - teca_profile_explorer.in - teca_tc_stats.in - teca_tc_wind_radii_stats.in - teca_tc_trajectory_scalars.in - ) + FEATURES ${TECA_HAS_BOOST} ${TECA_HAS_NETCDF} ${TECA_HAS_UDUNITS}) + +teca_add_app(teca_cf_restripe LIBS ${teca_app_link} + FEATURES ${TECA_HAS_BOOST} ${TECA_HAS_NETCDF} ${TECA_HAS_UDUNITS}) + +teca_add_app(teca_table_diff LIBS ${teca_app_link} + FEATURES ${TECA_HAS_NETCDF}) + +teca_add_app(teca_cartesian_mesh_diff LIBS ${teca_app_link} + FEATURES ${TECA_HAS_NETCDF}) + +teca_add_python_app(teca_convert_table) +teca_add_python_app(teca_dataset_metadata FEATURES ${TECA_HAS_NETCDF}) + +teca_add_python_app(teca_deeplab_ar_detect + FEATURES ${TECA_HAS_NETCDF} ${TECA_HAS_PYTORCH}) + +teca_add_python_app(teca_temporal_reduction FEATURES ${TECA_HAS_NETCDF}) +teca_add_python_app(teca_event_filter) +teca_add_python_app(teca_profile_explorer) +teca_add_python_app(teca_tc_stats) +teca_add_python_app(teca_tc_wind_radii_stats) +teca_add_python_app(teca_tc_trajectory_scalars) diff --git a/apps/teca_app_util.cxx b/apps/teca_app_util.cxx new file mode 100644 index 000000000..04374839f --- /dev/null +++ b/apps/teca_app_util.cxx @@ -0,0 +1,77 @@ +#include "teca_app_util.h" + +#include "teca_config.h" +#include "teca_common.h" +#include "teca_file_util.h" +#include "teca_system_interface.h" + +#include +#include + + +namespace teca_app_util +{ + +// -------------------------------------------------------------------------- +int 
process_command_line_help(int rank, const std::string &flag, + boost::program_options::options_description &opt_defs, + boost::program_options::variables_map &opt_vals) +{ + if (opt_vals.count(flag)) + { + if (rank == 0) + { + std::string app_name = + teca_file_util::filename(teca_system_interface::get_program_name()); + + std::cerr << std::endl + << "TECA version " << TECA_VERSION_DESCR + << " compiled on " << __DATE__ << " " << __TIME__ << std::endl + << std::endl + << "Application usage: " << app_name << " [options]" << std::endl + << std::endl + << opt_defs << std::endl + << std::endl; + } + return 1; + } + return 0; +} + +// -------------------------------------------------------------------------- +int process_command_line_help(int rank, int argc, char **argv, + boost::program_options::options_description &basic_opt_defs, + boost::program_options::options_description &advanced_opt_defs, + boost::program_options::options_description &all_opt_defs, + boost::program_options::variables_map &opt_vals) +{ + try + { + boost::program_options::store( + boost::program_options::command_line_parser(argc, argv) + .style(boost::program_options::command_line_style::unix_style ^ + boost::program_options::command_line_style::allow_short) + .options(all_opt_defs) + .run(), + opt_vals); + + if (process_command_line_help(rank, "help", basic_opt_defs, opt_vals) || + process_command_line_help(rank, "advanced_help", advanced_opt_defs, opt_vals) || + process_command_line_help(rank, "full_help", all_opt_defs, opt_vals)) + { + return 1; + } + + boost::program_options::notify(opt_vals); + } + catch (std::exception &e) + { + TECA_ERROR("Error parsing command line options. See --help " + "for a list of supported options. 
" << e.what()) + return -1; + } + + return 0; +} + +} diff --git a/apps/teca_app_util.h b/apps/teca_app_util.h new file mode 100644 index 000000000..9269c9896 --- /dev/null +++ b/apps/teca_app_util.h @@ -0,0 +1,31 @@ +#ifndef teca_app_util_h +#define teca_app_util_h + +#include "teca_config.h" + +#include +#include + +namespace teca_app_util +{ + +// check for flag and if found print the help message +// and the option definitions. return non-zero if the flag +// was found. +int process_command_line_help(int rank, const std::string &flag, + boost::program_options::options_description &opt_defs, + boost::program_options::variables_map &opt_vals); + +// parses the command line options and checks for --help, --advanced_help, and +// --full_help flags. if any are found prints the associated option +// defintions. if any of the help flags were found 1 is returned. If there is +// an error -1 is returned. Otherwise 0 is returned. +int process_command_line_help(int rank, int argc, char **argv, + boost::program_options::options_description &basic_opt_defs, + boost::program_options::options_description &advanced_opt_defs, + boost::program_options::options_description &all_opt_defs, + boost::program_options::variables_map &opt_vals); + +} + +#endif diff --git a/apps/teca_bayesian_ar_detect.cpp b/apps/teca_bayesian_ar_detect.cpp index 9fa619c68..dedfb99bc 100644 --- a/apps/teca_bayesian_ar_detect.cpp +++ b/apps/teca_bayesian_ar_detect.cpp @@ -7,10 +7,15 @@ #include "teca_bayesian_ar_detect.h" #include "teca_bayesian_ar_detect_parameters.h" #include "teca_binary_segmentation.h" +#include "teca_l2_norm.h" +#include "teca_multi_cf_reader.h" +#include "teca_integrated_vapor_transport.h" +#include "teca_valid_value_mask.h" #include "teca_mpi_manager.h" #include "teca_coordinate_util.h" #include "teca_table.h" #include "teca_dataset_source.h" +#include "teca_app_util.h" #include "calcalcs.h" #include @@ -31,33 +36,88 @@ int main(int argc, char **argv) // initialize command line 
options description // set up some common options to simplify use for most // common scenarios + int help_width = 100; options_description basic_opt_defs( "Basic usage:\n\n" "The following options are the most commonly used. Information\n" - "on advanced options can be displayed using --advanced_help\n\n" - "Basic command line options", 120, -1 + "on all available options can be displayed using --advanced_help\n\n" + "Basic command line options", help_width, help_width - 4 ); basic_opt_defs.add_options() - ("input_file", value(), "file path to the simulation to search for atmospheric rivers") - ("output_file", value()->default_value(std::string("bayesian_ar_detect_%t%.nc")), - "file pattern for output netcdf files (%t% is the time index)") - ("input_regex", value(), "regex matching simulation files to search for atmospheric rivers") - ("ivt", value()->default_value(std::string("IVT")), - "name of variable with integrated vapor transport (IVT)") - ("binary_ar_threshold", value()->default_value(0.6666666667), - "probability threshold for segmenting ar_probability to produce ar_binary_tag") - ("first_step", value(), "first time step to process") - ("last_step", value(), "last time step to process") - ("steps_per_file", value(), "number of time steps per output filr") - ("start_date", value(), "first time to proces in YYYY-MM-DD hh:mm:ss format") - ("end_date", value(), "first time to proces in YYYY-MM-DD hh:mm:ss format") - ("n_threads", value(), "thread pool size. default is -1. -1 for all") + ("input_file", value(), "\na teca_multi_cf_reader configuration file" + " identifying the set of NetCDF CF2 files to process. When present data is" + " read using the teca_multi_cf_reader. Use one of either --input_file or" + " --input_regex.\n") + + ("input_regex", value(), "\na teca_cf_reader regex identifying the" + " set of NetCDF CF2 files to process. When present data is read using the" + " teca_cf_reader. 
Use one of either --input_file or --input_regex.\n") + + ("ivt", value()->default_value(std::string("IVT")), + "\nname of variable with the magnitude of integrated vapor transport\n") + + ("compute_ivt_magnitude", "\nwhen this flag is present magnitude of vector" + " IVT is calculated. use --ivt_u and --ivt_v to set the name of the IVT" + " vector components if needed.\n") + ("ivt_u", value()->default_value(std::string("IVT_U")), + "\nname of variable with longitudinal component of the integrated vapor" + " transport vector.\n") + ("ivt_v", value()->default_value(std::string("IVT_V")), + "\nname of variable with latitudinal component of the integrated vapor" + " transport vector.\n") + ("write_ivt_magnitude", "\nwhen this flag is present IVT magnitude is" + " written to disk with the AR detector results\n") + + ("compute_ivt", "\nwhen this flag is present IVT vector is calculated from" + " specific humidity, and wind vector components. use --specific_humidity" + " --wind_u and --wind_v to set the name of the specific humidity and wind" + " vector components, and --ivt_u and --ivt_v to control the names of" + " the results, if needed.\n") + ("specific_humidity", value()->default_value(std::string("Q")), + "\nname of variable with the 3D specific humidity field.(Q)\n") + ("wind_u", value()->default_value(std::string("U")), + "\nname of variable with the 3D longitudinal component of the wind vector.\n") + ("wind_v", value()->default_value(std::string("V")), + "\nname of variable with the 3D latitudinal component of the wind vector.\n") + ("write_ivt", "\nwhen this flag is present IVT vector is written to disk with" + " the result\n") + + ("x_axis_variable", value()->default_value("lon"), + "\nname of x coordinate variable\n") + ("y_axis_variable", value()->default_value("lat"), + "\nname of y coordinate variable\n") + ("z_axis_variable", value()->default_value("plev"), + "\nname of z coordinate variable\n") + ("periodic_in_x", value()->default_value(1), - "Flags 
whether the x dimension (typically longitude) is periodic.") - ("verbose", "enable extra terminal output") - ("help", "display the basic options help") - ("advanced_help", "display the advanced options help") - ("full_help", "display entire help message") + "\nFlags whether the x dimension (typically longitude) is periodic.\n") + + ("binary_ar_threshold", value()->default_value(2.0/3.0,"0.667"), + "\nprobability threshold for segmenting ar_probability to produce ar_binary_tag\n") + + ("output_file", value()->default_value(std::string("CASCADE_BARD_%t%.nc")), + "\nA path and file name pattern for the output NetCDF files. %t% is replaced with a" + " human readable date and time corresponding to the time of the first time step in" + " the file. Use --cf_writer::date_format to change the formatting\n") + + ("steps_per_file", value()->default_value(128), + "\nnumber of time steps per output file\n") + + ("first_step", value()->default_value(0), "\nfirst time step to process\n") + ("last_step", value()->default_value(-1), "\nlast time step to process\n") + + ("start_date", value(), "\nThe first time to process in 'Y-M-D h:m:s'" + " format. Note: There must be a space between the date and time specification\n") + ("end_date", value(), "\nThe last time to process in 'Y-M-D h:m:s' format\n") + + ("n_threads", value()->default_value(-1), "\nSets the thread pool size on each" + " MPI rank. 
When the default value of -1 is used TECA will coordinate the thread" + " pools across ranks such each thread is bound to a unique physical core.\n") + + ("verbose", "\nenable extra terminal output\n") + ("help", "\ndisplays documentation for application specific command line options\n") + ("advanced_help", "\ndisplays documentation for algorithm specific command line options\n") + ("full_help", "\ndisplays both basic and advanced documentation together\n") ; // add all options from each pipeline stage for more advanced use @@ -67,7 +127,7 @@ int main(int argc, char **argv) "control over all runtime modifiable parameters. The basic options\n" "(see" "--help) map to these, and will override them if both are\n" "specified.\n\n" - "Advanced command line options", -1, 1 + "Advanced command line options:", help_width, help_width - 4 ); // create the pipeline stages here, they contain the @@ -77,10 +137,30 @@ int main(int argc, char **argv) // them. while we are at it connect the pipeline p_teca_cf_reader cf_reader = teca_cf_reader::New(); cf_reader->get_properties_description("cf_reader", advanced_opt_defs); + cf_reader->set_periodic_in_x(1); + + p_teca_multi_cf_reader mcf_reader = teca_multi_cf_reader::New(); + mcf_reader->get_properties_description("mcf_reader", advanced_opt_defs); + + p_teca_l2_norm l2_norm = teca_l2_norm::New(); + l2_norm->get_properties_description("ivt_magnitude", advanced_opt_defs); + l2_norm->set_component_0_variable("IVT_U"); + l2_norm->set_component_1_variable("IVT_V"); + l2_norm->set_l2_norm_variable("IVT"); + + p_teca_integrated_vapor_transport ivt_int = teca_integrated_vapor_transport::New(); + ivt_int->get_properties_description("ivt_integral", advanced_opt_defs); + ivt_int->set_specific_humidity_variable("Q"); + ivt_int->set_wind_u_variable("U"); + ivt_int->set_wind_v_variable("V"); + ivt_int->set_ivt_u_variable("IVT_U"); + ivt_int->set_ivt_v_variable("IVT_V"); + + p_teca_valid_value_mask vv_mask = teca_valid_value_mask::New(); + 
vv_mask->get_properties_description("vv_mask", advanced_opt_defs); p_teca_normalize_coordinates norm_coords = teca_normalize_coordinates::New(); norm_coords->get_properties_description("norm_coords", advanced_opt_defs); - norm_coords->set_input_connection(cf_reader->get_output_port()); // parameter source p_teca_bayesian_ar_detect_parameters params @@ -91,12 +171,11 @@ int main(int argc, char **argv) // Construct the AR detector and attach the input file and parameters p_teca_bayesian_ar_detect ar_detect = teca_bayesian_ar_detect::New(); ar_detect->get_properties_description("ar_detect", advanced_opt_defs); - ar_detect->set_input_connection(0, params->get_output_port()); - ar_detect->set_input_connection(1, norm_coords->get_output_port()); + ar_detect->set_ivt_variable("IVT"); + // segment the ar probability field p_teca_binary_segmentation ar_tag = teca_binary_segmentation::New(); - ar_tag->set_input_connection(0, ar_detect->get_output_port()); ar_tag->set_threshold_mode(ar_tag->BY_VALUE); ar_tag->set_threshold_variable("ar_probability"); ar_tag->set_segmentation_variable("ar_binary_tag"); @@ -107,60 +186,19 @@ int main(int argc, char **argv) // Add the writer p_teca_cf_writer cf_writer = teca_cf_writer::New(); cf_writer->get_properties_description("cf_writer", advanced_opt_defs); - cf_writer->set_input_connection(ar_tag->get_output_port()); cf_writer->set_verbose(0); cf_writer->set_thread_pool_size(1); + cf_writer->set_steps_per_file(128); // package basic and advanced options for display - options_description all_opt_defs(-1, -1); + options_description all_opt_defs(help_width, help_width - 4); all_opt_defs.add(basic_opt_defs).add(advanced_opt_defs); // parse the command line variables_map opt_vals; - try - { - boost::program_options::store( - boost::program_options::command_line_parser(argc, argv).options(all_opt_defs).run(), - opt_vals); - - if (mpi_man.get_comm_rank() == 0) - { - if (opt_vals.count("help")) - { - cerr << endl - << "usage: 
teca_bayesian_ar_detect [options]" << endl - << endl - << basic_opt_defs << endl - << endl; - return -1; - } - if (opt_vals.count("advanced_help")) - { - cerr << endl - << "usage: teca_bayesian_ar_detect [options]" << endl - << endl - << advanced_opt_defs << endl - << endl; - return -1; - } - - if (opt_vals.count("full_help")) - { - cerr << endl - << "usage: teca_bayesian_ar_detect [options]" << endl - << endl - << all_opt_defs << endl - << endl; - return -1; - } - } - - boost::program_options::notify(opt_vals); - } - catch (std::exception &e) + if (teca_app_util::process_command_line_help(mpi_man.get_comm_rank(), + argc, argv, basic_opt_defs, advanced_opt_defs, all_opt_defs, opt_vals)) { - TECA_ERROR("Error parsing command line options. See --help " - "for a list of supported options. " << e.what()) return -1; } @@ -168,6 +206,10 @@ int main(int argc, char **argv) // advanced options are processed first, so that the basic // options will override them cf_reader->set_properties("cf_reader", opt_vals); + mcf_reader->set_properties("mcf_reader", opt_vals); + l2_norm->set_properties("ivt_magnitude", opt_vals); + ivt_int->set_properties("ivt_integral", opt_vals); + vv_mask->set_properties("vv_mask", opt_vals); norm_coords->set_properties("norm_coords", opt_vals); params->set_properties("parameter_table", opt_vals); ar_detect->set_properties("ar_detect", opt_vals); @@ -175,34 +217,131 @@ int main(int argc, char **argv) // now pass in the basic options, these are processed // last so that they will take precedence - if (opt_vals.count("input_file")) - cf_reader->append_file_name( - opt_vals["input_file"].as()); + // configure the pipeline from the command line options. 
+ p_teca_algorithm head; - if (opt_vals.count("input_regex")) - cf_reader->set_files_regex( - opt_vals["input_regex"].as()); + // configure the reader + bool have_file = opt_vals.count("input_file"); + bool have_regex = opt_vals.count("input_regex"); - if (opt_vals.count("periodic_in_x")) - cf_reader->set_periodic_in_x( - opt_vals["periodic_in_x"].as()); + if (have_file) + { + mcf_reader->set_input_file(opt_vals["input_file"].as()); + head = mcf_reader; + } + else if (have_regex) + { + cf_reader->set_files_regex(opt_vals["input_regex"].as()); + head = cf_reader; + } + p_teca_algorithm reader = head; - if (opt_vals.count("output_file")) - cf_writer->set_file_name( - opt_vals["output_file"].as()); + if (!opt_vals["periodic_in_x"].defaulted()) + { + cf_reader->set_periodic_in_x(opt_vals["periodic_in_x"].as()); + mcf_reader->set_periodic_in_x(opt_vals["periodic_in_x"].as()); + } + + if (!opt_vals["x_axis_variable"].defaulted()) + { + cf_reader->set_x_axis_variable(opt_vals["x_axis_variable"].as()); + mcf_reader->set_x_axis_variable(opt_vals["x_axis_variable"].as()); + } + + if (!opt_vals["y_axis_variable"].defaulted()) + { + cf_reader->set_y_axis_variable(opt_vals["y_axis_variable"].as()); + mcf_reader->set_y_axis_variable(opt_vals["y_axis_variable"].as()); + } - if (opt_vals.count("ivt")) - ar_detect->set_water_vapor_variable( - opt_vals["ivt"].as()); + // set the inputs to the integrator + if (!opt_vals["wind_u"].defaulted()) + { + ivt_int->set_wind_u_variable(opt_vals["wind_u"].as()); + } - if (opt_vals.count("steps_per_file")) - cf_writer->set_steps_per_file( - opt_vals["steps_per_file"].as()); + if (!opt_vals["wind_v"].defaulted()) + { + ivt_int->set_wind_v_variable(opt_vals["wind_v"].as()); + } - if (opt_vals.count("first_step")) + if (!opt_vals["specific_humidity"].defaulted()) + { + ivt_int->set_specific_humidity_variable( + opt_vals["specific_humidity"].as()); + } + + // set all that use or produce ivt + if (!opt_vals["ivt_u"].defaulted()) + { + 
ivt_int->set_ivt_u_variable(opt_vals["ivt_u"].as()); + l2_norm->set_component_0_variable(opt_vals["ivt_u"].as()); + } + + if (!opt_vals["ivt_v"].defaulted()) + { + ivt_int->set_ivt_v_variable(opt_vals["ivt_v"].as()); + l2_norm->set_component_1_variable(opt_vals["ivt_v"].as()); + } + + if (!opt_vals["ivt"].defaulted()) + { + l2_norm->set_l2_norm_variable(opt_vals["ivt"].as()); + ar_detect->set_ivt_variable(opt_vals["ivt"].as()); + } + + // add the ivt caluation stages if needed + bool do_ivt = opt_vals.count("compute_ivt"); + bool do_ivt_magnitude = opt_vals.count("compute_ivt_magnitude"); + + if (do_ivt) + { + std::string z_var = "plev"; + if (!opt_vals["z_axis_variable"].defaulted()) + z_var = opt_vals["z_axis_variable"].as(); + + cf_reader->set_z_axis_variable(z_var); + mcf_reader->set_z_axis_variable(z_var); + + vv_mask->set_input_connection(head->get_output_port()); + ivt_int->set_input_connection(vv_mask->get_output_port()); + l2_norm->set_input_connection(ivt_int->get_output_port()); + + head = l2_norm; + } + else if (do_ivt_magnitude) + { + l2_norm->set_input_connection(head->get_output_port()); + head = l2_norm; + } + + // tell the writer to write ivt if needed + std::vector point_arrays({"ar_probability", "ar_binary_tag"}); + if ((do_ivt || do_ivt_magnitude) && opt_vals.count("write_ivt_magnitude")) + { + point_arrays.push_back(l2_norm->get_l2_norm_variable()); + } + + if (do_ivt && opt_vals.count("write_ivt")) + { + point_arrays.push_back(ivt_int->get_ivt_u_variable()); + point_arrays.push_back(ivt_int->get_ivt_v_variable()); + } + + cf_writer->set_information_arrays({"ar_count", "parameter_table_row"}); + cf_writer->set_point_arrays(point_arrays); + + + if (!opt_vals["output_file"].defaulted()) + cf_writer->set_file_name(opt_vals["output_file"].as()); + + if (!opt_vals["steps_per_file"].defaulted()) + cf_writer->set_steps_per_file(opt_vals["steps_per_file"].as()); + + if (!opt_vals["first_step"].defaulted()) 
cf_writer->set_first_step(opt_vals["first_step"].as()); - if (opt_vals.count("last_step")) + if (!opt_vals["last_step"].defaulted()) cf_writer->set_last_step(opt_vals["last_step"].as()); if (opt_vals.count("verbose")) @@ -212,21 +351,30 @@ int main(int argc, char **argv) exec->set_verbose(1); } - if (opt_vals.count("n_threads")) + if (!opt_vals["n_threads"].defaulted()) ar_detect->set_thread_pool_size(opt_vals["n_threads"].as()); else ar_detect->set_thread_pool_size(-1); // some minimal check for missing options - if (cf_reader->get_number_of_file_names() == 0 - && cf_reader->get_files_regex().empty()) + if ((have_file && have_regex) || !(have_file || have_regex)) { if (mpi_man.get_comm_rank() == 0) { - TECA_ERROR( - "missing file name or regex for simulation reader. " - "See --help for a list of command line options.") + TECA_ERROR("Exactly one of --input_file or --input_regex can be specified. " + "Use --input_file to activate the multi_cf_reader (HighResMIP datasets) " + "and --input_regex to activate the cf_reader (CAM like datasets)") + } + return -1; + } + + if (do_ivt && do_ivt_magnitude) + { + if (mpi_man.get_comm_rank() == 0) + { + TECA_ERROR("Only one of --compute_ivt and --compute_ivt_magnitude can " + "be specified. --compute_ivt implies --compute_ivt_magnitude") } return -1; } @@ -235,25 +383,31 @@ int main(int argc, char **argv) { if (mpi_man.get_comm_rank() == 0) { - TECA_ERROR( - "missing file name pattern for netcdf writer. " + TECA_ERROR("missing file name pattern for netcdf writer. 
" "See --help for a list of command line options.") } return -1; } + // connect the fixed stages of the pipeline + norm_coords->set_input_connection(head->get_output_port()); + ar_detect->set_input_connection(0, params->get_output_port()); + ar_detect->set_input_connection(1, norm_coords->get_output_port()); + ar_tag->set_input_connection(0, ar_detect->get_output_port()); + cf_writer->set_input_connection(ar_tag->get_output_port()); + // look for requested time step range, start bool parse_start_date = opt_vals.count("start_date"); bool parse_end_date = opt_vals.count("end_date"); if (parse_start_date || parse_end_date) { // run the reporting phase of the pipeline - teca_metadata md = cf_reader->update_metadata(); + teca_metadata md = reader->update_metadata(); teca_metadata atrs; if (md.get("attributes", atrs)) { - TECA_ERROR("metadata mising attributes") + TECA_ERROR("metadata missing attributes") return -1; } @@ -283,14 +437,14 @@ int main(int argc, char **argv) { unsigned long first_step = 0; std::string start_date = opt_vals["start_date"].as(); - if (teca_coordinate_util::time_step_of(time, true, calendar, + if (teca_coordinate_util::time_step_of(time, true, true, calendar, units, start_date, first_step)) { - TECA_ERROR("Failed to lcoate time step for start date \"" + TECA_ERROR("Failed to locate time step for start date \"" << start_date << "\"") return -1; } - exec->set_start_index(first_step); + cf_writer->set_first_step(first_step); } // and end date @@ -298,34 +452,33 @@ int main(int argc, char **argv) { unsigned long last_step = 0; std::string end_date = opt_vals["end_date"].as(); - if (teca_coordinate_util::time_step_of(time, false, calendar, + if (teca_coordinate_util::time_step_of(time, false, true, calendar, units, end_date, last_step)) { - TECA_ERROR("Failed to lcoate time step for end date \"" + TECA_ERROR("Failed to locate time step for end date \"" << end_date << "\"") return -1; } - exec->set_end_index(last_step); + 
cf_writer->set_last_step(last_step); } } - - double ar_tag_threshold = opt_vals["binary_ar_threshold"].as(); // set the threshold for calculating ar_binary_tag + double ar_tag_threshold = opt_vals["binary_ar_threshold"].as(); ar_tag->set_low_threshold_value(ar_tag_threshold); + // add metadata for ar_binary_tag teca_metadata seg_atts; - seg_atts.set("long_name",std::string("binary indicator of atmospheric river")); - seg_atts.set("description",std::string("binary indicator of atmospheric river")); - seg_atts.set("scheme",std::string("cascade_bard")); - seg_atts.set("version",std::string("1.0")); + seg_atts.set("long_name", std::string("binary indicator of atmospheric river")); + seg_atts.set("description", std::string("binary indicator of atmospheric river")); + seg_atts.set("scheme", std::string("cascade_bard")); + seg_atts.set("version", std::string("1.0")); seg_atts.set("note", - std::string("derived by thresholding ar_probability >= ") + + std::string("derived by thresholding ar_probability >= ") + std::to_string(ar_tag_threshold)); - ar_tag->set_segmentation_variable_atts(seg_atts); + ar_tag->set_segmentation_variable_attributes(seg_atts); // run the pipeline - cf_writer->set_executive(exec); cf_writer->update(); diff --git a/apps/teca_cartesian_mesh_diff.cpp b/apps/teca_cartesian_mesh_diff.cpp new file mode 100644 index 000000000..26be1d2e2 --- /dev/null +++ b/apps/teca_cartesian_mesh_diff.cpp @@ -0,0 +1,254 @@ +#include "teca_config.h" +#include "teca_common.h" +#include "teca_algorithm.h" +#include "teca_cartesian_mesh_reader_factory.h" +#include "teca_cartesian_mesh_writer_factory.h" +#include "teca_dataset_diff.h" +#include "teca_index_executive.h" +#include "teca_mpi_manager.h" +#include "teca_system_interface.h" +#include "teca_system_util.h" +#include +using boost::program_options::value; + +#include +#include +#include + + +int main(int argc, char **argv) +{ + teca_mpi_manager mpi_man(argc, argv); + teca_system_interface::set_stack_trace_on_error(); + 
teca_system_interface::set_stack_trace_on_mpi_error(); + + // are we doing the test or updating the baseline? + bool do_test = true; + teca_system_util::get_environment_variable("TECA_DO_TEST", do_test); + + // grab file names first so we can construct an instance + // then we can use the instance to get advanced options. + std::string ref_file; + if (teca_system_util::get_command_line_option( + argc, argv, "--reference_dataset", 1, ref_file)) + return -1; + + std::string test_file; + if (teca_system_util::get_command_line_option( + argc, argv, "--test_dataset", 1, test_file)) + return -1; + + p_teca_algorithm test_reader = teca_cartesian_mesh_reader_factory::New(test_file); + if (!test_reader) + { + TECA_ERROR("the test file format was not recognized from \"" + << test_file << "\"") + return -1; + } + + p_teca_algorithm ref_reader; + p_teca_algorithm ref_writer; + + if (do_test) + ref_reader = teca_cartesian_mesh_reader_factory::New(ref_file); + else + ref_writer = teca_cartesian_mesh_writer_factory::New(ref_file); + + if (!ref_reader && !ref_writer) + { + TECA_ERROR("the reference file format was not recognized from \"" + << ref_file << "\"") + return -1; + } + + p_teca_dataset_diff diff = teca_dataset_diff::New(); + + // initialize command line options description + // set up some common options to simplify use for most + // common scenarios + options_description basic_opt_defs( + "teca_cartesian_mesh_diff an application that compares two datasets.\n\n" + "Command line options", 120, -1 + ); + basic_opt_defs.add_options() + ("reference_dataset", value()->required(), + "cf_reader regex identifying the reference dataset") + + ("test_dataset", value()->required(), + "cf_reader regex identifying the test dataset") + + ("arrays", value>()->multitoken()->required(), + "a list of arrays to compare") + + ("relative_tolerance", value()->default_value(-1.0), + "max allowable relative difference in array values") + + ("absolute_tolerance", value()->default_value(-1.0), + 
"max allowable relative difference in array values") + + ("start_index", value()->default_value(0), + "first time step to process (0)") + + ("end_index", value()->default_value(-1), + "last time step to process (-1)") + + ("verbose", "enable extra terminal output") + ("help", "display the basic options help") + ("full_help", "display all options help information") + ("advanced_help", "display the advanced options help") + ; + + // add all options from each pipeline stage for more advanced use + options_description advanced_opt_defs( + "Advanced usage:\n\n" + "The following list contains the full set options giving one full\n" + "control over all runtime modifiable parameters. The basic options\n" + "(see" "--help) map to these, and will override them if both are\n" + "specified.\n\n" + "Advanced command line options", -1, 1 + ); + + // create the pipeline stages here, they contain the + // documentation and parse command line. + // objects report all of their properties directly + // set default options here so that command line options override + // them. 
while we are at it connect the pipeline + test_reader->get_properties_description("test_reader", advanced_opt_defs); + if (do_test) + { + ref_reader->get_properties_description("ref_reader", advanced_opt_defs); + diff->get_properties_description("diff", advanced_opt_defs); + } + else + { + ref_writer->get_properties_description("ref_writer", advanced_opt_defs); + } + + // package basic and advanced options for display + options_description all_opt_defs(-1, -1); + all_opt_defs.add(basic_opt_defs).add(advanced_opt_defs); + + // parse the command line + variables_map opt_vals; + try + { + boost::program_options::store( + boost::program_options::command_line_parser(argc, argv) + .style(boost::program_options::command_line_style::unix_style ^ + boost::program_options::command_line_style::allow_short) + .options(all_opt_defs) + .run(), + opt_vals); + + if (mpi_man.get_comm_rank() == 0) + { + if (opt_vals.count("help")) + { + std::cerr << std::endl + << "usage: teca_cartesian_mesh_diff [options]" << std::endl + << std::endl + << basic_opt_defs << std::endl + << std::endl; + return -1; + } + + if (opt_vals.count("advanced_help")) + { + std::cerr << std::endl + << "usage: teca_cartesian_mesh_diff [options]" << std::endl + << std::endl + << advanced_opt_defs << std::endl + << std::endl; + return -1; + } + + if (opt_vals.count("full_help")) + { + std::cerr << std::endl + << "usage: teca_cartesian_mesh_diff [options]" << std::endl + << std::endl + << all_opt_defs << std::endl + << std::endl; + return -1; + } + } + + boost::program_options::notify(opt_vals); + } + catch (std::exception &e) + { + if (mpi_man.get_comm_rank() == 0) + { + TECA_ERROR("Error parsing command line options. See --help " + "for a list of supported options. 
" << e.what()) + } + return -1; + } + + // pass command line arguments into the pipeline objects + // advanced options are processed first, so that the basic + // options will override them + test_reader->set_properties("test_reader", opt_vals); + if (do_test) + { + ref_reader->set_properties("ref_reader", opt_vals); + diff->set_properties("diff", opt_vals); + } + else + { + ref_writer->set_properties("ref_writer", opt_vals); + } + + if (!opt_vals["relative_tolerance"].defaulted()) + { + diff->set_relative_tolerance(opt_vals["relative_tolerance"].as()); + } + + if (!opt_vals["absolute_tolerance"].defaulted()) + { + diff->set_absolute_tolerance(opt_vals["absolute_tolerance"].as()); + } + + std::vector arrays = opt_vals["arrays"].as>(); + long start_index = opt_vals["start_index"].as(); + long end_index = opt_vals["end_index"].as(); + + p_teca_index_executive exec = teca_index_executive::New(); + exec->set_start_index(start_index); + exec->set_end_index(end_index); + exec->set_arrays(arrays); + + bool verbose = opt_vals.count("verbose"); + if (verbose) + { + exec->set_verbose(1); + diff->set_verbose(1); + } + else + { + exec->set_verbose(0); + diff->set_verbose(0); + } + + if (do_test) + { + TECA_STATUS("Running the test") + + diff->set_input_connection(0, ref_reader->get_output_port()); + diff->set_input_connection(1, test_reader->get_output_port()); + diff->set_executive(exec); + + diff->update(); + } + else + { + TECA_STATUS("Writing the baseline") + + ref_writer->set_input_connection(test_reader->get_output_port()); + ref_writer->set_executive(exec); + + ref_writer->update(); + } + + return 0; +} diff --git a/apps/teca_cf_restripe.cpp b/apps/teca_cf_restripe.cpp new file mode 100644 index 000000000..bbf7584bd --- /dev/null +++ b/apps/teca_cf_restripe.cpp @@ -0,0 +1,342 @@ +#include "teca_config.h" +#include "teca_metadata.h" +#include "teca_array_attributes.h" +#include "teca_variant_array.h" +#include "teca_cf_reader.h" +#include "teca_multi_cf_reader.h" 
+#include "teca_cf_writer.h" +#include "teca_dataset_diff.h" +#include "teca_index_executive.h" +#include "teca_system_interface.h" +#include "teca_coordinate_util.h" +#include "teca_file_util.h" +#include "teca_mpi_manager.h" +#include "teca_mpi.h" +#include "teca_app_util.h" +#include "calcalcs.h" + +#include +#include +#include +#include +#include + +using boost::program_options::value; + + +int main(int argc, char **argv) +{ + teca_mpi_manager mpi_man(argc, argv); + teca_system_interface::set_stack_trace_on_error(); + teca_system_interface::set_stack_trace_on_mpi_error(); + + // initialize command line options description + // set up some common options to simplify use for most + // common scenarios + int help_width = 100; + options_description basic_opt_defs( + "Basic usage:\n\n" + "The following options are the most commonly used. Information\n" + "on advanced options can be displayed using --advanced_help\n\n" + "Basic command line options", help_width, help_width - 4 + ); + basic_opt_defs.add_options() + ("input_file", value(), "\na teca_multi_cf_reader configuration file" + " identifying the set of NetCDF CF2 files to process. When present data is" + " read using the teca_multi_cf_reader. Use one of either --input_file or" + " --input_regex.\n") + + ("input_regex", value(), "\na teca_cf_reader regex identifying the" + " set of NetCDF CF2 files to process. When present data is read using the" + " teca_cf_reader. 
Use one of either --input_file or --input_regex.\n") + + ("x_axis_variable", value(), "\nname of x coordinate variable (lon)\n") + ("y_axis_variable", value(), "\nname of y coordinate variable (lat)\n") + ("z_axis_variable", value(), "\nname of z coordinate variable (plev)\n") + + ("point_arrays", value>()->multitoken(), + "\nA list of point centered arrays to write\n") + + ("information_arrays", value>()->multitoken(), + "\nA list of non-geometric arrays to write\n") + + ("output_file", value()->default_value(std::string("IVT_%t%.nc")), + "\nA path and file name pattern for the output NetCDF files. %t% is replaced with a" + " human readable date and time corresponding to the time of the first time step in" + " the file. Use --cf_writer::date_format to change the formatting\n") + + ("steps_per_file", value(), "\nnumber of time steps per output file\n") + + ("bounds", value>()->multitoken(), + "\nlat lon lev bounding box to subset with\n") + + ("first_step", value(), "\nfirst time step to process\n") + ("last_step", value(), "\nlast time step to process\n") + ("start_date", value(), "\nfirst time to process in YYYY-MM-DD hh:mm:ss format\n") + ("end_date", value(), "\nlast time to process in YYYY-MM-DD hh:mm:ss format\n") + ("n_threads", value(), "\nSets the thread pool size on each MPI rank. 
When the default" + " value of -1 is used TECA will coordinate the thread pools across ranks such each" + " thread is bound to a unique physical core.\n") + ("verbose", "\nenable extra terminal output\n") + ("help", "\ndisplays documentation for application specific command line options\n") + ("advanced_help", "\ndisplays documentation for algorithm specific command line options\n") + ("full_help", "\ndisplays both basic and advanced documentation together\n") + ; + + // add all options from each pipeline stage for more advanced use + options_description advanced_opt_defs( + "Advanced usage:\n\n" + "The following list contains the full set options giving one full\n" + "control over all runtime modifiable parameters. The basic options\n" + "(see" "--help) map to these, and will override them if both are\n" + "specified.\n\n" + "Advanced command line options", help_width, help_width - 4 + ); + + // create the pipeline stages here, they contain the + // documentation and parse command line. + // objects report all of their properties directly + // set default options here so that command line options override + // them. 
while we are at it connect the pipeline + p_teca_cf_reader cf_reader = teca_cf_reader::New(); + cf_reader->get_properties_description("cf_reader", advanced_opt_defs); + + p_teca_multi_cf_reader mcf_reader = teca_multi_cf_reader::New(); + mcf_reader->get_properties_description("mcf_reader", advanced_opt_defs); + + p_teca_cf_writer cf_writer = teca_cf_writer::New(); + cf_writer->get_properties_description("cf_writer", advanced_opt_defs); + + // Add an executive for the writer + p_teca_index_executive exec = teca_index_executive::New(); + + // package basic and advanced options for display + options_description all_opt_defs(help_width, help_width - 4); + all_opt_defs.add(basic_opt_defs).add(advanced_opt_defs); + + // parse the command line + variables_map opt_vals; + if (teca_app_util::process_command_line_help(mpi_man.get_comm_rank(), + argc, argv, basic_opt_defs, advanced_opt_defs, all_opt_defs, opt_vals)) + { + return -1; + } + + // pass command line arguments into the pipeline objects + // advanced options are processed first, so that the basic + // options will override them + cf_reader->set_properties("cf_reader", opt_vals); + mcf_reader->set_properties("mcf_reader", opt_vals); + cf_writer->set_properties("cf_writer", opt_vals); + + // now pass in the basic options, these are processed + // last so that they will take precedence + p_teca_algorithm reader; + bool have_file = opt_vals.count("input_file"); + bool have_regex = opt_vals.count("input_regex"); + if (have_file) + { + mcf_reader->set_input_file(opt_vals["input_file"].as()); + reader = mcf_reader; + } + else if (have_regex) + { + cf_reader->set_files_regex(opt_vals["input_regex"].as()); + reader = cf_reader; + } + + if (opt_vals.count("x_axis_variable")) + { + cf_reader->set_x_axis_variable(opt_vals["x_axis_variable"].as()); + mcf_reader->set_x_axis_variable(opt_vals["x_axis_variable"].as()); + } + + if (opt_vals.count("y_axis_variable")) + { + 
cf_reader->set_y_axis_variable(opt_vals["y_axis_variable"].as()); + mcf_reader->set_y_axis_variable(opt_vals["y_axis_variable"].as()); + } + + if (opt_vals.count("z_axis_variable")) + { + cf_reader->set_z_axis_variable(opt_vals["z_axis_variable"].as()); + mcf_reader->set_z_axis_variable(opt_vals["z_axis_variable"].as()); + } + + if (opt_vals.count("output_file")) + cf_writer->set_file_name( + opt_vals["output_file"].as()); + + if (opt_vals.count("point_arrays")) + cf_writer->set_point_arrays( + opt_vals["point_arrays"].as>()); + + if (opt_vals.count("information_arrays")) + cf_writer->set_information_arrays( + opt_vals["information_arrays"].as>()); + + if (opt_vals.count("steps_per_file")) + cf_writer->set_steps_per_file( + opt_vals["steps_per_file"].as()); + + if (opt_vals.count("first_step")) + cf_writer->set_first_step(opt_vals["first_step"].as()); + + if (opt_vals.count("last_step")) + cf_writer->set_last_step(opt_vals["last_step"].as()); + + if (opt_vals.count("bounds")) + exec->set_bounds( + opt_vals["bounds"].as>()); + + if (opt_vals.count("verbose")) + { + cf_writer->set_verbose(1); + exec->set_verbose(1); + } + + if (opt_vals.count("n_threads")) + cf_writer->set_thread_pool_size(opt_vals["n_threads"].as()); + else + cf_writer->set_thread_pool_size(1); + + // some minimal check for missing options + if ((have_file && have_regex) || !(have_file || have_regex)) + { + if (mpi_man.get_comm_rank() == 0) + { + TECA_ERROR("Exactly one of --input_file or --input_regex can be specified. " + "Use --input_file to activate the multi_cf_reader (HighResMIP datasets) " + "and --input_regex to activate the cf_reader (CAM like datasets)") + } + return -1; + } + + if (cf_writer->get_file_name().empty()) + { + if (mpi_man.get_comm_rank() == 0) + { + TECA_ERROR("missing file name pattern for the NetCDF CF writer. 
" + "See --help for a list of command line options.") + } + return -1; + } + + // if no point arrays were specified on the command line by default + // write all point arrays + teca_metadata md; + teca_metadata atts; + // TODO -- this will need some more work in the reader as currently + // all arrays are marked as being point centered, but here we need + // to identify only the arrays on the mesh. + /*if (cf_writer->get_number_of_point_arrays() == 0) + { + // run the reporting phase of the pipeline + if (md.empty()) + md = cf_reader->update_metadata(); + + // if array attributes are present, use them to locate the set of + // point centered arrrays + if (atts.empty() && md.get("attributes", atts)) + { + TECA_ERROR("metadata missing attributes") + return -1; + } + + // for each array check if it's point centered, if so add it to + // the list of arrays to write. + unsigned int n_arrays = atts.size(); + for (unsigned int i = 0; i < n_arrays; ++i) + { + std::string array_name; + atts.get_name(i, array_name); + + teca_metadata array_atts; + atts.get(array_name, array_atts); + + unsigned int array_cen = 0; + array_atts.get("centering", array_cen); + + if (array_cen == teca_array_attributes::point_centering) + { + cf_writer->append_point_array(array_name); + } + } + }*/ + + // look for requested time step range, start + bool parse_start_date = opt_vals.count("start_date"); + bool parse_end_date = opt_vals.count("end_date"); + if (parse_start_date || parse_end_date) + { + // run the reporting phase of the pipeline + if (md.empty()) + md = cf_reader->update_metadata(); + + if (atts.empty() && md.get("attributes", atts)) + { + TECA_ERROR("metadata missing attributes") + return -1; + } + + teca_metadata time_atts; + std::string calendar; + std::string units; + if (atts.get("time", time_atts) + || time_atts.get("calendar", calendar) + || time_atts.get("units", units)) + { + TECA_ERROR("failed to determine the calendaring parameters") + return -1; + } + + teca_metadata 
coords; + p_teca_double_array time; + if (md.get("coordinates", coords) || + !(time = std::dynamic_pointer_cast( + coords.get("t")))) + { + TECA_ERROR("failed to determine time coordinate") + return -1; + } + + // convert date string to step, start date + if (parse_start_date) + { + unsigned long first_step = 0; + std::string start_date = opt_vals["start_date"].as(); + if (teca_coordinate_util::time_step_of(time, true, true, calendar, + units, start_date, first_step)) + { + TECA_ERROR("Failed to locate time step for start date \"" + << start_date << "\"") + return -1; + } + cf_writer->set_first_step(first_step); + } + + // and end date + if (parse_end_date) + { + unsigned long last_step = 0; + std::string end_date = opt_vals["end_date"].as(); + if (teca_coordinate_util::time_step_of(time, false, true, calendar, + units, end_date, last_step)) + { + TECA_ERROR("Failed to locate time step for end date \"" + << end_date << "\"") + return -1; + } + cf_writer->set_last_step(last_step); + } + } + + // connect the pipeline + cf_writer->set_input_connection(reader->get_output_port()); + + // run the pipeline + cf_writer->set_executive(exec); + cf_writer->update(); + + return 0; +} diff --git a/apps/teca_convert_table.in b/apps/teca_convert_table.in index f2ec103ab..1c3d6b227 100755 --- a/apps/teca_convert_table.in +++ b/apps/teca_convert_table.in @@ -4,23 +4,44 @@ import sys import argparse # parse the command line -parser = argparse.ArgumentParser() +parser = argparse.ArgumentParser( + formatter_class=lambda prog: argparse.ArgumentDefaultsHelpFormatter( + prog, max_help_position=4, width=100)) + parser.add_argument('in_file', type=str, - help='path to table to read') + help='path to table to read') + +parser.add_argument('--in_format', type=str, default='auto', + help='The file format to assume on the input. ' + 'One of: auto, bin, csv. 
(auto)') parser.add_argument('out_file', type=str, - help='path to write result') + help='path to write result') + +parser.add_argument('--out_format', type=str, default='auto', + help='The file format to use in the output. ' + 'One of: auto, bin, csv, netcdf. (auto)') parser.add_argument('--select', type=str, required=False, - help='a logical expression on table columns. ' \ - 'Row where this evaluates to true are passed ' \ - 'to the output') + help='a logical expression on table columns. ' + 'Rows where this evaluates to true are passed ' + 'to the output') args = parser.parse_args() # read the table reader = teca_table_reader.New() reader.set_file_name(args.in_file) +if args.in_format == 'auto': + reader.set_file_format_auto() +elif args.in_format == 'bin': + reader.set_file_format_bin() +elif args.in_format == 'csv': + reader.set_file_format_csv() +elif args.in_format == 'netcdf': + reader.set_file_format_netcdf() +else: + raise RuntimeError('Invalid input file format %s' % (args.in_format)) # optionally remove unselected rows tip = reader @@ -36,6 +57,14 @@ if args.select: writer = teca_table_writer.New() writer.set_input_connection(tip.get_output_port()) writer.set_file_name(args.out_file) +if args.out_format == 'auto': + writer.set_output_format_auto() +elif args.out_format == 'bin': + writer.set_output_format_bin() +elif args.out_format == 'csv': + writer.set_output_format_csv() +else: + raise RuntimeError('Invalid output format %s' % (args.out_format)) # execute the pipeline writer.update() diff --git a/apps/teca_deeplab_ar_detect.in b/apps/teca_deeplab_ar_detect.in new file mode 100755 index 000000000..6d07f5c3d --- /dev/null +++ b/apps/teca_deeplab_ar_detect.in @@ -0,0 +1,293 @@ +#!/usr/bin/env python@TECA_PYTHON_VERSION@ +try: + from mpi4py import MPI + comm = MPI.COMM_WORLD + rank = comm.Get_rank() +except ImportError: + rank = 0 +from teca import * +import sys +import argparse +import numpy as np + +teca_profiler.initialize() 
+teca_profiler.start_event('deeplab ar_detect') + +# parse the command line +parser = argparse.ArgumentParser( + formatter_class=lambda prog: argparse.ArgumentDefaultsHelpFormatter( + prog, max_help_position=4, width=100)) + +parser.add_argument('--input_file', type=str, required=False, + help='a teca_multi_cf_reader configuration file identifying the set' + ' of NetCDF CF2 files to process. When present data is read' + ' using the teca_multi_cf_reader. Use one of either --input_file' + ' or --input_regex.') + +parser.add_argument('--input_regex', type=str, required=False, + help='a teca_cf_reader regex identifying the' + ' set of NetCDF CF2 files to process. When present data is read' + ' using the teca_cf_reader. Use one of either --input_file or ' + ' --input_regex.') + +parser.add_argument('--ivt', type=str, required=False, default='IVT', + help='name of variable with integrated vapor transport magnitude') + +parser.add_argument('--compute_ivt_magnitude', action='store_true', + help='when this flag is present magnitude of vector IVT is calculated.' + ' use --ivt_u and --ivt_v to set the name of the IVT vector' + ' components if needed.') + +parser.add_argument('--ivt_u', type=str, required=False, default='IVT_U', + help='name of variable with longitudinal component of the integrated vapor' + ' transport vector.') + +parser.add_argument('--ivt_v', type=str, required=False, default='IVT_V', + help='name of variable with latitudinal component of the integrated vapor' + ' transport vector.') + +parser.add_argument('--write_ivt_magnitude', action='store_true', + help='when this flag is present IVT magnitude is written to disk with the' + ' AR detector results') + +parser.add_argument('--compute_ivt', action='store_true', + help='when this flag is present IVT vector is calculated from' + ' specific humidity, and wind vector components. 
use' + ' --specific_humidity --wind_u and --wind_v to set the name of the' + ' specific humidity and wind vector components, and --ivt_u and' + ' --ivt_v to control the names of' ' the results, if needed.') + +parser.add_argument('--specific_humidity', type=str, required=False, + default='Q', help='name of variable with the 3D specific humidity field.') + +parser.add_argument('--wind_u', type=str, required=False, default='U', + help='name of variable with the 3D longitudinal component of the wind' + 'vector.') + +parser.add_argument('--wind_v', type=str, required=False, default='V', + help='name of variable with the 3D latitudinal component of the wind' + ' vector.') + +parser.add_argument('--write_ivt', action='store_true', required=False, + help='when this flag is present IVT vector is written to disk with' + ' the result') + +parser.add_argument('--x_axis_variable', type=str, default='lon', + required=False, help='name of x coordinate variable') + +parser.add_argument('--y_axis_variable', type=str, default='lat', + required=False, help='name of y coordinate variable') + +parser.add_argument('--z_axis_variable', type=str, default='plev', + required=False, help='name of z coordinate variable') + +parser.add_argument('--output_file', type=str, required=True, + help='A path and file name pattern for the output NetCDF files. %%t%% is' + ' replaced with a human readable date and time corresponding to the' + ' time of the first time step in the file. Use --date_format to change' + ' the formatting') + +parser.add_argument('--steps_per_file', type=int, required=False, default=128, + help='number of time steps per output file') + +parser.add_argument('--target_device', type=str, default='cpu', + help='set the execution target. May be one of "cpu", or "cuda"') + +parser.add_argument('--n_threads', type=int, default=-1, + help='Sets the thread pool size on each MPI rank. 
When the default' + ' value of -1 is used TECA will coordinate the thread pools across' + ' ranks such each thread is bound to a unique physical core.') + +parser.add_argument('--n_threads_max', type=int, default=4, + help='Sets the max thread pool size on each MPI rank. Set to -1' + ' to use all available cores.') + +parser.add_argument('--binary_ar_threshold', type=float, + default=(2.0/3.0), help='probability threshold for segmenting' + 'ar_probability to produce ar_binary_tag') + +parser.add_argument('--pytorch_model', type=str, required=False, + help='path to the the pytorch model file') + +parser.add_argument('--t_axis_variable', type=str, required=False, + help='time dimension name') + +parser.add_argument('--t_calendar', type=str, required=False, + help='time calendar') + +parser.add_argument('--t_units', type=str, required=False, + help='time unit') + +parser.add_argument('--filename_time_template', type=str, required=False, + help='filename time template') + +parser.add_argument('--date_format', type=str, required=False, + help='A strftime format used when encoding dates into the output' + ' file names (%%F-%%HZ). 
%%t%% in the file name is replaced with date/time' + ' of the first time step in the file using this format specifier.') + +parser.add_argument('--first_step', type=int, default=0, required=False, + help='first time step to process') + +parser.add_argument('--last_step', type=int, default=-1, required=False, + help='last time step to process') +parser.add_argument('--start_date', type=str, required=False, + help='first time to process in "YYYY-MM-DD hh:mm:ss" format') + +parser.add_argument('--end_date', type=str, required=False, + help='end time to process in "YYYY-MM-DD hh:mm:ss" format') + +parser.add_argument('--verbose', action='store_true', + help='Enable verbose output') + + +# prevent spew when running under mpi +try: + args = parser.parse_args() +except Exception: + if rank == 0: raise + +# configure the reader +if args.input_file and not args.input_regex: + reader = teca_multi_cf_reader.New() + reader.set_input_file(args.input_file) +elif args.input_regex and not args.input_file: + reader = teca_cf_reader.New() + reader.set_files_regex(args.input_regex) +else: + if rank == 0: + raise RuntimeError('Exactly one of --input_file or --input_regex' + ' must be provided') + +reader.set_x_axis_variable(args.x_axis_variable) +reader.set_y_axis_variable(args.y_axis_variable) + +if args.t_axis_variable is not None: + reader.set_t_axis_variable(args.t_axis_variable) + +if args.t_calendar: + reader.set_t_calendar(args.t_calendar) + +if args.t_units: + reader.set_t_units(args.t_units) + +if args.filename_time_template: + reader.set_filename_time_template(args.filename_time_template) + +head = reader + +# configure the integrator +if args.compute_ivt: + reader.set_z_axis_variable(args.z_axis_variable) + + ivt_int = teca_integrated_vapor_transport.New() + ivt_int.set_wind_u_variable(args.wind_u) + ivt_int.set_wind_v_variable(args.wind_v) + ivt_int.set_specific_humidity_variable(args.specific_humidity) + ivt_int.set_ivt_u_variable(args.ivt_u) + 
ivt_int.set_ivt_v_variable(args.ivt_v) + + ivt_int.set_input_connection(reader.get_output_port()) + + head = ivt_int + +# configure the norm +if args.compute_ivt or args.compute_ivt_magnitude: + + l2_norm = teca_l2_norm.New() + l2_norm.set_component_0_variable(args.ivt_u); + l2_norm.set_component_1_variable(args.ivt_v); + l2_norm.set_l2_norm_variable(args.ivt); + + l2_norm.set_input_connection(head.get_output_port()) + + head = l2_norm + +# coordinate normalization +coords = teca_normalize_coordinates.New() +coords.set_input_connection(head.get_output_port()) + +# ar detector +ar_detect = teca_deeplab_ar_detect.New() +ar_detect.set_input_connection(coords.get_output_port()) +ar_detect.set_verbose(args.verbose) +ar_detect.set_ivt_variable(args.ivt) +ar_detect.set_target_device(args.target_device) +ar_detect.set_thread_pool_size(args.n_threads) +ar_detect.set_max_thread_pool_size(args.n_threads_max) +ar_detect.load_model(args.pytorch_model) + +# post detection segmentation +seg_atts = teca_metadata() +seg_atts["long_name"] = "binary indicator of atmospheric river" +seg_atts["description"] = "binary indicator of atmospheric river" +seg_atts["scheme"] = "deeplab" +seg_atts["version"] = "0.0" +seg_atts["note"] = "derived by thresholding ar_probability >= %f" \ + % args.binary_ar_threshold + +ar_tag = teca_binary_segmentation.New() +ar_tag.set_input_connection(ar_detect.get_output_port()) +ar_tag.set_threshold_mode(ar_tag.BY_VALUE) +ar_tag.set_threshold_variable("ar_probability") +ar_tag.set_segmentation_variable("ar_binary_tag") +ar_tag.set_low_threshold_value(args.binary_ar_threshold) +ar_tag.set_segmentation_variable_attributes(seg_atts) + +# configure the writer +exe = teca_index_executive.New() +writer = teca_cf_writer.New() +writer.set_input_connection(ar_tag.get_output_port()) +writer.set_executive(exe) +writer.set_thread_pool_size(1) +writer.set_file_name(args.output_file) +writer.set_steps_per_file(args.steps_per_file) +writer.set_first_step(args.first_step) 
+writer.set_last_step(args.last_step) + +point_arrays = ['ar_probability', 'ar_binary_tag'] +if args.compute_ivt and args.write_ivt: + point_arrays.append(args.ivt_u) + point_arrays.append(args.ivt_v) + +if ((args.compute_ivt or args.compute_ivt_magnitude) + and args.write_ivt_magnitude): + point_arrays.append(args.ivt) + +writer.set_point_arrays(point_arrays) + +if args.date_format: + writer.set_date_format(args.date_format) + +if args.start_date or args.end_date: + + # run the metadata reporting phase of the pipeline + md = reader.update_metadata() + + # get the time axis array attributes + atrs = md['attributes'] + + time_atts = atrs['time'] + calendar = time_atts['calendar'] + units = time_atts['units'] + + coords = md['coordinates'] + time = coords['t'] + + # convert date string to step, start date + if args.start_date: + first_step = coordinate_util.time_step_of(time, True, True, calendar, + units, args.start_date) + writer.set_first_step(first_step) + + # and end date + if args.end_date: + last_step = coordinate_util.time_step_of(time, False, True, calendar, + units, args.end_date) + writer.set_last_step(last_step) + +# run the pipeline +writer.update() + +teca_profiler.end_event('deeplab ar_detect') +teca_profiler.finalize() diff --git a/apps/teca_integrated_vapor_transport.cpp b/apps/teca_integrated_vapor_transport.cpp new file mode 100644 index 000000000..d078f6b31 --- /dev/null +++ b/apps/teca_integrated_vapor_transport.cpp @@ -0,0 +1,424 @@ +#include "teca_config.h" +#include "teca_cf_reader.h" +#include "teca_cf_writer.h" +#include "teca_index_executive.h" +#include "teca_normalize_coordinates.h" +#include "teca_metadata.h" +#include "teca_integrated_vapor_transport.h" +#include "teca_binary_segmentation.h" +#include "teca_l2_norm.h" +#include "teca_multi_cf_reader.h" +#include "teca_integrated_vapor_transport.h" +#include "teca_valid_value_mask.h" +#include "teca_mpi_manager.h" +#include "teca_coordinate_util.h" +#include "teca_table.h" +#include 
"teca_dataset_source.h" +#include "teca_app_util.h" +#include "calcalcs.h" + +#include +#include +#include +#include + +using namespace std; + +using boost::program_options::value; + +// -------------------------------------------------------------------------- +int main(int argc, char **argv) +{ + // initialize mpi + teca_mpi_manager mpi_man(argc, argv); + + // initialize command line options description + // set up some common options to simplify use for most + // common scenarios + int help_width = 100; + options_description basic_opt_defs( + "Basic usage:\n\n" + "The following options are the most commonly used. Information\n" + "on advanced options can be displayed using --advanced_help\n\n" + "Basic command line options", help_width, help_width - 4 + ); + basic_opt_defs.add_options() + ("input_file", value(), "\na teca_multi_cf_reader configuration file" + " identifying the set of NetCDF CF2 files to process. When present data is" + " read using the teca_multi_cf_reader. Use one of either --input_file or" + " --input_regex.\n") + + ("input_regex", value(), "\na teca_cf_reader regex identifying the" + " set of NetCDF CF2 files to process. When present data is read using the" + " teca_cf_reader. 
Use one of either --input_file or --input_regex.\n") + + ("specific_humidity", value()->default_value("Q"), + "\nname of variable with the 3D specific humidity field.\n") + + ("wind_u", value()->default_value("U"), + "\nname of variable with the 3D longitudinal component of the wind vector.\n") + + ("wind_v", value()->default_value("V"), + "\nname of variable with the 3D latitudinal component of the wind vector.\n") + + ("ivt_u", value()->default_value("IVT_U"), + "\nname to use for the longitudinal component of the integrated vapor transport vector.\n") + + ("ivt_v", value()->default_value("IVT_V"), + "\nname to use for the latitudinal component of the integrated vapor transport vector.\n") + + ("ivt", value()->default_value("IVT"), + "\nname of variable with the magnitude of integrated vapor transport (IVT)\n") + + ("write_ivt_magnitude", value()->default_value(0), + "\nwhen this is set to 1 magnitude of vector IVT is calculated. use --ivt_u and" + " --ivt_v to set the name of the IVT vector components and --ivt to set the name" + " of the result if needed.\n") + + ("write_ivt", value()->default_value(1), + "\nwhen this is set to 1 IVT vector is written to disk with the result. use" + " --ivt_u and --ivt_v to set the name of the IVT vector components of the" + " result if needed.\n") + + ("output_file", value()->default_value("IVT_%t%.nc"), + "\nA path and file name pattern for the output NetCDF files. %t% is replaced with a" + " human readable date and time corresponding to the time of the first time step in" + " the file. 
Use --cf_writer::date_format to change the formatting\n") + + ("steps_per_file", value()->default_value(128), + "\nnumber of time steps per output file\n") + + ("x_axis_variable", value()->default_value("lon"), + "\nname of x coordinate variable\n") + ("y_axis_variable", value()->default_value("lat"), + "\nname of y coordinate variable\n") + ("z_axis_variable", value()->default_value("plev"), + "\nname of z coordinate variable\n") + + ("first_step", value()->default_value(0), "\nfirst time step to process\n") + ("last_step", value()->default_value(-1), "\nlast time step to process\n") + + ("start_date", value(), "\nThe first time to process in 'Y-M-D h:m:s'" + " format. Note: There must be a space between the date and time specification\n") + ("end_date", value(), "\nThe last time to process in 'Y-M-D h:m:s' format\n") + + ("n_threads", value(), "\nSets the thread pool size on each MPI rank. When the default" + " value of -1 is used TECA will coordinate the thread pools across ranks such each" + " thread is bound to a unique physical core.\n") + + ("verbose", "\nenable extra terminal output\n") + + ("help", "\ndisplays documentation for application specific command line options\n") + ("advanced_help", "\ndisplays documentation for algorithm specific command line options\n") + ("full_help", "\ndisplays both basic and advanced documentation together\n") + ; + + // add all options from each pipeline stage for more advanced use + options_description advanced_opt_defs( + "Advanced usage:\n\n" + "The following list contains the full set options giving one full\n" + "control over all runtime modifiable parameters. 
The basic options\n" + "(see" "--help) map to these, and will override them if both are\n" + "specified.\n\n" + "integrated vapor transport pipeline:\n\n" + " (cf / mcf_reader)\n" + " \\\n" + " (ivt_integral)--(ivt_magnitude)\n" + " \\\n" + " (cf_writer)\n\n" + "Advanced command line options", help_width, help_width - 4 + ); + + // create the pipeline stages here, they contain the + // documentation and parse command line. + // objects report all of their properties directly + // set default options here so that command line options override + // them. while we are at it connect the pipeline + p_teca_cf_reader cf_reader = teca_cf_reader::New(); + cf_reader->get_properties_description("cf_reader", advanced_opt_defs); + + p_teca_multi_cf_reader mcf_reader = teca_multi_cf_reader::New(); + mcf_reader->get_properties_description("mcf_reader", advanced_opt_defs); + + p_teca_integrated_vapor_transport ivt_int = teca_integrated_vapor_transport::New(); + ivt_int->get_properties_description("ivt_integral", advanced_opt_defs); + ivt_int->set_specific_humidity_variable("Q"); + ivt_int->set_wind_u_variable("U"); + ivt_int->set_wind_v_variable("V"); + ivt_int->set_ivt_u_variable("IVT_U"); + ivt_int->set_ivt_v_variable("IVT_V"); + + p_teca_l2_norm l2_norm = teca_l2_norm::New(); + l2_norm->get_properties_description("ivt_magnitude", advanced_opt_defs); + l2_norm->set_component_0_variable("IVT_U"); + l2_norm->set_component_1_variable("IVT_V"); + l2_norm->set_l2_norm_variable("IVT"); + + p_teca_valid_value_mask vv_mask = teca_valid_value_mask::New(); + vv_mask->get_properties_description("vv_mask", advanced_opt_defs); + + // Add an executive for the writer + p_teca_index_executive exec = teca_index_executive::New(); + + // Add the writer + p_teca_cf_writer cf_writer = teca_cf_writer::New(); + cf_writer->get_properties_description("cf_writer", advanced_opt_defs); + cf_writer->set_verbose(0); + cf_writer->set_steps_per_file(128); + + // package basic and advanced options for display + 
options_description all_opt_defs(-1, -1); + all_opt_defs.add(basic_opt_defs).add(advanced_opt_defs); + + // parse the command line + variables_map opt_vals; + if (teca_app_util::process_command_line_help(mpi_man.get_comm_rank(), + argc, argv, basic_opt_defs, advanced_opt_defs, all_opt_defs, opt_vals)) + { + return -1; + } + + // pass command line arguments into the pipeline objects + // advanced options are processed first, so that the basic + // options will override them + cf_reader->set_properties("cf_reader", opt_vals); + mcf_reader->set_properties("mcf_reader", opt_vals); + vv_mask->set_properties("vv_mask", opt_vals); + ivt_int->set_properties("ivt_integral", opt_vals); + l2_norm->set_properties("ivt_magnitude", opt_vals); + cf_writer->set_properties("cf_writer", opt_vals); + + // now pass in the basic options, these are processed + // last so that they will take precedence + // configure the pipeline from the command line options. + p_teca_algorithm head; + + // configure the reader + bool have_file = opt_vals.count("input_file"); + bool have_regex = opt_vals.count("input_regex"); + + if (opt_vals.count("input_file")) + { + mcf_reader->set_input_file(opt_vals["input_file"].as()); + head = mcf_reader; + } + else if (opt_vals.count("input_regex")) + { + cf_reader->set_files_regex(opt_vals["input_regex"].as()); + head = cf_reader; + } + p_teca_algorithm reader = head; + + if (!opt_vals["x_axis_variable"].defaulted()) + { + cf_reader->set_x_axis_variable(opt_vals["x_axis_variable"].as()); + mcf_reader->set_x_axis_variable(opt_vals["x_axis_variable"].as()); + } + + if (!opt_vals["y_axis_variable"].defaulted()) + { + cf_reader->set_y_axis_variable(opt_vals["y_axis_variable"].as()); + mcf_reader->set_y_axis_variable(opt_vals["y_axis_variable"].as()); + } + + // set the inputs to the integrator + if (!opt_vals["wind_u"].defaulted()) + { + ivt_int->set_wind_u_variable(opt_vals["wind_u"].as()); + } + + if (!opt_vals["wind_v"].defaulted()) + { + 
ivt_int->set_wind_v_variable(opt_vals["wind_v"].as()); + } + + if (!opt_vals["specific_humidity"].defaulted()) + { + ivt_int->set_specific_humidity_variable( + opt_vals["specific_humidity"].as()); + } + + // set all that use or produce ivt + if (!opt_vals["ivt_u"].defaulted()) + { + ivt_int->set_ivt_u_variable(opt_vals["ivt_u"].as()); + l2_norm->set_component_0_variable(opt_vals["ivt_u"].as()); + } + + if (!opt_vals["ivt_v"].defaulted()) + { + ivt_int->set_ivt_v_variable(opt_vals["ivt_v"].as()); + l2_norm->set_component_1_variable(opt_vals["ivt_v"].as()); + } + + if (!opt_vals["ivt"].defaulted()) + { + l2_norm->set_l2_norm_variable(opt_vals["ivt"].as()); + } + + // add the valid value mask stage + vv_mask->set_input_connection(head->get_output_port()); + head = vv_mask; + + // add the ivt caluation stages if needed + bool do_ivt = opt_vals["write_ivt"].as(); + bool do_ivt_magnitude = opt_vals["write_ivt_magnitude"].as(); + + std::string z_var = "plev"; + if (!opt_vals["z_axis_variable"].defaulted()) + z_var = opt_vals["z_axis_variable"].as(); + + cf_reader->set_z_axis_variable(z_var); + mcf_reader->set_z_axis_variable(z_var); + + ivt_int->set_input_connection(head->get_output_port()); + + if (do_ivt_magnitude) + { + l2_norm->set_input_connection(ivt_int->get_output_port()); + head = l2_norm; + } + + // tell the writer to write ivt if needed + std::vector point_arrays; + if (do_ivt) + { + point_arrays.push_back(ivt_int->get_ivt_u_variable()); + point_arrays.push_back(ivt_int->get_ivt_v_variable()); + } + if (do_ivt_magnitude) + { + point_arrays.push_back(l2_norm->get_l2_norm_variable()); + } + cf_writer->set_point_arrays(point_arrays); + + cf_writer->set_file_name(opt_vals["output_file"].as()); + + if (!opt_vals["steps_per_file"].defaulted()) + cf_writer->set_steps_per_file(opt_vals["steps_per_file"].as()); + + if (!opt_vals["first_step"].defaulted()) + cf_writer->set_first_step(opt_vals["first_step"].as()); + + if (!opt_vals["last_step"].defaulted()) + 
cf_writer->set_last_step(opt_vals["last_step"].as()); + + if (opt_vals.count("verbose")) + { + cf_writer->set_verbose(1); + exec->set_verbose(1); + } + + if (!opt_vals["n_threads"].defaulted()) + cf_writer->set_thread_pool_size(opt_vals["n_threads"].as()); + else + cf_writer->set_thread_pool_size(-1); + + // some minimal check for missing options + if ((have_file && have_regex) || !(have_file || have_regex)) + { + if (mpi_man.get_comm_rank() == 0) + { + TECA_ERROR("Exactly one of --input_file or --input_regex can be specified. " + "Use --input_file to activate the multi_cf_reader (HighResMIP datasets) " + "and --input_regex to activate the cf_reader (CAM like datasets)") + } + return -1; + } + + if (!(do_ivt || do_ivt_magnitude)) + { + if (mpi_man.get_comm_rank() == 0) + { + TECA_ERROR("At least one of --write_ivt or --write_ivt_magnitude " + " must be set.") + } + return -1; + } + + if (cf_writer->get_file_name().empty()) + { + if (mpi_man.get_comm_rank() == 0) + { + TECA_ERROR("missing file name pattern for netcdf writer. 
" + "See --help for a list of command line options.") + } + return -1; + } + + // connect the fixed stages of the pipeline + cf_writer->set_input_connection(head->get_output_port()); + + // look for requested time step range, start + bool parse_start_date = opt_vals.count("start_date"); + bool parse_end_date = opt_vals.count("end_date"); + if (parse_start_date || parse_end_date) + { + // run the reporting phase of the pipeline + teca_metadata md = reader->update_metadata(); + + teca_metadata atrs; + if (md.get("attributes", atrs)) + { + TECA_ERROR("metadata missing attributes") + return -1; + } + + teca_metadata time_atts; + std::string calendar; + std::string units; + if (atrs.get("time", time_atts) + || time_atts.get("calendar", calendar) + || time_atts.get("units", units)) + { + TECA_ERROR("failed to determine the calendaring parameters") + return -1; + } + + teca_metadata coords; + p_teca_double_array time; + if (md.get("coordinates", coords) || + !(time = std::dynamic_pointer_cast( + coords.get("t")))) + { + TECA_ERROR("failed to determine time coordinate") + return -1; + } + + // convert date string to step, start date + if (parse_start_date) + { + unsigned long first_step = 0; + std::string start_date = opt_vals["start_date"].as(); + if (teca_coordinate_util::time_step_of(time, true, true, calendar, + units, start_date, first_step)) + { + TECA_ERROR("Failed to locate time step for start date \"" + << start_date << "\"") + return -1; + } + cf_writer->set_first_step(first_step); + } + + // and end date + if (parse_end_date) + { + unsigned long last_step = 0; + std::string end_date = opt_vals["end_date"].as(); + if (teca_coordinate_util::time_step_of(time, false, true, calendar, + units, end_date, last_step)) + { + TECA_ERROR("Failed to locate time step for end date \"" + << end_date << "\"") + return -1; + } + cf_writer->set_last_step(last_step); + } + } + + // run the pipeline + cf_writer->set_executive(exec); + cf_writer->update(); + + return 0; +} diff 
--git a/apps/teca_metadata_probe.cpp b/apps/teca_metadata_probe.cpp index 2a9a1bbdf..6d9f3d192 100644 --- a/apps/teca_metadata_probe.cpp +++ b/apps/teca_metadata_probe.cpp @@ -1,9 +1,15 @@ #include "teca_config.h" #include "teca_metadata.h" +#include "teca_netcdf_util.h" #include "teca_cf_reader.h" +#include "teca_multi_cf_reader.h" #include "teca_array_collection.h" #include "teca_variant_array.h" #include "teca_coordinate_util.h" +#include "teca_mpi_manager.h" +#include "teca_system_interface.h" +#include "teca_app_util.h" + #if defined(TECA_HAS_UDUNITS) #include "calcalcs.h" #endif @@ -12,6 +18,7 @@ #include #include #include +#include #include #include @@ -21,23 +28,48 @@ using boost::program_options::value; // -------------------------------------------------------------------------- int main(int argc, char **argv) { - // initialize comand line options description - // set up some comon options to simplify use for most - // comon scenarios + teca_mpi_manager mpi_man(argc, argv); + int rank = mpi_man.get_comm_rank(); + + teca_system_interface::set_stack_trace_on_error(); + teca_system_interface::set_stack_trace_on_mpi_error(); + + // initialize comand line options description set up some comon options to + // simplify use for most comon scenarios + int help_width = 100; options_description basic_opt_defs( "Basic usage:\n\n" "The following options are the most comonly used. 
Information\n" - "on advanced options can be displayed using --advanced_help\n\n" - "Basic comand line options", 120, -1 + "on all available options can be displayed using --advanced_help\n\n" + "Basic comand line options", help_width, help_width - 4 ); basic_opt_defs.add_options() - ("input_file", value(), "file path to the simulation to search for tropical cyclones") - ("input_regex", value(), "regex matching simulation files to search for tropical cylones") - ("start_date", value(), "first time to proces in Y-M-D h:m:s format") - ("end_date", value(), "first time to proces in Y-M-D h:m:s format") - ("help", "display the basic options help") - ("advanced_help", "display the advanced options help") - ("full_help", "display all options help") + + ("input_file", value(), "\na teca_multi_cf_reader configuration file" + " identifying the set of NetCDF CF2 files to process. When present data is" + " read using the teca_multi_cf_reader. Use one of either --input_file or" + " --input_regex.\n") + + ("input_regex", value(), "\na teca_cf_reader regex identyifying the" + " set of NetCDF CF2 files to process. When present data is read using the" + " teca_cf_reader. Use one of either --input_file or --input_regex.\n") + + ("x_axis_variable", value()->default_value("lon"), + "\nname of x coordinate variable\n") + ("y_axis_variable", value()->default_value("lat"), + "\nname of y coordinate variable\n") + ("z_axis_variable", value()->default_value(""), + "\nname of z coordinate variable. When processing 3D set this to" + " the variable containing vertical coordinates. When empty the" + " data will be treated as 2D.\n") + + ("start_date", value(), "\nThe first time to process in 'Y-M-D h:m:s'" + " format. 
Note: There must be a space between the date and time specification\n") + ("end_date", value(), "\nThe last time to process in 'Y-M-D h:m:s' format\n") + + ("help", "\ndisplays documentation for application specific command line options\n") + ("advanced_help", "\ndisplays documentation for algorithm specific command line options\n") + ("full_help", "\ndisplays both basic and advanced documentation together\n") ; // add all options from each pipeline stage for more advanced use @@ -47,7 +79,7 @@ int main(int argc, char **argv) "control over all runtime modifiable parameters. The basic options\n" "(see" "--help) map to these, and will override them if both are\n" "specified.\n\n" - "Advanced comand line options", 120, -1 + "Advanced comand line options", help_width, help_width - 4 ); // create the pipeline stages here, they contain the @@ -55,73 +87,74 @@ int main(int argc, char **argv) // objects report all of their properties directly // set default options here so that comand line options override // them. 
while we are at it connect the pipeline - p_teca_cf_reader sim_reader = teca_cf_reader::New(); - sim_reader->get_properties_description("sim_reader", advanced_opt_defs); + p_teca_cf_reader cf_reader = teca_cf_reader::New(); + cf_reader->get_properties_description("cf_reader", advanced_opt_defs); + + p_teca_multi_cf_reader mcf_reader = teca_multi_cf_reader::New(); + mcf_reader->get_properties_description("mcf_reader", advanced_opt_defs); // package basic and advanced options for display - options_description all_opt_defs(-1, -1); + options_description all_opt_defs(help_width, help_width - 4); all_opt_defs.add(basic_opt_defs).add(advanced_opt_defs); - // parse the comand line + // parse the command line variables_map opt_vals; - try + if (teca_app_util::process_command_line_help(mpi_man.get_comm_rank(), + argc, argv, basic_opt_defs, advanced_opt_defs, all_opt_defs, opt_vals)) { - boost::program_options::store( - boost::program_options::command_line_parser(argc, argv).options(all_opt_defs).run(), - opt_vals); - - if (opt_vals.count("help")) - { - cerr << endl - << "usage: teca_data_probe [options]" << endl - << endl - << basic_opt_defs << endl - << endl; - return -1; - } - if (opt_vals.count("advanced_help")) - { - cerr << endl - << "usage: teca_data_probe [options]" << endl - << endl - << advanced_opt_defs << endl - << endl; - return -1; - } - - if (opt_vals.count("full_help")) - { - cerr << endl - << "usage: teca_data_probe [options]" << endl - << endl - << all_opt_defs << endl - << endl; - return -1; - } - - boost::program_options::notify(opt_vals); - } - catch (std::exception &e) - { - TECA_ERROR("Error parsing comand line options. See --help " - "for a list of supported options. 
" << e.what()) return -1; } // pass comand line arguments into the pipeline objects // advanced options are procesed first, so that the basic // options will override them - sim_reader->set_properties("sim_reader", opt_vals); + cf_reader->set_properties("cf_reader", opt_vals); + mcf_reader->set_properties("mcf_reader", opt_vals); // now pas in the basic options, these are procesed // last so that they will take precedence - if (opt_vals.count("input_file")) - sim_reader->append_file_name( - opt_vals["input_file"].as()); + if (!opt_vals["x_axis_variable"].defaulted()) + { + cf_reader->set_x_axis_variable(opt_vals["x_axis_variable"].as()); + mcf_reader->set_x_axis_variable(opt_vals["x_axis_variable"].as()); + } + + if (!opt_vals["y_axis_variable"].defaulted()) + { + cf_reader->set_y_axis_variable(opt_vals["y_axis_variable"].as()); + mcf_reader->set_y_axis_variable(opt_vals["y_axis_variable"].as()); + } + + if (!opt_vals["z_axis_variable"].defaulted()) + { + cf_reader->set_z_axis_variable(opt_vals["z_axis_variable"].as()); + mcf_reader->set_z_axis_variable(opt_vals["z_axis_variable"].as()); + } + + std::string x_var; + std::string y_var; + std::string z_var; - if (opt_vals.count("input_regex")) - sim_reader->set_files_regex( - opt_vals["input_regex"].as()); + bool have_file = opt_vals.count("input_file"); + bool have_regex = opt_vals.count("input_regex"); + + p_teca_algorithm reader; + if (opt_vals.count("input_file")) + { + mcf_reader->set_input_file(opt_vals["input_file"].as()); + x_var = mcf_reader->get_x_axis_variable(); + y_var = mcf_reader->get_y_axis_variable(); + z_var = mcf_reader->get_z_axis_variable(); + reader = mcf_reader; + } + else if (opt_vals.count("input_regex")) + { + cf_reader->set_files_regex(opt_vals["input_regex"].as()); + x_var = cf_reader->get_x_axis_variable(); + y_var = cf_reader->get_y_axis_variable(); + z_var = cf_reader->get_z_axis_variable(); + reader = cf_reader; + } std::string time_i; if (opt_vals.count("start_date")) @@ -132,148 
+165,307 @@ int main(int argc, char **argv) time_j = opt_vals["end_date"].as(); // some minimal check for mising options - if (sim_reader->get_number_of_file_names() == 0 - && sim_reader->get_files_regex().empty()) + if ((have_file && have_regex) || !(have_file || have_regex)) { - TECA_ERROR( - "mising file name or regex for simulation reader. " - "See --help for a list of comand line options.") + if (rank == 0) + { + TECA_ERROR("Extacly one of --input_file or --input_regex can be specified. " + "Use --input_file to activate the multi_cf_reader (HighResMIP datasets) " + "and --input_regex to activate the cf_reader (CAM like datasets)") + } return -1; } // run the reporting phase of the pipeline - teca_metadata md = sim_reader->update_metadata(); - //md.to_stream(cerr); + teca_metadata md = reader->update_metadata(); - // extract metadata - if (!md.has("files")) + // from here on out just rank 0 + if (rank == 0) { - TECA_ERROR("no files were located") - return -1; - } - int n_files = md.get("files")->size(); + //md.to_stream(cerr); - teca_metadata atrs; - if (md.get("attributes", atrs)) - { - TECA_ERROR("metadata mising attributes") - return -1; - } + // extract metadata + int n_files = -1; + if (md.has("files")) + { + n_files = md.get("files")->size(); + } - teca_metadata time_atts; - std::string calendar; - std::string units; - if (atrs.get("time", time_atts) - || time_atts.get("calendar", calendar) - || time_atts.get("units", units)) - { - TECA_ERROR("failed to determine the calendaring parameters") - return -1; - } + teca_metadata atrs; + if (md.get("attributes", atrs)) + { + TECA_ERROR("metadata mising attributes") + return -1; + } - teca_metadata coords; - p_teca_double_array time; - if (md.get("coordinates", coords) - || !(time = std::dynamic_pointer_cast(coords.get("t")))) - { - TECA_ERROR("failed to determine time coordinate") - return -1; - } + teca_metadata time_atts; + std::string calendar; + std::string units; + if (atrs.get("time", time_atts) + || 
time_atts.get("calendar", calendar) + || time_atts.get("units", units)) + { + TECA_ERROR("failed to determine the calendaring parameters") + return -1; + } - unsigned long n_time_steps = 0; - if (md.get("number_of_time_steps", n_time_steps)) - { - TECA_ERROR("failed to deermine the number of steps") - return -1; - } + teca_metadata coords; + p_teca_variant_array t; + if (md.get("coordinates", coords) || !(t = coords.get("t"))) + { + TECA_ERROR("failed to determine time coordinate") + return -1; + } + + p_teca_double_array time = + std::dynamic_pointer_cast(t); + + if (!time) + { + // convert to double precision + size_t n = t->size(); + time = teca_double_array::New(n); + double *p_time = time->get(); + TEMPLATE_DISPATCH(teca_variant_array_impl, + t.get(), + NT *p_t = std::dynamic_pointer_cast(t)->get(); + for (size_t i = 0; i < n; ++i) + p_time[i] = static_cast(p_t[i]); + ) + } - unsigned long i0 = 0; - unsigned long i1 = n_time_steps - 1; + unsigned long n_time_steps = 0; + if (md.get("number_of_time_steps", n_time_steps)) + { + TECA_ERROR("failed to deermine the number of steps") + return -1; + } + + unsigned long i0 = 0; + unsigned long i1 = n_time_steps - 1; - // human readable first time available - int Y = 0, M = 0, D = 0, h = 0, m = 0; - double s = 0; + // human readable first time available + int Y = 0, M = 0, D = 0, h = 0, m = 0; + double s = 0; #if defined(TECA_HAS_UDUNITS) - if (calcalcs::date(time->get(i0), &Y, &M, &D, &h, &m, &s, - units.c_str(), calendar.c_str())) - { - TECA_ERROR("failed to detmine the first available time in the file") - return -1; - } + if (calcalcs::date(time->get(i0), &Y, &M, &D, &h, &m, &s, + units.c_str(), calendar.c_str())) + { + TECA_ERROR("failed to detmine the first available time in the file") + return -1; + } #else - TECA_ERROR("UDUnits is required for human readable dates") + TECA_ERROR("UDUnits is required for human readable dates") #endif - std::ostringstream oss; - oss << Y << "-" << M << "-" << D << " " << h << ":" 
<< m << ":" << s; - std::string time_0(oss.str()); + std::ostringstream oss; + oss << Y << "-" << M << "-" << D << " " << h << ":" << m << ":" << s; + std::string time_0(oss.str()); - // human readbale last time available - Y = 0, M = 0, D = 0, h = 0, m = 0, s = 0; + // human readbale last time available + Y = 0, M = 0, D = 0, h = 0, m = 0, s = 0; #if defined(TECA_HAS_UDUNITS) - if (calcalcs::date(time->get(i1), &Y, &M, &D, &h, &m, &s, - units.c_str(), calendar.c_str())) - { - TECA_ERROR("failed to detmine the last available time in the file") - return -1; - } + if (calcalcs::date(time->get(i1), &Y, &M, &D, &h, &m, &s, + units.c_str(), calendar.c_str())) + { + TECA_ERROR("failed to detmine the last available time in the file") + return -1; + } #endif - oss.str(""); - oss << Y << "-" << M << "-" << D << " " << h << ":" << m << ":" << s; - std::string time_n(oss.str()); + oss.str(""); + oss << Y << "-" << M << "-" << D << " " << h << ":" << m << ":" << s; + std::string time_n(oss.str()); - // look for requested time step range, start - if (!time_i.empty()) - { - if (teca_coordinate_util::time_step_of( - time, true, calendar, units, time_i, i0)) + // look for requested time step range, start + if (!time_i.empty()) { - TECA_ERROR("Failed to locate time step for start date \"" - << time_i << "\"") - return -1; + if (teca_coordinate_util::time_step_of( + time, true, true, calendar, units, time_i, i0)) + { + TECA_ERROR("Failed to locate time step for start date \"" + << time_i << "\"") + return -1; + } } - } - // end step - if (!time_j.empty()) - { - if (teca_coordinate_util::time_step_of( - time, false, calendar, units, time_j, i1)) + // end step + if (!time_j.empty()) { - TECA_ERROR("Failed to locate time step for end date \"" - << time_j << "\"") - return -1; + if (teca_coordinate_util::time_step_of( + time, false, true, calendar, units, time_j, i1)) + { + TECA_ERROR("Failed to locate time step for end date \"" + << time_j << "\"") + return -1; + } } - } - oss.str(""); 
- oss << "A total of " << n_time_steps << " steps available in " << n_files - << " files. Using the " << calendar << " calendar. Times are specified in units of " - << units << ". The available times range from " << time_0 << " (" << time->get(0) - << ") to " << time_n << " (" << time->get(time->size()-1) << ")."; + oss.str(""); + oss << "A total of " << n_time_steps << " steps available"; + if (n_files > 0) + oss << " in " << n_files << " files"; + oss << ". Using the " << calendar << " calendar. Times are specified in units of " + << units << ". The available times range from " << time_0 << " (" << time->get(0) + << ") to " << time_n << " (" << time->get(time->size()-1) << ")."; - if (!time_i.empty() || !time_j.empty()) - { - oss << " The requested range contains " << i1 - i0 + 1 << " time steps and ranges from " - << time_i << " (" << time->get(i0) << ") to " << time_j << " (" << time->get(i1) << ") " - << "), starts at time step " << i0 << " and goes to time step " << i1; - } + if (!time_i.empty() || !time_j.empty()) + { + oss << " The requested range contains " << i1 - i0 + 1 << " time steps and ranges from " + << time_i << " (" << time->get(i0) << ") to " << time_j << " (" << time->get(i1) << ") " + << "), starts at time step " << i0 << " and goes to time step " << i1; + } - std::string report(oss.str()); - std::string::iterator it = report.begin(); - std::string::iterator end = report.end(); - unsigned long i = 0; - unsigned long line_len = 74; - for (; it != end; ++it, ++i) - { - if ((i >= line_len) && (*it == ' ')) + std::string report(oss.str()); + + std::string::iterator it = report.begin(); + std::string::iterator end = report.end(); + unsigned long i = 0; + unsigned long line_len = 74; + std::cerr << std::endl; + for (; it != end; ++it, ++i) + { + if ((i >= line_len) && (*it == ' ')) + { + std::cerr << std::endl; + ++it; + i = 0; + } + std::cerr << *it; + } + std::cerr << std::endl << std::endl; + + // report the mesh dimensionality and coordinates + 
std::cerr << "Mesh dimension: " << (z_var.empty() ? 2 : 3) + << "D" << std::endl; + + std::cerr << "Mesh coordinates: " << x_var << ", " << y_var; + if (!z_var.empty()) { - cerr << endl; - ++it; - i = 0; + std::cerr << ", " << z_var; } - cerr << *it; + std::cerr << std::endl; + + // report the arrays + size_t n_arrays = atrs.size(); + + // column widths + int aiw = 0; + int anw = 0; + int atw = 0; + int adw = 0; + int asw = 0; + + // column data + std::vector ai; + std::vector an; + std::vector at; + std::vector ad; + std::vector as; + + ai.reserve(n_arrays); + an.reserve(n_arrays); + at.reserve(n_arrays); + ad.reserve(n_arrays); + as.reserve(n_arrays); + + for (size_t i = 0; i < n_arrays; ++i) + { + std::string array; + atrs.get_name(i, array); + + // get metadata + teca_metadata atts; + int type = 0; + int id = 0; + p_teca_size_t_array dims; + p_teca_string_array dim_names; + + if (atrs.get(array, atts) + || atts.get("cf_type_code", 0, type) + || atts.get("cf_id", 0, id) + || !(dims = std::dynamic_pointer_cast(atts.get("cf_dims"))) + || !(dim_names = std::dynamic_pointer_cast(atts.get("cf_dim_names")))) + { + // TODO -- Michael's CAM5 sometimes triggers this with an empty array name + //TECA_ERROR("metadata issue in array " << i << "\"" << array << "\"") + continue; + } + + // id + ai.push_back(std::to_string(i+1)); + aiw = std::max(aiw, ai.back().size() + 4); + + // name + an.push_back(array); + anw = std::max(anw, an.back().size() + 4); + + // type + NC_DISPATCH(type, + at.push_back(teca_netcdf_util::netcdf_tt::name()); + ) + atw = std::max(atw, at.back().size() + 4); + + // dims + int n_dims = dim_names->size(); + + oss.str(""); + oss << "[" << dim_names->get(0); + for (int i = 1; i < n_dims; ++i) + { + oss << ", " << dim_names->get(i); + } + oss << "]"; + ad.push_back(oss.str()); + adw = std::max(adw, ad.back().size() + 4); + + // shape + oss.str(""); + if (dim_names->get(0) == "time") + oss << "[" << n_time_steps; + else + oss << "[" << dims->get(0); + for 
(int i = 1; i < n_dims; ++i) + { + if (dim_names->get(i) == "time") + oss << ", " << n_time_steps; + else + oss << ", " << dims->get(i); + } + oss << "]"; + as.push_back(oss.str()); + asw = std::max(asw, as.back().size() + 4); + } + + // update with the number found + n_arrays = ai.size(); + + std::cerr << std::endl + << n_arrays << " data arrays available" << std::endl << std::endl + << " " + << std::setw(aiw) << std::left << "Id" + << std::setw(anw) << std::left << "Name" + << std::setw(atw) << std::left << "Type" + << std::setw(adw) << std::left << "Dimensions" + << std::setw(asw) << std::left << "Shape" << std::endl; + + int tw = anw + atw + adw + asw; + for (int i = 0; i < tw; ++i) + std::cerr << '-'; + std::cerr << std::endl; + + for (size_t i = 0; i < n_arrays; ++i) + { + std::cerr + << " " + << std::setw(aiw) << std::left << ai[i] + << std::setw(anw) << std::left << an[i] + << std::setw(atw) << std::left << at[i] + << std::setw(adw) << std::left << ad[i] + << std::setw(asw) << std::left << as[i] + << std::endl; + } + + std::cerr << std::endl; } - cerr << endl; return 0; } diff --git a/apps/teca_profile_explorer.in b/apps/teca_profile_explorer.in index 3790a791d..b41dd15df 100755 --- a/apps/teca_profile_explorer.in +++ b/apps/teca_profile_explorer.in @@ -157,7 +157,7 @@ class profiler_data: -class dsiplay_rank_events: +class display_rank_events: def __init__(self): self.data = None self.axes = None @@ -165,7 +165,7 @@ class dsiplay_rank_events: self.patches = None self.verbose = False - def initialize(self, data, ax): + def initialize(self, data, ax, title): """ renders the data to axes ax and sets up the click handlers to print event data when the plot is clicked on """ @@ -218,7 +218,7 @@ class dsiplay_rank_events: cmap = [] for cmap_name in cmap_names: - cmap.append(plt.get_cmap(cmap_name, cmap_n)) + cmap.append(plt.get_cmap(cmap_name, 1.5*cmap_n)) # in a patch collection all rectangles have # the same zorder, hence we need a collection @@ -248,12 +248,12 
@@ class dsiplay_rank_events: edt = data.delta_t[i] if self.verbose: - sys.stderr.write('rank=%d event_index=%d thread=%d ' \ - 'event_id=%d event_name=%s rect = %s\n'%( \ - data.rank, i, tid, eid, enm, str([et0, tid*dy, \ - edt, dy]))) + sys.stderr.write('%srank=%d event_index=%d thread=%d ' \ + 'event_id=%d event_name=%s rect = %s\n' % ( + title, data.rank, i, tid, eid, enm, str([et0, + tid*dy, edt, dy]))) else: - if int(i % (ne // 78)) == 0: + if (ne < 78) or int(i % (ne // 78)) == 0: sys.stderr.write('=') sys.stderr.flush() @@ -263,24 +263,24 @@ class dsiplay_rank_events: if ' thread_pool ' in enm: fa = 0.5 - ec = fc + ec = 'b' ew = 3 fill = True pz = 2 else: fa = 0.9 - ec = fc - ew = None + ec = 'k' + ew = 1 fill = True pz = 3 dep = data.depth[i] - df = dep*.15*dy + df = dep*(0.5 - 0.125)/float(max_depth) - rect = Rectangle((et0, tid*dy+df), edt, dy-2*df, \ - alpha=1, facecolor=fc, edgecolor=ec, \ - linewidth=ew, zorder=dep+1, fill=fill, \ - label=enm) + rect = Rectangle((et0, tid*dy+df), edt, dy-2.*df, + alpha=1, facecolor=fc, edgecolor=ec, + linewidth=ew, zorder=dep+1, fill=fill, + label=enm) #ax.add_patch(rect) self.patches[dep].append(rect) @@ -308,12 +308,13 @@ class dsiplay_rank_events: mmx = np.max(self.data.mem_use) self.data.mem_use = self.data.mem_use/mmx * n_threads*dy - plt.plot(self.data.mem_time, self.data.mem_use, 'k--', linewidth=3) + plt.plot(self.data.mem_time, self.data.mem_use, 'r--', linewidth=3) - plt.title('MPI rank %d event times, max RSS %0.1f MiB'%( \ - data.rank, mmx/(2.**10)), fontweight='bold') + plt.title('%sMPI rank %d event times, max RSS %0.1f MiB'%( + title, data.rank, mmx/(2.**10)), fontweight='bold') else: - plt.title('MPI rank %d event times'%(data.rank), fontweight='bold') + plt.title('%sMPI rank %d event times'%(title, data.rank), + fontweight='bold') pfx = 0.01 pfy = 0.10 @@ -348,10 +349,13 @@ class dsiplay_rank_events: for i in ids['ind']: ii = self.patch_ids[d][i] #rect = self.patches[i] - sys.stderr.write( \ - 
'rank=%d i=%d ii=%d event_name=%s thread=%d start=%g end=%g duration=%g depth=%d\n'%( \ - self.data.rank, i, ii, self.data.event_name[ii], self.data.thread_id[ii], self.data.start_t[ii], \ - self.data.end_t[ii], self.data.delta_t[ii], self.data.depth[ii])) + sys.stderr.write( + 'rank=%d i=%d ii=%d event_name=%s thread=%d start=%g' \ + ' end=%g duration=%g depth=%d\n'%( + self.data.rank, i, ii, self.data.event_name[ii], + self.data.thread_id[ii], self.data.start_t[ii], + self.data.end_t[ii], self.data.delta_t[ii], + self.data.depth[ii])) if n_hits > 0: sys.stderr.write('\n') @@ -363,20 +367,26 @@ class dsiplay_rank_events: parser = argparse.ArgumentParser(prog='teca_profile_explorer') -parser.add_argument('-e', '--event_file', required=True, type=str, \ - help='path to a TECA profiler event data file') +parser.add_argument('-e', '--event_file', required=True, type=str, + help='path to a TECA profiler event data file') -parser.add_argument('-m', '--mem_file', required=False, type=str, \ - default=None, help='path to a TECA profiler memory data file') +parser.add_argument('-m', '--mem_file', required=False, type=str, default=None, + help='path to a TECA profiler memory data file') -parser.add_argument('-r', '--ranks', nargs='+', required=True, \ - type=int, help='path to a TECA profiler data file') +parser.add_argument('-r', '--ranks', nargs='+', required=True, type=int, + help='path to a TECA profiler data file') -parser.add_argument('-x', '--xlim', required=False, type=float, \ - default=-1., help='set the high x axis limmit used in plots') +parser.add_argument('-o', '--out_prefix', required=False, type=str, default='', + help='a string that is preprended to output file names') -parser.add_argument('-v', '--verbose', required=False, type=int, \ - default=0, help='verbosity level') +parser.add_argument('-t', '--title', required=False, type=str, default='', + help='a string that is preprended to plot titles') + +parser.add_argument('-x', '--xlim', required=False, 
type=float, default=-1., + help='set the high x axis limmit used in plots') + +parser.add_argument('-v', '--verbose', required=False, type=int, + default=0, help='set the verbosity level') args = parser.parse_args() @@ -386,22 +396,22 @@ data.verbose = args.verbose > 0 data.initialize(args.event_file, args.mem_file) dres = [] -for r in args.ranks: +for rank in args.ranks: # get this rank's data - subset = data.subset(r) + subset = data.subset(rank) # size the plot n_threads = subset.n_threads fig_width = 12 - fig_height = 0.625*n_threads + fig_height = 0.625*(n_threads + 2) fig = plt.figure(figsize=(fig_width, fig_height)) # draw the plot & connect the event hander ax = plt.gca() - dre = dsiplay_rank_events() + dre = display_rank_events() dre.verbose = args.verbose > 1 - dre.initialize(subset, ax) + dre.initialize(subset, ax, args.title) dre.connect() dres.append(dre) @@ -411,7 +421,9 @@ for r in args.ranks: x0,x1 = plt.xlim() plt.xlim(x0, args.xlim) - plt.savefig('rank_profile_data_%d.png'%(r), dpi=200) + plt.savefig('%srank_profile_data_%d.png'%( + args.out_prefix,rank), + dpi=200) plt.show() diff --git a/apps/teca_table_diff.cpp b/apps/teca_table_diff.cpp new file mode 100644 index 000000000..867efdb9e --- /dev/null +++ b/apps/teca_table_diff.cpp @@ -0,0 +1,83 @@ +#include "teca_config.h" +#include "teca_table_reader.h" +#include "teca_table_writer.h" +#include "teca_table_sort.h" +#include "teca_dataset_diff.h" +#include "teca_file_util.h" +#include "teca_mpi_manager.h" +#include "teca_system_interface.h" +#include "teca_mpi.h" + +#include +#include +#include +using namespace std; + + +int main(int argc, char **argv) +{ + teca_mpi_manager mpi_man(argc, argv); + int rank = mpi_man.get_comm_rank(); + + teca_system_interface::set_stack_trace_on_error(); + teca_system_interface::set_stack_trace_on_mpi_error(); + + if (argc < 3) + { + if (rank == 0) + { + cerr << endl + << "Usage:" << endl + << "teca_table_diff [reference file] [test file] " + "[column to sort by 
(optional)]" << endl + << endl; + } + return -1; + } + + // parse command line + std::string ref_file = argv[1]; + std::string t_file = argv[2]; + std::string column_name; + if (argc == 4) column_name = argv[3]; + + + if (!teca_file_util::file_exists(ref_file.c_str())) + { + TECA_ERROR("no reference file to compare to"); + return -1; + } + + if (!teca_file_util::file_exists(t_file.c_str())) + { + TECA_ERROR("test file doesn't exist"); + return -1; + } + + p_teca_table_reader ref_reader = teca_table_reader::New(); + ref_reader->set_file_name(ref_file); + + p_teca_table_reader t_reader = teca_table_reader::New(); + t_reader->set_file_name(t_file); + + p_teca_table_sort sort = teca_table_sort::New(); + + p_teca_dataset_diff diff = teca_dataset_diff::New(); + diff->set_input_connection(0, ref_reader->get_output_port()); + + if (!column_name.empty()) + { + sort->set_input_connection(t_reader->get_output_port()); + sort->set_index_column(column_name); + + diff->set_input_connection(1, sort->get_output_port()); + } + else + { + diff->set_input_connection(1, t_reader->get_output_port()); + } + + diff->update(); + + return 0; +} diff --git a/apps/teca_tc_detect.cpp b/apps/teca_tc_detect.cpp index 799040d83..f8483f171 100644 --- a/apps/teca_tc_detect.cpp +++ b/apps/teca_tc_detect.cpp @@ -1,5 +1,6 @@ #include "teca_config.h" #include "teca_cf_reader.h" +#include "teca_multi_cf_reader.h" #include "teca_normalize_coordinates.h" #include "teca_l2_norm.h" #include "teca_vorticity.h" @@ -17,6 +18,7 @@ #include "teca_table_writer.h" #include "teca_mpi_manager.h" #include "teca_coordinate_util.h" +#include "teca_app_util.h" #include "calcalcs.h" #include @@ -24,6 +26,7 @@ #include #include + using namespace std; using namespace teca_derived_quantity_numerics; @@ -38,48 +41,70 @@ int main(int argc, char **argv) // initialize command line options description // set up some common options to simplify use for most // common scenarios + int help_width = 100; options_description 
basic_opt_defs( "Basic usage:\n\n" "The following options are the most commonly used. Information\n" "on advanced options can be displayed using --advanced_help\n\n" - "Basic command line options", 120, -1 + "Basic command line options", help_width, help_width - 4 ); basic_opt_defs.add_options() - ("input_file", value(), "file path to the simulation to search for tropical cyclones") - ("input_regex", value(), "regex matching simulation files to search for tropical cylones") - ("candidate_file", value(), "file path to write the storm candidates to (candidates.bin)") - ("850mb_wind_u", value(), "name of variable with 850 mb wind x-component (U850)") - ("850mb_wind_v", value(), "name of variable with 850 mb wind x-component (V850)") - ("surface_wind_u", value(), "name of variable with surface wind x-component (UBOT)") - ("surface_wind_v", value(), "name of variable with surface wind y-component (VBOT)") - ("sea_level_pressure", value(), "name of variable with sea level pressure (PSL)") - ("500mb_temp", value(), "name of variable with 500mb temperature for warm core calc (T500)") - ("200mb_temp", value(), "name of variable with 200mb temperature for warm core calc (T200)") - ("1000mb_height", value(), "name of variable with 1000mb height for thickness calc (Z1000)") - ("200mb_height", value(), "name of variable with 200mb height for thickness calc (Z200)") - ("storm_core_radius", value(), "maximum number of degrees latitude separation between vorticity max and pressure min defining a storm (2.0)") - ("min_vorticity", value(), "minimum vorticty to be considered a tropical storm (1.6e-4)") - ("vorticity_window", value(), "size of the search window in degrees. 
storms core must have a local vorticity max centered on this window (7.74446)") - ("pressure_delta", value(), "maximum pressure change within specified radius (400.0)") - ("pressure_delta_radius", value(), "radius in degrees over which max pressure change is computed (5.0)") - ("core_temp_delta", value(), "maximum core temperature change over the specified radius (0.8)") - ("core_temp_radius", value(), "radius in degrees over which max core temperature change is computed (5.0)") - ("thickness_delta", value(), "maximum thickness change over the specified radius (50.0)") - ("thickness_radius", value(), "radius in degrees over with max thickness change is comuted (4.0)") - ("lowest_lat", value(), "lowest latitude in degrees to search for storms (80)") - ("highest_lat", value(), "highest latitude in degrees to search for storms (80)") - ("max_daily_distance", value(), "max distance in km that a storm can travel in one day (1600)") - ("min_wind_speed", value(), "minimum peak wind speed to be considered a tropical storm (17.0)") - ("min_wind_duration", value(), "number of, not necessarily consecutive, days min wind speed sustained (2.0)") - ("track_file", value(), "file path to write storm tracks to") - ("first_step", value(), "first time step to process") - ("last_step", value(), "last time step to process") - ("start_date", value(), "first time to proces in YYYY-MM-DD hh:mm:ss format") - ("end_date", value(), "first time to proces in YYYY-MM-DD hh:mm:ss format") - ("n_threads", value(), "thread pool size. default is 1. -1 for all") - ("help", "display the basic options help") - ("advanced_help", "display the advanced options help") - ("full_help", "display entire help message") + ("input_file", value(), "\na teca_multi_cf_reader configuration file" + " identifying the set of NetCDF CF2 files to process. When present data is" + " read using the teca_multi_cf_reader. 
Use one of either --input_file or" + " --input_regex.\n") + + ("input_regex", value(), "\na teca_cf_reader regex identifying the" + " set of NetCDF CF2 files to process. When present data is read using the" + " teca_cf_reader. Use one of either --input_file or --input_regex.\n") + + ("candidate_file", value()->default_value("candidates.bin"), + "\nfile path to write the storm candidates to. The extension determines" + " the file format. May be one of `.nc`, `.csv`, or `.bin`\n") + + ("850mb_wind_u", value()->default_value("U850"), "\nname of variable with 850 mb wind x-component\n") + ("850mb_wind_v", value()->default_value("V850"), "\nname of variable with 850 mb wind x-component\n") + ("surface_wind_u", value()->default_value("UBOT"), "\nname of variable with surface wind x-component\n") + ("surface_wind_v", value()->default_value("VBOT"), "\nname of variable with surface wind y-component\n") + ("sea_level_pressure", value()->default_value("PSL"), "\nname of variable with sea level pressure\n") + ("500mb_temp", value()->default_value("T500"), "\nname of variable with 500mb temperature for warm core calc\n") + ("200mb_temp", value()->default_value("T200"), "\nname of variable with 200mb temperature for warm core calc\n") + ("1000mb_height", value()->default_value("Z1000"), "\nname of variable with 1000mb height for thickness calc\n") + ("200mb_height", value()->default_value("Z200"), "\nname of variable with 200mb height for thickness calc\n") + ("storm_core_radius", value()->default_value(2.0), + "\nmaximum number of degrees latitude separationi between vorticity max and pressure min defining a storm\n") + ("min_vorticity", value()->default_value(1.6e-4, "1.6e-4"), "\nminimum vorticty to be considered a tropical storm\n") + ("vorticity_window", value()->default_value(7.74446, "7.74446"), + "\nsize of the search window in degrees. 
storms core must have a local vorticity max centered on this window\n") + ("pressure_delta", value()->default_value(400.0), "\nmaximum pressure change within specified radius\n") + ("pressure_delta_radius", value()->default_value(5.0), + "\nradius in degrees over which max pressure change is computed\n") + ("core_temp_delta", value()->default_value(0.8, "0.8"), "\nmaximum core temperature change over the specified radius\n") + ("core_temp_radius", value()->default_value(5.0), "\nradius in degrees over which max core temperature change is computed\n") + ("thickness_delta", value()->default_value(50.0), "\nmaximum thickness change over the specified radius\n") + ("thickness_radius", value()->default_value(4.0), "\nradius in degrees over with max thickness change is computed\n") + ("lowest_lat", value()->default_value(80), "\nlowest latitude in degrees to search for storms\n") + ("highest_lat", value()->default_value(80), "\nhighest latitude in degrees to search for storms\n") + ("max_daily_distance", value()->default_value(1600), "\nmax distance in km that a storm can travel in one day\n") + ("min_wind_speed", value()->default_value(17.0), "\nminimum peak wind speed to be considered a tropical storm\n") + ("min_wind_duration", value()->default_value(2.0), "\nnumber of, not necessarily consecutive, days min wind speed sustained\n") + + ("track_file", value()->default_value("tracks.bin"), "\nfile path to" + " write storm tracks to. The extension determines the file format. May be" + " one of `.nc`, `.csv`, or `.bin`\n") + + ("first_step", value()->default_value(0), "\nfirst time step to process\n") + ("last_step", value()->default_value(-1), "\nlast time step to process\n") + ("start_date", value(), "\nThe first time to process in 'Y-M-D h:m:s'" + " format. 
Note: There must be a space between the date and time specification\n") + ("end_date", value(), "\nThe last time to process in 'Y-M-D h:m:s' format\n") + ("n_threads", value()->default_value(-1), "\nSets the thread pool size on each" + " MPI rank. When the default value of -1 is used TECA will coordinate the thread" + " pools across ranks such each thread is bound to a unique physical core.\n") + + ("help", "\ndisplays documentation for application specific command line options\n") + ("advanced_help", "\ndisplays documentation for algorithm specific command line options\n") + ("full_help", "\ndisplays both basic and advanced documentation together\n") ; // add all options from each pipeline stage for more advanced use @@ -90,14 +115,14 @@ int main(int argc, char **argv) "(see" "--help) map to these, and will override them if both are\n" "specified.\n\n" "tropical storms pipeline:\n\n" - " (sim_reader)\n" - " \\\n" + " (cf / mcf_reader)\n" + " \\\n" " (surface_wind_speed)--(850mb_vorticity)--(core_temperature)\n" " /\n" " (tracks)--(sort)--(map_reduce)--(candidates)--(thickness)\n" " \\ /\n" " (track_writer) (candidate_writer)\n\n" - "Advanced command line options", -1, 1 + "Advanced command line options", help_width, help_width - 4 ); // create the pipeline stages here, they contain the @@ -105,40 +130,37 @@ int main(int argc, char **argv) // objects report all of their properties directly // set default options here so that command line options override // them. 
while we are at it connect the pipeline - p_teca_cf_reader sim_reader = teca_cf_reader::New(); - sim_reader->get_properties_description("sim_reader", advanced_opt_defs); + p_teca_cf_reader cf_reader = teca_cf_reader::New(); + cf_reader->get_properties_description("cf_reader", advanced_opt_defs); + + p_teca_multi_cf_reader mcf_reader = teca_multi_cf_reader::New(); + mcf_reader->get_properties_description("mcf_reader", advanced_opt_defs); p_teca_normalize_coordinates sim_coords = teca_normalize_coordinates::New(); - sim_coords->set_input_connection(sim_reader->get_output_port()); p_teca_l2_norm surf_wind = teca_l2_norm::New(); - surf_wind->set_input_connection(sim_coords->get_output_port()); surf_wind->set_component_0_variable("UBOT"); surf_wind->set_component_1_variable("VBOT"); surf_wind->set_l2_norm_variable("surface_wind"); surf_wind->get_properties_description("surface_wind_speed", advanced_opt_defs); p_teca_vorticity vort_850mb = teca_vorticity::New(); - vort_850mb->set_input_connection(surf_wind->get_output_port()); vort_850mb->set_component_0_variable("U850"); vort_850mb->set_component_1_variable("V850"); vort_850mb->set_vorticity_variable("850mb_vorticity"); vort_850mb->get_properties_description("850mb_vorticity", advanced_opt_defs); p_teca_derived_quantity core_temp = teca_derived_quantity::New(); - core_temp->set_input_connection(vort_850mb->get_output_port()); core_temp->set_dependent_variables({"T500", "T200"}); core_temp->set_derived_variable("core_temperature"); core_temp->get_properties_description("core_temperature", advanced_opt_defs); p_teca_derived_quantity thickness = teca_derived_quantity::New(); - thickness->set_input_connection(core_temp->get_output_port()); thickness->set_dependent_variables({"Z1000", "Z200"}); thickness->set_derived_variable("thickness"); thickness->get_properties_description("thickness", advanced_opt_defs); p_teca_tc_candidates candidates = teca_tc_candidates::New(); - 
candidates->set_input_connection(thickness->get_output_port()); candidates->set_surface_wind_speed_variable("surface_wind"); candidates->set_vorticity_850mb_variable("850mb_vorticity"); candidates->set_sea_level_pressure_variable("PSL"); @@ -158,32 +180,26 @@ int main(int argc, char **argv) candidates->get_properties_description("candidates", advanced_opt_defs); p_teca_table_reduce map_reduce = teca_table_reduce::New(); - map_reduce->set_input_connection(candidates->get_output_port()); map_reduce->get_properties_description("map_reduce", advanced_opt_defs); p_teca_table_writer candidate_writer = teca_table_writer::New(); - candidate_writer->set_input_connection(map_reduce->get_output_port()); candidate_writer->set_output_format_auto(); candidate_writer->get_properties_description("candidate_writer", advanced_opt_defs); p_teca_table_sort sort = teca_table_sort::New(); - sort->set_input_connection(candidate_writer->get_output_port()); sort->set_index_column("storm_id"); sort->get_properties_description("sort", advanced_opt_defs); p_teca_tc_trajectory tracks = teca_tc_trajectory::New(); - tracks->set_input_connection(sort->get_output_port()); tracks->set_max_daily_distance(1600.0); tracks->set_min_wind_speed(17.0); tracks->set_min_wind_duration(2.0); tracks->get_properties_description("tracks", advanced_opt_defs); p_teca_table_calendar calendar = teca_table_calendar::New(); - calendar->set_input_connection(tracks->get_output_port()); calendar->get_properties_description("calendar", advanced_opt_defs); p_teca_table_writer track_writer = teca_table_writer::New(); - track_writer->set_input_connection(calendar->get_output_port()); track_writer->set_output_format_auto(); track_writer->get_properties_description("track_writer", advanced_opt_defs); @@ -193,57 +209,17 @@ int main(int argc, char **argv) // parse the command line variables_map opt_vals; - try - { - boost::program_options::store( - boost::program_options::command_line_parser(argc, 
argv).options(all_opt_defs).run(), - opt_vals); - - if (mpi_man.get_comm_rank() == 0) - { - if (opt_vals.count("help")) - { - cerr << endl - << "usage: teca_tc_detect [options]" << endl - << endl - << basic_opt_defs << endl - << endl; - return -1; - } - if (opt_vals.count("advanced_help")) - { - cerr << endl - << "usage: teca_tc_detect [options]" << endl - << endl - << advanced_opt_defs << endl - << endl; - return -1; - } - - if (opt_vals.count("full_help")) - { - cerr << endl - << "usage: teca_tc_detect [options]" << endl - << endl - << all_opt_defs << endl - << endl; - return -1; - } - } - - boost::program_options::notify(opt_vals); - } - catch (std::exception &e) + if (teca_app_util::process_command_line_help(mpi_man.get_comm_rank(), + argc, argv, basic_opt_defs, advanced_opt_defs, all_opt_defs, opt_vals)) { - TECA_ERROR("Error parsing command line options. See --help " - "for a list of supported options. " << e.what()) return -1; } // pass command line arguments into the pipeline objects // advanced options are processed first, so that the basic // options will override them - sim_reader->set_properties("sim_reader", opt_vals); + cf_reader->set_properties("cf_reader", opt_vals); + mcf_reader->set_properties("mcf_reader", opt_vals); surf_wind->set_properties("surface_wind_speed", opt_vals); vort_850mb->set_properties("850mb_vorticity", opt_vals); core_temp->set_properties("core_temperature", opt_vals); @@ -258,136 +234,141 @@ int main(int argc, char **argv) // now pass in the basic options, these are processed // last so that they will take precedence - if (opt_vals.count("input_file")) - sim_reader->append_file_name( - opt_vals["input_file"].as()); - if (opt_vals.count("input_regex")) - sim_reader->set_files_regex( - opt_vals["input_regex"].as()); + // configure the reader + bool have_file = opt_vals.count("input_file"); + bool have_regex = opt_vals.count("input_regex"); - if (opt_vals.count("850mb_wind_u")) + p_teca_algorithm reader; + if 
(opt_vals.count("input_file")) + { + mcf_reader->set_input_file(opt_vals["input_file"].as()); + reader = mcf_reader; + } + else if (opt_vals.count("input_regex")) + { + cf_reader->set_files_regex(opt_vals["input_regex"].as()); + reader = cf_reader; + } + + if (!opt_vals["850mb_wind_u"].defaulted()) vort_850mb->set_component_0_variable( opt_vals["850mb_wind_u"].as()); - if (opt_vals.count("850mb_wind_v")) + if (!opt_vals["850mb_wind_v"].defaulted()) vort_850mb->set_component_1_variable( opt_vals["850mb_wind_v"].as()); - if (opt_vals.count("surface_wind_u")) + if (!opt_vals["surface_wind_u"].defaulted()) surf_wind->set_component_0_variable( opt_vals["surface_wind_u"].as()); - if (opt_vals.count("surface_wind_v")) + if (!opt_vals["surface_wind_v"].defaulted()) surf_wind->set_component_1_variable( opt_vals["surface_wind_v"].as()); std::vector dep_var; core_temp->get_dependent_variables(dep_var); - if (opt_vals.count("500mb_temp")) + if (!opt_vals["500mb_temp"].defaulted()) dep_var[0] = opt_vals["500mb_temp"].as(); - if (opt_vals.count("200mb_temp")) + if (!opt_vals["200mb_temp"].defaulted()) dep_var[1] = opt_vals["200mb_temp"].as(); core_temp->set_dependent_variables(dep_var); dep_var.clear(); thickness->get_dependent_variables(dep_var); - if (opt_vals.count("1000mb_height")) + if (!opt_vals["1000mb_height"].defaulted()) dep_var[0] = opt_vals["1000mb_height"].as(); - if (opt_vals.count("200mb_height")) + if (!opt_vals["200mb_height"].defaulted()) dep_var[1] = opt_vals["200mb_height"].as(); thickness->set_dependent_variables(dep_var); dep_var.clear(); - if (opt_vals.count("sea_level_pressure")) + if (!opt_vals["sea_level_pressure"].defaulted()) candidates->set_sea_level_pressure_variable( opt_vals["sea_level_pressure"].as()); - if (opt_vals.count("storm_core_radius")) + if (!opt_vals["storm_core_radius"].defaulted()) candidates->set_max_core_radius( opt_vals["storm_core_radius"].as()); - if (opt_vals.count("min_vorticity")) + if (!opt_vals["min_vorticity"].defaulted()) 
candidates->set_min_vorticity_850mb( opt_vals["min_vorticity"].as()); - if (opt_vals.count("vorticity_window")) + if (!opt_vals["vorticity_window"].defaulted()) candidates->set_vorticity_850mb_window( opt_vals["vorticity_window"].as()); - if (opt_vals.count("pressure_delta")) + if (!opt_vals["pressure_delta"].defaulted()) candidates->set_max_pressure_delta( opt_vals["pressure_delta"].as()); - if (opt_vals.count("pressure_delta_radius")) + if (!opt_vals["pressure_delta_radius"].defaulted()) candidates->set_max_pressure_radius( opt_vals["pressure_delta_radius"].as()); - if (opt_vals.count("core_temp_delta")) + if (!opt_vals["core_temp_delta"].defaulted()) candidates->set_max_core_temperature_delta( opt_vals["core_temp_delta"].as()); - if (opt_vals.count("core_temp_radius")) + if (!opt_vals["core_temp_radius"].defaulted()) candidates->set_max_core_temperature_radius( opt_vals["core_temp_radius"].as()); - if (opt_vals.count("thickness_delta")) + if (!opt_vals["thickness_delta"].defaulted()) candidates->set_max_thickness_delta( opt_vals["thickness_delta"].as()); - if (opt_vals.count("thickness_radius")) + if (!opt_vals["thickness_radius"].defaulted()) candidates->set_max_thickness_radius( opt_vals["thickness_radius"].as()); - if (opt_vals.count("lowest_lat")) + if (!opt_vals["lowest_lat"].defaulted()) candidates->set_search_lat_low( opt_vals["lowest_lat"].as()); - if (opt_vals.count("highest_lat")) + if (!opt_vals["highest_lat"].defaulted()) candidates->set_search_lat_high( opt_vals["highest_lat"].as()); - if (opt_vals.count("first_step")) + if (!opt_vals["first_step"].defaulted()) map_reduce->set_start_index(opt_vals["first_step"].as()); - if (opt_vals.count("last_step")) + if (!opt_vals["last_step"].defaulted()) map_reduce->set_end_index(opt_vals["last_step"].as()); - if (opt_vals.count("n_threads")) + if (!opt_vals["n_threads"].defaulted()) map_reduce->set_thread_pool_size(opt_vals["n_threads"].as()); else map_reduce->set_thread_pool_size(-1); - if 
(opt_vals.count("max_daily_distance")) + if (!opt_vals["max_daily_distance"].defaulted()) tracks->set_max_daily_distance( opt_vals["max_daily_distance"].as()); - if (opt_vals.count("min_wind_speed")) + if (!opt_vals["min_wind_speed"].defaulted()) tracks->set_min_wind_speed( opt_vals["min_wind_speed"].as()); - if (opt_vals.count("min_wind_duration")) + if (!opt_vals["min_wind_duration"].defaulted()) tracks->set_min_wind_duration( opt_vals["min_wind_duration"].as()); - if (opt_vals.count("candidate_file")) - candidate_writer->set_file_name( - opt_vals["candidate_file"].as()); - - if (opt_vals.count("track_file")) - track_writer->set_file_name( - opt_vals["track_file"].as()); + candidate_writer->set_file_name( + opt_vals["candidate_file"].as()); + track_writer->set_file_name( + opt_vals["track_file"].as()); // some minimal check for missing options - if (sim_reader->get_number_of_file_names() == 0 - && sim_reader->get_files_regex().empty()) + if ((have_file && have_regex) || !(have_file || have_regex)) { if (mpi_man.get_comm_rank() == 0) { - TECA_ERROR( - "missing file name or regex for simulation reader. " - "See --help for a list of command line options.") + TECA_ERROR("Exactly one of --input_file or --input_regex can be specified. 
" + "Use --input_file to activate the multi_cf_reader (HighResMIP datasets) " + "and --input_regex to activate the cf_reader (CAM like datasets)") } return -1; } @@ -430,7 +411,7 @@ int main(int argc, char **argv) if (parse_start_date || parse_end_date) { // run the reporting phase of the pipeline - teca_metadata md = sim_reader->update_metadata(); + teca_metadata md = reader->update_metadata(); teca_metadata atrs; if (md.get("attributes", atrs)) @@ -465,7 +446,7 @@ int main(int argc, char **argv) { unsigned long first_step = 0; std::string start_date = opt_vals["start_date"].as(); - if (teca_coordinate_util::time_step_of(time, true, calendar, + if (teca_coordinate_util::time_step_of(time, true, true, calendar, units, start_date, first_step)) { TECA_ERROR("Failed to lcoate time step for start date \"" @@ -480,7 +461,7 @@ int main(int argc, char **argv) { unsigned long last_step = 0; std::string end_date = opt_vals["end_date"].as(); - if (teca_coordinate_util::time_step_of(time, false, calendar, + if (teca_coordinate_util::time_step_of(time, false, true, calendar, units, end_date, last_step)) { TECA_ERROR("Failed to lcoate time step for end date \"" @@ -491,6 +472,20 @@ int main(int argc, char **argv) } } + // connect all the stages + sim_coords->set_input_connection(reader->get_output_port()); + surf_wind->set_input_connection(sim_coords->get_output_port()); + vort_850mb->set_input_connection(surf_wind->get_output_port()); + core_temp->set_input_connection(vort_850mb->get_output_port()); + thickness->set_input_connection(core_temp->get_output_port()); + candidates->set_input_connection(thickness->get_output_port()); + map_reduce->set_input_connection(candidates->get_output_port()); + candidate_writer->set_input_connection(map_reduce->get_output_port()); + sort->set_input_connection(candidate_writer->get_output_port()); + tracks->set_input_connection(sort->get_output_port()); + calendar->set_input_connection(tracks->get_output_port()); + 
track_writer->set_input_connection(calendar->get_output_port()); + // run the pipeline track_writer->update(); diff --git a/apps/teca_tc_stats.in b/apps/teca_tc_stats.in index 97bf9d811..c9cd63f02 100755 --- a/apps/teca_tc_stats.in +++ b/apps/teca_tc_stats.in @@ -20,6 +20,9 @@ parser.add_argument('-i', '--interact', action='store_true', parser.add_argument('-a', '--ind_axes', action='store_false', help="normalize y-axis in grouped plots") +parser.add_argument('-f', '--out-format', type=str, + default='csv', help='output in a csv or bin format (bin)') + args = parser.parse_args() # construct the pipeline @@ -35,7 +38,7 @@ calendar.set_time_column('start_time') writer = teca_table_writer.New() writer.set_input_connection(calendar.get_output_port()) -writer.set_file_name('%s_class_table.csv'%(args.output_prefix)) +writer.set_file_name('%s_class_table.%s'%(args.output_prefix, args.out_format)) act = teca_tc_activity.New() act.set_input_connection(writer.get_output_port()) @@ -52,4 +55,4 @@ stats.set_interactive(args.interact) stats.set_rel_axes(args.ind_axes) # execute -act.update() +stats.update() diff --git a/apps/teca_tc_storm_size.cpp b/apps/teca_tc_storm_size.cpp deleted file mode 100644 index a5cabb81b..000000000 --- a/apps/teca_tc_storm_size.cpp +++ /dev/null @@ -1,252 +0,0 @@ -#include "teca_config.h" -#include "teca_table_reader.h" -#include "teca_table_remove_rows.h" -#include "teca_table_sort.h" -#include "teca_cf_reader.h" -#include "teca_normalize_coordinates.h" -#include "teca_tc_storm_size.h" -#include "teca_table_reduce.h" -#include "teca_table_to_stream.h" -#include "teca_table_writer.h" -#include "teca_dataset_diff.h" -#include "teca_index_executive.h" -#include "teca_file_util.h" -#include "teca_mpi_manager.h" - -#include -#include -#include - -#include - -using std::cerr; -using std::endl; - -using boost::program_options::value; - - -int main(int argc, char **argv) -{ - // initialize MPI - teca_mpi_manager mpi_man(argc, argv); - - // initialize 
command line options description - // set up some common options to simplify use for most - // common scenarios - options_description basic_opt_defs( - "Basic usage:\n\n" - "The following options are the most commonly used. Information\n" - "on advanced options can be displayed using --advanced_help\n\n" - "Basic command line options", 120, -1 - ); - basic_opt_defs.add_options() - ("track_file", value(), "file path to read the cyclone from (tracks.bin)") - ("wind_files", value(), "regex matching simulation files containing wind fields ()") - ("track_file_out", value(), "file path to write cyclone tracks with size (tracks_size.bin)") - ("wind_u_var", value(), "name of variable with wind x-component (UBOT)") - ("wind_v_var", value(), "name of variable with wind y-component (VBOT)") - ("track_mask", value(), "expression to filter tracks by ()") - ("number_of_bins", value(), "number of bins in the radial wind decomposition (32)") - ("profile_type", value(), "radial wind profile type. max or avg (avg)") - ("search_radius", value(), "size of search window in deg lat (6)") - ("first_track", value(), "first track to process") - ("last_track", value(), "last track to process") - ("n_threads", value(), "thread pool size. default is 1. -1 for all") - ("help", "display the basic options help") - ("advanced_help", "display the advanced options help") - ("full_help", "display entire help message") - ; - - // add all options from each pipeline stage for more advanced use - options_description advanced_opt_defs( - "Advanced usage:\n\n" - "The following list contains the full set options giving one full\n" - "control over all runtime modifiable parameters. 
The basic options\n" - "(see" "--help) map to these, and will override them if both are\n" - "specified.\n\n" - "tc storm size pipeline:\n\n" - " (track reader)--(track filter)\n" - " \\\n" - " (wind reader)--(storm size)\n" - " \\\n" - " (map reduce)--(table sort)\n" - " \\\n" - " (track writer)\n\n" - "Advanced command line options", -1, 1 - ); - - // create the pipeline stages here, they contain the - // documentation and parse command line. - // objects report all of their properties directly - // set default options here so that command line options override - // them. while we are at it connect the pipeline - p_teca_table_reader track_reader = teca_table_reader::New(); - track_reader->get_properties_description("track_reader", advanced_opt_defs); - track_reader->set_file_name("tracks.bin"); - - p_teca_table_remove_rows track_filter = teca_table_remove_rows::New(); - track_filter->get_properties_description("track_filter", advanced_opt_defs); - - p_teca_cf_reader wind_reader = teca_cf_reader::New(); - wind_reader->get_properties_description("wind_reader", advanced_opt_defs); - - p_teca_normalize_coordinates wind_coords = teca_normalize_coordinates::New(); - wind_coords->set_input_connection(wind_reader->get_output_port()); - - p_teca_tc_storm_size storm_size = teca_tc_storm_size::New(); - storm_size->get_properties_description("storm_size", advanced_opt_defs); - storm_size->set_input_connection(1, wind_coords->get_output_port()); - - p_teca_table_reduce map_reduce = teca_table_reduce::New(); - map_reduce->get_properties_description("map_reduce", advanced_opt_defs); - map_reduce->set_input_connection(storm_size->get_output_port()); - - p_teca_table_sort sort = teca_table_sort::New(); - sort->get_properties_description("table_sort", advanced_opt_defs); - sort->set_input_connection(map_reduce->get_output_port()); - sort->set_index_column("track_id"); - sort->enable_stable_sort(); - - p_teca_table_writer track_writer = teca_table_writer::New(); - 
track_writer->get_properties_description("track_writer", advanced_opt_defs); - track_writer->set_input_connection(sort->get_output_port()); - track_writer->set_file_name("tracks_size.bin"); - - // package basic and advanced options for display - options_description all_opt_defs(-1, -1); - all_opt_defs.add(basic_opt_defs).add(advanced_opt_defs); - - // parse the command line - variables_map opt_vals; - try - { - boost::program_options::store( - boost::program_options::command_line_parser(argc, argv).options(all_opt_defs).run(), - opt_vals); - - if (mpi_man.get_comm_rank() == 0) - { - if (opt_vals.count("help")) - { - cerr << endl - << "usage: teca_tc_storm_size [options]" << endl - << endl - << basic_opt_defs << endl - << endl; - return -1; - } - if (opt_vals.count("advanced_help")) - { - cerr << endl - << "usage: teca_tc_detect [options]" << endl - << endl - << advanced_opt_defs << endl - << endl; - return -1; - } - if (opt_vals.count("full_help")) - { - cerr << endl - << "usage: teca_tc_detect [options]" << endl - << endl - << all_opt_defs << endl - << endl; - return -1; - } - } - - boost::program_options::notify(opt_vals); - } - catch (std::exception &e) - { - TECA_ERROR("Error parsing command line options. See --help " - "for a list of supported options. 
" << e.what()) - return -1; - } - - // pass command line arguments into the pipeline objects - // advanced options are processed first, so that the basic - // options will override them - track_reader->set_properties("track_reader", opt_vals); - track_filter->set_properties("track_filter", opt_vals); - wind_reader->set_properties("wind_reader", opt_vals); - storm_size->set_properties("storm_size", opt_vals); - map_reduce->set_properties("map_reduce", opt_vals); - sort->set_properties("table_sort", opt_vals); - track_writer->set_properties("track_writer", opt_vals); - - // now pass in the basic options, these are processed - // last so that they will take precedence - if (opt_vals.count("track_file")) - track_reader->set_file_name(opt_vals["track_file"].as()); - - if (opt_vals.count("wind_files")) - { - wind_reader->set_files_regex(opt_vals["wind_files"].as()); - } - else - { - TECA_ERROR("--wind_files is a required option") - return -1; - } - - if (opt_vals.count("track_file_out")) - track_writer->set_file_name(opt_vals["track_file_out"].as()); - - if (opt_vals.count("track_mask")) - { - track_filter->set_input_connection(track_reader->get_output_port()); - track_filter->set_mask_expression(opt_vals["track_mask"].as()); - storm_size->set_input_connection(0, track_filter->get_output_port()); - } - else - { - storm_size->set_input_connection(0, track_reader->get_output_port()); - } - - if (opt_vals.count("wind_u_var")) - storm_size->set_wind_u_variable(opt_vals["wind_u_var"].as()); - - if (opt_vals.count("wind_v_var")) - storm_size->set_wind_v_variable(opt_vals["wind_v_var"].as()); - - if (opt_vals.count("n_radial_bins")) - storm_size->set_number_of_radial_bins(opt_vals["n_radial_bins"].as()); - - if (opt_vals.count("profile_type")) - { - std::string profile_type = opt_vals["profile_type"].as(); - if (profile_type == "avg") - { - storm_size->set_profile_type(1); - } - else if (profile_type == "max") - { - storm_size->set_profile_type(0); - } - else - { - 
TECA_ERROR("invalid profile_type " << profile_type) - return -1; - } - } - - if (opt_vals.count("search_radius")) - storm_size->set_search_radius(opt_vals["search_radius"].as()); - - if (opt_vals.count("first_track")) - map_reduce->set_start_index(opt_vals["first_track"].as()); - - if (opt_vals.count("last_track")) - map_reduce->set_end_index(opt_vals["last_track"].as()); - - if (opt_vals.count("n_threads")) - map_reduce->set_thread_pool_size(opt_vals["n_threads"].as()); - else - map_reduce->set_thread_pool_size(-1); - - // run the pipeline - track_writer->update(); - - return 0; -} diff --git a/apps/teca_tc_trajectory.cpp b/apps/teca_tc_trajectory.cpp index 0b7338ed9..6e9c190dd 100644 --- a/apps/teca_tc_trajectory.cpp +++ b/apps/teca_tc_trajectory.cpp @@ -17,6 +17,7 @@ #include "teca_mpi_manager.h" #include "teca_coordinate_util.h" #include "calcalcs.h" +#include "teca_app_util.h" #include #include @@ -38,21 +39,24 @@ int main(int argc, char **argv) // initialize command line options description // set up some common options to simplify use for most // common scenarios + int help_width = 100; options_description basic_opt_defs( "Basic usage:\n\n" "The following options are the most commonly used. 
Information\n" "on advanced options can be displayed using --advanced_help\n\n" - "Basic command line options", 120, -1 + "Basic command line options", help_width, help_width - 4 ); basic_opt_defs.add_options() - ("candidate_file", value(), "file path to read the storm candidates from (candidates.bin)") - ("max_daily_distance", value(), "max distance in km that a storm can travel in one day (1600)") - ("min_wind_speed", value(), "minimum peak wind speed to be considered a tropical storm (17.0)") - ("min_wind_duration", value(), "number of, not necessarily consecutive, days min wind speed sustained (2.0)") - ("track_file", value(), "file path to write storm tracks to") - ("help", "display the basic options help") - ("advanced_help", "display the advanced options help") - ("full_help", "display entire help message") + ("candidate_file", value()->default_value("candidates.bin"), "\nfile path to read the storm candidates from\n") + ("max_daily_distance", value()->default_value(1600), "\nmax distance in km that a storm can travel in one day\n") + ("min_wind_speed", value()->default_value(17.0), "\nminimum peak wind speed to be considered a tropical storm\n") + ("min_wind_duration", value()->default_value(2.0), "\nnumber of, not necessarily consecutive, days min wind speed sustained\n") + ("track_file", value()->default_value("tracks.bin"), "\nfile path to" + " write storm tracks to. The extension determines the file format. 
May be" + " one of `.nc`, `.csv`, or `.bin`\n") + ("help", "\ndisplays documentation for application specific command line options\n") + ("advanced_help", "\ndisplays documentation for algorithm specific command line options\n") + ("full_help", "\ndisplays both basic and advanced documentation together\n") ; // add all options from each pipeline stage for more advanced use @@ -64,7 +68,7 @@ int main(int argc, char **argv) "specified.\n\n" "tropical storms trajectory pipeline:\n\n" " (candidate reader)--(sort)--(tracks)--(track writer)\n\n" - "Advanced command line options", -1, 1 + "Advanced command line options", help_width, help_width - 4 ); // create the pipeline stages here, they contain the @@ -75,11 +79,8 @@ int main(int argc, char **argv) p_teca_table_reader candidate_reader = teca_table_reader::New(); candidate_reader->get_properties_description("candidate_reader", advanced_opt_defs); - p_teca_normalize_coordinates candidate_coords = teca_normalize_coordinates::New(); - candidate_coords->set_input_connection(candidate_reader->get_output_port()); - p_teca_table_sort sort = teca_table_sort::New(); - sort->set_input_connection(candidate_coords->get_output_port()); + sort->set_input_connection(candidate_reader->get_output_port()); sort->set_index_column("storm_id"); sort->get_properties_description("sort", advanced_opt_defs); @@ -100,55 +101,14 @@ int main(int argc, char **argv) track_writer->get_properties_description("track_writer", advanced_opt_defs); // package basic and advanced options for display - options_description all_opt_defs(-1, -1); + options_description all_opt_defs(help_width, help_width - 4); all_opt_defs.add(basic_opt_defs).add(advanced_opt_defs); // parse the command line variables_map opt_vals; - try - { - boost::program_options::store( - boost::program_options::command_line_parser(argc, argv).options(all_opt_defs).run(), - opt_vals); - - if (mpi_man.get_comm_rank() == 0) - { - if (opt_vals.count("help")) - { - cerr << endl - << "usage: 
teca_tc_detect [options]" << endl - << endl - << basic_opt_defs << endl - << endl; - return -1; - } - if (opt_vals.count("advanced_help")) - { - cerr << endl - << "usage: teca_tc_detect [options]" << endl - << endl - << advanced_opt_defs << endl - << endl; - return -1; - } - - if (opt_vals.count("full_help")) - { - cerr << endl - << "usage: teca_tc_detect [options]" << endl - << endl - << all_opt_defs << endl - << endl; - return -1; - } - } - - boost::program_options::notify(opt_vals); - } - catch (std::exception &e) + if (teca_app_util::process_command_line_help(mpi_man.get_comm_rank(), + argc, argv, basic_opt_defs, advanced_opt_defs, all_opt_defs, opt_vals)) { - TECA_ERROR("Error parsing command line options. See --help " - "for a list of supported options. " << e.what()) return -1; } @@ -163,25 +123,23 @@ int main(int argc, char **argv) // now pass in the basic options, these are processed // last so that they will take precedence - if (opt_vals.count("candidate_file")) - candidate_reader->set_file_name( - opt_vals["candidate_file"].as()); + candidate_reader->set_file_name( + opt_vals["candidate_file"].as()); - if (opt_vals.count("max_daily_distance")) + if (!opt_vals["max_daily_distance"].defaulted()) tracks->set_max_daily_distance( opt_vals["max_daily_distance"].as()); - if (opt_vals.count("min_wind_speed")) + if (!opt_vals["min_wind_speed"].defaulted()) tracks->set_min_wind_speed( opt_vals["min_wind_speed"].as()); - if (opt_vals.count("min_wind_duration")) + if (!opt_vals["min_wind_duration"].defaulted()) tracks->set_min_wind_duration( opt_vals["min_wind_duration"].as()); - if (opt_vals.count("track_file")) - track_writer->set_file_name( - opt_vals["track_file"].as()); + track_writer->set_file_name( + opt_vals["track_file"].as()); // some minimal check for missing options if (candidate_reader->get_file_name().empty()) diff --git a/apps/teca_tc_trajectory_scalars.in b/apps/teca_tc_trajectory_scalars.in index ab3086511..7dd0416ec 100755 --- 
a/apps/teca_tc_trajectory_scalars.in +++ b/apps/teca_tc_trajectory_scalars.in @@ -1,9 +1,9 @@ #!/usr/bin/env python@TECA_PYTHON_VERSION@ try: - from mpi4py import * + from mpi4py import MPI rank = MPI.COMM_WORLD.Get_rank() n_ranks = MPI.COMM_WORLD.Get_size() -except: +except ImportError: rank = 0 n_ranks = 1 import sys diff --git a/apps/teca_tc_wind_radii.cpp b/apps/teca_tc_wind_radii.cpp index 761cfc771..4ded8748e 100644 --- a/apps/teca_tc_wind_radii.cpp +++ b/apps/teca_tc_wind_radii.cpp @@ -3,6 +3,7 @@ #include "teca_table_remove_rows.h" #include "teca_table_sort.h" #include "teca_cf_reader.h" +#include "teca_multi_cf_reader.h" #include "teca_normalize_coordinates.h" #include "teca_tc_wind_radii.h" #include "teca_table_reduce.h" @@ -10,6 +11,7 @@ #include "teca_table_writer.h" #include "teca_dataset_diff.h" #include "teca_file_util.h" +#include "teca_app_util.h" #include "teca_mpi_manager.h" #include @@ -32,28 +34,47 @@ int main(int argc, char **argv) // initialize command line options description // set up some common options to simplify use for most // common scenarios + int help_width = 100; options_description basic_opt_defs( "Basic usage:\n\n" "The following options are the most commonly used. 
Information\n" "on advanced options can be displayed using --advanced_help\n\n" - "Basic command line options", 120, -1 + "Basic command line options", help_width, help_width - 4 ); basic_opt_defs.add_options() - ("track_file", value(), "file path to read the cyclone from (tracks.bin)") - ("wind_files", value(), "regex matching simulation files containing wind fields ()") - ("track_file_out", value(), "file path to write cyclone tracks with size (tracks_size.bin)") - ("wind_u_var", value(), "name of variable with wind x-component (UBOT)") - ("wind_v_var", value(), "name of variable with wind y-component (VBOT)") - ("track_mask", value(), "expression to filter tracks by ()") - ("number_of_bins", value(), "number of bins in the radial wind decomposition (32)") - ("profile_type", value(), "radial wind profile type. max or avg (avg)") - ("search_radius", value(), "size of search window in deg lat (6)") - ("first_track", value(), "first track to process") - ("last_track", value(), "last track to process") - ("n_threads", value(), "thread pool size. default is 1. -1 for all") - ("help", "display the basic options help") - ("advanced_help", "display the advanced options help") - ("full_help", "display entire help message") + ("track_file", value(), "\na file containing cyclone tracks (tracks.bin)\n") + + ("input_file", value(), "\na teca_multi_cf_reader configuration file" + " identifying the set of NetCDF CF2 files to process. When present data is" + " read using the teca_multi_cf_reader. Use one of either --input_file or" + " --input_regex.\n") + + ("input_regex", value(), "\na teca_cf_reader regex identifying the" + " set of NetCDF CF2 files to process. When present data is read using the" + " teca_cf_reader. 
Use one of either --input_file or --input_regex.\n") + + ("wind_files", value(), "\na synonym for --input_regex.\n") + + ("track_file_out", value()->default_value("tracks_size.bin"), + "\nfile path to write cyclone tracks with size\n") + + ("wind_u_var", value()->default_value("UBOT"), "\nname of variable with wind x-component\n") + ("wind_v_var", value()->default_value("VBOT"), "\nname of variable with wind y-component\n") + ("track_mask", value(), "\nAn expression to filter tracks by\n") + ("number_of_bins", value()->default_value(32), "\nnumber of bins in the radial wind decomposition\n") + ("profile_type", value()->default_value("avg"), "\nradial wind profile type. max or avg\n") + ("search_radius", value()->default_value(6), "\nsize of search window in decimal degrees\n") + + ("first_track", value()->default_value(0), "\nfirst track to process\n") + ("last_track", value()->default_value(-1), "\nlast track to process\n") + + ("n_threads", value()->default_value(-1), "\nSets the thread pool size on each" + " MPI rank. 
When the default value of -1 is used TECA will coordinate the thread" + " pools across ranks such each thread is bound to a unique physical core.\n") + + ("help", "\ndisplays documentation for application specific command line options\n") + ("advanced_help", "\ndisplays documentation for algorithm specific command line options\n") + ("full_help", "\ndisplays both basic and advanced documentation together\n") ; // add all options from each pipeline stage for more advanced use @@ -66,12 +87,12 @@ int main(int argc, char **argv) "tc storm size pipeline:\n\n" " (track reader)--(track filter)\n" " \\\n" - " (wind reader)--(storm size)\n" + " (cf / mcf_reader)--(storm size)\n" " \\\n" " (map reduce)--(table sort)\n" " \\\n" " (track writer)\n\n" - "Advanced command line options", -1, 1 + "Advanced command line options", help_width, help_width - 4 ); // create the pipeline stages here, they contain the @@ -86,80 +107,38 @@ int main(int argc, char **argv) p_teca_table_remove_rows track_filter = teca_table_remove_rows::New(); track_filter->get_properties_description("track_filter", advanced_opt_defs); - p_teca_cf_reader wind_reader = teca_cf_reader::New(); - wind_reader->get_properties_description("wind_reader", advanced_opt_defs); + p_teca_cf_reader cf_reader = teca_cf_reader::New(); + cf_reader->get_properties_description("cf_reader", advanced_opt_defs); + + p_teca_multi_cf_reader mcf_reader = teca_multi_cf_reader::New(); + mcf_reader->get_properties_description("mcf_reader", advanced_opt_defs); p_teca_normalize_coordinates wind_coords = teca_normalize_coordinates::New(); - wind_coords->set_input_connection(wind_coords->get_output_port()); p_teca_tc_wind_radii wind_radii = teca_tc_wind_radii::New(); wind_radii->get_properties_description("wind_radii", advanced_opt_defs); - wind_radii->set_input_connection(1, wind_coords->get_output_port()); p_teca_table_reduce map_reduce = teca_table_reduce::New(); map_reduce->get_properties_description("map_reduce", advanced_opt_defs); - 
map_reduce->set_input_connection(wind_radii->get_output_port()); p_teca_table_sort sort = teca_table_sort::New(); sort->get_properties_description("table_sort", advanced_opt_defs); - sort->set_input_connection(map_reduce->get_output_port()); sort->set_index_column("track_id"); sort->enable_stable_sort(); p_teca_table_writer track_writer = teca_table_writer::New(); track_writer->get_properties_description("track_writer", advanced_opt_defs); - track_writer->set_input_connection(sort->get_output_port()); track_writer->set_file_name("tracks_size.bin"); // package basic and advanced options for display - options_description all_opt_defs(-1, -1); + options_description all_opt_defs(help_width, help_width - 4); all_opt_defs.add(basic_opt_defs).add(advanced_opt_defs); // parse the command line variables_map opt_vals; - try - { - boost::program_options::store( - boost::program_options::command_line_parser(argc, argv).options(all_opt_defs).run(), - opt_vals); - - if (mpi_man.get_comm_rank() == 0) - { - if (opt_vals.count("help")) - { - cerr << endl - << "usage: teca_tc_wind_radii [options]" << endl - << endl - << basic_opt_defs << endl - << endl; - return -1; - } - if (opt_vals.count("advanced_help")) - { - cerr << endl - << "usage: teca_tc_wind_radii [options]" << endl - << endl - << advanced_opt_defs << endl - << endl; - return -1; - } - if (opt_vals.count("full_help")) - { - cerr << endl - << "usage: teca_tc_wind_radii [options]" << endl - << endl - << all_opt_defs << endl - << endl; - return -1; - } - } - - boost::program_options::notify(opt_vals); - } - catch (std::exception &e) + if (teca_app_util::process_command_line_help(mpi_man.get_comm_rank(), + argc, argv, basic_opt_defs, advanced_opt_defs, all_opt_defs, opt_vals)) { - TECA_ERROR("Error parsing command line options. See --help " - "for a list of supported options. 
" << e.what()) return -1; } @@ -168,7 +147,8 @@ int main(int argc, char **argv) // options will override them track_reader->set_properties("track_reader", opt_vals); track_filter->set_properties("track_filter", opt_vals); - wind_reader->set_properties("wind_reader", opt_vals); + cf_reader->set_properties("cf_reader", opt_vals); + mcf_reader->set_properties("mcf_reader", opt_vals); wind_radii->set_properties("wind_radii", opt_vals); map_reduce->set_properties("map_reduce", opt_vals); sort->set_properties("table_sort", opt_vals); @@ -176,43 +156,55 @@ int main(int argc, char **argv) // now pass in the basic options, these are processed // last so that they will take precedence - if (opt_vals.count("track_file")) + if (!opt_vals["track_file"].defaulted()) track_reader->set_file_name(opt_vals["track_file"].as()); - if (opt_vals.count("wind_files")) + bool have_file = opt_vals.count("input_file"); + bool have_wind_files = opt_vals.count("wind_files"); + bool have_regex = opt_vals.count("input_regex"); + p_teca_algorithm wind_reader; + if (have_file) { - wind_reader->set_files_regex(opt_vals["wind_files"].as()); + mcf_reader->set_input_file(opt_vals["input_file"].as()); + wind_reader = mcf_reader; } - else + else if (have_wind_files) { - TECA_ERROR("--wind_files is a required option") - return -1; + have_regex = true; + cf_reader->set_files_regex(opt_vals["wind_files"].as()); + wind_reader = cf_reader; + } + else if (have_regex) + { + cf_reader->set_files_regex(opt_vals["input_regex"].as()); + wind_reader = cf_reader; } - if (opt_vals.count("track_file_out")) + if (!opt_vals["track_file_out"].defaulted()) track_writer->set_file_name(opt_vals["track_file_out"].as()); + p_teca_algorithm track_input; if (opt_vals.count("track_mask")) { track_filter->set_input_connection(track_reader->get_output_port()); track_filter->set_mask_expression(opt_vals["track_mask"].as()); - wind_radii->set_input_connection(0, track_filter->get_output_port()); + track_input = track_filter; } else 
{ - wind_radii->set_input_connection(0, track_reader->get_output_port()); + track_input = track_reader; } - if (opt_vals.count("wind_u_var")) + if (!opt_vals["wind_u_var"].defaulted()) wind_radii->set_wind_u_variable(opt_vals["wind_u_var"].as()); - if (opt_vals.count("wind_v_var")) + if (!opt_vals["wind_v_var"].defaulted()) wind_radii->set_wind_v_variable(opt_vals["wind_v_var"].as()); - if (opt_vals.count("n_radial_bins")) - wind_radii->set_number_of_radial_bins(opt_vals["n_radial_bins"].as()); + if (!opt_vals["number_of_bins"].defaulted()) + wind_radii->set_number_of_radial_bins(opt_vals["number_of_bins"].as()); - if (opt_vals.count("profile_type")) + if (!opt_vals["profile_type"].defaulted()) { std::string profile_type = opt_vals["profile_type"].as(); if (profile_type == "avg") @@ -230,20 +222,40 @@ int main(int argc, char **argv) } } - if (opt_vals.count("search_radius")) + if (!opt_vals["search_radius"].defaulted()) wind_radii->set_search_radius(opt_vals["search_radius"].as()); - if (opt_vals.count("first_track")) + if (!opt_vals["first_track"].defaulted()) map_reduce->set_start_index(opt_vals["first_track"].as()); - if (opt_vals.count("last_track")) + if (!opt_vals["last_track"].defaulted()) map_reduce->set_end_index(opt_vals["last_track"].as()); - if (opt_vals.count("n_threads")) + if (!opt_vals["n_threads"].defaulted()) map_reduce->set_thread_pool_size(opt_vals["n_threads"].as()); else map_reduce->set_thread_pool_size(-1); + // some minimal check for missing options + if ((have_file && have_regex) || !(have_file || have_regex)) + { + if (mpi_man.get_comm_rank() == 0) + { + TECA_ERROR("Extacly one of --input_file or --input_regex can be specified. 
" + "Use --input_file to activate the multi_cf_reader (HighResMIP datasets) " + "and --input_regex to activate the cf_reader (CAM like datasets)") + } + return -1; + } + + // connect the pipeline + wind_coords->set_input_connection(wind_reader->get_output_port()); + wind_radii->set_input_connection(0, track_input->get_output_port()); + wind_radii->set_input_connection(1, wind_coords->get_output_port()); + map_reduce->set_input_connection(wind_radii->get_output_port()); + sort->set_input_connection(map_reduce->get_output_port()); + track_writer->set_input_connection(sort->get_output_port()); + // run the pipeline track_writer->update(); diff --git a/apps/teca_temporal_reduction.in b/apps/teca_temporal_reduction.in new file mode 100755 index 000000000..f14d653df --- /dev/null +++ b/apps/teca_temporal_reduction.in @@ -0,0 +1,161 @@ +#!/usr/bin/env python@TECA_PYTHON_VERSION@ +try: + from mpi4py import MPI + rank = MPI.COMM_WORLD.Get_rank() + n_ranks = MPI.COMM_WORLD.Get_size() +except ImportError: + rank = 0 + n_ranks = 1 +from teca import * +import argparse +import numpy as np +import sys +import os + +teca_profiler.initialize() +teca_profiler.start_event('temporal_reduction') + +# parse the command line +parser = argparse.ArgumentParser( + description='Reduce the time axis of a NetcCDF CF2 dataset ' + 'using a predfined interval and reduction operator', + formatter_class=lambda prog: argparse.ArgumentDefaultsHelpFormatter( + prog, max_help_position=4, width=100)) + +parser.add_argument('--input_file', type=str, required=False, + help='a teca_multi_cf_reader configuration file identifying' + ' the set of NetCDF CF2 files to process. When present' + ' data is read using the teca_multi_cf_reader. Use one' + ' of either --input_file or --input_regex.') + +parser.add_argument('--input_regex', type=str, required=False, + help='a teca_cf_reader regex identifying the set of NetCDF' + ' CF2 files to process. When present data is read' + ' using the teca_cf_reader. 
Use one of either' + ' --input_file or --input_regex.') + +parser.add_argument('--interval', type=str, default='monthly', + help='interval to reduce the time axis to. One of ' + 'daily, monthly, or seasonal') + +parser.add_argument('--operator', type=str, default='average', + help='reduction operator to use. One of minimum, ' + 'maximum, or average') + +parser.add_argument('--point_arrays', nargs='+', required=True, + help='list of point centered arrays to process.') + +parser.add_argument('--fill_value', type=float, default=None, + help='A value that identifies missing or invalid data. ' + 'Specifying the fill value on the command line ' + 'overrides array specific fill values stored in ' + 'the file.') + +parser.add_argument('--ignore_fill_value', + default=False, action='store_true', + help='Boolean flag that enables missing or invalid ' + 'value handling. When enabled NetCDF CF conventions ' + 'are used to determine fill value. Alternativley one ' + 'can explicitly provide a fill value on the command ' + 'line via the --fill_value argument.') + +parser.add_argument('--output_file', type=str, required=True, + help='A path and file name pattern for the output NetCDF' + ' files. %%t%% is replaced with a human readable date' + ' and time corresponding to the time of the first time' + ' step in the file. Use --date_format to change' + ' the formatting') + +parser.add_argument('--steps_per_file', type=int, default=128, + help='number of time steps to write to each output ' + 'file') + +parser.add_argument('--x_axis_variable', type=str, default='lon', + help='name of the variable to use for x-coordinates') + +parser.add_argument('--y_axis_variable', type=str, default='lat', + help='name of the variable to use for y-coordinates') + +parser.add_argument('--z_axis_variable', type=str, default='', + help='name of z coordinate variable. When processing 3D set this to' + ' the variable containing vertical coordinates. 
When empty the' + ' data will be treated as 2D.') + +parser.add_argument('--t_axis_variable', type=str, default='time', + help='name of the variable to use for t-coordinates') + +parser.add_argument('--n_threads', type=int, default=2, + help='Number of threads to use when stremaing the ' + 'reduction') + +parser.add_argument('--verbose', type=int, default=0, + help='enable verbose mode.') + +# prevent spew when running under mpi +try: + args = parser.parse_args() +except Exception: + if rank == 0: raise + +in_files = args.input_regex +out_files = args.output_file +steps_per_file = args.steps_per_file +n_threads = args.n_threads +interval = args.interval +operator = args.operator +point_arrays = args.point_arrays +fill_value = args.fill_value +ignore_fill_value = args.ignore_fill_value +x_axis_var = args.x_axis_variable +y_axis_var = args.y_axis_variable +z_axis_var = args.z_axis_variable +t_axis_var = args.t_axis_variable +verbose = args.verbose + +if verbose and rank == 0: + sys.stderr.write('running on %d ranks' % (n_ranks)) + sys.stderr.write('n_threads=%d\n' % (n_threads)) + sys.stderr.write('steps_per_file=%d\n' % (steps_per_file)) + sys.stderr.write('interval=%s\n' % (interval)) + sys.stderr.write('operator=%s\n' % (operator)) + sys.stderr.write('point_arrays=%s\n' % (str(point_arrays))) + sys.stderr.write('ignore_fill_value=%d\n' % (ignore_fill_value)) + sys.stderr.write('fill_value=%s\n' % (str(fill_value))) + +if args.input_regex: + cfr = teca_cf_reader.New() + cfr.set_files_regex(args.input_regex) +elif args.input_file: + cfr = teca_multi_cf_reader.New() + cfr.set_input_file(args.input_file) +else: + if rank == 0: + raise RuntimeError('Missing one of --input_file or --input_regex') + +cfr.set_x_axis_variable(x_axis_var) +cfr.set_y_axis_variable(y_axis_var) +cfr.set_z_axis_variable(z_axis_var) +cfr.set_t_axis_variable(t_axis_var) + +mav = teca_temporal_reduction.New() +mav.set_input_connection(cfr.get_output_port()) +mav.set_interval(interval) 
+mav.set_operator(operator) +mav.set_fill_value(fill_value) +mav.set_use_fill_value( 0 if ignore_fill_value else 1 ) +mav.set_point_arrays(point_arrays) +mav.set_verbose(verbose) +mav.set_thread_pool_size(n_threads) +mav.set_stream_size(2) + +cfw = teca_cf_writer.New() +cfw.set_input_connection(mav.get_output_port()) +cfw.set_verbose(verbose) +cfw.set_thread_pool_size(1) +cfw.set_steps_per_file(steps_per_file) +cfw.set_file_name(out_files) +cfw.set_point_arrays(point_arrays) +cfw.update() + +teca_profiler.end_event('temporal_reduction') +teca_profiler.finalize() diff --git a/core/CMakeLists.txt b/core/CMakeLists.txt index 29b469b5c..525e3bcef 100644 --- a/core/CMakeLists.txt +++ b/core/CMakeLists.txt @@ -5,19 +5,29 @@ include_directories(${CMAKE_CURRENT_SOURCE_DIR}) set(teca_core_srcs teca_algorithm.cxx teca_algorithm_executive.cxx + teca_bad_cast.cxx teca_binary_stream.cxx teca_calendar.cxx + teca_common.cxx teca_dataset.cxx + teca_dataset_source.cxx + teca_dataset_capture.cxx teca_index_executive.cxx teca_index_reduce.cxx teca_memory_profiler.cxx teca_metadata.cxx teca_metadata_util.cxx teca_mpi_manager.cxx + teca_mpi_util.cxx teca_parallel_id.cxx - teca_threaded_algorithm.cxx - teca_thread_pool.cxx teca_profiler.cxx + teca_programmable_algorithm.cxx + teca_programmable_reduce.cxx + teca_string_util.cxx + teca_system_util.cxx + teca_threaded_algorithm.cxx + teca_threaded_programmable_algorithm.cxx + teca_thread_util.cxx teca_variant_array.cxx ) diff --git a/core/calcalcs.cxx b/core/calcalcs.cxx index 46896f0ad..d76612055 100644 --- a/core/calcalcs.cxx +++ b/core/calcalcs.cxx @@ -1621,6 +1621,18 @@ static int ncals_known=0; static calcalcs_cal **known_cal; /* ptr to array of calcals_cal ptrs */ static char **known_cal_name; +/* for some oiptimizations that skip initialization of + * calendar when the calendar is not changed in between + * invocations */ +static char current_calendar_name[1024] = {'\0'}; +static char current_unit_str[1024] = {'\0'}; +static 
calcalcs_cal *current_calendar = nullptr; +static ut_unit *current_units = nullptr; +static cv_converter *conv_user_units_to_days = nullptr; +static cv_converter *conv_days_to_user_units = nullptr; +static int y0=0, mon0=0, d0=0, h0=0, min0=0, jday0=0; +static double s0=0.0, fpartday0=0.0, extra_seconds0=0.0; + /* Stores previously emitted "unknown calendar" warnings static void unknown_cal_emit_warning( const char *calendar_name ); #define UTC2_MAX_UNKCAL_WARNS 1000 @@ -1637,83 +1649,9 @@ int date(double val, int *year, int *month, int *day, int *hour, double fdays, extra_seconds, tot_extra_seconds; int ndays; - /* Following vars are saved between invocations and reused if the - * passed units are the same as last time. */ - static char prev_unit_str[1024] = {'\0'}; - static char prev_calendar[1024] = {'\0'}; - - static ut_unit *prev_units = nullptr; - static cv_converter *conv_user_units_to_days = nullptr; - static calcalcs_cal *cal2use = nullptr; - - static int y0, mon0, d0, h0, min0, jday0; - static double s0, extra_seconds0; - -#if defined(CALCALCS_THREAD) - calcalcs_mutex.lock(); -#endif - /* See if we are being passed the same units and calendar as last time. 
If so, - * we can optimize by not recomputing all this junk - */ - if (strncmp(prev_calendar, calendar_name, 1024) - || strncmp(prev_unit_str, unit_str, 1024)) - { - // initialize - if ((!have_initted) && initialize()) - { - fprintf(stderr, "Error, failed to initialized"); - return -1; - } - - /* Get the calendar we will be using, based on the passed name */ - if (!(cal2use = getcal(calendar_name))) - { - fprintf(stderr, "Error, unknown calendar %s\n", calendar_name); - return UT_EINVALID; - } - - /* create units object from the string, and update the cached string */ - ut_unit *dataunits = ut_parse(u_system, unit_str, UT_ASCII); - if (!dataunits) - { - fprintf(stderr, "Error, bad units %s\n", unit_str); - return UT_EINVALID; - } - strncpy(prev_unit_str, unit_str, 1023); - - /* Get origin day of the data units */ - get_origin(dataunits, &y0, &mon0, &d0, &h0, &min0, &s0); /* Note: static vars */ - - /* Number of seconds into the specified origin day */ - extra_seconds0 = h0*3600.0 + min0*60.0 + s0; /* Note: static vars */ - - /* Convert the origin day to Julian Day number in the specified calendar */ - if( (ierr = ccs_date2jday( cal2use, y0, mon0, d0, &jday0 )) != 0 ) - { - fprintf( stderr, "Error in utCalendar2: %s\n", ccs_err_str(ierr) ); - return UT_EINVALID; - } - - /* Get converter from user-specified units to "days" */ - if (conv_user_units_to_days) - cv_free( conv_user_units_to_days ); - - conv_user_units_to_days = - get_user_to_day_converter(dataunits, y0, mon0, d0, h0, min0, s0); - - /* Save these units so we can reuse our time-consuming - * calculations next time if they are the same units - */ - if (prev_units) - ut_free(prev_units); - - prev_units = dataunits; - - strncpy(prev_calendar, cal2use->name, 1023); - } -#if defined(CALCALCS_THREAD) - calcalcs_mutex.unlock(); -#endif + // initialize and select the calendar + if ((ierr = set_current_calendar(calendar_name, unit_str))) + return ierr; /* Convert user value of offset to floating point days */ fdays 
= cv_convert_double( conv_user_units_to_days, val ); @@ -1737,7 +1675,7 @@ int date(double val, int *year, int *month, int *day, int *hour, } /* Convert to a date */ - if ((ierr = ccs_jday2date( cal2use, jdnew, year, month, day ))) + if ((ierr = ccs_jday2date( current_calendar, jdnew, year, month, day ))) { fprintf(stderr, "Error in utCalendar2: %s\n", ccs_err_str(ierr)); return UT_EINVALID; @@ -1763,7 +1701,7 @@ int date(double val, int *year, int *month, int *day, int *hour, *hour += 1.0; if( *hour >= 24.0 ) { *hour -= 24.0; - if( (ierr = ccs_jday2date( cal2use, jdnew+1, year, month, day )) != 0 ) { + if( (ierr = ccs_jday2date( current_calendar, jdnew+1, year, month, day )) != 0 ) { fprintf( stderr, "Error in utCalendar2: %s\n", ccs_err_str(ierr) ); return( UT_EINVALID ); } @@ -1782,87 +1720,18 @@ int date(double val, int *year, int *month, int *day, int *hour, int coordinate(int year, int month, int day, int hour, int minute, double second, const char *unit_str, const char *calendar_name, double *value) { - int jday, ierr, diff_in_days; - double fdiff_in_days, val_days, val_partdays, fdiff_in_partdays, fpartday; - - /* Following vars are static and retained between invocations for efficiency */ - - static char prev_unit_str[1024] = {'\0'}; - static char prev_calendar[1024] = {'\0'}; - - static ut_unit *prev_units = nullptr; - static cv_converter *conv_days_to_user_units = nullptr; - static calcalcs_cal *cal2use = nullptr; - - static int y0, mon0, d0, h0, min0, jday0; - static double s0, fpartday0; - -#if defined(CALCALCS_THREAD) - calcalcs_mutex.lock(); -#endif - /* See if we are being passed the same units and calendar as last time. 
If so, - * we can optimize by not recomputing all this junk - */ - if (strncmp(prev_calendar, calendar_name, 1024) - || strncmp(prev_unit_str, unit_str, 1024)) - { - if ((!have_initted) && initialize()) - { - fprintf(stderr, "Error, failed to initialized"); - return -1; - } - - /* Get the calendar we will be using, based on the passed name */ - if (!(cal2use = getcal(calendar_name))) - { - fprintf(stderr, "Error, unknown calendar %s\n", calendar_name); - return UT_EINVALID; - } - strncpy(prev_calendar, cal2use->name, 1023); - - /* create units object from the string, and update the cached string */ - ut_unit *user_unit = ut_parse(u_system, unit_str, UT_ASCII); - if (!user_unit) - { - fprintf(stderr, "Error, bad units %s\n", unit_str); - return UT_EINVALID; - } - strncpy(prev_unit_str, unit_str, 1023); - - /* Get origin day of the data units */ - get_origin(user_unit, &y0, &mon0, &d0, &h0, &min0, &s0); - - /* Convert the origin day to Julian Day number in the specified calendar */ - if( (ierr = ccs_date2jday( cal2use, y0, mon0, d0, &jday0 )) != 0 ) - { - fprintf(stderr, "Error in utCalendar2: %s\n", ccs_err_str(ierr)); - return UT_EINVALID; - } - - /* Get the origin's HMS in fractional (floating point) part of a Julian day */ - fpartday0 = (double)h0/24.0 + (double)min0/1440.0 + s0/86400.0; - - /* Get converter for turning days into user's units */ - if (conv_days_to_user_units) - cv_free(conv_days_to_user_units); - - conv_days_to_user_units = - get_day_to_user_converter(user_unit, y0, mon0, d0, h0, min0, s0); - - /* Save these units so we can reuse our time-consuming - * calculations next time if they are the same units */ - if (prev_units) - ut_free(prev_units); + int jday=0, ierr=0, diff_in_days=0; - prev_units = user_unit; + double fdiff_in_days=0.0, val_days=0.0, + val_partdays=0.0, fdiff_in_partdays=0.0, + fpartday=0.0; - } -#if defined(CALCALCS_THREAD) - calcalcs_mutex.unlock(); -#endif + // initialize and select the calendar + if ((ierr = 
set_current_calendar(calendar_name, unit_str))) + return ierr; /* Turn passed date into a Julian day */ - if((ierr = ccs_date2jday( cal2use, year, month, day, &jday ))) + if((ierr = ccs_date2jday( current_calendar, year, month, day, &jday ))) { fprintf( stderr, "Error in utInvCalendar2: %s\n", ccs_err_str(ierr)); return UT_EINVALID; @@ -1896,7 +1765,7 @@ static cv_converter *get_user_to_day_converter( ut_unit *uu, int y0, int mon0, i { char daystr[1024]; ut_unit *udu_days; - cv_converter *conv_user_units_to_days; + cv_converter *conv = nullptr; sprintf( daystr, "days since %04d-%02d-%02d %02d:%02d:%f", y0, mon0, d0, h0, min0, s0 ); @@ -1907,15 +1776,15 @@ static cv_converter *get_user_to_day_converter( ut_unit *uu, int y0, int mon0, i daystr ); exit(-1); } - conv_user_units_to_days = ut_get_converter( uu, udu_days ); - if( conv_user_units_to_days == NULL ) { + conv = ut_get_converter( uu, udu_days ); + if( conv == NULL ) { fprintf( stderr, "internal error in utCalendar2/conv_to_days: cannot convert from \"%s\" to user units\n", daystr ); exit(-1); } ut_free( udu_days ); - return( conv_user_units_to_days ); + return( conv ); } /*============================================================================================== @@ -1925,7 +1794,7 @@ static cv_converter *get_day_to_user_converter( ut_unit *uu, int y0, int mon0, i { char daystr[1024]; ut_unit *udu_days; - cv_converter *conv_days_to_user_units; + cv_converter *conv; sprintf( daystr, "days since %04d-%02d-%02d %02d:%02d:%f", y0, mon0, d0, h0, min0, s0 ); @@ -1936,15 +1805,15 @@ static cv_converter *get_day_to_user_converter( ut_unit *uu, int y0, int mon0, i daystr ); exit(-1); } - conv_days_to_user_units = ut_get_converter( udu_days, uu ); - if( conv_days_to_user_units == NULL ) { + conv = ut_get_converter( udu_days, uu ); + if( conv == NULL ) { fprintf( stderr, "internal error in utCalendar2/conv_to_user_units: cannot convert from user units to \"%s\"\n", daystr ); exit(-1); } free( udu_days ); - return( 
conv_days_to_user_units ); + return( conv ); } /*========================================================================================== @@ -2123,4 +1992,137 @@ static void unknown_cal_emit_warning( const char *calendar_name ) strcpy( unknown_cal_emitted_warning_for[ n_unkcal ], calendar_name ); n_unkcal++; }*/ + + +// ========================================================================== +int set_current_calendar( const char *calendar_name, const char *unit_str ) +{ +#if defined(CALCALCS_THREAD) + { + const std::lock_guard lock(calcalcs_mutex); +#endif + int ierr = 0; + + /* See if we are being passed the same units and calendar as last time. If so, + * we can optimize by not recomputing all this junk + */ + if (strncmp(current_calendar_name, calendar_name, 1024) + || strncmp(current_unit_str, unit_str, 1024)) + { + // initialize + if ((!have_initted) && initialize()) + { + fprintf(stderr, "Error, failed to initialized"); + return -1; + } + + /* Get the calendar we will be using, based on the passed name */ + if (!(current_calendar = getcal(calendar_name))) + { + fprintf(stderr, "Error, unknown calendar %s\n", calendar_name); + return UT_EINVALID; + } + + /* create units object from the string, and update the cached string */ + ut_unit *dataunits = ut_parse(u_system, unit_str, UT_ASCII); + if (!dataunits) + { + fprintf(stderr, "Error, bad units %s\n", unit_str); + return UT_EINVALID; + } + strncpy(current_unit_str, unit_str, 1023); + + /* Get origin day of the data units */ + get_origin(dataunits, &y0, &mon0, &d0, &h0, &min0, &s0); /* Note: static vars */ + + /* Number of seconds into the specified origin day */ + extra_seconds0 = h0*3600.0 + min0*60.0 + s0; /* Note: static vars */ + + /* Convert the origin day to Julian Day number in the specified calendar */ + if( (ierr = ccs_date2jday( current_calendar, y0, mon0, d0, &jday0 )) != 0 ) + { + fprintf( stderr, "Error in utCalendar2: %s\n", ccs_err_str(ierr) ); + return UT_EINVALID; + } + + /* Get the 
origin's HMS in fractional (floating point) part of a Julian day */ + fpartday0 = (double)h0/24.0 + (double)min0/1440.0 + s0/86400.0; + + /* Get converter from user-specified units to "days" */ + if (conv_user_units_to_days) + cv_free( conv_user_units_to_days ); + + conv_user_units_to_days = + get_user_to_day_converter(dataunits, y0, mon0, d0, h0, min0, s0); + + /* Get converter for turning days into user's units */ + if (conv_days_to_user_units) + cv_free(conv_days_to_user_units); + + conv_days_to_user_units = + get_day_to_user_converter(dataunits, y0, mon0, d0, h0, min0, s0); + + /* Save these units so we can reuse our time-consuming + * calculations next time if they are the same units + */ + if (current_units) + ut_free(current_units); + + current_units = dataunits; + + strncpy(current_calendar_name, current_calendar->name, 1023); + } +#if defined(CALCALCS_THREAD) + } +#endif + return 0; +} + +// ========================================================================== +int is_leap_year( const char *calendar_name, const char *unit_str, + int year, int &leap ) +{ + // initialize and select the calendar + int ierr = 0; + if ((ierr = set_current_calendar(calendar_name, unit_str))) + { + fprintf(stderr, "Error: system initialization failed\n"); + return ierr; + } + + // calculate leap year + if ((ierr = ccs_isleap(current_calendar, year, &leap))) + { + fprintf(stderr, "Error, failed to determine if %d in the \"%s\" " + "calendar with units \"%s\" is a leap year\n", year, + calendar_name, unit_str); + return ierr; + } + + return 0; +} + +// ========================================================================== +int days_in_month( const char *calendar_name, const char *unit_str, + int year, int month, int &dpm ) +{ + int ierr = 0; + + // initialize and select the calendar + if ((ierr = set_current_calendar(calendar_name, unit_str))) + { + fprintf(stderr, "Error: system initialization failed"); + return ierr; + } + + // calculate days in the month + if ((ierr = 
ccs_dpm(current_calendar, year, month, &dpm))) + { + fprintf(stderr, "Error: failed to get days per month"); + return ierr; + } + + return 0; +} + }; diff --git a/core/calcalcs.h b/core/calcalcs.h index e57700635..48fa55cad 100644 --- a/core/calcalcs.h +++ b/core/calcalcs.h @@ -97,7 +97,7 @@ int ccs_jday2date( calcalcs_cal *calendar, int jday, int *year, int *month, int /*-------------------------------------------------------------------------- * ccs_isleap: determine if the specified year is a leap year in - * the specified calendar + * the specified calendar. return 0 if successful. */ int ccs_isleap( calcalcs_cal *calendar, int year, int *leap ); @@ -127,8 +127,9 @@ int ccs_doy2date( calcalcs_cal *calendar, int year, int doy, int *month, int *da * Note that specifying "zero" days since, and giving different calendars as the original * and new calendars, essentially converts dates between calendars. */ -int ccs_dayssince( calcalcs_cal *calendar_orig, int year_orig, int month_orig, int day_orig, - int ndays_since, calcalcs_cal *calendar_new, int *year_new, int *month_new, int *day_new ); +int ccs_dayssince( calcalcs_cal *calendar_orig, int year_orig, int month_orig, + int day_orig, int ndays_since, calcalcs_cal *calendar_new, + int *year_new, int *month_new, int *day_new ); /*-------------------------------------------------------------------------- * get/set_cal_xition_date: these routines set the transition date for a Standard @@ -205,22 +206,54 @@ char *ccs_err_str(int ccs_errno); #define CALCALCS_ERR_INVALID_CALENDAR -17 -/* TODO -- calendar object based on the below (maybe a singleton) - * to eliminate static variable and thread safety issues. - * - * code below is from utCalendar2_cal - * we're including it here so that all the calendaring utility - * is in one h,c source +/* The high level API is defined below. This API is thread safe and + * has implementations optimized for use with calendar and units strings. 
*/ - #define UT_ENOINIT -10 #define UT_EINVALID -11 -int date(double val, int *year, int *month, int *day, int *hour, - int *minute, double *second, const char *dataunits, const char *calendar_name ); +/*-------------------------------------------------------------------------- + * high level thread safe initialize the library and select a calendar + * to use in subsequent calls. + * return 0 upon success + */ +int set_current_calendar( const char *calendar, const char *units ); -int coordinate(int year, int month, int day, int hour, int minute, - double second, const char *user_unit, const char *calendar_name, double *value ); +/*-------------------------------------------------------------------------- + * is_leap_year: determine if the specified year is a leap year in + * the specified calendar. this wraps ccs_isleap such that initialization + * is automatically handled and optimizes for repeat calls. return 0 + * if successful. + */ +int is_leap_year( const char *calendar, const char *units, + int year, int &leap ); + +/*-------------------------------------------------------------------------- + * days_in_month: returns the days per month for the given year/month. + * Note that during the month that transitions from a Julian to a + * Gregorian calendar, this might be a strange number of days. this + * wraps ccs_dpm such that initialization is automatically handled and + * optimizes for repeat calls. returns 0 on success. + */ +int days_in_month( const char *calendar, const char *units, + int year, int month, int &dpm ); + +/*-------------------------------------------------------------------------- + * date : given a floating point offset in the given calendar return + * year, month, day, hour, minute, seconds. returns 0 upon success. 
+ */ +int date( double val, int *year, int *month, int *day, int *hour, + int *minute, double *second, const char *dataunits, + const char *calendar_name ); + + +/*-------------------------------------------------------------------------- + * given a year, month, day, hour, minute, second and calendar find + * the floating point offset. returns 0 upon success. + */ +int coordinate( int year, int month, int day, int hour, int minute, + double second, const char *user_unit, const char *calendar_name, + double *value ); }; diff --git a/core/teca_algorithm_fwd.h b/core/teca_algorithm_fwd.h index 9ee6d7587..ee0a58b21 100644 --- a/core/teca_algorithm_fwd.h +++ b/core/teca_algorithm_fwd.h @@ -62,11 +62,6 @@ void set_##NAME(const T &v) \ } \ \ const T &get_##NAME() const \ -{ \ - return this->NAME; \ -} \ - \ -T &get_##NAME() \ { \ return this->NAME; \ } @@ -122,11 +117,6 @@ void set_##NAME##s(const const_p_teca_variant_array &v) \ } \ \ const T &get_##NAME(size_t i) const \ -{ \ - return this->NAME##s[i]; \ -} \ - \ -T &get_##NAME(size_t i) \ { \ return this->NAME##s[i]; \ } \ diff --git a/core/teca_bad_cast.cxx b/core/teca_bad_cast.cxx new file mode 100644 index 000000000..e551624a9 --- /dev/null +++ b/core/teca_bad_cast.cxx @@ -0,0 +1,11 @@ +#include "teca_bad_cast.h" + +#include + +// -------------------------------------------------------------------------- +teca_bad_cast::teca_bad_cast(const std::string &from, const std::string &to) +{ + std::ostringstream oss; + oss << "Failed to cast from " << from << " to " << to; + m_what = oss.str(); +} diff --git a/core/teca_bad_cast.h b/core/teca_bad_cast.h new file mode 100644 index 000000000..bf4138c6e --- /dev/null +++ b/core/teca_bad_cast.h @@ -0,0 +1,27 @@ +#ifndef teca_bad_cast_h +#define teca_bad_cast_h + +#include +#include + +class teca_bad_cast : public std::exception +{ +public: + teca_bad_cast() = delete; + ~teca_bad_cast() = default; + + teca_bad_cast(const std::string &from, const std::string &to); + + const 
char* what() const noexcept { return m_what.c_str(); } + +private: + std::string m_what; +}; + +template +const std::string safe_class_name(const class_t &o) +{ + return o ? std::string(o->get_class_name()) : std::string("nullptr"); +} + +#endif diff --git a/core/teca_binary_stream.h b/core/teca_binary_stream.h index 3488c0369..dac237602 100644 --- a/core/teca_binary_stream.h +++ b/core/teca_binary_stream.h @@ -86,6 +86,19 @@ class teca_binary_stream template void pack(const std::vector &v); template void unpack(std::vector &v); + template void pack(const std::map &v); + template void unpack(std::map &v); + + template void pack(const std::pair &v); + template void unpack(std::pair &v); + + // verify that the passed value is in the stream + // advance past the value. return 0 if the value is found + // for char * case null terminator is not read + template int expect(const T &val); + template int expect(const T *val, unsigned long n); + int expect(const char *str); + // broadcast the stream from the root process to all other processes int broadcast(MPI_Comm comm, int root_rank=0); @@ -212,4 +225,98 @@ void teca_binary_stream::unpack(std::vector &v) this->unpack(v.data(), vlen); } +//----------------------------------------------------------------------------- +template +void teca_binary_stream::pack(const std::map &m) +{ + unsigned long n_elem = m.size(); + this->pack(n_elem); + + typename std::map::const_iterator it = m.begin(); + + for (unsigned long i = 0; i < n_elem; ++i) + { + this->pack(it->first); + this->pack(it->second); + } +} + +//----------------------------------------------------------------------------- +template +void teca_binary_stream::unpack(std::map &m) +{ + unsigned long n_elem = 0; + this->unpack(n_elem); + + for (unsigned long i = 0; i < n_elem; ++i) + { + KT key; + VT val; + + this->unpack(key); + this->unpack(val); + + m.emplace(std::move(key), std::move(val)); + } +} + 
+//----------------------------------------------------------------------------- +template +void teca_binary_stream::pack(const std::pair &p) +{ + this->pack(p.first); + this->pack(p.second); +} + +//----------------------------------------------------------------------------- +template +void teca_binary_stream::unpack(std::pair &p) +{ + this->unpack(p.first); + this->unpack(p.second); +} + +//----------------------------------------------------------------------------- +template +int teca_binary_stream::expect(const T &val) +{ + T tmp; + this->unpack(tmp); + + if (tmp == val) + return 0; + + return -1; +} + +//----------------------------------------------------------------------------- +template +int teca_binary_stream::expect(const T *val, unsigned long n) +{ + int same = 0; + T *tmp = (T*)malloc(n*sizeof(T)); + this->unpack(tmp, n); + for (unsigned long i = 0; i < n; ++i) + { + if (tmp[i] != val[i]) + { + same = -1; + break; + } + } + free(tmp); + return same; +} + +//----------------------------------------------------------------------------- +inline +int teca_binary_stream::expect(const char *str) +{ + unsigned long n = strlen(str); + char *tmp = (char*)malloc(n); + this->unpack(tmp, n); + int same = strncmp(str, tmp, n); + free(tmp); + return same; +} #endif diff --git a/core/teca_common.cxx b/core/teca_common.cxx new file mode 100644 index 000000000..52baaccef --- /dev/null +++ b/core/teca_common.cxx @@ -0,0 +1,23 @@ +#include "teca_common.h" + +// ************************************************************************** +std::ostream &operator<<(std::ostream &os, const std::vector &vec) +{ + if (!vec.empty()) + { + os << "\"" << vec[0] << "\""; + size_t n = vec.size(); + for (size_t i = 1; i < n; ++i) + os << ", \"" << vec[i] << "\""; + } + return os; +} + +// ************************************************************************** +int have_tty() +{ + static int have = -1; + if (have < 0) + have = isatty(fileno(stderr)); + return have; +} diff --git 
a/core/teca_common.h b/core/teca_common.h index ba54ee65b..61d06afcf 100644 --- a/core/teca_common.h +++ b/core/teca_common.h @@ -6,17 +6,30 @@ #include #include #include +#include +#include -// detect if we are writing to a tty, if not then -// we should not use ansi color codes -inline int have_tty() +// send a vector to a stream +template +std::ostream &operator<<(std::ostream &os, const std::vector &vec) { - static int have = -1; - if (have < 0) - have = isatty(fileno(stderr)); - return have; + if (!vec.empty()) + { + os << vec[0]; + size_t n = vec.size(); + for (size_t i = 1; i < n; ++i) + os << ", " << vec[i]; + } + return os; } +// send a vector of strings to a stream +std::ostream &operator<<(std::ostream &os, const std::vector &vec); + +// detect if we are writing to a tty, if not then +// we should not use ansi color codes +int have_tty(); + #define ANSI_RED "\033[1;31;40m" #define ANSI_GREEN "\033[1;32;40m" #define ANSI_YELLOW "\033[1;33;40m" diff --git a/core/teca_dataset.cxx b/core/teca_dataset.cxx index 4130774ee..b2a870ac4 100644 --- a/core/teca_dataset.cxx +++ b/core/teca_dataset.cxx @@ -1,6 +1,7 @@ #include "teca_dataset.h" #include "teca_metadata.h" + // -------------------------------------------------------------------------- teca_dataset::teca_dataset() { @@ -13,6 +14,46 @@ teca_dataset::~teca_dataset() delete this->metadata; } +// -------------------------------------------------------------------------- +int teca_dataset::set_request_index(const std::string &key, long val) +{ + if (this->metadata->set("index_request_key", key) || + this->metadata->set(key, val)) + { + TECA_ERROR("failed to set the index_request_key \"" + << key << "\" to " << val) + return -1; + } + return 0; +} + +// -------------------------------------------------------------------------- +int teca_dataset::set_request_index(long val) +{ + std::string index_request_key; + if (this->metadata->get("index_request_key", index_request_key)) + { + TECA_ERROR("An 
index_request_key has not been set") + return -1; + } + + this->metadata->set(index_request_key, val); + return 0; +} + +// -------------------------------------------------------------------------- +int teca_dataset::get_request_index(long &val) const +{ + std::string index_request_key; + if (this->metadata->get("index_request_key", index_request_key)) + { + TECA_ERROR("An index_request_key has not been set") + return -1; + } + + return this->metadata->get(index_request_key, val); +} + // -------------------------------------------------------------------------- void teca_dataset::copy(const const_p_teca_dataset &other) { @@ -58,23 +99,47 @@ void teca_dataset::set_metadata(const teca_metadata &md) } // -------------------------------------------------------------------------- -void teca_dataset::to_stream(teca_binary_stream &bs) const +int teca_dataset::to_stream(teca_binary_stream &bs) const { - this->metadata->to_stream(bs); + std::string class_name = this->get_class_name(); + bs.pack(class_name.c_str(), class_name.size()); + + if (this->metadata->to_stream(bs)) + return -1; + + return 0; } // -------------------------------------------------------------------------- -void teca_dataset::from_stream(teca_binary_stream &bs) +int teca_dataset::from_stream(teca_binary_stream &bs) { - this->metadata->from_stream(bs); + std::string class_name = this->get_class_name(); + + if (bs.expect(class_name.c_str(), class_name.size())) + { + TECA_ERROR("invalid stream") + return -1; + } + + if (this->metadata->from_stream(bs)) + { + TECA_ERROR("Failed to deserialize metadata") + return -1; + } + + return 0; } // -------------------------------------------------------------------------- -void teca_dataset::to_stream(std::ostream &os) const +int teca_dataset::to_stream(std::ostream &os) const { - this->metadata->to_stream(os); + if (this->metadata->to_stream(os)) + return -1; + return 0; } // -------------------------------------------------------------------------- -void 
teca_dataset::from_stream(std::istream &) -{} +int teca_dataset::from_stream(std::istream &) +{ + return -1; +} diff --git a/core/teca_dataset.h b/core/teca_dataset.h index 6719488db..3f5b167a0 100644 --- a/core/teca_dataset.h +++ b/core/teca_dataset.h @@ -1,6 +1,7 @@ #ifndef teca_dataset_h #define teca_dataset_h +#include "teca_variant_array.h" #include "teca_dataset_fwd.h" #include class teca_binary_stream; @@ -14,6 +15,17 @@ class teca_dataset : public std::enable_shared_from_this public: virtual ~teca_dataset(); + // the name of the key that holds the index identifing this dataset + // this should be set by the algorithm that creates the dataset. + TECA_DATASET_METADATA(index_request_key, std::string, 1) + + // a dataset metadata that uses the value of index_request_key to + // store the index that identifies this dataset. this should be set + // by the algorithm that creates the dataset. + virtual int get_request_index(long &val) const; + virtual int set_request_index(const std::string &key, long val); + virtual int set_request_index(long val); + // covert to bool. true if the dataset is not empty. // otherwise false. explicit operator bool() const noexcept @@ -26,9 +38,16 @@ class teca_dataset : public std::enable_shared_from_this // virtual constructor. return a new dataset of the same type. virtual p_teca_dataset new_instance() const = 0; - // virtual copy constructor. return a deep copy of this + // virtual copy constructor. return a shallow/deep copy of this // dataset in a new instance. virtual p_teca_dataset new_copy() const = 0; + virtual p_teca_dataset new_shallow_copy() = 0; + + // return a string identifier uniquely naming the dataset type + virtual std::string get_class_name() const = 0; + + // return an integer identifier uniquely naming the dataset type + virtual int get_type_code() const = 0; // copy data and metadata. shallow copy uses reference // counting, while copy duplicates the data. 
@@ -48,12 +67,12 @@ class teca_dataset : public std::enable_shared_from_this // serialize the dataset to/from the given stream // for I/O or communication - virtual void to_stream(teca_binary_stream &) const; - virtual void from_stream(teca_binary_stream &); + virtual int to_stream(teca_binary_stream &) const; + virtual int from_stream(teca_binary_stream &); // stream to/from human readable representation - virtual void to_stream(std::ostream &) const; - virtual void from_stream(std::istream &); + virtual int to_stream(std::ostream &) const; + virtual int from_stream(std::istream &); protected: teca_dataset(); diff --git a/alg/teca_dataset_capture.cxx b/core/teca_dataset_capture.cxx similarity index 100% rename from alg/teca_dataset_capture.cxx rename to core/teca_dataset_capture.cxx diff --git a/alg/teca_dataset_capture.h b/core/teca_dataset_capture.h similarity index 100% rename from alg/teca_dataset_capture.h rename to core/teca_dataset_capture.h diff --git a/core/teca_dataset_fwd.h b/core/teca_dataset_fwd.h index b998261fa..46fd125d9 100644 --- a/core/teca_dataset_fwd.h +++ b/core/teca_dataset_fwd.h @@ -44,6 +44,13 @@ virtual p_teca_dataset new_copy() const override \ p_teca_dataset o = this->new_instance(); \ o->copy(this->shared_from_this()); \ return o; \ +} \ + \ +virtual p_teca_dataset new_shallow_copy() override \ +{ \ + p_teca_dataset o = this->new_instance(); \ + o->shallow_copy(this->shared_from_this()); \ + return o; \ } // convenience macro for adding properties to dataset diff --git a/alg/teca_dataset_source.cxx b/core/teca_dataset_source.cxx similarity index 100% rename from alg/teca_dataset_source.cxx rename to core/teca_dataset_source.cxx diff --git a/alg/teca_dataset_source.h b/core/teca_dataset_source.h similarity index 100% rename from alg/teca_dataset_source.h rename to core/teca_dataset_source.h diff --git a/core/teca_index_executive.cxx b/core/teca_index_executive.cxx index 9540d6c40..d864cec5a 100644 --- a/core/teca_index_executive.cxx +++ 
b/core/teca_index_executive.cxx @@ -75,6 +75,7 @@ int teca_index_executive::initialize(MPI_Comm comm, const teca_metadata &md) #if !defined(TECA_HAS_MPI) (void)comm; #endif + this->requests.clear(); // locate the keys that enable us to know how many diff --git a/core/teca_index_reduce.cxx b/core/teca_index_reduce.cxx index 3c450e21c..741ec2c11 100644 --- a/core/teca_index_reduce.cxx +++ b/core/teca_index_reduce.cxx @@ -86,19 +86,24 @@ void block_decompose(MPI_Comm comm, unsigned long n_indices, unsigned long n_ran if (verbose) { std::vector decomp = {block_start, block_size}; - if (rank == 0) - { - decomp.resize(2*n_ranks); #if defined(TECA_HAS_MPI) - MPI_Gather(MPI_IN_PLACE, 0, MPI_DATATYPE_NULL, decomp.data(), - 2, MPI_UNSIGNED_LONG, 0, comm); - } - else + int is_init = 0; + MPI_Initialized(&is_init); + if (is_init) { - MPI_Gather(decomp.data(), 2, MPI_UNSIGNED_LONG, nullptr, - 0, MPI_DATATYPE_NULL, 0, comm); -#endif + if (rank == 0) + { + decomp.resize(2*n_ranks); + MPI_Gather(MPI_IN_PLACE, 0, MPI_DATATYPE_NULL, + decomp.data(), 2, MPI_UNSIGNED_LONG, 0, comm); + } + else + { + MPI_Gather(decomp.data(), 2, MPI_UNSIGNED_LONG, + nullptr, 0, MPI_DATATYPE_NULL, 0, comm); + } } +#endif if (rank == 0) { std::ostringstream oss; @@ -108,8 +113,7 @@ void block_decompose(MPI_Comm comm, unsigned long n_indices, unsigned long n_ran oss << i << " : " << decomp[ii] << " - " << decomp[ii] + decomp[ii+1] -1 << (i < n_ranks-1 ? 
"\n" : ""); } - TECA_STATUS("map index decomposition:" - << std::endl << oss.str()) + TECA_STATUS("map index decomposition:" << std::endl << oss.str()) } } } @@ -119,7 +123,9 @@ void block_decompose(MPI_Comm comm, unsigned long n_indices, unsigned long n_ran // -------------------------------------------------------------------------- teca_index_reduce::teca_index_reduce() : start_index(0), end_index(-1) -{} +{ + this->set_stream_size(2); +} #if defined(TECA_HAS_BOOST) // -------------------------------------------------------------------------- @@ -158,6 +164,26 @@ std::vector teca_index_reduce::get_upstream_request( { std::vector up_req; + unsigned long rank = 0; + unsigned long n_ranks = 1; + MPI_Comm comm = this->get_communicator(); +#if defined(TECA_HAS_MPI) + int is_init = 0; + MPI_Initialized(&is_init); + if (is_init) + { + // this is excluded from processing + if (comm == MPI_COMM_NULL) + return up_req; + + int tmp = 0; + MPI_Comm_size(comm, &tmp); + n_ranks = tmp; + MPI_Comm_rank(comm, &tmp); + rank = tmp; + } +#endif + // locate the keys that enable us to know how many // requests we need to make and what key to use const teca_metadata &md = input_md[0]; @@ -193,21 +219,6 @@ std::vector teca_index_reduce::get_upstream_request( // partition indices across MPI ranks. each rank will end up with a unique // block of indices to process. 
- unsigned long rank = 0; - unsigned long n_ranks = 1; - MPI_Comm comm = this->get_communicator(); -#if defined(TECA_HAS_MPI) - int is_init = 0; - MPI_Initialized(&is_init); - if (is_init) - { - int tmp = 0; - MPI_Comm_size(comm, &tmp); - n_ranks = tmp; - MPI_Comm_rank(comm, &tmp); - rank = tmp; - } -#endif unsigned long block_size = 1; unsigned long block_start = 0; @@ -262,40 +273,39 @@ teca_metadata teca_index_reduce::get_output_metadata( // -------------------------------------------------------------------------- const_p_teca_dataset teca_index_reduce::reduce_local( - std::vector input_data) // pass by value is intentional + std::vector input_data) // pass by value is necessary { unsigned long n_in = input_data.size(); if (n_in == 0) return p_teca_dataset(); - if (n_in == 1) - return input_data[0]; - - while (n_in > 1) + do { if (n_in % 2) TECA_PROFILE_METHOD(128, this, "reduce", - input_data[0] = this->reduce(input_data[0], input_data[n_in-1]); + input_data[0] = this->reduce(input_data[0], + (n_in > 1 ? 
input_data[n_in-1] : nullptr)); ) - unsigned long n = n_in/2; - for (unsigned long i = 0; i < n; ++i) + n_in /= 2; + for (unsigned long i = 0; i < n_in; ++i) { unsigned long ii = 2*i; TECA_PROFILE_METHOD(128, this, "reduce", - input_data[i] = this->reduce(input_data[ii], input_data[ii+1]); + input_data[i] = this->reduce(input_data[ii], + input_data[ii+1]); ) } - - n_in = n; } + while (n_in > 1); + return input_data[0]; } // -------------------------------------------------------------------------- const_p_teca_dataset teca_index_reduce::reduce_remote( - const_p_teca_dataset local_data) // pass by value is intentional + const_p_teca_dataset local_data) { #if defined(TECA_HAS_MPI) int is_init = 0; @@ -389,21 +399,38 @@ const_p_teca_dataset teca_index_reduce::reduce_remote( } // -------------------------------------------------------------------------- -const_p_teca_dataset teca_index_reduce::execute( - unsigned int port, +const_p_teca_dataset teca_index_reduce::execute(unsigned int port, const std::vector &input_data, - const teca_metadata &request) + const teca_metadata &request, int streaming) { (void)port; (void)request; - // note: it is not an error to have no input data. - // this can occur if there are fewer indices - // to process than there are MPI ranks. +#if defined(TECA_HAS_MPI) + int is_init = 0; + MPI_Initialized(&is_init); + if (is_init) + { + MPI_Comm comm = this->get_communicator(); + + // this rank is excluded from processing + if (comm == MPI_COMM_NULL) + return nullptr; + } +#endif + + // note: it is not an error to have no input data. this can occur if there + // are fewer indices to process than there are MPI ranks. + + const_p_teca_dataset tmp = this->reduce_local(input_data); - const_p_teca_dataset tmp = - this->reduce_remote(this->reduce_local(input_data)); + // when streaming execute will be called multiple times with 1 or more + // input datasets. When all the data has been passed streaming is 0. 
Only + // then do we reduce remote data and finalize the reduction. + if (streaming) + return tmp; + tmp = this->finalize(this->reduce_remote(tmp)); if (!tmp) return nullptr; diff --git a/core/teca_index_reduce.h b/core/teca_index_reduce.h index 1f8916e4f..818cb282c 100644 --- a/core/teca_index_reduce.h +++ b/core/teca_index_reduce.h @@ -60,6 +60,13 @@ class teca_index_reduce : public teca_threaded_algorithm virtual p_teca_dataset reduce(const const_p_teca_dataset &left, const const_p_teca_dataset &right) = 0; + // override that is called when the reduction is complete. + // the default implementation passes data through. + virtual p_teca_dataset finalize(const const_p_teca_dataset &ds) + { + return std::const_pointer_cast(ds); + } + // override that allows derived classes to generate upstream // requests that will be applied over all time steps. derived // classes implement this method instead of get_upstream_request, @@ -75,7 +82,6 @@ class teca_index_reduce : public teca_threaded_algorithm virtual teca_metadata initialize_output_metadata(unsigned int port, const std::vector &input_md) = 0; - protected: // customized pipeline behavior and parallel code. // most derived classes won't need to override these. @@ -94,7 +100,7 @@ class teca_index_reduce : public teca_threaded_algorithm // dataset, which is returned. const_p_teca_dataset execute(unsigned int port, const std::vector &input_data, - const teca_metadata &request) override; + const teca_metadata &request, int streaming) override; // consumes time metadata, partitions time's across // MPI ranks. 
diff --git a/core/teca_metadata.cxx b/core/teca_metadata.cxx index 35a74a711..c550e720c 100644 --- a/core/teca_metadata.cxx +++ b/core/teca_metadata.cxx @@ -142,6 +142,19 @@ int teca_metadata::get_name(unsigned long i, std::string &name) const return 0; } +// -------------------------------------------------------------------------- +int teca_metadata::get_names(std::vector &names) const +{ + prop_map_t::const_iterator it = this->props.cbegin(); + prop_map_t::const_iterator end = this->props.cend(); + for (; it != end; ++it) + { + names.push_back(it->first); + } + + return names.size() > 0 ? 0 : -1; +} + // -------------------------------------------------------------------------- int teca_metadata::size(const std::string &name, unsigned int &n) const noexcept { @@ -200,8 +213,10 @@ unsigned long long teca_metadata::get_next_id() const noexcept } // -------------------------------------------------------------------------- -void teca_metadata::to_stream(teca_binary_stream &s) const +int teca_metadata::to_stream(teca_binary_stream &s) const { + s.pack("teca_metadata", 13); + unsigned int n_props = this->props.size(); s.pack(n_props); @@ -213,13 +228,21 @@ void teca_metadata::to_stream(teca_binary_stream &s) const s.pack(it->second->type_code()); it->second->to_stream(s); } + + return 0; } // -------------------------------------------------------------------------- -void teca_metadata::from_stream(teca_binary_stream &s) +int teca_metadata::from_stream(teca_binary_stream &s) { this->clear(); + if (s.expect("teca_metadata")) + { + TECA_ERROR("invalid stream") + return -1; + } + unsigned int n_props; s.unpack(n_props); @@ -238,10 +261,12 @@ void teca_metadata::from_stream(teca_binary_stream &s) this->set(key, val); } + + return 0; } // -------------------------------------------------------------------------- -void teca_metadata::to_stream(ostream &os) const +int teca_metadata::to_stream(ostream &os) const { prop_map_t::const_iterator it = this->props.cbegin(); 
prop_map_t::const_iterator end = this->props.cend(); @@ -269,6 +294,8 @@ void teca_metadata::to_stream(ostream &os) const ) os << "}" << endl; } + + return 0; } // -------------------------------------------------------------------------- diff --git a/core/teca_metadata.h b/core/teca_metadata.h index 21b9b6762..2168aae12 100644 --- a/core/teca_metadata.h +++ b/core/teca_metadata.h @@ -76,12 +76,11 @@ class teca_metadata int set(const std::string &name, const p_teca_variant_array &prop_val); template - void set(const std::string &name, + int set(const std::string &name, const p_teca_variant_array_impl &prop_val); - // append a value to the named property. reports - // an error and does nothing if the property doesn't - // exist. return 0 on success. + // append a value to the named property. if the property doesn't + // exist it is created. return 0 on success. template int append(const std::string &name, const T &val); @@ -118,7 +117,7 @@ class teca_metadata // get prop value. return 0 if successful template int get(const std::string &name, T &val) const - { return this->get(name, 0, val); } + { return this->get(name, (unsigned int)(0), val); } // get ith prop value. return 0 if successful template @@ -153,6 +152,10 @@ class teca_metadata // return 0 if i is valid index. int get_name(unsigned long i, std::string &name) const; + // get the names of all name, value pairs. returns 0 + // if there are any properties. + int get_names(std::vector &names) const; + // remove. 
return 0 if successful int remove(const std::string &name) noexcept; @@ -171,12 +174,12 @@ class teca_metadata { return !empty(); } // serialize to/from binary - void to_stream(teca_binary_stream &s) const; - void from_stream(teca_binary_stream &s); + int to_stream(teca_binary_stream &s) const; + int from_stream(teca_binary_stream &s); // serialize to/from ascii - void to_stream(std::ostream &os) const; - void from_stream(std::ostream &) {} + int to_stream(std::ostream &os) const; + int from_stream(std::ostream &) { return -1; } private: unsigned long long get_next_id() const noexcept; @@ -235,7 +238,7 @@ int teca_metadata::append(const std::string &name, const T &val) prop_map_t::iterator it = this->props.find(name); if (it == this->props.end()) { - return -1; + return this->set(name, val); } it->second->append(val); @@ -321,11 +324,11 @@ int teca_metadata::set(const std::string &name, // -------------------------------------------------------------------------- template -void teca_metadata::set( - const std::string &name, +int teca_metadata::set(const std::string &name, const p_teca_variant_array_impl &prop_val) { this->props[name] = prop_val; + return 0; } // -------------------------------------------------------------------------- @@ -481,4 +484,7 @@ int teca_metadata::get(const std::string &name, // convenience defs for nesting metadata using teca_metadata_array = teca_variant_array_impl; +using p_teca_metadata_array = std::shared_ptr>; +using const_p_teca_metadata_array = std::shared_ptr>; + #endif diff --git a/core/teca_metadata_util.h b/core/teca_metadata_util.h index a384e14bc..14213c971 100644 --- a/core/teca_metadata_util.h +++ b/core/teca_metadata_util.h @@ -6,10 +6,10 @@ namespace teca_metadata_util { -// remove post-fix from the arrays in get_upstream_request if -// the post-fix is set. 
For example if post-fix is set to "_filtered" -// then we remove all the variables in the "arrays" set that end with -// this post-fix, and replace it with the actual requested array. -void remove_post_fix(std::set &arrays, std::string post_fix); +// given a set of names, where names end with a common string, here called +// a post-fix, modifies the set of names by removing the post fix from each +// name. +void remove_post_fix(std::set &names, std::string post_fix); + }; #endif diff --git a/core/teca_mpi_manager.cxx b/core/teca_mpi_manager.cxx index d1b9729e8..f38274a42 100644 --- a/core/teca_mpi_manager.cxx +++ b/core/teca_mpi_manager.cxx @@ -2,8 +2,10 @@ #include "teca_config.h" #include "teca_common.h" #include "teca_profiler.h" +#include "teca_system_util.h" #include +#include #if defined(TECA_HAS_MPI) #include @@ -21,13 +23,24 @@ teca_mpi_manager::teca_mpi_manager(int &argc, char **&argv) teca_profiler::start_event("app_initialize"); #if defined(TECA_HAS_MPI) - int mpi_thread_required = MPI_THREAD_SERIALIZED; - int mpi_thread_provided = 0; - MPI_Init_thread(&argc, &argv, mpi_thread_required, &mpi_thread_provided); - if (mpi_thread_provided < mpi_thread_required) + // let the user disable MPI_Init. This is primarilly to work around Cray's + // practice of calling abort from MPI_Init on login nodes. 
+ bool init_mpi = true; + teca_system_util::get_environment_variable("TECA_INITIALIZE_MPI", init_mpi); + if (init_mpi) { - TECA_ERROR("This MPI does not support thread serialized"); - abort(); + int mpi_thread_required = MPI_THREAD_SERIALIZED; + int mpi_thread_provided = 0; + MPI_Init_thread(&argc, &argv, mpi_thread_required, &mpi_thread_provided); + if (mpi_thread_provided < mpi_thread_required) + { + TECA_ERROR("This MPI does not support thread serialized"); + abort(); + } + } + else + { + TECA_WARNING("TECA_INITIALIZE_MPI=FALSE MPI_Init was not called.") } #endif diff --git a/core/teca_mpi_manager.h b/core/teca_mpi_manager.h index 9a08770ce..d33bc8184 100644 --- a/core/teca_mpi_manager.h +++ b/core/teca_mpi_manager.h @@ -4,7 +4,7 @@ /// A RAII class to ease MPI initalization and finalization // MPI_Init is handled in the constructor, MPI_Finalize is handled in the // destructor. Given that this is an application level helper rank and size -// are reported relatoive to MPI_COMM_WORLD. +// are reported relative to MPI_COMM_WORLD. 
class teca_mpi_manager { public: diff --git a/core/teca_mpi_util.cxx b/core/teca_mpi_util.cxx new file mode 100644 index 000000000..8e9673dfd --- /dev/null +++ b/core/teca_mpi_util.cxx @@ -0,0 +1,54 @@ +#include "teca_mpi_util.h" + +#include "teca_config.h" + +#include + +namespace teca_mpi_util +{ +// ************************************************************************** +int equipartition_communicator(MPI_Comm comm, + int new_comm_size, MPI_Comm *new_comm) +{ +#if defined(TECA_HAS_MPI) + int is_init = 0; + MPI_Initialized(&is_init); + if (is_init) + { + int rank = 0; + int n_ranks = 1; + MPI_Comm_rank(comm, &rank); + MPI_Comm_size(comm, &n_ranks); + + if (n_ranks < new_comm_size) + { + // can't increase beyond the original sizew + return 0; + } + + int stride = n_ranks / new_comm_size; + //int in_new_comm = (n_ranks % new_comm_size) == 0; + + // get the ranks in the new commmunicator + std::vector ranks(new_comm_size); + for (int i = 0; i < new_comm_size; ++i) + ranks[i] = i*stride; + + // make a group containing the ranks + MPI_Group world_group; + MPI_Comm_group(comm, &world_group); + + MPI_Group new_group; + MPI_Group_incl(world_group, new_comm_size, ranks.data(), &new_group); + + // create the new communicator + MPI_Comm_create_group(comm, new_group, 0, new_comm); + + // clean up + MPI_Group_free(&world_group); + MPI_Group_free(&new_group); + } +#endif + return 0; +} +} diff --git a/core/teca_mpi_util.h b/core/teca_mpi_util.h new file mode 100644 index 000000000..2b22c5b89 --- /dev/null +++ b/core/teca_mpi_util.h @@ -0,0 +1,16 @@ +#ifndef teca_mpi_util_h +#define teca_mpi_util_h + +#include "teca_mpi.h" + +namespace teca_mpi_util +{ +// subset the the communicator comm into a new communicator with +// new_comm_size ranks. ranks are selected from comm with a uniform +// stride spreading them approximatelyt equally across nodes. 
+int equipartition_communicator(MPI_Comm comm, + int new_comm_size, MPI_Comm *new_comm); + +}; + +#endif diff --git a/core/teca_program_options.h b/core/teca_program_options.h index a990e3d8e..be4340fe2 100644 --- a/core/teca_program_options.h +++ b/core/teca_program_options.h @@ -42,12 +42,12 @@ using variables_map // #define TECA_POPTS_GET(_type, _prefix, _name, _desc) \ (((_prefix.empty()?"":_prefix+"::") + #_name).c_str(), \ - boost::program_options::value<_type>(), _desc) + boost::program_options::value<_type>(), "\n" _desc "\n") #define TECA_POPTS_MULTI_GET(_type, _prefix, _name, _desc) \ (((_prefix.empty()?"":_prefix+"::") + #_name).c_str(), \ boost::program_options::value<_type>()->multitoken(), \ - _desc) + "\n" _desc "\n") #define TECA_POPTS_SET(_opts, _type, _prefix, _name) \ {std::string opt_name = \ diff --git a/alg/teca_programmable_algorithm.cxx b/core/teca_programmable_algorithm.cxx similarity index 100% rename from alg/teca_programmable_algorithm.cxx rename to core/teca_programmable_algorithm.cxx diff --git a/alg/teca_programmable_algorithm.h b/core/teca_programmable_algorithm.h similarity index 100% rename from alg/teca_programmable_algorithm.h rename to core/teca_programmable_algorithm.h diff --git a/alg/teca_programmable_algorithm_fwd.h b/core/teca_programmable_algorithm_fwd.h similarity index 73% rename from alg/teca_programmable_algorithm_fwd.h rename to core/teca_programmable_algorithm_fwd.h index ce33ba5a1..626fcfd57 100644 --- a/alg/teca_programmable_algorithm_fwd.h +++ b/core/teca_programmable_algorithm_fwd.h @@ -6,11 +6,13 @@ #include "teca_dataset_fwd.h" TECA_SHARED_OBJECT_FORWARD_DECL(teca_programmable_algorithm) +TECA_SHARED_OBJECT_FORWARD_DECL(teca_threaded_programmable_algorithm) #ifdef SWIG typedef void* report_callback_t; typedef void* request_callback_t; typedef void* execute_callback_t; +typedef void* threaded_execute_callback_t; #else using report_callback_t = std::function&)>; @@ -22,5 +24,9 @@ using request_callback_t = 
std::function( using execute_callback_t = std::function &, const teca_metadata &)>; + +using threaded_execute_callback_t = std::function &, + const teca_metadata &, int)>; #endif #endif diff --git a/alg/teca_programmable_reduce.cxx b/core/teca_programmable_reduce.cxx similarity index 86% rename from alg/teca_programmable_reduce.cxx rename to core/teca_programmable_reduce.cxx index f4757b4c1..74864db32 100644 --- a/alg/teca_programmable_reduce.cxx +++ b/core/teca_programmable_reduce.cxx @@ -1,5 +1,4 @@ #include "teca_programmable_reduce.h" -#include "teca_table.h" #include #include @@ -86,3 +85,18 @@ p_teca_dataset teca_programmable_reduce::reduce( return this->reduce_callback(left_ds, right_ds); } + +// -------------------------------------------------------------------------- +p_teca_dataset teca_programmable_reduce::finalize( + const const_p_teca_dataset &ds) +{ +#ifdef TECA_DEBUG + cerr << teca_parallel_id() + << "teca_programmable_reduce::finalize" << endl; +#endif + + if (!this->finalize_callback) + return this->teca_index_reduce::finalize(ds); + + return this->finalize_callback(ds); +} diff --git a/alg/teca_programmable_reduce.h b/core/teca_programmable_reduce.h similarity index 86% rename from alg/teca_programmable_reduce.h rename to core/teca_programmable_reduce.h index a4ffe4de0..a0e14dc99 100644 --- a/alg/teca_programmable_reduce.h +++ b/core/teca_programmable_reduce.h @@ -62,6 +62,15 @@ class teca_programmable_reduce : public teca_index_reduce // the default implementation returns a nullptr TECA_ALGORITHM_CALLBACK_PROPERTY(reduce_callback_t, reduce_callback) + // set the callback that finalizes the reduction. 
+ // The callback must be a callable with the signature: + // + // p_teca_dataset reduce(const const_p_teca_dataset &ds); + // + // the default implementation passes the input dataset + // through + TECA_ALGORITHM_CALLBACK_PROPERTY(finalize_callback_t, finalize_callback) + protected: teca_programmable_reduce(); @@ -69,6 +78,8 @@ class teca_programmable_reduce : public teca_index_reduce p_teca_dataset reduce(const const_p_teca_dataset &left, const const_p_teca_dataset &right) override; + p_teca_dataset finalize(const const_p_teca_dataset &input) override; + std::vector initialize_upstream_request( unsigned int port, const std::vector &input_md, const teca_metadata &request) override; @@ -78,6 +89,7 @@ class teca_programmable_reduce : public teca_index_reduce private: reduce_callback_t reduce_callback; + finalize_callback_t finalize_callback; request_callback_t request_callback; report_callback_t report_callback; char class_name[64]; diff --git a/alg/teca_programmable_reduce_fwd.h b/core/teca_programmable_reduce_fwd.h similarity index 75% rename from alg/teca_programmable_reduce_fwd.h rename to core/teca_programmable_reduce_fwd.h index fc27a9612..e17706fae 100644 --- a/alg/teca_programmable_reduce_fwd.h +++ b/core/teca_programmable_reduce_fwd.h @@ -9,8 +9,12 @@ TECA_SHARED_OBJECT_FORWARD_DECL(teca_programmable_reduce) #ifdef SWIG typedef void* reduce_callback_t; +typedef void* finalize_callback_t; #else using reduce_callback_t = std::function; + +using finalize_callback_t = std::function; #endif #endif diff --git a/core/teca_python_algorithm.py b/core/teca_python_algorithm.py new file mode 100644 index 000000000..2b630ade4 --- /dev/null +++ b/core/teca_python_algorithm.py @@ -0,0 +1,121 @@ + +class teca_python_algorithm(object): + """ + The base class used for writing new algorithms in Python. Contains + plumbing that connects user provided overrides to an instance of + teca_programmable_algorithm. 
Users are expected to override one or more of + report, request, and/or execute. + """ + + @classmethod + def New(derived_class): + """ + factory method returns an instance of the derived type + """ + dc = derived_class() + dc.initialize_implementation() + return dc + + def initialize_implementation(self): + """ + Initializes the instance and wires up the plumbing. + """ + # call overridable methods to get number of inputs/outputs + n_inputs = self.get_number_of_input_connections() + n_outputs = self.get_number_of_output_ports() + + # call overrides to get implementation for teca execution + # phase implementations + self.impl = teca_programmable_algorithm.New() + self.impl.set_number_of_input_connections(n_inputs) + self.impl.set_number_of_output_ports(n_outputs) + self.impl.set_name(self.__class__.__name__) + self.impl.set_report_callback(self.get_report_callback()) + self.impl.set_request_callback(self.get_request_callback()) + self.impl.set_execute_callback(self.get_execute_callback()) + + def __getattr__(self, name): + """ + forward calls to the programmable algorithm + """ + + # guard against confusing infinite recursion that + # occurs if impl is not present. one common way + # that this occurs is if the instance was not + # created with the New method + if name == 'impl': + raise RuntimeError('The teca_python_algorithm ' \ + 'was imporperly initialized. Did you use the ' \ + 'factory method, New(), to create this ' \ + 'instance of %s?'%(self.__class__.__name__)) + + # forward to the teca_programmable_algorithm + return self.impl.__getattribute__(name) + + def get_report_callback(self): + """ + returns a callback to be used by the programmable algorithm that + forwards calls to the class method. + """ + def report_callback(port, md_in): + return self.report(port, md_in) + return report_callback + + def get_request_callback(self): + """ + returns a callback to be used by the programmable algorithm that + forwards calls to the class method. 
+ """ + def request_callback(port, md_in, req_in): + return self.request(port, md_in, req_in) + return request_callback + + def get_execute_callback(self): + """ + returns a callback to be used by the programmable algorithm that + forwards calls to the class method. + """ + def execute_callback(port, data_in, req_in): + return self.execute(port, data_in, req_in) + return execute_callback + + def get_number_of_input_connections(self): + """ + return the number of input connections this algorithm needs. + The default is 1, override to modify. + """ + return 1 + + def get_number_of_output_ports(self): + """ + return the number of output ports this algorithm provides. + The default is 1, override to modify. + """ + return 1 + + def report(self, port, md_in): + """ + return the metadata decribing the data available for consumption. + Override this to customize the behavior of the report phase of + execution. The default passes metadata on the first input through. + """ + return teca_metadata(md_in[0]) + + def request(self, port, md_in, req_in): + """ + return the request for needed data for execution. Override this to + customize the behavior of the request phase of execution. The default + passes the request on the first input port through. + """ + return [teca_metadata(req_in)] + + def execute(self, port, data_in, req_in): + """ + return the processed data. Override this to customize the behavior of + the execute phase of execution. The default passes the dataset on the + first input port through. + """ + if len(data_in): + data_out = data_in[0].new_instance() + data_out.shallow_copy(as_non_const_teca_dataset(data_out)) + return data_out diff --git a/core/teca_python_reduce.py b/core/teca_python_reduce.py new file mode 100644 index 000000000..efa474d64 --- /dev/null +++ b/core/teca_python_reduce.py @@ -0,0 +1,126 @@ + +class teca_python_reduce(object): + """ + The base class used for writing new reductions in Python. 
Contains + plumbing that connects user provided overrides to an instance of + teca_programmable_reduce. Users are expected to override one or more of + report, request, reduce, and/or finalize methods. + """ + + @classmethod + def New(derived_class): + """ + factory method returns an instance of the derived type + """ + dc = derived_class() + dc.initialize_implementation() + return dc + + def initialize_implementation(self): + """ + Initializes the instance and wires up the plumbing. + """ + # call overrides to get implementation for teca execution + # phase implementations + self.impl = teca_programmable_reduce.New() + self.impl.set_name(self.__class__.__name__) + self.impl.set_report_callback(self.get_report_callback()) + self.impl.set_request_callback(self.get_request_callback()) + self.impl.set_reduce_callback(self.get_reduce_callback()) + self.impl.set_finalize_callback(self.get_finalize_callback()) + + def __getattr__(self, name): + """ + forward calls to the programmable reduce + """ + + # guard against confusing infinite recursion that + # occurs if impl is not present. one common way + # that this occurs is if the instance was not + # created with the New method + if name == 'impl': + raise RuntimeError('The teca_python_reduce ' \ + 'was imporperly initialized. Did you use the ' \ + 'factory method, New(), to create this ' \ + 'instance of %s?'%(self.__class__.__name__)) + + # forward to the teca_programmable_reduce + return self.impl.__getattribute__(name) + + def get_report_callback(self): + """ + returns a callback to be used by the programmable reduce that + forwards calls to the class method. + """ + def report_callback(port, md_in): + return self.report(port, md_in) + return report_callback + + def get_request_callback(self): + """ + returns a callback to be used by the programmable reduce that + forwards calls to the class method. 
+ """ + def request_callback(port, md_in, req_in): + return self.request(port, md_in, req_in) + return request_callback + + def get_execute_callback(self): + """ + returns a callback to be used by the programmable reduce that + forwards calls to the class method. + """ + def execute_callback(port, data_in, req_in): + return self.execute(port, data_in, req_in) + return execute_callback + + def get_reduce_callback(self): + """ + returns a callback used by the programmable reduce that forwards + calls to the class method + """ + def reduce_callback(left, right): + return self.reduce(left, right) + return reduce_callback + + def get_finalize_callback(self): + """ + returns a callback used by the programmable reduce that forwards + calls to the class method + """ + def finalize_callback(data): + return self.finalize(data) + return finalize_callback + + def report(self, port, md_in): + """ + return the metadata decribing the data available for consumption. + Override this to customize the behavior of the report phase of + execution. The default passes metadata on the first input through. + """ + return teca_metadata(md_in[0]) + + def request(self, port, md_in, req_in): + """ + return the request for needed data for execution. Override this to + customize the behavior of the request phase of execution. The default + passes the request on the first input port through. + """ + return [teca_metadata(req_in)] + + def reduce(self, left, right): + """ + given two input datasets return the reduced data. Override this to customize + the behavior of the reduction. the default raises an exception, this must be + overridden. + """ + raise RuntimeError('%s::reduce method was not overridden'%(self.get_class_name())) + + def finalize(self, data_in): + """ + Called after the reduction is complete. Override this method to customize the + finalization of the reduction. the default passes the dataset through. 
+ """ + data_out = data_in.new_instance() + data_out.shallow_copy(as_non_const_teca_dataset(data_out)) + return data_out diff --git a/core/teca_string_util.cxx b/core/teca_string_util.cxx new file mode 100644 index 000000000..d1288eb29 --- /dev/null +++ b/core/teca_string_util.cxx @@ -0,0 +1,89 @@ +#include "teca_string_util.h" + +namespace teca_string_util +{ + +// ************************************************************************** +int extract_string(const char *istr, std::string &field) +{ + const char *sb = istr; + while (*sb != '"') + { + if (*sb == '\0') + { + TECA_ERROR("End of string encountered before opening \"") + return -1; + } + ++sb; + } + ++sb; + const char *se = sb; + while (*se != '"') + { + if (*se == '\\') + { + ++se; + } + if (*se == '\0') + { + TECA_ERROR("End of string encountered before closing \"") + return -1; + } + ++se; + } + field = std::string(sb, se); + return 0; +} + +// ************************************************************************** +int tokenize(char *istr, char delim, int n_cols, char **ostr) +{ + // skip delim at the beginning + while ((*istr == delim) && (*istr != '\0')) + ++istr; + + // nothing here + if (*istr == '\0') + return -1; + + // save the first + ostr[0] = istr; + int col = 1; + + while ((*istr != '\0') && (col < n_cols)) + { + // seek to delim + while ((*istr != delim) && (*istr != '\0')) + ++istr; + + if (*istr == delim) + { + // terminate the token + *istr = '\0'; + + // move past the terminator + ++istr; + + // check for end, if not start the next token + if (*istr != '\0') + ostr[col] = istr; + + // count it + ++col; + } + } + + // we should have found n_cols + if (col != n_cols) + { + TECA_ERROR("Failed to process all the data, " + << col << "columns of the " << n_cols + << " expected were processed.") + return -1; + } + + return 0; +} + +} + diff --git a/core/teca_string_util.h b/core/teca_string_util.h new file mode 100644 index 000000000..e1d7ff54f --- /dev/null +++ b/core/teca_string_util.h 
@@ -0,0 +1,261 @@ +#ifndef teca_string_util_h +#define teca_string_util_h + +#include "teca_common.h" + +#include +#include +#include +#include +#include + +namespace teca_string_util +{ +// convert the characters between the first and second double +// quote to a std::string. Escaped characters are skipped. Return +// 0 if successful. +int extract_string(const char *istr, std::string &field); + +// scan the input string (istr) for the given a delimiter (delim). push a pointer +// to the first non-delimiter character and the first character after each +// instance of the delimiter. return zero if successful. when successful there +// will be at least one value. +int tokenize(char *istr, char delim, int n_cols, char **ostr); + + +// scan the input string (istr) for the given a delimiter (delim). push a point +// to the first non-delimiter character and the first character after each +// instance of the delimiter. return zero if successful. when successful there +// will be at least one value. +template > +int tokenize(char *istr, char delim, container_t &ostr) +{ + // skip delim at the beginning + while ((*istr == delim) && (*istr != '\0')) + ++istr; + + // nothing here + if (*istr == '\0') + return -1; + + // save the first + ostr.push_back(istr); + + while (*istr != '\0') + { + while ((*istr != delim) && (*istr != '\0')) + ++istr; + + if (*istr == delim) + { + // terminate the token + *istr = '\0'; + ++istr; + if (*istr != '\0') + { + // not at the end, start the next token + ostr.push_back(istr); + } + } + } + + return 0; +} + +// skip space, tabs, and new lines. return non-zero if the end of the string +// is reached before a non-pad character is encountered +inline +int skip_pad(char *&buf) +{ + while ((*buf != '\0') && + ((*buf == ' ') || (*buf == '\n') || (*buf == '\r') || (*buf == '\t'))) + ++buf; + return *buf == '\0' ? 
-1 : 0; +} + +// return 0 if the first non-pad character is # +inline +int is_comment(char *buf) +{ + skip_pad(buf); + if (buf[0] == '#') + return 1; + return 0; +} + +template +struct scanf_tt {}; + +#define DECLARE_SCANF_TT(_CPP_T, _FMT_STR) \ +template<> \ +struct scanf_tt<_CPP_T> \ +{ \ + static \ + const char *format() { return _FMT_STR; } \ +}; +DECLARE_SCANF_TT(float," %g") +DECLARE_SCANF_TT(double," %lg") +DECLARE_SCANF_TT(char," %hhi") +DECLARE_SCANF_TT(short, " %hi") +DECLARE_SCANF_TT(int, " %i") +DECLARE_SCANF_TT(long, " %li") +DECLARE_SCANF_TT(long long, "%lli") +DECLARE_SCANF_TT(unsigned char," %hhu") +DECLARE_SCANF_TT(unsigned short, " %hu") +DECLARE_SCANF_TT(unsigned int, " %u") +DECLARE_SCANF_TT(unsigned long, " %lu") +DECLARE_SCANF_TT(unsigned long long, "%llu") +DECLARE_SCANF_TT(std::string, " \"%128s") + +template +struct string_tt {}; + +#define DECLARE_STR_CONVERSION_I(_CPP_T, _FUNC) \ +template <> \ +struct string_tt<_CPP_T> \ +{ \ + static const char *type_name() { return # _CPP_T; } \ + \ + static int convert(char *str, _CPP_T &val) \ + { \ + errno = 0; \ + char *endp = nullptr; \ + _CPP_T tmp = _FUNC(str, &endp, 0); \ + if (errno != 0) \ + { \ + TECA_ERROR("Failed to convert string \"" \ + << str << "\" to a nunber." << strerror(errno)) \ + return -1; \ + } \ + else if (endp == str) \ + { \ + TECA_ERROR("Failed to convert string \"" \ + << str << "\" to a nunber. Invalid string.") \ + return -1; \ + } \ + val = tmp; \ + return 0; \ + } \ +}; + +#define DECLARE_STR_CONVERSION_F(_CPP_T, _FUNC) \ +template <> \ +struct string_tt<_CPP_T> \ +{ \ + static const char *type_name() { return # _CPP_T; } \ + \ + static int convert(const char *str, _CPP_T &val) \ + { \ + errno = 0; \ + char *endp = nullptr; \ + _CPP_T tmp = _FUNC(str, &endp); \ + if (errno != 0) \ + { \ + TECA_ERROR("Failed to convert string \"" \ + << str << "\" to a nunber." 
<< strerror(errno)) \ + return -1; \ + } \ + else if (endp == str) \ + { \ + TECA_ERROR("Failed to convert string \"" \ + << str << "\" to a nunber. Invalid string.") \ + return -1; \ + } \ + val = tmp; \ + return 0; \ + } \ +}; + +DECLARE_STR_CONVERSION_F(float, strtof) +DECLARE_STR_CONVERSION_F(double, strtod) +DECLARE_STR_CONVERSION_I(char, strtol) +DECLARE_STR_CONVERSION_I(short, strtol) +DECLARE_STR_CONVERSION_I(int, strtol) +DECLARE_STR_CONVERSION_I(long, strtoll) +DECLARE_STR_CONVERSION_I(long long, strtoll) + +template <> +struct string_tt +{ + static const char *type_name() { return "bool"; } + + static int convert(const char *str, bool &val) + { + char buf[17]; + buf[16] = '\0'; + size_t n = strlen(str); + n = n < 17 ? n : 16; + for (size_t i = 0; i < n && i < 16; ++i) + buf[i] = tolower(str[i]); + buf[n] = '\0'; + if ((strcmp(buf, "0") == 0) + || (strcmp(buf, "false") == 0) || (strcmp(buf, "off") == 0)) + { + val = false; + return 0; + } + else if ((strcmp(buf, "1") == 0) + || (strcmp(buf, "true") == 0) || (strcmp(buf, "on") == 0)) + { + val = true; + return 0; + } + + TECA_ERROR("Failed to convert string \"" << str << "\" to a bool") + return -1; + } +}; + +template <> +struct string_tt +{ + static const char *type_name() { return "std::string"; } + + static int convert(const char *str, std::string &val) + { + val = str; + return 0; + } +}; + + +// watch out for memory leak, val needs to be free'd +template <> +struct string_tt +{ + static const char *type_name() { return "char*"; } + + static int convert(const char *str, char *&val) + { + val = strdup(str); + return 0; + } +}; + +// extract the value in a "name = value" pair. +// an error occurs if splitting the input on '=' doesn't produce 2 tokens +// or if the conversion to val_t fails. returns 0 if successful. 
+template +int extract_value(char *l, val_t &val) +{ + std::vector tmp; + if (tokenize(l, '=', tmp) || (tmp.size() != 2)) + { + TECA_ERROR("Invalid name specifier in \"" << l << "\"") + return -1; + } + + char *r = tmp[1]; + if (skip_pad(r) || string_tt::convert(r, val)) + { + TECA_ERROR("Invalid " << string_tt::type_name() + << " value \"" << r << "\" in \"" << l << "\"") + return -1; + } + + return 0; +} + +} + +#endif diff --git a/core/teca_system_util.cxx b/core/teca_system_util.cxx new file mode 100644 index 000000000..308fb4c7e --- /dev/null +++ b/core/teca_system_util.cxx @@ -0,0 +1,46 @@ +#include "teca_system_util.h" + +#include + +namespace teca_system_util +{ + +// -------------------------------------------------------------------------- +int get_command_line_option(int argc, char **argv, + const char *arg_name, int require, std::string &arg_val) +{ + for (int i = 1; i < argc; ++i) + { + if (strcmp(arg_name, argv[i]) == 0) + { + if (++i == argc) + { + TECA_ERROR(<< arg_name << " is missing its value") + return -1; + } + arg_val = argv[i]; + break; + } + } + + if (require && arg_val.empty()) + { + TECA_ERROR("missing required command line option " << arg_name) + return -1; + } + + return 0; +} + +// -------------------------------------------------------------------------- +int command_line_option_check(int argc, char **argv, const char *arg_name) +{ + for (int i = 1; i < argc; ++i) + { + if (strcmp(arg_name, argv[i]) == 0) + return 1; + } + return 0; +} + +} diff --git a/core/teca_system_util.h b/core/teca_system_util.h new file mode 100644 index 000000000..2139501ef --- /dev/null +++ b/core/teca_system_util.h @@ -0,0 +1,48 @@ +#ifndef teca_system_util_h +#define teca_system_util_h + +#include "teca_common.h" +#include "teca_string_util.h" + +#include + +namespace teca_system_util +{ +// initialize val with the environment variable named by var converted to a +// numeric type. Only floating point and signed integers are implemented. 
For +// unsigned types, check that the return is greater or equal to zero. +// +// returns: +// 0 if the variable was found and val was initialized from it +// 1 if the varibale was not found +// -1 if the variable was found but conversion from string failed +template +int get_environment_variable(const char *var, T &val) +{ + const char *tmp = getenv(var); + if (tmp) + { + if (teca_string_util::string_tt::convert(tmp, val)) + { + TECA_ERROR("Failed to convert " << var << " = \"" + << tmp << "\" to a number") + return -1; + } + return 0; + } + return 1; +} + +// extract the value of the named command line argument. +// return 0 if successful. If require is not zero then an error +// will be reported if the argument is not present. +int get_command_line_option(int argc, char **argv, + const char *arg_name, int require, std::string &arg_val); + +// check for the presence of the name command line option. +// return non-zero if it is found. +int command_line_option_check(int argc, char **argv, + const char *arg_name); +} + +#endif diff --git a/core/teca_thread_pool.h b/core/teca_thread_pool.h index a08251ca0..55721111b 100644 --- a/core/teca_thread_pool.h +++ b/core/teca_thread_pool.h @@ -3,6 +3,7 @@ #include "teca_common.h" #include "teca_algorithm_fwd.h" +#include "teca_thread_util.h" #include "teca_threadsafe_queue.h" #include "teca_mpi.h" @@ -11,6 +12,7 @@ #include #include #include +#include #include #if defined(_GNU_SOURCE) #include @@ -18,12 +20,6 @@ #include #endif -namespace internal -{ -int thread_parameters(MPI_Comm comm, int base_core_id, int n_req, - bool bind, bool verbose, std::deque &affinity); -} - template class teca_thread_pool; @@ -65,7 +61,19 @@ class teca_thread_pool // datasets in the order that corresponding requests // were added to the queue. template