From b2cda481ba83aec50577db2773702f937ea75711 Mon Sep 17 00:00:00 2001
From: Augustin Degomme
Date: Wed, 7 Apr 2021 10:29:07 +0200
Subject: [PATCH] Display leak origin in message, if -trace-call-location is used. + downgrade messages to INFO.

---
 src/smpi/internals/smpi_utils.cpp      | 13 ++++---
 teshsuite/smpi/CMakeLists.txt          |  2 +-
 .../coll-allreduce-with-leaks.tesh     | 24 ++++++-------
 .../mc-coll-allreduce-with-leaks.tesh  | 34 ++++++++++---------
 4 files changed, 40 insertions(+), 33 deletions(-)

diff --git a/src/smpi/internals/smpi_utils.cpp b/src/smpi/internals/smpi_utils.cpp
index cc3fe5f788..90ef1550bb 100644
--- a/src/smpi/internals/smpi_utils.cpp
+++ b/src/smpi/internals/smpi_utils.cpp
@@ -151,15 +151,20 @@ void print_memory_analysis()
   bool truncate = max < handles.size();
   if (truncate)
     handles.resize(max);
+  bool printed_advice=false;
   for (const auto& p : handles) {
-    if (xbt_log_no_loc) {
-      XBT_WARN("Leaked handle of type %s", p.second->name().c_str());
+    if (xbt_log_no_loc || p.second->call_location().empty()) {
+      if (!printed_advice){
+        XBT_INFO("To get more information (location of allocations), compile your code with -trace-call-location flag of smpicc/f90");
+        printed_advice=true;
+      }
+      XBT_INFO("Leaked handle of type %s", p.second->name().c_str());
     } else {
-      XBT_WARN("Leaked handle of type %s at %p", p.second->name().c_str(), p.second);
+      XBT_INFO("Leaked handle of type %s at %s", p.second->name().c_str(), p.second->call_location().c_str());
     }
   }
   if (truncate)
-    XBT_WARN("(more handle leaks hidden as you wanted to see only %lu of them)", max);
+    XBT_INFO("(more handle leaks hidden as you wanted to see only %lu of them)", max);
 }
 }
diff --git a/teshsuite/smpi/CMakeLists.txt b/teshsuite/smpi/CMakeLists.txt
index 423fd69966..a3314a2391 100644
--- a/teshsuite/smpi/CMakeLists.txt
+++ b/teshsuite/smpi/CMakeLists.txt
@@ -147,7 +147,7 @@ if(enable_smpi)
   # Extra allreduce test : with leak : don't run on asan (and ignore leaks on valgrind)
   if (NOT HAVE_SANITIZER_ADDRESS)
     ADD_TESH(tesh-smpi-coll-allreduce-with-leaks --setenv platfdir=${CMAKE_HOME_DIRECTORY}/examples/platforms --setenv bindir=${CMAKE_BINARY_DIR}/teshsuite/smpi/coll-allreduce-with-leaks --cd ${CMAKE_BINARY_DIR}/teshsuite/smpi/coll-allreduce-with-leaks ${CMAKE_HOME_DIRECTORY}/teshsuite/smpi/coll-allreduce-with-leaks/coll-allreduce-with-leaks.tesh)
-
+    set_target_properties(coll-allreduce-with-leaks PROPERTIES COMPILE_FLAGS "-trace-call-location")
     if(enable_model-checking)
       add_dependencies(tests-mc coll-allreduce-with-leaks)
       ADD_TESH(tesh-mc-smpi-coll-allreduce-with-leaks --setenv platfdir=${CMAKE_HOME_DIRECTORY}/examples/platforms --setenv bindir=${CMAKE_BINARY_DIR}/teshsuite/smpi/coll-allreduce-with-leaks --cd ${CMAKE_BINARY_DIR}/teshsuite/smpi/coll-allreduce-with-leaks ${CMAKE_HOME_DIRECTORY}/teshsuite/smpi/coll-allreduce-with-leaks/mc-coll-allreduce-with-leaks.tesh)
diff --git a/teshsuite/smpi/coll-allreduce-with-leaks/coll-allreduce-with-leaks.tesh b/teshsuite/smpi/coll-allreduce-with-leaks/coll-allreduce-with-leaks.tesh
index 92cc86b339..ab2c29a49c 100644
--- a/teshsuite/smpi/coll-allreduce-with-leaks/coll-allreduce-with-leaks.tesh
+++ b/teshsuite/smpi/coll-allreduce-with-leaks/coll-allreduce-with-leaks.tesh
@@ -2,7 +2,7 @@
 ! output sort
 p Test allreduce
-$ $VALGRIND_NO_LEAK_CHECK ${bindir:=.}/../../../smpi_script/bin/smpirun -map -hostfile ../hostfile_coll -platform ${platfdir:=.}/small_platform.xml -np 16 --log=xbt_cfg.thres:critical ${bindir:=.}/coll-allreduce-with-leaks --log=smpi_config.thres:warning --cfg=smpi/display-allocs:yes --cfg=smpi/simulate-computation:no --log=smpi_coll.thres:error --log=smpi_mpi.thres:error --log=smpi_pmpi.thres:error --cfg=smpi/list-leaks:10 --log=no_loc
+$ $VALGRIND_NO_LEAK_CHECK ${bindir:=.}/../../../smpi_script/bin/smpirun -map -hostfile ../hostfile_coll -platform ${platfdir:=.}/small_platform.xml -np 16 --log=xbt_cfg.thres:critical ${bindir:=.}/coll-allreduce-with-leaks --log=smpi_config.thres:warning --cfg=smpi/display-allocs:yes --cfg=smpi/simulate-computation:no --log=smpi_coll.thres:error --log=smpi_mpi.thres:error --log=smpi_pmpi.thres:error --cfg=smpi/list-leaks:10
 > [rank 0] -> Tremblay
 > [rank 1] -> Tremblay
 > [rank 2] -> Tremblay
@@ -24,17 +24,17 @@ $ $VALGRIND_NO_LEAK_CHECK ${bindir:=.}/../../../smpi_script/bin/smpirun -map -ho
 > Running smpirun with -wrapper "valgrind --leak-check=full" can provide more information
 > This can be done automatically by setting --cfg=smpi/auto-shared-malloc-thresh to the minimum size wanted size (this can alter execution if data content is necessary)
 > [0.023768] [smpi_utils/INFO] Probable memory leaks in your code: SMPI detected 32 unfreed MPI handles : display types and addresses (n max) with --cfg=smpi/list-leaks:n.
-> [0.023768] [smpi_utils/WARNING] Leaked handle of type MPI_Group
-> [0.023768] [smpi_utils/WARNING] Leaked handle of type MPI_Comm
-> [0.023768] [smpi_utils/WARNING] Leaked handle of type MPI_Group
-> [0.023768] [smpi_utils/WARNING] Leaked handle of type MPI_Comm
-> [0.023768] [smpi_utils/WARNING] Leaked handle of type MPI_Group
-> [0.023768] [smpi_utils/WARNING] Leaked handle of type MPI_Comm
-> [0.023768] [smpi_utils/WARNING] Leaked handle of type MPI_Group
-> [0.023768] [smpi_utils/WARNING] Leaked handle of type MPI_Comm
-> [0.023768] [smpi_utils/WARNING] Leaked handle of type MPI_Group
-> [0.023768] [smpi_utils/WARNING] Leaked handle of type MPI_Comm
-> [0.023768] [smpi_utils/WARNING] (more handle leaks hidden as you wanted to see only 10 of them)
+> [0.023768] [smpi_utils/INFO] Leaked handle of type MPI_Group at coll-allreduce-with-leaks.c:23
+> [0.023768] [smpi_utils/INFO] Leaked handle of type MPI_Comm at coll-allreduce-with-leaks.c:23
+> [0.023768] [smpi_utils/INFO] Leaked handle of type MPI_Group at coll-allreduce-with-leaks.c:23
+> [0.023768] [smpi_utils/INFO] Leaked handle of type MPI_Comm at coll-allreduce-with-leaks.c:23
+> [0.023768] [smpi_utils/INFO] Leaked handle of type MPI_Group at coll-allreduce-with-leaks.c:23
+> [0.023768] [smpi_utils/INFO] Leaked handle of type MPI_Comm at coll-allreduce-with-leaks.c:23
+> [0.023768] [smpi_utils/INFO] Leaked handle of type MPI_Group at coll-allreduce-with-leaks.c:23
+> [0.023768] [smpi_utils/INFO] Leaked handle of type MPI_Comm at coll-allreduce-with-leaks.c:23
+> [0.023768] [smpi_utils/INFO] Leaked handle of type MPI_Group at coll-allreduce-with-leaks.c:23
+> [0.023768] [smpi_utils/INFO] Leaked handle of type MPI_Comm at coll-allreduce-with-leaks.c:23
+> [0.023768] [smpi_utils/INFO] (more handle leaks hidden as you wanted to see only 10 of them)
 > [0.023768] [smpi_utils/INFO] Memory Usage: Simulated application allocated 2048 bytes during its lifetime through malloc/calloc calls.
diff --git a/teshsuite/smpi/coll-allreduce-with-leaks/mc-coll-allreduce-with-leaks.tesh b/teshsuite/smpi/coll-allreduce-with-leaks/mc-coll-allreduce-with-leaks.tesh
index 3a93258773..51f0f52048 100644
--- a/teshsuite/smpi/coll-allreduce-with-leaks/mc-coll-allreduce-with-leaks.tesh
+++ b/teshsuite/smpi/coll-allreduce-with-leaks/mc-coll-allreduce-with-leaks.tesh
@@ -9,14 +9,15 @@ $ $VALGRIND_NO_LEAK_CHECK ${bindir:=.}/../../../smpi_script/bin/smpirun -wrapper
 > [0.000000] [mc_safety/INFO] Check a safety property. Reduction is: dpor.
 > [0.000000] [smpi_utils/INFO] Probable memory leaks in your code: SMPI detected 8 unfreed MPI handles : display types and addresses (n max) with --cfg=smpi/list-leaks:n.
 > Running smpirun with -wrapper "valgrind --leak-check=full" can provide more information
-> [0.000000] [smpi_utils/WARNING] Leaked handle of type MPI_Comm
-> [0.000000] [smpi_utils/WARNING] Leaked handle of type MPI_Group
-> [0.000000] [smpi_utils/WARNING] Leaked handle of type MPI_Comm
-> [0.000000] [smpi_utils/WARNING] Leaked handle of type MPI_Group
-> [0.000000] [smpi_utils/WARNING] Leaked handle of type MPI_Comm
-> [0.000000] [smpi_utils/WARNING] Leaked handle of type MPI_Group
-> [0.000000] [smpi_utils/WARNING] Leaked handle of type MPI_Comm
-> [0.000000] [smpi_utils/WARNING] Leaked handle of type MPI_Group
+> [0.000000] [smpi_utils/INFO] To get more information (location of allocations), compile your code with -trace-call-location flag of smpicc/f90
+> [0.000000] [smpi_utils/INFO] Leaked handle of type MPI_Comm
+> [0.000000] [smpi_utils/INFO] Leaked handle of type MPI_Group
+> [0.000000] [smpi_utils/INFO] Leaked handle of type MPI_Comm
+> [0.000000] [smpi_utils/INFO] Leaked handle of type MPI_Group
+> [0.000000] [smpi_utils/INFO] Leaked handle of type MPI_Comm
+> [0.000000] [smpi_utils/INFO] Leaked handle of type MPI_Group
+> [0.000000] [smpi_utils/INFO] Leaked handle of type MPI_Comm
+> [0.000000] [smpi_utils/INFO] Leaked handle of type MPI_Group
 > [0.000000] [smpi_utils/INFO] Memory Usage: Simulated application allocated 128 bytes during its lifetime through malloc/calloc calls.
 > Largest allocation at once from a single process was 16 bytes, at sysdep.h:59. It was called 8 times during the whole simulation.
 > If this is too much, consider sharing allocations for computation buffers.
@@ -24,14 +25,15 @@ $ $VALGRIND_NO_LEAK_CHECK ${bindir:=.}/../../../smpi_script/bin/smpirun -wrapper
 >
 > [0.000000] [smpi_utils/INFO] Probable memory leaks in your code: SMPI detected 8 unfreed MPI handles : display types and addresses (n max) with --cfg=smpi/list-leaks:n.
 > Running smpirun with -wrapper "valgrind --leak-check=full" can provide more information
-> [0.000000] [smpi_utils/WARNING] Leaked handle of type MPI_Comm
-> [0.000000] [smpi_utils/WARNING] Leaked handle of type MPI_Group
-> [0.000000] [smpi_utils/WARNING] Leaked handle of type MPI_Comm
-> [0.000000] [smpi_utils/WARNING] Leaked handle of type MPI_Group
-> [0.000000] [smpi_utils/WARNING] Leaked handle of type MPI_Comm
-> [0.000000] [smpi_utils/WARNING] Leaked handle of type MPI_Group
-> [0.000000] [smpi_utils/WARNING] Leaked handle of type MPI_Comm
-> [0.000000] [smpi_utils/WARNING] Leaked handle of type MPI_Group
+> [0.000000] [smpi_utils/INFO] To get more information (location of allocations), compile your code with -trace-call-location flag of smpicc/f90
+> [0.000000] [smpi_utils/INFO] Leaked handle of type MPI_Comm
+> [0.000000] [smpi_utils/INFO] Leaked handle of type MPI_Group
+> [0.000000] [smpi_utils/INFO] Leaked handle of type MPI_Comm
+> [0.000000] [smpi_utils/INFO] Leaked handle of type MPI_Group
+> [0.000000] [smpi_utils/INFO] Leaked handle of type MPI_Comm
+> [0.000000] [smpi_utils/INFO] Leaked handle of type MPI_Group
+> [0.000000] [smpi_utils/INFO] Leaked handle of type MPI_Comm
+> [0.000000] [smpi_utils/INFO] Leaked handle of type MPI_Group
 > [0.000000] [smpi_utils/INFO] Memory Usage: Simulated application allocated 128 bytes during its lifetime through malloc/calloc calls.
 > Largest allocation at once from a single process was 16 bytes, at sysdep.h:59. It was called 8 times during the whole simulation.
 > If this is too much, consider sharing allocations for computation buffers.
-- 
2.20.1
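
Note for reviewers: the test source coll-allreduce-with-leaks.c is not touched by this patch, so here is a minimal sketch (hypothetical code, not the actual test) of the kind of handle leak the new "at file.c:line" output points at. A communicator created by MPI_Comm_split and never freed leaks both the MPI_Comm and its MPI_Group, which is consistent with the MPI_Group/MPI_Comm pairs attributed to a single source line in the tesh output above when the test is built with -trace-call-location.

/* Hypothetical sketch, not the actual coll-allreduce-with-leaks.c: an
 * allreduce on a communicator that is never freed, so SMPI reports one
 * leaked MPI_Comm and one leaked MPI_Group per rank. */
#include <mpi.h>

int main(int argc, char* argv[])
{
  int rank;
  int in, out;
  MPI_Comm newcomm;

  MPI_Init(&argc, &argv);
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
  in = rank;
  /* With -trace-call-location, the leak report points at this line. */
  MPI_Comm_split(MPI_COMM_WORLD, rank % 2, rank, &newcomm);
  MPI_Allreduce(&in, &out, 1, MPI_INT, MPI_SUM, newcomm);
  /* Intentionally missing: MPI_Comm_free(&newcomm); */
  MPI_Finalize();
  return 0;
}

Building such a file with smpicc -trace-call-location and running it through smpirun with --cfg=smpi/display-allocs:yes --cfg=smpi/list-leaks:10 should yield "Leaked handle of type MPI_Comm at <file>.c:<line>" messages like the ones asserted in coll-allreduce-with-leaks.tesh; without the flag (or under --log=no_loc), only the generic advice line added by this patch is printed before the per-handle messages.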