teshsuite/smpi/mpich3-test/f77/init/CMakeLists.txt
teshsuite/smpi/mpich3-test/f77/pt2pt/CMakeLists.txt
teshsuite/smpi/mpich3-test/f77/util/CMakeLists.txt
+ teshsuite/smpi/mpich3-test/f77/topo/CMakeLists.txt
teshsuite/smpi/mpich3-test/f90/coll/CMakeLists.txt
teshsuite/smpi/mpich3-test/f90/datatype/CMakeLists.txt
teshsuite/smpi/mpich3-test/f90/init/CMakeLists.txt
add_subdirectory(${CMAKE_HOME_DIRECTORY}/teshsuite/smpi/mpich3-test/f77/ext)
add_subdirectory(${CMAKE_HOME_DIRECTORY}/teshsuite/smpi/mpich3-test/f77/init)
add_subdirectory(${CMAKE_HOME_DIRECTORY}/teshsuite/smpi/mpich3-test/f77/pt2pt)
+add_subdirectory(${CMAKE_HOME_DIRECTORY}/teshsuite/smpi/mpich3-test/f77/topo)
add_subdirectory(${CMAKE_HOME_DIRECTORY}/teshsuite/smpi/mpich3-test/f90/util)
add_subdirectory(${CMAKE_HOME_DIRECTORY}/teshsuite/smpi/mpich3-test/f90/coll)
add_subdirectory(${CMAKE_HOME_DIRECTORY}/teshsuite/smpi/mpich3-test/f90/datatype)
init
#comm
ext
-#topo
+topo
--- /dev/null
+cmake_minimum_required(VERSION 2.6)
+
+if(enable_smpi AND enable_smpi_MPICH3_testsuite AND SMPI_FORTRAN)
+  # Mirror the sibling mpich3-test directories: build with the SMPI
+  # compiler wrappers, except on Windows where smpi_main.h is injected
+  # via compile flags instead.
+  if(WIN32)
+    set(CMAKE_C_FLAGS "-include ${CMAKE_HOME_DIRECTORY}/include/smpi/smpi_main.h")
+  else()
+    set(CMAKE_C_COMPILER "${CMAKE_BINARY_DIR}/smpi_script/bin/smpicc")
+    set(CMAKE_Fortran_COMPILER "${CMAKE_BINARY_DIR}/smpi_script/bin/smpiff")
+  endif()
+
+  set(EXECUTABLE_OUTPUT_PATH "${CMAKE_CURRENT_BINARY_DIR}")
+  include_directories("${CMAKE_HOME_DIRECTORY}/include/smpi")
+  include_directories("${CMAKE_CURRENT_SOURCE_DIR}/../include/")
+
+  add_executable(cartcrf cartcrf.f)
+  target_link_libraries(cartcrf simgrid mtest_f77)
+
+  # The dgraph tests are shipped (see examples_src below and the
+  # testlist) but not built yet.
+  # NOTE(review): the disabled dgraph_wgtf rule previously pointed at
+  # dgraph_unwgtf.f by mistake; corrected to dgraph_wgtf.f so it builds
+  # the right test once enabled.
+# add_executable(dgraph_unwgtf dgraph_unwgtf.f)
+# add_executable(dgraph_wgtf dgraph_wgtf.f)
+# target_link_libraries(dgraph_wgtf simgrid mtest_f77)
+# target_link_libraries(dgraph_unwgtf simgrid mtest_f77)
+endif()
+
+# Propagate the (unchanged) tesh/xml/bin lists and the new source and
+# text files to the parent scope so the top-level CMakeLists can
+# distribute them.
+set(tesh_files
+  ${tesh_files}
+  PARENT_SCOPE
+  )
+set(xml_files
+  ${xml_files}
+  PARENT_SCOPE
+  )
+set(examples_src
+  ${examples_src}
+  ${CMAKE_CURRENT_SOURCE_DIR}/dgraph_wgtf.f
+  ${CMAKE_CURRENT_SOURCE_DIR}/dgraph_unwgtf.f
+  ${CMAKE_CURRENT_SOURCE_DIR}/cartcrf.f
+  PARENT_SCOPE
+  )
+set(bin_files
+  ${bin_files}
+  PARENT_SCOPE
+  )
+set(txt_files
+  ${txt_files}
+  ${CMAKE_CURRENT_SOURCE_DIR}/testlist
+  PARENT_SCOPE
+  )
--- /dev/null
+C -*- Mode: Fortran; -*-
+C
+C (C) 2004 by Argonne National Laboratory.
+C See COPYRIGHT in top-level directory.
+C
+C Test various combinations of periodic and non-periodic Cartesian
+C communicators
+C
+ program main
+ implicit none
+ include 'mpif.h'
+ integer errs, ierr
+ integer ndims, nperiods, i, size
+ integer comm, source, dest, newcomm
+ integer maxdims
+ parameter (maxdims=7)
+ logical periods(maxdims), outperiods(maxdims)
+ integer dims(maxdims), outdims(maxdims)
+ integer outcoords(maxdims)
+
+ errs = 0
+ call mtest_init( ierr )
+
+C
+C For up to 6 dimensions, test with periodicity in 0 through all
+C dimensions. The test is computed by both:
+C get info about the created communicator
+C apply cart shift
+C Note that a dimension can have size one, so that these tests
+C can work with small numbers (even 1) of processes
+C
+ comm = MPI_COMM_WORLD
+ call mpi_comm_size( comm, size, ierr )
+ do ndims = 1, 6
+ do nperiods = 0, ndims
+C Mark the first nperiods dimensions periodic, the rest not; let
+C MPI choose the decomposition of size ranks over ndims dimensions
+C (dims(i) = 0 means "choose for me").
+ do i=1,ndims
+ periods(i) = .false.
+ dims(i) = 0
+ enddo
+ do i=1,nperiods
+ periods(i) = .true.
+ enddo
+
+ call mpi_dims_create( size, ndims, dims, ierr )
+ call mpi_cart_create( comm, ndims, dims, periods, .false.,
+ $ newcomm, ierr )
+
+ if (newcomm .ne. MPI_COMM_NULL) then
+C Verify that the periodicity recorded in the communicator matches
+C what was requested.
+ call mpi_cart_get( newcomm, maxdims, outdims, outperiods,
+ $ outcoords, ierr )
+C print *, 'Coords = '
+ do i=1, ndims
+C print *, i, '(', outcoords(i), ')'
+ if (periods(i) .neqv. outperiods(i)) then
+ errs = errs + 1
+ print *, ' Wrong value for periods ', i
+ print *, ' ndims = ', ndims
+ endif
+ enddo
+
+C Shift by +1 in each dimension: at the upper edge the destination
+C must be a real rank for periodic dimensions and MPI_PROC_NULL for
+C non-periodic ones.
+ do i=1, ndims
+ call mpi_cart_shift( newcomm, i-1, 1, source, dest,
+ $ ierr )
+ if (outcoords(i) .eq. outdims(i)-1) then
+ if (periods(i)) then
+ if (dest .eq. MPI_PROC_NULL) then
+ errs = errs + 1
+ print *, 'Expected rank, got proc_null'
+ endif
+ else
+ if (dest .ne. MPI_PROC_NULL) then
+ errs = errs + 1
+ print *, 'Expected procnull, got ', dest
+ endif
+ endif
+ endif
+
+C Shift by -1: the same check at the lower edge (coordinate 0).
+ call mpi_cart_shift( newcomm, i-1, -1, source, dest,
+ $ ierr )
+ if (outcoords(i) .eq. 0) then
+ if (periods(i)) then
+ if (dest .eq. MPI_PROC_NULL) then
+ errs = errs + 1
+ print *, 'Expected rank, got proc_null'
+ endif
+ else
+ if (dest .ne. MPI_PROC_NULL) then
+ errs = errs + 1
+ print *, 'Expected procnull, got ', dest
+ endif
+ endif
+ endif
+ enddo
+ call mpi_comm_free( newcomm, ierr )
+ endif
+
+ enddo
+ enddo
+
+ call mtest_finalize( errs )
+ call mpi_finalize( ierr )
+ end
--- /dev/null
+C -*- Mode: Fortran; -*-
+C
+C (C) 2011 by Argonne National Laboratory.
+C See COPYRIGHT in top-level directory.
+C
+C This program is a Fortran version of dgraph_unwgt.c
+C Specify a distributed graph of a bidirectional ring of the MPI_COMM_WORLD,
+C i.e. everyone only talks to left and right neighbors.
+
+C Check that dgraph_comm is a distributed-graph communicator that
+C describes the expected bidirectional ring: topology type
+C MPI_DIST_GRAPH, created with MPI_UNWEIGHTED, and exactly two
+C sources and two destinations per rank, each at distance 1
+C (modulo world_size) from the calling rank in MPI_COMM_WORLD.
+C Returns .true. on success; prints a diagnostic and returns
+C .false. on the first failed check.
+C NOTE(review): removed non-standard C-style trailing semicolons
+C from the declarations and the MPI_Topo_test call.
+      logical function validate_dgraph(dgraph_comm)
+      implicit none
+      include 'mpif.h'
+
+      integer dgraph_comm
+      integer comm_topo
+      integer src_sz, dest_sz
+      integer ierr
+      logical wgt_flag
+      integer srcs(2), dests(2)
+
+      integer world_rank, world_size
+      integer idx, nbr_sep
+
+      comm_topo = MPI_UNDEFINED
+      call MPI_Topo_test(dgraph_comm, comm_topo, ierr)
+      if (comm_topo .ne. MPI_DIST_GRAPH) then
+         validate_dgraph = .false.
+         write(6,*) "dgraph_comm is NOT of type MPI_DIST_GRAPH."
+         return
+      endif
+
+      call MPI_Dist_graph_neighbors_count(dgraph_comm,
+     &     src_sz, dest_sz, wgt_flag, ierr)
+      if (ierr .ne. MPI_SUCCESS) then
+         validate_dgraph = .false.
+         write(6,*) "MPI_Dist_graph_neighbors_count() fails!"
+         return
+      endif
+C The graph was created with MPI_UNWEIGHTED, so the weight flag
+C must come back .false.
+      if (wgt_flag) then
+         validate_dgraph = .false.
+         write(6,*) "dgraph_comm is NOT created with MPI_UNWEIGHTED."
+         return
+      endif
+
+      if (src_sz .ne. 2 .or. dest_sz .ne. 2) then
+         validate_dgraph = .false.
+         write(6,*) "source or destination edge array is not size 2."
+         write(6,"('src_sz = ',I3,', dest_sz = ',I3)") src_sz, dest_sz
+         return
+      endif
+
+      call MPI_Dist_graph_neighbors(dgraph_comm,
+     &     src_sz, srcs, MPI_UNWEIGHTED,
+     &     dest_sz, dests, MPI_UNWEIGHTED,
+     &     ierr)
+      if (ierr .ne. MPI_SUCCESS) then
+         validate_dgraph = .false.
+         write(6,*) "MPI_Dist_graph_neighbors() fails!"
+         return
+      endif
+
+C Check if the neighbors returned from MPI are really the
+C nearest neighbors within a ring.
+      call MPI_Comm_rank(MPI_COMM_WORLD, world_rank, ierr)
+      call MPI_Comm_size(MPI_COMM_WORLD, world_size, ierr)
+
+      do idx = 1, src_sz
+         nbr_sep = iabs(srcs(idx) - world_rank)
+         if (nbr_sep .ne. 1 .and. nbr_sep .ne. (world_size-1)) then
+            validate_dgraph = .false.
+            write(6,"('srcs[',I3,']=',I3,
+     &                ' is NOT a neighbor of my rank',I3)")
+     &           idx, srcs(idx), world_rank
+            return
+         endif
+      enddo
+      do idx = 1, dest_sz
+         nbr_sep = iabs(dests(idx) - world_rank)
+         if (nbr_sep .ne. 1 .and. nbr_sep .ne. (world_size-1)) then
+            validate_dgraph = .false.
+            write(6,"('dests[',I3,']=',I3,
+     &                ' is NOT a neighbor of my rank',I3)")
+     &           idx, dests(idx), world_rank
+            return
+         endif
+      enddo
+
+      validate_dgraph = .true.
+      return
+      end
+
+C Map in_rank onto the ring [0, world_size-1].  The input is at most
+C one period outside the range (it comes from world_rank +/- 1), so
+C a single add or subtract of world_size suffices.
+      integer function ring_rank(world_size, in_rank)
+      implicit none
+      integer world_size, in_rank
+      if (in_rank .lt. 0) then
+C Wrap around past the left edge.
+         ring_rank = in_rank + world_size
+      else if (in_rank .ge. world_size) then
+C Wrap around past the right edge.
+         ring_rank = in_rank - world_size
+      else
+C Already inside the ring.
+         ring_rank = in_rank
+      endif
+      return
+      end
+
+
+
+C Build a bidirectional ring over MPI_COMM_WORLD with both
+C distributed-graph constructors (MPI_Dist_graph_create and
+C MPI_Dist_graph_create_adjacent), validate the resulting topology,
+C and also exercise the MPI-3 MPI_WEIGHTS_EMPTY constant with empty
+C edge sets.
+C NOTE(review): fixed the two error messages that concatenated
+C "...does not create" directly onto "a bidirectional..." with no
+C separating space, and removed non-standard trailing semicolons.
+      program dgraph_unwgt
+      implicit none
+      include 'mpif.h'
+
+      integer ring_rank
+      external ring_rank
+      logical validate_dgraph
+      external validate_dgraph
+      integer errs, ierr
+
+      integer dgraph_comm
+      integer world_size, world_rank
+      integer src_sz, dest_sz
+      integer degs(1)
+      integer srcs(2), dests(2)
+
+      errs = 0
+      call MTEST_Init(ierr)
+      call MPI_Comm_rank(MPI_COMM_WORLD, world_rank, ierr)
+      call MPI_Comm_size(MPI_COMM_WORLD, world_size, ierr)
+
+C Each rank contributes its own two outgoing edges (left and right
+C ring neighbors), unweighted.
+      srcs(1) = world_rank
+      degs(1) = 2
+      dests(1) = ring_rank(world_size, world_rank-1)
+      dests(2) = ring_rank(world_size, world_rank+1)
+      call MPI_Dist_graph_create(MPI_COMM_WORLD, 1, srcs, degs, dests,
+     &     MPI_UNWEIGHTED, MPI_INFO_NULL,
+     &     .true., dgraph_comm, ierr)
+      if (ierr .ne. MPI_SUCCESS) then
+         write(6,*) "MPI_Dist_graph_create() fails!"
+         call MPI_Abort(MPI_COMM_WORLD, 1, ierr)
+         stop
+      endif
+      if (.not. validate_dgraph(dgraph_comm)) then
+         write(6,*) "MPI_Dist_graph_create() does not create "
+     &        //"a bidirectional ring graph!"
+         call MPI_Abort(MPI_COMM_WORLD, 1, ierr)
+         stop
+      endif
+      call MPI_Comm_free(dgraph_comm, ierr)
+
+C Now create one with MPI_WEIGHTS_EMPTY and an empty edge set.
+C NOTE that MPI_WEIGHTS_EMPTY was added in MPI-3 and does not
+C appear before then.  Including this test means that this test
+C cannot be compiled if the MPI version is less than 3 (see the
+C testlist file).
+      degs(1) = 0
+      call MPI_Dist_graph_create(MPI_COMM_WORLD, 1, srcs, degs, dests,
+     &     MPI_WEIGHTS_EMPTY, MPI_INFO_NULL,
+     &     .true., dgraph_comm, ierr)
+      if (ierr .ne. MPI_SUCCESS) then
+         write(6,*) "MPI_Dist_graph_create() fails!"
+         call MPI_Abort(MPI_COMM_WORLD, 1, ierr)
+         stop
+      endif
+      call MPI_Comm_free(dgraph_comm, ierr)
+
+C Same ring via the adjacent-specification constructor: explicit
+C source and destination neighbor lists, unweighted.
+      src_sz = 2
+      srcs(1) = ring_rank(world_size, world_rank-1)
+      srcs(2) = ring_rank(world_size, world_rank+1)
+      dest_sz = 2
+      dests(1) = ring_rank(world_size, world_rank-1)
+      dests(2) = ring_rank(world_size, world_rank+1)
+      call MPI_Dist_graph_create_adjacent(MPI_COMM_WORLD,
+     &     src_sz, srcs, MPI_UNWEIGHTED,
+     &     dest_sz, dests, MPI_UNWEIGHTED,
+     &     MPI_INFO_NULL, .true., dgraph_comm, ierr)
+      if (ierr .ne. MPI_SUCCESS) then
+         write(6,*) "MPI_Dist_graph_create_adjacent() fails!"
+         call MPI_Abort(MPI_COMM_WORLD, 1, ierr)
+         stop
+      endif
+      if (.not. validate_dgraph(dgraph_comm)) then
+         write(6,*) "MPI_Dist_graph_create_adjacent() does not create "
+     &        //"a bidirectional ring graph!"
+         call MPI_Abort(MPI_COMM_WORLD, 1, ierr)
+         stop
+      endif
+      call MPI_Comm_free(dgraph_comm, ierr)
+
+C Now create one with MPI_WEIGHTS_EMPTY (empty neighbor lists).
+      src_sz = 0
+      dest_sz = 0
+      call MPI_Dist_graph_create_adjacent(MPI_COMM_WORLD,
+     &     src_sz, srcs, MPI_WEIGHTS_EMPTY,
+     &     dest_sz, dests, MPI_WEIGHTS_EMPTY,
+     &     MPI_INFO_NULL, .true., dgraph_comm, ierr)
+      if (ierr .ne. MPI_SUCCESS) then
+         write(6,*) "MPI_Dist_graph_create_adjacent() fails!"
+         call MPI_Abort(MPI_COMM_WORLD, 1, ierr)
+         stop
+      endif
+      call MPI_Comm_free(dgraph_comm, ierr)
+
+      call MTEST_Finalize(errs)
+      call MPI_Finalize(ierr)
+      end
--- /dev/null
+C -*- Mode: Fortran; -*-
+C
+C (C) 2011 by Argonne National Laboratory.
+C See COPYRIGHT in top-level directory.
+C
+C This program is a Fortran version of dgraph_unwgt.c, adapted to
+C use explicit (unit) edge weights instead of MPI_UNWEIGHTED.
+C Specify a distributed graph of a bidirectional ring of the MPI_COMM_WORLD,
+C i.e. everyone only talks to left and right neighbors.
+
+C Check that dgraph_comm is a weighted distributed-graph communicator
+C describing the expected bidirectional ring: topology type
+C MPI_DIST_GRAPH, created WITH weights, exactly two sources and two
+C destinations per rank (each at ring distance 1 from the caller),
+C and equal weights on both edges in each direction.
+C Returns .true. on success; prints a diagnostic and returns
+C .false. on the first failed check.
+C NOTE(review): removed non-standard C-style trailing semicolons
+C from the declarations and the MPI_Topo_test call.
+      logical function validate_dgraph(dgraph_comm)
+      implicit none
+      include 'mpif.h'
+
+      integer dgraph_comm
+      integer comm_topo
+      integer src_sz, dest_sz
+      integer ierr
+      logical wgt_flag
+      integer srcs(2), dests(2)
+      integer src_wgts(2), dest_wgts(2)
+
+      integer world_rank, world_size
+      integer idx, nbr_sep
+
+      comm_topo = MPI_UNDEFINED
+      call MPI_Topo_test(dgraph_comm, comm_topo, ierr)
+      if (comm_topo .ne. MPI_DIST_GRAPH) then
+         validate_dgraph = .false.
+         write(6,*) "dgraph_comm is NOT of type MPI_DIST_GRAPH."
+         return
+      endif
+
+      call MPI_Dist_graph_neighbors_count(dgraph_comm,
+     &     src_sz, dest_sz, wgt_flag, ierr)
+      if (ierr .ne. MPI_SUCCESS) then
+         validate_dgraph = .false.
+         write(6,*) "MPI_Dist_graph_neighbors_count() fails!"
+         return
+      endif
+C This variant is built with real weight arrays, so the weight
+C flag must come back .true.
+      if (.not. wgt_flag) then
+         validate_dgraph = .false.
+         write(6,*) "dgraph_comm is created with MPI_UNWEIGHTED!"
+         return
+      endif
+
+      if (src_sz .ne. 2 .or. dest_sz .ne. 2) then
+         validate_dgraph = .false.
+         write(6,*) "source or destination edge array is not size 2."
+         write(6,"('src_sz = ',I3,', dest_sz = ',I3)") src_sz, dest_sz
+         return
+      endif
+
+      call MPI_Dist_graph_neighbors(dgraph_comm,
+     &     src_sz, srcs, src_wgts,
+     &     dest_sz, dests, dest_wgts,
+     &     ierr)
+      if (ierr .ne. MPI_SUCCESS) then
+         validate_dgraph = .false.
+         write(6,*) "MPI_Dist_graph_neighbors() fails!"
+         return
+      endif
+
+C Check if the neighbors returned from MPI are really the
+C nearest neighbors within a ring, and that both edges in each
+C direction carry the same weight (all were created equal to 1).
+      call MPI_Comm_rank(MPI_COMM_WORLD, world_rank, ierr)
+      call MPI_Comm_size(MPI_COMM_WORLD, world_size, ierr)
+
+      do idx = 1, src_sz
+         nbr_sep = iabs(srcs(idx) - world_rank)
+         if (nbr_sep .ne. 1 .and. nbr_sep .ne. (world_size-1)) then
+            validate_dgraph = .false.
+            write(6,"('srcs[',I3,']=',I3,
+     &                ' is NOT a neighbor of my rank',I3)")
+     &           idx, srcs(idx), world_rank
+            return
+         endif
+      enddo
+      if (src_wgts(1) .ne. src_wgts(2)) then
+         validate_dgraph = .false.
+         write(6,"('src_wgts = [',I3,',',I3,']')")
+     &        src_wgts(1), src_wgts(2)
+         return
+      endif
+      do idx = 1, dest_sz
+         nbr_sep = iabs(dests(idx) - world_rank)
+         if (nbr_sep .ne. 1 .and. nbr_sep .ne. (world_size-1)) then
+            validate_dgraph = .false.
+            write(6,"('dests[',I3,']=',I3,
+     &                ' is NOT a neighbor of my rank',I3)")
+     &           idx, dests(idx), world_rank
+            return
+         endif
+      enddo
+      if (dest_wgts(1) .ne. dest_wgts(2)) then
+         validate_dgraph = .false.
+         write(6,"('dest_wgts = [',I3,',',I3,']')")
+     &        dest_wgts(1), dest_wgts(2)
+         return
+      endif
+
+      validate_dgraph = .true.
+      return
+      end
+
+C Map in_rank onto the ring [0, world_size-1].  The input is at most
+C one period outside the range (it comes from world_rank +/- 1), so
+C a single add or subtract of world_size suffices.
+      integer function ring_rank(world_size, in_rank)
+      implicit none
+      integer world_size, in_rank
+      if (in_rank .lt. 0) then
+C Wrap around past the left edge.
+         ring_rank = in_rank + world_size
+      else if (in_rank .ge. world_size) then
+C Wrap around past the right edge.
+         ring_rank = in_rank - world_size
+      else
+C Already inside the ring.
+         ring_rank = in_rank
+      endif
+      return
+      end
+
+
+
+C Weighted variant: build a bidirectional ring over MPI_COMM_WORLD
+C with both distributed-graph constructors, passing explicit unit
+C weight arrays instead of MPI_UNWEIGHTED, and validate the result.
+C NOTE(review): upstream copy-paste named this program dgraph_unwgt;
+C renamed to dgraph_wgt to match the weighted test it implements
+C (the program name is not referenced elsewhere).  Also removed a
+C non-standard trailing semicolon.
+      program dgraph_wgt
+      implicit none
+      include 'mpif.h'
+
+      integer ring_rank
+      external ring_rank
+      logical validate_dgraph
+      external validate_dgraph
+      integer errs, ierr
+
+      integer dgraph_comm
+      integer world_size, world_rank
+      integer src_sz, dest_sz
+      integer degs(1)
+      integer srcs(2), dests(2)
+      integer src_wgts(2), dest_wgts(2)
+
+      errs = 0
+      call MTEST_Init(ierr)
+      call MPI_Comm_rank(MPI_COMM_WORLD, world_rank, ierr)
+      call MPI_Comm_size(MPI_COMM_WORLD, world_size, ierr)
+
+C Each rank contributes its own two outgoing edges (left and right
+C ring neighbors) with unit weights.
+      srcs(1) = world_rank
+      degs(1) = 2
+      dests(1) = ring_rank(world_size, world_rank-1)
+      dests(2) = ring_rank(world_size, world_rank+1)
+      dest_wgts(1) = 1
+      dest_wgts(2) = 1
+      call MPI_Dist_graph_create(MPI_COMM_WORLD, 1, srcs, degs, dests,
+     &     dest_wgts, MPI_INFO_NULL,
+     &     .true., dgraph_comm, ierr)
+      if (ierr .ne. MPI_SUCCESS) then
+         write(6,*) "MPI_Dist_graph_create() fails!"
+         call MPI_Abort(MPI_COMM_WORLD, 1, ierr)
+         stop
+      endif
+      if (.not. validate_dgraph(dgraph_comm)) then
+         write(6,*) "MPI_Dist_graph_create() does not create "
+     &        //"a bidirectional ring graph!"
+         call MPI_Abort(MPI_COMM_WORLD, 1, ierr)
+         stop
+      endif
+      call MPI_Comm_free(dgraph_comm, ierr)
+
+C Same ring via the adjacent-specification constructor: explicit
+C source and destination lists, both with unit weights.
+      src_sz = 2
+      srcs(1) = ring_rank(world_size, world_rank-1)
+      srcs(2) = ring_rank(world_size, world_rank+1)
+      src_wgts(1) = 1
+      src_wgts(2) = 1
+      dest_sz = 2
+      dests(1) = ring_rank(world_size, world_rank-1)
+      dests(2) = ring_rank(world_size, world_rank+1)
+      dest_wgts(1) = 1
+      dest_wgts(2) = 1
+      call MPI_Dist_graph_create_adjacent(MPI_COMM_WORLD,
+     &     src_sz, srcs, src_wgts,
+     &     dest_sz, dests, dest_wgts,
+     &     MPI_INFO_NULL, .true., dgraph_comm, ierr)
+      if (ierr .ne. MPI_SUCCESS) then
+         write(6,*) "MPI_Dist_graph_create_adjacent() fails!"
+         call MPI_Abort(MPI_COMM_WORLD, 1, ierr)
+         stop
+      endif
+      if (.not. validate_dgraph(dgraph_comm)) then
+         write(6,*) "MPI_Dist_graph_create_adjacent() does not create "
+     &        //"a bidirectional ring graph!"
+         call MPI_Abort(MPI_COMM_WORLD, 1, ierr)
+         stop
+      endif
+      call MPI_Comm_free(dgraph_comm, ierr)
+
+      call MTEST_Finalize(errs)
+      call MPI_Finalize(ierr)
+      end
--- /dev/null
+cartcrf 4
+dgraph_wgtf 4 mpiversion=2.2
+dgraph_unwgtf 4 mpiversion=3.0