1 /* -*- Mode: C; c-basic-offset:4 ; -*- */
9 int main( int argc, char **argv )
/* Purpose: MPI test that splits MPI_COMm_WORLD by parity into two groups,
   builds an intercommunicator between them, dups it, has the group leaders
   exchange their membership keys, then merges the intercomm four ways and
   checks each merged communicator against MPI_COMM_WORLD with
   MPI_Comm_compare.  Errors are summed with MPI_Allreduce at the end.

   NOTE(review): this listing is a non-contiguous excerpt -- the embedded
   original line numbers jump (9, 11, 14, 15, 19, ...).  The opening brace of
   main, the declarations of myComm, myFirstComm, status, and verbose, the
   assignment of `key` (presumably key = rank % 2 -- see the "membership key
   in the range [0,1]" comment below; confirm against the full source),
   several closing braces, the errors++ statements, and the MPI_Finalize call
   are all on lines not visible here. */
11 int size, rank, key, his_key, lrank, result;
14 MPI_Comm mySecondComm;
15 int errors = 0, sum_errors;
19 MPI_Init ( &argc, &argv );
20 MPI_Comm_rank ( MPI_COMM_WORLD, &rank);
21 MPI_Comm_size ( MPI_COMM_WORLD, &size);
23 /* Only works for 2 or more processes; the guarding `if (size >= 2)` (or
      similar) is on a line not visible in this excerpt */
25 MPI_Comm merge1, merge2, merge3, merge4;
27 /* Generate membership key in the range [0,1] -- the assignment itself
      is on a missing line; presumably key = rank % 2 (TODO confirm) */
/* Split COMM_WORLD into two intracomms keyed by parity; `rank` as the
   color-internal key preserves the original rank ordering within each half */
30 MPI_Comm_split ( MPI_COMM_WORLD, key, rank, &myComm );
31 /* This creates an intercomm that is the size of comm world
32 but has processes grouped by even and odd */
/* Args: local comm, local leader rank 0, peer comm MPI_COMM_WORLD, remote
   leader (key+1)%2 (rank 0 or 1 of WORLD, i.e. the lowest rank of the other
   parity group), tag 1.  NOTE(review): the continuation line with the output
   argument (presumably &myFirstComm) is missing from this excerpt. */
33 MPI_Intercomm_create (myComm, 0, MPI_COMM_WORLD, (key+1)%2, 1,
35 /* Dup an intercomm */
36 MPI_Comm_dup ( myFirstComm, &mySecondComm );
37 MPI_Comm_rank( mySecondComm, &lrank );
40 /* Leaders communicate with each other -- on an intercomm, rank 0 here
      addresses rank 0 of the REMOTE group.  NOTE(review): the `if (lrank == 0)`
      guard implied by this comment is on a line not visible here. */
42 MPI_Sendrecv (&key, 1, MPI_INT, 0, 0,
43 &his_key, 1, MPI_INT, 0, 0, mySecondComm, &status);
/* The remote leader's key must be the opposite parity of ours */
44 if (key != (his_key+1)%2) {
45 printf( "Received %d but expected %d\n", his_key, (his_key+1)%2 );
/* NOTE(review): the errors++ and closing brace for this check (original
   lines 46-50) are missing from this excerpt */
51 printf("[%d] Failed!\n",rank);
53 if (verbose) printf( "About to merge intercommunicators\n" );
/* Four merges exercising both values of the `high` argument: key and its
   complement give opposite group orderings; 0 and 1 make both sides agree,
   which is permitted -- the MPI standard then orders the groups arbitrarily */
54 MPI_Intercomm_merge ( mySecondComm, key, &merge1 );
55 MPI_Intercomm_merge ( mySecondComm, (key+1)%2, &merge2 );
56 MPI_Intercomm_merge ( mySecondComm, 0, &merge3 );
57 MPI_Intercomm_merge ( mySecondComm, 1, &merge4 );
59 /* We should check that these are correct! An easy test is that
60 the merged comms are all MPI_SIMILAR (unless 2 processes are used,
61 in which case MPI_CONGRUENT is ok) */
62 MPI_Comm_compare( merge1, MPI_COMM_WORLD, &result );
63 if ((size > 2 && result != MPI_SIMILAR) ||
64 (size == 2 && result != MPI_CONGRUENT)) {
/* NOTE(review): errors++ (original line 65) missing from this excerpt */
66 printf( "merge1 is not the same size as comm world\n" );
68 /* merge 2 isn't ordered the same way as the others, even for 2 processes,
      so only MPI_SIMILAR (same members, different ranks) is expected */
69 MPI_Comm_compare( merge2, MPI_COMM_WORLD, &result );
70 if (result != MPI_SIMILAR) {
72 printf( "merge2 is not the same size as comm world\n" );
74 MPI_Comm_compare( merge3, MPI_COMM_WORLD, &result );
75 if ((size > 2 && result != MPI_SIMILAR) ||
76 (size == 2 && result != MPI_CONGRUENT)) {
78 printf( "merge3 is not the same size as comm world\n" );
80 MPI_Comm_compare( merge4, MPI_COMM_WORLD, &result );
81 if ((size > 2 && result != MPI_SIMILAR) ||
82 (size == 2 && result != MPI_CONGRUENT)) {
84 printf( "merge4 is not the same size as comm world\n" );
87 /* Free communicators -- every communicator created above is released;
      MPI_Comm_free sets each handle to MPI_COMM_NULL */
88 if (verbose) printf( "About to free communicators\n" );
89 MPI_Comm_free( &myComm );
90 MPI_Comm_free( &myFirstComm );
91 MPI_Comm_free( &mySecondComm );
92 MPI_Comm_free( &merge1 );
93 MPI_Comm_free( &merge2 );
94 MPI_Comm_free( &merge3 );
95 MPI_Comm_free( &merge4 );
/* Reached when size < 2 (the else branch of the size guard whose braces are
   on lines not visible in this excerpt) */
99 printf("[%d] Failed - at least 2 nodes must be used\n",rank);
/* Global error tally: every rank learns the total so all agree on the verdict */
102 MPI_Barrier( MPI_COMM_WORLD );
103 MPI_Allreduce( &errors, &sum_errors, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD );
104 if (sum_errors > 0) {
105 printf( "%d errors on process %d\n", errors, rank );
107 else if (rank == 0) {
108 printf( " No Errors\n" );
110 /* Finalize and end!  NOTE(review): the MPI_Finalize call and the closing
      brace of main are on lines past the end of this excerpt */