/* Non-topology-specific pipelined linear-reduce function
   0->1, 1->2, 2->3, ..., ->last node: in a pipeline fashion
*/
-int smpi_coll_tuned_reduce_NTSL(void *buf, void *rbuf, int count,
+int Coll_reduce_NTSL::reduce(void *buf, void *rbuf, int count,
MPI_Datatype datatype, MPI_Op op, int root,
MPI_Comm comm)
{
/* when count is not divisible by block size, use default BCAST for the remainder */
if ((remainder != 0) && (count > segment)) {
XBT_WARN("MPI_reduce_NTSL use default MPI_reduce.");
- smpi_mpi_reduce((char *)buf + (pipe_length * increment),
+ Coll_reduce_default::reduce((char *)buf + (pipe_length * increment),
(char *)rbuf + (pipe_length * increment), remainder, datatype, op, root,
comm);
}