-/* Copyright (c) 2007-2021. The SimGrid Team. All rights reserved. */
+/* Copyright (c) 2007-2023. The SimGrid Team. All rights reserved. */
/* This program is free software; you can redistribute it and/or modify it
* under the terms of the license (GNU LGPL) which comes with this package. */
CHECK_REQUEST(7)\
*request = MPI_REQUEST_NULL;\
CHECK_SEND_INPUTS
-
+
#define CHECK_IRECV_INPUTS\
SET_BUF1(buf)\
CHECK_REQUEST(7)\
MPI_Request req = *request;
aid_t my_proc_id = (req->comm() != MPI_COMM_NULL) ? simgrid::s4u::this_actor::get_pid() : -1;
TRACE_smpi_comm_in(my_proc_id, __func__,
- new simgrid::instr::Pt2PtTIData("Start", req->dst(),
- req->size(),
- req->tag(),
+ new simgrid::instr::Pt2PtTIData("Start", MPI_COMM_WORLD->group()->rank(req->dst()), req->size(), req->tag(),
simgrid::smpi::Datatype::encode(req->type())));
if (not TRACE_smpi_view_internals() && req->flags() & MPI_REQ_SEND)
TRACE_smpi_send(my_proc_id, my_proc_id, getPid(req->comm(), req->dst()), req->tag(), req->size());
const SmpiBenchGuard suspend_bench;
aid_t my_proc_id = simgrid::s4u::this_actor::get_pid();
TRACE_smpi_comm_in(my_proc_id, __func__,
- new simgrid::instr::Pt2PtTIData("irecv", src,
+ new simgrid::instr::Pt2PtTIData("irecv", MPI_COMM_WORLD->group()->rank(getPid(comm, src)),
count,
tag, simgrid::smpi::Datatype::encode(datatype)));
*request = simgrid::smpi::Request::irecv(buf, count, datatype, src, tag, comm);
aid_t my_proc_id = simgrid::s4u::this_actor::get_pid();
aid_t trace_dst = getPid(comm, dst);
TRACE_smpi_comm_in(my_proc_id, __func__,
- new simgrid::instr::Pt2PtTIData("isend", dst,
+ new simgrid::instr::Pt2PtTIData("isend", MPI_COMM_WORLD->group()->rank(trace_dst),
count,
tag, simgrid::smpi::Datatype::encode(datatype)));
TRACE_smpi_send(my_proc_id, my_proc_id, trace_dst, tag, count * datatype->size());
aid_t my_proc_id = simgrid::s4u::this_actor::get_pid();
aid_t trace_dst = getPid(comm, dst);
TRACE_smpi_comm_in(my_proc_id, __func__,
- new simgrid::instr::Pt2PtTIData("ISsend", dst,
+ new simgrid::instr::Pt2PtTIData("ISsend", MPI_COMM_WORLD->group()->rank(trace_dst),
count,
tag, simgrid::smpi::Datatype::encode(datatype)));
TRACE_smpi_send(my_proc_id, my_proc_id, trace_dst, tag, count * datatype->size());
} else {
aid_t my_proc_id = simgrid::s4u::this_actor::get_pid();
TRACE_smpi_comm_in(my_proc_id, __func__,
- new simgrid::instr::Pt2PtTIData("recv", src,
+ new simgrid::instr::Pt2PtTIData("recv", MPI_COMM_WORLD->group()->rank(getPid(comm, src)),
count,
tag, simgrid::smpi::Datatype::encode(datatype)));
aid_t src_traced = (status != MPI_STATUS_IGNORE) ? getPid(comm, status->MPI_SOURCE) : getPid(comm, src);
TRACE_smpi_recv(src_traced, my_proc_id, tag);
}
-
+
TRACE_smpi_comm_out(my_proc_id);
}
aid_t my_proc_id = simgrid::s4u::this_actor::get_pid();
aid_t dst_traced = getPid(comm, dst);
TRACE_smpi_comm_in(my_proc_id, __func__,
- new simgrid::instr::Pt2PtTIData("send", dst,
+ new simgrid::instr::Pt2PtTIData("send", MPI_COMM_WORLD->group()->rank(dst_traced),
count,
tag, simgrid::smpi::Datatype::encode(datatype)));
if (not TRACE_smpi_view_internals()) {
int bsend_buf_size = 0;
void* bsend_buf = nullptr;
smpi_process()->bsend_buffer(&bsend_buf, &bsend_buf_size);
- int size = datatype->get_extent() * count;
- if (bsend_buf == nullptr || bsend_buf_size < size + MPI_BSEND_OVERHEAD)
+ if (bsend_buf == nullptr || bsend_buf_size < datatype->get_extent() * count + MPI_BSEND_OVERHEAD)
return MPI_ERR_BUFFER;
TRACE_smpi_comm_in(my_proc_id, __func__,
- new simgrid::instr::Pt2PtTIData("bsend", dst,
+ new simgrid::instr::Pt2PtTIData("bsend", MPI_COMM_WORLD->group()->rank(dst_traced),
count,
tag, simgrid::smpi::Datatype::encode(datatype)));
if (not TRACE_smpi_view_internals()) {
int bsend_buf_size = 0;
void* bsend_buf = nullptr;
smpi_process()->bsend_buffer(&bsend_buf, &bsend_buf_size);
- int size = datatype->get_extent() * count;
- if (bsend_buf == nullptr || bsend_buf_size < size + MPI_BSEND_OVERHEAD)
+ if (bsend_buf == nullptr || bsend_buf_size < datatype->get_extent() * count + MPI_BSEND_OVERHEAD)
return MPI_ERR_BUFFER;
TRACE_smpi_comm_in(my_proc_id, __func__,
- new simgrid::instr::Pt2PtTIData("ibsend", dst,
+ new simgrid::instr::Pt2PtTIData("ibsend", MPI_COMM_WORLD->group()->rank(trace_dst),
count,
tag, simgrid::smpi::Datatype::encode(datatype)));
TRACE_smpi_send(my_proc_id, my_proc_id, trace_dst, tag, count * datatype->size());
aid_t my_proc_id = simgrid::s4u::this_actor::get_pid();
aid_t dst_traced = getPid(comm, dst);
TRACE_smpi_comm_in(my_proc_id, __func__,
- new simgrid::instr::Pt2PtTIData("Ssend", dst,
+ new simgrid::instr::Pt2PtTIData("Ssend", MPI_COMM_WORLD->group()->rank(dst_traced),
count,
tag, simgrid::smpi::Datatype::encode(datatype)));
TRACE_smpi_send(my_proc_id, my_proc_id, dst_traced, tag, count * datatype->size());
CHECK_TYPE(8, recvtype)
CHECK_BUFFER(1, sendbuf, sendcount, sendtype)
CHECK_BUFFER(6, recvbuf, recvcount, recvtype)
+ CHECK_ARGS(sendbuf == recvbuf && sendcount > 0 && recvcount > 0, MPI_ERR_BUFFER,
+ "%s: Invalid parameters 1 and 6: sendbuf and recvbuf must be disjoint", __func__);
CHECK_TAG(10, recvtag)
CHECK_COMM(11)
const SmpiBenchGuard suspend_bench;
retval = MPI_ERR_RANK;
} else {
aid_t my_proc_id = simgrid::s4u::this_actor::get_pid();
- aid_t dst_traced = getPid(comm, dst);
- aid_t src_traced = getPid(comm, src);
+ aid_t dst_traced = MPI_COMM_WORLD->group()->rank(getPid(comm, dst));
+ aid_t src_traced = MPI_COMM_WORLD->group()->rank(getPid(comm, src));
// FIXME: Hack the way to trace this one
auto dst_hack = std::make_shared<std::vector<int>>();
return retval;
}
+int PMPI_Isendrecv(const void* sendbuf, int sendcount, MPI_Datatype sendtype, int dst, int sendtag, void* recvbuf,
+                   int recvcount, MPI_Datatype recvtype, int src, int recvtag, MPI_Comm comm, MPI_Request* request)
+{
+  // Nonblocking combined send/receive: posts a send of 'sendbuf' to 'dst' and a
+  // receive into 'recvbuf' from 'src' on 'comm', and hands back a single request
+  // in '*request' covering both halves. Returns MPI_SUCCESS or an MPI error code.
+  int retval = 0;
+  SET_BUF1(sendbuf)
+  SET_BUF2(recvbuf)
+  CHECK_COUNT(2, sendcount)
+  CHECK_TYPE(3, sendtype)
+  CHECK_TAG(5, sendtag)
+  CHECK_COUNT(7, recvcount)
+  CHECK_TYPE(8, recvtype)
+  CHECK_BUFFER(1, sendbuf, sendcount, sendtype)
+  CHECK_BUFFER(6, recvbuf, recvcount, recvtype)
+  // Unlike sendrecv_replace, the two buffers must not alias (only an error when
+  // both transfers actually move data, i.e. both counts are positive).
+  CHECK_ARGS(sendbuf == recvbuf && sendcount > 0 && recvcount > 0, MPI_ERR_BUFFER,
+             "%s: Invalid parameters 1 and 6: sendbuf and recvbuf must be disjoint", __func__);
+  CHECK_TAG(10, recvtag)
+  CHECK_COMM(11)
+  CHECK_REQUEST(12)
+  *request = MPI_REQUEST_NULL;
+  const SmpiBenchGuard suspend_bench;
+
+  // A MPI_PROC_NULL peer turns the corresponding half into a no-op: only the
+  // live half is actually posted as a plain isend/irecv.
+  if (src == MPI_PROC_NULL && dst != MPI_PROC_NULL){
+    *request=simgrid::smpi::Request::isend(sendbuf, sendcount, sendtype, dst, sendtag, comm);
+    retval = MPI_SUCCESS;
+  } else if (dst == MPI_PROC_NULL){
+    *request = simgrid::smpi::Request::irecv(recvbuf, recvcount, recvtype, src, recvtag, comm);
+    retval = MPI_SUCCESS;
+  // Rank validation: 'dst' must be a valid rank in 'comm'; 'src' may additionally
+  // be MPI_ANY_SOURCE.
+  } else if (dst >= comm->group()->size() || dst <0 ||
+            (src!=MPI_ANY_SOURCE && (src >= comm->group()->size() || src <0))){
+    retval = MPI_ERR_RANK;
+  } else {
+    aid_t my_proc_id = simgrid::s4u::this_actor::get_pid();
+    // Tracing reports peers as MPI_COMM_WORLD ranks, translated from the pid.
+    // NOTE(review): 'src' can still be MPI_ANY_SOURCE on this path; confirm
+    // getPid(comm, MPI_ANY_SOURCE) is well-defined for the trace lookup.
+    aid_t dst_traced = MPI_COMM_WORLD->group()->rank(getPid(comm, dst));
+    aid_t src_traced = MPI_COMM_WORLD->group()->rank(getPid(comm, src));
+
+    // FIXME: Hack the way to trace this one
+    auto dst_hack = std::make_shared<std::vector<int>>();
+    auto src_hack = std::make_shared<std::vector<int>>();
+    dst_hack->push_back(dst_traced);
+    src_hack->push_back(src_traced);
+    TRACE_smpi_comm_in(my_proc_id, __func__,
+                       new simgrid::instr::VarCollTIData(
+                           "isendRecv", -1, sendcount,
+                           dst_hack, recvcount, src_hack,
+                           simgrid::smpi::Datatype::encode(sendtype), simgrid::smpi::Datatype::encode(recvtype)));
+
+    TRACE_smpi_send(my_proc_id, my_proc_id, dst_traced, sendtag, sendcount * sendtype->size());
+
+    simgrid::smpi::Request::isendrecv(sendbuf, sendcount, sendtype, dst, sendtag, recvbuf, recvcount, recvtype, src,
+                                      recvtag, comm, request);
+    retval = MPI_SUCCESS;
+
+    TRACE_smpi_recv(src_traced, my_proc_id, recvtag);
+    TRACE_smpi_comm_out(my_proc_id);
+  }
+
+  return retval;
+}
+
+
int PMPI_Sendrecv_replace(void* buf, int count, MPI_Datatype datatype, int dst, int sendtag, int src, int recvtag,
MPI_Comm comm, MPI_Status* status)
{
CHECK_BUFFER(1, buf, count, datatype)
int size = datatype->get_extent() * count;
- xbt_assert(size > 0);
+ if (size == 0)
+ return MPI_SUCCESS;
+ else if (size <0)
+ return MPI_ERR_ARG;
std::vector<char> recvbuf(size);
retval =
MPI_Sendrecv(buf, count, datatype, dst, sendtag, recvbuf.data(), count, datatype, src, recvtag, comm, status);
return retval;
}
+int PMPI_Isendrecv_replace(void* buf, int count, MPI_Datatype datatype, int dst, int sendtag, int src, int recvtag,
+                           MPI_Comm comm, MPI_Request* request)
+{
+  // Nonblocking sendrecv-replace: 'buf' is both the outgoing and incoming buffer,
+  // so the outgoing data is first copied into a temporary and the exchange is then
+  // delegated to PMPI_Isendrecv (temporary sends, 'buf' receives).
+  int retval = 0;
+  SET_BUF1(buf)
+  CHECK_COUNT(2, count)
+  CHECK_TYPE(3, datatype)
+  CHECK_BUFFER(1, buf, count, datatype)
+  CHECK_REQUEST(9) // 'request' is parameter 9 of this function (not 10 as previously reported)
+  *request = MPI_REQUEST_NULL;
+
+  // Zero-sized transfer: nothing to exchange, succeed without posting anything.
+  // A negative extent*count product means invalid arguments.
+  int size = datatype->get_extent() * count;
+  if (size == 0)
+    return MPI_SUCCESS;
+  else if (size <0)
+    return MPI_ERR_ARG;
+  // NOTE(review): 'sendbuf' is destroyed when this function returns, while the
+  // isendrecv it feeds is nonblocking — confirm the SMPI send path copies or
+  // detaches the buffer before return.
+  std::vector<char> sendbuf(size);
+  simgrid::smpi::Datatype::copy(buf, count, datatype, sendbuf.data(), count, datatype);
+  retval =
+      MPI_Isendrecv(sendbuf.data(), count, datatype, dst, sendtag, buf, count, datatype, src, recvtag, comm, request);
+  return retval;
+}
+
int PMPI_Test(MPI_Request * request, int *flag, MPI_Status * status)
{
int retval = 0;
} else {
aid_t my_proc_id = ((*request)->comm() != MPI_COMM_NULL) ? simgrid::s4u::this_actor::get_pid() : -1;
- TRACE_smpi_comm_in(my_proc_id, __func__, new simgrid::instr::NoOpTIData("test"));
+ TRACE_smpi_comm_in(my_proc_id, __func__,
+ new simgrid::instr::WaitTIData("test", MPI_COMM_WORLD->group()->rank((*request)->src()),
+ MPI_COMM_WORLD->group()->rank((*request)->dst()),
+ (*request)->tag()));
retval = simgrid::smpi::Request::test(request,status, flag);
TRACE_smpi_comm_out(my_proc_id);
simgrid::smpi::Status::empty(status);
- CHECK_NULL(1, MPI_ERR_ARG, request)
+ CHECK_NULL(1, MPI_ERR_ARG, request)
if (*request == MPI_REQUEST_NULL) {
retval = MPI_SUCCESS;
} else {
aid_t my_proc_id = (*request)->comm() != MPI_COMM_NULL ? simgrid::s4u::this_actor::get_pid() : -1;
TRACE_smpi_comm_in(my_proc_id, __func__,
- new simgrid::instr::WaitTIData(MPI_COMM_WORLD->group()->rank((*request)->src()),
+ new simgrid::instr::WaitTIData("wait", MPI_COMM_WORLD->group()->rank((*request)->src()),
MPI_COMM_WORLD->group()->rank((*request)->dst()),
(*request)->tag()));
return MPI_ERR_ARG;
}
*flag=simgrid::smpi::Status::cancelled(status);
- return MPI_SUCCESS;
+ return MPI_SUCCESS;
}
int PMPI_Status_set_cancelled(MPI_Status* status, int flag){
return MPI_ERR_ARG;
}
simgrid::smpi::Status::set_cancelled(status,flag);
- return MPI_SUCCESS;
+ return MPI_SUCCESS;
}
int PMPI_Status_set_elements(MPI_Status* status, MPI_Datatype datatype, int count){