[SMPI] Replay: Add typedefs
[simgrid.git] / src / smpi / internals / smpi_replay.cpp
index 8b5a76c..6ac0204 100644 (file)
 #include <memory>
 #include <numeric>
 #include <unordered_map>
+#include <sstream>
 #include <vector>
 
 using simgrid::s4u::Actor;
 
+#include <tuple>
+// From https://stackoverflow.com/questions/7110301/generic-hash-for-tuples-in-unordered-map-unordered-set
+// This is all just to make std::unordered_map work with std::tuple. If we need this in other places,
+// this could go into a header file.
+namespace hash_tuple{
+    template <typename TT>
+    struct hash
+    {
+        size_t
+        operator()(TT const& tt) const
+        {
+            return std::hash<TT>()(tt);
+        }
+    };
+
+    template <class T>
+    inline void hash_combine(std::size_t& seed, T const& v)
+    {
+        seed ^= hash_tuple::hash<T>()(v) + 0x9e3779b9 + (seed<<6) + (seed>>2);
+    }
+
+    // Recursive template code derived from Matthieu M.
+    template <class Tuple, size_t Index = std::tuple_size<Tuple>::value - 1>
+    struct HashValueImpl
+    {
+      static void apply(size_t& seed, Tuple const& tuple)
+      {
+        HashValueImpl<Tuple, Index-1>::apply(seed, tuple);
+        hash_combine(seed, std::get<Index>(tuple));
+      }
+    };
+
+    template <class Tuple>
+    struct HashValueImpl<Tuple,0>
+    {
+      static void apply(size_t& seed, Tuple const& tuple)
+      {
+        hash_combine(seed, std::get<0>(tuple));
+      }
+    };
+
+    template <typename ... TT>
+    struct hash<std::tuple<TT...>>
+    {
+        size_t
+        operator()(std::tuple<TT...> const& tt) const
+        {
+            size_t seed = 0;
+            HashValueImpl<std::tuple<TT...> >::apply(seed, tt);
+            return seed;
+        }
+    };
+}
+
 XBT_LOG_NEW_DEFAULT_SUBCATEGORY(smpi_replay,smpi,"Trace Replay with SMPI");
 
-static int active_processes  = 0;
 static std::unordered_map<int, std::vector<MPI_Request>*> reqq;
+typedef std::tuple</*sender*/ int, /* receiver */ int, /* tag */ int> req_key_t;
+typedef std::unordered_map<req_key_t, MPI_Request, hash_tuple::hash<std::tuple<int,int,int>>> req_storage_t;
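
As a side note, the two typedefs above are what make the tuple-keyed request table work: std::unordered_map needs a hash functor for std::tuple, which the hash_tuple helpers supply. A minimal, self-contained sketch of the same pattern (not part of the patch; the string value and all names are illustrative, and it assumes the hash_tuple namespace above is in scope):

#include <string>
#include <tuple>
#include <unordered_map>

using example_key_t = std::tuple<int /*sender*/, int /*receiver*/, int /*tag*/>;

static int tuple_key_example()
{
  // hash_tuple::hash<example_key_t> plays the role of the missing std::hash specialization
  std::unordered_map<example_key_t, std::string, hash_tuple::hash<example_key_t>> pending;
  pending.insert({example_key_t(0, 1, 42), "Isend from 0 to 1 with tag 42"});
  auto it = pending.find(example_key_t(0, 1, 42)); // same key, same hash: lookup succeeds
  return (it == pending.end()) ? 1 : 0;
}
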
 
 static MPI_Datatype MPI_DEFAULT_TYPE;
 
 #define CHECK_ACTION_PARAMS(action, mandatory, optional)                                                               \
   {                                                                                                                    \
-    if (action.size() < static_cast<unsigned long>(mandatory + 2))                                                     \
+    if (action.size() < static_cast<unsigned long>(mandatory + 2)) {                                                   \
+      std::stringstream ss;                                                                                            \
+      for (const auto& elem : action) {                                                                                \
+        ss << elem << " ";                                                                                             \
+      }                                                                                                                \
       THROWF(arg_error, 0, "%s replay failed.\n"                                                                       \
                            "%zu items were given on the line. First two should be process_id and action.  "            \
                            "This action needs after them %lu mandatory arguments, and accepts %lu optional ones. \n"   \
+                           "The full line that was given is:\n   %s\n"                                                 \
                            "Please contact the Simgrid team if support is needed",                                     \
-             __FUNCTION__, action.size(), static_cast<unsigned long>(mandatory),                                       \
-             static_cast<unsigned long>(optional));                                                                    \
+             __func__, action.size(), static_cast<unsigned long>(mandatory), static_cast<unsigned long>(optional),     \
+             ss.str().c_str());                                                                                        \
+    }                                                                                                                  \
   }
 
 static void log_timed_action(simgrid::xbt::ReplayAction& action, double clock)
@@ -48,12 +110,12 @@ static void log_timed_action(simgrid::xbt::ReplayAction& action, double clock)
 
 static std::vector<MPI_Request>* get_reqq_self()
 {
-  return reqq.at(Actor::self()->getPid());
+  return reqq.at(simgrid::s4u::this_actor::get_pid());
 }
 
 static void set_reqq_self(std::vector<MPI_Request> *mpi_request)
 {
-   reqq.insert({Actor::self()->getPid(), mpi_request});
+  reqq.insert({simgrid::s4u::this_actor::get_pid(), mpi_request});
 }
 
 /* Helper function */
@@ -65,26 +127,99 @@ static double parse_double(std::string string)
 namespace simgrid {
 namespace smpi {
 
-namespace Replay {
+namespace replay {
+
+class RequestStorage {
+private:
+    req_storage_t store;
+
+public:
+    RequestStorage() {}
+    int size()
+    {
+      return store.size();
+    }
+
+    req_storage_t& get_store()
+    {
+      return store;
+    }
+
+    void get_requests(std::vector<MPI_Request>& vec)
+    {
+      for (auto& pair : store) {
+        auto& req = pair.second;
+        auto my_proc_id = simgrid::s4u::this_actor::get_pid();
+        if (req != MPI_REQUEST_NULL && (req->src() == my_proc_id || req->dst() == my_proc_id)) {
+          vec.push_back(pair.second);
+          pair.second->print_request("MM");
+        }
+      }
+    }
+
+    MPI_Request find(int src, int dst, int tag)
+    {
+      req_storage_t::iterator it = store.find(req_key_t(src, dst, tag));
+      return (it == store.end()) ? MPI_REQUEST_NULL : it->second;
+    }
+
+    void remove(MPI_Request req)
+    {
+      if (req == MPI_REQUEST_NULL) return;
+
+      store.erase(req_key_t(req->src()-1, req->dst()-1, req->tag()));
+    }
+
+    void add(MPI_Request req)
+    {
+      if (req != MPI_REQUEST_NULL) // Can and does happen in the case of TestAction
+        store.insert({req_key_t(req->src()-1, req->dst()-1, req->tag()), req});
+    }
+
+    /* Sometimes we need to re-insert MPI_REQUEST_NULL but we still need src,dst and tag */
+    void addNullRequest(int src, int dst, int tag)
+    {
+      store.insert({req_key_t(src, dst, tag), MPI_REQUEST_NULL});
+    }
+};
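
For orientation, a hypothetical sketch of how a replay action could drive RequestStorage (not part of the patch; this diff only introduces the class, its call sites are not shown here, and the names below are made up). Note that add() and remove() key the request under (req->src()-1, req->dst()-1, req->tag()), while find() and addNullRequest() use the given values unchanged:

static void request_storage_example(simgrid::smpi::replay::RequestStorage& storage, MPI_Request req)
{
  storage.add(req);                            // stored under (src-1, dst-1, tag), unless req is MPI_REQUEST_NULL
  MPI_Request found = storage.find(0, 1, 42);  // lookup with explicit src/dst/tag
  if (found != MPI_REQUEST_NULL)
    storage.remove(found);                     // erases the matching (src-1, dst-1, tag) entry
  storage.addNullRequest(0, 1, 42);            // placeholder entry when only the key is known
}
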
+
 class ActionArgParser {
 public:
+  virtual ~ActionArgParser() = default;
   virtual void parse(simgrid::xbt::ReplayAction& action, std::string name) { CHECK_ACTION_PARAMS(action, 0, 0) }
 };
 
+class WaitTestParser : public ActionArgParser {
+public:
+  int src;
+  int dst;
+  int tag;
+
+  void parse(simgrid::xbt::ReplayAction& action, std::string name) override
+  {
+    CHECK_ACTION_PARAMS(action, 3, 0)
+    src = std::stoi(action[2]);
+    dst = std::stoi(action[3]);
+    tag = std::stoi(action[4]);
+  }
+};
+
 class SendRecvParser : public ActionArgParser {
 public:
   /* communication partner; if we send, this is the receiver and vice versa */
   int partner;
   double size;
+  int tag;
   MPI_Datatype datatype1 = MPI_DEFAULT_TYPE;
 
   void parse(simgrid::xbt::ReplayAction& action, std::string name) override
   {
-    CHECK_ACTION_PARAMS(action, 2, 1)
+    CHECK_ACTION_PARAMS(action, 3, 1)
     partner = std::stoi(action[2]);
-    size    = parse_double(action[3]);
-    if (action.size() > 4)
-      datatype1 = simgrid::smpi::Datatype::decode(action[4]);
+    tag     = std::stoi(action[3]);
+    size    = parse_double(action[4]);
+    if (action.size() > 5)
+      datatype1 = simgrid::smpi::Datatype::decode(action[5]);
   }
 };
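
To make the new field layout concrete, here is a hypothetical trace line in the extended send/recv format and the index each value is read from (the line itself is made up; only the positions follow the parser above):

// "0 Isend 1 42 1e6 4"
//   action[0] = "0"     process id
//   action[1] = "Isend" action name
//   action[2] = "1"     partner (the receiver for a send, the sender for a recv)
//   action[3] = "42"    tag (new in this patch)
//   action[4] = "1e6"   size, parsed as a double
//   action[5] = "4"     optional datatype id, passed to Datatype::decode() if present
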
 
@@ -229,24 +364,28 @@ public:
         datatype2 = simgrid::smpi::Datatype::decode(action[5 + comm_size]);
     }
     else {
-      int datatype_index = 0, disp_index = 0;
-      if (action.size() > 3 + 2 * comm_size) { /* datatype + disp are specified */
+      int datatype_index = 0;
+      int disp_index     = 0;
+      /* The 3 comes from "0 gather <sendcount>", which must always be present.
+       * The + comm_size is the recvcounts array, which must also be present
+       */
+      if (action.size() > 3 + comm_size + comm_size) { /* datatype + disp are specified */
         datatype_index = 3 + comm_size;
         disp_index     = datatype_index + 1;
-      } else if (action.size() > 3 + 2 * comm_size) { /* disps specified; datatype is not specified; use the default one */
-        datatype_index = -1;
+        datatype1      = simgrid::smpi::Datatype::decode(action[datatype_index]);
+        datatype2      = simgrid::smpi::Datatype::decode(action[datatype_index]);
+      } else if (action.size() > 3 + comm_size + 2) { /* disps specified; datatype is not specified; use the default one */
         disp_index     = 3 + comm_size;
-      } else if (action.size() > 3 + comm_size) { /* only datatype, no disp specified */
+      } else if (action.size() > 3 + comm_size) { /* only datatype, no disp specified */
         datatype_index = 3 + comm_size;
+        datatype1      = simgrid::smpi::Datatype::decode(action[datatype_index]);
+        datatype2      = simgrid::smpi::Datatype::decode(action[datatype_index]);
       }
 
       if (disp_index != 0) {
         for (unsigned int i = 0; i < comm_size; i++)
           disps[i]          = std::stoi(action[disp_index + i]);
       }
-
-      datatype1 = simgrid::smpi::Datatype::decode(action[datatype_index]);
-      datatype2 = simgrid::smpi::Datatype::decode(action[datatype_index]);
     }
 
     for (unsigned int i = 0; i < comm_size; i++) {
@@ -390,12 +529,12 @@ public:
 template <class T> class ReplayAction {
 protected:
   const std::string name;
+  const int my_proc_id;
   T args;
 
-  int my_proc_id;
-
 public:
-  explicit ReplayAction(std::string name) : name(name), my_proc_id(simgrid::s4u::Actor::self()->getPid()) {}
+  explicit ReplayAction(std::string name) : name(name), my_proc_id(simgrid::s4u::this_actor::get_pid()) {}
+  virtual ~ReplayAction() = default;
 
   virtual void execute(simgrid::xbt::ReplayAction& action)
   {
@@ -420,7 +559,7 @@ public:
   }
 };
 
-class WaitAction : public ReplayAction<ActionArgParser> {
+class WaitAction : public ReplayAction<WaitTestParser> {
 public:
   WaitAction() : ReplayAction("Wait") {}
   void kernel(simgrid::xbt::ReplayAction& action) override
@@ -430,7 +569,7 @@ public:
     MPI_Request request = get_reqq_self()->back();
     get_reqq_self()->pop_back();
 
-    if (request == nullptr) {
+    if (request == MPI_REQUEST_NULL) {
       /* Assume that the trace is well formed, meaning the comm might have been caught by a MPI_test. Then just
        * return.*/
       return;
@@ -440,38 +579,36 @@ public:
 
     // Must be taken before Request::wait() since the request may be set to
     // MPI_REQUEST_NULL by Request::wait!
-    int src                  = request->comm()->group()->rank(request->src());
-    int dst                  = request->comm()->group()->rank(request->dst());
     bool is_wait_for_receive = (request->flags() & RECV);
     // TODO: Here we take the rank while we normally take the process id (look for my_proc_id)
-    TRACE_smpi_comm_in(rank, __FUNCTION__, new simgrid::instr::NoOpTIData("wait"));
+    TRACE_smpi_comm_in(rank, __func__, new simgrid::instr::NoOpTIData("wait"));
 
     MPI_Status status;
     Request::wait(&request, &status);
 
     TRACE_smpi_comm_out(rank);
     if (is_wait_for_receive)
-      TRACE_smpi_recv(src, dst, 0);
+      TRACE_smpi_recv(args.src, args.dst, args.tag);
   }
 };
 
 class SendAction : public ReplayAction<SendRecvParser> {
 public:
   SendAction() = delete;
-  SendAction(std::string name) : ReplayAction(name) {}
+  explicit SendAction(std::string name) : ReplayAction(name) {}
   void kernel(simgrid::xbt::ReplayAction& action) override
   {
-    int dst_traced = MPI_COMM_WORLD->group()->actor(args.partner)->getPid();
+    int dst_traced = MPI_COMM_WORLD->group()->actor(args.partner)->get_pid();
 
-    TRACE_smpi_comm_in(my_proc_id, __FUNCTION__, new simgrid::instr::Pt2PtTIData(name, args.partner, args.size,
-                                                                                 Datatype::encode(args.datatype1)));
+    TRACE_smpi_comm_in(my_proc_id, __func__, new simgrid::instr::Pt2PtTIData(name, args.partner, args.size,
+                                                                             args.tag, Datatype::encode(args.datatype1)));
     if (not TRACE_smpi_view_internals())
-      TRACE_smpi_send(my_proc_id, my_proc_id, dst_traced, 0, args.size * args.datatype1->size());
+      TRACE_smpi_send(my_proc_id, my_proc_id, dst_traced, args.tag, args.size * args.datatype1->size());
 
     if (name == "send") {
-      Request::send(nullptr, args.size, args.datatype1, args.partner, 0, MPI_COMM_WORLD);
+      Request::send(nullptr, args.size, args.datatype1, args.partner, args.tag, MPI_COMM_WORLD);
     } else if (name == "Isend") {
-      MPI_Request request = Request::isend(nullptr, args.size, args.datatype1, args.partner, 0, MPI_COMM_WORLD);
+      MPI_Request request = Request::isend(nullptr, args.size, args.datatype1, args.partner, args.tag, MPI_COMM_WORLD);
       get_reqq_self()->push_back(request);
     } else {
       xbt_die("Don't know this action, %s", name.c_str());
@@ -487,29 +624,29 @@ public:
   explicit RecvAction(std::string name) : ReplayAction(name) {}
   void kernel(simgrid::xbt::ReplayAction& action) override
   {
-    int src_traced = MPI_COMM_WORLD->group()->actor(args.partner)->getPid();
+    int src_traced = MPI_COMM_WORLD->group()->actor(args.partner)->get_pid();
 
-    TRACE_smpi_comm_in(my_proc_id, __FUNCTION__, new simgrid::instr::Pt2PtTIData(name, args.partner, args.size,
-                                                                                 Datatype::encode(args.datatype1)));
+    TRACE_smpi_comm_in(my_proc_id, __func__, new simgrid::instr::Pt2PtTIData(name, args.partner, args.size,
+                                                                             args.tag, Datatype::encode(args.datatype1)));
 
     MPI_Status status;
     // unknown size from the receiver point of view
     if (args.size <= 0.0) {
-      Request::probe(args.partner, 0, MPI_COMM_WORLD, &status);
+      Request::probe(args.partner, args.tag, MPI_COMM_WORLD, &status);
       args.size = status.count;
     }
 
     if (name == "recv") {
-      Request::recv(nullptr, args.size, args.datatype1, args.partner, 0, MPI_COMM_WORLD, &status);
+      Request::recv(nullptr, args.size, args.datatype1, args.partner, args.tag, MPI_COMM_WORLD, &status);
     } else if (name == "Irecv") {
-      MPI_Request request = Request::irecv(nullptr, args.size, args.datatype1, args.partner, 0, MPI_COMM_WORLD);
+      MPI_Request request = Request::irecv(nullptr, args.size, args.datatype1, args.partner, args.tag, MPI_COMM_WORLD);
       get_reqq_self()->push_back(request);
     }
 
     TRACE_smpi_comm_out(my_proc_id);
     // TODO: Check why this was only activated in the "recv" case and not in the "Irecv" case
     if (name == "recv" && not TRACE_smpi_view_internals()) {
-      TRACE_smpi_recv(src_traced, my_proc_id, 0);
+      TRACE_smpi_recv(src_traced, my_proc_id, args.tag);
     }
   }
 };
@@ -525,7 +662,7 @@ public:
   }
 };
 
-class TestAction : public ReplayAction<ActionArgParser> {
+class TestAction : public ReplayAction<WaitTestParser> {
 public:
   TestAction() : ReplayAction("Test") {}
   void kernel(simgrid::xbt::ReplayAction& action) override
@@ -535,7 +672,7 @@ public:
     // if request is null here, this may mean that a previous test has succeeded
     // Different times in traced application and replayed version may lead to this
     // In this case, ignore the extra calls.
-    if (request != nullptr) {
+    if (request != MPI_REQUEST_NULL) {
       TRACE_smpi_testing_in(my_proc_id);
 
       MPI_Status status;
@@ -562,9 +699,6 @@ public:
 
     /* start a simulated timer */
     smpi_process()->simulated_start();
-    /*initialize the number of active processes */
-    active_processes = smpi_process_count();
-
     set_reqq_self(new std::vector<MPI_Request>);
   }
 };
@@ -583,8 +717,7 @@ public:
     const unsigned int count_requests = get_reqq_self()->size();
 
     if (count_requests > 0) {
-      TRACE_smpi_comm_in(my_proc_id, __FUNCTION__,
-                         new simgrid::instr::Pt2PtTIData("waitAll", -1, count_requests, ""));
+      TRACE_smpi_comm_in(my_proc_id, __func__, new simgrid::instr::Pt2PtTIData("waitAll", -1, count_requests, ""));
       std::vector<std::pair</*sender*/int,/*recv*/int>> sender_receiver;
       for (const auto& req : (*get_reqq_self())) {
         if (req && (req->flags() & RECV)) {
@@ -607,7 +740,7 @@ public:
   BarrierAction() : ReplayAction("barrier") {}
   void kernel(simgrid::xbt::ReplayAction& action) override
   {
-    TRACE_smpi_comm_in(my_proc_id, __FUNCTION__, new simgrid::instr::NoOpTIData("barrier"));
+    TRACE_smpi_comm_in(my_proc_id, __func__, new simgrid::instr::NoOpTIData("barrier"));
     Colls::barrier(MPI_COMM_WORLD);
     TRACE_smpi_comm_out(my_proc_id);
   }
@@ -619,7 +752,7 @@ public:
   void kernel(simgrid::xbt::ReplayAction& action) override
   {
     TRACE_smpi_comm_in(my_proc_id, "action_bcast",
-                       new simgrid::instr::CollTIData("bcast", MPI_COMM_WORLD->group()->actor(args.root)->getPid(),
+                       new simgrid::instr::CollTIData("bcast", MPI_COMM_WORLD->group()->actor(args.root)->get_pid(),
                                                       -1.0, args.size, -1, Datatype::encode(args.datatype1), ""));
 
     Colls::bcast(send_buffer(args.size * args.datatype1->size()), args.size, args.datatype1, args.root, MPI_COMM_WORLD);
@@ -634,8 +767,9 @@ public:
   void kernel(simgrid::xbt::ReplayAction& action) override
   {
     TRACE_smpi_comm_in(my_proc_id, "action_reduce",
-                       new simgrid::instr::CollTIData("reduce", MPI_COMM_WORLD->group()->actor(args.root)->getPid(), args.comp_size,
-                                                      args.comm_size, -1, Datatype::encode(args.datatype1), ""));
+                       new simgrid::instr::CollTIData("reduce", MPI_COMM_WORLD->group()->actor(args.root)->get_pid(),
+                                                      args.comp_size, args.comm_size, -1,
+                                                      Datatype::encode(args.datatype1), ""));
 
     Colls::reduce(send_buffer(args.comm_size * args.datatype1->size()),
         recv_buffer(args.comm_size * args.datatype1->size()), args.comm_size, args.datatype1, MPI_OP_NULL, args.root, MPI_COMM_WORLD);
@@ -671,9 +805,9 @@ public:
                                                     Datatype::encode(args.datatype1),
                                                     Datatype::encode(args.datatype2)));
 
-    Colls::alltoall(send_buffer(args.send_size*args.comm_size* args.datatype1->size()), 
-      args.send_size, args.datatype1, recv_buffer(args.recv_size * args.comm_size * args.datatype2->size()),
-      args.recv_size, args.datatype2, MPI_COMM_WORLD);
+    Colls::alltoall(send_buffer(args.send_size * args.comm_size * args.datatype1->size()), args.send_size,
+                    args.datatype1, recv_buffer(args.recv_size * args.comm_size * args.datatype2->size()),
+                    args.recv_size, args.datatype2, MPI_COMM_WORLD);
 
     TRACE_smpi_comm_out(my_proc_id);
   }
@@ -681,7 +815,7 @@ public:
 
 class GatherAction : public ReplayAction<GatherArgParser> {
 public:
-  GatherAction(std::string name) : ReplayAction(name) {}
+  explicit GatherAction(std::string name) : ReplayAction(name) {}
   void kernel(simgrid::xbt::ReplayAction& action) override
   {
     TRACE_smpi_comm_in(my_proc_id, name.c_str(), new simgrid::instr::CollTIData(name, (name == "gather") ? args.root : -1, -1.0, args.send_size, args.recv_size,
@@ -702,7 +836,7 @@ public:
 
 class GatherVAction : public ReplayAction<GatherVArgParser> {
 public:
-  GatherVAction(std::string name) : ReplayAction(name) {}
+  explicit GatherVAction(std::string name) : ReplayAction(name) {}
   void kernel(simgrid::xbt::ReplayAction& action) override
   {
     int rank = MPI_COMM_WORLD->rank();
@@ -712,14 +846,14 @@ public:
                                                Datatype::encode(args.datatype1), Datatype::encode(args.datatype2)));
 
     if (name == "gatherV") {
-      Colls::gatherv(send_buffer(args.send_size * args.datatype1->size()), args.send_size, args.datatype1, 
-                     (rank == args.root) ? recv_buffer(args.recv_size_sum  * args.datatype2->size()) : nullptr, args.recvcounts->data(), args.disps.data(), args.datatype2, args.root,
-                     MPI_COMM_WORLD);
+      Colls::gatherv(send_buffer(args.send_size * args.datatype1->size()), args.send_size, args.datatype1,
+                     (rank == args.root) ? recv_buffer(args.recv_size_sum * args.datatype2->size()) : nullptr,
+                     args.recvcounts->data(), args.disps.data(), args.datatype2, args.root, MPI_COMM_WORLD);
     }
     else {
-      Colls::allgatherv(send_buffer(args.send_size * args.datatype1->size()), args.send_size, args.datatype1, 
-                        recv_buffer(args.recv_size_sum * args.datatype2->size()), args.recvcounts->data(), args.disps.data(), args.datatype2,
-                    MPI_COMM_WORLD);
+      Colls::allgatherv(send_buffer(args.send_size * args.datatype1->size()), args.send_size, args.datatype1,
+                        recv_buffer(args.recv_size_sum * args.datatype2->size()), args.recvcounts->data(),
+                        args.disps.data(), args.datatype2, MPI_COMM_WORLD);
     }
 
     TRACE_smpi_comm_out(my_proc_id);
@@ -754,9 +888,10 @@ public:
           nullptr, Datatype::encode(args.datatype1),
           Datatype::encode(args.datatype2)));
 
-    Colls::scatterv((rank == args.root) ? send_buffer(args.send_size_sum * args.datatype1->size()) : nullptr, args.sendcounts->data(), args.disps.data(), 
-        args.datatype1, recv_buffer(args.recv_size * args.datatype2->size()), args.recv_size, args.datatype2, args.root,
-        MPI_COMM_WORLD);
+    Colls::scatterv((rank == args.root) ? send_buffer(args.send_size_sum * args.datatype1->size()) : nullptr,
+                    args.sendcounts->data(), args.disps.data(), args.datatype1,
+                    recv_buffer(args.recv_size * args.datatype2->size()), args.recv_size, args.datatype2, args.root,
+                    MPI_COMM_WORLD);
 
     TRACE_smpi_comm_out(my_proc_id);
   }
@@ -772,8 +907,9 @@ public:
                                                          std::to_string(args.comp_size), /* ugly hack to print comp_size */
                                                          Datatype::encode(args.datatype1)));
 
-    Colls::reduce_scatter(send_buffer(args.recv_size_sum * args.datatype1->size()), recv_buffer(args.recv_size_sum * args.datatype1->size()), 
-                          args.recvcounts->data(), args.datatype1, MPI_OP_NULL, MPI_COMM_WORLD);
+    Colls::reduce_scatter(send_buffer(args.recv_size_sum * args.datatype1->size()),
+                          recv_buffer(args.recv_size_sum * args.datatype1->size()), args.recvcounts->data(),
+                          args.datatype1, MPI_OP_NULL, MPI_COMM_WORLD);
 
     smpi_execute_flops(args.comp_size);
     TRACE_smpi_comm_out(my_proc_id);
@@ -785,10 +921,10 @@ public:
   AllToAllVAction() : ReplayAction("allToAllV") {}
   void kernel(simgrid::xbt::ReplayAction& action) override
   {
-    TRACE_smpi_comm_in(my_proc_id, __FUNCTION__,
-        new simgrid::instr::VarCollTIData("allToAllV", -1, args.send_size_sum, args.sendcounts, args.recv_size_sum, args.recvcounts,
-          Datatype::encode(args.datatype1),
-          Datatype::encode(args.datatype2)));
+    TRACE_smpi_comm_in(my_proc_id, __func__,
+                       new simgrid::instr::VarCollTIData(
+                           "allToAllV", -1, args.send_size_sum, args.sendcounts, args.recv_size_sum, args.recvcounts,
+                           Datatype::encode(args.datatype1), Datatype::encode(args.datatype2)));
 
     Colls::alltoallv(send_buffer(args.send_buf_size * args.datatype1->size()), args.sendcounts->data(), args.senddisps.data(), args.datatype1,
                      recv_buffer(args.recv_buf_size * args.datatype2->size()), args.recvcounts->data(), args.recvdisps.data(), args.datatype2, MPI_COMM_WORLD);
@@ -799,6 +935,7 @@ public:
 } // Replay Namespace
 }} // namespace simgrid::smpi
 
+std::vector<simgrid::smpi::replay::RequestStorage> storage;
 /** @brief Only initialize the replay, don't do it for real */
 void smpi_replay_init(int* argc, char*** argv)
 {
@@ -806,38 +943,38 @@ void smpi_replay_init(int* argc, char*** argv)
   smpi_process()->mark_as_initialized();
   smpi_process()->set_replaying(true);
 
-  int my_proc_id = Actor::self()->getPid();
+  int my_proc_id = simgrid::s4u::this_actor::get_pid();
   TRACE_smpi_init(my_proc_id);
   TRACE_smpi_computing_init(my_proc_id);
   TRACE_smpi_comm_in(my_proc_id, "smpi_replay_run_init", new simgrid::instr::NoOpTIData("init"));
   TRACE_smpi_comm_out(my_proc_id);
-  xbt_replay_action_register("init", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::InitAction().execute(action); });
+  xbt_replay_action_register("init", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::InitAction().execute(action); });
   xbt_replay_action_register("finalize", [](simgrid::xbt::ReplayAction& action) { /* nothing to do */ });
-  xbt_replay_action_register("comm_size", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::CommunicatorAction().execute(action); });
-  xbt_replay_action_register("comm_split",[](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::CommunicatorAction().execute(action); });
-  xbt_replay_action_register("comm_dup",  [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::CommunicatorAction().execute(action); });
-
-  xbt_replay_action_register("send",  [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::SendAction("send").execute(action); });
-  xbt_replay_action_register("Isend", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::SendAction("Isend").execute(action); });
-  xbt_replay_action_register("recv",  [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::RecvAction("recv").execute(action); });
-  xbt_replay_action_register("Irecv", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::RecvAction("Irecv").execute(action); });
-  xbt_replay_action_register("test",  [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::TestAction().execute(action); });
-  xbt_replay_action_register("wait",  [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::WaitAction().execute(action); });
-  xbt_replay_action_register("waitAll", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::WaitAllAction().execute(action); });
-  xbt_replay_action_register("barrier", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::BarrierAction().execute(action); });
-  xbt_replay_action_register("bcast",   [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::BcastAction().execute(action); });
-  xbt_replay_action_register("reduce",  [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::ReduceAction().execute(action); });
-  xbt_replay_action_register("allReduce", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::AllReduceAction().execute(action); });
-  xbt_replay_action_register("allToAll", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::AllToAllAction().execute(action); });
-  xbt_replay_action_register("allToAllV", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::AllToAllVAction().execute(action); });
-  xbt_replay_action_register("gather",   [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::GatherAction("gather").execute(action); });
-  xbt_replay_action_register("scatter",  [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::ScatterAction().execute(action); });
-  xbt_replay_action_register("gatherV",  [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::GatherVAction("gatherV").execute(action); });
-  xbt_replay_action_register("scatterV", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::ScatterVAction().execute(action); });
-  xbt_replay_action_register("allGather", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::GatherAction("allGather").execute(action); });
-  xbt_replay_action_register("allGatherV", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::GatherVAction("allGatherV").execute(action); });
-  xbt_replay_action_register("reduceScatter", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::ReduceScatterAction().execute(action); });
-  xbt_replay_action_register("compute", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::Replay::ComputeAction().execute(action); });
+  xbt_replay_action_register("comm_size", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::CommunicatorAction().execute(action); });
+  xbt_replay_action_register("comm_split",[](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::CommunicatorAction().execute(action); });
+  xbt_replay_action_register("comm_dup",  [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::CommunicatorAction().execute(action); });
+
+  xbt_replay_action_register("send",  [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::SendAction("send").execute(action); });
+  xbt_replay_action_register("Isend", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::SendAction("Isend").execute(action); });
+  xbt_replay_action_register("recv",  [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::RecvAction("recv").execute(action); });
+  xbt_replay_action_register("Irecv", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::RecvAction("Irecv").execute(action); });
+  xbt_replay_action_register("test",  [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::TestAction().execute(action); });
+  xbt_replay_action_register("wait",  [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::WaitAction().execute(action); });
+  xbt_replay_action_register("waitAll", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::WaitAllAction().execute(action); });
+  xbt_replay_action_register("barrier", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::BarrierAction().execute(action); });
+  xbt_replay_action_register("bcast",   [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::BcastAction().execute(action); });
+  xbt_replay_action_register("reduce",  [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::ReduceAction().execute(action); });
+  xbt_replay_action_register("allReduce", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::AllReduceAction().execute(action); });
+  xbt_replay_action_register("allToAll", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::AllToAllAction().execute(action); });
+  xbt_replay_action_register("allToAllV", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::AllToAllVAction().execute(action); });
+  xbt_replay_action_register("gather",   [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::GatherAction("gather").execute(action); });
+  xbt_replay_action_register("scatter",  [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::ScatterAction().execute(action); });
+  xbt_replay_action_register("gatherV",  [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::GatherVAction("gatherV").execute(action); });
+  xbt_replay_action_register("scatterV", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::ScatterVAction().execute(action); });
+  xbt_replay_action_register("allGather", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::GatherAction("allGather").execute(action); });
+  xbt_replay_action_register("allGatherV", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::GatherVAction("allGatherV").execute(action); });
+  xbt_replay_action_register("reduceScatter", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::ReduceScatterAction().execute(action); });
+  xbt_replay_action_register("compute", [](simgrid::xbt::ReplayAction& action) { simgrid::smpi::replay::ComputeAction().execute(action); });
 
   //if we have a delayed start, sleep here.
   if(*argc>2){
@@ -854,6 +991,8 @@ void smpi_replay_init(int* argc, char*** argv)
 /** @brief actually run the replay after initialization */
 void smpi_replay_main(int* argc, char*** argv)
 {
+  static int active_processes = 0;
+  active_processes++;
   simgrid::xbt::replay_runner(*argc, *argv);
 
   /* and now, finalize everything */
@@ -880,12 +1019,13 @@ void smpi_replay_main(int* argc, char*** argv)
     smpi_free_replay_tmp_buffers();
   }
 
-  TRACE_smpi_comm_in(Actor::self()->getPid(), "smpi_replay_run_finalize", new simgrid::instr::NoOpTIData("finalize"));
+  TRACE_smpi_comm_in(simgrid::s4u::this_actor::get_pid(), "smpi_replay_run_finalize",
+                     new simgrid::instr::NoOpTIData("finalize"));
 
   smpi_process()->finalize();
 
-  TRACE_smpi_comm_out(Actor::self()->getPid());
-  TRACE_smpi_finalize(Actor::self()->getPid());
+  TRACE_smpi_comm_out(simgrid::s4u::this_actor::get_pid());
+  TRACE_smpi_finalize(simgrid::s4u::this_actor::get_pid());
 }
 
 /** @brief chain a replay initialization and a replay start */