[simgrid.git] src/smpi/internals/smpi_actor.cpp
/* Copyright (c) 2009-2021. The SimGrid Team. All rights reserved.          */

/* This program is free software; you can redistribute it and/or modify it
 * under the terms of the license (GNU LGPL) which comes with this package. */

#include "src/smpi/include/smpi_actor.hpp"
#include "mc/mc.h"
#include "simgrid/s4u/Engine.hpp"
#include "simgrid/s4u/Mutex.hpp"
#include "smpi_comm.hpp"
#include "smpi_info.hpp"
#include "src/mc/mc_replay.hpp"
#include "src/simix/smx_private.hpp"

#if HAVE_PAPI
#include "papi.h"
#endif

XBT_LOG_NEW_DEFAULT_SUBCATEGORY(smpi_process, smpi, "Logging specific to SMPI (kernel)");

namespace simgrid {
namespace smpi {
simgrid::xbt::Extension<simgrid::s4u::Actor, ActorExt> ActorExt::EXTENSION_ID;

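/* Each MPI process is backed by an s4u::Actor; ActorExt is the per-actor state that SMPI
 * attaches to it through the s4u extension mechanism (EXTENSION_ID above). The constructor
 * lazily registers the extension type, then creates this rank's mailboxes (a dedicated one
 * for small messages plus the regular one), its mutex, and the timer used for benchmarking. */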
ActorExt::ActorExt(s4u::Actor* actor) : actor_(actor)
{
  if (not simgrid::smpi::ActorExt::EXTENSION_ID.valid())
    simgrid::smpi::ActorExt::EXTENSION_ID = simgrid::s4u::Actor::extension_create<simgrid::smpi::ActorExt>();

  mailbox_         = s4u::Mailbox::by_name("SMPI-" + std::to_string(actor_->get_pid()));
  mailbox_small_   = s4u::Mailbox::by_name("small-" + std::to_string(actor_->get_pid()));
  mailboxes_mutex_ = s4u::Mutex::create();
  timer_           = xbt_os_timer_new();
  state_           = SmpiProcessState::UNINITIALIZED;
  info_env_        = MPI_INFO_NULL;
  if (MC_is_active())
    MC_ignore_heap(timer_, xbt_os_timer_size());

#if HAVE_PAPI
  if (not smpi_cfg_papi_events_file().empty()) {
    // TODO: Implement host/process/thread based counters. This implementation
    // just always takes the values passed via "default", like this:
    // "default:COUNTER1:COUNTER2:COUNTER3;".
    auto it = units2papi_setup.find("default");
    if (it != units2papi_setup.end()) {
      papi_event_set_    = it->second.event_set;
      papi_counter_data_ = it->second.counter_data;
      XBT_DEBUG("Setting PAPI set for process %ld", actor->get_pid());
    } else {
      papi_event_set_ = PAPI_NULL;
      XBT_DEBUG("No PAPI set for process %ld", actor->get_pid());
    }
  }
#endif
}
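
/* A minimal retrieval sketch (assuming the usual s4u extension accessors; the local
 * variables are hypothetical): once attached, the extension is fetched from the actor
 * rather than stored separately, e.g.
 *
 *   simgrid::s4u::Actor* actor = simgrid::s4u::Actor::self();
 *   auto* ext = actor->extension<simgrid::smpi::ActorExt>();
 *   if (ext != nullptr && ext->initialized())
 *     XBT_DEBUG("Actor %ld already ran MPI_Init", actor->get_pid());
 */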

ActorExt::~ActorExt()
{
  xbt_os_timer_free(timer_);
}

/** @brief Prepares the current process for termination. */
void ActorExt::finalize()
{
  state_ = SmpiProcessState::FINALIZED;
  XBT_DEBUG("<%ld> Process left the game", actor_->get_pid());
  if (info_env_ != MPI_INFO_NULL)
    simgrid::smpi::Info::unref(info_env_);
  if (comm_self_ != MPI_COMM_NULL)
    simgrid::smpi::Comm::destroy(comm_self_);
  if (comm_intra_ != MPI_COMM_NULL)
    simgrid::smpi::Comm::destroy(comm_intra_);
  smpi_deployment_unregister_process(instance_id_);
}

/** @brief Check if a process is finalized */
int ActorExt::finalized() const
{
  return (state_ == SmpiProcessState::FINALIZED);
}

/** @brief Check if a process is partially initialized already */
int ActorExt::initializing() const
{
  return (state_ == SmpiProcessState::INITIALIZING);
}

/** @brief Check if a process is initialized */
int ActorExt::initialized() const
{
  // TODO cheinrich: Check if we still need this. This should be a global condition, not for a
  // single process ... ?
  return (state_ == SmpiProcessState::INITIALIZED);
}

/** @brief Mark a process as initialized (=MPI_Init called) */
void ActorExt::mark_as_initialized()
{
  if (state_ != SmpiProcessState::FINALIZED)
    state_ = SmpiProcessState::INITIALIZED;
}

void ActorExt::set_replaying(bool value)
{
  if (state_ != SmpiProcessState::FINALIZED)
    replaying_ = value;
}

bool ActorExt::replaying() const
{
  return replaying_;
}

s4u::ActorPtr ActorExt::get_actor()
{
  return actor_;
}

/**
 * @brief Returns a structure that stores the location (filename + line number) of the last calls to MPI_* functions.
 *
 * @see smpi_trace_set_call_location
 */
smpi_trace_call_location_t* ActorExt::call_location()
{
  return &trace_call_loc_;
}

void ActorExt::set_privatized_region(smpi_privatization_region_t region)
{
  privatized_region_ = region;
}

smpi_privatization_region_t ActorExt::privatized_region() const
{
  return privatized_region_;
}

MPI_Comm ActorExt::comm_world() const
{
  return comm_world_ == nullptr ? MPI_COMM_NULL : *comm_world_;
}

s4u::MutexPtr ActorExt::mailboxes_mutex() const
{
  return mailboxes_mutex_;
}

#if HAVE_PAPI
int ActorExt::papi_event_set() const
{
  return papi_event_set_;
}

papi_counter_t& ActorExt::papi_counters()
{
  return papi_counter_data_;
}
#endif

xbt_os_timer_t ActorExt::timer()
{
  return timer_;
}

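/* simulated_start() and simulated_elapsed() bracket a region of simulated time: the former
 * records the current simulated clock, the latter returns how much simulated time has passed
 * since then, e.g. to report how long a rank spent inside the simulation. */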
void ActorExt::simulated_start()
{
  simulated_ = s4u::Engine::get_clock();
}

double ActorExt::simulated_elapsed() const
{
  return s4u::Engine::get_clock() - simulated_;
}

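/** @brief Returns MPI_COMM_SELF, creating it lazily on first use: a communicator over a
 *  single-process group in which this actor is rank 0. */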
MPI_Comm ActorExt::comm_self()
{
  if (comm_self_ == MPI_COMM_NULL) {
    auto* group = new Group(1);
    comm_self_  = new Comm(group, nullptr);
    comm_self_->set_name("MPI_COMM_SELF");
    group->set_mapping(actor_->get_pid(), 0);
  }
  return comm_self_;
}

MPI_Info ActorExt::info_env()
{
  if (info_env_ == MPI_INFO_NULL)
    info_env_ = new Info();
  return info_env_;
}

MPI_Comm ActorExt::comm_intra()
{
  return comm_intra_;
}

void ActorExt::set_comm_intra(MPI_Comm comm)
{
  comm_intra_ = comm;
}

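/* The sampling flag is toggled around benchmarked regions (e.g. the SMPI_SAMPLE_* macros):
 * while it is set, SMPI knows the host code is being sampled rather than fully re-executed,
 * and can adjust its time accounting accordingly. */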
void ActorExt::set_sampling(int s)
{
  sampling_ = s;
}

int ActorExt::sampling() const
{
  return sampling_;
}

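/** @brief Per-process MPI initialization, run when entering MPI_Init: reads the "instance_id"
 *  and "rank" properties from the deployment, registers the process in its instance, and binds
 *  the small-message mailbox to this actor. */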
void ActorExt::init()
{
  xbt_assert(smpi_get_universe_size() != 0, "SimGrid was not initialized properly before entering MPI_Init. "
                                            "Aborting, please check compilation process and use smpirun.");

  ActorExt* ext = smpi_process();
  // Nothing to do if we are in MPI_Init and argc handling has already been done.
  if (ext->initialized())
    return;

  const simgrid::s4u::Actor* self = simgrid::s4u::Actor::self();
  ext->instance_id_ = self->get_property("instance_id");
  const int rank    = xbt_str_parse_int(self->get_property("rank"), "Cannot parse rank");

  ext->state_ = SmpiProcessState::INITIALIZING;
  smpi_deployment_register_process(ext->instance_id_, rank, self);

  ext->comm_world_ = smpi_deployment_comm_world(ext->instance_id_);

  // Set this actor as the receiver attached to its small-message mailbox.
  ext->mailbox_small_->set_receiver(ext->actor_);
  XBT_DEBUG("<%ld> SMPI process has been initialized: %p", ext->actor_->get_pid(), ext->actor_);
}
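
/* For illustration only (hypothetical host, function, and instance names): a deployment
 * entry that would provide the "instance_id" and "rank" properties read above. smpirun
 * normally generates an equivalent deployment file for you.
 *
 *   <actor host="node-0.example.org" function="worker">
 *     <prop id="instance_id" value="MPI-0"/>
 *     <prop id="rank" value="0"/>
 *   </actor>
 */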

int ActorExt::get_optind() const
{
  return optind_;
}

void ActorExt::set_optind(int new_optind)
{
  optind_ = new_optind;
}

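/* The two functions below back the MPI buffered-send machinery (MPI_Buffer_attach /
 * MPI_Buffer_detach): at most one user buffer can be attached per process at a time, so
 * attaching while a buffer is already present is an error (MPI_ERR_BUFFER). */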
void ActorExt::bsend_buffer(void** buf, int* size)
{
  *buf  = bsend_buffer_;
  *size = bsend_buffer_size_;
}

int ActorExt::set_bsend_buffer(void* buf, int size)
{
  if (buf != nullptr && bsend_buffer_ != nullptr)
    return MPI_ERR_BUFFER;
  bsend_buffer_      = buf;
  bsend_buffer_size_ = size;
  return MPI_SUCCESS;
}

} // namespace smpi
} // namespace simgrid