SimGrid — Algorithmique Numérique Distribuée (public Git repository)
File: src/smpi/internals/smpi_actor.cpp
Commit: "Remove redundant guard."
1 /* Copyright (c) 2009-2022. The SimGrid Team. All rights reserved.          */
2
3 /* This program is free software; you can redistribute it and/or modify it
4  * under the terms of the license (GNU LGPL) which comes with this package. */
5
6 #include "src/smpi/include/smpi_actor.hpp"
7 #include "mc/mc.h"
8 #include "simgrid/s4u/Engine.hpp"
9 #include "simgrid/s4u/Mutex.hpp"
10 #include "smpi_comm.hpp"
11 #include "smpi_info.hpp"
12 #include "src/mc/mc_replay.hpp"
13 #include "xbt/str.h"
14
15 #if HAVE_PAPI
16 #include "papi.h"
17 #endif
18
19 XBT_LOG_NEW_DEFAULT_SUBCATEGORY(smpi_process, smpi, "Logging specific to SMPI (kernel)");
20
21 namespace simgrid::smpi {
/* Unique slot used to attach an ActorExt to every s4u::Actor; registered lazily by the ActorExt constructor. */
simgrid::xbt::Extension<simgrid::s4u::Actor, ActorExt> ActorExt::EXTENSION_ID;
23
24 ActorExt::ActorExt(s4u::Actor* actor) : actor_(actor)
25 {
26   if (not simgrid::smpi::ActorExt::EXTENSION_ID.valid())
27     simgrid::smpi::ActorExt::EXTENSION_ID = simgrid::s4u::Actor::extension_create<simgrid::smpi::ActorExt>();
28
29   mailbox_         = s4u::Mailbox::by_name("SMPI-" + std::to_string(actor_->get_pid()));
30   mailbox_small_   = s4u::Mailbox::by_name("small-" + std::to_string(actor_->get_pid()));
31   mailboxes_mutex_ = s4u::Mutex::create();
32   timer_           = xbt_os_timer_new();
33   state_           = SmpiProcessState::UNINITIALIZED;
34   info_env_        = MPI_INFO_NULL;
35   MC_ignore_heap(timer_, xbt_os_timer_size());
36
37 #if HAVE_PAPI
38   if (not smpi_cfg_papi_events_file().empty()) {
39     // TODO: Implement host/process/thread based counters. This implementation
40     // just always takes the values passed via "default", like this:
41     // "default:COUNTER1:COUNTER2:COUNTER3;".
42     auto it = units2papi_setup.find("default");
43     if (it != units2papi_setup.end()) {
44       papi_event_set_    = it->second.event_set;
45       papi_counter_data_ = it->second.counter_data;
46       XBT_DEBUG("Setting PAPI set for process %li", actor->get_pid());
47     } else {
48       papi_event_set_ = PAPI_NULL;
49       XBT_DEBUG("No PAPI set for process %li", actor->get_pid());
50     }
51   }
52 #endif
53 }
54
55 ActorExt::~ActorExt()
56 {
57   xbt_os_timer_free(timer_);
58 }
59
60 /** @brief Prepares the current process for termination. */
61 void ActorExt::finalize()
62 {
63   state_ = SmpiProcessState::FINALIZED;
64   XBT_DEBUG("<%ld> Process left the game", actor_->get_pid());
65   if (info_env_ != MPI_INFO_NULL)
66     simgrid::smpi::Info::unref(info_env_);
67   if (comm_self_ != MPI_COMM_NULL)
68     simgrid::smpi::Comm::destroy(comm_self_);
69   if (comm_intra_ != MPI_COMM_NULL)
70     simgrid::smpi::Comm::destroy(comm_intra_);
71   smpi_deployment_unregister_process(instance_id_);
72 }
73
74 /** @brief Check if a process is finalized */
75 int ActorExt::finalized() const
76 {
77   return (state_ == SmpiProcessState::FINALIZED);
78 }
79
80 /** @brief Check if a process is partially initialized already */
81 int ActorExt::initializing() const
82 {
83   return (state_ == SmpiProcessState::INITIALIZING);
84 }
85
86 /** @brief Check if a process is initialized */
87 int ActorExt::initialized() const
88 {
89   // TODO cheinrich: Check if we still need this. This should be a global condition, not for a
90   // single process ... ?
91   return (state_ == SmpiProcessState::INITIALIZED);
92 }
93
94 /** @brief Mark a process as initialized (=MPI_Init called) */
95 void ActorExt::mark_as_initialized()
96 {
97   if (state_ != SmpiProcessState::FINALIZED)
98     state_ = SmpiProcessState::INITIALIZED;
99 }
100
101 /** @brief Mark a process as finalizing (=MPI_Finalize called) */
102 void ActorExt::mark_as_finalizing()
103 {
104   if (state_ != SmpiProcessState::FINALIZED)
105     state_ = SmpiProcessState::FINALIZING;
106 }
107
108 /** @brief Check if a process is finalizing */
109 int ActorExt::finalizing() const
110 {
111   return (state_ == SmpiProcessState::FINALIZING);
112 }
113
114 void ActorExt::set_replaying(bool value)
115 {
116   if (state_ != SmpiProcessState::FINALIZED)
117     replaying_ = value;
118 }
119
120 bool ActorExt::replaying() const
121 {
122   return replaying_;
123 }
124
125 s4u::ActorPtr ActorExt::get_actor()
126 {
127   return actor_;
128 }
129
130 /**
131  * @brief Returns a structure that stores the location (filename + linenumber) of the last calls to MPI_* functions.
132  *
133  * @see smpi_trace_set_call_location
134  */
135 smpi_trace_call_location_t* ActorExt::call_location()
136 {
137   return &trace_call_loc_;
138 }
139
140 void ActorExt::set_privatized_region(smpi_privatization_region_t region)
141 {
142   privatized_region_ = region;
143 }
144
145 smpi_privatization_region_t ActorExt::privatized_region() const
146 {
147   return privatized_region_;
148 }
149
150 MPI_Comm ActorExt::comm_world() const
151 {
152   return comm_world_ == nullptr ? MPI_COMM_NULL : *comm_world_;
153 }
154
155 s4u::MutexPtr ActorExt::mailboxes_mutex() const
156 {
157   return mailboxes_mutex_;
158 }
159
#if HAVE_PAPI
/** @brief Returns the PAPI event set selected for this process (PAPI_NULL if none). */
int ActorExt::papi_event_set() const
{
  return papi_event_set_;
}

/** @brief Gives writable access to this process' PAPI counter data. */
papi_counter_t& ActorExt::papi_counters()
{
  return papi_counter_data_;
}
#endif
171
172 xbt_os_timer_t ActorExt::timer()
173 {
174   return timer_;
175 }
176
177 void ActorExt::simulated_start()
178 {
179   simulated_ = s4u::Engine::get_clock();
180 }
181
182 double ActorExt::simulated_elapsed() const
183 {
184   return s4u::Engine::get_clock() - simulated_;
185 }
186
187 MPI_Comm ActorExt::comm_self()
188 {
189   if (comm_self_ == MPI_COMM_NULL) {
190     auto* group = new Group(1);
191     comm_self_  = new Comm(group, nullptr);
192     comm_self_->set_name("MPI_COMM_SELF");
193     group->set_mapping(actor_->get_pid(), 0);
194   }
195   return comm_self_;
196 }
197
198 MPI_Info ActorExt::info_env()
199 {
200   if (info_env_==MPI_INFO_NULL)
201     info_env_=new Info();
202   return info_env_;
203 }
204
205 MPI_Comm ActorExt::comm_intra()
206 {
207   return comm_intra_;
208 }
209
210 void ActorExt::set_comm_intra(MPI_Comm comm)
211 {
212   comm_intra_ = comm;
213 }
214
215 void ActorExt::set_sampling(int s)
216 {
217   sampling_ = s;
218 }
219
220 int ActorExt::sampling() const
221 {
222   return sampling_;
223 }
224
225 void ActorExt::init()
226 {
227   xbt_assert(smpi_get_universe_size() != 0, "SimGrid was not initialized properly before entering MPI_Init. "
228                                             "Aborting, please check compilation process and use smpirun.");
229
230   ActorExt* ext = smpi_process();
231   // if we are in MPI_Init and argc handling has already been done.
232   if (ext->initialized())
233     return;
234
235   const simgrid::s4u::Actor* self = simgrid::s4u::Actor::self();
236   ext->instance_id_ = self->get_property("instance_id");
237   const int rank = static_cast<int>(xbt_str_parse_int(self->get_property("rank"), "Cannot parse rank"));
238
239   ext->state_ = SmpiProcessState::INITIALIZING;
240   smpi_deployment_register_process(ext->instance_id_, rank, self);
241
242   ext->comm_world_ = smpi_deployment_comm_world(ext->instance_id_);
243
244   // set the process attached to the mailbox
245   ext->mailbox_small_->set_receiver(ext->actor_);
246   XBT_DEBUG("<%ld> SMPI process has been initialized: %p", ext->actor_->get_pid(), ext->actor_);
247 }
248
249 int ActorExt::get_optind() const
250 {
251   return optind_;
252 }
253
254 void ActorExt::set_optind(int new_optind)
255 {
256   optind_ = new_optind;
257 }
258
259 void ActorExt::bsend_buffer(void** buf, int* size)
260 {
261   *buf  = bsend_buffer_;
262   *size = bsend_buffer_size_;
263 }
264
265 int ActorExt::set_bsend_buffer(void* buf, int size)
266 {
267   if(buf!=nullptr && bsend_buffer_!=nullptr)
268     return MPI_ERR_BUFFER;
269   bsend_buffer_     = buf;
270   bsend_buffer_size_= size;
271   return MPI_SUCCESS;
272 }
273
274 } // namespace simgrid::smpi