Logo AND Algorithmique Numérique Distribuée

Public GIT Repository
Merge branch 'master' of https://framagit.org/simgrid/simgrid
[simgrid.git] / src / smpi / internals / smpi_actor.cpp
1 /* Copyright (c) 2009-2019. The SimGrid Team. All rights reserved.          */
2
3 /* This program is free software; you can redistribute it and/or modify it
4  * under the terms of the license (GNU LGPL) which comes with this package. */
5
6 #include "src/smpi/include/smpi_actor.hpp"
7 #include "mc/mc.h"
8 #include "smpi_comm.hpp"
9 #include "smpi_info.hpp"
10 #include "src/mc/mc_replay.hpp"
11 #include "src/simix/smx_private.hpp"
12
13 #if HAVE_PAPI
14 #include "papi.h"
15 extern std::string papi_default_config_name;
16 #endif
17
18 XBT_LOG_NEW_DEFAULT_SUBCATEGORY(smpi_process, smpi, "Logging specific to SMPI (kernel)");
19
20 namespace simgrid {
21 namespace smpi {
// Slot for attaching an ActorExt to every s4u::Actor; initialized on first ActorExt construction.
simgrid::xbt::Extension<simgrid::s4u::Actor, ActorExt> ActorExt::EXTENSION_ID;
23
/** @brief Builds the SMPI state attached to @p actor: mailboxes, timer and (optionally) PAPI counters. */
ActorExt::ActorExt(s4u::Actor* actor) : actor_(actor)
{
  // Lazily register the extension slot on s4u::Actor the first time an ActorExt is created.
  if (not simgrid::smpi::ActorExt::EXTENSION_ID.valid())
    simgrid::smpi::ActorExt::EXTENSION_ID = simgrid::s4u::Actor::extension_create<simgrid::smpi::ActorExt>();

  // Two per-process mailboxes keyed by pid: one for regular messages, one for "small" ones.
  mailbox_         = s4u::Mailbox::by_name("SMPI-" + std::to_string(actor_->get_pid()));
  mailbox_small_   = s4u::Mailbox::by_name("small-" + std::to_string(actor_->get_pid()));
  mailboxes_mutex_ = s4u::Mutex::create();
  timer_           = xbt_os_timer_new();
  state_           = SmpiProcessState::UNINITIALIZED;
  info_env_        = MPI_INFO_NULL;
  // The OS timer measures real (wall-clock) time, which varies between runs: hide it from
  // the model checker so it does not produce spurious state differences.
  if (MC_is_active())
    MC_ignore_heap(timer_, xbt_os_timer_size());

#if HAVE_PAPI
  if (not smpi_cfg_papi_events_file().empty()) {
    // TODO: Implement host/process/thread based counters. This implementation
    // just always takes the values passed via "default", like this:
    // "default:COUNTER1:COUNTER2:COUNTER3;".
    auto it = units2papi_setup.find(papi_default_config_name);
    if (it != units2papi_setup.end()) {
      papi_event_set_    = it->second.event_set;
      papi_counter_data_ = it->second.counter_data;
      XBT_DEBUG("Setting PAPI set for process %li", actor->get_pid());
    } else {
      // No configuration found for this unit: mark the event set as unused.
      papi_event_set_ = PAPI_NULL;
      XBT_DEBUG("No PAPI set for process %li", actor->get_pid());
    }
  }
#endif
}
55
/** @brief Releases the MPI objects and the timer owned by this process. */
ActorExt::~ActorExt()
{
  // unref/destroy handle the reference counting of these project-managed objects.
  if (info_env_ != MPI_INFO_NULL)
    simgrid::smpi::Info::unref(info_env_);
  if (comm_self_ != MPI_COMM_NULL)
    simgrid::smpi::Comm::destroy(comm_self_);
  if (comm_intra_ != MPI_COMM_NULL)
    simgrid::smpi::Comm::destroy(comm_intra_);
  xbt_os_timer_free(timer_);
}
66
/** @brief Prepares the current process for termination. */
void ActorExt::finalize()
{
  state_ = SmpiProcessState::FINALIZED;
  XBT_DEBUG("<%ld> Process left the game", actor_->get_pid());

  // Undo the registration done in ActorExt::init().
  smpi_deployment_unregister_process(instance_id_);
}
75
76 /** @brief Check if a process is finalized */
77 int ActorExt::finalized()
78 {
79   return (state_ == SmpiProcessState::FINALIZED);
80 }
81
82 /** @brief Check if a process is partially initialized already */
83 int ActorExt::initializing()
84 {
85   return (state_ == SmpiProcessState::INITIALIZING);
86 }
87
88 /** @brief Check if a process is initialized */
89 int ActorExt::initialized()
90 {
91   // TODO cheinrich: Check if we still need this. This should be a global condition, not for a
92   // single process ... ?
93   return (state_ == SmpiProcessState::INITIALIZED);
94 }
95
96 /** @brief Mark a process as initialized (=MPI_Init called) */
97 void ActorExt::mark_as_initialized()
98 {
99   if (state_ != SmpiProcessState::FINALIZED)
100     state_ = SmpiProcessState::INITIALIZED;
101 }
102
103 void ActorExt::set_replaying(bool value)
104 {
105   if (state_ != SmpiProcessState::FINALIZED)
106     replaying_ = value;
107 }
108
/** @brief Whether this process is replaying a trace (see set_replaying()). */
bool ActorExt::replaying()
{
  return replaying_;
}
113
/** @brief The s4u actor this extension is attached to. */
s4u::ActorPtr ActorExt::get_actor()
{
  return actor_;
}
118
/**
 * @brief Returns a structure that stores the location (filename + linenumber) of the last calls to MPI_* functions.
 *
 * The returned pointer aliases a member of this ActorExt and stays valid for its whole lifetime.
 *
 * @see smpi_trace_set_call_location
 */
smpi_trace_call_location_t* ActorExt::call_location()
{
  return &trace_call_loc_;
}
128
/** @brief Stores the privatization region of this process (set under the MMAP privatization scheme, see init()). */
void ActorExt::set_privatized_region(smpi_privatization_region_t region)
{
  privatized_region_ = region;
}
133
/** @brief Privatization region previously stored with set_privatized_region(). */
smpi_privatization_region_t ActorExt::privatized_region()
{
  return privatized_region_;
}
138
139 MPI_Comm ActorExt::comm_world()
140 {
141   return comm_world_ == nullptr ? MPI_COMM_NULL : *comm_world_;
142 }
143
/** @brief Mutex associated with this process's mailboxes (created in the constructor). */
s4u::MutexPtr ActorExt::mailboxes_mutex()
{
  return mailboxes_mutex_;
}
148
#if HAVE_PAPI
/** @brief PAPI event set selected for this process in the constructor. */
int ActorExt::papi_event_set()
{
  return papi_event_set_;
}

/** @brief Mutable access to this process's PAPI counter data. */
papi_counter_t& ActorExt::papi_counters()
{
  return papi_counter_data_;
}
#endif
160
/** @brief OS timer owned by this process (allocated in the constructor, freed in the destructor). */
xbt_os_timer_t ActorExt::timer()
{
  return timer_;
}
165
/** @brief Records the current simulated clock as the reference point for simulated_elapsed(). */
void ActorExt::simulated_start()
{
  simulated_ = SIMIX_get_clock();
}
170
/** @brief Simulated time elapsed since the last call to simulated_start(). */
double ActorExt::simulated_elapsed()
{
  return SIMIX_get_clock() - simulated_;
}
175
/** @brief Lazily builds and returns MPI_COMM_SELF: a one-member communicator mapping this actor to rank 0. */
MPI_Comm ActorExt::comm_self()
{
  if (comm_self_ == MPI_COMM_NULL) {
    // Ownership of the group is transferred to the Comm; both are destroyed in ~ActorExt().
    MPI_Group group = new Group(1);
    comm_self_      = new Comm(group, nullptr);
    group->set_mapping(actor_, 0);
  }
  return comm_self_;
}
185
/** @brief Lazily builds and returns this process's MPI_INFO_ENV object (unref'd in ~ActorExt()). */
MPI_Info ActorExt::info_env()
{
  if (info_env_==MPI_INFO_NULL)
    info_env_=new Info();
  return info_env_;
}
192
/** @brief Communicator previously stored with set_comm_intra() (MPI_COMM_NULL if none). */
MPI_Comm ActorExt::comm_intra()
{
  return comm_intra_;
}
197
/** @brief Stores @p comm as this process's intra communicator; it is destroyed with the ActorExt. */
void ActorExt::set_comm_intra(MPI_Comm comm)
{
  comm_intra_ = comm;
}
202
/** @brief Sets the sampling flag read back by sampling(). */
void ActorExt::set_sampling(int s)
{
  sampling_ = s;
}
207
/** @brief Current sampling flag (see set_sampling()). */
int ActorExt::sampling()
{
  return sampling_;
}
212
/** @brief Per-process SMPI setup, run when entering MPI_Init: privatization, deployment registration, comm world. */
void ActorExt::init()
{
  xbt_assert(smpi_get_universe_size() != 0, "SimGrid was not initialized properly before entering MPI_Init. "
                                            "Aborting, please check compilation process and use smpirun.");

  simgrid::s4u::Actor* self = simgrid::s4u::Actor::self();
  // cheinrich: I'm not sure what the impact of the SMPI_switch_data_segment on this call is. I moved
  // this up here so that I can set the privatized region before the switch.
  ActorExt* ext = smpi_process();
  // if we are in MPI_Init and argc handling has already been done.
  if (ext->initialized())
    return;

  if (smpi_cfg_privatization() == SmpiPrivStrategies::MMAP) {
    /* Now using the segment index of this process  */
    ext->set_privatized_region(smpi_init_global_memory_segment_process());
    /* Done at the process's creation */
    SMPI_switch_data_segment(self);
  }

  // The instance id and rank are carried as actor properties by the deployment.
  ext->instance_id_ = self->get_property("instance_id");
  const int rank    = xbt_str_parse_int(self->get_property("rank"), "Cannot parse rank");

  ext->state_ = SmpiProcessState::INITIALIZING;
  smpi_deployment_register_process(ext->instance_id_, rank, self);

  // comm_world_ points at the communicator shared by the whole deployment instance.
  ext->comm_world_ = smpi_deployment_comm_world(ext->instance_id_);

  // set the process attached to the mailbox
  ext->mailbox_small_->set_receiver(ext->actor_);
  XBT_DEBUG("<%ld> SMPI process has been initialized: %p", ext->actor_->get_pid(), ext->actor_);
}
245
/** @brief Saved option-parsing index for this process.
 *  NOTE(review): presumably mirrors libc getopt's global `optind` so each simulated
 *  process keeps its own parsing state — confirm against callers. */
int ActorExt::get_optind()
{
  return optind_;
}
250
/** @brief Stores the option-parsing index returned later by get_optind(). */
void ActorExt::set_optind(int new_optind)
{
  optind_ = new_optind;
}
255
256 void ActorExt::bsend_buffer(void** buf, int* size)
257 {
258   *buf  = bsend_buffer_;
259   *size = bsend_buffer_size_;
260 }
261
262 void ActorExt::set_bsend_buffer(void* buf, int size)
263 {
264   bsend_buffer_     = buf;
265   bsend_buffer_size_= size;
266 }
267
268 } // namespace smpi
269 } // namespace simgrid