#include "smpi_process.hpp"
#include "mc/mc.h"
-#include "private.hpp"
-#include "simgrid/s4u/forward.hpp"
#include "smpi_comm.hpp"
-#include "smpi_group.hpp"
#include "src/mc/mc_replay.hpp"
#include "src/msg/msg_private.hpp"
#include "src/simix/smx_private.hpp"
using simgrid::s4u::ActorPtr;
Process::Process(ActorPtr actor, msg_bar_t finalization_barrier)
- : finalization_barrier_(finalization_barrier), process_(actor)
+ : finalization_barrier_(finalization_barrier), actor_(actor)
{
- mailbox_ = simgrid::s4u::Mailbox::byName("SMPI-" + std::to_string(process_->getPid()));
- mailbox_small_ = simgrid::s4u::Mailbox::byName("small-" + std::to_string(process_->getPid()));
+ mailbox_ = simgrid::s4u::Mailbox::by_name("SMPI-" + std::to_string(actor_->get_pid()));
+ mailbox_small_ = simgrid::s4u::Mailbox::by_name("small-" + std::to_string(actor_->get_pid()));
mailboxes_mutex_ = xbt_mutex_init();
timer_ = xbt_os_timer_new();
state_ = SMPI_UNINITIALIZED;
MC_ignore_heap(timer_, xbt_os_timer_size());
#if HAVE_PAPI
- if (xbt_cfg_get_string("smpi/papi-events")[0] != '\0') {
+ if (simgrid::config::get_value<std::string>("smpi/papi-events")[0] != '\0') {
// TODO: Implement host/process/thread based counters. This implementation
// just always takes the values passed via "default", like this:
// "default:COUNTER1:COUNTER2:COUNTER3;".
void Process::set_data(int* argc, char*** argv)
{
instance_id_ = std::string((*argv)[1]);
- comm_world_ = smpi_deployment_comm_world(instance_id_.c_str());
- msg_bar_t barrier = smpi_deployment_finalization_barrier(instance_id_.c_str());
+ comm_world_ = smpi_deployment_comm_world(instance_id_);
+ msg_bar_t barrier = smpi_deployment_finalization_barrier(instance_id_);
if (barrier != nullptr) // don't overwrite the current one if the instance has none
finalization_barrier_ = barrier;
- process_ = simgrid::s4u::Actor::self();
- static_cast<simgrid::msg::ActorExt*>(process_->getImpl()->userdata)->data = this;
+ actor_ = simgrid::s4u::Actor::self();
+ static_cast<simgrid::msg::ActorExt*>(actor_->get_impl()->getUserData())->data = this;
if (*argc > 3) {
memmove(&(*argv)[0], &(*argv)[2], sizeof(char*) * (*argc - 2));
argc_ = argc;
argv_ = argv;
// set the process attached to the mailbox
- mailbox_small_->setReceiver(process_);
- XBT_DEBUG("<%ld> SMPI process has been initialized: %p", process_->getPid(), process_.get());
+ mailbox_small_->set_receiver(actor_);
+ XBT_DEBUG("<%ld> SMPI process has been initialized: %p", actor_->get_pid(), actor_.get());
}
/** @brief Prepares the current process for termination. */
void Process::finalize()
{
state_ = SMPI_FINALIZED;
- XBT_DEBUG("<%ld> Process left the game", process_->getPid());
+ XBT_DEBUG("<%ld> Process left the game", actor_->get_pid());
// This leads to an explosion of the search graph which cannot be reduced:
if(MC_is_active() || MC_record_replay_is_active())
return data_;
}
-ActorPtr Process::process(){
- return process_;
+ActorPtr Process::get_actor()
+{
+ return actor_;
}
/**
/** @brief Returns the simix-level mailbox used for this process's regular messages. */
smx_mailbox_t Process::mailbox()
{
  return mailbox_->get_impl();
}
/** @brief Returns the simix-level mailbox dedicated to this process's small messages. */
smx_mailbox_t Process::mailbox_small()
{
  return mailbox_small_->get_impl();
}
xbt_mutex_t Process::mailboxes_mutex()
if(comm_self_==MPI_COMM_NULL){
MPI_Group group = new Group(1);
comm_self_ = new Comm(group, nullptr);
- group->set_mapping(process_, 0);
+ group->set_mapping(actor_, 0);
}
return comm_self_;
}
return sampling_;
}
-msg_bar_t Process::finalization_barrier(){
- return finalization_barrier_;
-}
-
void Process::init(int *argc, char ***argv){
if (smpi_process_count() == 0) {
}
if (argc != nullptr && argv != nullptr) {
simgrid::s4u::ActorPtr proc = simgrid::s4u::Actor::self();
- proc->getImpl()->context->set_cleanup(&MSG_process_cleanup_from_SIMIX);
+ proc->get_impl()->context->set_cleanup(&MSG_process_cleanup_from_SIMIX);
char* instance_id = (*argv)[1];
try {
// cheinrich: I'm not sure what the impact of the SMPI_switch_data_segment on this call is. I moved
// this up here so that I can set the privatized region before the switch.
Process* process = smpi_process_remote(proc);
- if(smpi_privatize_global_variables == SMPI_PRIVATIZE_MMAP){
+ if (smpi_privatize_global_variables == SmpiPrivStrategies::Mmap) {
/* Now using the segment index of this process */
process->set_privatized_region(smpi_init_global_memory_segment_process());
/* Done at the process's creation */
"Please use MPI_Init(&argc, &argv) as usual instead.");
}
+int Process::get_optind(){
+ return optind;
+}
+void Process::set_optind(int new_optind){
+ optind=new_optind;
+}
+
}
}