From: Martin Quinson
Date: Wed, 1 Feb 2012 16:36:58 +0000 (+0100)
Subject: Merge branch 'master' of scm.gforge.inria.fr:/gitroot/simgrid/simgrid
X-Git-Tag: exp_20120216~97
X-Git-Url: http://info.iut-bm.univ-fcomte.fr/pub/gitweb/simgrid.git/commitdiff_plain/276b2d9b83a3e3afcd449fb53a2dd8d662070036?hp=1086dc93eea1d88ee42e9317fda2278c784d0b60

Merge branch 'master' of scm.gforge.inria.fr:/gitroot/simgrid/simgrid
---

diff --git a/src/msg/msg_mailbox.c b/src/msg/msg_mailbox.c
index 4eb5053183..c03ab29de2 100644
--- a/src/msg/msg_mailbox.c
+++ b/src/msg/msg_mailbox.c
@@ -186,7 +186,7 @@ MSG_mailbox_put_with_timeout(msg_mailbox_t mailbox, m_task_t task,
     t_simdata->isused = 0;
   }
-
+  t_simdata->comm = NULL;
   p_simdata->waiting_task = NULL;
 #ifdef HAVE_TRACING
   if (call_end)
diff --git a/src/surf/network.c b/src/surf/network.c
index 1c881426b3..0779efeadf 100644
--- a/src/surf/network.c
+++ b/src/surf/network.c
@@ -373,7 +373,7 @@ void net_action_recycle(surf_action_t action)
 }

 #ifdef HAVE_LATENCY_BOUND_TRACKING
-static int net_get_link_latency_limited(surf_action_t action)
+int net_get_link_latency_limited(surf_action_t action)
 {
   return action->latency_limited;
 }
diff --git a/src/xbt/parmap.c b/src/xbt/parmap.c
index 13c7baf7d0..41bf72392f 100644
--- a/src/xbt/parmap.c
+++ b/src/xbt/parmap.c
@@ -25,40 +25,41 @@ XBT_LOG_NEW_DEFAULT_SUBCATEGORY(xbt_parmap, xbt, "parmap: parallel map");
 XBT_LOG_NEW_SUBCATEGORY(xbt_parmap_unit, xbt_parmap, "parmap unit testing");

 typedef enum {
-  XBT_PARMAP_WORK = 0,
+  XBT_PARMAP_WORK,
   XBT_PARMAP_DESTROY
 } e_xbt_parmap_flag_t;

 static void xbt_parmap_set_mode(xbt_parmap_t parmap, e_xbt_parmap_mode_t mode);
 static void *xbt_parmap_worker_main(void *parmap);
+static void xbt_parmap_work(xbt_parmap_t parmap);

-static void xbt_parmap_posix_start(xbt_parmap_t parmap);
-static void xbt_parmap_posix_end(xbt_parmap_t parmap);
-static void xbt_parmap_posix_signal(xbt_parmap_t parmap);
-static void xbt_parmap_posix_wait(xbt_parmap_t parmap);
+static void xbt_parmap_posix_master_wait(xbt_parmap_t parmap);
+static void xbt_parmap_posix_worker_signal(xbt_parmap_t parmap);
+static void xbt_parmap_posix_master_signal(xbt_parmap_t parmap);
+static void xbt_parmap_posix_worker_wait(xbt_parmap_t parmap, unsigned round);

 #ifdef HAVE_FUTEX_H
-static void xbt_parmap_futex_start(xbt_parmap_t parmap);
-static void xbt_parmap_futex_end(xbt_parmap_t parmap);
-static void xbt_parmap_futex_signal(xbt_parmap_t parmap);
-static void xbt_parmap_futex_wait(xbt_parmap_t parmap);
-static void futex_wait(int *uaddr, int val);
-static void futex_wake(int *uaddr, int val);
+static void xbt_parmap_futex_master_wait(xbt_parmap_t parmap);
+static void xbt_parmap_futex_worker_signal(xbt_parmap_t parmap);
+static void xbt_parmap_futex_master_signal(xbt_parmap_t parmap);
+static void xbt_parmap_futex_worker_wait(xbt_parmap_t parmap, unsigned round);
+static void futex_wait(unsigned *uaddr, unsigned val);
+static void futex_wake(unsigned *uaddr, unsigned val);
 #endif

-static void xbt_parmap_busy_start(xbt_parmap_t parmap);
-static void xbt_parmap_busy_end(xbt_parmap_t parmap);
-static void xbt_parmap_busy_signal(xbt_parmap_t parmap);
-static void xbt_parmap_busy_wait(xbt_parmap_t parmap);
+static void xbt_parmap_busy_master_wait(xbt_parmap_t parmap);
+static void xbt_parmap_busy_worker_signal(xbt_parmap_t parmap);
+static void xbt_parmap_busy_master_signal(xbt_parmap_t parmap);
+static void xbt_parmap_busy_worker_wait(xbt_parmap_t parmap, unsigned round);

 /**
  * \brief Parallel map structure
  */
 typedef struct s_xbt_parmap {
   e_xbt_parmap_flag_t status;      /**< is the parmap active or being destroyed? */
-  int work;                        /**< index of the current round (1 is the first) */
-  int done;                        /**< number of rounds already done (futexes only) */
-  unsigned int thread_counter;     /**< number of threads currently working */
+  unsigned work;                   /**< index of the current round */
+  unsigned thread_counter;         /**< number of workers that have done the work */
+  unsigned int num_workers;        /**< total number of worker threads including the controller */
   void_f_pvoid_t fun;              /**< function to run in parallel on each element of data */
   xbt_dynar_t data;                /**< parameters to pass to fun in parallel */
@@ -72,10 +73,10 @@ typedef struct s_xbt_parmap {

   /* fields that depend on the synchronization mode */
   e_xbt_parmap_mode_t mode;        /**< synchronization mode */
-  void (*start_f)(xbt_parmap_t);   /**< initializes the worker threads */
-  void (*end_f)(xbt_parmap_t);     /**< finalizes the worker threads */
-  void (*signal_f)(xbt_parmap_t);  /**< wakes the workers threads to process tasks */
-  void (*wait_f)(xbt_parmap_t);    /**< waits for more work */
+  void (*master_wait_f)(xbt_parmap_t);    /**< wait for the workers to have done the work */
+  void (*worker_signal_f)(xbt_parmap_t);  /**< signal the master that a worker has done the work */
+  void (*master_signal_f)(xbt_parmap_t);  /**< wakes the workers threads to process tasks */
+  void (*worker_wait_f)(xbt_parmap_t, unsigned);  /**< waits for more work */
 } s_xbt_parmap_t;

 /**
@@ -99,11 +100,10 @@ xbt_parmap_t xbt_parmap_new(unsigned int num_workers, e_xbt_parmap_mode_t mode)
   xbt_parmap_set_mode(parmap, mode);

   /* Create the pool of worker threads */
-  for (i = 0; i < num_workers - 1; i++) {
+  for (i = 1; i < num_workers; i++) {
     worker = xbt_os_thread_create(NULL, xbt_parmap_worker_main, parmap, NULL);
     xbt_os_thread_detach(worker);
   }
-  parmap->start_f(parmap);
   return parmap;
 }

@@ -118,7 +118,8 @@ void xbt_parmap_destroy(xbt_parmap_t parmap)
   }

   parmap->status = XBT_PARMAP_DESTROY;
-  parmap->signal_f(parmap);
+  parmap->master_signal_f(parmap);
+  parmap->master_wait_f(parmap);

   xbt_os_cond_destroy(parmap->ready_cond);
   xbt_os_mutex_destroy(parmap->ready_mutex);
@@ -147,10 +148,10 @@ static void xbt_parmap_set_mode(xbt_parmap_t parmap, e_xbt_parmap_mode_t mode)
   switch (mode) {

   case XBT_PARMAP_POSIX:
-    parmap->start_f = xbt_parmap_posix_start;
-    parmap->end_f = xbt_parmap_posix_end;
-    parmap->signal_f = xbt_parmap_posix_signal;
-    parmap->wait_f = xbt_parmap_posix_wait;
+    parmap->master_wait_f = xbt_parmap_posix_master_wait;
+    parmap->worker_signal_f = xbt_parmap_posix_worker_signal;
+    parmap->master_signal_f = xbt_parmap_posix_master_signal;
+    parmap->worker_wait_f = xbt_parmap_posix_worker_wait;

     parmap->ready_cond = xbt_os_cond_init();
     parmap->ready_mutex = xbt_os_mutex_init();
@@ -161,10 +162,10 @@ static void xbt_parmap_set_mode(xbt_parmap_t parmap, e_xbt_parmap_mode_t mode)

   case XBT_PARMAP_FUTEX:
 #ifdef HAVE_FUTEX_H
-    parmap->start_f = xbt_parmap_futex_start;
-    parmap->end_f = xbt_parmap_futex_end;
-    parmap->signal_f = xbt_parmap_futex_signal;
-    parmap->wait_f = xbt_parmap_futex_wait;
+    parmap->master_wait_f = xbt_parmap_futex_master_wait;
+    parmap->worker_signal_f = xbt_parmap_futex_worker_signal;
+    parmap->master_signal_f = xbt_parmap_futex_master_signal;
+    parmap->worker_wait_f = xbt_parmap_futex_worker_wait;

     xbt_os_cond_destroy(parmap->ready_cond);
     xbt_os_mutex_destroy(parmap->ready_mutex);
@@ -176,10 +177,10 @@ static void xbt_parmap_set_mode(xbt_parmap_t parmap, e_xbt_parmap_mode_t mode)
 #endif

   case XBT_PARMAP_BUSY_WAIT:
-    parmap->start_f = xbt_parmap_busy_start;
-    parmap->end_f = xbt_parmap_busy_end;
-    parmap->signal_f = xbt_parmap_busy_signal;
-    parmap->wait_f = xbt_parmap_busy_wait;
+    parmap->master_wait_f = xbt_parmap_busy_master_wait;
+    parmap->worker_signal_f = xbt_parmap_busy_worker_signal;
+    parmap->master_signal_f = xbt_parmap_busy_master_signal;
+    parmap->worker_wait_f = xbt_parmap_busy_worker_wait;

     xbt_os_cond_destroy(parmap->ready_cond);
     xbt_os_mutex_destroy(parmap->ready_mutex);
@@ -205,7 +206,9 @@ void xbt_parmap_apply(xbt_parmap_t parmap, void_f_pvoid_t fun, xbt_dynar_t data)
   parmap->fun = fun;
   parmap->data = data;
   parmap->index = 0;
-  parmap->signal_f(parmap);
+  parmap->master_signal_f(parmap);
+  xbt_parmap_work(parmap);
+  parmap->master_wait_f(parmap);
   XBT_DEBUG("Job done");
 }

@@ -225,6 +228,14 @@ void* xbt_parmap_next(xbt_parmap_t parmap)
   return NULL;
 }

+static void xbt_parmap_work(xbt_parmap_t parmap)
+{
+  unsigned index;
+  while ((index = __sync_fetch_and_add(&parmap->index, 1))
+         < xbt_dynar_length(parmap->data))
+    parmap->fun(xbt_dynar_get_as(parmap->data, index, void*));
+}
+
 /**
  * \brief Main function of a worker thread.
  * \param arg the parmap
@@ -232,40 +243,38 @@ void* xbt_parmap_next(xbt_parmap_t parmap)
 static void *xbt_parmap_worker_main(void *arg)
 {
   xbt_parmap_t parmap = (xbt_parmap_t) arg;
+  unsigned round = 0;

   XBT_DEBUG("New worker thread created");

   /* Worker's main loop */
   while (1) {
-    parmap->wait_f(parmap);
+    parmap->worker_wait_f(parmap, ++round);

     if (parmap->status == XBT_PARMAP_WORK) {
       XBT_DEBUG("Worker got a job");

-      void* work = xbt_parmap_next(parmap);
-      while (work != NULL) {
-        parmap->fun(work);
-        work = xbt_parmap_next(parmap);
-      }
+      xbt_parmap_work(parmap);
+      parmap->worker_signal_f(parmap);

       XBT_DEBUG("Worker has finished");

     /* We are destroying the parmap */
     } else {
-      parmap->end_f(parmap);
+      parmap->worker_signal_f(parmap);
       return NULL;
     }
   }
 }

 #ifdef HAVE_FUTEX_H
-static void futex_wait(int *uaddr, int val)
+static void futex_wait(unsigned *uaddr, unsigned val)
 {
   XBT_VERB("Waiting on futex %p", uaddr);
   syscall(SYS_futex, uaddr, FUTEX_WAIT_PRIVATE, val, NULL, NULL, 0);
 }

-static void futex_wake(int *uaddr, int val)
+static void futex_wake(unsigned *uaddr, unsigned val)
 {
   XBT_VERB("Waking futex %p", uaddr);
   syscall(SYS_futex, uaddr, FUTEX_WAKE_PRIVATE, val, NULL, NULL, 0);
@@ -279,13 +288,14 @@ static void futex_wake(int *uaddr, int val)
  *
  * \param parmap a parmap
  */
-static void xbt_parmap_posix_start(xbt_parmap_t parmap)
+static void xbt_parmap_posix_master_wait(xbt_parmap_t parmap)
 {
-  unsigned int counter = __sync_fetch_and_add(&parmap->thread_counter, 1);
-  if (counter < parmap->num_workers) {
-    /* wait for all workers to be initialized */
+  xbt_os_mutex_acquire(parmap->done_mutex);
+  if (parmap->thread_counter < parmap->num_workers) {
+    /* wait for all workers to be ready */
     xbt_os_cond_wait(parmap->done_cond, parmap->done_mutex);
   }
+  xbt_os_mutex_release(parmap->done_mutex);
 }

 /**
@@ -296,14 +306,14 @@ static void xbt_parmap_posix_start(xbt_parmap_t parmap)
  *
  * \param parmap a parmap
  */
-static void xbt_parmap_posix_end(xbt_parmap_t parmap)
+static void xbt_parmap_posix_worker_signal(xbt_parmap_t parmap)
 {
-  unsigned int counter = __sync_add_and_fetch(&parmap->thread_counter, 1);
-  XBT_DEBUG("Shutting down worker %d", counter);
-  if (counter == parmap->num_workers) {
+  xbt_os_mutex_acquire(parmap->done_mutex);
+  if (++parmap->thread_counter == parmap->num_workers) {
     /* all workers have finished, wake the controller */
     xbt_os_cond_signal(parmap->done_cond);
   }
+  xbt_os_mutex_release(parmap->done_mutex);
 }

 /**
@@ -313,30 +323,14 @@ static void xbt_parmap_posix_end(xbt_parmap_t parmap)
  *
  * \param parmap a parmap
  */
-static void xbt_parmap_posix_signal(xbt_parmap_t parmap)
+static void xbt_parmap_posix_master_signal(xbt_parmap_t parmap)
 {
-  parmap->thread_counter = 0;
+  xbt_os_mutex_acquire(parmap->ready_mutex);
+  parmap->thread_counter = 1;
   parmap->work++;
-  XBT_DEBUG("Starting work %d", parmap->work);
-
   /* wake all workers */
   xbt_os_cond_broadcast(parmap->ready_cond);
-
-  if (parmap->status == XBT_PARMAP_WORK) {
-    /* also work myself */
-    void* work = xbt_parmap_next(parmap);
-    while (work != NULL) {
-      parmap->fun(work);
-      work = xbt_parmap_next(parmap);
-    }
-  }
-
-  unsigned int counter = __sync_add_and_fetch(&parmap->thread_counter, 1);
-  if (counter < parmap->num_workers) {
-    /* some workers have not finished yet */
-    XBT_DEBUG("Some workers have not finished yet, waiting for them");
-    xbt_os_cond_wait(parmap->done_cond, parmap->done_mutex);
-  }
+  xbt_os_mutex_release(parmap->ready_mutex);
 }

 /**
@@ -346,23 +340,16 @@ static void xbt_parmap_posix_signal(xbt_parmap_t parmap)
  * when it has no more work to do.
  *
  * \param parmap a parmap
+ * \param round the expected round number
  */
-static void xbt_parmap_posix_wait(xbt_parmap_t parmap)
+static void xbt_parmap_posix_worker_wait(xbt_parmap_t parmap, unsigned round)
 {
-  int work = parmap->work;
-  unsigned int counter = __sync_add_and_fetch(&parmap->thread_counter, 1);
-  if (counter == parmap->num_workers) {
-    /* all workers have finished, wake the controller */
-    parmap->done++;
-    XBT_DEBUG("Last worker has finished, waking the controller");
-    xbt_os_cond_signal(parmap->done_cond);
-  }
-
+  xbt_os_mutex_acquire(parmap->ready_mutex);
   /* wait for more work */
-  XBT_DEBUG("Worker %d waiting for more work", counter);
-  if (parmap->work == work) {
+  if (parmap->work < round) {
     xbt_os_cond_wait(parmap->ready_cond, parmap->ready_mutex);
   }
+  xbt_os_mutex_release(parmap->ready_mutex);
 }

 #ifdef HAVE_FUTEX_H
@@ -373,13 +360,13 @@ static void xbt_parmap_posix_wait(xbt_parmap_t parmap)
  *
  * \param parmap a parmap
  */
-static void xbt_parmap_futex_start(xbt_parmap_t parmap)
+static void xbt_parmap_futex_master_wait(xbt_parmap_t parmap)
 {
-  int myflag = parmap->done;
-  __sync_fetch_and_add(&parmap->thread_counter, 1);
-  if (parmap->thread_counter < parmap->num_workers) {
+  unsigned count = parmap->thread_counter;
+  while (count < parmap->num_workers) {
     /* wait for all workers to be ready */
-    futex_wait(&parmap->done, myflag);
+    futex_wait(&parmap->thread_counter, count);
+    count = parmap->thread_counter;
   }
 }

@@ -391,15 +378,12 @@ static void xbt_parmap_futex_start(xbt_parmap_t parmap)
  *
  * \param parmap a parmap
  */
-static void xbt_parmap_futex_end(xbt_parmap_t parmap)
+static void xbt_parmap_futex_worker_signal(xbt_parmap_t parmap)
 {
-  unsigned int mycount;
-
-  mycount = __sync_add_and_fetch(&parmap->thread_counter, 1);
-  if (mycount == parmap->num_workers) {
+  unsigned count = __sync_add_and_fetch(&parmap->thread_counter, 1);
+  if (count == parmap->num_workers) {
     /* all workers have finished, wake the controller */
-    parmap->done++;
-    futex_wake(&parmap->done, 1);
+    futex_wake(&parmap->thread_counter, 1);
   }
 }

@@ -410,29 +394,12 @@ static void xbt_parmap_futex_end(xbt_parmap_t parmap)
  *
  * \param parmap a parmap
  */
-static void xbt_parmap_futex_signal(xbt_parmap_t parmap)
+static void xbt_parmap_futex_master_signal(xbt_parmap_t parmap)
 {
-  int myflag = parmap->done;
-  parmap->thread_counter = 0;
-  parmap->work++;
-
+  parmap->thread_counter = 1;
+  __sync_add_and_fetch(&parmap->work, 1);
   /* wake all workers */
-  futex_wake(&parmap->work, parmap->num_workers);
-
-  if (parmap->status == XBT_PARMAP_WORK) {
-    /* also work myself */
-    void* work = xbt_parmap_next(parmap);
-    while (work != NULL) {
-      parmap->fun(work);
-      work = xbt_parmap_next(parmap);
-    }
-  }
-
-  unsigned int mycount = __sync_add_and_fetch(&parmap->thread_counter, 1);
-  if (mycount < parmap->num_workers) {
-    /* some workers have not finished yet */
-    futex_wait(&parmap->done, myflag);
-  }
+  futex_wake(&parmap->work, parmap->num_workers - 1);
 }

 /**
@@ -442,22 +409,14 @@ static void xbt_parmap_futex_signal(xbt_parmap_t parmap)
  * when it has no more work to do.
  *
  * \param parmap a parmap
+ * \param round the expected round number
  */
-static void xbt_parmap_futex_wait(xbt_parmap_t parmap)
+static void xbt_parmap_futex_worker_wait(xbt_parmap_t parmap, unsigned round)
 {
-  int myflag;
-  unsigned int mycount;
-
-  myflag = parmap->work;
-  mycount = __sync_add_and_fetch(&parmap->thread_counter, 1);
-  if (mycount == parmap->num_workers) {
-    /* all workers have finished, wake the controller */
-    parmap->done++;
-    futex_wake(&parmap->done, 1);
-  }
-
+  unsigned work = parmap->work;
   /* wait for more work */
-  futex_wait(&parmap->work, myflag);
+  if (work < round)
+    futex_wait(&parmap->work, work);
 }
 #endif

@@ -468,9 +427,8 @@ static void xbt_parmap_futex_wait(xbt_parmap_t parmap)
  *
  * \param parmap a parmap
  */
-static void xbt_parmap_busy_start(xbt_parmap_t parmap)
+static void xbt_parmap_busy_master_wait(xbt_parmap_t parmap)
 {
-  __sync_fetch_and_add(&parmap->thread_counter, 1);
   while (parmap->thread_counter < parmap->num_workers) {
     xbt_os_thread_yield();
   }
@@ -483,7 +441,7 @@ static void xbt_parmap_busy_start(xbt_parmap_t parmap)
  *
  * \param parmap a parmap
  */
-static void xbt_parmap_busy_end(xbt_parmap_t parmap)
+static void xbt_parmap_busy_worker_signal(xbt_parmap_t parmap)
 {
   __sync_add_and_fetch(&parmap->thread_counter, 1);
 }
@@ -495,25 +453,10 @@ static void xbt_parmap_busy_end(xbt_parmap_t parmap)
  *
  * \param parmap a parmap
  */
-static void xbt_parmap_busy_signal(xbt_parmap_t parmap)
+static void xbt_parmap_busy_master_signal(xbt_parmap_t parmap)
 {
-  parmap->thread_counter = 0;
-  parmap->work++;
-
-  if (parmap->status == XBT_PARMAP_WORK) {
-    /* also work myself */
-    void* work = xbt_parmap_next(parmap);
-    while (work != NULL) {
-      parmap->fun(work);
-      work = xbt_parmap_next(parmap);
-    }
-  }
-
-  /* I have finished, wait for the others */
-  __sync_add_and_fetch(&parmap->thread_counter, 1);
-  while (parmap->thread_counter < parmap->num_workers) {
-    xbt_os_thread_yield();
-  }
+  parmap->thread_counter = 1;
+  __sync_add_and_fetch(&parmap->work, 1);
 }

 /**
@@ -523,14 +466,12 @@ static void xbt_parmap_busy_signal(xbt_parmap_t parmap)
  * when it has no more work to do.
  *
  * \param parmap a parmap
+ * \param round the expected round number
  */
-static void xbt_parmap_busy_wait(xbt_parmap_t parmap)
+static void xbt_parmap_busy_worker_wait(xbt_parmap_t parmap, unsigned round)
 {
-  int work = parmap->work;
-  __sync_add_and_fetch(&parmap->thread_counter, 1);
-
   /* wait for more work */
-  while (parmap->work == work) {
+  while (parmap->work < round) {
     xbt_os_thread_yield();
   }
 }
@@ -538,43 +479,148 @@ static void xbt_parmap_busy_wait(xbt_parmap_t parmap)
 #ifdef SIMGRID_TEST
 #include "xbt.h"
 #include "xbt/ex.h"
+#include "xbt/xbt_os_thread.h"
+#include "xbt/xbt_os_time.h"
+#include "gras_config.h"        /* HAVE_FUTEX_H */

 XBT_TEST_SUITE("parmap", "Parallel Map");
 XBT_LOG_EXTERNAL_DEFAULT_CATEGORY(xbt_parmap_unit);

-xbt_parmap_t parmap;
+#ifdef HAVE_FUTEX_H
+#define TEST_PARMAP_SKIP_TEST(mode) 0
+#else
+#define TEST_PARMAP_SKIP_TEST(mode) ((mode) == XBT_PARMAP_FUTEX)
+#endif

-void fun(void *arg);
+#define TEST_PARMAP_VALIDATE_MODE(mode) \
+  if (TEST_PARMAP_SKIP_TEST(mode)) { xbt_test_skip(); return; } else ((void)0)

-void fun(void *arg)
+static void fun_double(void *arg)
 {
-  //XBT_INFO("I'm job %lu", (unsigned long)arg);
+  unsigned *u = arg;
+  *u = 2 * *u + 1;
 }

-XBT_TEST_UNIT("basic", test_parmap_basic, "Basic usage")
+/* Check that the computations are correctly done. */
+static void test_parmap_basic(e_xbt_parmap_mode_t mode)
 {
-  xbt_test_add("Create the parmap");
+  unsigned num_workers;
+
+  for (num_workers = 1 ; num_workers <= 16 ; num_workers *= 2) {
+    const unsigned len = 1033;
+    const unsigned num = 5;
+    unsigned *a;
+    xbt_dynar_t data;
+    xbt_parmap_t parmap;
+    unsigned i;
+
+    xbt_test_add("Basic parmap usage (%u workers)", num_workers);
+
+    TEST_PARMAP_VALIDATE_MODE(mode);
+    parmap = xbt_parmap_new(num_workers, mode);
+
+    a = xbt_malloc(len * sizeof *a);
+    data = xbt_dynar_new(sizeof a, NULL);
+    for (i = 0; i < len; i++) {
+      a[i] = i;
+      xbt_dynar_push_as(data, void *, &a[i]);
+    }

-  unsigned long i, j;
-  xbt_dynar_t data = xbt_dynar_new(sizeof(void *), NULL);
+    for (i = 0; i < num; i++)
+      xbt_parmap_apply(parmap, fun_double, data);

-  /* Create the parallel map */
-#ifdef HAVE_FUTEX_H
-  parmap = xbt_parmap_new(10, XBT_PARMAP_FUTEX);
-#else
-  parmap = xbt_parmap_new(10, XBT_PARMAP_BUSY_WAIT);
-#endif
-  for (j = 0; j < 100; j++) {
-    xbt_dynar_push_as(data, void *, (void *)j);
+    for (i = 0; i < len; i++) {
+      unsigned expected = (1U << num) * (i + 1) - 1;
+      xbt_test_assert(a[i] == expected,
+                      "a[%u]: expected %u, got %u", i, expected, a[i]);
+    }
+
+    xbt_dynar_free(&data);
+    xbt_free(a);
+    xbt_parmap_destroy(parmap);
   }
+}
+
+XBT_TEST_UNIT("basic_posix", test_parmap_basic_posix, "Basic usage: posix")
+{
+  test_parmap_basic(XBT_PARMAP_POSIX);
+}
+
+XBT_TEST_UNIT("basic_futex", test_parmap_basic_futex, "Basic usage: futex")
+{
+  test_parmap_basic(XBT_PARMAP_FUTEX);
+}

-  for (i = 0; i < 5; i++) {
-    xbt_parmap_apply(parmap, fun, data);
+XBT_TEST_UNIT("basic_busy_wait", test_parmap_basic_busy_wait, "Basic usage: busy_wait")
+{
+  test_parmap_basic(XBT_PARMAP_BUSY_WAIT);
+}
+
+static void fun_get_id(void *arg)
+{
+  *(uintptr_t *)arg = (uintptr_t)xbt_os_thread_self();
+  xbt_os_sleep(0.5);
+}
+
+static int fun_compare(const void *pa, const void *pb)
+{
+  uintptr_t a = *(uintptr_t *)pa;
+  uintptr_t b = *(uintptr_t *)pb;
+  return a < b ? -1 : a > b ? 1 : 0;
+}
+
+/* Check that all threads are working. */
+static void test_parmap_extended(e_xbt_parmap_mode_t mode)
+{
+  unsigned num_workers;
+
+  for (num_workers = 1 ; num_workers <= 16 ; num_workers *= 2) {
+    const unsigned len = 2 * num_workers;
+    uintptr_t *a;
+    xbt_parmap_t parmap;
+    xbt_dynar_t data;
+    unsigned i;
+    unsigned count;
+
+    xbt_test_add("Extended parmap usage (%u workers)", num_workers);
+
+    TEST_PARMAP_VALIDATE_MODE(mode);
+    parmap = xbt_parmap_new(num_workers, mode);
+
+    a = xbt_malloc(len * sizeof *a);
+    data = xbt_dynar_new(sizeof a, NULL);
+    for (i = 0; i < len; i++)
+      xbt_dynar_push_as(data, void *, &a[i]);
+
+    xbt_parmap_apply(parmap, fun_get_id, data);
+
+    qsort(a, len, sizeof a[0], fun_compare);
+    count = 1;
+    for (i = 1; i < len; i++)
+      if (a[i] != a[i - 1])
+        count++;
+    xbt_test_assert(count == num_workers,
+                    "only %u/%u threads did some work", count, num_workers);
+
+    xbt_dynar_free(&data);
+    xbt_free(a);
+    xbt_parmap_destroy(parmap);
   }
+}
+
+XBT_TEST_UNIT("extended_posix", test_parmap_extended_posix, "Extended usage: posix")
+{
+  test_parmap_extended(XBT_PARMAP_POSIX);
+}

-  /* Destroy the parmap */
-  xbt_parmap_destroy(parmap);
-  xbt_dynar_free(&data);
+XBT_TEST_UNIT("extended_futex", test_parmap_extended_futex, "Extended usage: futex")
+{
+  test_parmap_extended(XBT_PARMAP_FUTEX);
+}
+
+XBT_TEST_UNIT("extended_busy_wait", test_parmap_extended_busy_wait, "Extended usage: busy_wait")
+{
+  test_parmap_extended(XBT_PARMAP_BUSY_WAIT);
 }
 #endif /* SIMGRID_TEST */
diff --git a/testsuite/xbt/CMakeLists.txt b/testsuite/xbt/CMakeLists.txt
index ad4566e86c..72310077e8 100644
--- a/testsuite/xbt/CMakeLists.txt
+++ b/testsuite/xbt/CMakeLists.txt
@@ -5,14 +5,17 @@ set(EXECUTABLE_OUTPUT_PATH "${CMAKE_CURRENT_BINARY_DIR}")
 add_executable(log_usage "log_usage.c")
 add_executable(graphxml_usage "graphxml_usage.c")
 add_executable(heap_bench "heap_bench.c")
+add_executable(parmap_bench "parmap_bench.c")

 ### Add definitions for compile
 if(NOT WIN32)
 target_link_libraries(log_usage gras m )
 target_link_libraries(graphxml_usage simgrid m )
 target_link_libraries(heap_bench gras m )
+target_link_libraries(parmap_bench simgrid m )
 else(NOT WIN32)
 target_link_libraries(log_usage gras )
 target_link_libraries(graphxml_usage simgrid )
 target_link_libraries(heap_bench gras )
+target_link_libraries(parmap_bench simgrid )
 endif(NOT WIN32)
diff --git a/testsuite/xbt/parmap_bench.c b/testsuite/xbt/parmap_bench.c
new file mode 100644
index 0000000000..112fbae788
--- /dev/null
+++ b/testsuite/xbt/parmap_bench.c
@@ -0,0 +1,209 @@
+#include
+#include
+#include
+#include
+#include
+#include   /* HAVE_FUTEX_H */
+#include "xbt/xbt_os_time.h"
+
+#define MODES_DEFAULT 0x7
+#define TIMEOUT 10.0
+#define ARRAY_SIZE 10007
+#define FIBO_MAX 25
+
+void (*fun_to_apply)(void *);
+
+static const char *parmap_mode_name(e_xbt_parmap_mode_t mode)
+{
+  static char name[80];
+  switch (mode) {
+  case XBT_PARMAP_POSIX:
+    snprintf(name, sizeof name, "POSIX");
+    break;
+  case XBT_PARMAP_FUTEX:
+    snprintf(name, sizeof name, "FUTEX");
+    break;
+  case XBT_PARMAP_BUSY_WAIT:
+    snprintf(name, sizeof name, "BUSY_WAIT");
+    break;
+  case XBT_PARMAP_DEFAULT:
+    snprintf(name, sizeof name, "DEFAULT");
+    break;
+  default:
+    snprintf(name, sizeof name, "UNKNOWN(%d)", mode);
+    break;
+  }
+  return name;
+}
+
+static int parmap_skip_mode(e_xbt_parmap_mode_t mode)
+{
+  switch (mode) {
+#ifndef HAVE_FUTEX_H
+  case XBT_PARMAP_FUTEX:
+    printf("not available\n");
+    return 1;
+#endif
+  default:
+    return 0;
+  }
+}
+
+static unsigned fibonacci(unsigned n)
+{
+  if (n < 2)
+    return n;
+  else
+    return fibonacci(n - 1) + fibonacci(n - 2);
+}
+
+static void fun_small_comp(void *arg)
+{
+  unsigned *u = arg;
+  *u = 2 * *u + 1;
+}
+
+static void fun_big_comp(void *arg)
+{
+  unsigned *u = arg;
+  *u = fibonacci(*u % FIBO_MAX);
+}
+
+static void array_new(unsigned **a, xbt_dynar_t *data)
+{
+  int i;
+  *a = xbt_malloc(ARRAY_SIZE * sizeof **a);
+  *data = xbt_dynar_new(sizeof *a, NULL);
+  xbt_dynar_shrink(*data, ARRAY_SIZE);
+  for (i = 0 ; i < ARRAY_SIZE ; i++) {
+    (*a)[i] = i;
+    xbt_dynar_push_as(*data, void*, &(*a)[i]);
+  }
+}
+
+static void bench_parmap_full(int nthreads, e_xbt_parmap_mode_t mode)
+{
+  unsigned *a;
+  xbt_dynar_t data;
+  xbt_parmap_t parmap;
+  int i;
+  double start_time, elapsed_time;
+
+  printf("** mode = %-15s ", parmap_mode_name(mode));
+  fflush(stdout);
+
+  if (parmap_skip_mode(mode))
+    return;
+
+  array_new(&a, &data);
+
+  i = 0;
+  start_time = xbt_os_time();
+  do {
+    parmap = xbt_parmap_new(nthreads, mode);
+    xbt_parmap_apply(parmap, fun_to_apply, data);
+    xbt_parmap_destroy(parmap);
+    elapsed_time = xbt_os_time() - start_time;
+    i++;
+  } while (elapsed_time < TIMEOUT);
+
+  printf("ran %d times in %g seconds (%g/s)\n",
+         i, elapsed_time, i / elapsed_time);
+
+  xbt_dynar_free(&data);
+  xbt_free(a);
+}
+
+static void bench_parmap_apply(int nthreads, e_xbt_parmap_mode_t mode)
+{
+  unsigned *a;
+  xbt_dynar_t data;
+  xbt_parmap_t parmap;
+  int i;
+  double start_time, elapsed_time;
+
+  printf("** mode = %-15s ", parmap_mode_name(mode));
+  fflush(stdout);
+
+  if (parmap_skip_mode(mode))
+    return;
+
+  array_new(&a, &data);
+
+  parmap = xbt_parmap_new(nthreads, mode);
+  i = 0;
+  start_time = xbt_os_time();
+  do {
+    xbt_parmap_apply(parmap, fun_to_apply, data);
+    elapsed_time = xbt_os_time() - start_time;
+    i++;
+  } while (elapsed_time < TIMEOUT);
+  xbt_parmap_destroy(parmap);
+
+  printf("ran %d times in %g seconds (%g/s)\n",
+         i, elapsed_time, i / elapsed_time);
+
+  xbt_dynar_free(&data);
+  xbt_free(a);
+}
+
+static void bench_all_modes(void (*bench_fun)(int, e_xbt_parmap_mode_t),
+                            int nthreads, unsigned modes)
+{
+  e_xbt_parmap_mode_t all_modes[] = {
+    XBT_PARMAP_POSIX, XBT_PARMAP_FUTEX,
+    XBT_PARMAP_BUSY_WAIT, XBT_PARMAP_DEFAULT
+  };
+  unsigned i;
+  for (i = 0 ; i < sizeof all_modes / sizeof all_modes[0] ; i++) {
+    if (1U << i & modes)
+      bench_fun(nthreads, all_modes[i]);
+  }
+}
+
+int main(int argc, char *argv[])
+{
+  int nthreads;
+  unsigned modes = MODES_DEFAULT;
+
+  if (argc != 2 && argc != 3) {
+    fprintf(stderr,
+            "Usage: %s nthreads [modes]\n"
+            "    nthreads - number of working threads\n"
+            "    modes    - bitmask of modes to test\n",
+            argv[0]);
+    return EXIT_FAILURE;
+  }
+  nthreads = atoi(argv[1]);
+  if (nthreads < 1) {
+    fprintf(stderr, "ERROR: invalid thread count: %d\n", nthreads);
+    return EXIT_FAILURE;
+  }
+  if (argc == 3)
+    modes = atoi(argv[2]);
+
+  printf("Parmap benchmark with %d workers (modes = %#x)...\n\n",
+         nthreads, modes);
+
+  fun_to_apply = fun_small_comp;
+
+  printf("Benchmark for parmap create+apply+destroy (small comp):\n");
+  bench_all_modes(bench_parmap_full, nthreads, modes);
+  printf("\n");
+
+  printf("Benchmark for parmap apply only (small comp):\n");
+  bench_all_modes(bench_parmap_apply, nthreads, modes);
+  printf("\n");
+
+  fun_to_apply = fun_big_comp;
+
+  printf("Benchmark for parmap create+apply+destroy (big comp):\n");
+  bench_all_modes(bench_parmap_full, nthreads, modes);
+  printf("\n");
+
+  printf("Benchmark for parmap apply only (big comp):\n");
+  bench_all_modes(bench_parmap_apply, nthreads, modes);
+  printf("\n");
+
+  return EXIT_SUCCESS;
+}