2 /* Copyright (c) 2007 Arnaud Legrand, Pedro Velho. All rights reserved. */
3 /* This program is free software; you can redistribute it and/or modify it
4 * under the terms of the license (GNU LGPL) which comes with this package. */
6 * Modelling the proportional fairness using the Lagrange Optimization
7 * Approach. For a detailed description see:
8 * "ssh://username@scm.gforge.inria.fr/svn/memo/people/pvelho/lagrange/ppf.ps".
11 #include "xbt/sysdep.h"
12 #include "xbt/mallocator.h"
13 #include "maxmin_private.h"
20 #define VEGAS_SCALING 1000.0
23 XBT_LOG_NEW_DEFAULT_SUBCATEGORY(surf_lagrange, surf,
24 "Logging specific to SURF (lagrange)");
25 XBT_LOG_NEW_SUBCATEGORY(surf_lagrange_dichotomy, surf,
26 "Logging specific to SURF (lagrange dichotomy)");
29 * Local prototypes to implement the lagrangian optimization with optimal step, also called dichotomy.
31 //solves the proportional fairness using a Lagrange optimization with a dichotomy step
32 void lagrange_solve(lmm_system_t sys);
33 //computes the value of the dichotomy using an initial value, init, with a specific variable or constraint
34 double dichotomy(double init, double diff(double, void *), void *var_cnst,
36 //computes the value of the differential of variable param_var applied to mu
37 double partial_diff_mu(double mu, void *param_var);
38 //computes the value of the differential of constraint param_cnst applied to lambda
39 double partial_diff_lambda(double lambda, void *param_cnst);
40 //auxiliary function to compute the partial_diff
41 double diff_aux(lmm_variable_t var, double x);
44 static int __check_feasible(xbt_swag_t cnst_list, xbt_swag_t var_list, int warn)
46 xbt_swag_t elem_list = NULL;
47 lmm_element_t elem = NULL;
48 lmm_constraint_t cnst = NULL;
49 lmm_variable_t var = NULL;
53 xbt_swag_foreach(cnst, cnst_list) {
55 elem_list = &(cnst->element_set);
56 xbt_swag_foreach(elem, elem_list) {
63 if (double_positive(tmp - cnst->bound)) {
66 ("The link (%p) is over-used. Expected less than %f and got %f",
67 cnst, cnst->bound, tmp);
70 DEBUG3("Checking feasability for constraint (%p): sat = %f, lambda = %f ",
71 cnst, tmp - cnst->bound, cnst->lambda);
74 xbt_swag_foreach(var, var_list) {
75 if (var->bound < 0 || var->weight <= 0)
77 DEBUG3("Checking feasability for variable (%p): sat = %f mu = %f", var,
78 var->value - var->bound, var->mu);
80 if (double_positive(var->value - var->bound)) {
83 ("The variable (%p) is too large. Expected less than %f and got %f",
84 var, var->bound, var->value);
91 void lagrange_solve(lmm_system_t sys)
96 int max_iterations = 100;
97 double epsilon_min_error = MAXMIN_PRECISION;
98 double dichotomy_min_error = 1e-20;
99 double overall_error = 1;
102 * Variables to manipulate the data structure proposed to model the maxmin
103 * fairness. See docummentation for more details.
105 xbt_swag_t cnst_list = NULL;
106 lmm_constraint_t cnst = NULL;
108 xbt_swag_t var_list = NULL;
109 lmm_variable_t var = NULL;
112 * Auxiliar variables.
119 DEBUG0("Iterative method configuration snapshot =====>");
120 DEBUG1("#### Maximum number of iterations : %d", max_iterations);
121 DEBUG1("#### Minimum error tolerated : %e",
123 DEBUG1("#### Minimum error tolerated (dichotomy) : %e",
124 dichotomy_min_error);
126 if (!(sys->modified))
130 * Initialize the var list variable with only the active variables.
131 * Associate an index in the swag variables. Initialize mu.
133 var_list = &(sys->variable_set);
135 xbt_swag_foreach(var, var_list) {
136 if ((var->bound < 0.0) || (var->weight <= 0.0)) {
137 DEBUG1("#### NOTE var(%d) is a boundless (or inactive) variable", i);
143 DEBUG3("#### var(%d) %p ->mu : %e", i, var, var->mu);
144 DEBUG3("#### var(%d) %p ->weight: %e", i, var, var->weight);
145 DEBUG3("#### var(%d) %p ->bound: %e", i, var, var->bound);
152 cnst_list = &(sys->active_constraint_set);
153 xbt_swag_foreach(cnst, cnst_list) {
155 cnst->new_lambda = 2.0;
156 DEBUG2("#### cnst(%p)->lambda : %e", cnst, cnst->lambda);
160 * While doesn't reach a minimun error or a number maximum of iterations.
162 while (overall_error > epsilon_min_error && iteration < max_iterations) {
166 DEBUG1("************** ITERATION %d **************", iteration);
167 DEBUG0("-------------- Gradient Descent ----------");
169 * Compute the value of mu_i
171 //forall mu_i in mu_1, mu_2, ..., mu_n
172 xbt_swag_foreach(var, var_list) {
173 if ((var->bound >= 0) && (var->weight > 0)) {
174 DEBUG1("Working on var (%p)", var);
176 dichotomy(var->mu, partial_diff_mu, var, dichotomy_min_error);
177 dual_updated += (fabs(var->new_mu-var->mu)>dichotomy_min_error);
178 DEBUG2("dual_updated (%d) : %1.20f",dual_updated,fabs(var->new_mu-var->mu));
179 DEBUG3("Updating mu : var->mu (%p) : %1.20f -> %1.20f", var, var->mu, var->new_mu);
180 var->mu = var->new_mu;
185 * Compute the value of lambda_i
187 //forall lambda_i in lambda_1, lambda_2, ..., lambda_n
188 xbt_swag_foreach(cnst, cnst_list) {
189 DEBUG1("Working on cnst (%p)", cnst);
191 dichotomy(cnst->lambda, partial_diff_lambda, cnst,
192 dichotomy_min_error);
193 dual_updated += (fabs(cnst->new_lambda-cnst->lambda)>dichotomy_min_error);
194 DEBUG2("dual_updated (%d) : %1.20f",dual_updated,fabs(cnst->new_lambda-cnst->lambda));
195 DEBUG3("Updating lambda : cnst->lambda (%p) : %1.20f -> %1.20f", cnst, cnst->lambda, cnst->new_lambda);
196 cnst->lambda = cnst->new_lambda;
200 * Now computes the values of each variable (\rho) based on
201 * the values of \lambda and \mu.
203 DEBUG0("-------------- Check convergence ----------");
205 xbt_swag_foreach(var, var_list) {
206 if (var->weight <= 0)
209 //compute sigma_i + mu_i
211 for (i = 0; i < var->cnsts_number; i++) {
212 tmp += (var->cnsts[i].constraint)->lambda;
216 DEBUG3("\t Working on var (%p). cost = %e; Df = %e", var, tmp,
219 //uses the partial differential inverse function
220 tmp = var->func_fpi(var, tmp);
222 //computes de overall_error using normalized value
223 if (overall_error < (fabs(var->value - tmp)/tmp)) {
224 overall_error = (fabs(var->value - tmp)/tmp);
227 if (overall_error < (fabs(var->value - tmp))) {
228 overall_error = (fabs(var->value - tmp));
232 DEBUG3("New value of var (%p) = %e, overall_error = %e", var,
233 var->value, overall_error);
237 if (!__check_feasible(cnst_list, var_list, 0))
239 DEBUG2("Iteration %d: Overall_error : %f", iteration, overall_error);
241 DEBUG1("Could not improve the convergence at iteration %d. Drop it!",iteration);
247 __check_feasible(cnst_list, var_list, 1);
249 if (overall_error <= epsilon_min_error) {
250 DEBUG1("The method converges in %d iterations.", iteration);
252 if (iteration >= max_iterations) {
254 ("Method reach %d iterations, which is the maximum number of iterations allowed.",
257 /* INFO1("Method converged after %d iterations", iteration); */
259 if (XBT_LOG_ISENABLED(surf_lagrange, xbt_log_priority_debug)) {
265  * Returns a double value corresponding to the result of a dichotomy process with
266  * respect to a given variable/constraint (\mu in the case of a variable or \lambda in
267  * case of a constraint) and an initial value init.
269  * @param init initial value for \mu or \lambda
270  * @param diff a function that computes the differential with respect to \mu or \lambda
271  * @param var_cnst a pointer to a variable or constraint
272  * @param min_error a minimum error tolerated
274  * @return a double corresponding to the result of the dichotomy process
276 double dichotomy(double init, double diff(double, void *), void *var_cnst,
280 double overall_error;
282 double min_diff, max_diff, middle_diff;
292 min_diff = max_diff = middle_diff = 0.0;
295 if ((diff_0 = diff(1e-16, var_cnst)) >= 0) {
296 CDEBUG1(surf_lagrange_dichotomy, "returning 0.0 (diff = %e)",
302 min_diff = diff(min, var_cnst);
303 max_diff = diff(max, var_cnst);
305 while (overall_error > min_error) {
306 CDEBUG4(surf_lagrange_dichotomy,
307 "[min, max] = [%1.20f, %1.20f] || diffmin, diffmax = %1.20f, %1.20f", min, max,
310 if (min_diff > 0 && max_diff > 0) {
312 CDEBUG0(surf_lagrange_dichotomy, "Decreasing min");
314 min_diff = diff(min, var_cnst);
316 CDEBUG0(surf_lagrange_dichotomy, "Decreasing max");
321 } else if (min_diff < 0 && max_diff < 0) {
323 CDEBUG0(surf_lagrange_dichotomy, "Increasing max");
325 max_diff = diff(max, var_cnst);
327 CDEBUG0(surf_lagrange_dichotomy, "Increasing min");
331 } else if (min_diff < 0 && max_diff > 0) {
332 middle = (max + min) / 2.0;
333 CDEBUG1(surf_lagrange_dichotomy, "Trying (max+min)/2 : %1.20f",middle);
335 if((min==middle) || (max==middle)) {
336 DEBUG0("Cannot improve the convergence!");
339 middle_diff = diff(middle, var_cnst);
341 if (middle_diff < 0) {
342 CDEBUG0(surf_lagrange_dichotomy, "Increasing min");
344 min_diff = middle_diff;
345 } else if (middle_diff > 0) {
346 CDEBUG0(surf_lagrange_dichotomy, "Decreasing max");
348 max_diff = middle_diff;
352 } else if (min_diff == 0) {
355 } else if (max_diff == 0) {
358 } else if (min_diff > 0 && max_diff < 0) {
359 CWARN0(surf_lagrange_dichotomy,
360 "The impossible happened, partial_diff(min) > 0 && partial_diff(max) < 0");
363 CWARN2(surf_lagrange_dichotomy,
364 "diffmin (%1.20f) or diffmax (%1.20f) are something I don't know, taking no action.",
370 CDEBUG1(surf_lagrange_dichotomy, "returning %e", (min + max) / 2.0);
372 return ((min + max) / 2.0);
378 double partial_diff_mu(double mu, void *param_var)
380 double mu_partial = 0.0;
381 double sigma_mu = 0.0;
382 lmm_variable_t var = (lmm_variable_t) param_var;
386 for (i = 0; i < var->cnsts_number; i++)
387 sigma_mu += (var->cnsts[i].constraint)->lambda;
389 //compute sigma_i + mu_i
392 //use auxiliar function passing (sigma_i + mu_i)
393 mu_partial = diff_aux(var, sigma_mu);
396 mu_partial += var->bound;
405 double partial_diff_lambda(double lambda, void *param_cnst)
409 xbt_swag_t elem_list = NULL;
410 lmm_element_t elem = NULL;
411 lmm_variable_t var = NULL;
412 lmm_constraint_t cnst = (lmm_constraint_t) param_cnst;
413 double lambda_partial = 0.0;
414 double sigma_i = 0.0;
417 elem_list = &(cnst->element_set);
419 CDEBUG1(surf_lagrange_dichotomy,"Computting diff of cnst (%p)", cnst);
421 xbt_swag_foreach(elem, elem_list) {
422 var = elem->variable;
423 if (var->weight <= 0)
426 //initilize de sumation variable
429 //compute sigma_i of variable var
430 for (i = 0; i < var->cnsts_number; i++) {
431 sigma_i += (var->cnsts[i].constraint)->lambda;
434 //add mu_i if this flow has a RTT constraint associated
438 //replace value of cnst->lambda by the value of parameter lambda
439 sigma_i = (sigma_i - cnst->lambda) + lambda;
441 //use the auxiliar function passing (\sigma_i + \mu_i)
442 lambda_partial += diff_aux(var, sigma_i);
446 lambda_partial += cnst->bound;
449 return lambda_partial;
453 double diff_aux(lmm_variable_t var, double x)
455 double tmp_fpi, result;
457 XBT_IN2("(var (%p), x (%1.20f))", var, x);
458 xbt_assert0(var->func_fpi,
459 "Initialize the protocol functions first create variables before.");
461 tmp_fpi = var->func_fpi(var, x);
469 /**************** Vegas and Reno functions *************************/
471 * NOTE for Reno: all functions consider the network
472  * coefficient (alpha) equal to 1.
476 * For Vegas fpi: $\frac{\alpha D_f}{x}$
478 double func_vegas_fpi(lmm_variable_t var, double x){
479 xbt_assert0(x>0.0,"Don't call me with stupid values!");
480 return VEGAS_SCALING*var->df/x;
484 * For Reno fpi: $\sqrt{\frac{1}{{D_f}^2 x} - \frac{2}{3{D_f}^2}}$
486 double func_reno_fpi(lmm_variable_t var, double x){
489 xbt_assert0(var->df>0.0,"Don't call me with stupid values!");
490 xbt_assert0(x>0.0,"Don't call me with stupid values!");
492 res_fpi = 1/(var->df*var->df*x) - 2/(3*var->df*var->df);
493 if(res_fpi<=0.0) return 0.0;
494 xbt_assert0(res_fpi>0.0,"Don't call me with stupid values!");
495 return sqrt(res_fpi);