// computes the value of the dichotomy using an initial value, init, for a specific variable or constraint
static double dichotomy(double init, double diff(double, void *),
void *var_cnst, double min_error);
-//computes the value of the differential of variable param_var applied to mu
-static double partial_diff_mu(double mu, void *param_var);
//computes the value of the differential of constraint param_cnst applied to lambda
static double partial_diff_lambda(double lambda, void *param_cnst);
new_obj = dual_objective(var_list, cnst_list);
XBT_DEBUG("Improvement for Objective (%g -> %g) : %g", obj, new_obj,
obj - new_obj);
- xbt_assert1(obj - new_obj >= -epsilon_min_error,
+ xbt_assert(obj - new_obj >= -epsilon_min_error,
"Our gradient sucks! (%1.20f)", obj - new_obj);
obj = new_obj;
}
new_obj = dual_objective(var_list, cnst_list);
XBT_DEBUG("Improvement for Objective (%g -> %g) : %g", obj, new_obj,
obj - new_obj);
- xbt_assert1(obj - new_obj >= -epsilon_min_error,
+ xbt_assert(obj - new_obj >= -epsilon_min_error,
"Our gradient sucks! (%1.20f)", obj - new_obj);
obj = new_obj;
}
double diff_0 = 0.0;
min = max = init;
- XBT_IN;
+ XBT_IN("");
if (init == 0.0) {
min = max = 0.5;
if ((diff_0 = diff(1e-16, var_cnst)) >= 0) {
XBT_CDEBUG(surf_lagrange_dichotomy, "returning 0.0 (diff = %e)", diff_0);
- XBT_OUT;
+ XBT_OUT();
return 0.0;
}
}
XBT_CDEBUG(surf_lagrange_dichotomy, "returning %e", (min + max) / 2.0);
- XBT_OUT;
+ XBT_OUT();
return ((min + max) / 2.0);
}
double diff = 0.0;
double sigma_i = 0.0;
- XBT_IN;
+ XBT_IN("");
elem_list = &(cnst->element_set);
XBT_CDEBUG(surf_lagrange_dichotomy, "Computing diff of cnst (%p)", cnst);
XBT_CDEBUG(surf_lagrange_dichotomy,
"d D/d lambda for cnst (%p) at %1.20f = %1.20f", cnst, lambda,
diff);
- XBT_OUT;
+ XBT_OUT();
return diff;
}
/* TCP Vegas objective function: f(x) = VEGAS_SCALING * weight * log(x).
 * x must be strictly positive since log(x) is undefined otherwise. */
double func_vegas_f(lmm_variable_t var, double x)
{
  xbt_assert(x > 0.0, "Don't call me with stupid values! (%1.20f)", x);
  return VEGAS_SCALING * var->weight * log(x);
}
/* First derivative of the Vegas objective: f'(x) = VEGAS_SCALING * weight / x.
 * x must be strictly positive (division by zero otherwise). */
double func_vegas_fp(lmm_variable_t var, double x)
{
  xbt_assert(x > 0.0, "Don't call me with stupid values! (%1.20f)", x);
  return VEGAS_SCALING * var->weight / x;
}
/* Inverse of the Vegas derivative: solves f'(y) = x for y,
 * i.e. y = weight / (x / VEGAS_SCALING). x must be strictly positive. */
double func_vegas_fpi(lmm_variable_t var, double x)
{
  xbt_assert(x > 0.0, "Don't call me with stupid values! (%1.20f)", x);
  return var->weight / (x / VEGAS_SCALING);
}
#define RENO_SCALING 1.0
double func_reno_f(lmm_variable_t var, double x)
{
- xbt_assert0(var->weight > 0.0, "Don't call me with stupid values!");
+ xbt_assert(var->weight > 0.0, "Don't call me with stupid values!");
return RENO_SCALING * sqrt(3.0 / 2.0) / var->weight *
atan(sqrt(3.0 / 2.0) * var->weight * x);
{
double res_fpi;
- xbt_assert0(var->weight > 0.0, "Don't call me with stupid values!");
- xbt_assert0(x > 0.0, "Don't call me with stupid values!");
+ xbt_assert(var->weight > 0.0, "Don't call me with stupid values!");
+ xbt_assert(x > 0.0, "Don't call me with stupid values!");
res_fpi =
1.0 / (var->weight * var->weight * (x / RENO_SCALING)) -
2.0 / (3.0 * var->weight * var->weight);
if (res_fpi <= 0.0)
return 0.0;
-/* xbt_assert0(res_fpi>0.0,"Don't call me with stupid values!"); */
+/* xbt_assert(res_fpi>0.0,"Don't call me with stupid values!"); */
return sqrt(res_fpi);
}
#define RENO2_SCALING 1.0
double func_reno2_f(lmm_variable_t var, double x)
{
- xbt_assert0(var->weight > 0.0, "Don't call me with stupid values!");
+ xbt_assert(var->weight > 0.0, "Don't call me with stupid values!");
return RENO2_SCALING * (1.0 / var->weight) * log((x * var->weight) /
(2.0 * x * var->weight +
3.0));
double res_fpi;
double tmp;
- xbt_assert0(x > 0.0, "Don't call me with stupid values!");
+ xbt_assert(x > 0.0, "Don't call me with stupid values!");
tmp = x * var->weight * var->weight;
res_fpi = tmp * (9.0 * x + 24.0);