new crash procedure for the dual simplex, off for now

This commit is contained in:
Laurent Perron
2021-03-10 11:40:47 +01:00
parent 9de99d789e
commit 7bcfc3a223
7 changed files with 361 additions and 47 deletions

View File

@@ -22,7 +22,7 @@ syntax = "proto2";
package operations_research.glop;
// next id = 62
// next id = 63
message GlopParameters {
// Supported algorithms for scaling:
@@ -413,6 +413,16 @@ message GlopParameters {
// simplex solver", Ph.D. dissertation, University of Edinburgh.
optional bool perturb_costs_in_dual_simplex = 53 [default = false];
// We have two possible dual phase I algorithms. Both work on an LP that
// minimize the sum of dual infeasibilities. One uses dedicated code (when this
// param is true), the other one uses exactly the same code as the dual phase
// II but on an auxiliary problem where the variable bounds of the original
// problem are changed.
//
// TODO(user): For now we have both, but ideally the non-dedicated version
// will win since it is a lot less code to maintain.
optional bool use_dedicated_dual_feasibility_algorithm = 62 [default = true];
// The magnitude of the cost perturbation is given by
// RandomIn(1.0, 2.0) * (
// relative_cost_perturbation * cost

View File

@@ -156,6 +156,25 @@ Fractional ReducedCosts::ComputeMaximumDualInfeasibility() const {
return maximum_dual_infeasibility;
}
Fractional ReducedCosts::ComputeMaximumDualInfeasibilityOnNonBoxedVariables()
    const {
  SCOPED_TIME_STAT(&stats_);
  const DenseBitRow& can_decrease = variables_info_.GetCanDecreaseBitRow();
  const DenseBitRow& can_increase = variables_info_.GetCanIncreaseBitRow();
  const DenseBitRow& is_boxed = variables_info_.GetNonBasicBoxedVariables();

  // Scan all the non-basic, non-boxed variables and keep the largest
  // dual-infeasibility magnitude. A position is dual-infeasible when moving
  // the variable in an allowed direction would improve the objective.
  Fractional result = 0.0;
  for (const ColIndex col : variables_info_.GetNotBasicBitRow()) {
    if (is_boxed[col]) continue;
    const Fractional rc = reduced_costs_[col];
    const bool is_infeasible = (rc < 0.0 && can_increase.IsSet(col)) ||
                               (rc > 0.0 && can_decrease.IsSet(col));
    if (is_infeasible) {
      result = std::max(result, std::abs(rc));
    }
  }
  return result;
}
Fractional ReducedCosts::ComputeSumOfDualInfeasibilities() const {
SCOPED_TIME_STAT(&stats_);
DCHECK(!recompute_reduced_costs_);
@@ -315,6 +334,12 @@ void ReducedCosts::MaintainDualInfeasiblePositions(bool maintain) {
}
}
const DenseRow& ReducedCosts::GetFullReducedCosts() {
  SCOPED_TIME_STAT(&stats_);
  // GetReducedCosts() only guarantees validity on the relevant positions.
  // Request a full recomputation first unless the whole vector is already
  // known to be freshly recomputed.
  if (!are_reduced_costs_recomputed_) {
    recompute_reduced_costs_ = true;
  }
  return GetReducedCosts();
}
const DenseRow& ReducedCosts::GetReducedCosts() {
SCOPED_TIME_STAT(&stats_);
RecomputeReducedCostsAndPrimalEnteringCandidatesIfNeeded();

View File

@@ -74,6 +74,12 @@ class ReducedCosts {
Fractional ComputeMaximumDualInfeasibility() const;
Fractional ComputeSumOfDualInfeasibilities() const;
// Same as ComputeMaximumDualInfeasibility() but ignore boxed variables.
// Because we can always switch bounds of boxed variables, if this is under
// the dual tolerance, then we can easily have a dual feasible solution and do
// not need to run a dual phase I algorithm.
Fractional ComputeMaximumDualInfeasibilityOnNonBoxedVariables() const;
// Updates any internal data BEFORE the given simplex pivot is applied to B.
// Note that no updates are needed in case of a bound flip.
// The arguments are in order:
@@ -158,6 +164,11 @@ class ReducedCosts {
// variables_info_.GetIsRelevantBitRow().
const DenseRow& GetReducedCosts();
// Same as GetReducedCosts() but trigger a recomputation if not already done
// to have access to the reduced costs on all positions, not just the relevant
// one.
const DenseRow& GetFullReducedCosts();
// Returns the non-basic columns that are dual-infeasible. These are also
// the primal simplex possible entering columns.
const DenseBitRow& GetDualInfeasiblePositions() const {

View File

@@ -194,29 +194,92 @@ Status RevisedSimplex::Solve(const LinearProgram& lp, TimeLimit* time_limit) {
reduced_costs_.PerturbCosts();
}
variables_info_.MakeBoxedVariableRelevant(false);
GLOP_RETURN_IF_ERROR(DualMinimize(time_limit));
DisplayIterationInfo();
if (parameters_.use_dedicated_dual_feasibility_algorithm()) {
variables_info_.MakeBoxedVariableRelevant(false);
GLOP_RETURN_IF_ERROR(DualMinimize(feasibility_phase_, time_limit));
DisplayIterationInfo();
if (problem_status_ != ProblemStatus::DUAL_INFEASIBLE) {
// Note(user): In most cases, the matrix will already be refactorized and
// both Refactorize() and PermuteBasis() will do nothing. However, if the
// time limit is reached during the first phase, this might not be the
// case and RecomputeBasicVariableValues() below DCHECKs that the matrix
// is refactorized. This is not required, but we currently only want to
// recompute values from scratch when the matrix was just refactorized to
// maximize precision.
GLOP_RETURN_IF_ERROR(basis_factorization_.Refactorize());
PermuteBasis();
if (problem_status_ != ProblemStatus::DUAL_INFEASIBLE) {
// Note(user): In most cases, the matrix will already be refactorized
// and both Refactorize() and PermuteBasis() will do nothing. However,
// if the time limit is reached during the first phase, this might not
// be the case and RecomputeBasicVariableValues() below DCHECKs that the
// matrix is refactorized. This is not required, but we currently only
// want to recompute values from scratch when the matrix was just
// refactorized to maximize precision.
GLOP_RETURN_IF_ERROR(basis_factorization_.Refactorize());
PermuteBasis();
variables_info_.MakeBoxedVariableRelevant(true);
variables_info_.MakeBoxedVariableRelevant(true);
reduced_costs_.MakeReducedCostsPrecise();
// This is needed to display errors properly.
MakeBoxedVariableDualFeasible(
variables_info_.GetNonBasicBoxedVariables(),
/*update_basic_values=*/false);
variable_values_.RecomputeBasicVariableValues();
variable_values_.ResetPrimalInfeasibilityInformation();
}
} else {
// Test initial dual infeasibility, ignoring boxed variables. We currently
// refactorize/recompute the reduced costs if not already done.
// TODO(user): Not ideal in an incremental setting.
reduced_costs_.MaintainDualInfeasiblePositions(false);
reduced_costs_.MakeReducedCostsPrecise();
bool unused;
RefactorizeBasisIfNeeded(&unused);
reduced_costs_.GetReducedCosts();
// This is needed to display errors properly.
MakeBoxedVariableDualFeasible(variables_info_.GetNonBasicBoxedVariables(),
/*update_basic_values=*/false);
variable_values_.RecomputeBasicVariableValues();
variable_values_.ResetPrimalInfeasibilityInformation();
const Fractional initial_infeasibility =
reduced_costs_.ComputeMaximumDualInfeasibilityOnNonBoxedVariables();
if (initial_infeasibility <
reduced_costs_.GetDualFeasibilityTolerance()) {
if (log_info) LOG(INFO) << "Initial basis is dual feasible.";
problem_status_ = ProblemStatus::DUAL_FEASIBLE;
MakeBoxedVariableDualFeasible(
variables_info_.GetNonBasicBoxedVariables(),
/*update_basic_values=*/false);
variable_values_.RecomputeBasicVariableValues();
variable_values_.ResetPrimalInfeasibilityInformation();
} else {
// Transform problem and recompute variable values.
variables_info_.TransformToDualPhaseIProblem(
reduced_costs_.GetDualFeasibilityTolerance(),
reduced_costs_.GetReducedCosts());
variable_values_.ResetAllNonBasicVariableValues();
variable_values_.RecomputeBasicVariableValues();
variable_values_.ResetPrimalInfeasibilityInformation();
// Optimize.
DisplayErrors();
GLOP_RETURN_IF_ERROR(DualMinimize(false, time_limit));
DisplayIterationInfo();
// Restore original problem and recompute variable values. Note that we
// need the reduced cost on the fixed positions here.
variables_info_.EndDualPhaseI(
reduced_costs_.GetDualFeasibilityTolerance(),
reduced_costs_.GetFullReducedCosts());
variable_values_.ResetAllNonBasicVariableValues();
variable_values_.RecomputeBasicVariableValues();
variable_values_.ResetPrimalInfeasibilityInformation();
// TODO(user): Note that if there was cost shifts, we just keep them
// until the end of the optim.
//
// TODO(user): What if slightly infeasible? we shouldn't really stop.
// Call primal ? use higher tolerance ? It seems we can always kind of
// continue and deal with the issue later. Find a way.
if (problem_status_ == ProblemStatus::OPTIMAL) {
if (reduced_costs_.ComputeMaximumDualInfeasibility() <
reduced_costs_.GetDualFeasibilityTolerance()) {
problem_status_ = ProblemStatus::DUAL_FEASIBLE;
} else {
if (log_info) LOG(INFO) << "Infeasible after first phase.";
problem_status_ = ProblemStatus::DUAL_INFEASIBLE;
}
}
}
}
} else {
reduced_costs_.MaintainDualInfeasiblePositions(true);
@@ -275,7 +338,7 @@ Status RevisedSimplex::Solve(const LinearProgram& lp, TimeLimit* time_limit) {
} else {
// Run the dual simplex.
reduced_costs_.MaintainDualInfeasiblePositions(false);
GLOP_RETURN_IF_ERROR(DualMinimize(time_limit));
GLOP_RETURN_IF_ERROR(DualMinimize(feasibility_phase_, time_limit));
}
// Minimize() or DualMinimize() always double check the result with maximum
@@ -314,6 +377,10 @@ Status RevisedSimplex::Solve(const LinearProgram& lp, TimeLimit* time_limit) {
// DUAL_INFEASIBLE or PRIMAL_INFEASIBLE. For instance we currently report
// PRIMAL_UNBOUNDED with the primal on the problem l30.mps instead of
// OPTIMAL and the dual does not have issues on this problem.
//
// TODO(user): There is another issue on infeas/qual.mps. I think we should
// just check the dual ray, not really the current solution dual
// feasibility.
if (problem_status_ == ProblemStatus::DUAL_UNBOUNDED) {
const Fractional tolerance = parameters_.solution_feasibility_tolerance();
if (reduced_costs_.ComputeMaximumDualResidual() > tolerance ||
@@ -2680,13 +2747,15 @@ Status RevisedSimplex::Minimize(TimeLimit* time_limit) {
// PhD thesis seem worth trying at some point:
// - The subproblem approach, which enables one to use a normal phase II dual,
// but requires an efficient bound-flipping ratio test since the new problem
// has all its variables boxed.
// has all its variables boxed. This one is implemented now, but requires
// a bit more tuning.
// - Pan's method, which is really fast but has no theoretical guarantee of
// terminating and thus needs to use one of the other methods as a fallback if
// it fails to make progress.
//
// Note that the returned status applies to the primal problem!
Status RevisedSimplex::DualMinimize(TimeLimit* time_limit) {
Status RevisedSimplex::DualMinimize(bool feasibility_phase,
TimeLimit* time_limit) {
Cleanup update_deterministic_time_on_return(
[this, time_limit]() { AdvanceDeterministicTime(time_limit); });
num_consecutive_degenerate_iterations_ = 0;
@@ -2726,7 +2795,7 @@ Status RevisedSimplex::DualMinimize(TimeLimit* time_limit) {
//
// Reduced costs are needed by MakeBoxedVariableDualFeasible(), so if we
// do recompute them, it is better to do that first.
if (!feasibility_phase_ && !reduced_costs_.AreReducedCostsRecomputed() &&
if (!feasibility_phase && !reduced_costs_.AreReducedCostsRecomputed() &&
!old_refactorize_value) {
const Fractional dual_residual_error =
reduced_costs_.ComputeMaximumDualResidual();
@@ -2748,7 +2817,7 @@ Status RevisedSimplex::DualMinimize(TimeLimit* time_limit) {
// refactorize the matrix, like for the reduced costs? That may lead to
// a worse behavior than keeping the "imprecise" version and only
// recomputing it when its precision is above a threshold.
if (!feasibility_phase_) {
if (!feasibility_phase) {
MakeBoxedVariableDualFeasible(
variables_info_.GetNonBasicBoxedVariables(),
/*update_basic_values=*/false);
@@ -2757,7 +2826,13 @@ Status RevisedSimplex::DualMinimize(TimeLimit* time_limit) {
// Computing the objective at each iteration takes time, so we just
// check the limit when the basis is refactorized.
if (ComputeObjectiveValue() > dual_objective_limit_) {
//
// Hack: We need the feasibility_phase_ here and not the local variable
// because this must not be checked for the dual phase I algo that use
// the same code as the dual phase II (i.e. the local feasibility_phase
// will be false).
if (!feasibility_phase_ && dual_objective_limit_ != kInfinity &&
ComputeObjectiveValue() > dual_objective_limit_) {
VLOG(1) << "Stopping the dual simplex because"
<< " the objective limit " << dual_objective_limit_
<< " has been reached.";
@@ -2772,7 +2847,7 @@ Status RevisedSimplex::DualMinimize(TimeLimit* time_limit) {
} else {
// Updates from the previous iteration that can be skipped if we
// recompute everything (see other case above).
if (!feasibility_phase_) {
if (!feasibility_phase) {
// Make sure the boxed variables are dual-feasible before choosing the
// leaving variable row.
MakeBoxedVariableDualFeasible(bound_flip_candidates_,
@@ -2786,7 +2861,7 @@ Status RevisedSimplex::DualMinimize(TimeLimit* time_limit) {
}
}
if (feasibility_phase_) {
if (feasibility_phase) {
GLOP_RETURN_IF_ERROR(DualPhaseIChooseLeavingVariableRow(
&leaving_row, &cost_variation, &target_bound));
} else {
@@ -2799,7 +2874,7 @@ Status RevisedSimplex::DualMinimize(TimeLimit* time_limit) {
refactorize = true;
continue;
}
if (feasibility_phase_) {
if (feasibility_phase) {
// Note that since the basis is refactorized, the variable values
// will be recomputed at the beginning of the second phase. The boxed
// variable values will also be corrected by
@@ -2821,7 +2896,7 @@ Status RevisedSimplex::DualMinimize(TimeLimit* time_limit) {
update_row_.IgnoreUpdatePosition(pair.second);
}
}
if (feasibility_phase_) {
if (feasibility_phase) {
GLOP_RETURN_IF_ERROR(entering_variable_.DualPhaseIChooseEnteringColumn(
update_row_, cost_variation, &entering_col, &ratio));
} else {
@@ -2830,7 +2905,7 @@ Status RevisedSimplex::DualMinimize(TimeLimit* time_limit) {
&ratio));
}
// No entering_col: Unbounded problem / Infeasible problem.
// No entering_col: dual unbounded (i.e. primal infeasible).
if (entering_col == kInvalidCol) {
if (!reduced_costs_.AreReducedCostsPrecise()) {
VLOG(1) << "No entering column. Double checking...";
@@ -2838,7 +2913,7 @@ Status RevisedSimplex::DualMinimize(TimeLimit* time_limit) {
continue;
}
DCHECK(basis_factorization_.IsRefactorized());
if (feasibility_phase_) {
if (feasibility_phase) {
// This shouldn't happen by construction.
VLOG(1) << "Unbounded dual feasibility problem !?";
problem_status_ = ProblemStatus::ABNORMAL;
@@ -2910,7 +2985,7 @@ Status RevisedSimplex::DualMinimize(TimeLimit* time_limit) {
//
// During phase I, we do not need the basic variable values at all.
Fractional primal_step = 0.0;
if (feasibility_phase_) {
if (feasibility_phase) {
DualPhaseIUpdatePrice(leaving_row, entering_col);
} else {
primal_step =
@@ -3007,22 +3082,36 @@ void RevisedSimplex::PropagateParameters() {
void RevisedSimplex::DisplayIterationInfo() const {
if (parameters_.log_search_progress() || VLOG_IS_ON(1)) {
Fractional objective;
std::string name;
if (!feasibility_phase_) {
// Note that in the dual phase II, ComputeObjectiveValue() is also
// computing the dual objective even if it uses the variable values. This
// is because if we modify the bounds to make the problem primal-feasible,
// we are at the optimal and hence the two objectives are the same.
objective = ComputeInitialProblemObjectiveValue();
name = "objective";
} else if (parameters_.use_dual_simplex()) {
if (parameters_.use_dedicated_dual_feasibility_algorithm()) {
objective = reduced_costs_.ComputeSumOfDualInfeasibilities();
} else {
// The internal objective of the transformed problem is the negation
// of the sum of the dual infeasibility of the original problem.
objective = -PreciseScalarProduct(
objective_, Transpose(variable_values_.GetDenseRow()));
}
name = "sum_dual_infeasibilities";
} else {
objective = variable_values_.ComputeSumOfPrimalInfeasibilities();
name = "sum_primal_infeasibilities";
}
const int iter = feasibility_phase_
? num_iterations_
: num_iterations_ - num_feasibility_iterations_;
// Note that in the dual phase II, ComputeObjectiveValue() is also computing
// the dual objective even if it uses the variable values. This is because
// if we modify the bounds to make the problem primal-feasible, we are at
// the optimal and hence the two objectives are the same.
const Fractional objective =
!feasibility_phase_
? ComputeInitialProblemObjectiveValue()
: (parameters_.use_dual_simplex()
? reduced_costs_.ComputeSumOfDualInfeasibilities()
: variable_values_.ComputeSumOfPrimalInfeasibilities());
LOG(INFO) << (feasibility_phase_ ? "Feasibility" : "Optimization")
<< " phase, iteration # " << iter
<< ", objective = " << absl::StrFormat("%.15E", objective);
<< " phase, iteration # " << iter << ", " << name << " = "
<< absl::StrFormat("%.15E", objective);
}
}

View File

@@ -519,7 +519,8 @@ class RevisedSimplex {
// Same as Minimize() for the dual simplex algorithm.
// TODO(user): remove duplicate code between the two functions.
ABSL_MUST_USE_RESULT Status DualMinimize(TimeLimit* time_limit);
ABSL_MUST_USE_RESULT Status DualMinimize(bool feasibility_phase,
TimeLimit* time_limit);
// Experimental. This is useful in a MIP context. It performs a few degenerate
// pivots to try to minimize the fractionality of the optimal basis.

View File

@@ -97,7 +97,13 @@ void VariablesInfo::InitializeFromBasisState(ColIndex first_slack_col,
UpdateToNonBasicStatus(col, DefaultVariableStatus(col));
} else {
++num_basic_variables;
UpdateToBasicStatus(col);
// Because we just called ResetStatusInfo(), we optimize the call to
// UpdateToNonBasicStatus(col) here. In an incremental setting with
// almost no work per call, the update of all the DenseBitRow are
// visible.
variable_status_[col] = VariableStatus::BASIC;
is_basic_.Set(col, true);
}
break;
case VariableStatus::AT_LOWER_BOUND:
@@ -165,6 +171,13 @@ void VariablesInfo::MakeBoxedVariableRelevant(bool value) {
}
void VariablesInfo::UpdateToBasicStatus(ColIndex col) {
if (in_dual_phase_one_) {
// TODO(user): A bit annoying that we need to test this even if we
// don't use the dual. But the cost is minimal.
if (lower_bounds_[col] != 0.0) lower_bounds_[col] = -kInfinity;
if (upper_bounds_[col] != 0.0) upper_bounds_[col] = +kInfinity;
variable_type_[col] = ComputeVariableType(col);
}
variable_status_[col] = VariableStatus::BASIC;
is_basic_.Set(col, true);
not_basic_.Set(col, false);
@@ -254,5 +267,130 @@ void VariablesInfo::SetRelevance(ColIndex col, bool relevance) {
}
}
// This is really similar to InitializeFromBasisState() but there is less
// cases to consider for TransformToDualPhaseIProblem()/EndDualPhaseI().
// Refreshes the derived per-column information (status rows, basic/non-basic
// bit rows, relevance) after the bounds/type of 'col' were changed by
// TransformToDualPhaseIProblem() or EndDualPhaseI(). The current status is
// kept when it is still compatible with the new bounds; otherwise a valid
// default status is chosen.
void VariablesInfo::UpdateStatusForNewType(ColIndex col) {
  switch (variable_status_[col]) {
    case VariableStatus::BASIC:
      // Re-applying the basic status lets UpdateToBasicStatus() redo its
      // type/bound bookkeeping for the new problem.
      UpdateToBasicStatus(col);
      break;
    case VariableStatus::AT_LOWER_BOUND:
      if (lower_bounds_[col] == upper_bounds_[col]) {
        // Both bounds collapsed to the same value: the variable is now fixed.
        UpdateToNonBasicStatus(col, VariableStatus::FIXED_VALUE);
      } else if (lower_bounds_[col] == -kInfinity) {
        // The bound the variable was sitting at no longer exists; fall back
        // to the default status for its new type.
        UpdateToNonBasicStatus(col, DefaultVariableStatus(col));
      } else {
        // TODO(user): This is only needed for boxed variable to update their
        // relevance. It should probably be done with the type and not the
        // status update.
        UpdateToNonBasicStatus(col, variable_status_[col]);
      }
      break;
    case VariableStatus::AT_UPPER_BOUND:
      if (lower_bounds_[col] == upper_bounds_[col]) {
        UpdateToNonBasicStatus(col, VariableStatus::FIXED_VALUE);
      } else if (upper_bounds_[col] == kInfinity) {
        UpdateToNonBasicStatus(col, DefaultVariableStatus(col));
      } else {
        // TODO(user): Same as in the AT_LOWER_BOUND branch above.
        UpdateToNonBasicStatus(col, variable_status_[col]);
      }
      break;
    default:
      // Covers FREE and FIXED_VALUE statuses.
      //
      // TODO(user): boxed variable that become fixed in
      // TransformToDualPhaseIProblem() will be changed status twice. Once here,
      // and once when we make them dual feasible according to their reduced
      // cost. We should probably just do all at once.
      UpdateToNonBasicStatus(col, DefaultVariableStatus(col));
  }
}
// Replaces the variable bounds by the dual phase I ("subproblem approach")
// bounds, saving the original ones so that EndDualPhaseI() can restore them.
// The statuses are also updated so that the starting basis is dual feasible
// for the transformed problem.
void VariablesInfo::TransformToDualPhaseIProblem(
    Fractional dual_feasibility_tolerance, const DenseRow& reduced_costs) {
  DCHECK(!in_dual_phase_one_);
  in_dual_phase_one_ = true;
  saved_lower_bounds_ = lower_bounds_;
  saved_upper_bounds_ = upper_bounds_;

  // Transform the bound and type to get a new problem. If this problem has an
  // optimal value of 0.0, then the problem is dual feasible. And more
  // importantly, by keeping the same basis, we have a feasible solution of the
  // original problem.
  const ColIndex num_cols = matrix_.num_cols();
  for (ColIndex col(0); col < num_cols; ++col) {
    switch (variable_type_[col]) {
      case VariableType::FIXED_VARIABLE:  // ABSL_FALLTHROUGH_INTENDED
      case VariableType::UPPER_AND_LOWER_BOUNDED:
        // Boxed/fixed variables are fixed at zero: their reduced cost can
        // always be made dual feasible by a bound switch.
        lower_bounds_[col] = 0.0;
        upper_bounds_[col] = 0.0;
        variable_type_[col] = VariableType::FIXED_VARIABLE;
        break;
      case VariableType::LOWER_BOUNDED:
        lower_bounds_[col] = 0.0;
        upper_bounds_[col] = 1.0;
        variable_type_[col] = VariableType::UPPER_AND_LOWER_BOUNDED;
        break;
      case VariableType::UPPER_BOUNDED:
        lower_bounds_[col] = -1.0;
        upper_bounds_[col] = 0.0;
        variable_type_[col] = VariableType::UPPER_AND_LOWER_BOUNDED;
        break;
      case VariableType::UNCONSTRAINED:
        // The larger range heuristically encourages free variables to enter
        // the basis (their dual infeasibility weight is multiplied by 1000).
        lower_bounds_[col] = -1000.0;
        upper_bounds_[col] = 1000.0;
        variable_type_[col] = VariableType::UPPER_AND_LOWER_BOUNDED;
        break;
    }

    // Make sure we start with a feasible dual solution.
    // If the reduced cost is close to zero, we keep the "default" status.
    // Note that the status must be set before UpdateStatusForNewType() reads
    // it below.
    if (variable_type_[col] == VariableType::UPPER_AND_LOWER_BOUNDED) {
      if (reduced_costs[col] > dual_feasibility_tolerance) {
        variable_status_[col] = VariableStatus::AT_LOWER_BOUND;
      } else if (reduced_costs[col] < -dual_feasibility_tolerance) {
        variable_status_[col] = VariableStatus::AT_UPPER_BOUND;
      }
    }
    UpdateStatusForNewType(col);
  }
}
// Restores the original bounds saved by TransformToDualPhaseIProblem() and
// recomputes the variable types/statuses accordingly. The given reduced costs
// (which must be valid on all positions) are used to pick a dual-feasible
// status for the variables that become boxed again.
void VariablesInfo::EndDualPhaseI(Fractional dual_feasibility_tolerance,
                                  const DenseRow& reduced_costs) {
  DCHECK(in_dual_phase_one_);
  in_dual_phase_one_ = false;
  std::swap(saved_lower_bounds_, lower_bounds_);
  std::swap(saved_upper_bounds_, upper_bounds_);

  // This is to clear the memory of the saved bounds since it is no longer
  // needed. Bug fix: the second swap used to reuse the first temporary, which
  // left the old lower bounds stored in saved_upper_bounds_ instead of
  // releasing that memory.
  DenseRow empty_lower_bounds;
  DenseRow empty_upper_bounds;
  std::swap(empty_lower_bounds, saved_lower_bounds_);
  std::swap(empty_upper_bounds, saved_upper_bounds_);

  // Restore the type and update all other fields.
  const ColIndex num_cols = matrix_.num_cols();
  for (ColIndex col(0); col < num_cols; ++col) {
    variable_type_[col] = ComputeVariableType(col);

    // We make sure that the old fixed variables that are now boxed are dual
    // feasible.
    //
    // TODO(user): When there is a choice, use the previous status that might
    // have been warm-started? But this is not high priority since
    // warm-starting with a non-dual-feasible basis seems infrequent.
    if (variable_type_[col] == VariableType::UPPER_AND_LOWER_BOUNDED) {
      if (reduced_costs[col] > dual_feasibility_tolerance) {
        variable_status_[col] = VariableStatus::AT_LOWER_BOUND;
      } else if (reduced_costs[col] < -dual_feasibility_tolerance) {
        variable_status_[col] = VariableStatus::AT_UPPER_BOUND;
      }
    }
    UpdateStatusForNewType(col);
  }
}
} // namespace glop
} // namespace operations_research

View File

@@ -112,6 +112,34 @@ class VariablesInfo {
return upper_bounds_[col] - lower_bounds_[col];
}
// This is used for the (SP) method of "Progress in the dual simplex method
// for large scale LP problems: practical dual phase I algorithms". Achim
// Koberstein & Uwe H. Suhl.
//
// This just set the bounds according to the variable types:
// - Boxed variables get fixed at [0,0].
// - Upper bounded variables get [-1, 0] bounds
// - Lower bounded variables get [0, 1] bounds
// - Free variables get [-1000, 1000] to heuristically move them to the basis.
// I.e. their cost in the dual infeasibility minimization problem is
// multiplied by 1000.
//
// It then updates the status to get an initial dual feasible solution, and
// then one just has to apply the phase II algo on this problem to try to
// find a feasible solution to the original problem.
//
// Optimization: When a variable becomes basic, its non-zero bounds are
// relaxed. This is a bit hacky as it requires that the status is updated
// before the bounds are read (which is the case). It is however an important
// optimization.
//
// TODO(user): Shall we re-add the bound when the variable is moved out of
// the basis? It is not needed, but might allow for more bound flips?
void TransformToDualPhaseIProblem(Fractional dual_feasibility_tolerance,
const DenseRow& reduced_costs);
void EndDualPhaseI(Fractional dual_feasibility_tolerance,
const DenseRow& reduced_costs);
private:
// Computes the initial/default variable status from its type. A constrained
// variable is set to the lowest of its 2 bounds in absolute value.
@@ -126,6 +154,9 @@ class VariablesInfo {
// Sets the column relevance and updates num_entries_in_relevant_columns_.
void SetRelevance(ColIndex col, bool relevance);
// Used by TransformToDualPhaseIProblem()/EndDualPhaseI().
void UpdateStatusForNewType(ColIndex col);
// Problem data that should be updated from outside.
const CompactSparseMatrix& matrix_;
@@ -134,6 +165,11 @@ class VariablesInfo {
DenseRow lower_bounds_;
DenseRow upper_bounds_;
// This is just used temporarily by the dual phase I algo to save the original
// bounds.
DenseRow saved_lower_bounds_;
DenseRow saved_upper_bounds_;
// Array of variable statuses, indexed by column index.
VariableStatusRow variable_status_;
@@ -167,6 +203,10 @@ class VariablesInfo {
// Whether or not a boxed variable should be considered relevant.
bool boxed_variables_are_relevant_ = true;
// Whether we are between the calls TransformToDualPhaseIProblem() and
// EndDualPhaseI().
bool in_dual_phase_one_ = false;
DISALLOW_COPY_AND_ASSIGN(VariablesInfo);
};