Skip to content

Commit

Permalink
Merge pull request #69 from mdcoury/refactor_techniques
Browse files Browse the repository at this point in the history
Refactor techniques
  • Loading branch information
arcondello authored Dec 19, 2022
2 parents 8e2dc83 + 46f45e5 commit 0868ba4
Showing 1 changed file with 159 additions and 123 deletions.
282 changes: 159 additions & 123 deletions dwave/preprocessing/include/dwave/presolve.h
Original file line number Diff line number Diff line change
Expand Up @@ -182,8 +182,34 @@ class Presolver {
}
}

// todo: break into separate presolver
void substitute_self_loops() {
//----- One-time Techniques -----//

/// Replace every SPIN variable with an equivalent BINARY variable.
/// The substitution s = 2x - 1 is recorded in the postsolver so that
/// samples can later be mapped back to the original vartype.
void technique_spin_to_binary() {
    for (size_type v = 0; v < model_.num_variables(); ++v) {
        if (model_.vartype(v) != dimod::Vartype::SPIN) continue;
        // record s = 2x - 1, then flip the vartype in the model
        postsolver_.substitute_variable(v, 2, -1);
        model_.change_vartype(dimod::Vartype::BINARY, v);
    }
}
void technique_remove_offsets() {
for (size_type c = 0; c < model_.num_constraints(); ++c) {
auto& constraint = model_.constraint_ref(c);
if (constraint.offset()) {
constraint.set_rhs(constraint.rhs() - constraint.offset());
constraint.set_offset(0);
}
}
}
void technique_flip_constraints() {
for (size_type c = 0; c < model_.num_constraints(); ++c) {
auto& constraint = model_.constraint_ref(c);
if (constraint.sense() == dimod::Sense::GE) {
constraint.scale(-1);
}
}
}
void technique_remove_self_loops() {
std::unordered_map<index_type, index_type> mapping;

substitute_self_loops_expr(model_.objective, mapping);
Expand All @@ -198,96 +224,53 @@ class Presolver {
model_.add_linear_constraint({uv.first, uv.second}, {1, -1}, dimod::Sense::EQ, 0);
}
}
void technique_remove_invalid_markers() {
std::vector<index_type> discrete;
for (size_type c = 0; c < model_.num_constraints(); ++c) {
auto& constraint = model_.constraint_ref(c);

static bool remove_zero_biases(dimod::Expression<bias_type, index_type>& expression) {
// quadratic
std::vector<std::pair<index_type, index_type>> empty_interactions;
for (auto it = expression.cbegin_quadratic(); it != expression.cend_quadratic(); ++it) {
if (!(it->bias)) {
empty_interactions.emplace_back(it->u, it->v);
}
}
for (auto& uv : empty_interactions) {
expression.remove_interaction(uv.first, uv.second);
}
if (!constraint.marked_discrete()) continue;

// linear
std::vector<index_type> empty_variables;
for (auto& v : expression.variables()) {
if (expression.linear(v)) continue;
if (expression.num_interactions(v)) continue;
empty_variables.emplace_back(v);
}
for (auto& v : empty_variables) {
expression.remove_variable(v);
// we can check if it's well formed
if (constraint.is_onehot()) {
discrete.push_back(c);
} else {
constraint.mark_discrete(false); // if it's not one-hot, it's not discrete
}
}
// check if they overlap
size_type i = 0;
while (i < discrete.size()) {
// check if ci overlaps with any other constraints
auto& constraint = model_.constraint_ref(discrete[i]);

bool overlap = false;
for (size_type j = i + 1; j < discrete.size(); ++j) {
if (model_.constraint_ref(discrete[j]).shares_variables(constraint)) {
// we have overlap!
overlap = true;
constraint.mark_discrete(false);
break;
}
}

return empty_interactions.size() || empty_variables.size();
}
};

template <class bias_type, class index_type, class assignment_type>
Presolver<bias_type, index_type, assignment_type>::Presolver()
: model_(), postsolver_(), default_techniques_(false), detached_(false) {}

template <class bias_type, class index_type, class assignment_type>
Presolver<bias_type, index_type, assignment_type>::Presolver(model_type model)
: model_(std::move(model)), postsolver_(), default_techniques_(), detached_(false) {}

template <class bias_type, class index_type, class assignment_type>
void Presolver<bias_type, index_type, assignment_type>::apply() {
if (detached_) throw std::logic_error("model has been detached, presolver is no longer valid");

// If no techniques have been loaded, return early.
if (!default_techniques_) return;

// One time techniques ----------------------------------------------------
if (overlap) {
discrete.erase(discrete.begin() + i);
continue;
}

// *-- spin-to-binary
for (size_type v = 0; v < model_.num_variables(); ++v) {
if (model_.vartype(v) == dimod::Vartype::SPIN) {
postsolver_.substitute_variable(v, 2, -1);
model_.change_vartype(dimod::Vartype::BINARY, v);
++i;
}
}

// *-- remove offsets
for (size_type c = 0; c < model_.num_constraints(); ++c) {
auto& constraint = model_.constraint_ref(c);
if (constraint.offset()) {
constraint.set_rhs(constraint.rhs() - constraint.offset());
constraint.set_offset(0);
}
}
//----- Trivial Techniques -----//

// *-- flip >= constraints
for (size_type c = 0; c < model_.num_constraints(); ++c) {
auto& constraint = model_.constraint_ref(c);
if (constraint.sense() == dimod::Sense::GE) {
constraint.scale(-1);
}
/// Scan the model for NaN biases. Currently a stub that always
/// reports that no changes were made.
/// TODO: Implement
bool technique_check_for_nan() {
    return false;
}

// *-- remove self-loops
substitute_self_loops();

// Trivial techniques -----------------------------------------------------

bool changes = true;
const index_type max_num_rounds = 100; // todo: make configurable
for (index_type num_rounds = 0; num_rounds < max_num_rounds; ++num_rounds) {
if (!changes) break;
changes = false;

// *-- clear out 0 variables/interactions in the constraints and objective
changes = remove_zero_biases(model_.objective) || changes;
for (index_type c = 0; c < model_.num_constraints(); ++c) {
changes = remove_zero_biases(model_.constraint_ref(c)) || changes;
}

// *-- todo: check for NAN

// *-- remove single variable constraints
bool technique_remove_single_variable_constraints() {
bool ret = false;
size_type c = 0;
while (c < model_.num_constraints()) {
auto& constraint = model_.constraint_ref(c);
Expand Down Expand Up @@ -323,7 +306,7 @@ void Presolver<bias_type, index_type, assignment_type>::apply() {
// presolve does not preserve the energy in general, so it's
// better to avoid side effects and just remove.
model_.remove_constraint(c);
changes = true;
ret = true;
continue;
} else if (constraint.num_variables() == 1 && !constraint.is_soft()) {
index_type v = constraint.variables()[0];
Expand All @@ -348,14 +331,26 @@ void Presolver<bias_type, index_type, assignment_type>::apply() {
}

model_.remove_constraint(c);
changes = true;
ret = true;
continue;
}

++c;
}
return ret;
}
bool technique_remove_zero_biases() {
bool ret = false;

// *-- tighten bounds based on vartype
ret |= remove_zero_biases(model_.objective);
for (size_t c = 0; c < model_.num_constraints(); ++c) {
ret |= remove_zero_biases(model_.constraint_ref(c));
}

return ret;
}
bool technique_tighten_bounds() {
bool ret = false;
bias_type lb;
bias_type ub;
for (size_type v = 0; v < model_.num_variables(); ++v) {
Expand All @@ -366,70 +361,111 @@ void Presolver<bias_type, index_type, assignment_type>::apply() {
ub = model_.upper_bound(v);
if (ub != std::floor(ub)) {
model_.set_upper_bound(v, std::floor(ub));
changes = true;
ret = true;
}
lb = model_.lower_bound(v);
if (lb != std::ceil(lb)) {
model_.set_lower_bound(v, std::ceil(lb));
changes = true;
ret = true;
}
break;
case dimod::Vartype::REAL:
break;
}
}

// *-- remove variables that are fixed by bounds
return ret;
}
/// Fix every variable whose lower bound equals its upper bound, and
/// record the assignment in the postsolver.
/// @return true if any variable was fixed.
/// NOTE(review): if model_.fix_variable removes the variable and shifts
/// indices, the variable shifted into position v is not re-examined until
/// a later round — confirm this is intended.
bool technique_remove_fixed_variables() {
    bool ret = false;
    for (size_type v = 0; v < model_.num_variables(); ++v) {
        if (model_.lower_bound(v) != model_.upper_bound(v)) continue;
        postsolver_.fix_variable(v, model_.lower_bound(v));
        model_.fix_variable(v, model_.lower_bound(v));
        ret = true;
    }
    return ret;
}

// Cleanup

// *-- remove any invalid discrete markers
std::vector<index_type> discrete;
for (size_type c = 0; c < model_.num_constraints(); ++c) {
auto& constraint = model_.constraint_ref(c);

if (!constraint.marked_discrete()) continue;

// we can check if it's well formed
if (constraint.is_onehot()) {
discrete.push_back(c);
} else {
constraint.mark_discrete(false); // if it's not one-hot, it's not discrete
}
}
// check if they overlap
size_type i = 0;
while (i < discrete.size()) {
// check if ci overlaps with any other constraints
auto& constraint = model_.constraint_ref(discrete[i]);

bool overlap = false;
for (size_type j = i + 1; j < discrete.size(); ++j) {
if (model_.constraint_ref(discrete[j]).shares_variables(constraint)) {
// we have overlap!
overlap = true;
constraint.mark_discrete(false);
break;
/// Remove zero-valued biases from the given expression.
/// Interactions whose bias is exactly 0 are deleted, as are variables
/// that have zero linear bias and participate in no interactions.
/// @return true if any interaction or variable was removed.
static bool remove_zero_biases(dimod::Expression<bias_type, index_type>& expression) {
    // Collect first, then erase — we cannot remove interactions while
    // iterating over the quadratic view.
    std::vector<std::pair<index_type, index_type>> zero_interactions;
    for (auto it = expression.cbegin_quadratic(); it != expression.cend_quadratic(); ++it) {
        if (!(it->bias)) zero_interactions.emplace_back(it->u, it->v);
    }
    for (const auto& uv : zero_interactions) {
        expression.remove_interaction(uv.first, uv.second);
    }

    // Same two-phase pattern for variables with no linear bias and no
    // remaining interactions.
    std::vector<index_type> zero_variables;
    for (const auto& v : expression.variables()) {
        if (expression.linear(v)) continue;
        if (expression.num_interactions(v)) continue;
        zero_variables.emplace_back(v);
    }
    for (const auto& v : zero_variables) {
        expression.remove_variable(v);
    }

    return !zero_interactions.empty() || !zero_variables.empty();
}
};

/// Default-construct a presolver over an empty model.
/// No techniques are enabled (default_techniques_ is false), so apply()
/// returns without doing any work until techniques are loaded.
template <class bias_type, class index_type, class assignment_type>
Presolver<bias_type, index_type, assignment_type>::Presolver()
        : model_(), postsolver_(), default_techniques_(false), detached_(false) {}

/// Construct a presolver that takes ownership of the given model.
/// default_techniques_ is value-initialized; apply() treats it as falsy,
/// so techniques must be loaded before apply() does any work.
template <class bias_type, class index_type, class assignment_type>
Presolver<bias_type, index_type, assignment_type>::Presolver(model_type model)
        : model_(std::move(model)), postsolver_(), default_techniques_(), detached_(false) {}

/// Run the presolve pipeline on the held model.
///
/// One-time normalization techniques run first, then the trivial
/// techniques are iterated until a round makes no changes (or the round
/// cap is reached), and finally the discrete markers are validated.
///
/// @throws std::logic_error if the model has already been detached.
template <class bias_type, class index_type, class assignment_type>
void Presolver<bias_type, index_type, assignment_type>::apply() {
    if (detached_) throw std::logic_error("model has been detached, presolver is no longer valid");

    // Nothing to do unless techniques have been loaded.
    if (!default_techniques_) return;

    // One-time techniques ----------------------------------------------------
    technique_spin_to_binary();        // SPIN -> BINARY
    technique_remove_offsets();        // fold offsets into rhs
    technique_flip_constraints();      // >= becomes <=
    technique_remove_self_loops();     // eliminate x*x terms

    // Trivial techniques: iterate to a fixpoint, capped at a maximum
    // number of rounds.
    const index_type max_num_rounds = 100;  // todo: make configurable
    for (index_type round = 0; round < max_num_rounds; ++round) {
        bool changes = false;
        // |= (not ||) so every technique runs each round regardless of
        // whether an earlier one already reported changes.
        changes |= technique_remove_zero_biases();
        changes |= technique_check_for_nan();  // currently a stub
        changes |= technique_remove_single_variable_constraints();
        changes |= technique_tighten_bounds();
        changes |= technique_remove_fixed_variables();
        if (!changes) break;
    }

    // Cleanup ----------------------------------------------------------------
    technique_remove_invalid_markers();
}

template <class bias_type, class index_type, class assignment_type>
Expand Down

0 comments on commit 0868ba4

Please sign in to comment.