From f5d135cafce72133c08920826d2e772c86ce229a Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Wed, 19 Nov 2025 13:30:29 +0100 Subject: [PATCH 001/111] lp_data: remove unused code --- ortools/lp_data/lp_data_utils.cc | 31 ------------------------------- ortools/lp_data/lp_data_utils.h | 11 ----------- 2 files changed, 42 deletions(-) diff --git a/ortools/lp_data/lp_data_utils.cc b/ortools/lp_data/lp_data_utils.cc index 48829adff26..6347247b777 100644 --- a/ortools/lp_data/lp_data_utils.cc +++ b/ortools/lp_data/lp_data_utils.cc @@ -29,37 +29,6 @@ namespace operations_research { namespace glop { -void ComputeSlackVariablesValues(const LinearProgram& linear_program, - DenseRow* values) { - DCHECK(values); - DCHECK_EQ(linear_program.num_variables(), values->size()); - - // If there are no slack variable, we can give up. - if (linear_program.GetFirstSlackVariable() == kInvalidCol) return; - - const auto& transposed_matrix = linear_program.GetTransposeSparseMatrix(); - for (RowIndex row(0); row < linear_program.num_constraints(); row++) { - const ColIndex slack_variable = linear_program.GetSlackVariable(row); - - if (slack_variable == kInvalidCol) continue; - - DCHECK_EQ(0.0, linear_program.constraint_lower_bounds()[row]); - DCHECK_EQ(0.0, linear_program.constraint_upper_bounds()[row]); - - const RowIndex transposed_slack = ColToRowIndex(slack_variable); - Fractional activation = 0.0; - // Row in the initial matrix (column in the transposed). - const SparseColumn& sparse_row = - transposed_matrix.column(RowToColIndex(row)); - for (const auto& entry : sparse_row) { - if (transposed_slack == entry.index()) continue; - activation += - (*values)[RowToColIndex(entry.index())] * entry.coefficient(); - } - (*values)[slack_variable] = -activation; - } -} - // This is separated from the LinearProgram class because of a cyclic dependency // when scaling as an LP. 
void Scale(LinearProgram* lp, SparseMatrixScaler* scaler) { diff --git a/ortools/lp_data/lp_data_utils.h b/ortools/lp_data/lp_data_utils.h index 59c6d54da4e..1c4252c0590 100644 --- a/ortools/lp_data/lp_data_utils.h +++ b/ortools/lp_data/lp_data_utils.h @@ -26,17 +26,6 @@ namespace operations_research { namespace glop { -// For all constraints in linear_program, if the constraint has a slack -// variable, change its value in *values so that the constraints itself is -// satisfied. -// Note that this obviously won't always imply that the bounds of the slack -// variable itself will be satisfied. -// The code assumes (and DCHECKs) that all constraints with a slack variable -// have their upper and lower bounds both set to 0. This is ensured by -// LinearProgram::AddSlackVariablesWhereNecessary(). -void ComputeSlackVariablesValues(const LinearProgram& linear_program, - DenseRow* values); - // This is separated from LinearProgram class because of a cyclic dependency // when scaling as an LP. 
void Scale(LinearProgram* lp, SparseMatrixScaler* scaler, From 8159ee45fc5af87d38eb4c10182ff80adfc0f936 Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Wed, 19 Nov 2025 13:35:14 +0100 Subject: [PATCH 002/111] util: Delete function SortedDisjointIntervalList::GrowRightByOne which is unused --- ortools/util/sorted_interval_list.cc | 42 ---------------------------- ortools/util/sorted_interval_list.h | 11 -------- 2 files changed, 53 deletions(-) diff --git a/ortools/util/sorted_interval_list.cc b/ortools/util/sorted_interval_list.cc index 1379669bd0d..ff499581d09 100644 --- a/ortools/util/sorted_interval_list.cc +++ b/ortools/util/sorted_interval_list.cc @@ -949,48 +949,6 @@ SortedDisjointIntervalList::Iterator SortedDisjointIntervalList::InsertInterval( return it; } -SortedDisjointIntervalList::Iterator SortedDisjointIntervalList::GrowRightByOne( - int64_t value, int64_t* newly_covered) { - auto it = intervals_.upper_bound({value, kint64max}); - auto it_prev = it; - - // No interval containing or adjacent to "value" on the left (i.e. below). - if (it != begin()) { - --it_prev; - } - if (it == begin() || ((value != kint64min) && it_prev->end < value - 1)) { - *newly_covered = value; - if (it == end() || it->start != value + 1) { - // No interval adjacent to "value" on the right: insert a singleton. - return intervals_.insert(it, {value, value}); - } else { - // There is an interval adjacent to "value" on the right. Extend it by - // one. Note that we already know that there won't be a merge with another - // interval on the left, since there were no interval adjacent to "value" - // on the left. - DCHECK_EQ(it->start, value + 1); - const_cast(&(*it))->start = value; - return it; - } - } - - // At this point, "it_prev" points to an interval containing or adjacent to - // "value" on the left: grow it by one, and if it now touches the next - // interval, merge with it. 
- CHECK_NE(kint64max, it_prev->end) << "Cannot grow right by one: the interval " - "that would grow already ends at " - "kint64max"; - *newly_covered = it_prev->end + 1; - if (it != end() && it_prev->end + 2 == it->start) { - // We need to merge it_prev with 'it'. - const_cast(&(*it_prev))->end = it->end; - intervals_.erase(it); - } else { - const_cast(&(*it_prev))->end = it_prev->end + 1; - } - return it_prev; -} - template void SortedDisjointIntervalList::InsertAll(const std::vector& starts, const std::vector& ends) { diff --git a/ortools/util/sorted_interval_list.h b/ortools/util/sorted_interval_list.h index c27fc3beca0..e4b19443560 100644 --- a/ortools/util/sorted_interval_list.h +++ b/ortools/util/sorted_interval_list.h @@ -616,17 +616,6 @@ class SortedDisjointIntervalList { */ Iterator InsertInterval(int64_t start, int64_t end); - /** - * If value is in an interval, increase its end by one, otherwise insert the - * interval [value, value]. In both cases, this returns an iterator to the - * new/modified interval (possibly merged with others) and fills newly_covered - * with the new value that was just added in the union of all the intervals. - * - * If this causes an interval ending at kint64max to grow, it will die with a - * CHECK fail. - */ - Iterator GrowRightByOne(int64_t value, int64_t* newly_covered); - /** * Adds all intervals [starts[i]..ends[i]]. 
* From 96bddc82f9aa3b5dc43a682f39ec465d0a2322f7 Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Wed, 19 Nov 2025 14:21:05 +0100 Subject: [PATCH 003/111] graph: export from google3 --- ortools/graph/connected_components.cc | 2 + ortools/graph/flow_graph.h | 9 +- ortools/graph/graph.h | 134 ++++++++++++++------------ ortools/graph/graph_test.cc | 4 + ortools/graph/topologicalsorter.h | 5 +- 5 files changed, 87 insertions(+), 67 deletions(-) diff --git a/ortools/graph/connected_components.cc b/ortools/graph/connected_components.cc index 62afd3994c0..a07def46e5c 100644 --- a/ortools/graph/connected_components.cc +++ b/ortools/graph/connected_components.cc @@ -20,6 +20,7 @@ #include #include +#include "absl/log/check.h" #include "ortools/base/stl_util.h" void DenseConnectedComponentsFinder::SetNumberOfNodes(int num_nodes) { @@ -27,6 +28,7 @@ void DenseConnectedComponentsFinder::SetNumberOfNodes(int num_nodes) { if (num_nodes == old_num_nodes) { return; } + CHECK_GE(num_nodes, 0) << "Number of nodes overflowed the `int` type."; CHECK_GT(num_nodes, old_num_nodes); // Each new node starts as an isolated component: // It has itself as root. diff --git a/ortools/graph/flow_graph.h b/ortools/graph/flow_graph.h index 4bb84b7daf6..6718efa2a5e 100644 --- a/ortools/graph/flow_graph.h +++ b/ortools/graph/flow_graph.h @@ -49,10 +49,13 @@ namespace util { // TODO(user): Currently only max-flow handles this graph, but not // min-cost-flow. template -class FlowGraph : public BaseGraph { +class FlowGraph : public BaseGraph, + NodeIndexType, ArcIndexType, false> { // Note that we do NOT use negated indices for reverse arc. So we use false // for the last template argument here HasNegativeReverseArcs. 
- typedef BaseGraph Base; + typedef BaseGraph, NodeIndexType, + ArcIndexType, false> + Base; using Base::arc_capacity_; using Base::const_capacities_; using Base::node_capacity_; @@ -146,7 +149,7 @@ class FlowGraph : public BaseGraph { } void Build() { Build(nullptr); } - void Build(std::vector* permutation); + void Build(std::vector* permutation) final; // This influence what Build() does. If true, we will detect already existing // pairs of (arc, reverse_arc) and only construct new reverse arc for the one diff --git a/ortools/graph/graph.h b/ortools/graph/graph.h index 5bd1cfd66db..6d598f06139 100644 --- a/ortools/graph/graph.h +++ b/ortools/graph/graph.h @@ -201,9 +201,10 @@ class Vector; // strong integer types (e.g. `StrongInt`). Strong integer types are types that // behave like integers (comparison, arithmetic, etc.), and are (explicitly) // constructible/convertible from/to integers. -template -class BaseGraph { +template +class BaseGraph // +{ public: // Typedef so you can use Graph::NodeIndex and Graph::ArcIndex to be generic // but also to improve the readability of your code. We also recommend @@ -291,6 +292,21 @@ class BaseGraph { static constexpr ArcIndexType kNilArc = std::numeric_limits::max(); + // Some graph implementations need to be finalized with Build() before they + // can be used. Build() may change the arc indices (which had been the + // return values of previous AddArc() calls): the new index of former arc #i + // will be stored in permutation[i] if #i is smaller than permutation.size(), + // or will be unchanged otherwise. If you don't care about these, just call + // the simple no-output version Build(). + // + // Note that some implementations become immutable after calling Build(). + // By default, Build() is a no-op. 
+ virtual void Build(std::vector* permutation) { + if (permutation != nullptr) permutation->clear(); + } + void Build() { Build(nullptr); } + virtual bool IsBuilt() const { return true; } + protected: // Functions commented when defined because they are implementation details. void ComputeCumulativeSum(internal::Vector* v); @@ -643,8 +659,11 @@ struct GraphTraits { // result in the same order). // template -class ListGraph : public BaseGraph { - typedef BaseGraph Base; +class ListGraph : public BaseGraph, + NodeIndexType, ArcIndexType, false> { + typedef BaseGraph, NodeIndexType, + ArcIndexType, false> + Base; using Base::arc_capacity_; using Base::const_capacities_; using Base::node_capacity_; @@ -679,17 +698,6 @@ class ListGraph : public BaseGraph { // Note: Self referencing arcs and duplicate arcs are supported. ArcIndexType AddArc(NodeIndexType tail, NodeIndexType head); - // Some graph implementations need to be finalized with Build() before they - // can be used. After Build() is called, the arc indices (which had been the - // return values of previous AddArc() calls) may change: the new index of - // former arc #i will be stored in permutation[i] if #i is smaller than - // permutation.size() or will be unchanged otherwise. If you don't care about - // these, just call the simple no-output version Build(). - // - // Note that some implementations become immutable after calling Build(). - void Build() { Build(nullptr); } - void Build(std::vector* permutation); - // Returns the tail/head of a valid arc. NodeIndexType Tail(ArcIndexType arc) const; NodeIndexType Head(ArcIndexType arc) const; @@ -759,8 +767,11 @@ class ListGraph : public BaseGraph { // StaticGraphWithoutTail<>. This almost corresponds to a past implementation // of StaticGraph<> @CL 116144340. 
template -class StaticGraph : public BaseGraph { - typedef BaseGraph Base; +class StaticGraph : public BaseGraph, + NodeIndexType, ArcIndexType, false> { + typedef BaseGraph, NodeIndexType, + ArcIndexType, false> + Base; using Base::arc_capacity_; using Base::const_capacities_; using Base::node_capacity_; @@ -811,8 +822,9 @@ class StaticGraph : public BaseGraph { void AddNode(NodeIndexType node); ArcIndexType AddArc(NodeIndexType tail, NodeIndexType head); + void Build(std::vector* permutation) final; void Build() { Build(nullptr); } - void Build(std::vector* permutation); + bool IsBuilt() const final { return is_built_; } private: ArcIndexType DirectArcLimit(NodeIndexType node) const { @@ -839,11 +851,14 @@ class StaticGraph : public BaseGraph { // + 2 * (ArcIndexType + NodeIndexType) * arc_capacity() memory. template class ReverseArcListGraph - : public BaseGraph { + : public BaseGraph, + NodeIndexType, ArcIndexType, true> { static_assert(internal::IsSigned(), "ArcIndexType must be signed"); - typedef BaseGraph Base; + typedef BaseGraph, + NodeIndexType, ArcIndexType, true> + Base; using Base::arc_capacity_; using Base::const_capacities_; using Base::node_capacity_; @@ -954,9 +969,6 @@ class ReverseArcListGraph void AddNode(NodeIndexType node); ArcIndexType AddArc(NodeIndexType tail, NodeIndexType head); - void Build() { Build(nullptr); } - void Build(std::vector* permutation); - private: internal::Vector start_; internal::Vector reverse_start_; @@ -975,11 +987,14 @@ class ReverseArcListGraph // time lookup function). 
template class ReverseArcStaticGraph - : public BaseGraph { + : public BaseGraph, + NodeIndexType, ArcIndexType, true> { static_assert(internal::IsSigned(), "ArcIndexType must be signed"); - typedef BaseGraph Base; + typedef BaseGraph, + NodeIndexType, ArcIndexType, true> + Base; using Base::arc_capacity_; using Base::const_capacities_; using Base::node_capacity_; @@ -1069,8 +1084,9 @@ class ReverseArcStaticGraph void AddNode(NodeIndexType node); ArcIndexType AddArc(NodeIndexType tail, NodeIndexType head); + void Build(std::vector* permutation) final; void Build() { Build(nullptr); } - void Build(std::vector* permutation); + bool IsBuilt() const final { return is_built_; } private: ArcIndexType DirectArcLimit(NodeIndexType node) const { @@ -1124,41 +1140,42 @@ void Permute(const IntVector& permutation, Array* array_to_permute) { // BaseGraph implementation ---------------------------------------------------- -template -IntegerRange BaseGraph< - NodeIndexType, ArcIndexType, HasNegativeReverseArcs>::AllNodes() const { +IntegerRange +BaseGraph::AllNodes() + const { return IntegerRange(NodeIndexType(0), num_nodes_); } -template -IntegerRange -BaseGraph::AllForwardArcs() +IntegerRange BaseGraph::AllForwardArcs() const { return IntegerRange(ArcIndexType(0), num_arcs_); } -template -NodeIndexType BaseGraph::node_capacity() const { // TODO(user): Is it needed? remove completely? return the real capacities // at the cost of having a different implementation for each graphs? return node_capacity_ > num_nodes_ ? node_capacity_ : num_nodes_; } -template -ArcIndexType BaseGraph::arc_capacity() const { // TODO(user): Same questions as the ones in node_capacity(). return arc_capacity_ > num_arcs_ ? arc_capacity_ : num_arcs_; } -template -void BaseGraph::FreezeCapacities() { // TODO(user): Only define this in debug mode at the cost of having a lot // of ifndef NDEBUG all over the place? remove the function completely ? 
@@ -1169,9 +1186,9 @@ void BaseGraph -void BaseGraph:: +void BaseGraph:: ComputeCumulativeSum(internal::Vector* v) { DCHECK_EQ(v->size(), num_nodes_ + NodeIndexType(1)); ArcIndexType sum(0); @@ -1189,9 +1206,9 @@ void BaseGraph:: // - Put the head of the new arc #i in (*head)[i]. // - Put in start[i] the index of the first arc with tail >= i. // - Update "permutation" to reflect the change, unless it is NULL. -template -void BaseGraph:: +void BaseGraph:: BuildStartAndForwardHead( internal::SVector* head, internal::Vector* start, @@ -1362,14 +1379,6 @@ void ListGraph::ReserveArcs(ArcIndexType bound) { next_.reserve(bound); } -template -void ListGraph::Build( - std::vector* permutation) { - if (permutation != nullptr) { - permutation->clear(); - } -} - // StaticGraph implementation -------------------------------------------------- template @@ -1627,14 +1636,6 @@ ArcIndexType ReverseArcListGraph::AddArc( return num_arcs_++; } -template -void ReverseArcListGraph::Build( - std::vector* permutation) { - if (permutation != nullptr) { - permutation->clear(); - } -} - template class ReverseArcListGraph::OutgoingOrOppositeIncomingArcIterator { @@ -1852,8 +1853,12 @@ class ReverseArcStaticGraph< // Nodes and arcs are implicit and not stored. 
template -class CompleteGraph : public BaseGraph { - typedef BaseGraph Base; +class CompleteGraph + : public BaseGraph, + NodeIndexType, ArcIndexType, false> { + typedef BaseGraph, NodeIndexType, + ArcIndexType, false> + Base; using Base::arc_capacity_; using Base::const_capacities_; using Base::node_capacity_; @@ -1936,8 +1941,11 @@ CompleteGraph::operator[]( template class CompleteBipartiteGraph - : public BaseGraph { - typedef BaseGraph Base; + : public BaseGraph, + NodeIndexType, ArcIndexType, false> { + typedef BaseGraph, + NodeIndexType, ArcIndexType, false> + Base; using Base::arc_capacity_; using Base::const_capacities_; using Base::node_capacity_; diff --git a/ortools/graph/graph_test.cc b/ortools/graph/graph_test.cc index 3cb1c6338c9..fda59ac7611 100644 --- a/ortools/graph/graph_test.cc +++ b/ortools/graph/graph_test.cc @@ -25,6 +25,7 @@ #include #include "absl/algorithm/container.h" +#include "absl/base/log_severity.h" #include "absl/log/check.h" #include "absl/random/random.h" #include "absl/strings/str_cat.h" @@ -37,7 +38,10 @@ namespace util { +using testing::_; +using testing::AllOf; using testing::ElementsAre; +using testing::Field; using testing::Pair; using testing::UnorderedElementsAre; diff --git a/ortools/graph/topologicalsorter.h b/ortools/graph/topologicalsorter.h index e171b5c805a..d5295cd06f0 100644 --- a/ortools/graph/topologicalsorter.h +++ b/ortools/graph/topologicalsorter.h @@ -63,6 +63,9 @@ namespace graph { // StaticGraph<> in ./graph.h: FastTopologicalSort() can take any such graph as // input. // +// If you have a util_graph::Graph and don't need input validation, consider +// util_graph::TopoOrder(): it has an even simpler API and is only 1.5x slower. +// // ERRORS: returns InvalidArgumentError if the input is broken (negative or // out-of-bounds integers) or if the graph is cyclic. In the latter case, the // error message will contain "cycle". 
Note that if cycles may occur in your @@ -72,7 +75,7 @@ namespace graph { // TIE BREAKING: the returned topological order is deterministic and fixed, and // corresponds to iterating on nodes in a LIFO (Breadth-first) order. // -// Benchmark: gpaste/6147236302946304, 4-10x faster than util_graph::TopoSort(). +// Benchmark: gpaste/4894742655664128. // // EXAMPLES: // std::vector> adj = {{..}, {..}, ..}; From bb8da74956598536437790f34f7e38e177c29bc7 Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Wed, 19 Nov 2025 15:37:23 +0100 Subject: [PATCH 004/111] linear_solver: export from google3 --- .../proto_solver/sat_proto_solver.cc | 242 +++++++++--------- .../proto_solver/sat_proto_solver.h | 9 + .../linear_solver/python/linear_solver.swig | 9 + ortools/linear_solver/xpress_interface.cc | 4 +- 4 files changed, 143 insertions(+), 121 deletions(-) diff --git a/ortools/linear_solver/proto_solver/sat_proto_solver.cc b/ortools/linear_solver/proto_solver/sat_proto_solver.cc index 2eaa939b1e2..3071128eee6 100644 --- a/ortools/linear_solver/proto_solver/sat_proto_solver.cc +++ b/ortools/linear_solver/proto_solver/sat_proto_solver.cc @@ -157,68 +157,13 @@ MPSolutionResponse TimeLimitResponse(SolverLogger& logger) { } // namespace -MPSolutionResponse SatSolveProto( - LazyMutableCopy request, std::atomic* interrupt_solve, - std::function logging_callback, - std::function solution_callback, - std::function best_bound_callback) { - sat::SatParameters params; - params.set_log_search_progress(request->enable_internal_solver_output()); - - // TODO(user): We do not support all the parameters here. In particular the - // logs before the solver is called will not be appended to the response. Fix - // that, and remove code duplication for the logger config. One way should be - // to not touch/configure anything if the logger is already created while - // calling SolveCpModel() and call a common config function from here or from - // inside Solve()? 
- SolverLogger logger; - if (logging_callback != nullptr) { - logger.AddInfoLoggingCallback(logging_callback); - } - logger.EnableLogging(params.log_search_progress()); - logger.SetLogToStdOut(params.log_to_stdout()); - - // Set it now so that it can be overwritten by the solver specific parameters. - if (request->has_solver_specific_parameters()) { - // See EncodeSatParametersAsString() documentation. - if constexpr (!std::is_base_of::value) { - if (!params.MergeFromString(request->solver_specific_parameters())) { - return InvalidParametersResponse( - logger, - "solver_specific_parameters is not a valid binary stream of the " - "SatParameters proto"); - } - } else { - if (!ProtobufTextFormatMergeFromString( - request->solver_specific_parameters(), ¶ms)) { - return InvalidParametersResponse( - logger, - "solver_specific_parameters is not a valid textual representation " - "of the SatParameters proto"); - } - } - } - - // Validate parameters. - { - const std::string error = sat::ValidateParameters(params); - if (!error.empty()) { - return InvalidParametersResponse( - logger, absl::StrCat("Invalid CP-SAT parameters: ", error)); - } - } - - // Reconfigure the logger in case the solver_specific_parameters overwrite its - // configuration. Note that the invalid parameter message will be logged - // before that though according to request.enable_internal_solver_output(). 
- logger.EnableLogging(params.log_search_progress()); - logger.SetLogToStdOut(params.log_to_stdout()); - - if (request->has_solver_time_limit_seconds()) { - params.set_max_time_in_seconds(request->solver_time_limit_seconds()); - } - - std::unique_ptr time_limit = TimeLimit::FromParameters(params); +MPSolutionResponse SatSolveProtoInternal( + LazyMutableCopy request, sat::Model* sat_model, + sat::CpSolverResponse* cp_response, + std::function solution_callback) { + SolverLogger* logger = sat_model->GetOrCreate(); + sat::SatParameters& params = *sat_model->GetOrCreate(); + TimeLimit* time_limit = sat_model->GetOrCreate(); // Model validation and delta handling. MPSolutionResponse response; @@ -231,10 +176,10 @@ MPSolutionResponse SatSolveProto( // // The logging is only needed for our benchmark script, so we use UNKNOWN // here, but we could log the proper status instead. - if (logger.LoggingIsEnabled()) { + if (logger->LoggingIsEnabled()) { sat::CpSolverResponse cp_response; cp_response.set_status(FromMPSolverResponseStatus(response.status())); - SOLVER_LOG(&logger, CpSolverResponseStats(cp_response)); + SOLVER_LOG(logger, CpSolverResponseStats(cp_response)); } return response; } @@ -252,23 +197,22 @@ MPSolutionResponse SatSolveProto( // of input. if (params.mip_treat_high_magnitude_bounds_as_infinity()) { sat::ChangeLargeBoundsToInfinity(params.mip_max_valid_magnitude(), - mp_model.get(), &logger); + mp_model.get(), logger); } - if (!sat::MPModelProtoValidationBeforeConversion(params, *mp_model, - &logger)) { - return InvalidModelResponse(logger, "Extra CP-SAT validation failed."); + if (!sat::MPModelProtoValidationBeforeConversion(params, *mp_model, logger)) { + return InvalidModelResponse(*logger, "Extra CP-SAT validation failed."); } // This is good to do before any presolve. 
if (!sat::MakeBoundsOfIntegerVariablesInteger(params, mp_model.get(), - &logger)) { - return InfeasibleResponse(logger, + logger)) { + return InfeasibleResponse(*logger, "An integer variable has an empty domain"); } // Coefficients really close to zero can cause issues. // We remove them right away according to our parameters. - RemoveNearZeroTerms(params, mp_model.get(), &logger); + RemoveNearZeroTerms(params, mp_model.get(), logger); // Note(user): the LP presolvers API is a bit weird and keep a reference to // the given GlopParameters, so we need to make sure it outlive them. @@ -276,17 +220,17 @@ MPSolutionResponse SatSolveProto( std::vector> for_postsolve; if (!params.enumerate_all_solutions() && params.mip_presolve_level() > 0) { const glop::ProblemStatus status = ApplyMipPresolveSteps( - glop_params, mp_model.get(), &for_postsolve, &logger); + glop_params, mp_model.get(), &for_postsolve, logger); switch (status) { case glop::ProblemStatus::INIT: // Continue with the solve. break; case glop::ProblemStatus::PRIMAL_INFEASIBLE: return InfeasibleResponse( - logger, "Problem proven infeasible during MIP presolve"); + *logger, "Problem proven infeasible during MIP presolve"); case glop::ProblemStatus::INVALID_PROBLEM: return InvalidModelResponse( - logger, "Problem detected invalid during MIP presolve"); + *logger, "Problem detected invalid during MIP presolve"); default: // TODO(user): We put the INFEASIBLE_OR_UNBOUNBED case here since there // is no return status that exactly matches it. @@ -294,7 +238,7 @@ MPSolutionResponse SatSolveProto( // This is needed for our benchmark scripts. 
sat::CpSolverResponse cp_response; cp_response.set_status(sat::CpSolverStatus::UNKNOWN); - SOLVER_LOG(&logger, "MIP presolve: problem infeasible or unbounded."); + SOLVER_LOG(logger, "MIP presolve: problem infeasible or unbounded."); LOG(INFO) << CpSolverResponseStats(cp_response); } response.set_status(MPSolverResponseStatus::MPSOLVER_UNKNOWN_STATUS); @@ -307,22 +251,22 @@ MPSolutionResponse SatSolveProto( } if (time_limit->LimitReached()) { - return TimeLimitResponse(logger); + return TimeLimitResponse(*logger); } // We need to do that before the automatic detection of integers. - RemoveNearZeroTerms(params, mp_model.get(), &logger); + RemoveNearZeroTerms(params, mp_model.get(), logger); - SOLVER_LOG(&logger, ""); - SOLVER_LOG(&logger, "Scaling to pure integer problem."); + SOLVER_LOG(logger, ""); + SOLVER_LOG(logger, "Scaling to pure integer problem."); const int num_variables = mp_model->variable_size(); std::vector var_scaling(num_variables, 1.0); if (params.mip_automatically_scale_variables()) { - var_scaling = sat::DetectImpliedIntegers(mp_model.get(), &logger); + var_scaling = sat::DetectImpliedIntegers(mp_model.get(), logger); if (!sat::MakeBoundsOfIntegerVariablesInteger(params, mp_model.get(), - &logger)) { + logger)) { return InfeasibleResponse( - logger, "A detected integer variable has an empty domain"); + *logger, "A detected integer variable has an empty domain"); } } if (params.mip_var_scaling() != 1.0) { @@ -347,7 +291,7 @@ MPSolutionResponse SatSolveProto( } if (!all_integer) { return InvalidModelResponse( - logger, + *logger, "The model contains non-integer variables but the parameter " "'only_solve_ip' was set. 
Change this parameter if you " "still want to solve a more constrained version of the original MIP " @@ -357,8 +301,8 @@ MPSolutionResponse SatSolveProto( sat::CpModelProto cp_model; if (!ConvertMPModelProtoToCpModelProto(params, *mp_model, &cp_model, - &logger)) { - return InvalidModelResponse(logger, + logger)) { + return InvalidModelResponse(*logger, "Failed to convert model into CP-SAT model"); } DCHECK_EQ(cp_model.variables().size(), var_scaling.size()); @@ -391,30 +335,16 @@ MPSolutionResponse SatSolveProto( const bool is_maximize = mp_model->maximize(); mp_model.reset(); - params.set_max_time_in_seconds(time_limit->GetTimeLeft()); - if (time_limit->GetDeterministicTimeLeft() != - std::numeric_limits::infinity()) { - params.set_max_deterministic_time(time_limit->GetDeterministicTimeLeft()); - } - // Configure model. - sat::Model sat_model; - sat_model.Register(&logger); - sat_model.Add(NewSatParameters(params)); - if (interrupt_solve != nullptr) { - sat_model.GetOrCreate()->RegisterExternalBooleanAsLimit( - interrupt_solve); - } - - auto post_solve = [&](const sat::CpSolverResponse& cp_response) { + auto post_solve = [&](const sat::CpSolverResponse& sat_response) { MPSolution mp_solution; - mp_solution.set_objective_value(cp_response.objective_value()); + mp_solution.set_objective_value(sat_response.objective_value()); // Postsolve the bound shift and scaling. 
glop::ProblemSolution glop_solution((glop::RowIndex(old_num_constraints)), (glop::ColIndex(old_num_variables))); for (int v = 0; v < glop_solution.primal_values.size(); ++v) { glop_solution.primal_values[glop::ColIndex(v)] = - static_cast(cp_response.solution(v)) / var_scaling[v]; + static_cast(sat_response.solution(v)) / var_scaling[v]; } for (int i = for_postsolve.size(); --i >= 0;) { for_postsolve[i]->RecoverSolution(&glop_solution); @@ -427,33 +357,29 @@ MPSolutionResponse SatSolveProto( }; if (solution_callback != nullptr) { - sat_model.Add(sat::NewFeasibleSolutionObserver( - [&](const sat::CpSolverResponse& cp_response) { - solution_callback(post_solve(cp_response)); + sat_model->Add(sat::NewFeasibleSolutionObserver( + [&](const sat::CpSolverResponse& sat_response) { + solution_callback(post_solve(sat_response)); })); } - if (best_bound_callback != nullptr) { - sat_model.Add(sat::NewBestBoundCallback(best_bound_callback)); - } // Solve. - const sat::CpSolverResponse cp_response = - sat::SolveCpModel(cp_model, &sat_model); + *cp_response = sat::SolveCpModel(cp_model, sat_model); // Convert the response. // // TODO(user): Implement the row and column status. 
response.mutable_solve_info()->set_solve_wall_time_seconds( - cp_response.wall_time()); + cp_response->wall_time()); response.mutable_solve_info()->set_solve_user_time_seconds( - cp_response.user_time()); - response.set_status( - ToMPSolverResponseStatus(cp_response.status(), cp_model.has_objective())); + cp_response->user_time()); + response.set_status(ToMPSolverResponseStatus(cp_response->status(), + cp_model.has_objective())); if (response.status() == MPSOLVER_FEASIBLE || response.status() == MPSOLVER_OPTIMAL) { - response.set_objective_value(cp_response.objective_value()); - response.set_best_objective_bound(cp_response.best_objective_bound()); - MPSolution post_solved_solution = post_solve(cp_response); + response.set_objective_value(cp_response->objective_value()); + response.set_best_objective_bound(cp_response->best_objective_bound()); + MPSolution post_solved_solution = post_solve(*cp_response); *response.mutable_variable_value() = std::move(*post_solved_solution.mutable_variable_value()); } @@ -462,9 +388,9 @@ MPSolutionResponse SatSolveProto( // // TODO(user): Remove the postsolve hack of copying to a response. for (const sat::CpSolverSolution& additional_solution : - cp_response.additional_solutions()) { + cp_response->additional_solutions()) { if (absl::MakeConstSpan(additional_solution.values()) == - absl::MakeConstSpan(cp_response.solution())) { + absl::MakeConstSpan(cp_response->solution())) { continue; } double obj = cp_model.floating_point_objective().offset(); @@ -494,6 +420,84 @@ MPSolutionResponse SatSolveProto( return response; } +MPSolutionResponse SatSolveProto( + LazyMutableCopy request, std::atomic* interrupt_solve, + std::function logging_callback, + std::function solution_callback, + std::function best_bound_callback) { + sat::Model sat_model; + sat::SatParameters& params = *sat_model.GetOrCreate(); + params.set_log_search_progress(request->enable_internal_solver_output()); + + // TODO(user): We do not support all the parameters here. 
In particular the + // logs before the solver is called will not be appended to the response. Fix + // that, and remove code duplication for the logger config. One way should be + // to not touch/configure anything if the logger is already created while + // calling SolveCpModel() and call a common config function from here or from + // inside Solve()? + SolverLogger* logger = sat_model.GetOrCreate(); + if (logging_callback != nullptr) { + logger->AddInfoLoggingCallback(logging_callback); + } + logger->EnableLogging(params.log_search_progress()); + logger->SetLogToStdOut(params.log_to_stdout()); + + // Set it now so that it can be overwritten by the solver specific parameters. + if (request->has_solver_specific_parameters()) { + // See EncodeSatParametersAsString() documentation. + if constexpr (!std::is_base_of::value) { + if (!params.MergeFromString(request->solver_specific_parameters())) { + return InvalidParametersResponse( + *logger, + "solver_specific_parameters is not a valid binary stream of the " + "SatParameters proto"); + } + } else { + if (!ProtobufTextFormatMergeFromString( + request->solver_specific_parameters(), ¶ms)) { + return InvalidParametersResponse( + *logger, + "solver_specific_parameters is not a valid textual representation " + "of the SatParameters proto"); + } + } + } + + // Validate parameters. + { + const std::string error = sat::ValidateParameters(params); + if (!error.empty()) { + return InvalidParametersResponse( + *logger, absl::StrCat("Invalid CP-SAT parameters: ", error)); + } + } + + // Reconfigure the logger in case the solver_specific_parameters overwrite its + // configuration. Note that the invalid parameter message will be logged + // before that though according to request.enable_internal_solver_output(). 
+ logger->EnableLogging(params.log_search_progress()); + logger->SetLogToStdOut(params.log_to_stdout()); + + if (request->has_solver_time_limit_seconds()) { + params.set_max_time_in_seconds(request->solver_time_limit_seconds()); + } + + sat_model.GetOrCreate()->ResetLimitFromParameters(params); + + if (interrupt_solve != nullptr) { + sat_model.GetOrCreate()->RegisterExternalBooleanAsLimit( + interrupt_solve); + } + + if (best_bound_callback != nullptr) { + sat_model.Add(sat::NewBestBoundCallback(best_bound_callback)); + } + + sat::CpSolverResponse cp_response; + return SatSolveProtoInternal(std::move(request), &sat_model, &cp_response, + solution_callback); +} + std::string SatSolverVersion() { return sat::CpSatSolverVersion(); } } // namespace operations_research diff --git a/ortools/linear_solver/proto_solver/sat_proto_solver.h b/ortools/linear_solver/proto_solver/sat_proto_solver.h index 8a2ee65df19..5bc57f81b36 100644 --- a/ortools/linear_solver/proto_solver/sat_proto_solver.h +++ b/ortools/linear_solver/proto_solver/sat_proto_solver.h @@ -19,6 +19,8 @@ #include #include "ortools/linear_solver/linear_solver.pb.h" +#include "ortools/sat/cp_model.pb.h" +#include "ortools/sat/model.h" #include "ortools/util/lazy_mutable_copy.h" #include "ortools/util/logging.h" @@ -65,6 +67,13 @@ MPSolutionResponse SatSolveProto( // Returns a string that describes the version of the CP-SAT solver. std::string SatSolverVersion(); +// Internal version of SatSolveProto that can configure a sat::Model object +// before the solve and return the CpSolverResponse proto to extract statistics. 
+MPSolutionResponse SatSolveProtoInternal( + LazyMutableCopy request, sat::Model* sat_model, + sat::CpSolverResponse* cp_response, + std::function solution_callback = nullptr); + } // namespace operations_research #endif // ORTOOLS_LINEAR_SOLVER_PROTO_SOLVER_SAT_PROTO_SOLVER_H_ diff --git a/ortools/linear_solver/python/linear_solver.swig b/ortools/linear_solver/python/linear_solver.swig index 5027c78996a..53e9eda2d68 100644 --- a/ortools/linear_solver/python/linear_solver.swig +++ b/ortools/linear_solver/python/linear_solver.swig @@ -36,6 +36,15 @@ %import "ortools/util/python/vector.swig" +// Ignore warnings for operators like: Unreferenced %newobject *::operator -= +#pragma SWIG nowarn=551 + +// In some parts of the generated code, SWIG 4 decides to use vector instead of +// std::vector. +%insert(header) %{ +using std::vector; +%} + // We need to forward-declare the proto here, so that the PROTO_* macros // involving them work correctly. The order matters very much: this declaration // needs to be before the %{ #include ".../linear_solver.h" %}. 
diff --git a/ortools/linear_solver/xpress_interface.cc b/ortools/linear_solver/xpress_interface.cc index b10e4c78dee..aaa3b9abfb2 100644 --- a/ortools/linear_solver/xpress_interface.cc +++ b/ortools/linear_solver/xpress_interface.cc @@ -232,8 +232,8 @@ class XpressMPCallbackContext : public MPCallbackContext { LOG(WARNING) << "AddCut is not implemented yet in XPRESS interface"; }; void AddLazyConstraint(const LinearRange& lazy_constraint) override { - LOG(WARNING) - << "AddLazyConstraint inside Callback is not implemented yet in XPRESS interface"; + LOG(WARNING) << "AddLazyConstraint inside Callback is not implemented yet " + "in XPRESS interface"; }; double SuggestSolution( const absl::flat_hash_map& solution) override; From ebe81847a83dc8692b9ed1f024c687d84b4da22e Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Fri, 21 Nov 2025 10:06:27 +0100 Subject: [PATCH 005/111] cmake: fix configure when using -DBUILD_TESTING=OFF --- CMakeLists.txt | 2 ++ cmake/dependencies/CMakeLists.txt | 6 +++++- ortools/math_opt/solvers/CMakeLists.txt | 18 ++++++++++++------ 3 files changed, 19 insertions(+), 7 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 7f34130af3c..04366241a9a 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -196,6 +196,7 @@ CMAKE_DEPENDENT_OPTION(BUILD_re2 "Build the re2 dependency Library" OFF message(STATUS "Build re2: ${BUILD_re2}") if(BUILD_TESTING) + set(OR_TOOLS_BUILD_TESTING ON) CMAKE_DEPENDENT_OPTION(BUILD_googletest "Build googletest" OFF "NOT BUILD_DEPS" ON) CMAKE_DEPENDENT_OPTION(BUILD_benchmark "Build benchmark" OFF @@ -215,6 +216,7 @@ if(BUILD_TESTING) "NOT BUILD_DEPS" ON) endif() else() + set(OR_TOOLS_BUILD_TESTING OFF) set(BUILD_googletest OFF) set(BUILD_protobuf_matchers OFF) set(BUILD_benchmark OFF) diff --git a/cmake/dependencies/CMakeLists.txt b/cmake/dependencies/CMakeLists.txt index 78404ec1979..a045bdf805d 100644 --- a/cmake/dependencies/CMakeLists.txt +++ b/cmake/dependencies/CMakeLists.txt @@ -120,7 +120,11 @@ 
if(BUILD_absl) set(ABSL_USE_SYSTEM_INCLUDES ON) # We want Abseil to declare what C++ standard it was compiled with. set(ABSL_PROPAGATE_CXX_STD ON) - set(ABSL_BUILD_TEST_HELPERS ON) + if(OR_TOOLS_BUILD_TESTING) + set(ABSL_BUILD_TEST_HELPERS ON) + else() + set(ABSL_BUILD_TEST_HELPERS OFF) + endif() set(ABSL_USE_EXTERNAL_GOOGLETEST ON) set(ABSL_FIND_GOOGLETEST OFF) # We want Abseil to keep the INSTALL rules enabled, even though it is a diff --git a/ortools/math_opt/solvers/CMakeLists.txt b/ortools/math_opt/solvers/CMakeLists.txt index 7bb72c8bbdc..2cf42805d91 100644 --- a/ortools/math_opt/solvers/CMakeLists.txt +++ b/ortools/math_opt/solvers/CMakeLists.txt @@ -89,8 +89,10 @@ if(USE_SCIP) "$" ) # This test fail on windows and takes too long so we disable it. - set_tests_properties(cxx_math_opt_solvers_gscip_solver_test - PROPERTIES DISABLED TRUE) + if(TARGET cxx_math_opt_solvers_gscip_solver_test) + set_tests_properties(cxx_math_opt_solvers_gscip_solver_test + PROPERTIES DISABLED TRUE) + endif() endif() if(USE_GLOP) @@ -148,8 +150,10 @@ ortools_cxx_test( "$" ) # This test takes too long so we disable it. -set_tests_properties(cxx_math_opt_solvers_cp_sat_solver_test - PROPERTIES DISABLED TRUE) +if(TARGET cxx_math_opt_solvers_cp_sat_solver_test) + set_tests_properties(cxx_math_opt_solvers_cp_sat_solver_test + PROPERTIES DISABLED TRUE) +endif() ortools_cxx_test( NAME @@ -248,8 +252,10 @@ if(USE_HIGHS) "$" ) # This test fail on windows and takes too long so we disable it. 
- set_tests_properties(cxx_math_opt_solvers_highs_solver_test - PROPERTIES DISABLED TRUE) + if(TARGET cxx_math_opt_solvers_highs_solver_test) + set_tests_properties(cxx_math_opt_solvers_highs_solver_test + PROPERTIES DISABLED TRUE) + endif() endif() if(USE_XPRESS) From 7adffccc0d42f398fbc696decfe1f8675236699f Mon Sep 17 00:00:00 2001 From: Laurent Perron Date: Fri, 21 Nov 2025 10:42:58 +0100 Subject: [PATCH 006/111] [CP-SAT] more work on encodings, lrat --- ortools/sat/BUILD.bazel | 2 + ortools/sat/clause.h | 5 + ortools/sat/cp_model_postsolve.cc | 2 +- ortools/sat/cp_model_presolve.cc | 14 +- ortools/sat/cp_model_presolve_test.cc | 1 + ortools/sat/integer_search.cc | 41 ++-- ortools/sat/lrat_proof_handler.cc | 24 ++ ortools/sat/lrat_proof_handler.h | 14 +- ortools/sat/probing.cc | 341 +++++++++++++++++++++++++- ortools/sat/probing.h | 40 ++- ortools/sat/sat_inprocessing.cc | 14 +- ortools/sat/sat_solver.cc | 81 +++--- ortools/sat/sat_solver.h | 14 +- ortools/sat/variable_expand.cc | 156 +++++++----- ortools/sat/variable_expand.h | 3 +- 15 files changed, 609 insertions(+), 143 deletions(-) diff --git a/ortools/sat/BUILD.bazel b/ortools/sat/BUILD.bazel index 1a91eeefaa7..ecb69b34b4f 100644 --- a/ortools/sat/BUILD.bazel +++ b/ortools/sat/BUILD.bazel @@ -1209,6 +1209,7 @@ cc_library( "//ortools/util:saturated_arithmetic", "//ortools/util:sorted_interval_list", "@abseil-cpp//absl/algorithm:container", + "@abseil-cpp//absl/base:log_severity", "@abseil-cpp//absl/container:btree", "@abseil-cpp//absl/container:flat_hash_set", "@abseil-cpp//absl/log", @@ -1559,6 +1560,7 @@ cc_library( "//ortools/util:sorted_interval_list", "//ortools/util:strong_integers", "//ortools/util:time_limit", + "@abseil-cpp//absl/cleanup", "@abseil-cpp//absl/container:btree", "@abseil-cpp//absl/container:flat_hash_map", "@abseil-cpp//absl/container:inlined_vector", diff --git a/ortools/sat/clause.h b/ortools/sat/clause.h index 4cfda6f011e..aa9736d8566 100644 --- a/ortools/sat/clause.h +++ 
b/ortools/sat/clause.h @@ -262,6 +262,11 @@ class ClauseManager : public SatPropagator { absl::flat_hash_map* mutable_clauses_info() { return &clauses_info_; } + int LbdOrZeroIfNotRemovable(SatClause* const clause) const { + auto it = clauses_info_.find(clause); + if (it == clauses_info_.end()) return 0; + return it->second.lbd; + } // Total number of clauses inspected during calls to Propagate(). int64_t num_inspected_clauses() const { return num_inspected_clauses_; } diff --git a/ortools/sat/cp_model_postsolve.cc b/ortools/sat/cp_model_postsolve.cc index 851cea1c4ea..2529bd87c17 100644 --- a/ortools/sat/cp_model_postsolve.cc +++ b/ortools/sat/cp_model_postsolve.cc @@ -97,7 +97,7 @@ void PostsolveExactlyOne(const ConstraintProto& ct, // There must be one. void SetEnforcementLiteralToFalse(const ConstraintProto& ct, std::vector* domains) { - CHECK(!ct.enforcement_literal().empty()); + CHECK(!ct.enforcement_literal().empty()) << ProtobufShortDebugString(ct); bool has_free_enforcement_literal = false; for (const int enf : ct.enforcement_literal()) { if ((*domains)[PositiveRef(enf)].IsFixed()) continue; diff --git a/ortools/sat/cp_model_presolve.cc b/ortools/sat/cp_model_presolve.cc index d747725ba3f..2327bc3d07f 100644 --- a/ortools/sat/cp_model_presolve.cc +++ b/ortools/sat/cp_model_presolve.cc @@ -4504,6 +4504,7 @@ bool CpModelPresolver::PropagateDomainsInLinear(int ct_index, // actual changes. 
if (is_in_objective && !context_->SubstituteVariableInObjective(var, var_coeff, *ct)) { + if (context_->ModelIsUnsat()) return false; continue; } @@ -13170,13 +13171,12 @@ void CpModelPresolver::ProcessVariableOnlyUsedInEncoding(int var) { return; } - int new_exo_to_presolve_index = -1; - TryToReplaceVariableByItsEncoding(var, new_exo_to_presolve_index, context_, - solution_crush_); - if (new_exo_to_presolve_index != -1) { - if (PresolveExactlyOne(context_->working_model->mutable_constraints( - new_exo_to_presolve_index))) { - context_->UpdateConstraintVariableUsage(new_exo_to_presolve_index); + // Presolve newly created constraints. + const int old_size = context_->working_model->constraints_size(); + TryToReplaceVariableByItsEncoding(var, context_, solution_crush_); + for (int c = old_size; c < context_->working_model->constraints_size(); ++c) { + if (PresolveOneConstraint(c)) { + context_->UpdateConstraintVariableUsage(c); } } } diff --git a/ortools/sat/cp_model_presolve_test.cc b/ortools/sat/cp_model_presolve_test.cc index 97700854c0d..31e7ee18159 100644 --- a/ortools/sat/cp_model_presolve_test.cc +++ b/ortools/sat/cp_model_presolve_test.cc @@ -8158,6 +8158,7 @@ TEST(PresolveCpModelTest, SolveDiophantine) { // Should solve in < .01 second. Note that deterministic time is not // completely accurate. params.set_max_deterministic_time(.001); + params.set_num_workers(1); const CpSolverResponse response_with = SolveWithParameters(model_proto, params); diff --git a/ortools/sat/integer_search.cc b/ortools/sat/integer_search.cc index 74e1558736b..172d3c5be2a 100644 --- a/ortools/sat/integer_search.cc +++ b/ortools/sat/integer_search.cc @@ -1366,9 +1366,9 @@ bool IntegerSearchHelper::BeforeTakingDecision() { DCHECK(sat_solver_->PropagationIsDone()); // If we pushed root level deductions, we go back to level zero and call - // Propagate() to incorporate them. Note that the propagation is not strcily + // Propagate() to incorporate them. 
Note that the propagation is not strictly // needed, but it is nicer to be at fixed point when we call the level zero - // callabacks. + // callbacks. if (integer_trail_->HasPendingRootLevelDeduction()) { sat_solver_->Backtrack(0); if (!sat_solver_->Propagate()) { @@ -1843,7 +1843,8 @@ SatSolver::Status ContinuousProber::Probe() { tmp_dnf_.push_back({literal}); } ++num_at_least_one_probed_; - if (!prober_->ProbeDnf("at_least_one", tmp_dnf_)) { + if (!prober_->ProbeDnf("at_least_one", tmp_dnf_, + Prober::DnfType::kAtLeastOne, clause)) { return SatSolver::INFEASIBLE; } @@ -1866,7 +1867,8 @@ SatSolver::Status ContinuousProber::Probe() { } tmp_dnf_.push_back(tmp_literals_); ++num_at_most_one_probed_; - if (!prober_->ProbeDnf("at_most_one", tmp_dnf_)) { + if (!prober_->ProbeDnf("at_most_one", tmp_dnf_, + Prober::DnfType::kAtLeastOneOrZero)) { return SatSolver::INFEASIBLE; } @@ -1887,12 +1889,12 @@ SatSolver::Status ContinuousProber::Probe() { for (; current_bv2_ < bool_vars_.size(); ++current_bv2_) { const BooleanVariable& bv2 = bool_vars_[current_bv2_]; if (assignment.VariableIsAssigned(bv2)) continue; - if (!prober_->ProbeDnf( - "pair_of_bool_vars", - {{Literal(bv1, true), Literal(bv2, true)}, - {Literal(bv1, true), Literal(bv2, false)}, - {Literal(bv1, false), Literal(bv2, true)}, - {Literal(bv1, false), Literal(bv2, false)}})) { + if (!prober_->ProbeDnf("pair_of_bool_vars", + {{Literal(bv1, false), Literal(bv2, false)}, + {Literal(bv1, false), Literal(bv2, true)}, + {Literal(bv1, true), Literal(bv2, false)}, + {Literal(bv1, true), Literal(bv2, true)}}, + Prober::DnfType::kAtLeastOneCombination)) { return SatSolver::INFEASIBLE; } RETURN_IF_NOT_FEASIBLE(PeriodicSyncAndCheck()); @@ -1910,12 +1912,12 @@ SatSolver::Status ContinuousProber::Probe() { if (assignment.VariableIsAssigned(bv2) || bv1 == bv2) { continue; } - if (!prober_->ProbeDnf( - "rnd_pair_of_bool_vars", - {{Literal(bv1, true), Literal(bv2, true)}, - {Literal(bv1, true), Literal(bv2, false)}, - 
{Literal(bv1, false), Literal(bv2, true)}, - {Literal(bv1, false), Literal(bv2, false)}})) { + if (!prober_->ProbeDnf("rnd_pair_of_bool_vars", + {{Literal(bv1, false), Literal(bv2, false)}, + {Literal(bv1, false), Literal(bv2, true)}, + {Literal(bv1, true), Literal(bv2, false)}, + {Literal(bv1, true), Literal(bv2, true)}}, + Prober::DnfType::kAtLeastOneCombination)) { return SatSolver::INFEASIBLE; } @@ -1948,12 +1950,13 @@ SatSolver::Status ContinuousProber::Probe() { } tmp_dnf_.clear(); for (int i = 0; i < 8; ++i) { - tmp_dnf_.push_back({Literal(bv1, (i & 1) > 0), + tmp_dnf_.push_back({Literal(bv1, (i & 4) > 0), Literal(bv2, (i & 2) > 0), - Literal(bv3, (i & 4) > 0)}); + Literal(bv3, (i & 1) > 0)}); } - if (!prober_->ProbeDnf("rnd_triplet_of_bool_vars", tmp_dnf_)) { + if (!prober_->ProbeDnf("rnd_triplet_of_bool_vars", tmp_dnf_, + Prober::DnfType::kAtLeastOneCombination)) { return SatSolver::INFEASIBLE; } diff --git a/ortools/sat/lrat_proof_handler.cc b/ortools/sat/lrat_proof_handler.cc index 4a19fafd94c..0eb1e4f6ec5 100644 --- a/ortools/sat/lrat_proof_handler.cc +++ b/ortools/sat/lrat_proof_handler.cc @@ -146,8 +146,32 @@ bool LratProofHandler::AddAssumedClause(ClauseId id, return true; } +void LratProofHandler::PinClause(ClauseId id, + absl::Span clause) { + DCHECK_NE(id, kNoClauseId); + DCHECK_EQ(pinned_clause_id_, kNoClauseId); + pinned_clause_id_ = id; + if (drat_checker_ != nullptr || drat_writer_ != nullptr) { + pinned_clause_.assign(clause.begin(), clause.end()); + } + delete_pinned_clause_ = false; +} + +void LratProofHandler::UnpinClause(ClauseId id) { + DCHECK_NE(id, kNoClauseId); + DCHECK_EQ(pinned_clause_id_, id); + pinned_clause_id_ = kNoClauseId; + if (delete_pinned_clause_) { + DeleteClause(id, pinned_clause_); + } +} + void LratProofHandler::DeleteClause(ClauseId id, absl::Span clause) { + if (pinned_clause_id_ == id) { + delete_pinned_clause_ = true; + return; + } VLOG(1) << "DeleteClause: id=" << id << " literals=" << absl::StrJoin(clause, ","); 
if (drat_checker_ != nullptr) { diff --git a/ortools/sat/lrat_proof_handler.h b/ortools/sat/lrat_proof_handler.h index 43644b0ed92..8006804cb8b 100644 --- a/ortools/sat/lrat_proof_handler.h +++ b/ortools/sat/lrat_proof_handler.h @@ -63,6 +63,14 @@ class LratProofHandler { // checks are enabled and the ID is already used by another clause). bool AddAssumedClause(ClauseId id, absl::Span clause); + // Prevents the given clause from being deleted, until UnpinClause() is called + // with the same ID. At most one clause can be pinned at any time. + void PinClause(ClauseId id, absl::Span clause); + + // Unpins the clause with the given ID, and deletes it if a call to + // DeleteClause() for this clause was made since it was pinned. + void UnpinClause(ClauseId id); + // Deletes a problem or inferred clause. The clause literals are only needed // when checking DRAT. void DeleteClause(ClauseId id, absl::Span clause); @@ -92,7 +100,11 @@ class LratProofHandler { bool all_problem_clauses_loaded_ = false; int64_t num_assumed_clauses_ = 0; - bool debug_crash_on_error_; + bool debug_crash_on_error_ = false; + + ClauseId pinned_clause_id_ = kNoClauseId; + std::vector pinned_clause_; + bool delete_pinned_clause_ = false; // Only used when checking DRAT, because the DRAT checker does not support // interleaving problem and inferred clauses. 
diff --git a/ortools/sat/probing.cc b/ortools/sat/probing.cc index f8602d40035..4a048e39234 100644 --- a/ortools/sat/probing.cc +++ b/ortools/sat/probing.cc @@ -19,6 +19,7 @@ #include #include +#include "absl/cleanup/cleanup.h" #include "absl/container/btree_map.h" #include "absl/container/btree_set.h" #include "absl/container/flat_hash_map.h" @@ -62,6 +63,9 @@ Prober::Prober(Model* model) clause_manager_(model->GetOrCreate()), clause_id_generator_(model->GetOrCreate()), lrat_proof_handler_(model->Mutable()), + drat_enabled_(lrat_proof_handler_ != nullptr && + (lrat_proof_handler_->drat_check_enabled() || + lrat_proof_handler_->drat_output_enabled())), logger_(model->GetOrCreate()) {} bool Prober::ProbeBooleanVariables(const double deterministic_time_limit) { @@ -380,16 +384,40 @@ bool Prober::ProbeBooleanVariables( } bool Prober::ProbeDnf(absl::string_view name, - absl::Span> dnf) { + absl::Span> dnf, + DnfType dnf_type, const SatClause* dnf_clause) { if (dnf.size() <= 1) return true; + // dnf_clause can be deleted as a side effect of probing, but is needed for + // LRAT in FixProbedDnfLiterals(). We thus copy its literals first, and + // prevent the corresponding LRAT clause from being deleted. + ClauseId dnf_clause_id = kNoClauseId; + std::vector dnf_clause_literals; + if (dnf_clause != nullptr && lrat_proof_handler_ != nullptr) { + dnf_clause_id = clause_manager_->GetClauseId(dnf_clause); + dnf_clause_literals.assign(dnf_clause->AsSpan().begin(), + dnf_clause->AsSpan().end()); + lrat_proof_handler_->PinClause(dnf_clause_id, dnf_clause_literals); + } + absl::Cleanup cleanup = [this, dnf_clause_id] { + if (dnf_clause_id != kNoClauseId) { + lrat_proof_handler_->UnpinClause(dnf_clause_id); + } + }; + // Reset the solver in case it was already used. 
if (!sat_solver_->ResetToLevelZero()) return false; always_propagated_bounds_.clear(); always_propagated_literals_.clear(); int num_valid_conjunctions = 0; - for (const std::vector& conjunction : dnf) { + for (absl::Span conjunction : dnf) { + // TODO(user): instead of going back to level zero, we could backtrack + // to level 'n', where n is the length of the longest prefix shared between + // the current conjunction and the previous one (more or less -- conjunction + // literals which are already assigned or lead to a conflict do not + // translate to a decision). For a kAtLeastOneCombination DNF with 8 + // conjunctions, this would reduce the number of enqueues from 8*3=24 to 14. if (!sat_solver_->ResetToLevelZero()) return false; if (num_valid_conjunctions > 0 && always_propagated_bounds_.empty() && always_propagated_literals_.empty()) { @@ -398,7 +426,6 @@ bool Prober::ProbeDnf(absl::string_view name, } bool conjunction_is_valid = true; - int num_literals_enqueued = 0; const int root_trail_index = trail_.Index(); const int root_integer_trail_index = integer_trail_->Index(); for (const Literal& lit : conjunction) { @@ -407,7 +434,6 @@ bool Prober::ProbeDnf(absl::string_view name, conjunction_is_valid = false; break; } - ++num_literals_enqueued; const int decision_level_before_enqueue = sat_solver_->CurrentDecisionLevel(); sat_solver_->EnqueueDecisionAndBackjumpOnConflict(lit); @@ -461,11 +487,18 @@ bool Prober::ProbeDnf(absl::string_view name, if (!sat_solver_->ResetToLevelZero()) return false; // Fix literals implied by the dnf. 
const int previous_num_literals_fixed = num_new_literals_fixed_; - for (const LiteralIndex literal_index : always_propagated_literals_) { - const Literal lit(literal_index); - if (assignment_.LiteralIsTrue(lit)) continue; - ++num_new_literals_fixed_; - if (!sat_solver_->AddUnitClause(lit)) return false; + if (lrat_proof_handler_ != nullptr) { + if (!FixProbedDnfLiterals(dnf, always_propagated_literals_, dnf_type, + dnf_clause_id, dnf_clause_literals)) { + return false; + } + } else { + for (const LiteralIndex literal_index : always_propagated_literals_) { + const Literal lit(literal_index); + if (assignment_.LiteralIsTrue(lit)) continue; + ++num_new_literals_fixed_; + if (!sat_solver_->AddUnitClause(lit)) return false; + } } // Fix integer bounds implied by the dnf. @@ -495,6 +528,289 @@ bool Prober::ProbeDnf(absl::string_view name, return true; } +namespace { +// Sets `implication` to the clause "conjunction => literal". Returns true if +// `conjunction` does not contain `literal`. Otherwise ("conjunction => literal" +// is a tautology), returns false and leaves `implication` in an undefined +// state. 
+bool GetConjunctionImpliesLiteralClause(absl::Span conjunction, + Literal literal, + std::vector& implication) { + implication.clear(); + for (const Literal lit : conjunction) { + if (lit == literal) return false; + if (lit.Negated() == literal) continue; + implication.push_back(lit.Negated()); + } + implication.push_back(literal); + return true; +} +} // namespace + +bool Prober::FixProbedDnfLiterals( + absl::Span> dnf, + const absl::btree_set& propagated_literals, DnfType dnf_type, + ClauseId dnf_clause_id, absl::Span dnf_clause_literals) { + if (propagated_literals.empty()) return true; + + // For each propagated literal (in propagated_literals order), and for each + // conjunction, the ID of a temporary LRAT clause "conjunction => propagated + // literal" (or kNoClauseId if "conjunction" contains "propagated_literal", if + // the clause has not been created yet, or has been deleted). + CompactVectorVector& propagation_clause_ids = + tmp_dnf_clause_ids_; + propagation_clause_ids.clear(); + propagation_clause_ids.reserve(propagated_literals.size() * dnf.size()); + for (int i = 0; i < propagated_literals.size(); ++i) { + propagation_clause_ids.Add({}); + for (int j = 0; j < dnf.size(); ++j) { + propagation_clause_ids.AppendToLastVector(kNoClauseId); + } + } + // Redo the loop that was done in ProbeDnf() to compute the LRAT proofs of the + // propagated literals. This allows computing proofs only for those literals + // (on the other hand, we need to redo the propagations). Another method might + // be to make copies of the trail (one per conjunction), but how to handle + // backjump on conflict in this case?. + for (int conjunction_index = 0; conjunction_index < dnf.size(); + ++conjunction_index) { + absl::Span conjunction = dnf[conjunction_index]; + // TODO(user): same comment as in ProbeDnf(). + if (!sat_solver_->ResetToLevelZero()) return false; + + // Enqueue the literals of `conjunction` one by one. 
+ // The first literal of `conjunction` which is propagated to false, if any, + // and the ID of a temporary LRAT clause proving that the previous literals + // of `conjunction` imply this. + LiteralIndex first_false_literal = kNoLiteralIndex; + ClauseId first_false_literal_clause_id = kNoClauseId; + tmp_literals_.clear(); + for (const Literal lit : conjunction) { + tmp_literals_.push_back(lit.Negated()); + if (assignment_.LiteralIsAssigned(lit)) { + if (assignment_.LiteralIsTrue(lit)) continue; + first_false_literal = lit.Index(); + first_false_literal_clause_id = clause_id_generator_->GetNextId(); + tmp_clause_ids_.clear(); + sat_solver_->AppendClausesFixing({lit.Negated()}, &tmp_clause_ids_); + lrat_proof_handler_->AddInferredClause(first_false_literal_clause_id, + tmp_literals_, tmp_clause_ids_); + break; + } + + // If enqueuing `lit` causes a conflict, the previous literals of + // `conjunction` imply not(lit). Use the learned conflict to prove that. + auto conflict_callback = [&](ClauseId conflict_id, + absl::Span conflict_clause) { + if (first_false_literal != kNoLiteralIndex) return; + first_false_literal = lit.Index(); + first_false_literal_clause_id = clause_id_generator_->GetNextId(); + tmp_clause_ids_.clear(); + sat_solver_->AppendClausesFixing(conflict_clause, &tmp_clause_ids_); + tmp_clause_ids_.push_back(conflict_id); + lrat_proof_handler_->AddInferredClause(first_false_literal_clause_id, + tmp_literals_, tmp_clause_ids_); + }; + sat_solver_->EnqueueDecisionAndBackjumpOnConflict(lit, conflict_callback); + + if (sat_solver_->ModelIsUnsat()) return false; + if (first_false_literal != kNoLiteralIndex) break; + } + + // Use the trail to compute the LRAT proofs that `conjunction` implies the + // propagated literals. + int i = 0; + for (const LiteralIndex literal_index : propagated_literals) { + const Literal propagated_lit(literal_index); + absl::Span propagation_ids = propagation_clause_ids[i++]; + // Create the clause "conjunction => propagated_lit". 
+ if (!GetConjunctionImpliesLiteralClause(conjunction, propagated_lit, + tmp_literals_)) { + // The clause is a tautology. + continue; + } + // Compute its proof. + tmp_clause_ids_.clear(); + if (first_false_literal_clause_id != kNoClauseId) { + // If some literals of `conjunction` imply that another one is false, + // the corresponding LRAT clause is sufficient to prove that + // `conjunction` is false and thus that "conjunction => propagated_lit". + tmp_clause_ids_.push_back(first_false_literal_clause_id); + } else { + // TODO(user): processing the propagated literals in trail order + // and reusing the previous proofs to compute new ones + // could reduce the algorithmic complexity here. + sat_solver_->AppendClausesFixing({propagated_lit}, &tmp_clause_ids_); + } + // Add the inferred clause to the LratProofHandler. + const ClauseId clause_id = clause_id_generator_->GetNextId(); + lrat_proof_handler_->AddInferredClause(clause_id, tmp_literals_, + tmp_clause_ids_); + propagation_ids[conjunction_index] = clause_id; + } + if (first_false_literal_clause_id != kNoClauseId) { + if (drat_enabled_) { + // DRAT needs the clause literals to delete a clause. + tmp_literals_.clear(); + for (const Literal lit : conjunction) { + tmp_literals_.push_back(lit.Negated()); + if (lit.Index() == first_false_literal) break; + } + lrat_proof_handler_->DeleteClause(first_false_literal_clause_id, + tmp_literals_); + } else { + lrat_proof_handler_->DeleteClause(first_false_literal_clause_id, {}); + } + } + } + + if (!sat_solver_->ResetToLevelZero()) return false; + + // Fix literals implied by the dnf. 
+ int i = 0; + for (const LiteralIndex literal_index : propagated_literals) { + const Literal propagated_lit(literal_index); + absl::Span propagation_ids = propagation_clause_ids[i++]; + if (assignment_.LiteralIsTrue(propagated_lit)) continue; + + ++num_new_literals_fixed_; + switch (dnf_type) { + case DnfType::kAtLeastOne: + // `propagation_ids` contains the clauses "not(l_i) OR propagated_lit" + // for each literal l_i of the dnf. Together with the unit clauses for + // the already assigned literals of the original clause, and the clause + // itself, they prove that propagated_lit is true. + CHECK_NE(dnf_clause_id, kNoClauseId); + tmp_clause_ids_.clear(); + for (const ClauseId clause_id : propagation_ids) { + if (clause_id == kNoClauseId) continue; + tmp_clause_ids_.push_back(clause_id); + } + for (const Literal lit : dnf_clause_literals) { + if (assignment_.LiteralIsAssigned(lit)) { + tmp_clause_ids_.push_back(trail_.GetUnitClauseId(lit.Variable())); + } + } + tmp_clause_ids_.push_back(dnf_clause_id); + if (!clause_manager_->InprocessingFixLiteral(propagated_lit, + tmp_clause_ids_)) { + return false; + } + break; + case DnfType::kAtLeastOneOrZero: + // `propagation_ids` contains the clauses "not(l_i) OR propagated_lit" + // (for each single literal conjunction), and "l1 OR ... OR ln OR + // propagated_lit", in this order. These are sufficient to prove that + // propagated_lit is true. + tmp_clause_ids_.clear(); + for (const ClauseId clause_id : propagation_ids) { + if (clause_id == kNoClauseId) continue; + tmp_clause_ids_.push_back(clause_id); + } + if (!clause_manager_->InprocessingFixLiteral(propagated_lit, + tmp_clause_ids_)) { + return false; + } + break; + case DnfType::kAtLeastOneCombination: + if (!FixLiteralImpliedByAnAtLeastOneCombinationDnf(dnf, propagation_ids, + propagated_lit)) { + return false; + } + break; + } + } + + // Delete the temporary LRAT clauses. 
+ i = 0; + for (const LiteralIndex literal_index : propagated_literals) { + const Literal propagated_lit(literal_index); + const absl::Span propagation_ids = propagation_clause_ids[i++]; + for (int j = 0; j < dnf.size(); ++j) { + const ClauseId clause_id = propagation_ids[j]; + if (clause_id == kNoClauseId) continue; + if (drat_enabled_) { + // DRAT needs the clause literals to delete a clause. + GetConjunctionImpliesLiteralClause(dnf[j], propagated_lit, + tmp_literals_); + lrat_proof_handler_->DeleteClause(clause_id, tmp_literals_); + } else { + lrat_proof_handler_->DeleteClause(clause_id, {}); + } + } + } + return true; +} + +bool Prober::FixLiteralImpliedByAnAtLeastOneCombinationDnf( + absl::Span> conjunctions, + absl::Span clause_ids, Literal propagated_lit) { + const int num_clauses = clause_ids.size(); + CHECK_EQ(conjunctions.size(), num_clauses); + // Combine the clauses 2 by 2 repeatedly, to remove one literal from each + // conjunction at each step, until we get the unit clause `propagated_lit`. + // For instance, with 4 conjunctions: + // + // step1 step2 + // not(a) and not(b) => p ----> not(a) => p ----> p + // not(a) and b => p _/ / + // a and not(b) => p ----> a => p _/ + // a and b => p _/ + // + // The combined clauses are stored in `clause_ids`, and replace the ones of + // the previous step, which are deleted. At step i=0,1,..., each conjunction + // has n-i remaining literals, and we combine the clauses at indices + // (2*stride)k and (2*stride)k + stride, where stride = 2^i. This relies on + // the conjunctions being sorted as described in kAtLeastOneCombination's + // comment. + int num_literals_per_conjunction = conjunctions[0].size(); + int stride = 1; + while (true) { + for (int i = 0; i < num_clauses; i += 2 * stride) { + // The two clauses "... AND not(b) => propagated_lit" and "... AND b => + // propagated_lit" prove that "... => propagated_lit". + tmp_clause_ids_.clear(); + // Tautologies have no clause ID. 
+ if (clause_ids[i] != kNoClauseId) { + tmp_clause_ids_.push_back(clause_ids[i]); + } + if (clause_ids[i + stride] != kNoClauseId) { + tmp_clause_ids_.push_back(clause_ids[i + stride]); + } + if (tmp_clause_ids_.empty()) continue; + const ClauseId new_clause_id = clause_id_generator_->GetNextId(); + GetConjunctionImpliesLiteralClause( + absl::MakeConstSpan(conjunctions[i]) + .subspan(0, num_literals_per_conjunction - 1), + propagated_lit, tmp_literals_); + lrat_proof_handler_->AddInferredClause(new_clause_id, tmp_literals_, + tmp_clause_ids_); + // Delete the clauses used to derive the new one. + for (const int index : {i, i + stride}) { + if (clause_ids[index] == kNoClauseId) continue; + if (drat_enabled_) { + // DRAT needs the clause literals to delete a clause. + GetConjunctionImpliesLiteralClause( + absl::MakeConstSpan(conjunctions[index]) + .subspan(0, num_literals_per_conjunction), + propagated_lit, tmp_literals_); + lrat_proof_handler_->DeleteClause(clause_ids[index], tmp_literals_); + } else { + lrat_proof_handler_->DeleteClause(clause_ids[index], {}); + } + clause_ids[index] = kNoClauseId; + } + if (num_literals_per_conjunction == 1) { + return clause_manager_->InprocessingAddUnitClause(new_clause_id, + propagated_lit); + } + clause_ids[i] = new_clause_id; + } + num_literals_per_conjunction--; + stride *= 2; + } +} + bool LookForTrivialSatSolution(double deterministic_time_limit, Model* model, SolverLogger* logger) { WallTimer wall_timer; @@ -520,6 +836,11 @@ bool LookForTrivialSatSolution(double deterministic_time_limit, Model* model, double elapsed_dtime = 0.0; + // We need to keep a copy of the time limit to restore it later since we will + // reset it by calling Model::SetParameters(). 
+ TimeLimit original_time_limit; + original_time_limit.MergeWithGlobalTimeLimit(model->GetOrCreate()); + const int num_times = 1000; bool limit_reached = false; auto* random = model->GetOrCreate(); @@ -532,6 +853,7 @@ bool LookForTrivialSatSolution(double deterministic_time_limit, Model* model, // SetParameters() reset the deterministic time to zero inside time_limit. sat_solver->SetParameters(new_params); + time_limit->MergeWithGlobalTimeLimit(&original_time_limit); sat_solver->ResetDecisionHeuristic(); const SatSolver::Status result = sat_solver->SolveWithTimeLimit(time_limit); elapsed_dtime += time_limit->GetElapsedDeterministicTime(); @@ -559,6 +881,7 @@ bool LookForTrivialSatSolution(double deterministic_time_limit, Model* model, // Restore the initial parameters. sat_solver->SetParameters(initial_params); sat_solver->ResetDecisionHeuristic(); + time_limit->MergeWithGlobalTimeLimit(&original_time_limit); time_limit->AdvanceDeterministicTime(elapsed_dtime); if (!sat_solver->ResetToLevelZero()) return false; diff --git a/ortools/sat/probing.h b/ortools/sat/probing.h index 4c9c40d4c09..57572f4c1e9 100644 --- a/ortools/sat/probing.h +++ b/ortools/sat/probing.h @@ -35,6 +35,7 @@ #include "ortools/sat/model.h" #include "ortools/sat/sat_base.h" #include "ortools/sat/sat_solver.h" +#include "ortools/sat/util.h" #include "ortools/util/bitset.h" #include "ortools/util/logging.h" #include "ortools/util/time_limit.h" @@ -91,8 +92,25 @@ class Prober { // Probes the given problem DNF (disjunction of conjunctions). Since one of // the conjunction must be true, we might be able to fix literal or improve // integer bounds if all conjunction propagate the same thing. + enum DnfType { + // DNF is an existing clause 'dnf_clause' = (l1) OR ... (ln), minus its + // literals which are already assigned. + kAtLeastOne, + // DNF is the tautology "either at least one of n literals is true, or all + // of them are false": (l1) OR ... (ln) OR (not(l1) AND ... not(ln)). 
The + // single literal conjunctions must be listed first. + kAtLeastOneOrZero, + // DNF is the tautology "one of the 2^n possible assignments of n Boolean + // variables is true". The n variables must be in the same order in each + // conjunction, and their assignment in the i-th conjunction must be the + // binary representation of i. For instance, if the variables are b0 and b1, + // the conjunctions must be (not(b0) AND not(b1)), (not(b0) AND b1), + // (b0 AND not(b1)), and (b0 AND b1), in this order. + kAtLeastOneCombination, + }; bool ProbeDnf(absl::string_view name, - absl::Span> dnf); + absl::Span> dnf, DnfType type, + const SatClause* dnf_clause = nullptr); // Statistics. // They are reset each time ProbleBooleanVariables() is called. @@ -111,6 +129,23 @@ class Prober { private: bool ProbeOneVariableInternal(BooleanVariable b); + // Computes the LRAT proofs that all the `propagated_literals` can be fixed to + // true, and fixes them. + bool FixProbedDnfLiterals( + absl::Span> dnf, + const absl::btree_set& propagated_literals, DnfType type, + ClauseId dnf_clause_id, absl::Span dnf_clause_literals); + + // Computes the LRAT proof that `propagated_lit` can be fixed to true, and + // fixes it. `conjunctions` must have the property described for + // DnfType::kAtLeastOneCombination. `clause_ids` must contain the IDs of the + // LRAT clauses "conjunctions[i] => propagated_lit" (some IDs can be + // kNoClauseId, if a conjunction contains `propagated_lit`). Deletes all + // `clause_ids` and replaces these IDs with kNoClauseId values. + bool FixLiteralImpliedByAnAtLeastOneCombinationDnf( + absl::Span> conjunctions, + absl::Span clause_ids, Literal propagated_lit); + // Model owned classes. 
const Trail& trail_; const VariablesAssignment& assignment_; @@ -123,6 +158,7 @@ class Prober { ClauseManager* clause_manager_; ClauseIdGenerator* clause_id_generator_; LratProofHandler* lrat_proof_handler_; + const bool drat_enabled_; // To detect literal x that must be true because b => x and not(b) => x. // When probing b, we add all propagated literal to propagated, and when @@ -141,6 +177,8 @@ class Prober { absl::flat_hash_map, ClauseId> tmp_binary_clause_ids_; std::vector tmp_clause_ids_; + std::vector tmp_literals_; + CompactVectorVector tmp_dnf_clause_ids_; // Probing statistics. int num_decisions_ = 0; diff --git a/ortools/sat/sat_inprocessing.cc b/ortools/sat/sat_inprocessing.cc index b88af37e331..c41665b8fb6 100644 --- a/ortools/sat/sat_inprocessing.cc +++ b/ortools/sat/sat_inprocessing.cc @@ -314,13 +314,23 @@ bool Inprocessing::InprocessingRound() { sat_solver_->AdvanceDeterministicTime(time_limit_); total_dtime_ += time_limit_->GetElapsedDeterministicTime() - start_dtime; if (log_info) { + const int num_fixed = trail_->Index(); + const int num_equiv = implication_graph_->num_redundant_literals() / 2; + SOLVER_LOG( logger_, "Inprocessing.", " fixed:", FormatCounter(trail_->Index()), " equiv:", FormatCounter(implication_graph_->num_redundant_literals() / 2), - " bools:", FormatCounter(sat_solver_->NumVariables()), " implications:", + " left:", + FormatCounter(sat_solver_->NumVariables() - num_fixed - num_equiv), + " binary:", FormatCounter(implication_graph_->ComputeNumImplicationsForLog()), - " watched:", FormatCounter(clause_manager_->num_watched_clauses()), + " clauses:", + FormatCounter(clause_manager_->num_watched_clauses() - + clause_manager_->num_removable_clauses()), + "|", FormatCounter(clause_manager_->num_removable_clauses()), "|", + FormatCounter(sat_solver_->counters().num_deleted_clauses), "|", + FormatCounter(sat_solver_->num_failures()), " minimization:", FormatCounter(mini_num_clause), "|", FormatCounter(mini_num_removed), " dtime:", 
time_limit_->GetElapsedDeterministicTime() - start_dtime, diff --git a/ortools/sat/sat_solver.cc b/ortools/sat/sat_solver.cc index 7eef18ef063..7a89d68b309 100644 --- a/ortools/sat/sat_solver.cc +++ b/ortools/sat/sat_solver.cc @@ -458,7 +458,8 @@ bool SatSolver::AddLinearConstraint(bool use_lower_bound, } int SatSolver::AddLearnedClauseAndEnqueueUnitPropagation( - ClauseId clause_id, absl::Span literals, bool is_redundant) { + ClauseId clause_id, absl::Span literals, bool is_redundant, + int min_lbd_of_subsumed_clauses) { SCOPED_TIME_STAT(&stats_); if (literals.size() == 1) { @@ -485,7 +486,7 @@ int SatSolver::AddLearnedClauseAndEnqueueUnitPropagation( // Important: Even though the only literal at the last decision level has // been unassigned, its level was not modified, so ComputeLbd() works. - const int lbd = ComputeLbd(literals); + const int lbd = std::min(min_lbd_of_subsumed_clauses, ComputeLbd(literals)); if (is_redundant && lbd > parameters_->clause_cleanup_lbd_bound()) { --num_learned_clause_before_cleanup_; @@ -1026,8 +1027,9 @@ void SatSolver::ProcessCurrentConflict( // Note that this should happen after the new_conflict "proof", but before // we backtrack and add the new conflict to the clause_propagator_. - const bool is_redundant = SubsumptionsInConflictResolution( - learned_conflict_, reason_used_to_infer_the_conflict_); + const auto [is_redundant, min_lbd_of_subsumed_clauses] = + SubsumptionsInConflictResolution(learned_conflict_, + reason_used_to_infer_the_conflict_); // Backtrack and add the reason to the set of learned clause. counters_.num_literals_learned += learned_conflict_.size(); @@ -1046,7 +1048,9 @@ void SatSolver::ProcessCurrentConflict( // Create and attach the new learned clause. 
const int conflict_lbd = AddLearnedClauseAndEnqueueUnitPropagation( - learned_conflict_clause_id, learned_conflict_, is_redundant); + learned_conflict_clause_id, learned_conflict_, is_redundant, + min_lbd_of_subsumed_clauses); + restart_->OnConflict(conflict_trail_index, conflict_level, conflict_lbd); } @@ -1065,38 +1069,43 @@ bool ClauseSubsumption(absl::Span a, SatClause* b) { } // namespace -bool SatSolver::SubsumptionsInConflictResolution( +std::pair SatSolver::SubsumptionsInConflictResolution( absl::Span conflict, absl::Span reason_used) { // Note that conflict is not yet in the clauses_propagator_. tmp_literal_set_.Resize(Literal(num_variables_, true).Index()); for (const Literal l : conflict) tmp_literal_set_.Set(l); bool is_redundant = true; + int min_lbd_of_subsumed_clauses = std::numeric_limits::max(); const auto in_conflict = tmp_literal_set_.const_view(); - const auto maybe_subsume = [&is_redundant, in_conflict, conflict, this]( - SatClause* clause, - DeletionSourceForStat source) { - if (clause == nullptr || clause->size() < conflict.size()) return; - const int limit = clause->size() - conflict.size(); - int missing = 0; - for (const Literal l : clause->AsSpan()) { - if (!in_conflict[l]) { - ++missing; - if (missing > limit) break; - } - } + const auto maybe_subsume = + [&is_redundant, &min_lbd_of_subsumed_clauses, in_conflict, conflict, + this](SatClause* clause, DeletionSourceForStat source) { + if (clause == nullptr || clause->size() < conflict.size()) return; + const int limit = clause->size() - conflict.size(); + int missing = 0; + for (const Literal l : clause->AsSpan()) { + if (!in_conflict[l]) { + ++missing; + if (missing > limit) break; + } + } - // This algorithm relies of never having duplicate literals in a clause. - // TODO(user): double check that this is always the case. 
- if (missing <= limit) { - ++counters_.num_subsumed_clauses; - DCHECK(ClauseSubsumption(conflict, clause)); - if (!clauses_propagator_->IsRemovable(clause)) { - is_redundant = false; - } - clauses_propagator_->LazyDelete(clause, source); - } - }; + // This algorithm relies of never having duplicate literals in a clause. + // TODO(user): double check that this is always the case. + if (missing <= limit) { + ++counters_.num_subsumed_clauses; + DCHECK(ClauseSubsumption(conflict, clause)); + if (!clauses_propagator_->IsRemovable(clause)) { + is_redundant = false; + } else { + min_lbd_of_subsumed_clauses = + std::min(min_lbd_of_subsumed_clauses, + clauses_propagator_->LbdOrZeroIfNotRemovable(clause)); + } + clauses_propagator_->LazyDelete(clause, source); + } + }; // This is faster than conflict analysis, and stronger than the old assumption // mecanism we had. This is because once the conflict is minimized, we might @@ -1122,7 +1131,7 @@ bool SatSolver::SubsumptionsInConflictResolution( for (const Literal l : conflict) tmp_literal_set_.Clear(l); clauses_propagator_->CleanUpWatchers(); - return is_redundant; + return {is_redundant, min_lbd_of_subsumed_clauses}; } void SatSolver::FillLratProofForLearnedConflict( @@ -1935,7 +1944,6 @@ void SatSolver::AppendClausesFixing( std::vector& non_unit_clause_ids = tmp_clause_ids_for_append_clauses_fixing_; non_unit_clause_ids.clear(); - clause_ids->clear(); while (true) { // Find next marked literal to expand from the trail. @@ -1946,14 +1954,18 @@ void SatSolver::AppendClausesFixing( if (trail_index < min_trail_index) break; const Literal marked_literal = (*trail_)[trail_index--]; - // Stop at decisions and at literals implied by the decision at their level. + // Stop at decisions, at literals fixed at root, and at literals implied by + // the decision at their level. 
const int level = trail_->Info(marked_literal.Variable()).level; - min_level = std::min(min_level, level); + if (level > 0) min_level = std::min(min_level, level); if (trail_->AssignmentType(marked_literal.Variable()) == AssignmentType::kSearchDecision) { continue; } - DCHECK_GT(level, 0); + if (level == 0) { + clause_ids->push_back(trail_->GetUnitClauseId(marked_literal.Variable())); + continue; + } const Literal level_decision = decisions_[level - 1].literal; ClauseId clause_id = binary_implication_graph_->GetClauseId( level_decision.Negated(), marked_literal); @@ -3316,6 +3328,7 @@ void SatSolver::CleanClauseDatabaseIfNeeded() { VLOG(1) << "Database cleanup, #protected:" << num_protected_clauses << " #kept:" << num_kept_clauses << " #deleted:" << num_deleted_clauses; + counters_.num_deleted_clauses += num_deleted_clauses; } std::string SatStatusString(SatSolver::Status status) { diff --git a/ortools/sat/sat_solver.h b/ortools/sat/sat_solver.h index 53dbe45ee31..d1c9b2a9317 100644 --- a/ortools/sat/sat_solver.h +++ b/ortools/sat/sat_solver.h @@ -481,6 +481,7 @@ class SatSolver { int64_t num_literals_forgotten = 0; int64_t num_subsumed_clauses = 0; int64_t num_cleanup_rounds = 0; + int64_t num_deleted_clauses = 0; // TryToMinimizeClause() stats. int64_t minimization_num_clauses = 0; @@ -680,8 +681,8 @@ class SatSolver { // // Returns the LBD of the clause. int AddLearnedClauseAndEnqueueUnitPropagation( - ClauseId clause_id, absl::Span literals, - bool is_redundant); + ClauseId clause_id, absl::Span literals, bool is_redundant, + int min_lbd_of_subsumed_clauses); // Creates a new decision which corresponds to setting the given literal to // True and Enqueue() this change. @@ -713,10 +714,11 @@ class SatSolver { // Use the learned conflict to subsumes some clause. // - // Returns false iff the conflict is no longer "redundant" and need to be kept - // forever. 
- bool SubsumptionsInConflictResolution(absl::Span conflict, - absl::Span reason_used); + // Returns the pair . + // A clause will be marked as redundant only if all the subsumed clauses are. + std::pair SubsumptionsInConflictResolution( + absl::Span conflict, + absl::Span reason_used); // Fills `clause_ids` with the LRAT proof for the learned conflict. void FillLratProofForLearnedConflict(std::vector* clause_ids); diff --git a/ortools/sat/variable_expand.cc b/ortools/sat/variable_expand.cc index 93be2ac81c2..f62db162d13 100644 --- a/ortools/sat/variable_expand.cc +++ b/ortools/sat/variable_expand.cc @@ -22,6 +22,7 @@ #include #include "absl/algorithm/container.h" +#include "absl/base/log_severity.h" #include "absl/container/btree_map.h" #include "absl/log/check.h" #include "absl/log/log.h" @@ -271,55 +272,96 @@ void OrderEncoding::InsertGeLiteral(int64_t value, int literal) { // x_ge_1 => not(x == 0) // x_ge_3 => not(x == 1) && not(x == 2) && x_ge_1 // x_ge_4 => not(x == 3) && x_ge_3 +// +// x_le_0 => x == 0 +// x_le_1 => x == 1 || x_le_0 +// x_le_3 => x == 3 || x == 2 || x_le_1 +// +// x_ge_1 => x == 1 || x == 2 || x_ge_3 +// x_ge_3 => x == 3 || x == x_ge_4 +// x_ge_4 => x == 4 +// +// If we have x_le_0 and x_ge_4, then we can infer x_le_4 and x_ge_0. +// This is done by the code below. void OrderEncoding::CreateAllOrderEncodingLiterals( const ValueEncoding& values) { CollectAllOrderEncodingValues(); - for (const auto& [value, literal] : encoded_le_literal_) { - DCHECK(values.encoding().contains(value)); - DCHECK(values.encoding().contains(var_domain_.ValueAtOrAfter(value + 1))) - << "Cannot find " << var_domain_.ValueAtOrAfter(value + 1) - << " for var <= " << value; + if (encoded_le_literal_.empty()) return; + + if (DEBUG_MODE) { + // Check that all values are present in the encoding. 
+ for (const auto& [value, literal] : encoded_le_literal_) { + CHECK(values.encoding().contains(value)); + CHECK(values.encoding().contains(var_domain_.ValueAtOrAfter(value + 1))) + << "Cannot find " << var_domain_.ValueAtOrAfter(value + 1) + << " for var <= " << value; + } } - if (!encoded_le_literal_.empty()) { - const int64_t max_ge_value = - var_domain_.ValueAtOrAfter(encoded_le_literal_.rbegin()->first + 1); - ConstraintProto* not_le = nullptr; - ConstraintProto* not_ge = context_->working_model->add_constraints(); - for (const auto [value, eq_literal] : values.encoding()) { - const int ne_literal = NegatedRef(eq_literal); - - // Lower or equal. - if (not_le != nullptr) { - not_le->mutable_bool_and()->add_literals(ne_literal); + + const int64_t max_le_value = encoded_le_literal_.rbegin()->first; + const int64_t max_ge_value = var_domain_.ValueAtOrAfter(max_le_value + 1); + ConstraintProto* not_le = nullptr; + ConstraintProto* not_ge = context_->working_model->add_constraints(); + ConstraintProto* le = context_->working_model->add_constraints(); + ConstraintProto* ge = nullptr; + + for (const auto [value, eq_literal] : values.encoding()) { + const int ne_literal = NegatedRef(eq_literal); + + // Lower or equal. 
+ if (not_le != nullptr) { + not_le->mutable_bool_and()->add_literals(ne_literal); + } + if (le != nullptr) { + le->mutable_bool_or()->add_literals(eq_literal); + } + + const auto it_le = encoded_le_literal_.find(value); + if (it_le != encoded_le_literal_.end()) { + const int le_literal = it_le->second; + + DCHECK(le != nullptr); + le->add_enforcement_literal(le_literal); + if (value < max_le_value) { + le = context_->working_model->add_constraints(); + le->mutable_bool_or()->add_literals(le_literal); + } else { + le = nullptr; } - const auto it_le = encoded_le_literal_.find(value); - if (it_le != encoded_le_literal_.end()) { - const int le_literal = it_le->second; - if (not_le != nullptr) { - not_le->mutable_bool_and()->add_literals(le_literal); - } - not_le = context_->AddEnforcedConstraint({le_literal}); + + if (not_le != nullptr) { + not_le->mutable_bool_and()->add_literals(le_literal); } + not_le = context_->AddEnforcedConstraint({le_literal}); + } + + // Greater or equal. + if (value > var_domain_.Min()) { // var >= min is not created.. + const auto it_ge = + encoded_le_literal_.find(var_domain_.ValueAtOrBefore(value - 1)); + if (it_ge != encoded_le_literal_.end()) { + const int ge_literal = NegatedRef(it_ge->second); - // Greater or equal. - if (value > var_domain_.Min()) { // var >= min is not created.. 
- const auto it_ge = - encoded_le_literal_.find(var_domain_.ValueAtOrBefore(value - 1)); - if (it_ge != encoded_le_literal_.end()) { - const int ge_literal = NegatedRef(it_ge->second); - DCHECK(not_ge != nullptr); - not_ge->add_enforcement_literal(ge_literal); - if (value != max_ge_value) { - not_ge = context_->working_model->add_constraints(); - not_ge->mutable_bool_and()->add_literals(ge_literal); - } else { - not_ge = nullptr; - } + if (ge != nullptr) { + ge->mutable_bool_or()->add_literals(ge_literal); + } + ge = context_->AddEnforcedConstraint({ge_literal}); + + DCHECK(not_ge != nullptr); + not_ge->add_enforcement_literal(ge_literal); + if (value != max_ge_value) { + not_ge = context_->working_model->add_constraints(); + not_ge->mutable_bool_and()->add_literals(ge_literal); + } else { + not_ge = nullptr; } } - if (not_ge != nullptr) { - not_ge->mutable_bool_and()->add_literals(ne_literal); - } + } + if (ge != nullptr) { + ge->mutable_bool_or()->add_literals(eq_literal); + } + if (not_ge != nullptr) { + not_ge->mutable_bool_and()->add_literals(ne_literal); } } } @@ -445,8 +487,7 @@ bool ProcessEncodingConstraints( return true; } -void TryToReplaceVariableByItsEncoding(int var, int& new_exo_to_presolve_index, - PresolveContext* context, +void TryToReplaceVariableByItsEncoding(int var, PresolveContext* context, SolutionCrush& solution_crush) { const Domain var_domain = context->DomainOf(var); std::vector> linear_ones_by_type( @@ -495,13 +536,6 @@ void TryToReplaceVariableByItsEncoding(int var, int& new_exo_to_presolve_index, return; } - // Compute how many literals are implied by the complex domains. 
- int num_implied_literals_in_complex_domains = 0; - for (const EncodingLinear1& info : lin_domain) { - num_implied_literals_in_complex_domains += - var_domain.Size() - info.rhs.Size(); - } - VLOG(2) << "ProcessVariableOnlyUsedInEncoding(): var(" << var << "): " << var_domain << ", size: " << var_domain.Size() << ", #encoded_values: " << values.encoded_values().size() @@ -513,25 +547,20 @@ void TryToReplaceVariableByItsEncoding(int var, int& new_exo_to_presolve_index, << ", #var_in_domain: " << lin_domain.size() << ", var_in_objective: " << var_in_objective << ", var_has_positive_objective_coefficient: " - << var_has_positive_objective_coefficient - << ", #implied_literals_in_complex_domains: " - << num_implied_literals_in_complex_domains; + << var_has_positive_objective_coefficient; if (full_encoding_is_needed && (!values.is_fully_encoded() || - num_implied_literals_in_complex_domains > 2500)) { + var_domain.Size() * lin_domain.size() > 2500)) { VLOG(2) << "Abort - fully_encode_var: " << values.is_fully_encoded() - << ", num_implied_literals_in_complex_domains: " - << num_implied_literals_in_complex_domains << ", full_encoding_is_not_too_expensive: " << full_encoding_is_not_too_expensive << ", full_encoding_is_needed: " << full_encoding_is_needed; if (var_in_objective) { context->UpdateRuleStats( - "TODO variables: only used in constrained objective and in encoding"); + "TODO variables: only used in objective and in complex encodings"); } else { context->UpdateRuleStats( - "TODO variables: only used in large value encoding and order " - "encoding."); + "TODO variables: only used in large complex encodings"); } return; } @@ -565,12 +594,18 @@ void TryToReplaceVariableByItsEncoding(int var, int& new_exo_to_presolve_index, } for (const EncodingLinear1& info_in : lin_domain) { - ConstraintProto* imply = + ConstraintProto* forces = + context->AddEnforcedConstraint({info_in.enforcement_literal}); + for (const int64_t v : info_in.rhs.Values()) { + 
forces->mutable_bool_or()->add_literals(values.literal(v)); + } + + ConstraintProto* remove = context->AddEnforcedConstraint({info_in.enforcement_literal}); const Domain implied_complement = var_domain.IntersectionWith(info_in.rhs.Complement()); for (const int64_t v : implied_complement.Values()) { - imply->mutable_bool_and()->add_literals(NegatedRef(values.literal(v))); + remove->mutable_bool_and()->add_literals(NegatedRef(values.literal(v))); } } @@ -710,7 +745,6 @@ void TryToReplaceVariableByItsEncoding(int var, int& new_exo_to_presolve_index, } // This must be done after we removed all the constraint containing var. - new_exo_to_presolve_index = context->working_model->constraints_size(); ConstraintProto* exo = context->working_model->add_constraints(); BoolArgumentProto* arg = exo->mutable_exactly_one(); for (const auto& [value, literal] : values.encoding()) { diff --git a/ortools/sat/variable_expand.h b/ortools/sat/variable_expand.h index 1f4db009ccc..0afb21c57b1 100644 --- a/ortools/sat/variable_expand.h +++ b/ortools/sat/variable_expand.h @@ -89,8 +89,7 @@ class OrderEncoding { absl::btree_map encoded_le_literal_; }; -void TryToReplaceVariableByItsEncoding(int var, int& new_exo_to_presolve_index, - PresolveContext* context, +void TryToReplaceVariableByItsEncoding(int var, PresolveContext* context, SolutionCrush& solution_crush); } // namespace sat From e89fcb39a77ef4c65798c21224bf1b062e024c3f Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Fri, 21 Nov 2025 10:24:47 +0100 Subject: [PATCH 007/111] gurobi: add 13.0.0 and 12.0.3 dynamic support --- ortools/third_party_solvers/gurobi_environment.cc | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/ortools/third_party_solvers/gurobi_environment.cc b/ortools/third_party_solvers/gurobi_environment.cc index 007e7665271..4c1e1334a05 100644 --- a/ortools/third_party_solvers/gurobi_environment.cc +++ b/ortools/third_party_solvers/gurobi_environment.cc @@ -336,10 +336,16 @@ void 
LoadGurobiFunctions(DynamicLibrary* gurobi_dynamic_library) { std::vector GurobiDynamicLibraryPotentialPaths() { std::vector potential_paths; + // clang-format off const std::vector kGurobiVersions = { - "1202", "1201", "1200", "1103", "1102", "1101", "1100", - "1003", "1002", "1001", "1000", "952", "951", "950", - "911", "910", "903", "902", "811", "801", "752"}; + "1300", + "1203", "1202", "1201", "1200", + "1103", "1102", "1101", "1100", + "1003", "1002", "1001", "1000", + "952", "951", "950", + "911", "910", "903", "902", + "811", "801", "752"}; + // clang-format on potential_paths.reserve(kGurobiVersions.size() * 3); // Look for libraries pointed by GUROBI_HOME first. From 5fc3aff39a57d7eb84a134244b4f93a4f00ee027 Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Wed, 19 Nov 2025 15:38:53 +0100 Subject: [PATCH 008/111] routing: export from google3 --- ortools/constraint_solver/routing.cc | 39 +++- ortools/constraint_solver/routing_ils.cc | 171 +++++++++++------- ortools/constraint_solver/routing_ils.h | 45 ++--- ortools/constraint_solver/routing_ils.proto | 44 ++--- .../constraint_solver/routing_parameters.cc | 102 ++++++----- 5 files changed, 234 insertions(+), 167 deletions(-) diff --git a/ortools/constraint_solver/routing.cc b/ortools/constraint_solver/routing.cc index 3e91a0d8677..2f2e3c08d82 100644 --- a/ortools/constraint_solver/routing.cc +++ b/ortools/constraint_solver/routing.cc @@ -3580,12 +3580,29 @@ const Assignment* RoutingModel::SolveWithIteratedLocalSearch( return true; }; - std::unique_ptr acceptance_criterion = - MakeNeighborAcceptanceCriterion(*this, parameters, &rnd); + const IteratedLocalSearchParameters& ils_parameters = + parameters.iterated_local_search_parameters(); + + const absl::Duration final_duration = + !parameters.has_time_limit() + ? 
absl::InfiniteDuration() + : util_time::DecodeGoogleApiProto(parameters.time_limit()).value(); + + NeighborAcceptanceCriterion::SearchState final_search_state = { + final_duration, parameters.solution_limit()}; + + std::unique_ptr reference_acceptance_criterion = + MakeNeighborAcceptanceCriterion( + *this, ils_parameters.reference_solution_acceptance_strategy(), + final_search_state, &rnd); + + std::unique_ptr best_acceptance_criterion = + MakeNeighborAcceptanceCriterion( + *this, ils_parameters.best_solution_acceptance_strategy(), + final_search_state, &rnd); const bool improve_perturbed_solution = - parameters.iterated_local_search_parameters() - .improve_perturbed_solution(); + ils_parameters.improve_perturbed_solution(); while (update_time_limits() && explored_solutions < parameters.solution_limit()) { @@ -3610,15 +3627,17 @@ const Assignment* RoutingModel::SolveWithIteratedLocalSearch( } } - if (neighbor_solution->ObjectiveValue() < best_solution->ObjectiveValue()) { + absl::Duration elapsed_time = + absl::Milliseconds(solver_->wall_time() - start_time_ms); + + if (best_acceptance_criterion->Accept({elapsed_time, explored_solutions}, + neighbor_solution, best_solution)) { best_solution->CopyIntersection(neighbor_solution); } - absl::Duration elapsed_time = - absl::Milliseconds(solver_->wall_time() - start_time_ms); - if (acceptance_criterion->Accept({elapsed_time, explored_solutions}, - neighbor_solution, - last_accepted_solution)) { + if (reference_acceptance_criterion->Accept( + {elapsed_time, explored_solutions}, neighbor_solution, + last_accepted_solution)) { // Note that the perturbation_db is using last_accepted_solution as // reference assignment. By updating last_accepted_solution here we thus // also keep the perturbation_db reference assignment up to date. 
diff --git a/ortools/constraint_solver/routing_ils.cc b/ortools/constraint_solver/routing_ils.cc index a8cc1801fec..ddfcebcf196 100644 --- a/ortools/constraint_solver/routing_ils.cc +++ b/ortools/constraint_solver/routing_ils.cc @@ -385,33 +385,20 @@ class LinearCoolingSchedule : public CoolingSchedule { // Returns a cooling schedule based on the given input parameters. std::unique_ptr MakeCoolingSchedule( - const RoutingModel& model, const RoutingSearchParameters& parameters, + const RoutingModel& model, + const SimulatedAnnealingAcceptanceStrategy& sa_params, + const NeighborAcceptanceCriterion::SearchState& final_search_state, std::mt19937* rnd) { - const absl::Duration final_duration = - !parameters.has_time_limit() - ? absl::InfiniteDuration() - : util_time::DecodeGoogleApiProto(parameters.time_limit()).value(); - - const SimulatedAnnealingParameters& sa_params = - parameters.iterated_local_search_parameters() - .simulated_annealing_parameters(); - - NeighborAcceptanceCriterion::SearchState final_search_state{ - final_duration, parameters.solution_limit()}; - const auto [initial_temperature, final_temperature] = GetSimulatedAnnealingTemperatures(model, sa_params, rnd); switch (sa_params.cooling_schedule_strategy()) { case CoolingScheduleStrategy::EXPONENTIAL: return std::make_unique( - NeighborAcceptanceCriterion::SearchState{final_duration, - parameters.solution_limit()}, - initial_temperature, final_temperature); + final_search_state, initial_temperature, final_temperature); case CoolingScheduleStrategy::LINEAR: return std::make_unique( - std::move(final_search_state), initial_temperature, - final_temperature); + final_search_state, initial_temperature, final_temperature); default: LOG(DFATAL) << "Unsupported cooling schedule strategy."; return nullptr; @@ -444,6 +431,43 @@ class SimulatedAnnealingAcceptanceCriterion std::uniform_real_distribution probability_distribution_; }; +// Acceptance criterion in which a candidate assignment is accepted when it has 
+// all nodes performed. +class AllNodesPerformedAcceptanceCriterion + : public NeighborAcceptanceCriterion { + public: + explicit AllNodesPerformedAcceptanceCriterion(const RoutingModel& model) + : model_(model) {} + + bool Accept([[maybe_unused]] const SearchState& search_state, + const Assignment* candidate, + [[maybe_unused]] const Assignment* reference) override { + for (RoutingModel::DisjunctionIndex d(0); + d < model_.GetNumberOfDisjunctions(); ++d) { + // This solution avoids counting non-fixed variables as inactive. + int num_possible_actives = model_.GetDisjunctionNodeIndices(d).size(); + for (const int64_t node : model_.GetDisjunctionNodeIndices(d)) { + if (candidate->Value(model_.NextVar(node)) == node) { + --num_possible_actives; + } + } + if (num_possible_actives < model_.GetDisjunctionMaxCardinality(d)) { + return false; + } + } + + for (int node = 0; node < model_.Size(); ++node) { + if (model_.IsStart(node) || model_.IsEnd(node)) continue; + if (!model_.GetDisjunctionIndices(node).empty()) continue; + if (candidate->Value(model_.NextVar(node)) == node) return false; + } + return true; + } + + private: + const RoutingModel& model_; +}; + // Returns whether the given assignment has at least one performed node. bool HasPerformedNodes(const RoutingModel& model, const Assignment& assignment) { @@ -479,21 +503,21 @@ double ComputeAverageNonEmptyRouteSize(const RoutingModel& model, // performed visits. 
int64_t PickRandomPerformedVisit( const RoutingModel& model, const Assignment& assignment, std::mt19937& rnd, - std::uniform_int_distribution& customer_dist) { - DCHECK_EQ(customer_dist.min(), 0); - DCHECK_EQ(customer_dist.max(), model.Size() - model.vehicles()); + std::uniform_int_distribution& node_dist) { + DCHECK_EQ(node_dist.min(), 0); + DCHECK_EQ(node_dist.max(), model.Size() - model.vehicles()); if (!HasPerformedNodes(model, assignment)) { return -1; } - int64_t customer; + int64_t node; do { - customer = customer_dist(rnd); - } while (model.IsStart(customer) || - assignment.Value(model.VehicleVar(customer)) == -1); - DCHECK(!model.IsEnd(customer)); - return customer; + node = node_dist(rnd); + } while (model.IsStart(node) || + assignment.Value(model.VehicleVar(node)) == -1); + DCHECK(!model.IsEnd(node)); + return node; } } // namespace @@ -546,20 +570,20 @@ void RoutingSolution::InitializeRouteInfoIfNeeded(int vehicle) { prevs_[end] = prev; } -bool RoutingSolution::BelongsToInitializedRoute(int64_t node_index) const { - DCHECK_EQ(nexts_[node_index] != -1, prevs_[node_index] != -1); - return nexts_[node_index] != -1; +bool RoutingSolution::BelongsToInitializedRoute(int64_t node) const { + DCHECK_EQ(nexts_[node] != -1, prevs_[node] != -1); + return nexts_[node] != -1; } -int64_t RoutingSolution::GetNextNodeIndex(int64_t node_index) const { - return BelongsToInitializedRoute(node_index) - ? nexts_[node_index] - : assignment_->Value(model_.NextVar(node_index)); +int64_t RoutingSolution::GetNextNodeIndex(int64_t node) const { + return BelongsToInitializedRoute(node) + ? 
nexts_[node] + : assignment_->Value(model_.NextVar(node)); } -int64_t RoutingSolution::GetInitializedPrevNodeIndex(int64_t node_index) const { - DCHECK(BelongsToInitializedRoute(node_index)); - return prevs_[node_index]; +int64_t RoutingSolution::GetInitializedPrevNodeIndex(int64_t node) const { + DCHECK(BelongsToInitializedRoute(node)); + return prevs_[node]; } int RoutingSolution::GetRouteSize(int vehicle) const { @@ -567,37 +591,40 @@ int RoutingSolution::GetRouteSize(int vehicle) const { return route_sizes_[vehicle]; } -bool RoutingSolution::CanBeRemoved(int64_t node_index) const { - return !model_.IsStart(node_index) && !model_.IsEnd(node_index) && - GetNextNodeIndex(node_index) != node_index; +bool RoutingSolution::CanBeRemoved(int64_t node) const { + return !model_.IsStart(node) && !model_.IsEnd(node) && + GetNextNodeIndex(node) != node; } -void RoutingSolution::RemoveNode(int64_t node_index) { - DCHECK(BelongsToInitializedRoute(node_index)); +void RoutingSolution::RemoveNode(int64_t node) { + DCHECK(BelongsToInitializedRoute(node)); - DCHECK_NE(nexts_[node_index], node_index); - DCHECK_NE(prevs_[node_index], node_index); + DCHECK_NE(nexts_[node], node); + DCHECK_NE(prevs_[node], node); - const int64_t next = nexts_[node_index]; - const int64_t prev = prevs_[node_index]; + const int64_t next = nexts_[node]; + const int64_t prev = prevs_[node]; - const int vehicle = assignment_->Value(model_.VehicleVar(node_index)); + const int vehicle = assignment_->Value(model_.VehicleVar(node)); --route_sizes_[vehicle]; DCHECK_GE(route_sizes_[vehicle], 0); nexts_[prev] = next; prevs_[next] = prev; - nexts_[node_index] = node_index; - prevs_[node_index] = node_index; + nexts_[node] = node; + prevs_[node] = node; } -void RoutingSolution::RemovePerformedPickupDeliverySibling(int64_t customer) { - DCHECK(!model_.IsStart(customer)); - DCHECK(!model_.IsEnd(customer)); +void RoutingSolution::RemovePerformedPickupDeliverySibling(int64_t node) { + DCHECK(!model_.IsStart(node)); + 
DCHECK(!model_.IsEnd(node)); if (const std::optional sibling_node = model_.GetFirstMatchingPickupDeliverySibling( - customer, [this](int64_t node) { return CanBeRemoved(node); }); + node, + [this](int64_t candidate_node) { + return CanBeRemoved(candidate_node); + }); sibling_node.has_value()) { const int sibling_vehicle = assignment_->Value(model_.VehicleVar(sibling_node.value())); @@ -753,7 +780,7 @@ CloseRoutesRemovalRuinProcedure::CloseRoutesRemovalRuinProcedure( /*only_sort_neighbors_for_partial_neighborhoods=*/false})), num_routes_(num_routes), rnd_(*rnd), - customer_dist_(0, model->Size() - model->vehicles()), + node_dist_(0, model->Size() - model->vehicles()), removed_routes_(model->vehicles()) {} std::function CloseRoutesRemovalRuinProcedure::Ruin( @@ -765,7 +792,7 @@ std::function CloseRoutesRemovalRuinProcedure::Ruin( } const int64_t seed_node = - PickRandomPerformedVisit(model_, *assignment, rnd_, customer_dist_); + PickRandomPerformedVisit(model_, *assignment, rnd_, node_dist_); if (seed_node == -1) { return [this, assignment](int64_t node) { return assignment->Value(model_.NextVar(node)); @@ -799,7 +826,7 @@ std::function CloseRoutesRemovalRuinProcedure::Ruin( } return [this, assignment](int64_t node) { - // Shortcut removed routes to remove associated customers. + // Shortcut removed routes to remove associated nodes. 
if (model_.IsStart(node)) { const int route = assignment->Value(model_.VehicleVar(node)); if (removed_routes_[route]) { @@ -822,7 +849,7 @@ RandomWalkRemovalRuinProcedure::RandomWalkRemovalRuinProcedure( /*only_sort_neighbors_for_partial_neighborhoods=*/false})), rnd_(*rnd), walk_length_(walk_length), - customer_dist_(0, model->Size() - model->vehicles()) {} + node_dist_(0, model->Size() - model->vehicles()) {} std::function RandomWalkRemovalRuinProcedure::Ruin( const Assignment* assignment) { @@ -833,7 +860,7 @@ std::function RandomWalkRemovalRuinProcedure::Ruin( } int64_t curr_node = - PickRandomPerformedVisit(model_, *assignment, rnd_, customer_dist_); + PickRandomPerformedVisit(model_, *assignment, rnd_, node_dist_); if (curr_node == -1) { return [this, assignment](int64_t node) { return assignment->Value(model_.NextVar(node)); @@ -908,8 +935,8 @@ int64_t RandomWalkRemovalRuinProcedure::GetNextNodeToRemove( return neighbor; } - // If we are not able to find a customer in another route, we are ok - // with taking a customer from the current one. + // If we are not able to find a node in another route, we are ok + // with taking a node from the current one. // Note that it can be -1 if no removable neighbor was found for the input // node. 
return same_route_closest_neighbor; @@ -929,7 +956,7 @@ SISRRuinProcedure::SISRRuinProcedure(RoutingModel* model, std::mt19937* rnd, /*add_vehicle_starts_to_neighbors=*/false, /*add_vehicle_ends_to_neighbors=*/false, /*only_sort_neighbors_for_partial_neighborhoods=*/false})), - customer_dist_(0, model->Size() - model->vehicles()), + node_dist_(0, model->Size() - model->vehicles()), probability_dist_(0.0, 1.0), ruined_routes_(model->vehicles()), routing_solution_(*model) {} @@ -937,7 +964,7 @@ SISRRuinProcedure::SISRRuinProcedure(RoutingModel* model, std::mt19937* rnd, std::function SISRRuinProcedure::Ruin( const Assignment* assignment) { const int64_t seed_node = - PickRandomPerformedVisit(model_, *assignment, rnd_, customer_dist_); + PickRandomPerformedVisit(model_, *assignment, rnd_, node_dist_); if (seed_node == -1) { return [this, assignment](int64_t node) { return assignment->Value(model_.NextVar(node)); @@ -1126,15 +1153,19 @@ DecisionBuilder* MakePerturbationDecisionBuilder( } std::unique_ptr MakeNeighborAcceptanceCriterion( - const RoutingModel& model, const RoutingSearchParameters& parameters, + const RoutingModel& model, const AcceptanceStrategy& acceptance_strategy, + const NeighborAcceptanceCriterion::SearchState& final_search_state, std::mt19937* rnd) { - CHECK(parameters.has_iterated_local_search_parameters()); - switch (parameters.iterated_local_search_parameters().acceptance_strategy()) { - case AcceptanceStrategy::GREEDY_DESCENT: + switch (acceptance_strategy.strategy_case()) { + case AcceptanceStrategy::kGreedyDescent: return std::make_unique(); - case AcceptanceStrategy::SIMULATED_ANNEALING: + case AcceptanceStrategy::kSimulatedAnnealing: return std::make_unique( - MakeCoolingSchedule(model, parameters, rnd), rnd); + MakeCoolingSchedule(model, acceptance_strategy.simulated_annealing(), + final_search_state, rnd), + rnd); + case AcceptanceStrategy::kAllNodesPerformed: + return std::make_unique(model); default: LOG(DFATAL) << "Unsupported 
acceptance strategy."; return nullptr; @@ -1142,8 +1173,8 @@ std::unique_ptr MakeNeighborAcceptanceCriterion( } std::pair GetSimulatedAnnealingTemperatures( - const RoutingModel& model, const SimulatedAnnealingParameters& sa_params, - std::mt19937* rnd) { + const RoutingModel& model, + const SimulatedAnnealingAcceptanceStrategy& sa_params, std::mt19937* rnd) { if (!sa_params.automatic_temperatures()) { return {sa_params.initial_temperature(), sa_params.final_temperature()}; } diff --git a/ortools/constraint_solver/routing_ils.h b/ortools/constraint_solver/routing_ils.h index 8d5800d18ab..c783355c4ff 100644 --- a/ortools/constraint_solver/routing_ils.h +++ b/ortools/constraint_solver/routing_ils.h @@ -43,30 +43,30 @@ class RoutingSolution { // vehicle, if not already done. void InitializeRouteInfoIfNeeded(int vehicle); - // Returns whether node_index belongs to a route that has been initialized. - bool BelongsToInitializedRoute(int64_t node_index) const; + // Returns whether node belongs to a route that has been initialized. + bool BelongsToInitializedRoute(int64_t node) const; - // Returns the next node index of the given node_index. - int64_t GetNextNodeIndex(int64_t node_index) const; + // Returns the next node index of the given node. + int64_t GetNextNodeIndex(int64_t node) const; - // Returns the previous node index of the given node_index. - // This must be called for node_index belonging to initialized routes. - int64_t GetInitializedPrevNodeIndex(int64_t node_index) const; + // Returns the previous node index of the given node. + // This must be called for node belonging to initialized routes. + int64_t GetInitializedPrevNodeIndex(int64_t node) const; // Returns the number of visits performed by the given vehicle. // This must be called for a vehicle associated with an initialized route. int GetRouteSize(int vehicle) const; - // Returns whether node_index can be removed from the solution. 
- // This must be called for node_index belonging to initialized routes. - bool CanBeRemoved(int64_t node_index) const; + // Returns whether node can be removed from the solution. + // This must be called for node belonging to initialized routes. + bool CanBeRemoved(int64_t node) const; - // Removes the node with the given node_index. - // This must be called for node_index belonging to initialized routes. - void RemoveNode(int64_t node_index); + // Removes the node with the given node. + // This must be called for node belonging to initialized routes. + void RemoveNode(int64_t node); - // Removes the performed sibling pickup or delivery of customer, if any. - void RemovePerformedPickupDeliverySibling(int64_t customer); + // Removes the performed sibling pickup or delivery of node, if any. + void RemovePerformedPickupDeliverySibling(int64_t node); // Randomly returns the next or previous visit of the given performed // visit. Returns -1 if there are no other available visits. When the @@ -115,7 +115,7 @@ class CloseRoutesRemovalRuinProcedure : public RuinProcedure { int num_neighbors_for_route_selection); // Returns next accessors where at most num_routes routes have been shortcut, // i.e., next(shortcut route begin) = shortcut route end. - // Next accessors for customers belonging to shortcut routes are still set to + // Next accessors for nodes belonging to shortcut routes are still set to // their original value and should not be used. 
std::function Ruin(const Assignment* assignment) override; @@ -124,7 +124,7 @@ class CloseRoutesRemovalRuinProcedure : public RuinProcedure { const RoutingModel::NodeNeighborsByCostClass* const neighbors_manager_; const size_t num_routes_; std::mt19937& rnd_; - std::uniform_int_distribution customer_dist_; + std::uniform_int_distribution node_dist_; SparseBitset removed_routes_; }; @@ -149,7 +149,7 @@ class RandomWalkRemovalRuinProcedure : public RuinProcedure { const RoutingModel::NodeNeighborsByCostClass* const neighbors_manager_; std::mt19937& rnd_; const int walk_length_; - std::uniform_int_distribution customer_dist_; + std::uniform_int_distribution node_dist_; std::bernoulli_distribution boolean_dist_; }; @@ -233,7 +233,7 @@ class SISRRuinProcedure : public RuinProcedure { int avg_num_removed_visits_; double bypass_factor_; const RoutingModel::NodeNeighborsByCostClass* const neighbors_manager_; - std::uniform_int_distribution customer_dist_; + std::uniform_int_distribution node_dist_; std::bernoulli_distribution boolean_dist_; std::uniform_real_distribution probability_dist_; SparseBitset ruined_routes_; @@ -269,14 +269,15 @@ class NeighborAcceptanceCriterion { // Returns a neighbor acceptance criterion based on the given parameters. std::unique_ptr MakeNeighborAcceptanceCriterion( - const RoutingModel& model, const RoutingSearchParameters& parameters, + const RoutingModel& model, const AcceptanceStrategy& acceptance_strategy, + const NeighborAcceptanceCriterion::SearchState& final_search_state, std::mt19937* rnd); // Returns initial and final simulated annealing temperatures according to the // given simulated annealing input parameters. 
std::pair GetSimulatedAnnealingTemperatures( - const RoutingModel& model, const SimulatedAnnealingParameters& sa_params, - std::mt19937* rnd); + const RoutingModel& model, + const SimulatedAnnealingAcceptanceStrategy& sa_params, std::mt19937* rnd); } // namespace operations_research diff --git a/ortools/constraint_solver/routing_ils.proto b/ortools/constraint_solver/routing_ils.proto index 468c52fb652..0a1369e007d 100644 --- a/ortools/constraint_solver/routing_ils.proto +++ b/ortools/constraint_solver/routing_ils.proto @@ -36,7 +36,7 @@ message SpatiallyCloseRoutesRuinStrategy { optional uint32 num_ruined_routes = 3; } -// Ruin strategy that removes a number of customers by performing a random walk +// Ruin strategy that removes a number of nodes by performing a random walk // on the underlying routing solution. More precisely, starting from a randomly // selected seed visit, the walk is extended by either moving within the // same route or by jumping to a visit served by a different neighboring @@ -132,7 +132,7 @@ message SISRRuinStrategy { // paper is \bar{c} and the suggested value is 10. optional uint32 avg_num_removed_visits = 2; - // Value in [0, 1] ruling the number of preserved customers in the split + // Value in [0, 1] ruling the number of preserved nodes in the split // sequence removal. The parameter name in the paper is \alpha and the // suggested value is 0.01. optional double bypass_factor = 3; @@ -262,8 +262,12 @@ message CoolingScheduleStrategy { } } -// Specifies the behavior of a simulated annealing acceptance strategy. -message SimulatedAnnealingParameters { +// Acceptance strategy in which only improving solutions are accepted. +message GreedyDescentAcceptanceStrategy {} + +// Acceptance strategy in which solutions are accepted with a probability that +// depends on its quality and on the current state of the search. 
+message SimulatedAnnealingAcceptanceStrategy { // Determines the speed at which the temperature changes from initial to // final. CoolingScheduleStrategy.Value cooling_schedule_strategy = 1; @@ -287,21 +291,17 @@ message SimulatedAnnealingParameters { optional bool automatic_temperatures = 4; } -// Determines when a neighbor solution, obtained by the application of a -// perturbation and improvement step to a reference solution, is used to -// replace the reference solution. -message AcceptanceStrategy { - enum Value { - // Unspecified value. - UNSET = 0; - - // Accepts only solutions that are improving with respect to the reference - // one. - GREEDY_DESCENT = 1; +// Acceptance strategy in which a solution is accepted only if all nodes +// are performed. Disjunctions are respected when several nodes can be +// performed. +message AllNodesPerformedAcceptanceStrategy {} - // Accepts a candidate solution with a probability that depends on its - // quality and on the current state of the search. - SIMULATED_ANNEALING = 2; +// Determines when a candidate solution replaces another one. +message AcceptanceStrategy { + oneof strategy { + GreedyDescentAcceptanceStrategy greedy_descent = 1; + SimulatedAnnealingAcceptanceStrategy simulated_annealing = 2; + AllNodesPerformedAcceptanceStrategy all_nodes_performed = 3; } } @@ -320,9 +320,9 @@ message IteratedLocalSearchParameters { // Determines when the neighbor solution S', possibly improved if // `improve_perturbed_solution` is true, replaces the reference solution S. - AcceptanceStrategy.Value acceptance_strategy = 4; + AcceptanceStrategy reference_solution_acceptance_strategy = 4; - // Parameters to customize a simulated annealing acceptance strategy. These - // parameters are required iff the acceptance_strategy is SIMULATED_ANNEALING. - SimulatedAnnealingParameters simulated_annealing_parameters = 5; + // Determines when the neighbor solution S' replaces the best solution found + // so far. 
+ AcceptanceStrategy best_solution_acceptance_strategy = 5; } diff --git a/ortools/constraint_solver/routing_parameters.cc b/ortools/constraint_solver/routing_parameters.cc index 92a1f90d52f..c9733516626 100644 --- a/ortools/constraint_solver/routing_parameters.cc +++ b/ortools/constraint_solver/routing_parameters.cc @@ -78,9 +78,10 @@ IteratedLocalSearchParameters CreateDefaultIteratedLocalSearchParameters() { rr->set_route_selection_min_neighbors(10); rr->set_route_selection_max_neighbors(100); ils.set_improve_perturbed_solution(true); - ils.set_acceptance_strategy(AcceptanceStrategy::GREEDY_DESCENT); - SimulatedAnnealingParameters* sa = - ils.mutable_simulated_annealing_parameters(); + ils.mutable_best_solution_acceptance_strategy()->mutable_greedy_descent(); + SimulatedAnnealingAcceptanceStrategy* sa = + ils.mutable_reference_solution_acceptance_strategy() + ->mutable_simulated_annealing(); sa->set_cooling_schedule_strategy(CoolingScheduleStrategy::EXPONENTIAL); sa->set_initial_temperature(100.0); sa->set_final_temperature(0.01); @@ -506,55 +507,70 @@ void FindErrorsInIteratedLocalSearchParameters( } } - if (ils.acceptance_strategy() == AcceptanceStrategy::UNSET) { + struct NamedAcceptanceStrategy { + std::string name; + AcceptanceStrategy acceptance_strategy; + }; + std::vector named_acceptance_strategies; + + if (!ils.has_reference_solution_acceptance_strategy()) { errors.emplace_back( - StrCat("Invalid value for " - "iterated_local_search_parameters.acceptance_strategy: ", - ils.acceptance_strategy())); + StrCat("Unset value for " + "iterated_local_search_parameters.reference_solution_acceptance_" + "strategy.")); + } else { + named_acceptance_strategies.push_back( + {"reference_solution", ils.reference_solution_acceptance_strategy()}); } - if (ils.acceptance_strategy() == AcceptanceStrategy::SIMULATED_ANNEALING) { - if (!ils.has_simulated_annealing_parameters()) { - errors.emplace_back( - StrCat("iterated_local_search_parameters.acceptance_strategy is ", - 
AcceptanceStrategy::SIMULATED_ANNEALING, - " but " - "iterated_local_search_parameters.simulated_annealing_" - "parameters are missing.")); - return; - } - - const SimulatedAnnealingParameters& sa_params = - ils.simulated_annealing_parameters(); + if (!ils.has_best_solution_acceptance_strategy()) { + errors.emplace_back(StrCat( + "Unset value for " + "iterated_local_search_parameters.best_solution_acceptance_strategy.")); + } else { + named_acceptance_strategies.push_back( + {"best_solution", ils.best_solution_acceptance_strategy()}); + } - if (sa_params.cooling_schedule_strategy() == - CoolingScheduleStrategy::UNSET) { - errors.emplace_back( - StrCat("Invalid value for " - "iterated_local_search_parameters.simulated_annealing_" - "parameters.cooling_schedule_strategy: ", - sa_params.cooling_schedule_strategy())); - } + for (const auto& [name, acceptance_strategy] : named_acceptance_strategies) { + if (acceptance_strategy.has_simulated_annealing()) { + const SimulatedAnnealingAcceptanceStrategy& sa_params = + acceptance_strategy.simulated_annealing(); - if (!sa_params.automatic_temperatures()) { - if (sa_params.initial_temperature() < sa_params.final_temperature()) { + if (sa_params.cooling_schedule_strategy() == + CoolingScheduleStrategy::UNSET) { errors.emplace_back( - "iterated_local_search_parameters.simulated_annealing_parameters." - "initial_temperature cannot be lower than " - "iterated_local_search_parameters.simulated_annealing_parameters." - "final_temperature."); + StrCat("Invalid value for " + "iterated_local_search_parameters.", + name, + "_acceptance_strategy.simulated_annealing.cooling_schedule_" + "strategy: ", + sa_params.cooling_schedule_strategy())); } - if (sa_params.initial_temperature() < 1e-9) { - errors.emplace_back( - "iterated_local_search_parameters.simulated_annealing_parameters." 
- "initial_temperature cannot be lower than 1e-9."); - } + if (!sa_params.automatic_temperatures()) { + if (sa_params.initial_temperature() < sa_params.final_temperature()) { + errors.emplace_back(StrCat( + "iterated_local_search_parameters.", name, + "_acceptance_strategy.simulated_annealing." + "initial_temperature cannot be lower than " + "iterated_local_search_parameters.simulated_annealing_parameters." + "final_temperature.")); + } - if (sa_params.final_temperature() < 1e-9) { - errors.emplace_back( - "iterated_local_search_parameters.simulated_annealing_parameters." - "final_temperature cannot be lower than 1e-9."); + if (sa_params.initial_temperature() < 1e-9) { + errors.emplace_back( + StrCat("iterated_local_search_parameters.", name, + "_acceptance_strategy.simulated_annealing." + "initial_temperature cannot be lower than 1e-9.")); + } + + if (sa_params.final_temperature() < 1e-9) { + errors.emplace_back( + StrCat("iterated_local_search_parameters.", name, + "_acceptance_strategy.simulated_annealing." + "final_temperature cannot be lower than 1e-9.")); + } } } } From 8e56816c8a13cd577d9cea2b4c0e247024a16816 Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Fri, 21 Nov 2025 14:15:31 +0100 Subject: [PATCH 009/111] cmake: fixup --- ortools/math_opt/solvers/CMakeLists.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ortools/math_opt/solvers/CMakeLists.txt b/ortools/math_opt/solvers/CMakeLists.txt index 2cf42805d91..4f1343e053b 100644 --- a/ortools/math_opt/solvers/CMakeLists.txt +++ b/ortools/math_opt/solvers/CMakeLists.txt @@ -89,7 +89,7 @@ if(USE_SCIP) "$" ) # This test fail on windows and takes too long so we disable it. - if(TARGET cxx_math_opt_solvers_gscip_solver_test) + if(TARGET math_opt_solvers_gscip_solver_test) set_tests_properties(cxx_math_opt_solvers_gscip_solver_test PROPERTIES DISABLED TRUE) endif() @@ -150,7 +150,7 @@ ortools_cxx_test( "$" ) # This test takes too long so we disable it. 
-if(TARGET cxx_math_opt_solvers_cp_sat_solver_test) +if(TARGET math_opt_solvers_cp_sat_solver_test) set_tests_properties(cxx_math_opt_solvers_cp_sat_solver_test PROPERTIES DISABLED TRUE) endif() @@ -252,7 +252,7 @@ if(USE_HIGHS) "$" ) # This test fail on windows and takes too long so we disable it. - if(TARGET cxx_math_opt_solvers_highs_solver_test) + if(TARGET math_opt_solvers_highs_solver_test) set_tests_properties(cxx_math_opt_solvers_highs_solver_test PROPERTIES DISABLED TRUE) endif() From 5d31a932d6fb8c4da6c5e8c866e1b014606e6e9f Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Fri, 21 Nov 2025 14:25:40 +0100 Subject: [PATCH 010/111] cmake: bump scip to v924 note: also bump soplex to v7.1.6 tested using: `./build/bin/math_opt_solvers_gscip_solver_test` --- Dependencies.txt | 2 +- cmake/dependencies/CMakeLists.txt | 8 +-- patches/{scip-v923.patch => scip-v924.patch} | 0 ...oplex-v7.1.3.patch => soplex-v7.1.6.patch} | 63 +++++-------------- 4 files changed, 19 insertions(+), 54 deletions(-) rename patches/{scip-v923.patch => scip-v924.patch} (100%) rename patches/{soplex-v7.1.3.patch => soplex-v7.1.6.patch} (81%) diff --git a/Dependencies.txt b/Dependencies.txt index fb93cab93da..30033bd6ff9 100644 --- a/Dependencies.txt +++ b/Dependencies.txt @@ -10,7 +10,7 @@ Cgl=0.60.9 Cbc=2.10.12 GLPK=5.0 HiGHS=v1.12.0 -Scip=v923 +Scip=v924 # Python pybind11=v2.13.6 pybind11_abseil=v202402.0 diff --git a/cmake/dependencies/CMakeLists.txt b/cmake/dependencies/CMakeLists.txt index a045bdf805d..2e6c0639560 100644 --- a/cmake/dependencies/CMakeLists.txt +++ b/cmake/dependencies/CMakeLists.txt @@ -353,11 +353,11 @@ if(BUILD_soplex) FetchContent_Declare( soplex GIT_REPOSITORY "https://github.com/scipopt/soplex.git" - GIT_TAG "release-713" + GIT_TAG "release-716" GIT_SHALLOW TRUE UPDATE_COMMAND git reset --hard PATCH_COMMAND git apply --ignore-whitespace - "${CMAKE_CURRENT_LIST_DIR}/../../patches/soplex-v7.1.3.patch" + 
"${CMAKE_CURRENT_LIST_DIR}/../../patches/soplex-v7.1.6.patch" ) set(SHARED ON CACHE BOOL "Soplex param" FORCE) set(ZLIB ON CACHE BOOL "Soplex param" FORCE) @@ -383,11 +383,11 @@ if(BUILD_SCIP) FetchContent_Declare( scip GIT_REPOSITORY "https://github.com/scipopt/scip.git" - GIT_TAG "v923" + GIT_TAG "v924" GIT_SHALLOW TRUE UPDATE_COMMAND git reset --hard PATCH_COMMAND git apply --ignore-whitespace - "${CMAKE_CURRENT_LIST_DIR}/../../patches/scip-v923.patch" + "${CMAKE_CURRENT_LIST_DIR}/../../patches/scip-v924.patch" ) set(SHARED ON CACHE BOOL "Scip param" FORCE) set(ZLIB ON CACHE BOOL "Scip param" FORCE) diff --git a/patches/scip-v923.patch b/patches/scip-v924.patch similarity index 100% rename from patches/scip-v923.patch rename to patches/scip-v924.patch diff --git a/patches/soplex-v7.1.3.patch b/patches/soplex-v7.1.6.patch similarity index 81% rename from patches/soplex-v7.1.3.patch rename to patches/soplex-v7.1.6.patch index 2df6a368417..7e4c3839f72 100644 --- a/patches/soplex-v7.1.3.patch +++ b/patches/soplex-v7.1.6.patch @@ -1,5 +1,5 @@ diff --git a/CMakeLists.txt b/CMakeLists.txt -index 0b21f5a..6f08341 100644 +index 96aefab..760b0f8 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -27,6 +27,10 @@ set(CPACK_PACKAGE_VERSION_PATCH "${SOPLEX_VERSION_PATCH}") @@ -123,7 +123,7 @@ index 0b21f5a..6f08341 100644 endif() # disable fused floating point contraction to enhance reproducibility across compilers and architectures -@@ -247,7 +243,7 @@ configure_file(${CMAKE_CURRENT_SOURCE_DIR}/src/soplex/config.h.in ${PROJECT_BINA +@@ -244,7 +240,7 @@ configure_file(${CMAKE_CURRENT_SOURCE_DIR}/src/soplex/config.h.in ${PROJECT_BINA configure_file(${PROJECT_SOURCE_DIR}/soplex-config.cmake.in "${PROJECT_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/soplex-config.cmake" @ONLY) add_subdirectory(src) @@ -136,21 +136,10 @@ index 0b21f5a..6f08341 100644 + add_subdirectory(check) +endif() diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt -index 84ec5a5..4552300 100644 +index 
16ffb17..b5a0b56 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt -@@ -3,8 +3,8 @@ - # - function(setLibProperties targetname outputname) - set_target_properties(${targetname} PROPERTIES -- OUTPUT_NAME ${outputname} -- MACOSX_RPATH "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}") -+ OUTPUT_NAME ${outputname} -+ ) - endfunction(setLibProperties) - - include(GNUInstallDirs) -@@ -193,24 +193,28 @@ target_link_libraries(libsoplexshared libsoplex ${libs}) +@@ -193,25 +193,27 @@ target_link_libraries(libsoplexshared libsoplex ${libs}) set_target_properties(libsoplexshared PROPERTIES CXX_VISIBILITY_PRESET default) # create soplex binary using library without pic @@ -159,9 +148,6 @@ index 84ec5a5..4552300 100644 +if(SOPLEX_SOPLEX) + add_executable(soplex EXCLUDE_FROM_ALL soplexmain.cpp) + target_link_libraries(soplex PRIVATE libsoplex ${Boost_LIBRARIES}) -+ -+ # set the install rpath to the installed destination -+ set_target_properties(soplex PROPERTIES INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}") -if(EMSCRIPTEN AND EMSCRIPTEN_HTML) + if(EMSCRIPTEN AND EMSCRIPTEN_HTML) @@ -169,6 +155,9 @@ index 84ec5a5..4552300 100644 set(CMAKE_EXECUTABLE_SUFFIX ".html") -endif() + endif() ++ ++ # set the install rpath to the installed destination ++ set_target_properties(soplex PROPERTIES INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}") -if(CMAKE_BUILD_TYPE EQUAL "Debug") - find_package(Sanitizers) @@ -179,19 +168,16 @@ index 84ec5a5..4552300 100644 + endif() endif() --add_executable(example EXCLUDE_FROM_ALL example.cpp) --target_link_libraries(example libsoplex) -- + add_executable(example EXCLUDE_FROM_ALL example.cpp) + target_link_libraries(example libsoplex) + -# set the install rpath to the installed destination -set_target_properties(soplex PROPERTIES INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}") -+if(SOPLEX_EXAMPLE) -+ add_executable(example example.cpp) -+ target_link_libraries(example libsoplex) -+endif() - +- # 
install the header files of soplex install(FILES ${headers} ${PROJECT_BINARY_DIR}/soplex/config.h DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/soplex) -@@ -237,15 +241,28 @@ install(FILES + install(FILES soplex.h soplex.hpp soplex_interface.h DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) +@@ -237,15 +239,23 @@ install(FILES DESTINATION include/soplex/external/zstr) # install the binary and the library to appropriate lcoations and add them to an export group @@ -202,17 +188,12 @@ index 84ec5a5..4552300 100644 + RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) +endif() + -+install(TARGETS libsoplex libsoplex-pic libsoplexshared -+ EXPORT soplex-targets ++install(TARGETS libsoplex libsoplex-pic libsoplexshared EXPORT soplex-targets LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} INCLUDES DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) -+install(EXPORT soplex-targets -+ FILE soplex-targets.cmake -+ DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/soplex) -+ # Add library targets to the build-tree export set -export(TARGETS libsoplex libsoplex-pic libsoplexshared - FILE "${CMAKE_BINARY_DIR}/soplex-targets.cmake") @@ -223,19 +204,3 @@ index 84ec5a5..4552300 100644 #configure the config file for the build tree set(CONF_INCLUDE_DIRS "${PROJECT_SOURCE_DIR}/src" "${PROJECT_BINARY_DIR}") -@@ -259,7 +276,6 @@ ${CMAKE_BINARY_DIR}/soplex-config-version.cmake - COMPATIBILITY SameMajorVersion - ) - -- - #configure the config file for the install - set(CONF_INCLUDE_DIRS "\${CMAKE_CURRENT_LIST_DIR}/../../../include") - configure_file(${PROJECT_SOURCE_DIR}/soplex-config.cmake.in -@@ -267,7 +283,6 @@ configure_file(${PROJECT_SOURCE_DIR}/soplex-config.cmake.in - - # install the targets of the soplex export group and the config file so that other projects - # can link easily against soplex --install(EXPORT soplex-targets FILE soplex-targets.cmake DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/soplex) - install(FILES 
"${PROJECT_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/soplex-config.cmake" - ${CMAKE_BINARY_DIR}/soplex-config-version.cmake - DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/soplex) From cf21748fdb273363978b66cb5d2479ff0c8249ae Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Fri, 21 Nov 2025 16:27:05 +0100 Subject: [PATCH 011/111] cmake: Add Python 3.14 support --- .github/workflows/amd64_linux_bazel.yml | 1 + .../workflows/amd64_linux_cmake_python.yml | 1 + .github/workflows/amd64_macos_bazel.yml | 1 + .../workflows/amd64_macos_cmake_python.yml | 1 + .github/workflows/amd64_windows_bazel.yml | 1 + .../workflows/amd64_windows_cmake_python.yml | 1 + .github/workflows/arm64_macos_bazel.yml | 1 + .../workflows/arm64_macos_cmake_python.yml | 1 + ortools/python/setup.py.in | 1 + tools/docker/Makefile | 3 +- tools/release/build_delivery_macos.sh | 4 +-- tools/release/build_delivery_win.cmd | 6 ++-- tools/release/test_delivery_macos.sh | 4 +-- tools/release/test_delivery_win.cmd | 35 +++++++++++++++++++ 14 files changed, 53 insertions(+), 8 deletions(-) diff --git a/.github/workflows/amd64_linux_bazel.yml b/.github/workflows/amd64_linux_bazel.yml index 283517e21c6..948195e1032 100644 --- a/.github/workflows/amd64_linux_bazel.yml +++ b/.github/workflows/amd64_linux_bazel.yml @@ -26,6 +26,7 @@ jobs: #{version: '3.11'}, {version: '3.12'}, #{version: '3.13'}, + #{version: '3.14'}, ] fail-fast: false env: diff --git a/.github/workflows/amd64_linux_cmake_python.yml b/.github/workflows/amd64_linux_cmake_python.yml index ec7f93e6e01..8f211d896c8 100644 --- a/.github/workflows/amd64_linux_cmake_python.yml +++ b/.github/workflows/amd64_linux_cmake_python.yml @@ -28,6 +28,7 @@ jobs: #{version: "3.11"}, #{version: "3.12"}, {version: "3.13"}, + {version: "3.14"}, ] fail-fast: false name: amd64•Linux•CMake•Python${{matrix.python.version}} diff --git a/.github/workflows/amd64_macos_bazel.yml b/.github/workflows/amd64_macos_bazel.yml index d336ba15e88..cea71927893 100644 --- 
a/.github/workflows/amd64_macos_bazel.yml +++ b/.github/workflows/amd64_macos_bazel.yml @@ -26,6 +26,7 @@ jobs: #{version: '3.11'}, {version: '3.12'}, #{version: '3.13'}, + #{version: '3.14'}, ] fail-fast: false env: diff --git a/.github/workflows/amd64_macos_cmake_python.yml b/.github/workflows/amd64_macos_cmake_python.yml index 2e087620b0d..f069af65a78 100644 --- a/.github/workflows/amd64_macos_cmake_python.yml +++ b/.github/workflows/amd64_macos_cmake_python.yml @@ -32,6 +32,7 @@ jobs: #{version: "3.11"}, #{version: "3.12"}, {version: "3.13"}, + {version: "3.14"}, ] fail-fast: false name: amd64•MacOS•CMake(${{matrix.cmake.name}})•Python-${{matrix.python.version}} diff --git a/.github/workflows/amd64_windows_bazel.yml b/.github/workflows/amd64_windows_bazel.yml index 44490ebf6f5..c950fb3831e 100644 --- a/.github/workflows/amd64_windows_bazel.yml +++ b/.github/workflows/amd64_windows_bazel.yml @@ -26,6 +26,7 @@ jobs: # {version: '3.11'}, {version: '3.12'}, # {version: '3.13'}, + # {version: '3.14'}, ] fail-fast: false # Don't cancel all jobs if one fails. 
env: diff --git a/.github/workflows/amd64_windows_cmake_python.yml b/.github/workflows/amd64_windows_cmake_python.yml index ee8a494c8d0..12a1fc81ddc 100644 --- a/.github/workflows/amd64_windows_cmake_python.yml +++ b/.github/workflows/amd64_windows_cmake_python.yml @@ -27,6 +27,7 @@ jobs: #{version: "3.11", dir: Python311}, #{version: "3.12", dir: Python312}, {version: "3.13", dir: Python313}, + {version: "3.14", dir: Python314}, ] fail-fast: false name: amd64•Windows•CMake(${{matrix.cmake.name}})•Python-${{matrix.python.version}} diff --git a/.github/workflows/arm64_macos_bazel.yml b/.github/workflows/arm64_macos_bazel.yml index 9aec1697579..95fbe05bac2 100644 --- a/.github/workflows/arm64_macos_bazel.yml +++ b/.github/workflows/arm64_macos_bazel.yml @@ -26,6 +26,7 @@ jobs: #{version: '3.11'}, {version: '3.12'}, #{version: '3.13'}, + #{version: '3.14'}, ] fail-fast: false env: diff --git a/.github/workflows/arm64_macos_cmake_python.yml b/.github/workflows/arm64_macos_cmake_python.yml index 489e9faa23d..28e59dc1bdd 100644 --- a/.github/workflows/arm64_macos_cmake_python.yml +++ b/.github/workflows/arm64_macos_cmake_python.yml @@ -32,6 +32,7 @@ jobs: #{version: "3.11"}, {version: "3.12"}, #{version: "3.13"}, + #{version: "3.14"}, ] fail-fast: false name: arm64•MacOS•CMake(${{matrix.cmake.name}})•Python-${{matrix.python.version}} diff --git a/ortools/python/setup.py.in b/ortools/python/setup.py.in index 00ca7092208..62362cc4bc8 100644 --- a/ortools/python/setup.py.in +++ b/ortools/python/setup.py.in @@ -166,6 +166,7 @@ setup( 'Programming Language :: Python :: 3.11', 'Programming Language :: Python :: 3.12', 'Programming Language :: Python :: 3.13', + 'Programming Language :: Python :: 3.14', 'Programming Language :: C++', 'Programming Language :: Python :: Implementation :: CPython', 'Topic :: Office/Business :: Scheduling', diff --git a/tools/docker/Makefile b/tools/docker/Makefile index 70c88c92dbd..4b14e7453bc 100644 --- a/tools/docker/Makefile +++ 
b/tools/docker/Makefile @@ -59,6 +59,7 @@ help: @echo -e "\t\t${BOLD}311${RESET} Python3.11" @echo -e "\t\t${BOLD}312${RESET} Python3.12" @echo -e "\t\t${BOLD}313${RESET} Python3.13" + @echo -e "\t\t${BOLD}314${RESET} Python3.14" @echo @echo -e "\t${BOLD}${RESET}:" @echo -e "\t\t${BOLD}env${RESET}" @@ -204,7 +205,7 @@ cache/python: | cache -mkdir $@ ## MANYLINUX ## -PYTHON_VERSIONS := 39 310 311 312 313 +PYTHON_VERSIONS := 39 310 311 312 313 314 export/python/manylinux: | export/python -mkdir -p $@ diff --git a/tools/release/build_delivery_macos.sh b/tools/release/build_delivery_macos.sh index f03ddf3577c..79244cae903 100755 --- a/tools/release/build_delivery_macos.sh +++ b/tools/release/build_delivery_macos.sh @@ -225,9 +225,9 @@ function build_python() { echo "DONE" | tee -a build.log if [[ ${PLATFORM} == "arm64" ]]; then - local -r PY=(3.9 3.10 3.11 3.12 3.13) + local -r PY=(3.9 3.10 3.11 3.12 3.13 3.14) else - local -r PY=(3.9 3.10 3.11 3.12 3.13) + local -r PY=(3.9 3.10 3.11 3.12 3.13 3.14) fi # Check Python env diff --git a/tools/release/build_delivery_win.cmd b/tools/release/build_delivery_win.cmd index 940e0ff7bd4..846c2f593ca 100644 --- a/tools/release/build_delivery_win.cmd +++ b/tools/release/build_delivery_win.cmd @@ -269,7 +269,7 @@ set PATH=%userprofile%\AppData\Roaming\Python\Python3%1\Scripts;%PATH% ::echo "python path: %PATH%" GOTO :eof -REM PYTHON 3.9, 3.10, 3.11, 3.12, 3.13 +REM PYTHON 3.9, 3.10, 3.11, 3.12, 3.13, 3.14 :BUILD_PYTHON title Build Python set HASH= @@ -279,7 +279,7 @@ echo Python build seems up to date, skipping exit /B 0 ) -FOR %%v IN (9 10 11 12 13) DO ( +FOR %%v IN (9 10 11 12 13 14) DO ( title Build Python 3.%%v echo Check python3.%%v... 
| tee.exe -a build.log which.exe "C:\python3%%v-64\python.exe" || exit 1 @@ -342,7 +342,7 @@ del /s /f /q temp_dotnet rmdir /s /q temp_dotnet del /s /f /q temp_java rmdir /s /q temp_java -FOR %%v IN (9 10 11 12 13) do ( +FOR %%v IN (9 10 11 12 13 14) do ( del /s /f /q temp_python3%%v rmdir /s /q temp_python3%%v ) diff --git a/tools/release/test_delivery_macos.sh b/tools/release/test_delivery_macos.sh index d5d09ff23e0..fd8110ebd76 100755 --- a/tools/release/test_delivery_macos.sh +++ b/tools/release/test_delivery_macos.sh @@ -23,9 +23,9 @@ command -v make | xargs echo "make: " | tee -a test.log command -v swig | xargs echo "swig: " | tee -a test.log # python if [[ ${PLATFORM} == "arm64" ]]; then - local -r PY=(3.9 3.10 3.11 3.12 3.13) + local -r PY=(3.9 3.10 3.11 3.12 3.13 3.14) else - local -r PY=(3.9 3.10 3.11 3.12 3.13) + local -r PY=(3.9 3.10 3.11 3.12 3.13 3.14) fi for i in "${PY[@]}"; do diff --git a/tools/release/test_delivery_win.cmd b/tools/release/test_delivery_win.cmd index 2cd35cdd760..ca7586d76a5 100755 --- a/tools/release/test_delivery_win.cmd +++ b/tools/release/test_delivery_win.cmd @@ -21,6 +21,8 @@ which.exe C:\python312-64\python.exe || exit 1 echo C:\python312-64\python.exe: FOUND | tee.exe -a test.log which.exe C:\python313-64\python.exe || exit 1 echo C:\python313-64\python.exe: FOUND | tee.exe -a test.log +which.exe C:\python314-64\python.exe || exit 1 +echo C:\python314-64\python.exe: FOUND | tee.exe -a test.log set LOCAL_PATH=%PATH% @@ -188,3 +190,36 @@ echo Testing ortools Python3.13... | tee.exe -a test.log echo Testing ortools Python3.13...DONE | tee.exe -a test.log FOR %%i IN (%TEMP_DIR%\ortools\dist\*.whl) DO copy %%i . + +REM ################### +REM ## PYTHON 3.14 ## +REM ################### +echo Cleaning Python... 
| tee.exe -a test.log +make.exe clean_python WINDOWS_PATH_TO_PYTHON=c:\python314-64 +echo Cleaning Python...DONE | tee.exe -a test.log + +REM make.exe python WINDOWS_PATH_TO_PYTHON=c:\python314-64 || exit 1 +REM echo make python3.14: DONE | tee.exe -a build.log +REM make.exe test_python WINDOWS_PATH_TO_PYTHON=c:\python314-64 || exit 1 +REM echo make test_python3.14: DONE | tee.exe -a build.log +echo Rebuild Python3.14 pypi archive... | tee.exe -a test.log +make.exe package_python WINDOWS_PATH_TO_PYTHON=c:\python314-64 || exit 1 +echo Rebuild Python3.14 pypi archive...DONE | tee.exe -a test.log + +echo Creating Python3.14 venv... | tee.exe -a test.log +set PATH=c:\python314-64;c:\python314-64\Scripts;%PATH% +python -m pip install virtualenv +set TEMP_DIR=temp_python314 +python -m virtualenv %TEMP_DIR%\venv +set PATH=%LOCAL_PATH% +echo Creating Python3.14 venv...DONE | tee.exe -a test.log + +echo Installing ortools Python3.14 venv... | tee.exe -a test.log +FOR %%i IN (%TEMP_DIR%\ortools\dist\*.whl) DO %TEMP_DIR%\venv\Scripts\python -m pip install %%i +echo Installing ortools Python3.14 venv...DONE | tee.exe -a test.log + +echo Testing ortools Python3.14... | tee.exe -a test.log +%TEMP_DIR%\venv\Scripts\python cmake\samples\python\sample.py 2>&1 | tee.exe -a test.log +echo Testing ortools Python3.14...DONE | tee.exe -a test.log + +FOR %%i IN (%TEMP_DIR%\ortools\dist\*.whl) DO copy %%i . From 0573aacc5ae7bf79bcdb60cc563274c3c6623d50 Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Mon, 24 Nov 2025 10:24:23 +0100 Subject: [PATCH 012/111] bazel: bump python requirements . Bump immutabledict to 4.2.2 . Bump numpy to 2.3.5 . 
Bump typing-extensions to 4.15.0 --- bazel/notebook_requirements.in | 10 +++++----- bazel/notebook_requirements.txt | 10 +++++----- bazel/ortools_requirements.in | 10 +++++----- bazel/ortools_requirements.txt | 24 ++++++++++++------------ 4 files changed, 27 insertions(+), 27 deletions(-) diff --git a/bazel/notebook_requirements.in b/bazel/notebook_requirements.in index d1c56d4459f..40914e5bbe4 100644 --- a/bazel/notebook_requirements.in +++ b/bazel/notebook_requirements.in @@ -1,11 +1,11 @@ # OR-Tools code dependencies absl-py==2.2.2 -immutabledict==4.2.1 -numpy==2.2.0 +immutabledict==4.2.2 +numpy==2.3.5 protobuf==6.32.0 requests==2.32.4 -scipy==1.14.1 -typing-extensions==4.13.1 +scipy==1.16.3 +typing-extensions==4.15.0 # OR-Tools build dependencies mypy==1.6.1 @@ -14,7 +14,7 @@ virtualenv==20.28.1 black==24.8.0 # Example dependencies -pandas==2.2.3 +pandas==2.3.3 # Visualization dependencies svgwrite==1.4.3 diff --git a/bazel/notebook_requirements.txt b/bazel/notebook_requirements.txt index 4f44c1c1246..7307754dba3 100644 --- a/bazel/notebook_requirements.txt +++ b/bazel/notebook_requirements.txt @@ -73,7 +73,7 @@ idna==3.10 # httpx # jsonschema # requests -immutabledict==4.2.1 +immutabledict==4.2.2 # via -r bazel/notebook_requirements.in ipykernel==6.30.1 # via jupyterlab @@ -177,7 +177,7 @@ notebook-shim==0.2.4 # via # jupyterlab # notebook -numpy==2.2.0 +numpy==2.3.5 # via # -r bazel/notebook_requirements.in # pandas @@ -194,7 +194,7 @@ packaging==25.0 # jupyterlab-server # nbconvert # plotly -pandas==2.2.3 +pandas==2.3.3 # via -r bazel/notebook_requirements.in pandocfilters==1.5.1 # via nbconvert @@ -272,7 +272,7 @@ rpds-py==0.26.0 # via # jsonschema # referencing -scipy==1.14.1 +scipy==1.16.3 # via -r bazel/notebook_requirements.in send2trash==1.8.3 # via jupyter-server @@ -322,7 +322,7 @@ types-protobuf==6.30.2.20250703 # via mypy-protobuf types-python-dateutil==2.9.0.20250708 # via arrow -typing-extensions==4.13.1 +typing-extensions==4.15.0 # via # -r 
bazel/notebook_requirements.in # anyio diff --git a/bazel/ortools_requirements.in b/bazel/ortools_requirements.in index 115af98e150..1d0889df0a1 100644 --- a/bazel/ortools_requirements.in +++ b/bazel/ortools_requirements.in @@ -1,11 +1,11 @@ # OR-Tools code dependencies absl-py==2.2.2 -immutabledict==4.2.1 -numpy==2.2.0 +immutabledict==4.2.2 +numpy==2.3.5 protobuf==6.32.0 requests==2.32.4 -scipy==1.14.1 -typing-extensions==4.13.1 +scipy==1.16.3 +typing-extensions==4.15.0 # OR-Tools build dependencies mypy==1.6.1 @@ -14,7 +14,7 @@ virtualenv==20.28.1 black==24.8.0 # Example dependencies -pandas==2.2.3 +pandas==2.3.3 # Visualization dependencies svgwrite==1.4.3 diff --git a/bazel/ortools_requirements.txt b/bazel/ortools_requirements.txt index e11a30013e6..5d17a52032c 100644 --- a/bazel/ortools_requirements.txt +++ b/bazel/ortools_requirements.txt @@ -8,19 +8,19 @@ absl-py==2.2.2 # via -r bazel/ortools_requirements.in black==24.8.0 # via -r bazel/ortools_requirements.in -certifi==2025.8.3 +certifi==2025.11.12 # via requests -charset-normalizer==3.4.2 +charset-normalizer==3.4.4 # via requests -click==8.2.1 +click==8.3.1 # via black distlib==0.4.0 # via virtualenv -filelock==3.18.0 +filelock==3.20.0 # via virtualenv -idna==3.10 +idna==3.11 # via requests -immutabledict==4.2.1 +immutabledict==4.2.2 # via -r bazel/ortools_requirements.in mypy==1.6.1 # via -r bazel/ortools_requirements.in @@ -30,18 +30,18 @@ mypy-extensions==1.1.0 # mypy mypy-protobuf==3.5.0 # via -r bazel/ortools_requirements.in -numpy==2.2.0 +numpy==2.3.5 # via # -r bazel/ortools_requirements.in # pandas # scipy packaging==25.0 # via black -pandas==2.2.3 +pandas==2.3.3 # via -r bazel/ortools_requirements.in pathspec==0.12.1 # via black -platformdirs==4.3.8 +platformdirs==4.5.0 # via # black # virtualenv @@ -55,15 +55,15 @@ pytz==2025.2 # via pandas requests==2.32.4 # via -r bazel/ortools_requirements.in -scipy==1.14.1 +scipy==1.16.3 # via -r bazel/ortools_requirements.in six==1.17.0 # via python-dateutil 
svgwrite==1.4.3 # via -r bazel/ortools_requirements.in -types-protobuf==6.30.2.20250703 +types-protobuf==6.32.1.20251105 # via mypy-protobuf -typing-extensions==4.13.1 +typing-extensions==4.15.0 # via # -r bazel/ortools_requirements.in # mypy From 444331501f26f58a8bfbac93d9fb4a3f87336c8d Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Mon, 24 Nov 2025 08:29:59 +0100 Subject: [PATCH 013/111] routing: export filters update from google3 --- ortools/constraint_solver/routing_filters.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ortools/constraint_solver/routing_filters.cc b/ortools/constraint_solver/routing_filters.cc index 9e194bb6d05..8ec9f765681 100644 --- a/ortools/constraint_solver/routing_filters.cc +++ b/ortools/constraint_solver/routing_filters.cc @@ -172,6 +172,8 @@ class MaxActiveVehiclesFilter : public IntVarLocalSearchFilter { routing_model_(routing_model), is_active_(routing_model.vehicles(), false), active_vehicles_(0) {} + ~MaxActiveVehiclesFilter() override = default; + std::string DebugString() const override { return "MaxActiveVehiclesFilter"; } bool Accept(const Assignment* delta, const Assignment* /*deltadelta*/, int64_t /*objective_min*/, int64_t /*objective_max*/) override { const int64_t kUnassigned = -1; From d15a2e67e3a710196adc299420c2755e1be521d8 Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Mon, 24 Nov 2025 08:30:12 +0100 Subject: [PATCH 014/111] math_opt: export from google3 --- ortools/math_opt/callback.proto | 6 +- ortools/math_opt/core/base_solver.h | 12 +- ortools/math_opt/core/solver.cc | 10 +- ortools/math_opt/core/solver_interface.h | 39 +++- ortools/math_opt/cpp/BUILD.bazel | 16 ++ ortools/math_opt/cpp/callback.h | 4 +- ortools/math_opt/cpp/incremental_solver.cc | 34 +++ ortools/math_opt/cpp/incremental_solver.h | 13 +- .../math_opt/cpp/incremental_solver_test.cc | 121 ++++++++++ ortools/math_opt/cpp/matchers.cc | 10 +- ortools/math_opt/cpp/matchers.h | 20 +- ortools/math_opt/cpp/matchers_test.cc | 23 +- 
ortools/math_opt/cpp/solve_impl.cc | 15 -- ortools/math_opt/cpp/solve_impl.h | 5 - ortools/math_opt/python/callback.py | 4 +- ortools/math_opt/solver_tests/BUILD.bazel | 1 + .../math_opt/solver_tests/callback_tests.cc | 54 ++++- ortools/math_opt/solvers/BUILD.bazel | 4 + ortools/math_opt/solvers/cp_sat_solver.cc | 209 +++++++++++++++--- .../math_opt/solvers/cp_sat_solver_test.cc | 19 +- 20 files changed, 501 insertions(+), 118 deletions(-) create mode 100644 ortools/math_opt/cpp/incremental_solver.cc create mode 100644 ortools/math_opt/cpp/incremental_solver_test.cc diff --git a/ortools/math_opt/callback.proto b/ortools/math_opt/callback.proto index e604b101e51..8d1d52a6363 100644 --- a/ortools/math_opt/callback.proto +++ b/ortools/math_opt/callback.proto @@ -47,7 +47,8 @@ enum CallbackEventProto { // node). Useful for early termination. Note that this event does not provide // information on LP relaxations nor about new incumbent solutions. // - // This event is supported for MIP models by SOLVER_TYPE_GUROBI only. + // This event is fully supported for MIP models by SOLVER_TYPE_GUROBI only. If + // used with SOLVER_TYPE_CP_SAT, it is called when the dual bound is improved. CALLBACK_EVENT_MIP = 3; // Called every time a new MIP incumbent is found. @@ -127,7 +128,8 @@ message CallbackDataProto { BarrierStats barrier_stats = 6; // MIP B&B stats. Only available during CALLBACK_EVENT_MIPxxxx events. - // Not supported for CP-SAT. + // When using CP-SAT, only primal_bound, dual_bound and + // number_of_solutions_found are populated. message MipStats { optional double primal_bound = 1; optional double dual_bound = 2; diff --git a/ortools/math_opt/core/base_solver.h b/ortools/math_opt/core/base_solver.h index 2958f741bf5..2c26b270f39 100644 --- a/ortools/math_opt/core/base_solver.h +++ b/ortools/math_opt/core/base_solver.h @@ -33,8 +33,9 @@ namespace operations_research::math_opt { // The API of solvers (in-process, sub-process and streaming RPC ones). 
// -// Thread-safety: methods Solve() and Update() must not be called concurrently; -// they should immediately return with an error status if this happens. +// Thread-safety: methods Solve(), ComputeInfeasibleSubsystem() and Update() +// must not be called concurrently; they should immediately return with an error +// status if this happens. // // TODO: b/350984134 - Rename `Solver` into `InProcessSolver` and then rename // `BaseSolver` into `Solver`. @@ -65,7 +66,14 @@ class BaseSolver { // printed on stdout/stderr/logs anymore. MessageCallback message_callback = nullptr; + // Registration parameter controlling calls to user_cb. CallbackRegistrationProto callback_registration; + + // An optional MIP/LP callback. Only called for events registered in + // callback_registration. + // + // Solve() returns an error if called without a user_cb but with some + // non-empty callback_registration.request_registration. Callback user_cb = nullptr; // An optional interrupter that the solver can use to interrupt the solve diff --git a/ortools/math_opt/core/solver.cc b/ortools/math_opt/core/solver.cc index 382bb65823f..6d6d309a612 100644 --- a/ortools/math_opt/core/solver.cc +++ b/ortools/math_opt/core/solver.cc @@ -120,10 +120,16 @@ absl::StatusOr Solver::Solve(const SolveArgs& arguments) { ValidateModelSolveParameters(arguments.model_parameters, model_summary_)) << "invalid model_parameters"; + RETURN_IF_ERROR(ValidateCallbackRegistration(arguments.callback_registration, + model_summary_)); SolverInterface::Callback cb = nullptr; + if (!arguments.callback_registration.request_registration().empty() && + arguments.user_cb == nullptr) { + return absl::InvalidArgumentError( + "no callback function was provided but callback events were " + "registered"); + } if (arguments.user_cb != nullptr) { - RETURN_IF_ERROR(ValidateCallbackRegistration( - arguments.callback_registration, model_summary_)); cb = [&](const CallbackDataProto& callback_data) -> absl::StatusOr { 
RETURN_IF_ERROR(ValidateCallbackDataProto( diff --git a/ortools/math_opt/core/solver_interface.h b/ortools/math_opt/core/solver_interface.h index 7329f4dc3eb..6fc3f8dfad7 100644 --- a/ortools/math_opt/core/solver_interface.h +++ b/ortools/math_opt/core/solver_interface.h @@ -43,8 +43,9 @@ namespace math_opt { // // This interface is not meant to be used directly. The actual API is the one of // the Solver class. The Solver class validates the models before calling this -// interface. It makes sure no concurrent calls happen on Solve(), CanUpdate() -// and Update(). It makes sure no other function is called after Solve(), +// interface. It makes sure no concurrent calls happen on Solve(), +// ComputeInfeasibleSubsystem(), CanUpdate() and Update(). It makes sure no +// other function is called after Solve(), ComputeInfeasibleSubsystem(), // Update() or a callback have failed. // // Implementations of this interface should not have public constructors but @@ -69,12 +70,28 @@ class SolverInterface { // See Solver::MessageCallback documentation for details. using MessageCallback = std::function&)>; - // A callback function (if non null) is a function that validates its input - // and its output, and if fails, return a status. The invariant is that the - // solver implementation can rely on receiving valid data. The implementation - // of this interface must provide valid input (which will be validated) and - // in error, it will return a status (without actually calling the callback - // function). This is enforced in the solver.cc layer. + // A callback function (if non null) provided by the Solver class to its + // SolverInterface that wraps the user callback function + // (BaseSolver::Callback) and validates its inputs (provided by the + // SolverInterface implementation) and outputs (provided by the user). A + // failing status is returned if those inputs or outputs are invalid. 
+ // + // To be clear the SolverInterface::Callback is implemented by the Solver + // class and looks like: + // + // absl::Status Callback(const CallbackDataProto& callback_data) { + // RETURN_IF_ERROR(ValidateCallbackDataProto(callback_data, ...)); + // CallbackResultProto result = user_cb(callback_data); + // RETURN_IF_ERROR(ValidateCallbackResultProto(result)); + // return result; + // } + // + // As a consequence SolverInterface implementations can rely on receiving a + // valid CallbackResultProto. + // + // When the SolverInterface::Callback returns an error the SolverInterface + // implementation must interrupt the Solve() as soon as possible and return + // this error. using Callback = std::function( const CallbackDataProto&)>; @@ -114,7 +131,11 @@ class SolverInterface { // When parameter `message_cb` is not null and the underlying solver does not // supports message callbacks, it should ignore it. // - // Solvers should return a InvalidArgumentError when called with events on + // The parameter `cb` won't be null when + // callback_registration.request_registration is not empty (solver.cc will + // return an error in that case before calling SolverInterface::Solve()). + // + // Solvers should return an InvalidArgumentError when called with events on // callback_registration that are not supported by the solver for the type of // model being solved (for example MIP events if the model is an LP, or events // that are not emitted by the solver). 
Solvers should use diff --git a/ortools/math_opt/cpp/BUILD.bazel b/ortools/math_opt/cpp/BUILD.bazel index 0d0a83a09ef..6c1e4d88900 100644 --- a/ortools/math_opt/cpp/BUILD.bazel +++ b/ortools/math_opt/cpp/BUILD.bazel @@ -895,6 +895,7 @@ cc_library( cc_library( name = "incremental_solver", + srcs = ["incremental_solver.cc"], hdrs = ["incremental_solver.h"], deps = [ ":compute_infeasible_subsystem_arguments", @@ -903,10 +904,25 @@ cc_library( ":solve_arguments", ":solve_result", ":update_result", + "//ortools/base:status_macros", "@abseil-cpp//absl/status:statusor", ], ) +cc_test( + name = "incremental_solver_test", + srcs = ["incremental_solver_test.cc"], + deps = [ + ":incremental_solver", + ":matchers", + ":math_opt", + "//ortools/base:gmock_main", + "@abseil-cpp//absl/status", + "@abseil-cpp//absl/status:statusor", + "@abseil-cpp//absl/strings:string_view", + ], +) + cc_library( name = "remote_streaming_mode", srcs = ["remote_streaming_mode.cc"], diff --git a/ortools/math_opt/cpp/callback.h b/ortools/math_opt/cpp/callback.h index 7b98f7473dc..73fc1459b8d 100644 --- a/ortools/math_opt/cpp/callback.h +++ b/ortools/math_opt/cpp/callback.h @@ -109,7 +109,9 @@ enum class CallbackEvent { // node). Useful for early termination. Note that this event does not provide // information on LP relaxations nor about new incumbent solutions. // - // This event is supported for MIP models with SolverType::kGurobi only. + // This event is fully supported for MIP models with SolverType::kGurobi only. + // If used with SolverType::kCpSat, it is called when the dual bound is + // improved. kMip = CALLBACK_EVENT_MIP, // Called every time a new MIP incumbent is found. 
diff --git a/ortools/math_opt/cpp/incremental_solver.cc b/ortools/math_opt/cpp/incremental_solver.cc new file mode 100644 index 00000000000..70b7140f9fc --- /dev/null +++ b/ortools/math_opt/cpp/incremental_solver.cc @@ -0,0 +1,34 @@ +// Copyright 2010-2025 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "ortools/math_opt/cpp/incremental_solver.h" + +#include "absl/status/statusor.h" +#include "ortools/base/status_macros.h" + +namespace operations_research::math_opt { + +absl::StatusOr IncrementalSolver::Solve( + const SolveArguments& arguments) { + RETURN_IF_ERROR(Update().status()); + return SolveWithoutUpdate(arguments); +} + +absl::StatusOr +IncrementalSolver::ComputeInfeasibleSubsystem( + const ComputeInfeasibleSubsystemArguments& arguments) { + RETURN_IF_ERROR(Update().status()); + return ComputeInfeasibleSubsystemWithoutUpdate(arguments); +} + +} // namespace operations_research::math_opt diff --git a/ortools/math_opt/cpp/incremental_solver.h b/ortools/math_opt/cpp/incremental_solver.h index e424b569b3b..32833cbb65c 100644 --- a/ortools/math_opt/cpp/incremental_solver.h +++ b/ortools/math_opt/cpp/incremental_solver.h @@ -112,21 +112,14 @@ class IncrementalSolver { // // See callback.h for documentation on arguments.callback and // arguments.callback_registration. 
- virtual absl::StatusOr Solve( - const SolveArguments& arguments) = 0; - absl::StatusOr Solve() { return Solve({}); } + absl::StatusOr Solve(const SolveArguments& arguments = {}); // Updates the underlying solver with latest model changes and runs the // computation. // // Same as Solve() but compute the infeasible subsystem. - virtual absl::StatusOr - ComputeInfeasibleSubsystem( - const ComputeInfeasibleSubsystemArguments& arguments) = 0; - absl::StatusOr - ComputeInfeasibleSubsystem() { - return ComputeInfeasibleSubsystem({}); - } + absl::StatusOr ComputeInfeasibleSubsystem( + const ComputeInfeasibleSubsystemArguments& arguments = {}); // Updates the model to solve. // diff --git a/ortools/math_opt/cpp/incremental_solver_test.cc b/ortools/math_opt/cpp/incremental_solver_test.cc new file mode 100644 index 00000000000..64318bc9620 --- /dev/null +++ b/ortools/math_opt/cpp/incremental_solver_test.cc @@ -0,0 +1,121 @@ +// Copyright 2010-2025 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "ortools/math_opt/cpp/incremental_solver.h" + +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/string_view.h" +#include "gtest/gtest.h" +#include "ortools/base/gmock.h" +#include "ortools/math_opt/cpp/matchers.h" +#include "ortools/math_opt/cpp/math_opt.h" + +namespace operations_research::math_opt { +namespace { + +using ::testing::_; +using ::testing::Return; +using ::testing::status::IsOkAndHolds; +using ::testing::status::StatusIs; + +class MockIncrementalSolver final : public IncrementalSolver { + public: + MOCK_METHOD(absl::StatusOr, Update, (), (override)); + MOCK_METHOD(absl::StatusOr, SolveWithoutUpdate, + (const SolveArguments&), (const, override)); + MOCK_METHOD(absl::StatusOr, + ComputeInfeasibleSubsystemWithoutUpdate, + (const ComputeInfeasibleSubsystemArguments&), (const, override)); + MOCK_METHOD(SolverType, solver_type, (), (const, override)); +}; + +TEST(IncrementalSolverTest, SolveWithFailingUpdate) { + MockIncrementalSolver incremental_solver; + EXPECT_CALL(incremental_solver, Update()) + .WillOnce(Return(absl::InternalError("oops"))); + EXPECT_THAT(incremental_solver.Solve(), + StatusIs(absl::StatusCode::kInternal, "oops")); +} + +TEST(IncrementalSolverTest, SolveWithFailingSolveWithoutUpdate) { + MockIncrementalSolver incremental_solver; + EXPECT_CALL(incremental_solver, Update()) + .WillOnce(Return(UpdateResult(/*did_update=*/true))); + EXPECT_CALL(incremental_solver, SolveWithoutUpdate(_)) + .WillOnce(Return(absl::InternalError("oops"))); + EXPECT_THAT(incremental_solver.Solve(), + StatusIs(absl::StatusCode::kInternal, "oops")); +} + +TEST(IncrementalSolverTest, SuccessfulSolve) { + MockIncrementalSolver incremental_solver; + EXPECT_CALL(incremental_solver, Update()) + .WillOnce(Return(UpdateResult(/*did_update=*/true))); + constexpr double kObjectiveValue = 3.5; + constexpr absl::string_view kDetail = "found the optimum!"; + EXPECT_CALL(incremental_solver, 
SolveWithoutUpdate(_)) + .WillOnce(Return( + SolveResult(Termination::Optimal(/*objective_value=*/kObjectiveValue, + /*detail=*/std::string(kDetail))))); + ASSERT_OK_AND_ASSIGN(const SolveResult solve_result, + incremental_solver.Solve()); + EXPECT_THAT(solve_result.termination, + TerminationIsOptimal(/*primal_objective_value=*/kObjectiveValue)); + EXPECT_EQ(solve_result.termination.detail, kDetail); +} + +TEST(IncrementalSolverTest, ComputeInfeasibleSubsystemWithFailingUpdate) { + MockIncrementalSolver incremental_solver; + EXPECT_CALL(incremental_solver, Update()) + .WillOnce(Return(absl::InternalError("oops"))); + EXPECT_THAT(incremental_solver.ComputeInfeasibleSubsystem(), + StatusIs(absl::StatusCode::kInternal, "oops")); +} + +TEST(IncrementalSolverTest, + ComputeInfeasibleSubsystemWithFailingComputeWithoutUpdate) { + MockIncrementalSolver incremental_solver; + EXPECT_CALL(incremental_solver, Update()) + .WillOnce(Return(UpdateResult(/*did_update=*/true))); + EXPECT_CALL(incremental_solver, ComputeInfeasibleSubsystemWithoutUpdate(_)) + .WillOnce(Return(absl::InternalError("oops"))); + EXPECT_THAT(incremental_solver.ComputeInfeasibleSubsystem(), + StatusIs(absl::StatusCode::kInternal, "oops")); +} + +TEST(IncrementalSolverTest, SuccessfulComputeInfeasibleSubsystem) { + MockIncrementalSolver incremental_solver; + EXPECT_CALL(incremental_solver, Update()) + .WillOnce(Return(UpdateResult(/*did_update=*/true))); + Model model; + const Variable v = model.AddBinaryVariable("v"); + const ModelSubset model_subset = { + .variable_integrality = {v}, + }; + EXPECT_CALL(incremental_solver, ComputeInfeasibleSubsystemWithoutUpdate(_)) + .WillOnce(Return(ComputeInfeasibleSubsystemResult{ + .feasibility = FeasibilityStatus::kInfeasible, + .infeasible_subsystem = model_subset, + .is_minimal = false, + })); + ASSERT_THAT(incremental_solver.ComputeInfeasibleSubsystem(), + IsOkAndHolds(IsInfeasible( + /*expected_is_minimal=*/false, + 
/*expected_infeasible_subsystem=*/model_subset))); +} + +} // namespace +} // namespace operations_research::math_opt diff --git a/ortools/math_opt/cpp/matchers.cc b/ortools/math_opt/cpp/matchers.cc index f4260718015..10a0936e439 100644 --- a/ortools/math_opt/cpp/matchers.cc +++ b/ortools/math_opt/cpp/matchers.cc @@ -209,8 +209,8 @@ class MapToDoubleMatcher } // namespace -Matcher> IsNearlySubsetOf(VariableMap expected, - double tolerance) { +Matcher> IsNearlySupersetOf(VariableMap expected, + double tolerance) { return Matcher>(new MapToDoubleMatcher( std::move(expected), /*all_keys=*/false, tolerance)); } @@ -221,7 +221,7 @@ Matcher> IsNear(VariableMap expected, std::move(expected), /*all_keys=*/true, tolerance)); } -Matcher> IsNearlySubsetOf( +Matcher> IsNearlySupersetOf( LinearConstraintMap expected, double tolerance) { return Matcher>( new MapToDoubleMatcher(std::move(expected), @@ -243,7 +243,7 @@ Matcher> IsNear( std::move(expected), /*all_keys=*/true, tolerance)); } -Matcher> IsNearlySubsetOf( +Matcher> IsNearlySupersetOf( absl::flat_hash_map expected, double tolerance) { return Matcher>( @@ -260,7 +260,7 @@ Matcher> IsNear( } template -Matcher> IsNearlySubsetOf( +Matcher> IsNearlySupersetOf( absl::flat_hash_map expected, const double tolerance) { return Matcher>(new MapToDoubleMatcher( std::move(expected), /*all_keys=*/false, tolerance)); diff --git a/ortools/math_opt/cpp/matchers.h b/ortools/math_opt/cpp/matchers.h index 5e6cd544f0b..29dd1657a1f 100644 --- a/ortools/math_opt/cpp/matchers.h +++ b/ortools/math_opt/cpp/matchers.h @@ -121,11 +121,11 @@ constexpr double kMatcherDefaultTolerance = 1e-5; testing::Matcher> IsNear( VariableMap expected, double tolerance = kMatcherDefaultTolerance); -// Checks that the keys of actual are a subset of the keys of expected, and that -// for all shared keys, the values are within tolerance. 
This factory will +// Checks that the keys of actual are a superset of the keys of expected, and +// that for all shared keys, the values are within tolerance. This factory will // CHECK-fail if expected contains any NaN values, and any NaN values in the // expression compared against will result in the matcher failing. -testing::Matcher> IsNearlySubsetOf( +testing::Matcher> IsNearlySupersetOf( VariableMap expected, double tolerance = kMatcherDefaultTolerance); // Checks that the maps have identical keys and values within tolerance. This @@ -135,11 +135,11 @@ testing::Matcher> IsNear( LinearConstraintMap expected, double tolerance = kMatcherDefaultTolerance); -// Checks that the keys of actual are a subset of the keys of expected, and that -// for all shared keys, the values are within tolerance. This factory will +// Checks that the keys of actual are a superset of the keys of expected, and +// that for all shared keys, the values are within tolerance. This factory will // CHECK-fail if expected contains any NaN values, and any NaN values in the // expression compared against will result in the matcher failing. -testing::Matcher> IsNearlySubsetOf( +testing::Matcher> IsNearlySupersetOf( LinearConstraintMap expected, double tolerance = kMatcherDefaultTolerance); @@ -149,13 +149,13 @@ testing::Matcher> IsNear( absl::flat_hash_map expected, double tolerance = kMatcherDefaultTolerance); -// Checks that the keys of actual are a subset of the keys of expected, and that -// for all shared keys, the values are within tolerance. This factory will +// Checks that the keys of actual are a superset of the keys of expected, and +// that for all shared keys, the values are within tolerance. This factory will // CHECK-fail if expected contains any NaN values, and any NaN values in the // expression compared against will result in the matcher failing. 
testing::Matcher> -IsNearlySubsetOf(absl::flat_hash_map expected, - double tolerance = kMatcherDefaultTolerance); +IsNearlySupersetOf(absl::flat_hash_map expected, + double tolerance = kMatcherDefaultTolerance); //////////////////////////////////////////////////////////////////////////////// // Matchers for various Variable expressions (e.g. LinearExpression) diff --git a/ortools/math_opt/cpp/matchers_test.cc b/ortools/math_opt/cpp/matchers_test.cc index cfef8a8e2d5..7d104f57db6 100644 --- a/ortools/math_opt/cpp/matchers_test.cc +++ b/ortools/math_opt/cpp/matchers_test.cc @@ -115,20 +115,21 @@ TEST(ApproximateMapMatcherTest, VariableIsNear) { EXPECT_THAT(actual, Not(IsNear({{z, -2.5}}))); } -TEST(ApproximateMapMatcherTest, VariableIsNearlySubsetOf) { +TEST(ApproximateMapMatcherTest, VariableIsNearlySupersetOf) { Model model; const Variable w = model.AddBinaryVariable("w"); const Variable x = model.AddBinaryVariable("x"); const Variable y = model.AddBinaryVariable("y"); const Variable z = model.AddBinaryVariable("z"); const VariableMap actual = {{x, 2.0}, {y, 4.1}, {z, -2.5}}; - EXPECT_THAT(actual, IsNearlySubsetOf(actual)); - EXPECT_THAT(actual, IsNearlySubsetOf({{y, 4.1}, {z, -2.5}})); - EXPECT_THAT(actual, Not(IsNearlySubsetOf({{w, 1}, {y, 4.1}, {z, -2.5}}))); - EXPECT_THAT(actual, Not(IsNearlySubsetOf({{y, 4.4}, {z, -2.5}}))); + EXPECT_THAT(actual, IsNearlySupersetOf(actual)); + EXPECT_THAT(actual, IsNearlySupersetOf({{y, 4.1}, {z, -2.5}})); + EXPECT_THAT(actual, Not(IsNearlySupersetOf({{w, 1}, {y, 4.1}, {z, -2.5}}))); + EXPECT_THAT(actual, Not(IsNearlySupersetOf({{y, 4.4}, {z, -2.5}}))); } -TEST(ApproximateMapMatcherTest, QuadraticConstraintIsNearAndIsNearlySubsetOf) { +TEST(ApproximateMapMatcherTest, + QuadraticConstraintIsNearAndIsNearlySupersetOf) { Model model; const Variable x = model.AddBinaryVariable("x"); const QuadraticConstraint c = model.AddQuadraticConstraint(x * x <= 0, "c"); @@ -137,29 +138,29 @@ TEST(ApproximateMapMatcherTest, 
QuadraticConstraintIsNearAndIsNearlySubsetOf) { const absl::flat_hash_map actual = {{c, 2}, {e, 5}}; - EXPECT_THAT(actual, IsNearlySubsetOf(actual)); + EXPECT_THAT(actual, IsNearlySupersetOf(actual)); EXPECT_THAT(actual, IsNear(actual)); EXPECT_THAT(actual, IsNear({{c, 2 + 1e-8}, {e, 5}})); EXPECT_THAT(actual, Not(IsNear({{e, 5}}))); EXPECT_THAT(actual, Not(IsNear({{c, 2 + 1e-2}, {e, 5}}))); EXPECT_THAT(actual, Not(IsNear({{d, 5}}))); - EXPECT_THAT(actual, IsNearlySubsetOf({{e, 5}})); + EXPECT_THAT(actual, IsNearlySupersetOf({{e, 5}})); } -TEST(ApproximateMapMatcherTest, LinearConstraintIsNearAndIsNearlySubsetOf) { +TEST(ApproximateMapMatcherTest, LinearConstraintIsNearAndIsNearlySupersetOf) { Model model; const LinearConstraint c = model.AddLinearConstraint("c"); const LinearConstraint d = model.AddLinearConstraint("d"); const LinearConstraint e = model.AddLinearConstraint("e"); const LinearConstraintMap actual = {{c, 2}, {e, 5}}; - EXPECT_THAT(actual, IsNearlySubsetOf(actual)); + EXPECT_THAT(actual, IsNearlySupersetOf(actual)); EXPECT_THAT(actual, IsNear(actual)); EXPECT_THAT(actual, IsNear({{c, 2 + 1e-8}, {e, 5}})); EXPECT_THAT(actual, Not(IsNear({{e, 5}}))); EXPECT_THAT(actual, Not(IsNear({{c, 2 + 1e-2}, {e, 5}}))); EXPECT_THAT(actual, Not(IsNear({{d, 5}}))); - EXPECT_THAT(actual, IsNearlySubsetOf({{e, 5}})); + EXPECT_THAT(actual, IsNearlySupersetOf({{e, 5}})); } TEST(LinearExpressionMatcherTest, IsIdentical) { diff --git a/ortools/math_opt/cpp/solve_impl.cc b/ortools/math_opt/cpp/solve_impl.cc index a9eb04724f4..0586b2e7f5b 100644 --- a/ortools/math_opt/cpp/solve_impl.cc +++ b/ortools/math_opt/cpp/solve_impl.cc @@ -193,21 +193,6 @@ IncrementalSolverImpl::IncrementalSolverImpl( update_tracker_(std::move(update_tracker)), solver_(std::move(solver)) {} -absl::StatusOr IncrementalSolverImpl::Solve( - const SolveArguments& arguments) { - // TODO: b/260337466 - Add permanent errors and concurrency protection. 
- RETURN_IF_ERROR(Update().status()); - return SolveWithoutUpdate(arguments); -} - -absl::StatusOr -IncrementalSolverImpl::ComputeInfeasibleSubsystem( - const ComputeInfeasibleSubsystemArguments& arguments) { - // TODO: b/260337466 - Add permanent errors and concurrency protection. - RETURN_IF_ERROR(Update().status()); - return ComputeInfeasibleSubsystemWithoutUpdate(arguments); -} - absl::StatusOr IncrementalSolverImpl::Update() { // TODO: b/260337466 - Add permanent errors and concurrency protection. ASSIGN_OR_RETURN(std::optional model_update, diff --git a/ortools/math_opt/cpp/solve_impl.h b/ortools/math_opt/cpp/solve_impl.h index fbb082732fc..9c410b4ee28 100644 --- a/ortools/math_opt/cpp/solve_impl.h +++ b/ortools/math_opt/cpp/solve_impl.h @@ -81,11 +81,6 @@ class IncrementalSolverImpl : public IncrementalSolver { BaseSolverFactory solver_factory, Model* model, SolverType solver_type, const SolveInterrupter* user_canceller, bool remove_names); - absl::StatusOr Solve(const SolveArguments& arguments) override; - - absl::StatusOr ComputeInfeasibleSubsystem( - const ComputeInfeasibleSubsystemArguments& arguments) override; - absl::StatusOr Update() override; absl::StatusOr SolveWithoutUpdate( diff --git a/ortools/math_opt/python/callback.py b/ortools/math_opt/python/callback.py index 59c088d038f..918f8dfc575 100644 --- a/ortools/math_opt/python/callback.py +++ b/ortools/math_opt/python/callback.py @@ -13,6 +13,7 @@ # limitations under the License. """Defines how to request a callback and the input and output of a callback.""" + import dataclasses import datetime import enum @@ -36,7 +37,8 @@ class Event(enum.Enum): * MIP: The solver is in the MIP loop (called periodically before starting a new node). Useful for early termination. Note that this event does not provide information on LP relaxations nor about new incumbent solutions. - Gurobi only. + Fully supported by Gurobi only. If used with CP-SAT, it is called when the + dual bound is improved. 
* MIP_SOLUTION: Called every time a new MIP incumbent is found. Fully supported by Gurobi, partially supported by CP-SAT (you can observe new solutions, but not add lazy constraints). diff --git a/ortools/math_opt/solver_tests/BUILD.bazel b/ortools/math_opt/solver_tests/BUILD.bazel index b8247fb8a1f..9f31d474e6d 100644 --- a/ortools/math_opt/solver_tests/BUILD.bazel +++ b/ortools/math_opt/solver_tests/BUILD.bazel @@ -52,6 +52,7 @@ cc_library( "//ortools/math_opt/io:mps_converter", "//ortools/port:proto_utils", "//ortools/port:scoped_std_stream_capture", + "//ortools/util:fp_roundtrip_conv", "@abseil-cpp//absl/container:flat_hash_set", "@abseil-cpp//absl/log:check", "@abseil-cpp//absl/status", diff --git a/ortools/math_opt/solver_tests/callback_tests.cc b/ortools/math_opt/solver_tests/callback_tests.cc index a4adbdcbb8e..d7511cccddd 100644 --- a/ortools/math_opt/solver_tests/callback_tests.cc +++ b/ortools/math_opt/solver_tests/callback_tests.cc @@ -13,6 +13,8 @@ #include "ortools/math_opt/solver_tests/callback_tests.h" +#include +#include #include #include #include @@ -44,6 +46,7 @@ #include "ortools/math_opt/solver_tests/test_models.h" #include "ortools/port/proto_utils.h" #include "ortools/port/scoped_std_stream_capture.h" +#include "ortools/util/fp_roundtrip_conv.h" namespace operations_research { namespace math_opt { @@ -418,8 +421,8 @@ TEST_P(CallbackTest, EventSolutionAlwaysCalled) { SolveArguments args = { .callback_registration = {.events = {CallbackEvent::kMipSolution}}}; absl::Mutex mutex; - bool cb_called = false; - bool cb_called_on_optimal = false; + std::atomic cb_called = false; + std::atomic cb_called_on_optimal = false; args.callback = [&](const CallbackData& callback_data) { const absl::MutexLock lock(mutex); cb_called = true; @@ -433,6 +436,8 @@ TEST_P(CallbackTest, EventSolutionAlwaysCalled) { EXPECT_THAT( sol, AnyOf(IsNear({{x, 0.0}, {y, 0.0}}), IsNear({{x, 1.0}, {y, 0.0}}), IsNear({{x, 0.0}, {y, 1.0}}))); + 
EXPECT_LE(callback_data.mip_stats.primal_bound(), 2.05); + EXPECT_GE(callback_data.mip_stats.dual_bound(), 1.95); if (gtl::FindWithDefault(sol, y) > 0.5) { cb_called_on_optimal = true; } @@ -646,8 +651,8 @@ TEST_P(CallbackTest, EventSolutionFilter) { .events = {CallbackEvent::kMipSolution}, .mip_solution_filter = MakeKeepKeysFilter({y})}}; absl::Mutex mutex; - bool cb_called = false; - bool cb_called_on_optimal = false; + std::atomic cb_called = false; + std::atomic cb_called_on_optimal = false; args.callback = [&](const CallbackData& callback_data) { const absl::MutexLock lock(mutex); cb_called = true; @@ -795,6 +800,47 @@ TEST_P(CallbackTest, EventNodeFilter) { EXPECT_THAT(solutions, Each(UnorderedElementsAre(Pair(x0, _), Pair(x2, _)))); } +TEST_P(CallbackTest, EventMip) { + if (!GetParam().supported_events.contains(CallbackEvent::kMip)) { + GTEST_SKIP() << "Test skipped because this solver does not support " + "CallbackEvent::kMip."; + } + + // This test must use integer variables. + ASSERT_TRUE(GetParam().integer_variables); + + // Use the MIPLIB instance 23588, which has optimal solution 8090 and LP + // relaxation of 7649.87. This instance was selected because every + // supported solver can solve it quickly (a few seconds), but no solver can + // solve it in one node (so the node callback will be invoked). 
+ ASSERT_OK_AND_ASSIGN(const std::unique_ptr model, + LoadMiplibInstance("23588")); + + std::atomic best_primal_bound = + std::numeric_limits::infinity(); + std::atomic best_dual_bound = + -std::numeric_limits::infinity(); + const SolveArguments args = { + .callback_registration = {.events = {CallbackEvent::kMip}}, + .callback = [&](const CallbackData& callback_data) { + CHECK_EQ(callback_data.event, CallbackEvent::kMip); + const double primal_bound = callback_data.mip_stats.primal_bound(); + const double dual_bound = callback_data.mip_stats.dual_bound(); + best_primal_bound = std::fmin(best_primal_bound, primal_bound); + best_dual_bound = std::fmax(best_dual_bound, dual_bound); + return CallbackResult(); + }}; + EXPECT_THAT(Solve(*model, GetParam().solver_type, args), + IsOkAndHolds(IsOptimal(8090))); + LOG(INFO) << "best_primal_bound: " + << RoundTripDoubleFormat(best_primal_bound.load()); + LOG(INFO) << "best_dual_bound: " + << RoundTripDoubleFormat(best_dual_bound.load()); + EXPECT_THAT(best_primal_bound.load(), testing::DoubleNear(8090, 0.5)); + EXPECT_LE(best_dual_bound.load(), 8090.5); + EXPECT_GE(best_dual_bound.load(), 7640); +} + TEST_P(CallbackTest, StatusPropagation) { if (!GetParam().supported_events.contains(CallbackEvent::kMipSolution)) { GTEST_SKIP() << "Test skipped because this solver does not support " diff --git a/ortools/math_opt/solvers/BUILD.bazel b/ortools/math_opt/solvers/BUILD.bazel index acee4c5fbd3..2055098cc6f 100644 --- a/ortools/math_opt/solvers/BUILD.bazel +++ b/ortools/math_opt/solvers/BUILD.bazel @@ -238,13 +238,17 @@ cc_library( "//ortools/port:proto_utils", "//ortools/sat:sat_parameters_cc_proto", "//ortools/util:solve_interrupter", + "@abseil-cpp//absl/base:core_headers", + "@abseil-cpp//absl/base:nullability", "@abseil-cpp//absl/container:flat_hash_set", + "@abseil-cpp//absl/functional:any_invocable", "@abseil-cpp//absl/log", "@abseil-cpp//absl/log:check", "@abseil-cpp//absl/memory", "@abseil-cpp//absl/status", 
"@abseil-cpp//absl/status:statusor", "@abseil-cpp//absl/strings", + "@abseil-cpp//absl/synchronization", "@abseil-cpp//absl/time", "@abseil-cpp//absl/types:span", ], diff --git a/ortools/math_opt/solvers/cp_sat_solver.cc b/ortools/math_opt/solvers/cp_sat_solver.cc index 436a89661dd..c4da11cb149 100644 --- a/ortools/math_opt/solvers/cp_sat_solver.cc +++ b/ortools/math_opt/solvers/cp_sat_solver.cc @@ -14,15 +14,21 @@ #include "ortools/math_opt/solvers/cp_sat_solver.h" #include +#include #include #include +#include #include #include #include #include #include +#include "absl/base/attributes.h" +#include "absl/base/nullability.h" +#include "absl/base/thread_annotations.h" #include "absl/container/flat_hash_set.h" +#include "absl/functional/any_invocable.h" #include "absl/log/check.h" #include "absl/log/log.h" #include "absl/memory/memory.h" @@ -33,6 +39,7 @@ #include "absl/strings/str_join.h" #include "absl/strings/str_split.h" #include "absl/strings/string_view.h" +#include "absl/synchronization/mutex.h" #include "absl/time/clock.h" #include "absl/time/time.h" #include "absl/types/span.h" @@ -65,6 +72,8 @@ namespace math_opt { namespace { +constexpr double kInf = std::numeric_limits::infinity(); + constexpr SupportedProblemStructures kCpSatSupportedStructures = { .integer_variables = SupportType::kSupported, .quadratic_objectives = SupportType::kNotImplemented, @@ -316,6 +325,162 @@ absl::StatusOr GetTermination( absl::StrCat("unimplemented solve status: ", response.status())); } +// This class gathers the solution callback and best bound callback together +// with some solver state that we need to update as the solver progresses. 
+class CpSatCallbacks { + public: + CpSatCallbacks(const absl_nullable SolverInterface::Callback& cb + ABSL_ATTRIBUTE_LIFETIME_BOUND, + SolveInterrupter* absl_nonnull local_interrupter + ABSL_ATTRIBUTE_LIFETIME_BOUND, + absl_nonnull absl::AnyInvocable< + SparseDoubleVectorProto(absl::Span) const> + extract_solution, + absl::flat_hash_set events, + bool is_maximize); + + // CpSatCallbacks is neither copyable nor movable as callbacks point to it. + CpSatCallbacks(const CpSatCallbacks&) = delete; + CpSatCallbacks& operator=(const CpSatCallbacks&) = delete; + + // Returns a solution callback that wraps the user callback and updates the + // state of CpSatCallbacks. Returns nullptr if it is not needed. + absl_nullable std::function MakeSolutionCallback(); + + // Returns a best bound callback that wraps the user callback and updates the + // state of CpSatCallbacks. Returns nullptr if it is not needed. + absl_nullable std::function MakeBestBoundCallback(); + + absl::Status error() const { + absl::MutexLock lock(mutex_); + return error_; + } + + private: + void ExecuteCallback(const CallbackDataProto& cb_data); + void UpdateMipStatsFromNewSolution(const MPSolution& mp_solution) + ABSL_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + + const SolverInterface::Callback& cb_; + SolveInterrupter* absl_nonnull const local_interrupter_; + const absl::AnyInvocable) + const> + extract_solution_; + const bool has_mip_solution_event_; + const bool has_mip_event_; + const bool is_maximize_; + + mutable absl::Mutex mutex_; + absl::Status error_ ABSL_GUARDED_BY(mutex_) = absl::OkStatus(); + CallbackDataProto::MipStats current_mip_stats_ ABSL_GUARDED_BY(mutex_); +}; + +CpSatCallbacks::CpSatCallbacks( + const SolverInterface::Callback& cb ABSL_ATTRIBUTE_LIFETIME_BOUND, + SolveInterrupter* absl_nonnull local_interrupter + ABSL_ATTRIBUTE_LIFETIME_BOUND, + absl_nonnull + absl::AnyInvocable) const> + extract_solution ABSL_ATTRIBUTE_LIFETIME_BOUND, + absl::flat_hash_set events, const bool is_maximize) + : 
cb_(cb),
+      local_interrupter_(local_interrupter),
+      extract_solution_(std::move(extract_solution)),
+      // If there is no user callback, we make sure not to call it.
+      has_mip_solution_event_(cb != nullptr &&
+                              events.contains(CALLBACK_EVENT_MIP_SOLUTION)),
+      has_mip_event_(cb != nullptr && events.contains(CALLBACK_EVENT_MIP)),
+      is_maximize_(is_maximize) {
+  current_mip_stats_.set_primal_bound(is_maximize ? -kInf : kInf);
+  current_mip_stats_.set_dual_bound(is_maximize ? kInf : -kInf);
+  current_mip_stats_.set_number_of_solutions_found(0);
+}
+
+std::function absl_nullable
+CpSatCallbacks::MakeSolutionCallback() {
+  if (!has_mip_solution_event_ && !has_mip_event_) {
+    return nullptr;
+  }
+  if (!has_mip_solution_event_) {
+    return [this](const MPSolution& mp_solution) {
+      absl::MutexLock lock(mutex_);
+      UpdateMipStatsFromNewSolution(mp_solution);
+    };
+  }
+  return [this](const MPSolution& mp_solution) {
+    CallbackDataProto cb_data;
+    cb_data.set_event(CALLBACK_EVENT_MIP_SOLUTION);
+    *cb_data.mutable_primal_solution_vector() =
+        extract_solution_(mp_solution.variable_value());
+    {
+      absl::MutexLock lock(mutex_);
+      UpdateMipStatsFromNewSolution(mp_solution);
+      *cb_data.mutable_mip_stats() = current_mip_stats_;
+    }
+    ExecuteCallback(cb_data);
+  };
+}
+
+std::function absl_nullable
+CpSatCallbacks::MakeBestBoundCallback() {
+  if (!has_mip_solution_event_ && !has_mip_event_) {
+    return nullptr;
+  }
+  if (!has_mip_event_) {
+    return [this](const double best_bound) {
+      absl::MutexLock lock(mutex_);
+      current_mip_stats_.set_dual_bound(best_bound);
+    };
+  }
+  return [this](const double best_bound) {
+    CallbackDataProto cb_data;
+    cb_data.set_event(CALLBACK_EVENT_MIP);
+    {
+      absl::MutexLock lock(mutex_);
+      current_mip_stats_.set_dual_bound(best_bound);
+      *cb_data.mutable_mip_stats() = current_mip_stats_;
+    }
+    ExecuteCallback(cb_data);
+  };
+}
+
+void CpSatCallbacks::ExecuteCallback(const CallbackDataProto& cb_data) {
+  {
+    absl::MutexLock lock(mutex_);
+    if (!error_.ok()) {
+      
// A previous callback failed. + return; + } + } + const absl::StatusOr cb_result = cb_(cb_data); + // Note cb_result.cuts and cb_result.suggested solutions are not supported + // by CP-SAT and we have validated they are empty. + if (!cb_result.ok()) { + { + absl::MutexLock lock(mutex_); + error_ = cb_result.status(); + } + // Note: we will be returning a status error, we do not need to worry + // about interpreting this as TERMINATION_REASON_INTERRUPTED. + local_interrupter_->Interrupt(); + } else if (cb_result->terminate()) { + local_interrupter_->Interrupt(); + } +} + +void CpSatCallbacks::UpdateMipStatsFromNewSolution( + const MPSolution& mp_solution) ABSL_EXCLUSIVE_LOCKS_REQUIRED(mutex_) { + if (is_maximize_) { + current_mip_stats_.set_primal_bound(std::fmax( + current_mip_stats_.primal_bound(), mp_solution.objective_value())); + } else { + current_mip_stats_.set_primal_bound(std::fmin( + current_mip_stats_.primal_bound(), mp_solution.objective_value())); + } + current_mip_stats_.set_number_of_solutions_found( + current_mip_stats_.number_of_solutions_found() + 1); +} + } // namespace absl::StatusOr> CpSatSolver::New( @@ -345,7 +510,7 @@ absl::StatusOr CpSatSolver::Solve( RETURN_IF_ERROR(CheckRegisteredCallbackEvents( callback_registration, - /*supported_events=*/{CALLBACK_EVENT_MIP_SOLUTION})); + /*supported_events=*/{CALLBACK_EVENT_MIP_SOLUTION, CALLBACK_EVENT_MIP})); if (callback_registration.add_lazy_constraints()) { return absl::InvalidArgumentError( "CallbackRegistrationProto.add_lazy_constraints=true is not supported " @@ -392,7 +557,7 @@ absl::StatusOr CpSatSolver::Solve( } // We need to chain the user interrupter through a local interrupter, because - // if we termiante early from a callback request, we don't want to incorrectly + // if we terminate early from a callback request, we don't want to incorrectly // modify the input state. 
SolveInterrupter local_interrupter; std::atomic interrupt_solve = false; @@ -411,41 +576,21 @@ absl::StatusOr CpSatSolver::Solve( const absl::flat_hash_set events = EventSet(callback_registration); - std::function solution_callback; - absl::Status callback_error = absl::OkStatus(); - if (events.contains(CALLBACK_EVENT_MIP_SOLUTION)) { - solution_callback = - [this, &cb, &callback_error, &local_interrupter, - &callback_registration](const MPSolution& mp_solution) { - if (!callback_error.ok()) { - // A previous callback failed. - return; - } - CallbackDataProto cb_data; - cb_data.set_event(CALLBACK_EVENT_MIP_SOLUTION); - *cb_data.mutable_primal_solution_vector() = - ExtractSolution(mp_solution.variable_value(), - callback_registration.mip_solution_filter()); - const absl::StatusOr cb_result = cb(cb_data); - if (!cb_result.ok()) { - callback_error = cb_result.status(); - // Note: we will be returning a status error, we do not need to - // worry about interpreting this as TERMINATION_REASON_INTERRUPTED. - local_interrupter.Interrupt(); - } else if (cb_result->terminate()) { - local_interrupter.Interrupt(); - } - // Note cb_result.cuts and cb_result.suggested solutions are not - // supported by CP-SAT and we have validated they are empty. - }; - } + absl::AnyInvocable) const> + extract_solution = [&](absl::Span cp_sat_variable_values) { + return ExtractSolution(cp_sat_variable_values, + callback_registration.mip_solution_filter()); + }; + CpSatCallbacks callbacks(cb, &local_interrupter, std::move(extract_solution), + events, cp_sat_model_.maximize()); // CP-SAT returns "infeasible" for inverted bounds. 
RETURN_IF_ERROR(ListInvertedBounds().ToStatus()); const MPSolutionResponse response = SatSolveProto( - std::move(req), &interrupt_solve, logging_callback, solution_callback); - RETURN_IF_ERROR(callback_error) << "error in callback"; + std::move(req), &interrupt_solve, logging_callback, + callbacks.MakeSolutionCallback(), callbacks.MakeBestBoundCallback()); + RETURN_IF_ERROR(callbacks.error()) << "error in callback"; ASSIGN_OR_RETURN(*result.mutable_termination(), GetTermination(local_interrupter.IsInterrupted(), /*maximize=*/cp_sat_model_.maximize(), diff --git a/ortools/math_opt/solvers/cp_sat_solver_test.cc b/ortools/math_opt/solvers/cp_sat_solver_test.cc index fb91ec639a7..623a272d3b7 100644 --- a/ortools/math_opt/solvers/cp_sat_solver_test.cc +++ b/ortools/math_opt/solvers/cp_sat_solver_test.cc @@ -328,15 +328,16 @@ SolveParameters AllSolutions() { return result; } -INSTANTIATE_TEST_SUITE_P(CpSatCallbackTest, CallbackTest, - Values(CallbackTestParams( - SolverType::kCpSat, - /*integer_variables=*/true, - /*add_lazy_constraints=*/false, - /*add_cuts=*/false, - /*supported_events=*/{CallbackEvent::kMipSolution}, - /*all_solutions=*/AllSolutions(), - /*reaches_cut_callback=*/std::nullopt))); +INSTANTIATE_TEST_SUITE_P( + CpSatCallbackTest, CallbackTest, + Values(CallbackTestParams( + SolverType::kCpSat, + /*integer_variables=*/true, + /*add_lazy_constraints=*/false, + /*add_cuts=*/false, + /*supported_events=*/{CallbackEvent::kMipSolution, CallbackEvent::kMip}, + /*all_solutions=*/AllSolutions(), + /*reaches_cut_callback=*/std::nullopt))); TEST(CpSatInvalidCallbackTest, RequestLazyConstraints) { Model model("model"); From 84046f9e6b27ee506669688b54b3fea09eff2f94 Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Mon, 24 Nov 2025 16:01:05 +0100 Subject: [PATCH 015/111] bazel: bump absl-py to 2.3.1 --- bazel/notebook_requirements.in | 2 +- bazel/notebook_requirements.txt | 2 +- bazel/ortools_requirements.in | 2 +- bazel/ortools_requirements.txt | 2 +- 4 files 
changed, 4 insertions(+), 4 deletions(-) diff --git a/bazel/notebook_requirements.in b/bazel/notebook_requirements.in index 40914e5bbe4..cd8dffe087b 100644 --- a/bazel/notebook_requirements.in +++ b/bazel/notebook_requirements.in @@ -1,5 +1,5 @@ # OR-Tools code dependencies -absl-py==2.2.2 +absl-py==2.3.1 immutabledict==4.2.2 numpy==2.3.5 protobuf==6.32.0 diff --git a/bazel/notebook_requirements.txt b/bazel/notebook_requirements.txt index 7307754dba3..58223c499cb 100644 --- a/bazel/notebook_requirements.txt +++ b/bazel/notebook_requirements.txt @@ -4,7 +4,7 @@ # # bazel run //bazel:notebook_requirements.update # -absl-py==2.2.2 +absl-py==2.3.1 # via -r bazel/notebook_requirements.in anyio==4.10.0 # via diff --git a/bazel/ortools_requirements.in b/bazel/ortools_requirements.in index 1d0889df0a1..9f6f3965dc9 100644 --- a/bazel/ortools_requirements.in +++ b/bazel/ortools_requirements.in @@ -1,5 +1,5 @@ # OR-Tools code dependencies -absl-py==2.2.2 +absl-py==2.3.1 immutabledict==4.2.2 numpy==2.3.5 protobuf==6.32.0 diff --git a/bazel/ortools_requirements.txt b/bazel/ortools_requirements.txt index 5d17a52032c..2805d96ecc0 100644 --- a/bazel/ortools_requirements.txt +++ b/bazel/ortools_requirements.txt @@ -4,7 +4,7 @@ # # bazel run //bazel:ortools_requirements.update # -absl-py==2.2.2 +absl-py==2.3.1 # via -r bazel/ortools_requirements.in black==24.8.0 # via -r bazel/ortools_requirements.in From cd37bacd2a56f19836d950718ae792d3416b925d Mon Sep 17 00:00:00 2001 From: Laurent Perron Date: Mon, 24 Nov 2025 15:39:26 +0100 Subject: [PATCH 016/111] backport sat from main --- ortools/sat/BUILD.bazel | 8 +-- ortools/sat/clause.cc | 35 +++++++++++++ ortools/sat/clause.h | 45 +++++++++++----- ortools/sat/cp_model_presolve_test.cc | 2 +- ortools/sat/cp_model_search.cc | 7 +++ ortools/sat/cp_model_solver.cc | 60 ++------------------- ortools/sat/cp_model_solver.h | 3 -- ortools/sat/cp_model_solver_test.cc | 23 +++----- ortools/sat/csharp/CpSolver.cs | 2 + 
ortools/sat/java/CpSolverTest.java | 28 +++++++++- ortools/sat/lrat_proof_handler.cc | 55 ++++++++++++++------ ortools/sat/lrat_proof_handler.h | 17 +++--- ortools/sat/parameters_validation.cc | 1 + ortools/sat/probing.cc | 7 ++- ortools/sat/python/cp_model.py | 24 +++++---- ortools/sat/python/cp_model_test.py | 21 +++++++- ortools/sat/sat_parameters.proto | 67 +++++++++++++++++++----- ortools/sat/sat_solver.cc | 75 +++++++++++---------------- ortools/sat/stat_tables.cc | 18 ++++--- 19 files changed, 302 insertions(+), 196 deletions(-) diff --git a/ortools/sat/BUILD.bazel b/ortools/sat/BUILD.bazel index ecb69b34b4f..808a16d57dd 100644 --- a/ortools/sat/BUILD.bazel +++ b/ortools/sat/BUILD.bazel @@ -904,23 +904,16 @@ cc_test( ":cp_model_cc_proto", ":cp_model_checker", ":cp_model_solver", - ":cp_model_solver_helpers", ":cp_model_test_utils", - ":cp_model_utils", - ":drat_checker", ":lp_utils", ":model", - ":sat_base", ":sat_parameters_cc_proto", - ":sat_solver", - ":synchronization", "//ortools/base:gmock_main", "//ortools/base:parse_test_proto", "//ortools/linear_solver:linear_solver_cc_proto", "//ortools/port:os", "//ortools/util:logging", "@abseil-cpp//absl/container:flat_hash_set", - "@abseil-cpp//absl/flags:flag", "@abseil-cpp//absl/log", "@abseil-cpp//absl/strings", ], @@ -4274,6 +4267,7 @@ cc_library( ":model", ":sat_base", "//ortools/base:file", + "@abseil-cpp//absl/flags:flag", "@abseil-cpp//absl/log", "@abseil-cpp//absl/log:check", "@abseil-cpp//absl/strings", diff --git a/ortools/sat/clause.cc b/ortools/sat/clause.cc index f396b1089c3..32f09527184 100644 --- a/ortools/sat/clause.cc +++ b/ortools/sat/clause.cc @@ -103,6 +103,8 @@ bool LiteralsAreFixedAtRoot(const Trail& trail, ClauseManager::ClauseManager(Model* model) : SatPropagator("ClauseManager"), clause_id_generator_(model->GetOrCreate()), + parameters_(*model->GetOrCreate()), + assignment_(model->GetOrCreate()->Assignment()), implication_graph_(model->GetOrCreate()), trail_(model->GetOrCreate()), 
num_inspected_clauses_(0), @@ -327,6 +329,9 @@ SatClause* ClauseManager::AddRemovableClause(ClauseId id, } if (add_clause_callback_ != nullptr) add_clause_callback_(lbd, literals); CHECK(AttachAndPropagate(clause, trail)); + + // Create an entry in clauses_info_ to mark that clause as removable. + clauses_info_[clause].lbd = lbd; return clause; } @@ -484,6 +489,35 @@ bool ClauseManager::InprocessingFixLiteral( return implication_graph_->Propagate(trail_); } +void ClauseManager::ChangeLbdIfBetter(SatClause* clause, int new_lbd) { + auto it = clauses_info_.find(clause); + if (it == clauses_info_.end()) return; + + // Always take the min. + if (new_lbd > it->second.lbd) return; + + ++num_lbd_promotions_; + if (new_lbd <= parameters_.clause_cleanup_lbd_bound()) { + // We keep the clause forever. + clauses_info_.erase(it); + } else { + it->second.lbd = new_lbd; + } +} + +bool ClauseManager::RemoveFixedLiteralsAndTestIfTrue(SatClause* clause) { + if (clause->RemoveFixedLiteralsAndTestIfTrue(assignment_)) { + // The clause is always true, detach it. + LazyDelete(clause, DeletionSourceForStat::FIXED_AT_TRUE); + return true; + } + + // We should have dealt with unit and unsat clause before this. + CHECK_GE(clause->size(), 2); + ChangeLbdIfBetter(clause, clause->size()); + return false; +} + bool ClauseManager::InprocessingRewriteClause( SatClause* clause, absl::Span new_clause, absl::Span clause_ids) { @@ -543,6 +577,7 @@ bool ClauseManager::InprocessingRewriteClause( } clause->Rewrite(new_clause); + ChangeLbdIfBetter(clause, new_clause.size()); // And we reattach it. if (all_clauses_are_attached_) { diff --git a/ortools/sat/clause.h b/ortools/sat/clause.h index aa9736d8566..43294089c6d 100644 --- a/ortools/sat/clause.h +++ b/ortools/sat/clause.h @@ -107,14 +107,6 @@ class SatClause { return absl::Span(&(literals_[0]), size_); } - // Removes literals that are fixed. This should only be called at level 0 - // where a literal is fixed iff it is assigned. 
Aborts and returns true if - // they are not all false. - // - // Note that the removed literal can still be accessed in the portion [size, - // old_size) of literals(). - bool RemoveFixedLiteralsAndTestIfTrue(const VariablesAssignment& assignment); - // Returns true if the clause is satisfied for the given assignment. Note that // the assignment may be partial, so false does not mean that the clause can't // be satisfied by completing the assignment. @@ -134,6 +126,14 @@ class SatClause { // never be used afterwards. void Clear() { size_ = 0; } + // Removes literals that are fixed. This should only be called at level 0 + // where a literal is fixed iff it is assigned. Aborts and returns true if + // they are not all false. + // + // Note that the removed literal can still be accessed in the portion [size, + // old_size) of literals(). + bool RemoveFixedLiteralsAndTestIfTrue(const VariablesAssignment& assignment); + // Rewrites a clause with another shorter one. Note that the clause shouldn't // be attached when this is called. void Rewrite(absl::Span new_clause) { @@ -154,7 +154,7 @@ class SatClause { struct ClauseInfo { double activity = 0.0; int32_t lbd = 0; - bool protected_during_next_cleanup = false; + int32_t num_cleanup_rounds_since_last_bumped = 0; }; class BinaryImplicationGraph; @@ -267,6 +267,18 @@ class ClauseManager : public SatPropagator { if (it == clauses_info_.end()) return 0; return it->second.lbd; } + void KeepClauseForever(SatClause* const clause) { + clauses_info_.erase(clause); + } + void RescaleClauseActivities(double scaling_factor) { + for (auto& entry : clauses_info_) { + entry.second.activity *= scaling_factor; + } + } + + // If the new lbd is better than the stored one, update it. + // And return the result of IsRemovable() (this save one hash lookup). + void ChangeLbdIfBetter(SatClause* clause, int new_lbd); // Total number of clauses inspected during calls to Propagate(). 
int64_t num_inspected_clauses() const { return num_inspected_clauses_; } @@ -280,6 +292,10 @@ class ClauseManager : public SatPropagator { // Number of clauses currently watched. int64_t num_watched_clauses() const { return num_watched_clauses_; } + // Number of time an existing clause lbd was reduced (due to inprocessing or + // recomputation of lbd in different branches). + int64_t num_lbd_promotions() const { return num_lbd_promotions_; } + ClauseId GetClauseId(const SatClause* clause) const { const auto it = clause_id_.find(clause); return it != clause_id_.end() ? it->second : kNoClauseId; @@ -343,6 +359,8 @@ class ClauseManager : public SatPropagator { SatClause* clause, absl::Span new_clause, absl::Span clause_ids = {}); + bool RemoveFixedLiteralsAndTestIfTrue(SatClause* clause); + // Fix a literal either with an existing LRAT `unit_clause_id`, or with a new // inferred unit clause, using `clause_ids` as proof. // This do NOT need to be between [Detach/Attach]AllClauses() calls. @@ -427,14 +445,17 @@ class ClauseManager : public SatPropagator { SparseBitset needs_cleaning_; bool is_clean_ = true; + const SatParameters& parameters_; + const VariablesAssignment& assignment_; BinaryImplicationGraph* implication_graph_; Trail* trail_; // For statistic reporting. std::vector deletion_counters_; - int64_t num_inspected_clauses_; - int64_t num_inspected_clause_literals_; - int64_t num_watched_clauses_; + int64_t num_inspected_clauses_ = 0; + int64_t num_inspected_clause_literals_ = 0; + int64_t num_watched_clauses_ = 0; + int64_t num_lbd_promotions_ = 0; mutable StatsGroup stats_; // For DetachAllClauses()/AttachAllClauses(). 
diff --git a/ortools/sat/cp_model_presolve_test.cc b/ortools/sat/cp_model_presolve_test.cc index 31e7ee18159..f086e60cc5c 100644 --- a/ortools/sat/cp_model_presolve_test.cc +++ b/ortools/sat/cp_model_presolve_test.cc @@ -8155,10 +8155,10 @@ TEST(PresolveCpModelTest, SolveDiophantine) { SatParameters params; params.set_cp_model_presolve(true); + params.set_num_workers(1); // Should solve in < .01 second. Note that deterministic time is not // completely accurate. params.set_max_deterministic_time(.001); - params.set_num_workers(1); const CpSolverResponse response_with = SolveWithParameters(model_proto, params); diff --git a/ortools/sat/cp_model_search.cc b/ortools/sat/cp_model_search.cc index ca2c41950a2..b54b1667d3e 100644 --- a/ortools/sat/cp_model_search.cc +++ b/ortools/sat/cp_model_search.cc @@ -934,6 +934,13 @@ std::vector GetFullWorkerParameters( params.search_branching() == SatParameters::FIXED_SEARCH) { continue; } + // As of November 2025, we don't support any LP reasoning when producing an + // UNSAT proof. + if ((params.check_lrat_proof() || params.check_drat_proof() || + params.output_drat_proof()) && + params.linearization_level() > 1) { + continue; + } // TODO(user): Enable probing_search in deterministic mode. 
// Currently it timeouts on small problems as the deterministic time limit diff --git a/ortools/sat/cp_model_solver.cc b/ortools/sat/cp_model_solver.cc index 7453ee558f4..74ca39db91d 100644 --- a/ortools/sat/cp_model_solver.cc +++ b/ortools/sat/cp_model_solver.cc @@ -49,7 +49,6 @@ #include "absl/types/span.h" #include "google/protobuf/arena.h" #include "google/protobuf/text_format.h" -#include "ortools/base/file.h" #include "ortools/base/helpers.h" #include "ortools/base/logging.h" #include "ortools/base/options.h" @@ -132,32 +131,6 @@ ABSL_FLAG(bool, cp_model_use_hint_for_debug_only, false, "complete, validate that no buggy propagator make it infeasible."); ABSL_FLAG(bool, cp_model_fingerprint_model, true, "Fingerprint the model."); -ABSL_FLAG(std::string, cp_model_drat_output, "", - "If non-empty, a proof in DRAT format will be written to this file. " - "This only works in the same conditions as the --cp_model_lrat_check " - "flag, and only for pure SAT models."); - -ABSL_FLAG(bool, cp_model_drat_check, false, - "If true, a proof in DRAT format will be stored in memory and " - "checked if the problem is UNSAT. This only works in the same " - "conditions as the --cp_model_lrat_check flag, and only for pure SAT " - "models."); - -ABSL_FLAG(bool, cp_model_lrat_check, false, - "If true, inferred clauses are checked with an LRAT checker as they " - "are learned. As of November 2025, this only works with a single " - "worker and symmetry level 0 or 1. This also works with presolve, if " - "find_clauses_that_are_exactly_one is false and " - "merge_at_most_one_work_limit is 0. However, in this case, the " - "presolved problem is assumed to be correct, without proof. If the " - "model is not pure SAT, the checks are only partial (some clauses " - "can be assumed without proof)."); - -ABSL_FLAG(double, cp_model_max_drat_time_in_seconds, - std::numeric_limits::infinity(), - "Maximum time in seconds to check the DRAT proof. 
This will only " - "be used is the cp_model_drat_check flag is enabled."); - ABSL_FLAG(bool, cp_model_check_intermediate_solutions, false, "When true, all intermediate solutions found by the solver will be " "checked. This can be expensive, therefore it is off by default."); @@ -1035,26 +1008,6 @@ bool SolutionHintIsCompleteAndFeasible( } } -std::unique_ptr MaybeCreateLratProofHandler(Model* model) { - const bool check_lrat = absl::GetFlag(FLAGS_cp_model_lrat_check); - const bool check_drat = absl::GetFlag(FLAGS_cp_model_drat_check); - File* drat_output = nullptr; - if (!absl::GetFlag(FLAGS_cp_model_drat_output).empty()) { - CHECK_OK(file::Open(absl::GetFlag(FLAGS_cp_model_drat_output), "w", - &drat_output, file::Defaults())); - } - if (!check_lrat && !check_drat && drat_output == nullptr) return nullptr; - - // TODO(user): pass the [presolved] model proto to the handler, so that - // it can map internal problem clause IDs to constraint indices in the - // original model. This will be needed to write the LRAT proof in a file that - // can be checked with an external LRAT checker, expecting the standard LRAT - // ASCII file format (which requires problem clauses IDs between 1 and n). - return std::make_unique(model, check_lrat, check_drat, - drat_output, - /*in_binary_drat_format=*/false); -} - // Encapsulate a full CP-SAT solve without presolve in the SubSolver API. 
class FullProblemSolver : public SubSolver { public: @@ -1082,7 +1035,7 @@ class FullProblemSolver : public SubSolver { shared_->RegisterSharedClassesInLocalModel(&local_model_); std::unique_ptr lrat_proof_handler = - MaybeCreateLratProofHandler(&local_model_); + LratProofHandler::MaybeCreate(&local_model_); if (lrat_proof_handler != nullptr) { local_model_.Register(lrat_proof_handler.get()); local_model_.TakeOwnership(lrat_proof_handler.release()); @@ -1111,8 +1064,7 @@ class FullProblemSolver : public SubSolver { WallTimer timer; timer.Start(); const bool valid = local_model_.GetOrCreate()->ModelIsUnsat() - ? lrat_proof_handler->Check(absl::GetFlag( - FLAGS_cp_model_max_drat_time_in_seconds)) + ? lrat_proof_handler->Check() : lrat_proof_handler->Valid(); shared_->lrat_proof_status->NewSubsolverProofStatus( valid ? DratChecker::Status::VALID : DratChecker::Status::INVALID, @@ -1801,12 +1753,8 @@ void SolveCpModelParallel(SharedClasses* shared, Model* global_model) { const SatParameters& params = *global_model->GetOrCreate(); if (global_model->GetOrCreate()->LimitReached()) return; - if (absl::GetFlag(FLAGS_cp_model_drat_check) || - !absl::GetFlag(FLAGS_cp_model_drat_output).empty()) { - LOG(WARNING) - << "DRAT check and output are skipped when using several workers"; - absl::SetFlag(&FLAGS_cp_model_drat_check, false); - absl::SetFlag(&FLAGS_cp_model_drat_output, ""); + if (params.check_drat_proof() || params.output_drat_proof()) { + LOG(FATAL) << "DRAT proofs are not supported with several workers"; } // If specified by the user, we might disable some parameters based on their diff --git a/ortools/sat/cp_model_solver.h b/ortools/sat/cp_model_solver.h index 3dc66e97515..aaafcaa2aa5 100644 --- a/ortools/sat/cp_model_solver.h +++ b/ortools/sat/cp_model_solver.h @@ -26,9 +26,6 @@ #ifndef SWIG OR_DLL ABSL_DECLARE_FLAG(bool, cp_model_dump_response); -OR_DLL ABSL_DECLARE_FLAG(bool, cp_model_drat_check); -OR_DLL ABSL_DECLARE_FLAG(bool, cp_model_lrat_check); -OR_DLL 
ABSL_DECLARE_FLAG(double, cp_model_max_drat_time_in_seconds); #endif namespace operations_research { diff --git a/ortools/sat/cp_model_solver_test.cc b/ortools/sat/cp_model_solver_test.cc index e5a223885e1..d5a45c6eaff 100644 --- a/ortools/sat/cp_model_solver_test.cc +++ b/ortools/sat/cp_model_solver_test.cc @@ -14,12 +14,10 @@ #include "ortools/sat/cp_model_solver.h" #include -#include #include #include #include "absl/container/flat_hash_set.h" -#include "absl/flags/flag.h" #include "absl/log/log.h" #include "absl/strings/str_join.h" #include "gtest/gtest.h" @@ -29,15 +27,10 @@ #include "ortools/port/os.h" #include "ortools/sat/cp_model.pb.h" #include "ortools/sat/cp_model_checker.h" -#include "ortools/sat/cp_model_solver_helpers.h" #include "ortools/sat/cp_model_test_utils.h" -#include "ortools/sat/cp_model_utils.h" #include "ortools/sat/lp_utils.h" #include "ortools/sat/model.h" -#include "ortools/sat/sat_base.h" #include "ortools/sat/sat_parameters.pb.h" -#include "ortools/sat/sat_solver.h" -#include "ortools/sat/synchronization.h" #include "ortools/util/logging.h" namespace operations_research { @@ -5462,11 +5455,11 @@ TEST(CpModelSolverTest, DratProofIsValidForRandom3Sat) { SatParameters params; params.set_num_workers(1); params.set_cp_model_presolve(false); - params.set_symmetry_level(0); - params.set_linearization_level(0); + params.set_symmetry_level(1); + params.set_linearization_level(1); + params.set_check_drat_proof(true); + params.set_max_drat_time_in_seconds(60); params.set_debug_crash_if_lrat_check_fails(true); - absl::SetFlag(&FLAGS_cp_model_drat_check, true); - absl::SetFlag(&FLAGS_cp_model_max_drat_time_in_seconds, 60); int num_infeasible = 0; for (int i = 0; i < 100; ++i) { @@ -5484,12 +5477,12 @@ TEST(CpModelSolverTest, DratProofIsValidForRandom3Sat) { TEST(CpModelSolverTest, LratProofIsValidForRandom3Sat) { SatParameters params; - params.set_num_workers(1); + params.set_num_workers(8); params.set_cp_model_presolve(false); - 
params.set_symmetry_level(0); - params.set_linearization_level(0); + params.set_symmetry_level(1); + params.set_linearization_level(1); + params.set_check_lrat_proof(true); params.set_debug_crash_if_lrat_check_fails(true); - absl::SetFlag(&FLAGS_cp_model_lrat_check, true); int num_infeasible = 0; for (int i = 0; i < 100; ++i) { diff --git a/ortools/sat/csharp/CpSolver.cs b/ortools/sat/csharp/CpSolver.cs index ab9bc8b3364..c2a1449ee01 100644 --- a/ortools/sat/csharp/CpSolver.cs +++ b/ortools/sat/csharp/CpSolver.cs @@ -201,6 +201,8 @@ public bool BooleanValue(ILiteral literal) public double WallTime() => Response!.WallTime; + public string SolveLog() => Response!.SolveLog; + public IList SufficientAssumptionsForInfeasibility() => Response!.SufficientAssumptionsForInfeasibility; public string SolutionInfo() => Response!.SolutionInfo; diff --git a/ortools/sat/java/CpSolverTest.java b/ortools/sat/java/CpSolverTest.java index 743436f7835..e3c15e9bd0d 100644 --- a/ortools/sat/java/CpSolverTest.java +++ b/ortools/sat/java/CpSolverTest.java @@ -109,7 +109,7 @@ public void testCpSolver_invalidModel() throws Exception { final CpSolverStatus status = solver.solve(model); assertThat(status).isEqualTo(CpSolverStatus.MODEL_INVALID); - assertEquals("var #0 has no domain(): name: \"x\"", solver.getSolutionInfo()); + assertEquals("var #0 has no domain(): name: \"x\"", solver.solutionInfo()); } @Test @@ -309,6 +309,32 @@ public void testCpSolver_customLog() throws Exception { assertThat(log).contains("OPTIMAL"); } + @Test + public void testCpSolver_logToResponse() throws Exception { + System.out.println("testCpSolver_logToResponse"); + final CpModel model = new CpModel(); + assertNotNull(model); + // Creates the variables. + final int numVals = 3; + final IntVar x = model.newIntVar(0, numVals - 1, "x"); + final IntVar y = model.newIntVar(0, numVals - 1, "y"); + // Creates the constraints. + model.addDifferent(x, y); + + // Creates a solver and solves the model. 
+ final CpSolver solver = new CpSolver(); + assertNotNull(solver); + solver.getParameters().setLogToStdout(false).setLogSearchProgress(true).setLogToResponse(true); + final CpSolverStatus status = solver.solve(model); + + assertThat(status).isEqualTo(CpSolverStatus.OPTIMAL); + String log = solver.solveLog(); + assertThat(log).isNotEmpty(); + assertThat(log).contains("Parameters"); + assertThat(log).contains("log_to_stdout: false"); + assertThat(log).contains("OPTIMAL"); + } + @Test public void testCpSolver_customLogMultiThread() { System.out.println("testCpSolver_customLogMultiThread"); diff --git a/ortools/sat/lrat_proof_handler.cc b/ortools/sat/lrat_proof_handler.cc index 0eb1e4f6ec5..0fb196165bb 100644 --- a/ortools/sat/lrat_proof_handler.cc +++ b/ortools/sat/lrat_proof_handler.cc @@ -16,19 +16,30 @@ #include #include #include +#include #include +#include "absl/flags/flag.h" #include "absl/log/check.h" #include "absl/log/log.h" #include "absl/strings/str_join.h" #include "absl/types/span.h" #include "ortools/base/file.h" +#include "ortools/base/options.h" #include "ortools/sat/drat_checker.h" #include "ortools/sat/drat_writer.h" #include "ortools/sat/lrat_checker.h" #include "ortools/sat/model.h" #include "ortools/sat/sat_base.h" +#if defined(_MSC_VER) +ABSL_FLAG(std::string, cp_model_drat_output, ".\\drat.txt", + "File name for the generated DRAT proof, if DRAT output is enabled."); +#else +ABSL_FLAG(std::string, cp_model_drat_output, "/tmp/drat.txt", + "File name for the generated DRAT proof, if DRAT output is enabled."); +#endif + namespace operations_research { namespace sat { @@ -46,18 +57,33 @@ std::vector SortClauseForDrat(absl::Span clause) { } } // namespace -LratProofHandler::LratProofHandler(Model* model, bool check_lrat, - bool check_drat, File* drat_output, - bool in_binary_drat_format) - : lrat_checker_(check_lrat ? std::make_unique(model) - : nullptr), - drat_checker_(check_drat ? 
std::make_unique() : nullptr), - drat_writer_( - drat_output != nullptr - ? std::make_unique(in_binary_drat_format, drat_output) - : nullptr), - debug_crash_on_error_(model->GetOrCreate() - ->debug_crash_if_lrat_check_fails()) {} +std::unique_ptr LratProofHandler::MaybeCreate(Model* model) { + const SatParameters& params = *model->GetOrCreate(); + if (!params.check_lrat_proof() && !params.check_drat_proof() && + !params.output_drat_proof()) { + return nullptr; + } + return std::unique_ptr(new LratProofHandler(model)); +} + +LratProofHandler::LratProofHandler(Model* model) { + const SatParameters& params = *model->GetOrCreate(); + if (params.check_lrat_proof()) { + lrat_checker_ = std::make_unique(model); + } + if (params.check_drat_proof()) { + drat_checker_ = std::make_unique(); + } + if (params.output_drat_proof()) { + File* drat_output = nullptr; + CHECK_OK(file::Open(absl::GetFlag(FLAGS_cp_model_drat_output), "w", + &drat_output, file::Defaults())); + drat_writer_ = std::make_unique( + /*in_binary_drat_format=*/false, drat_output); + } + max_drat_time_in_seconds_ = params.max_drat_time_in_seconds(); + debug_crash_on_error_ = params.debug_crash_if_lrat_check_fails(); +} bool LratProofHandler::AddProblemClause(ClauseId id, absl::Span clause) { @@ -193,15 +219,14 @@ DratChecker::Status LratProofHandler::Valid() const { return DratChecker::Status::UNKNOWN; } -DratChecker::Status LratProofHandler::Check( - double max_drat_check_time_in_seconds) { +DratChecker::Status LratProofHandler::Check() { DratChecker::Status status = DratChecker::Status::UNKNOWN; if (lrat_checker_ != nullptr) { status = CheckResult(lrat_checker_->Check()) ? 
DratChecker::Status::VALID : DratChecker::Status::INVALID; } if (status != DratChecker::Status::INVALID && drat_checker_ != nullptr) { - drat_checker_->Check(max_drat_check_time_in_seconds); + drat_checker_->Check(max_drat_time_in_seconds_); if (status == DratChecker::Status::INVALID && debug_crash_on_error_) { LOG(FATAL) << "DRAT check failed"; } diff --git a/ortools/sat/lrat_proof_handler.h b/ortools/sat/lrat_proof_handler.h index 8006804cb8b..be5d1c76493 100644 --- a/ortools/sat/lrat_proof_handler.h +++ b/ortools/sat/lrat_proof_handler.h @@ -20,7 +20,6 @@ #include #include "absl/types/span.h" -#include "ortools/base/file.h" #include "ortools/sat/drat_checker.h" #include "ortools/sat/drat_writer.h" #include "ortools/sat/lrat_checker.h" @@ -34,8 +33,12 @@ namespace sat { // and/or by saving it to a file. class LratProofHandler { public: - explicit LratProofHandler(Model* model, bool check_lrat, bool check_drat, - File* drat_output, bool in_binary_drat_format); + // TODO(user): pass the [presolved] model proto to the handler, so that + // it can map internal problem clause IDs to constraint indices in the + // original model. This will be needed to write the LRAT proof in a file that + // can be checked with an external LRAT checker, expecting the standard LRAT + // ASCII file format (which requires problem clauses IDs between 1 and n). + static std::unique_ptr MaybeCreate(Model* model); bool lrat_check_enabled() const { return lrat_checker_ != nullptr; } bool drat_check_enabled() const { return drat_checker_ != nullptr; } @@ -84,23 +87,25 @@ class LratProofHandler { // whether the empty clause has been successfully inferred. Returns INVALID if // it is not. Returns UNKNOWN if the check timed out (this can only occur // with DRAT checks), or if neither LRAT nor DRAT checks were enabled. 
- DratChecker::Status Check(double max_drat_check_time_in_seconds = - std::numeric_limits::infinity()); + DratChecker::Status Check(); void AddStats() const; int64_t num_assumed_clauses() const { return num_assumed_clauses_; } private: + explicit LratProofHandler(Model* model); + bool CheckResult(bool result) const; std::unique_ptr lrat_checker_; std::unique_ptr drat_checker_; std::unique_ptr drat_writer_; + double max_drat_time_in_seconds_ = std::numeric_limits::infinity(); + bool debug_crash_on_error_ = false; bool all_problem_clauses_loaded_ = false; int64_t num_assumed_clauses_ = 0; - bool debug_crash_on_error_ = false; ClauseId pinned_clause_id_ = kNoClauseId; std::vector pinned_clause_; diff --git a/ortools/sat/parameters_validation.cc b/ortools/sat/parameters_validation.cc index 1e7f52e628a..598d536d21b 100644 --- a/ortools/sat/parameters_validation.cc +++ b/ortools/sat/parameters_validation.cc @@ -106,6 +106,7 @@ std::string ValidateParameters(const SatParameters& params) { TEST_NOT_NAN(max_time_in_seconds); TEST_NOT_NAN(max_deterministic_time); + TEST_NOT_NAN(max_drat_time_in_seconds); // Parallelism. const int kMaxReasonableParallelism = 10'000; diff --git a/ortools/sat/probing.cc b/ortools/sat/probing.cc index 4a048e39234..49cecc4f94e 100644 --- a/ortools/sat/probing.cc +++ b/ortools/sat/probing.cc @@ -1353,7 +1353,12 @@ void FailedLiteralProbing::MaybeSubsumeWithBinaryClause( break; } } - if (!subsumed) return; + if (!subsumed) { + // The clause is not subsumed but its lbd is 2 when last_decision is + // propagated. This is a "glue" clause. + clause_manager_->ChangeLbdIfBetter(clause, 2); + return; + } // Since we will remove the clause, we need to make sure we do have the // implication in our repository. 
diff --git a/ortools/sat/python/cp_model.py b/ortools/sat/python/cp_model.py index d6a0094666d..dfc01b3432c 100644 --- a/ortools/sat/python/cp_model.py +++ b/ortools/sat/python/cp_model.py @@ -1869,6 +1869,19 @@ def user_time(self) -> float: """Returns the user time in seconds since the creation of the solver.""" return self._checked_response.user_time + @property + def solve_log(self) -> str: + """Returns the solve log. + + To enable this, the parameter log_to_response must be set to True. + """ + return self._checked_response.solve_log + + @property + def solve_info(self) -> str: + """Returns the information about the solve.""" + return self._checked_response.solve_info + @property def response_proto(self) -> cmh.CpSolverResponse: """Returns the response object.""" @@ -1948,17 +1961,6 @@ def UserTime(self) -> float: def WallTime(self) -> float: return self.wall_time - def SolveWithSolutionCallback( - self, model: CpModel, callback: "CpSolverSolutionCallback" - ) -> cmh.CpSolverStatus: - """DEPRECATED Use solve() with the callback argument.""" - warnings.warn( - "solve_with_solution_callback is deprecated; use solve() with" - + "the callback argument.", - DeprecationWarning, - ) - return self.solve(model, callback) - def SearchForAllSolutions( self, model: CpModel, callback: "CpSolverSolutionCallback" ) -> cmh.CpSolverStatus: diff --git a/ortools/sat/python/cp_model_test.py b/ortools/sat/python/cp_model_test.py index 28d182ef170..92505d347c6 100644 --- a/ortools/sat/python/cp_model_test.py +++ b/ortools/sat/python/cp_model_test.py @@ -2080,7 +2080,26 @@ def test_custom_log(self) -> None: self.assertEqual(10, solver.value(x)) self.assertEqual(-5, solver.value(y)) - self.assertRegex(log_callback.log, ".*log_to_stdout.*") + self.assertRegex(log_callback.log, "Starting CP-SAT solver") + + def test_log_to_response(self) -> None: + model = cp_model.CpModel() + x = model.new_int_var(-10, 10, "x") + y = model.new_int_var(-10, 10, "y") + model.add_linear_constraint(x + 2 * 
y, 0, 10) + model.minimize(y) + solver = cp_model.CpSolver() + solver.parameters.log_search_progress = True + solver.parameters.log_to_stdout = False + solver.parameters.log_to_response = True + + self.assertEqual(cp_model.OPTIMAL, solver.solve(model)) + self.assertEqual(10, solver.value(x)) + self.assertEqual(-5, solver.value(y)) + + self.assertRegex(solver.solve_log, "Starting CP-SAT solver") + print(solver.solution_info) + print(solver.solve_log) def test_issue2762(self) -> None: model = cp_model.CpModel() diff --git a/ortools/sat/sat_parameters.proto b/ortools/sat/sat_parameters.proto index 74604a19240..90871777e35 100644 --- a/ortools/sat/sat_parameters.proto +++ b/ortools/sat/sat_parameters.proto @@ -24,7 +24,7 @@ option java_multiple_files = true; // Contains the definitions for all the sat algorithm parameters and their // default values. // -// NEXT TAG: 344 +// NEXT TAG: 351 message SatParameters { // In some context, like in a portfolio of search, it makes sense to name a // given parameters set for logging purpose. @@ -176,23 +176,25 @@ message SatParameters { // target of clauses to keep. optional double clause_cleanup_ratio = 190 [default = 0.5]; - // Each time a clause activity is bumped, the clause has a chance to be - // protected during the next cleanup phase. Note that clauses used as a reason - // are always protected. - enum ClauseProtection { - PROTECTION_NONE = 0; // No protection. - PROTECTION_ALWAYS = 1; // Protect all clauses whose activity is bumped. - PROTECTION_LBD = 2; // Only protect clause with a better LBD. - } - optional ClauseProtection clause_cleanup_protection = 58 - [default = PROTECTION_NONE]; - // All the clauses with a LBD (literal blocks distance) lower or equal to this // parameters will always be kept. + // + // Note that the LBD of a clause that just propagated is 1 + number of + // different decision levels of its literals. 
So that the "classic" LBD of a + // learned conflict is the same as its LBD when we backjump and then propagate + // it. optional int32 clause_cleanup_lbd_bound = 59 [default = 5]; - // Only protect clauses up to this maximum LBD. - optional int32 clause_protection_lbd_bound = 338 [default = 8]; + // All the clause with a LBD lower or equal to this will be kept except if + // its activity hasn't been bumped in the last 32 cleanup phase. Note that + // this has no effect if it is <= clause_cleanup_lbd_bound. + optional int32 clause_cleanup_lbd_tier1 = 349 [default = 0]; + + // All the clause with a LBD lower or equal to this will be kept except if its + // activity hasn't been bumped since the previous cleanup phase. Note that + // this has no effect if it is <= clause_cleanup_lbd_bound or <= + // clause_cleanup_lbd_tier1. + optional int32 clause_cleanup_lbd_tier2 = 350 [default = 0]; // The clauses that will be kept during a cleanup are the ones that come // first under this order. We always keep or exclude ties together. @@ -778,6 +780,43 @@ message SatParameters { // The amount of dtime between each export of shared glue clauses. optional double share_glue_clauses_dtime = 322 [default = 1.0]; + // ========================================================================== + // Proofs + // ========================================================================== + + // If true, inferred clauses are checked with an LRAT checker as they are + // learned. As of November 2025, this only works with pure SAT problems, with + // - cp_model_presolve = false, + // - linearization_level <= 1, + // - symmetry_level <= 1, + // - shared_tree_num_workers = 0. + optional bool check_lrat_proof = 344 [default = false]; + + // TODO(user): add and implement an output_lrat_proof field. + reserved 345; + + // If true, and if the problem is UNSAT, a DRAT proof of this UNSAT property + // is checked after the solver has finished. 
As of November 2025, this only + // works for pure SAT problems, with + // - num_workers = 1, + // - cp_model_presolve = false, + // - linearization_level <= 1, + // - symmetry_level <= 1. + optional bool check_drat_proof = 346 [default = false]; + + // If true, a DRAT proof that all the clauses inferred by the solver are valid + // is output to a file. As of November 2025, this only works for pure SAT + // problems, with + // - num_workers = 1, + // - cp_model_presolve = false, + // - linearization_level <= 1, + // - symmetry_level <= 1. + optional bool output_drat_proof = 347 [default = false]; + + // The maximum time allowed to check the DRAT proof (this can take more time + // than the solve itself). Only used if check_drat_proof is true. + optional double max_drat_time_in_seconds = 348 [default = inf]; + // ========================================================================== // Debugging parameters // ========================================================================== diff --git a/ortools/sat/sat_solver.cc b/ortools/sat/sat_solver.cc index 7a89d68b309..08e2d8f2e56 100644 --- a/ortools/sat/sat_solver.cc +++ b/ortools/sat/sat_solver.cc @@ -492,10 +492,6 @@ int SatSolver::AddLearnedClauseAndEnqueueUnitPropagation( SatClause* clause = clauses_propagator_->AddRemovableClause( clause_id, literals, trail_, lbd); - - // BumpClauseActivity() must be called after clauses_info_[clause] has - // been created or it will have no effect. - (*clauses_propagator_->mutable_clauses_info())[clause].lbd = lbd; BumpClauseActivity(clause); } else { CHECK(clauses_propagator_->AddClause(clause_id, literals, trail_, lbd)); @@ -831,16 +827,17 @@ void SatSolver::ProcessCurrentConflict( } // Bump the clause activities. - // Note that the activity of the learned clause will be bumped too - // by AddLearnedClauseAndEnqueueUnitPropagation(). 
+ // + // Note that the activity of the learned clause will be bumped too by + // AddLearnedClauseAndEnqueueUnitPropagation() after we update the increment. if (trail_->FailingSatClause() != nullptr) { BumpClauseActivity(trail_->FailingSatClause()); } BumpReasonActivities(reason_used_to_infer_the_conflict_); + UpdateClauseActivityIncrement(); // Decay the activities. decision_policy_->UpdateVariableActivityIncrement(); - UpdateClauseActivityIncrement(); pb_constraints_->UpdateActivityIncrement(); // Hack from Glucose that seems to perform well. @@ -1385,8 +1382,7 @@ void SatSolver::KeepAllClausesUsedToInfer(BooleanVariable variable) { const BooleanVariable var = (*trail_)[trail_index].Variable(); SatClause* clause = ReasonClauseOrNull(var); if (clause != nullptr) { - // Keep this clause. - clauses_propagator_->mutable_clauses_info()->erase(clause); + clauses_propagator_->KeepClauseForever(clause); } if (trail_->AssignmentType(var) == AssignmentType::kSearchDecision) { continue; @@ -2044,43 +2040,26 @@ void SatSolver::BumpClauseActivity(SatClause* clause) { auto it = clauses_propagator_->mutable_clauses_info()->find(clause); if (it == clauses_propagator_->mutable_clauses_info()->end()) return; - // Check if the new clause LBD is below our threshold to keep this clause - // indefinitely. - const int new_lbd = ComputeLbd(clause->AsSpan()); - if (new_lbd <= parameters_->clause_cleanup_lbd_bound()) { - clauses_propagator_->mutable_clauses_info()->erase(clause); - return; - } - - // Potentially protect this clause for the next cleanup phase. 
- if (new_lbd <= parameters_->clause_protection_lbd_bound()) { - switch (parameters_->clause_cleanup_protection()) { - case SatParameters::PROTECTION_NONE: - break; - case SatParameters::PROTECTION_ALWAYS: - it->second.protected_during_next_cleanup = true; - break; - case SatParameters::PROTECTION_LBD: - if (new_lbd < it->second.lbd) { - it->second.protected_during_next_cleanup = true; - it->second.lbd = new_lbd; - } - } - } + it->second.num_cleanup_rounds_since_last_bumped = 0; // Increase the activity. const double activity = it->second.activity += clause_activity_increment_; if (activity > parameters_->max_clause_activity_value()) { RescaleClauseActivities(1.0 / parameters_->max_clause_activity_value()); } + + // Update this clause LBD using the new decision orders. + // Note that this can keep the clause forever depending on the parameters. + // + // TODO(user): This cause one more hash lookup, probably not a big deal, but + // could be optimized away. + clauses_propagator_->ChangeLbdIfBetter(clause, ComputeLbd(clause->AsSpan())); } void SatSolver::RescaleClauseActivities(double scaling_factor) { SCOPED_TIME_STAT(&stats_); clause_activity_increment_ *= scaling_factor; - for (auto& entry : *clauses_propagator_->mutable_clauses_info()) { - entry.second.activity *= scaling_factor; - } + clauses_propagator_->RescaleClauseActivities(scaling_factor); } void SatSolver::UpdateClauseActivityIncrement() { @@ -2157,9 +2136,7 @@ int SatSolver::ComputeLbd(absl::Span literals) { const SatDecisionLevel level(AssignmentLevel(literal.Variable())); DCHECK_GE(level, 0); num_at_max_level += (level == max_level) ? 
1 : 0; - if (level > limit && !is_level_marked_[level]) { - is_level_marked_.Set(level); - } + if (level > limit) is_level_marked_.Set(level); } return is_level_marked_.NumberOfSetCallsWithDifferentArguments() + @@ -2258,10 +2235,7 @@ void SatSolver::ProcessNewlyFixedVariables() { if (clause->IsRemoved()) continue; const size_t old_size = clause->size(); - if (clause->RemoveFixedLiteralsAndTestIfTrue(trail_->Assignment())) { - // The clause is always true, detach it. - clauses_propagator_->LazyDelete(clause, - DeletionSourceForStat::FIXED_AT_TRUE); + if (clauses_propagator_->RemoveFixedLiteralsAndTestIfTrue(clause)) { ++num_detached_clauses; continue; } @@ -3253,14 +3227,25 @@ void SatSolver::CleanClauseDatabaseIfNeeded() { std::vector entries; auto& clauses_info = *(clauses_propagator_->mutable_clauses_info()); for (auto& entry : clauses_info) { + entry.second.num_cleanup_rounds_since_last_bumped++; if (clauses_propagator_->ClauseIsUsedAsReason(entry.first)) continue; - if (entry.second.protected_during_next_cleanup) { - entry.second.protected_during_next_cleanup = false; + + if (entry.second.lbd <= parameters_->clause_cleanup_lbd_tier1() && + entry.second.num_cleanup_rounds_since_last_bumped <= 32) { continue; } + + if (entry.second.lbd <= parameters_->clause_cleanup_lbd_tier2() && + entry.second.num_cleanup_rounds_since_last_bumped <= 1) { + continue; + } + + // The LBD should always have been updated to be <= size. + DCHECK_LE(entry.second.lbd, entry.first->size()); entries.push_back(entry); } - const int num_protected_clauses = clauses_info.size() - entries.size(); + const int num_protected_clauses = + clauses_propagator_->num_removable_clauses() - entries.size(); if (parameters_->clause_cleanup_ordering() == SatParameters::CLAUSE_LBD) { // Order the clauses by decreasing LBD and then increasing activity. 
diff --git a/ortools/sat/stat_tables.cc b/ortools/sat/stat_tables.cc index 1853d7498c0..35c79c8280a 100644 --- a/ortools/sat/stat_tables.cc +++ b/ortools/sat/stat_tables.cc @@ -115,16 +115,17 @@ void SharedStatTables::AddClausesStat(absl::string_view name, Model* model) { // Track reductions of Boolean variables. if (bool_var_table_.empty()) { bool_var_table_.push_back( - {"Boolean variables", "Fixed", "Equiv", "Total", "Left"}); + {"Boolean variables", "Fixed", "Equiv", "Total", "Left", "Binary"}); } + auto* binary = model->GetOrCreate(); const int64_t num_fixed = sat_solver->NumFixedVariables(); - const int64_t num_equiv = - model->GetOrCreate()->num_redundant_literals() / - 2; + const int64_t num_equiv = binary->num_redundant_literals() / 2; const int64_t num_bools = sat_solver->NumVariables(); - bool_var_table_.push_back({FormatName(name), FormatCounter(num_fixed), - FormatCounter(num_equiv), FormatCounter(num_bools), - FormatCounter(num_bools - num_equiv - num_fixed)}); + bool_var_table_.push_back( + {FormatName(name), FormatCounter(num_fixed), FormatCounter(num_equiv), + FormatCounter(num_bools), + FormatCounter(num_bools - num_equiv - num_fixed), + FormatCounter(binary->ComputeNumImplicationsForLog())}); // Track the "life of a non-binary clause". 
CpSolverResponse r; @@ -134,7 +135,7 @@ void SharedStatTables::AddClausesStat(absl::string_view name, Model* model) { clauses_deletion_table_.push_back( {"Clause deletion", "at_true", "l_and_not(l)", "to_binary", "sub_conflict", "sub_eager", "sub_vivify", "sub_probing", "sub_inpro", - "blocked", "eliminated", "forgotten", "#conflicts"}); + "blocked", "eliminated", "forgotten", "promoted", "conflicts"}); } absl::Span deletion_by_source = model->GetOrCreate()->DeletionCounters(); @@ -162,6 +163,7 @@ void SharedStatTables::AddClausesStat(absl::string_view name, Model* model) { DeletionSourceForStat::ELIMINATED)]), FormatCounter(deletion_by_source[static_cast( DeletionSourceForStat::GARBAGE_COLLECTED)]), + FormatCounter(model->GetOrCreate()->num_lbd_promotions()), FormatCounter(r.num_conflicts())}); } From 0da2566929d341d01c4ac32b6b1371d70d8bde1b Mon Sep 17 00:00:00 2001 From: Mizux Seiha Date: Tue, 25 Nov 2025 15:47:25 +0100 Subject: [PATCH 017/111] backport sat/CpSolver.java from main --- .../java/com/google/ortools/sat/CpSolver.java | 47 +++++-------------- 1 file changed, 13 insertions(+), 34 deletions(-) diff --git a/ortools/java/com/google/ortools/sat/CpSolver.java b/ortools/java/com/google/ortools/sat/CpSolver.java index cccddcbdbe6..04a09c2ecbb 100644 --- a/ortools/java/com/google/ortools/sat/CpSolver.java +++ b/ortools/java/com/google/ortools/sat/CpSolver.java @@ -68,40 +68,6 @@ public CpSolverStatus solve(CpModel model, CpSolverSolutionCallback cb) { return solveResponse.getStatus(); } - /** - * Solves the given model, passes each incumber solution to the solution callback if not null, and - * returns the solve status. - * - * @deprecated Use the solve() method with the same signature. - */ - @Deprecated - public CpSolverStatus solveWithSolutionCallback(CpModel model, CpSolverSolutionCallback cb) { - return solve(model, cb); - } - - /** - * Searches for all solutions of a satisfiability problem. - * - *

This method searches for all feasible solutions of a given model. Then it feeds the - * solutions to the callback. - * - *

Note that the model cannot have an objective. - * - * @param model the model to solve - * @param cb the callback that will be called at each solution - * @return the status of the solve (FEASIBLE, INFEASIBLE...) - * @deprecated Use the solve() method with the same signature, after setting the - * enumerate_all_solution parameter to true. - */ - @Deprecated - public CpSolverStatus searchAllSolutions(CpModel model, CpSolverSolutionCallback cb) { - boolean oldValue = solveParameters.getEnumerateAllSolutions(); - solveParameters.setEnumerateAllSolutions(true); - CpSolverStatus status = solve(model, cb); - solveParameters.setEnumerateAllSolutions(oldValue); - return status; - } - private synchronized void createSolveWrapper() { solveWrapper = new SolveWrapper(); } @@ -213,10 +179,23 @@ public String responseStats() { * Returns some information on how the solution was found, or the reason why the model or the * parameters are invalid. */ + public String solutionInfo() { + return solveResponse.getSolutionInfo(); + } + + /** Returns the solution info. @Deprecated */ public String getSolutionInfo() { return solveResponse.getSolutionInfo(); } + /** + * Returns the solve log. You need to set the parameters log_to_response to true to get the solve + * log. 
+ */ + public String solveLog() { + return solveResponse.getSolveLog(); + } + private CpSolverResponse solveResponse; private final SatParameters.Builder solveParameters; private Consumer logCallback; From fce013c63e21591fc03644930f2fa29374ed1796 Mon Sep 17 00:00:00 2001 From: Laurent Perron Date: Fri, 28 Nov 2025 16:31:13 +0100 Subject: [PATCH 018/111] tentative fix for #4929 --- .../python/model_builder_helper.cc | 97 ++++++++++++------- ortools/sat/python/BUILD.bazel | 2 +- ortools/sat/python/cp_model_helper.cc | 80 ++++++++++----- 3 files changed, 119 insertions(+), 60 deletions(-) diff --git a/ortools/linear_solver/python/model_builder_helper.cc b/ortools/linear_solver/python/model_builder_helper.cc index 214f0b26dca..326e52c2e86 100644 --- a/ortools/linear_solver/python/model_builder_helper.cc +++ b/ortools/linear_solver/python/model_builder_helper.cc @@ -15,6 +15,15 @@ #include "ortools/linear_solver/wrappers/model_builder_helper.h" +#include + +#if PY_VERSION_HEX >= 0x030E00A7 && !defined(PYPY_VERSION) +#define Py_BUILD_CORE +#include "internal/pycore_frame.h" +#include "internal/pycore_interpframe.h" +#undef Py_BUILD_CORE +#endif + #include #include #include @@ -300,6 +309,41 @@ std::shared_ptr WeightedSumArguments( } } +#if PY_VERSION_HEX >= 0x030E00A7 && !defined(PYPY_VERSION) +bool check_unique_temporary(PyObject* op) { + PyFrameObject* frame = PyEval_GetFrame(); + if (frame == NULL) { + return false; + } + _PyInterpreterFrame* f = frame->f_frame; + _PyStackRef* base = _PyFrame_Stackbase(f); + _PyStackRef* stackpointer = f->stackpointer; + + while (stackpointer > base) { + stackpointer--; + if (op == PyStackRef_AsPyObjectBorrow(*stackpointer)) { + // We want detect if the object is a temporary and borrowed. If so, it + // should be only referenced once in the stack, but it should not be safe. 
+ return !PyStackRef_IsHeapSafe(*stackpointer); + } + } + return false; +} + +template +bool IsFree(std::shared_ptr expr) { + PyObject* lhs = py::cast(expr).ptr(); + const int num_uses = Py_REFCNT(lhs); + const bool is_referenced_in_caller_frame = check_unique_temporary(lhs); + return num_uses == 3 && !is_referenced_in_caller_frame; +} +#else +template +bool IsFree(std::shared_ptr expr) { + return Py_REFCNT(py::cast(expr).ptr()) == 4; +} +#endif + PYBIND11_MODULE(model_builder_helper, m) { pybind11_protobuf::ImportNativeProtoCasters(); @@ -434,43 +478,33 @@ PYBIND11_MODULE(model_builder_helper, m) { .def(py::init>, double>()) .def( "__add__", - [](py::object self, + [](std::shared_ptr expr, std::shared_ptr other) -> std::shared_ptr { - const int num_uses = Py_REFCNT(self.ptr()); - std::shared_ptr expr = - self.cast>(); - return (num_uses == 4) ? expr->AddInPlace(other) : expr->Add(other); + return IsFree(expr) ? expr->AddInPlace(other) : expr->Add(other); }, py::arg("other").none(false), "Returns the sum of `self` and `other`.") .def( "__add__", - [](py::object self, double cst) -> std::shared_ptr { - const int num_uses = Py_REFCNT(self.ptr()); - std::shared_ptr expr = - self.cast>(); - return (num_uses == 4) ? expr->AddFloatInPlace(cst) - : expr->AddFloat(cst); + [](std::shared_ptr expr, + double cst) -> std::shared_ptr { + return IsFree(expr) ? expr->AddFloatInPlace(cst) + : expr->AddFloat(cst); }, py::arg("cst"), "Returns `self` + `cst`.") .def( "__radd__", - [](py::object self, + [](std::shared_ptr expr, std::shared_ptr other) -> std::shared_ptr { - const int num_uses = Py_REFCNT(self.ptr()); - std::shared_ptr expr = - self.cast>(); - return (num_uses == 4) ? expr->AddInPlace(other) : expr->Add(other); + return IsFree(expr) ? 
expr->AddInPlace(other) : expr->Add(other); }, py::arg("cst"), "Returns `self` + `cst`.") .def( "__radd__", - [](py::object self, double cst) -> std::shared_ptr { - const int num_uses = Py_REFCNT(self.ptr()); - std::shared_ptr expr = - self.cast>(); - return (num_uses == 4) ? expr->AddFloatInPlace(cst) - : expr->AddFloat(cst); + [](std::shared_ptr expr, + double cst) -> std::shared_ptr { + return IsFree(expr) ? expr->AddFloatInPlace(cst) + : expr->AddFloat(cst); }, py::arg("cst"), "Returns `self` + `cst`.") .def( @@ -490,23 +524,18 @@ PYBIND11_MODULE(model_builder_helper, m) { py::arg("cst"), "Returns `self` + `cst`.") .def( "__sub__", - [](py::object self, + [](std::shared_ptr expr, std::shared_ptr other) -> std::shared_ptr { - const int num_uses = Py_REFCNT(self.ptr()); - std::shared_ptr expr = - self.cast>(); - return (num_uses == 4) ? expr->AddInPlace(other->Neg()) - : expr->Sub(other); + return IsFree(expr) ? expr->AddInPlace(other->Neg()) + : expr->Sub(other); }, py::arg("other").none(false), "Returns `self` - `other`.") .def( "__sub__", - [](py::object self, double cst) -> std::shared_ptr { - const int num_uses = Py_REFCNT(self.ptr()); - std::shared_ptr expr = - self.cast>(); - return (num_uses == 4) ? expr->AddFloatInPlace(-cst) - : expr->SubFloat(cst); + [](std::shared_ptr expr, + double cst) -> std::shared_ptr { + return IsFree(expr) ? 
expr->AddFloatInPlace(-cst) + : expr->SubFloat(cst); }, py::arg("cst"), "Returns `self` - `cst`.") .def( diff --git a/ortools/sat/python/BUILD.bazel b/ortools/sat/python/BUILD.bazel index e607ddf06be..2230b4742c6 100644 --- a/ortools/sat/python/BUILD.bazel +++ b/ortools/sat/python/BUILD.bazel @@ -138,7 +138,7 @@ py_library( py_test( name = "cp_model_test", - size = "small", + size = "medium", srcs = ["cp_model_test.py"], deps = [ ":cp_model", diff --git a/ortools/sat/python/cp_model_helper.cc b/ortools/sat/python/cp_model_helper.cc index a13d1aa22f4..e21a33b6024 100644 --- a/ortools/sat/python/cp_model_helper.cc +++ b/ortools/sat/python/cp_model_helper.cc @@ -13,6 +13,13 @@ #include +#if PY_VERSION_HEX >= 0x030E00A7 && !defined(PYPY_VERSION) +#define Py_BUILD_CORE +#include "internal/pycore_frame.h" +#include "internal/pycore_interpframe.h" +#undef Py_BUILD_CORE +#endif + #include #include #include @@ -1107,6 +1114,41 @@ std::shared_ptr CpBaseModel::AddRoutesInternal( return std::make_shared(shared_from_this(), ct_index); } +#if PY_VERSION_HEX >= 0x030E00A7 && !defined(PYPY_VERSION) +bool check_unique_temporary(PyObject* op) { + PyFrameObject* frame = PyEval_GetFrame(); + if (frame == NULL) { + return false; + } + _PyInterpreterFrame* f = frame->f_frame; + _PyStackRef* base = _PyFrame_Stackbase(f); + _PyStackRef* stackpointer = f->stackpointer; + + while (stackpointer > base) { + stackpointer--; + if (op == PyStackRef_AsPyObjectBorrow(*stackpointer)) { + // We want detect if the object is a temporary and borrowed. If so, it + // should be only referenced once in the stack, but it should not be safe. 
+ return !PyStackRef_IsHeapSafe(*stackpointer); + } + } + return false; +} + +template +bool IsFree(std::shared_ptr expr) { + PyObject* lhs = py::cast(expr).ptr(); + const int num_uses = Py_REFCNT(lhs); + const bool is_referenced_in_caller_frame = check_unique_temporary(lhs); + return num_uses == 3 && !is_referenced_in_caller_frame; +} +#else +template +bool IsFree(std::shared_ptr expr) { + return Py_REFCNT(py::cast(expr).ptr()) == 4; +} +#endif + PYBIND11_MODULE(cp_model_helper, m) { py::module::import("ortools.util.python.sorted_interval_list"); @@ -1536,8 +1578,7 @@ PYBIND11_MODULE(cp_model_helper, m) { "__add__", [](std::shared_ptr expr, std::shared_ptr other) -> std::shared_ptr { - const int num_uses = Py_REFCNT(py::cast(expr).ptr()); - return (num_uses == 4) ? expr->AddInPlace(other) : expr->Add(other); + return IsFree(expr) ? expr->AddInPlace(other) : expr->Add(other); }, py::arg("other").none(false), DOC(operations_research, sat, python, LinearExpr, Add)) @@ -1545,18 +1586,15 @@ PYBIND11_MODULE(cp_model_helper, m) { "__add__", [](std::shared_ptr expr, int64_t cst) -> std::shared_ptr { - const int num_uses = Py_REFCNT(py::cast(expr).ptr()); - return (num_uses == 4) ? expr->AddIntInPlace(cst) - : expr->AddInt(cst); + return IsFree(expr) ? expr->AddIntInPlace(cst) : expr->AddInt(cst); }, DOC(operations_research, sat, python, LinearExpr, AddInt)) .def( "__add__", [](std::shared_ptr expr, double cst) -> std::shared_ptr { - const int num_uses = Py_REFCNT(py::cast(expr).ptr()); - return (num_uses == 4) ? expr->AddFloatInPlace(cst) - : expr->AddFloat(cst); + return IsFree(expr) ? expr->AddFloatInPlace(cst) + : expr->AddFloat(cst); }, py::arg("other").none(false), DOC(operations_research, sat, python, LinearExpr, AddFloat)) @@ -1564,8 +1602,7 @@ PYBIND11_MODULE(cp_model_helper, m) { "__radd__", [](std::shared_ptr expr, std::shared_ptr other) -> std::shared_ptr { - const int num_uses = Py_REFCNT(py::cast(expr).ptr()); - return (num_uses == 4) ? 
expr->AddInPlace(other) : expr->Add(other); + return IsFree(expr) ? expr->AddInPlace(other) : expr->Add(other); }, py::arg("other").none(false), DOC(operations_research, sat, python, LinearExpr, Add)) @@ -1573,9 +1610,7 @@ PYBIND11_MODULE(cp_model_helper, m) { "__radd__", [](std::shared_ptr expr, int64_t cst) -> std::shared_ptr { - const int num_uses = Py_REFCNT(py::cast(expr).ptr()); - return (num_uses == 4) ? expr->AddIntInPlace(cst) - : expr->AddInt(cst); + return IsFree(expr) ? expr->AddIntInPlace(cst) : expr->AddInt(cst); }, py::arg("cst"), DOC(operations_research, sat, python, LinearExpr, AddInt)) @@ -1583,9 +1618,8 @@ PYBIND11_MODULE(cp_model_helper, m) { "__radd__", [](std::shared_ptr expr, double cst) -> std::shared_ptr { - const int num_uses = Py_REFCNT(py::cast(expr).ptr()); - return (num_uses == 4) ? expr->AddFloatInPlace(cst) - : expr->AddFloat(cst); + return IsFree(expr) ? expr->AddFloatInPlace(cst) + : expr->AddFloat(cst); }, py::arg("cst"), DOC(operations_research, sat, python, LinearExpr, AddFloat)) @@ -1616,9 +1650,8 @@ PYBIND11_MODULE(cp_model_helper, m) { "__sub__", [](std::shared_ptr expr, std::shared_ptr other) -> std::shared_ptr { - const int num_uses = Py_REFCNT(py::cast(expr).ptr()); - return (num_uses == 4) ? expr->AddInPlace(other->Neg()) - : expr->Sub(other); + return IsFree(expr) ? expr->AddInPlace(other->Neg()) + : expr->Sub(other); }, py::arg("other").none(false), DOC(operations_research, sat, python, LinearExpr, Sub)) @@ -1626,9 +1659,7 @@ PYBIND11_MODULE(cp_model_helper, m) { "__sub__", [](std::shared_ptr expr, int64_t cst) -> std::shared_ptr { - const int num_uses = Py_REFCNT(py::cast(expr).ptr()); - return (num_uses == 4) ? expr->AddIntInPlace(-cst) - : expr->SubInt(cst); + return IsFree(expr) ? 
expr->AddIntInPlace(-cst) : expr->SubInt(cst); }, py::arg("cst"), DOC(operations_research, sat, python, LinearExpr, SubInt)) @@ -1636,9 +1667,8 @@ PYBIND11_MODULE(cp_model_helper, m) { "__sub__", [](std::shared_ptr expr, double cst) -> std::shared_ptr { - const int num_uses = Py_REFCNT(py::cast(expr).ptr()); - return (num_uses == 4) ? expr->AddFloatInPlace(-cst) - : expr->SubFloat(cst); + return IsFree(expr) ? expr->AddFloatInPlace(-cst) + : expr->SubFloat(cst); }, py::arg("cst"), DOC(operations_research, sat, python, LinearExpr, SubFloat)) From 56613156ce745f4fa388d6a4cbd25f6da673dbee Mon Sep 17 00:00:00 2001 From: Laurent Perron Date: Fri, 28 Nov 2025 17:33:22 +0100 Subject: [PATCH 019/111] [CP-SAT] work on lrat; work in inprocessing; fix bug with reduced variables domains and hints --- ortools/sat/BUILD.bazel | 46 +- ortools/sat/clause.cc | 166 +++++- ortools/sat/clause.h | 36 ++ ortools/sat/clause_test.cc | 1 - ortools/sat/cp_model_copy.cc | 83 ++- ortools/sat/cp_model_copy.h | 26 +- ortools/sat/cp_model_search.cc | 4 +- ortools/sat/cp_model_solver.cc | 41 +- ortools/sat/cp_model_solver_helpers.cc | 3 +- ortools/sat/docs/channeling.md | 4 +- ortools/sat/docs/scheduling.md | 20 +- ortools/sat/implied_bounds.cc | 2 +- ortools/sat/integer_test.cc | 27 +- ortools/sat/lb_tree_search.cc | 12 +- ortools/sat/lrat.proto | 75 +++ ortools/sat/lrat_proof_handler.cc | 445 ++++++++++++++- ortools/sat/lrat_proof_handler.h | 114 +++- ortools/sat/pb_constraint_test.cc | 76 +-- ortools/sat/precedences_test.cc | 18 +- ortools/sat/presolve_context.cc | 1 + ortools/sat/presolve_util.cc | 58 +- ortools/sat/presolve_util.h | 9 +- ortools/sat/probing.cc | 31 +- ortools/sat/recordio.cc | 50 ++ ortools/sat/recordio.h | 63 +++ ortools/sat/recordio_test.cc | 66 +++ ortools/sat/sat_base.h | 61 +- ortools/sat/sat_parameters.proto | 35 +- ortools/sat/sat_solver.cc | 751 +++++++++++++++---------- ortools/sat/sat_solver.h | 108 ++-- ortools/sat/solution_crush.cc | 13 +- 
ortools/sat/solution_crush.h | 2 +- ortools/sat/stat_tables.cc | 9 +- ortools/sat/synchronization.cc | 14 +- ortools/sat/synchronization.h | 7 +- ortools/sat/util.h | 16 + ortools/sat/util_test.cc | 23 + ortools/sat/variable_expand.cc | 77 ++- ortools/sat/work_assignment.cc | 2 +- 39 files changed, 1971 insertions(+), 624 deletions(-) create mode 100644 ortools/sat/lrat.proto create mode 100644 ortools/sat/recordio.cc create mode 100644 ortools/sat/recordio.h create mode 100644 ortools/sat/recordio_test.cc diff --git a/ortools/sat/BUILD.bazel b/ortools/sat/BUILD.bazel index 808a16d57dd..2bde6191da7 100644 --- a/ortools/sat/BUILD.bazel +++ b/ortools/sat/BUILD.bazel @@ -173,7 +173,9 @@ cc_library( deps = [ ":cp_model_cc_proto", ":cp_model_utils", + ":lrat_proof_handler", ":presolve_context", + ":sat_base", ":sat_parameters_cc_proto", "//ortools/base:logging", "//ortools/base:protobuf_util", @@ -1030,21 +1032,16 @@ cc_library( ":cp_model_utils", ":util", "//ortools/base:strong_vector", - "//ortools/base:timer", "//ortools/util:bitset", - "//ortools/util:logging", "//ortools/util:saturated_arithmetic", "//ortools/util:sorted_interval_list", "//ortools/util:strong_integers", - "//ortools/util:time_limit", "@abseil-cpp//absl/container:flat_hash_map", "@abseil-cpp//absl/container:flat_hash_set", + "@abseil-cpp//absl/log", "@abseil-cpp//absl/log:check", - "@abseil-cpp//absl/meta:type_traits", "@abseil-cpp//absl/random:bit_gen_ref", "@abseil-cpp//absl/random:distributions", - "@abseil-cpp//absl/strings", - "@abseil-cpp//absl/strings:str_format", "@abseil-cpp//absl/types:span", ], ) @@ -3424,6 +3421,7 @@ cc_test( "//ortools/base:logging", "//ortools/base:mathutil", "//ortools/base:stl_util", + "//ortools/util:bitset", "//ortools/util:random_engine", "//ortools/util:sorted_interval_list", "@abseil-cpp//absl/container:btree", @@ -4256,6 +4254,18 @@ cc_library( ], ) +proto_library( + name = "lrat_proto", + srcs = ["lrat.proto"], + visibility = ["//visibility:public"], +) + 
+cc_proto_library( + name = "lrat_cc_proto", + visibility = ["//visibility:public"], + deps = [":lrat_proto"], +) + cc_library( name = "lrat_proof_handler", srcs = ["lrat_proof_handler.cc"], @@ -4263,10 +4273,16 @@ cc_library( deps = [ ":drat_checker", ":drat_writer", + ":lrat_cc_proto", ":lrat_checker", ":model", + ":recordio", ":sat_base", + ":synchronization", "//ortools/base:file", + "//ortools/base:intops", + "//ortools/base:timer", + "@abseil-cpp//absl/container:flat_hash_map", "@abseil-cpp//absl/flags:flag", "@abseil-cpp//absl/log", "@abseil-cpp//absl/log:check", @@ -4331,6 +4347,24 @@ cc_test( ], ) +cc_library( + name = "recordio", + srcs = ["recordio.cc"], + hdrs = ["recordio.h"], + deps = ["@protobuf"], +) + +cc_test( + name = "recordio_test", + srcs = ["recordio_test.cc"], + deps = [ + ":cp_model_cc_proto", + ":recordio", + "//ortools/base:gmock_main", + "//ortools/base:path", + ], +) + cc_binary( name = "sat_runner", srcs = [ diff --git a/ortools/sat/clause.cc b/ortools/sat/clause.cc index 32f09527184..7e27074d926 100644 --- a/ortools/sat/clause.cc +++ b/ortools/sat/clause.cc @@ -294,13 +294,17 @@ SatClause* ClauseManager::ReasonClause(int trail_index) const { SatClause* ClauseManager::ReasonClauseOrNull(BooleanVariable var) const { if (!trail_->Assignment().VariableIsAssigned(var)) return nullptr; if (trail_->AssignmentType(var) != propagator_id_) return nullptr; + SatClause* result = reasons_[trail_->Info(var).trail_index]; - DCHECK_EQ(trail_->Reason(var), - reasons_[trail_->Info(var).trail_index]->PropagationReason()); - return reasons_[trail_->Info(var).trail_index]; + // Tricky: In some corner case, that clause was subsumed, so we don't want + // to check it nor use it. 
+ if (result->size() == 0) return nullptr; + DCHECK_EQ(trail_->Reason(var), result->PropagationReason()); + return result; } bool ClauseManager::ClauseIsUsedAsReason(SatClause* clause) const { + DCHECK(clause != nullptr); return clause == ReasonClauseOrNull(clause->PropagatedLiteral().Variable()); } @@ -699,6 +703,131 @@ SatClause* ClauseManager::NextClauseToProbe() { return nullptr; } +ClauseId ClauseManager::ReasonClauseId(Literal literal) const { + const BooleanVariable var = literal.Variable(); + DCHECK(trail_->Assignment().VariableIsAssigned(var)); + const int assignment_type = trail_->AssignmentType(var); + const int trail_index = trail_->Info(var).trail_index; + if (assignment_type == AssignmentType::kUnitReason) { + return trail_->GetUnitClauseId(var); + } else if (assignment_type == implication_graph_->PropagatorId()) { + absl::Span reason = + implication_graph_->Reason(*trail_, trail_index, + /*conflict_id=*/-1); + CHECK_EQ(reason.size(), 1); + return implication_graph_->GetClauseId(literal, reason[0]); + } else if (assignment_type == propagator_id_) { + const SatClause* reason = ReasonClause(trail_index); + if (reason != nullptr) { + return GetClauseId(reason); + } + } + return kNoClauseId; +} + +void ClauseManager::AppendClauseIdsFixing( + absl::Span literals, std::vector* clause_ids, + LiteralIndex decision, + absl::flat_hash_map, ClauseId>* + additional_binary_clause_ids) { + SCOPED_TIME_STAT(&stats_); + const auto& assignment = trail_->Assignment(); + + // Mark the literals whose reason must be expanded, and compute their min and + // max trail index. 
+ tmp_mark_.ClearAndResize(BooleanVariable(trail_->NumVariables())); + int trail_index = 0; + int min_trail_index = trail_->Index(); + for (const Literal lit : literals) { + CHECK(assignment.LiteralIsAssigned(lit)); + const int var_trail_index = trail_->Info(lit.Variable()).trail_index; + trail_index = std::max(trail_index, var_trail_index); + min_trail_index = std::min(min_trail_index, var_trail_index); + tmp_mark_.Set(lit.Variable()); + } + + const int current_level = trail_->CurrentDecisionLevel(); + + // The min level of the expanded literals. + int min_level = current_level; + + // Unit clauses must come first. We put them in clause_ids directly. We put + // the others in non_unit_clause_ids and append them to clause_ids at the end. + std::vector& non_unit_clause_ids = + tmp_clause_ids_for_append_clauses_fixing_; + non_unit_clause_ids.clear(); + + const auto& decisions = trail_->Decisions(); + while (true) { + // Find next marked literal to expand from the trail. + while (trail_index >= min_trail_index && + !tmp_mark_[(*trail_)[trail_index].Variable()]) { + --trail_index; + } + if (trail_index < min_trail_index) break; + const Literal marked_literal = (*trail_)[trail_index--]; + + // Stop at decisions, at literals fixed at root, and at literals implied by + // the decision at their level. 
+ const int level = trail_->Info(marked_literal.Variable()).level; + if (level > 0) min_level = std::min(min_level, level); + if (trail_->AssignmentType(marked_literal.Variable()) == + AssignmentType::kSearchDecision) { + continue; + } + if (level == 0) { + clause_ids->push_back(trail_->GetUnitClauseId(marked_literal.Variable())); + continue; + } + const Literal level_decision = decisions[level - 1].literal; + ClauseId clause_id = implication_graph_->GetClauseId( + level_decision.Negated(), marked_literal); + if (clause_id == kNoClauseId && additional_binary_clause_ids != nullptr) { + const auto it = additional_binary_clause_ids->find( + std::minmax(level_decision.Negated(), marked_literal)); + if (it != additional_binary_clause_ids->end()) { + clause_id = it->second; + } + } + if (clause_id != kNoClauseId) { + non_unit_clause_ids.push_back(clause_id); + continue; + } + + // Mark all the literals of its reason. + for (const Literal literal : trail_->Reason(marked_literal.Variable())) { + const BooleanVariable var = literal.Variable(); + if (!tmp_mark_[var]) { + tmp_mark_.Set(var); + const AssignmentInfo& info = trail_->Info(var); + if (info.level > 0) { + min_trail_index = std::min(min_trail_index, info.trail_index); + } else { + clause_ids->push_back(trail_->GetUnitClauseId(var)); + } + } + } + non_unit_clause_ids.push_back(ReasonClauseId(marked_literal)); + } + + if (decision != kNoLiteralIndex) { + // Add the implication chain from `decision` to all the decisions found + // during the expansion. + if (Literal(decision) != decisions[current_level - 1].literal) { + // If `decision` is not the last decision, it must directly imply it. 
+ clause_ids->push_back(implication_graph_->GetClauseId( + Literal(decision).Negated(), decisions[current_level - 1].literal)); + } + for (int level = current_level - 1; level >= min_level; --level) { + clause_ids->push_back(implication_graph_->GetClauseId( + decisions[level].literal.Negated(), decisions[level - 1].literal)); + } + } + + clause_ids->insert(clause_ids->end(), non_unit_clause_ids.rbegin(), + non_unit_clause_ids.rend()); +} + // ----- BinaryImplicationGraph ----- void BinaryImplicationGraph::Resize(int num_variables) { @@ -888,25 +1017,20 @@ bool BinaryImplicationGraph::AddBinaryClauseInternal( add_binary_callback_(a, b); } - // TODO(user): with chronological backtracking, we should deal with literal - // fixed at level zero even if we call this at a positive level. const auto& assignment = trail_->Assignment(); - if (trail_->CurrentDecisionLevel() == 0) { - DCHECK(!assignment.LiteralIsAssigned(a)); - DCHECK(!assignment.LiteralIsAssigned(b)); - } else { - if (assignment.LiteralIsFalse(a)) { - if (assignment.LiteralIsAssigned(b)) { - if (assignment.LiteralIsFalse(b)) return false; - } else { - reasons_[trail_->Index()] = a; - trail_->EnqueueAtLevel(b, propagator_id_, trail_->AssignmentLevel(a)); - } - } else if (assignment.LiteralIsFalse(b)) { - if (!assignment.LiteralIsAssigned(a)) { - reasons_[trail_->Index()] = b; - trail_->EnqueueAtLevel(a, propagator_id_, trail_->AssignmentLevel(b)); - } + if (assignment.LiteralIsFalse(a)) { + if (assignment.LiteralIsAssigned(b)) { + if (assignment.LiteralIsFalse(b)) return false; + } else { + reasons_[trail_->Index()] = a; + trail_->EnqueueAtLevel(b, propagator_id_, trail_->AssignmentLevel(a)); + } + } else if (assignment.LiteralIsFalse(b)) { + if (assignment.LiteralIsAssigned(a)) { + if (assignment.LiteralIsFalse(a)) return false; + } else { + reasons_[trail_->Index()] = b; + trail_->EnqueueAtLevel(a, propagator_id_, trail_->AssignmentLevel(b)); } } diff --git a/ortools/sat/clause.h b/ortools/sat/clause.h index 
43294089c6d..0324b54c76a 100644 --- a/ortools/sat/clause.h +++ b/ortools/sat/clause.h @@ -74,6 +74,7 @@ class SatClause { // Number of literals in the clause. int size() const { return size_; } + bool empty() const { return size_ == 0; } // We re-use the size to lazily remove clause and notify that they need to be // deleted. It is why this is not called empty() to emphasis that fact. Note @@ -167,6 +168,8 @@ enum class DeletionSourceForStat { SUBSUMPTION_PROBING, SUBSUMPTION_VIVIFY, SUBSUMPTION_CONFLICT, + SUBSUMPTION_CONFLICT_EXTRA, + SUBSUMPTION_DECISIONS, SUBSUMPTION_EAGER, SUBSUMPTION_INPROCESSING, BLOCKED, @@ -418,6 +421,36 @@ class ClauseManager : public SatPropagator { return std::move(add_clause_callback_); } + // Returns the ID of the unit, binary, or general clause that is the reason + // for the given literal, or kNoClauseId if there is none. + ClauseId ReasonClauseId(Literal literal) const; + + // Appends to `clause_ids` the IDs of the clauses which, by unit propagation + // from some decisions, are sufficient to ensure that all literals in + // `literals` are fixed to their current value. + // + // If `decision` is not `kNoLiteralIndex`, also appends the IDs of the clauses + // proving that `decision` implies all the literals in `literals`. In this + // case, `decision` must either be the last decision on the trail, or must + // directly imply it. Furthermore, each decision must directly imply the + // previous one on the trail. + // + // This method expands the reasons of each literal recursively until a + // decision, or a literal implied by the decision at its decision level, is + // found. The latter criterion avoids a quadratic complexity when implications + // of the form "decision => literal" are added for each newly propagated + // literal after taking a decision (provided these implications are added to + // the binary implication graph right away, in trail index order). 
+ // + // If `additional_binary_clause_ids` is not null, it is used to look for + // existing binary clauses if they are not found in the binary implication + // graph. + void AppendClauseIdsFixing( + absl::Span literals, std::vector* clause_ids, + LiteralIndex decision = kNoLiteralIndex, + absl::flat_hash_map, ClauseId>* + additional_binary_clause_ids = nullptr); + private: // Attaches the given clause. This eventually propagates a literal which is // enqueued on the trail. Returns false if a contradiction was encountered. @@ -484,6 +517,9 @@ class ClauseManager : public SatPropagator { absl::AnyInvocable)> add_clause_callback_ = nullptr; + + SparseBitset tmp_mark_; + std::vector tmp_clause_ids_for_append_clauses_fixing_; }; // A binary clause. This is used by BinaryClauseManager. diff --git a/ortools/sat/clause_test.cc b/ortools/sat/clause_test.cc index ceb7c289b38..bf05edad02a 100644 --- a/ortools/sat/clause_test.cc +++ b/ortools/sat/clause_test.cc @@ -363,7 +363,6 @@ TEST(BinaryImplicationGraphTest, LargeAtMostOnePropagation) { EXPECT_TRUE(graph->AddAtMostOne(large_at_most_one)); const Literal decision = Literal(BooleanVariable(42), true); - trail->SetDecisionLevel(1); trail->EnqueueSearchDecision(Literal(decision)); EXPECT_TRUE(graph->Propagate(trail)); diff --git a/ortools/sat/cp_model_copy.cc b/ortools/sat/cp_model_copy.cc index f823284e0cf..cfb56633828 100644 --- a/ortools/sat/cp_model_copy.cc +++ b/ortools/sat/cp_model_copy.cc @@ -37,14 +37,35 @@ #include "ortools/base/protobuf_util.h" #include "ortools/sat/cp_model.pb.h" #include "ortools/sat/cp_model_utils.h" +#include "ortools/sat/lrat_proof_handler.h" #include "ortools/sat/presolve_context.h" +#include "ortools/sat/sat_base.h" #include "ortools/sat/sat_parameters.pb.h" #include "ortools/util/sorted_interval_list.h" namespace operations_research { namespace sat { -ModelCopy::ModelCopy(PresolveContext* context) : context_(context) {} +namespace { +// This assumes an identity mapping between positive 
proto refs and Boolean +// variables (this might not be the case if the input proto contains non Boolean +// variables between Boolean ones). +Literal RefToLiteral(int ref) { + return Literal(BooleanVariable(PositiveRef(ref)), RefIsPositive(ref)); +} +int LiteralToRef(Literal lit) { + const int var = lit.Variable().value(); + return lit.IsPositive() ? var : NegatedRef(var); +} +} // namespace + +ModelCopy::ModelCopy(PresolveContext* context, + LratProofHandler* lrat_proof_handler) + : context_(context), lrat_proof_handler_(lrat_proof_handler) {} + +ClauseId ModelCopy::NextInferredClauseId() { + return next_inferred_clause_id_++; +} void ModelCopy::ImportVariablesAndMaybeIgnoreNames( const CpModelProto& in_model) { @@ -86,6 +107,13 @@ bool ModelCopy::ImportAndSimplifyConstraints( interval_mapping_.assign(in_model.constraints().size(), -1); boolean_product_encoding_.clear(); + // The LRAT ASCII file format numbers input problem clauses from 1 to n. + // Assuming that each input constraint yields at most one clause, we can + // number the inferred clauses starting from in_model.constraints_size() + 1 + // without risk of collisions. 
+ next_inferred_clause_id_ = ClauseId(in_model.constraints_size() + 1); + unit_clause_ids_.clear(); + starting_constraint_index_ = context_->working_model->constraints_size(); for (int c = 0; c < in_model.constraints_size(); ++c) { if (active_constraints != nullptr && !active_constraints(c)) { @@ -106,7 +134,9 @@ bool ModelCopy::ImportAndSimplifyConstraints( break; case ConstraintProto::kBoolOr: if (first_copy) { - if (!CopyBoolOrWithDupSupport(ct)) return CreateUnsatModel(c, ct); + if (!CopyBoolOrWithDupSupport(ct, ClauseId(c + 1))) { + return CreateUnsatModel(c, ct); + } } else { if (!CopyBoolOr(ct)) return CreateUnsatModel(c, ct); } @@ -271,11 +301,14 @@ void ModelCopy::FinishEnforcementCopy(ConstraintProto* ct) { temp_enforcement_literals_.end()); } -bool ModelCopy::FinishBoolOrCopy() { +bool ModelCopy::FinishBoolOrCopy(ClauseId clause_id) { if (temp_literals_.empty()) return false; if (temp_literals_.size() == 1) { context_->UpdateRuleStats("bool_or: only one literal"); + if (lrat_proof_handler_ != nullptr) { + unit_clause_ids_[RefToLiteral(temp_literals_[0])] = clause_id; + } return context_->SetLiteralToTrue(temp_literals_[0]); } @@ -302,7 +335,8 @@ bool ModelCopy::CopyBoolOr(const ConstraintProto& ct) { return FinishBoolOrCopy(); } -bool ModelCopy::CopyBoolOrWithDupSupport(const ConstraintProto& ct) { +bool ModelCopy::CopyBoolOrWithDupSupport(const ConstraintProto& ct, + ClauseId clause_id) { temp_literals_.clear(); temp_literals_set_.clear(); for (const int enforcement_lit : temp_enforcement_literals_) { @@ -328,7 +362,39 @@ bool ModelCopy::CopyBoolOrWithDupSupport(const ConstraintProto& ct) { const auto [it, inserted] = temp_literals_set_.insert(lit); if (inserted) temp_literals_.push_back(lit); } - return FinishBoolOrCopy(); + if (lrat_proof_handler_ != nullptr) { + // Add the original clause as a problem clause, and its simplified version + // as an inferred clause (only if it is different), with proof. 
+ temp_clause_.clear(); + for (const int lit : ct.enforcement_literal()) { + temp_clause_.push_back(RefToLiteral(lit).Negated()); + } + for (const int lit : ct.bool_or().literals()) { + temp_clause_.push_back(RefToLiteral(lit)); + } + lrat_proof_handler_->AddProblemClause(clause_id, temp_clause_); + + if (temp_literals_set_.size() != temp_clause_.size()) { + temp_clause_ids_.clear(); + for (const Literal lit : temp_clause_) { + if (!temp_literals_set_.contains(LiteralToRef(lit))) { + DCHECK(unit_clause_ids_.contains(lit.Negated())) << lit.Negated(); + temp_clause_ids_.push_back(unit_clause_ids_[lit.Negated()]); + } + } + temp_clause_ids_.push_back(clause_id); + temp_simplified_clause_.clear(); + for (const int lit : temp_literals_set_) { + temp_simplified_clause_.push_back(RefToLiteral(lit)); + } + ClauseId new_clause_id = NextInferredClauseId(); + lrat_proof_handler_->AddInferredClause( + new_clause_id, temp_simplified_clause_, temp_clause_ids_); + lrat_proof_handler_->DeleteClause(clause_id, temp_clause_); + clause_id = new_clause_id; + } + } + return FinishBoolOrCopy(clause_id); } bool ModelCopy::CopyBoolAnd(const ConstraintProto& ct) { @@ -1132,9 +1198,10 @@ void ModelCopy::MaybeExpandNonAffineExpressions( } } -bool ImportModelWithBasicPresolveIntoContext(const CpModelProto& in_model, - PresolveContext* context) { - ModelCopy copier(context); +bool ImportModelWithBasicPresolveIntoContext( + const CpModelProto& in_model, PresolveContext* context, + LratProofHandler* lrat_proof_handler) { + ModelCopy copier(context, lrat_proof_handler); copier.ImportVariablesAndMaybeIgnoreNames(in_model); if (copier.ImportAndSimplifyConstraints(in_model, /*first_copy=*/true)) { CopyEverythingExceptVariablesAndConstraintsFieldsIntoContext(in_model, diff --git a/ortools/sat/cp_model_copy.h b/ortools/sat/cp_model_copy.h index 75087fc79ed..ef826d5aea4 100644 --- a/ortools/sat/cp_model_copy.h +++ b/ortools/sat/cp_model_copy.h @@ -23,7 +23,9 @@ #include "absl/container/flat_hash_set.h" 
#include "absl/types/span.h" #include "ortools/sat/cp_model.pb.h" +#include "ortools/sat/lrat_proof_handler.h" #include "ortools/sat/presolve_context.h" +#include "ortools/sat/sat_base.h" #include "ortools/sat/sat_parameters.pb.h" #include "ortools/util/sorted_interval_list.h" @@ -38,7 +40,8 @@ namespace sat { // that generates partial assignments. class ModelCopy { public: - explicit ModelCopy(PresolveContext* context); + explicit ModelCopy(PresolveContext* context, + LratProofHandler* lrat_proof_handler = nullptr); // Copies all constraints from in_model to working model of the context. // @@ -85,8 +88,8 @@ class ModelCopy { // All these functions return false if the constraint is found infeasible. bool CopyBoolOr(const ConstraintProto& ct); - bool CopyBoolOrWithDupSupport(const ConstraintProto& ct); - bool FinishBoolOrCopy(); + bool CopyBoolOrWithDupSupport(const ConstraintProto& ct, ClauseId clause_id); + bool FinishBoolOrCopy(ClauseId clause_id = kNoClauseId); bool CopyBoolAnd(const ConstraintProto& ct); bool CopyBoolAndWithDupSupport(const ConstraintProto& ct); @@ -134,7 +137,10 @@ class ModelCopy { void MaybeExpandNonAffineExpression(LinearExpressionProto* expr); void MaybeExpandNonAffineExpressions(LinearArgumentProto* linear_argument); + ClauseId NextInferredClauseId(); + PresolveContext* context_; + LratProofHandler* lrat_proof_handler_; // Temp vectors. std::vector non_fixed_variables_; @@ -150,6 +156,15 @@ class ModelCopy { ConstraintProto tmp_constraint_; + // The unit clause IDs of the literals which are fixed to true. Only used if + // lrat_proof_handler_ is not null. + absl::flat_hash_map unit_clause_ids_; + // Temp vectors used for LRAT. + std::vector temp_clause_; + std::vector temp_simplified_clause_; + std::vector temp_clause_ids_; + ClauseId next_inferred_clause_id_; + // Map used in GetOrCreateVariableForConjunction() to avoid creating duplicate // variables for identical sets of literals. 
absl::flat_hash_map, int> boolean_product_encoding_; @@ -167,8 +182,9 @@ class ModelCopy { // This should only be called on the first copy of the user given model. // Note that this reorder all constraints that use intervals last. We loose the // user-defined order, but hopefully that should not matter too much. -bool ImportModelWithBasicPresolveIntoContext(const CpModelProto& in_model, - PresolveContext* context); +bool ImportModelWithBasicPresolveIntoContext( + const CpModelProto& in_model, PresolveContext* context, + LratProofHandler* lrat_proof_handler = nullptr); // Same as ImportModelWithBasicPresolveIntoContext() except that variable // domains are read from domains and constraint might be filtered. diff --git a/ortools/sat/cp_model_search.cc b/ortools/sat/cp_model_search.cc index b54b1667d3e..9d574354695 100644 --- a/ortools/sat/cp_model_search.cc +++ b/ortools/sat/cp_model_search.cc @@ -936,8 +936,8 @@ std::vector GetFullWorkerParameters( } // As of November 2025, we don't support any LP reasoning when producing an // UNSAT proof. 
- if ((params.check_lrat_proof() || params.check_drat_proof() || - params.output_drat_proof()) && + if ((params.check_lrat_proof() || params.output_lrat_proof() || + params.check_drat_proof() || params.output_drat_proof()) && params.linearization_level() > 1) { continue; } diff --git a/ortools/sat/cp_model_solver.cc b/ortools/sat/cp_model_solver.cc index 74ca39db91d..c6caa6efc17 100644 --- a/ortools/sat/cp_model_solver.cc +++ b/ortools/sat/cp_model_solver.cc @@ -70,7 +70,6 @@ #include "ortools/sat/cp_model_symmetries.h" #include "ortools/sat/cp_model_utils.h" #include "ortools/sat/diffn_util.h" -#include "ortools/sat/drat_checker.h" #include "ortools/sat/feasibility_jump.h" #include "ortools/sat/feasibility_pump.h" #include "ortools/sat/integer.h" @@ -801,11 +800,12 @@ void LogSubsolverNames(absl::Span> subsolvers, SOLVER_LOG(logger, ""); } -void LaunchSubsolvers(const SatParameters& params, SharedClasses* shared, +void LaunchSubsolvers(Model* global_model, SharedClasses* shared, std::vector>& subsolvers, absl::Span ignored) { // Initial logging. 
SOLVER_LOG(shared->logger, ""); + SatParameters& params = *global_model->GetOrCreate(); if (params.interleave_search()) { SOLVER_LOG(shared->logger, absl::StrFormat("Starting deterministic search at %.2fs with " @@ -842,6 +842,13 @@ void LaunchSubsolvers(const SatParameters& params, SharedClasses* shared, for (int i = 0; i < subsolvers.size(); ++i) { subsolvers[i].reset(); } + + if (params.check_merged_lrat_proof() && shared->response->ProblemIsSolved() && + !shared->response->HasFeasibleSolution()) { + LratMerger(global_model) + .Merge(shared->lrat_proof_status->GetProofFilenames()); + } + shared->LogFinalStatistics(); } @@ -1039,7 +1046,6 @@ class FullProblemSolver : public SubSolver { if (lrat_proof_handler != nullptr) { local_model_.Register(lrat_proof_handler.get()); local_model_.TakeOwnership(lrat_proof_handler.release()); - shared_->lrat_proof_status->NewSubSolver(); } // Setup the local logger, in multi-thread log_search_progress should be @@ -1061,17 +1067,8 @@ class FullProblemSolver : public SubSolver { LratProofHandler* lrat_proof_handler = local_model_.Mutable(); if (lrat_proof_handler != nullptr) { - WallTimer timer; - timer.Start(); - const bool valid = local_model_.GetOrCreate()->ModelIsUnsat() - ? lrat_proof_handler->Check() - : lrat_proof_handler->Valid(); - shared_->lrat_proof_status->NewSubsolverProofStatus( - valid ? 
DratChecker::Status::VALID : DratChecker::Status::INVALID, - lrat_proof_handler->lrat_check_enabled(), - lrat_proof_handler->drat_check_enabled(), - lrat_proof_handler->num_assumed_clauses(), timer.Get()); - lrat_proof_handler->AddStats(); + lrat_proof_handler->Close( + local_model_.GetOrCreate()->ModelIsUnsat()); } } @@ -2221,7 +2218,7 @@ void SolveCpModelParallel(SharedClasses* shared, Model* global_model) { [shared]() { shared->response->UpdateGapIntegral(); })); } - LaunchSubsolvers(params, shared, subsolvers, name_filter.AllIgnored()); + LaunchSubsolvers(global_model, shared, subsolvers, name_filter.AllIgnored()); } #endif // ORTOOLS_TARGET_OS_SUPPORTS_THREADS @@ -2579,9 +2576,15 @@ CpSolverResponse SolveCpModel(const CpModelProto& model_proto, Model* model) { auto context = std::make_unique(model, new_cp_model_proto, mapping_proto); - if (!ImportModelWithBasicPresolveIntoContext(model_proto, context.get())) { + std::unique_ptr lrat_proof_handler = + LratProofHandler::MaybeCreate(model); + if (!ImportModelWithBasicPresolveIntoContext(model_proto, context.get(), + lrat_proof_handler.get())) { const std::string info = "Problem proven infeasible during initial copy."; SOLVER_LOG(logger, info); + if (lrat_proof_handler != nullptr) { + lrat_proof_handler->Close(/*model_is_unsat=*/true); + } CpSolverResponse status_response; status_response.set_status(CpSolverStatus::INFEASIBLE); status_response.set_solution_info(info); @@ -2751,6 +2754,10 @@ CpSolverResponse SolveCpModel(const CpModelProto& model_proto, Model* model) { // Delete the context as soon as the presolve is done. Note that only // postsolve_mapping and mapping_proto are needed for postsolve. 
context.reset(nullptr); + if (lrat_proof_handler != nullptr) { + lrat_proof_handler->Close(presolve_status == CpSolverStatus::INFEASIBLE); + lrat_proof_handler.reset(); + } if (presolve_status != CpSolverStatus::UNKNOWN) { if (presolve_status == CpSolverStatus::INFEASIBLE && @@ -3037,7 +3044,7 @@ CpSolverResponse SolveCpModel(const CpModelProto& model_proto, Model* model) { std::vector> subsolvers; subsolvers.push_back(std::make_unique( "main", params, /*split_in_chunks=*/false, &shared)); - LaunchSubsolvers(params, &shared, subsolvers, {}); + LaunchSubsolvers(model, &shared, subsolvers, {}); } } diff --git a/ortools/sat/cp_model_solver_helpers.cc b/ortools/sat/cp_model_solver_helpers.cc index e8a7644707e..30ad271273f 100644 --- a/ortools/sat/cp_model_solver_helpers.cc +++ b/ortools/sat/cp_model_solver_helpers.cc @@ -847,7 +847,7 @@ void RegisterVariableBoundsLevelZeroImport( ClauseId clause_id = kNoClauseId; if (lrat_proof_handler != nullptr) { clause_id = clause_id_generator->GetNextId(); - lrat_proof_handler->AddSharedClause(clause_id, {lit}); + lrat_proof_handler->AddImportedClause(clause_id, {lit}); } if (trail->Assignment().LiteralIsFalse(lit)) { if (lrat_proof_handler != nullptr) { @@ -2291,6 +2291,7 @@ void SharedClasses::RegisterSharedClassesInLocalModel(Model* local_model) { local_model->Register(shared_tree_manager); local_model->Register(stats); local_model->Register(stat_tables); + local_model->Register(lrat_proof_status); // TODO(user): Use parameters and not the presence/absence of these class // to decide when to use them? this is not clear. diff --git a/ortools/sat/docs/channeling.md b/ortools/sat/docs/channeling.md index 28cb3b4cbf1..53bb49397c4 100644 --- a/ortools/sat/docs/channeling.md +++ b/ortools/sat/docs/channeling.md @@ -524,8 +524,8 @@ To make this more concrete, let's say you have 10 bins of capacity 100, and items to pack into the bins. You would like to maximize the number of bins that can accept one emergency load of size 20. 
-To do this, you need to maximize the number of bins that have a load less -than 80. In the code below, channeling is used to link the *load* and *slack* +To do this, you need to maximize the number of bins that have a load less than +80. In the code below, channeling is used to link the *load* and *slack* variables together: ### Python code diff --git a/ortools/sat/docs/scheduling.md b/ortools/sat/docs/scheduling.md index 60f193b2376..8a336fea033 100644 --- a/ortools/sat/docs/scheduling.md +++ b/ortools/sat/docs/scheduling.md @@ -14,8 +14,8 @@ exclusivity between tasks, and temporal relations between tasks. ## Interval variables Intervals are constraints containing three constant of affine expressions -(start, size, and end). Creating an interval constraint will enforce that -`start + size == end`. +(start, size, and end). Creating an interval constraint will enforce that `start ++ size == end`. The more general API uses three expressions to define the interval. If the size is fixed, a simpler API uses the start expression and the fixed size. @@ -2266,13 +2266,15 @@ of the start of the task. This is implemented using channeling constraints. 
The following code displays: - start=8 duration=3 across=0 - start=9 duration=3 across=0 - start=10 duration=3 across=0 - start=11 duration=4 across=1 - start=12 duration=4 across=1 - start=14 duration=3 across=0 - start=15 duration=3 across=0 +``` +start=8 duration=3 across=0 +start=9 duration=3 across=0 +start=10 duration=3 across=0 +start=11 duration=4 across=1 +start=12 duration=4 across=1 +start=14 duration=3 across=0 +start=15 duration=3 across=0 +``` ### Python code diff --git a/ortools/sat/implied_bounds.cc b/ortools/sat/implied_bounds.cc index 34853e568a1..7a9db59fcf5 100644 --- a/ortools/sat/implied_bounds.cc +++ b/ortools/sat/implied_bounds.cc @@ -686,7 +686,7 @@ void ProductDetector::ProcessTrailAtLevelOne() { if (trail_->CurrentDecisionLevel() != 1) return; ++num_trail_updates_; - const SatSolver::Decision decision = sat_solver_->Decisions()[0]; + const LiteralWithTrailIndex decision = trail_->Decisions()[0]; if (decision.literal.Index() >= seen_.size() || !seen_[decision.literal.Index()]) { return; diff --git a/ortools/sat/integer_test.cc b/ortools/sat/integer_test.cc index c0bde792460..7931c06d9d2 100644 --- a/ortools/sat/integer_test.cc +++ b/ortools/sat/integer_test.cc @@ -236,10 +236,12 @@ TEST(IntegerTrailTest, Untrail) { // We need a reason for the Enqueue(): const Literal r(model.Add(NewBooleanVariable()), true); + const Literal dummy1(model.Add(NewBooleanVariable()), true); + const Literal dummy2(model.Add(NewBooleanVariable()), true); trail->EnqueueWithUnitReason(r.Negated()); // Enqueue. 
- trail->SetDecisionLevel(1); + trail->EnqueueSearchDecision(dummy1); EXPECT_TRUE(p->Propagate(trail)); EXPECT_TRUE( p->Enqueue(IntegerLiteral::GreaterOrEqual(a, IntegerValue(5)), {r}, {})); @@ -248,18 +250,18 @@ TEST(IntegerTrailTest, Untrail) { p->Enqueue(IntegerLiteral::GreaterOrEqual(b, IntegerValue(7)), {r}, {})); EXPECT_EQ(7, p->LowerBound(b)); - trail->SetDecisionLevel(2); + trail->EnqueueSearchDecision(dummy2); EXPECT_TRUE(p->Propagate(trail)); EXPECT_TRUE( p->Enqueue(IntegerLiteral::GreaterOrEqual(b, IntegerValue(9)), {r}, {})); EXPECT_EQ(9, p->LowerBound(b)); // Untrail. - trail->SetDecisionLevel(1); + trail->Untrail(trail->PrepareBacktrack(1)); p->Untrail(*trail, 0); EXPECT_EQ(7, p->LowerBound(b)); - trail->SetDecisionLevel(0); + trail->Untrail(trail->PrepareBacktrack(0)); p->Untrail(*trail, 0); EXPECT_EQ(1, p->LowerBound(a)); EXPECT_EQ(2, p->LowerBound(b)); @@ -275,8 +277,7 @@ TEST(IntegerTrailTest, BasicReason) { trail->EnqueueWithUnitReason(Literal(-1)); trail->EnqueueWithUnitReason(Literal(-2)); trail->EnqueueWithUnitReason(Literal(+3)); - trail->EnqueueWithUnitReason(Literal(+4)); - trail->SetDecisionLevel(1); + trail->EnqueueSearchDecision(Literal(+4)); EXPECT_TRUE(p->Propagate(trail)); // Enqueue. @@ -321,7 +322,7 @@ TEST(IntegerTrailTest, LazyReason) { Trail* trail = model.GetOrCreate(); trail->Resize(10); - trail->SetDecisionLevel(1); + trail->EnqueueSearchDecision(Literal(+1)); EXPECT_TRUE(p->Propagate(trail)); LazyReasonForTest mock; @@ -350,8 +351,7 @@ TEST(IntegerTrailTest, LiteralAndBoundReason) { trail->EnqueueWithUnitReason(Literal(-1)); trail->EnqueueWithUnitReason(Literal(-2)); trail->EnqueueWithUnitReason(Literal(-3)); - trail->EnqueueWithUnitReason(Literal(-4)); - trail->SetDecisionLevel(1); + trail->EnqueueSearchDecision(Literal(-4)); EXPECT_TRUE(p->Propagate(trail)); // Enqueue. 
@@ -380,9 +380,8 @@ TEST(IntegerTrailTest, LevelZeroBounds) { Trail* trail = model.GetOrCreate(); trail->Resize(10); - trail->SetDecisionLevel(1); trail->EnqueueWithUnitReason(Literal(-1)); - trail->EnqueueWithUnitReason(Literal(-2)); + trail->EnqueueSearchDecision(Literal(-2)); EXPECT_TRUE(integer_trail->Propagate(trail)); // Enqueue. @@ -509,7 +508,8 @@ TEST(GenericLiteralWatcherTest, LevelZeroModifiedVariablesCallbackTest) { EXPECT_EQ(NegationOf(b), collector[1]); // Modify 1 variable at level 1. - model.GetOrCreate()->SetDecisionLevel(1); + const Literal dummy1(model.Add(NewBooleanVariable()), true); + model.GetOrCreate()->EnqueueSearchDecision(dummy1); EXPECT_TRUE(sat_solver->Propagate()); collector.clear(); EXPECT_TRUE(integer_trail->Enqueue( @@ -990,13 +990,11 @@ TEST(IntegerEncoderTest, EncodingToIntegerTrailPropagation) { EXPECT_BOUNDS_EQ(var, 3, 9); // We remove the value 4, nothing happen. - trail->SetDecisionLevel(1); trail->EnqueueSearchDecision(encoding[1].literal.Negated()); EXPECT_TRUE(sat_solver->Propagate()); EXPECT_BOUNDS_EQ(var, 3, 9); // When we remove 3, the lower bound change though. - trail->SetDecisionLevel(2); trail->EnqueueSearchDecision(encoding[0].literal.Negated()); EXPECT_TRUE(sat_solver->Propagate()); EXPECT_BOUNDS_EQ(var, 7, 9); @@ -1011,7 +1009,6 @@ TEST(IntegerEncoderTest, EncodingToIntegerTrailPropagation) { } // Test the other direction. 
- trail->SetDecisionLevel(3); trail->EnqueueSearchDecision(encoding[3].literal.Negated()); EXPECT_TRUE(sat_solver->Propagate()); EXPECT_BOUNDS_EQ(var, 7, 7); diff --git a/ortools/sat/lb_tree_search.cc b/ortools/sat/lb_tree_search.cc index ebad779f174..cf4912a47dc 100644 --- a/ortools/sat/lb_tree_search.cc +++ b/ortools/sat/lb_tree_search.cc @@ -117,7 +117,7 @@ void LbTreeSearch::EnableLpAndLoadBestBasis() { DCHECK(lp_constraint_ != nullptr); lp_constraint_->EnablePropagation(true); - const int level = sat_solver_->CurrentDecisionLevel(); + const int level = trail_->CurrentDecisionLevel(); if (current_branch_.empty()) return; NodeIndex n = current_branch_[0]; // Root. @@ -129,7 +129,7 @@ void LbTreeSearch::EnableLpAndLoadBestBasis() { basis_level = i; last_node_with_basis = n; } - const Literal decision = sat_solver_->Decisions()[i].literal; + const Literal decision = trail_->Decisions()[i].literal; if (nodes_[n].literal_index == decision.Index()) { n = nodes_[n].true_child; } else { @@ -927,7 +927,7 @@ SatSolver::Status LbTreeSearch::Search( if (!search_helper_->TakeDecision(Literal(decision))) { return sat_solver_->UnsatStatus(); } - if (sat_solver_->CurrentDecisionLevel() < base_level) { + if (trail_->CurrentDecisionLevel() < base_level) { // TODO(user): it would be nice to mark some node as infeasible if // this is the case. However this could happen after many decision and // we realize with the lp that one of them should have been fixed @@ -939,7 +939,7 @@ SatSolver::Status LbTreeSearch::Search( } } - if (sat_solver_->CurrentDecisionLevel() <= base_level) { + if (trail_->CurrentDecisionLevel() <= base_level) { continue; } @@ -985,9 +985,9 @@ SatSolver::Status LbTreeSearch::Search( // The decision level is the number of decision taken. // Decision()[level] is the decision at that level. 
int backtrack_level = base_level; - DCHECK_LE(current_branch_.size(), sat_solver_->CurrentDecisionLevel()); + DCHECK_LE(current_branch_.size(), trail_->CurrentDecisionLevel()); while (backtrack_level < current_branch_.size() && - sat_solver_->Decisions()[backtrack_level].literal.Index() == + trail_->Decisions()[backtrack_level].literal.Index() == nodes_[current_branch_[backtrack_level]].literal_index) { ++backtrack_level; } diff --git a/ortools/sat/lrat.proto b/ortools/sat/lrat.proto new file mode 100644 index 00000000000..000a26736b6 --- /dev/null +++ b/ortools/sat/lrat.proto @@ -0,0 +1,75 @@ +// Copyright 2010-2025 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Proto used to store LRAT proofs (https://arxiv.org/abs/1612.02353) with some +// extensions to support multi-threading (with one output file per worker). This +// proto is only used internally to store partial proofs on disk before merging +// them. It can be changed without backward compatibility, and should not be +// used directly by users. + +// LINT: LEGACY_NAMES +syntax = "proto2"; + +package operations_research.sat; + +// A clause imported from the input problem, or from another worker. +message LratImportedClause { + optional int64 clause_id = 1; + repeated int32 literals = 2 [packed = true]; +} + +// An LRAT inferred clause. +message LratInferredClause { + optional int64 clause_id = 1; + // Literals are represented with LiteralIndex values.
+ repeated int32 literals = 2 [packed = true]; + // Clauses which become unit and possibly empty if all the `literals` are + // assumed to be false (verification stops at the first empty clause). This + // list must be in unit propagation order. See LratChecker for more details. + repeated int64 unit_ids = 3 [packed = true]; + + // If `rat_infos` is empty, the last `unit_ids` clause must become empty after + // unit propagation. If the last `unit_ids` clause does not become empty by + // unit propagation, then `rat_infos` must contain all the clauses which + // contain the negation of the first `literals` (called the pivot 'p') -- and + // no other clauses. Moreover, for each r in `rat_infos`, all the `r.unit_ids` + // clauses must become unit and eventually empty if all the literals of the + // `r.resolvant_id` clause (minus ~p), plus those in `literals`, are assumed + // to be false (this list must be in unit propagation order; verification + // stops at the first empty clause). See LratChecker for more details. + message RatInfo { + optional int64 resolvant_id = 1; + repeated int64 unit_ids = 2 [packed = true]; + } + repeated RatInfo rat_infos = 4; +} + +// A list of clauses to delete. +message LratDeletedClauses { + // IDs of the imported or inferred clauses to delete. A deleted clause can no + // longer be used to infer clauses. + repeated int64 clause_ids = 1 [packed = true]; +} + +// An LRAT UNSAT proof is a sequence of steps, starting from imported clauses +// and ending with the empty clause. At each step new clauses can be inferred +// from previous ones (with an explicit proof), or imported from another proof +// built by another thread. A proof step can also delete clauses which are no +// longer needed. Each clause is identified by a unique clause ID. 
+message LratProofStep { + oneof step { + LratImportedClause imported_clause = 1; + LratInferredClause inferred_clause = 2; + LratDeletedClauses deleted_clauses = 3; + } +} diff --git a/ortools/sat/lrat_proof_handler.cc b/ortools/sat/lrat_proof_handler.cc index 0fb196165bb..b772e442746 100644 --- a/ortools/sat/lrat_proof_handler.cc +++ b/ortools/sat/lrat_proof_handler.cc @@ -14,35 +14,390 @@ #include "ortools/sat/lrat_proof_handler.h" #include +#include #include +#include +#include #include #include +#include #include #include "absl/flags/flag.h" #include "absl/log/check.h" #include "absl/log/log.h" +#include "absl/strings/str_cat.h" #include "absl/strings/str_join.h" #include "absl/types/span.h" #include "ortools/base/file.h" #include "ortools/base/options.h" +#include "ortools/base/timer.h" #include "ortools/sat/drat_checker.h" #include "ortools/sat/drat_writer.h" +#include "ortools/sat/lrat.pb.h" #include "ortools/sat/lrat_checker.h" #include "ortools/sat/model.h" +#include "ortools/sat/recordio.h" #include "ortools/sat/sat_base.h" +#include "ortools/sat/synchronization.h" #if defined(_MSC_VER) ABSL_FLAG(std::string, cp_model_drat_output, ".\\drat.txt", "File name for the generated DRAT proof, if DRAT output is enabled."); +ABSL_FLAG(std::string, cp_model_lrat_output_prefix, ".\\lrat", + "File name prefix for the generated LRAT proof files, if LRAT output " + "is enabled. One file is created for each worker."); #else ABSL_FLAG(std::string, cp_model_drat_output, "/tmp/drat.txt", "File name for the generated DRAT proof, if DRAT output is enabled."); +ABSL_FLAG(std::string, cp_model_lrat_output_prefix, "/tmp/lrat", + "File name prefix for the generated LRAT proof files, if LRAT output " + "is enabled. 
One file is created for each worker."); #endif namespace operations_research { namespace sat { +LratWriter::LratWriter(std::string_view filename) + : filename_(filename), + ofstream_(filename_, std::ios::binary), + writer_(&ofstream_) { + if (!ofstream_.good()) { + LOG(FATAL) << "Failed to open LRAT output file: " << filename; + } +} + +LratWriter::~LratWriter() { writer_.Close(); } + +void LratWriter::AddImportedClause(ClauseId id, + absl::Span clause) { + LratProofStep step; + LratImportedClause* imported_clause = step.mutable_imported_clause(); + imported_clause->set_clause_id(id.value()); + for (const Literal literal : clause) { + imported_clause->add_literals(literal.Index().value()); + } + CHECK(writer_.WriteRecord(step)); +} + +void LratWriter::AddInferredClause(ClauseId id, + absl::Span clause, + absl::Span unit_ids, + absl::Span rat) { + LratProofStep step; + LratInferredClause* inferred_clause = step.mutable_inferred_clause(); + inferred_clause->set_clause_id(id.value()); + for (const Literal literal : clause) { + inferred_clause->add_literals(literal.Index().value()); + } + for (const ClauseId unit_id : unit_ids) { + inferred_clause->add_unit_ids(unit_id.value()); + } + for (const LratChecker::RatIds& rat_ids : rat) { + LratInferredClause::RatInfo* rat_info = inferred_clause->add_rat_infos(); + rat_info->set_resolvant_id(rat_ids.resolvant_id.value()); + for (const ClauseId unit_id : rat_ids.unit_ids) { + rat_info->add_unit_ids(unit_id.value()); + } + } + CHECK(writer_.WriteRecord(step)); +} + +void LratWriter::DeleteClause(ClauseId id) { + LratProofStep step; + step.mutable_deleted_clauses()->add_clause_ids(id.value()); + CHECK(writer_.WriteRecord(step)); +} + +namespace { +void IndicesToLiterals(absl::Span literal_indices, + std::vector* literals) { + literals->clear(); + literals->reserve(literal_indices.size()); + for (const int lit : literal_indices) { + literals->push_back(Literal(LiteralIndex(lit))); + } +} +} // namespace + 
+LratMerger::LratMerger(Model* model) + : id_(model->GetOrCreate()->NewSubSolverId()), + proof_status_(model->GetOrCreate()) { + const SatParameters& params = *model->GetOrCreate(); + if (params.check_merged_lrat_proof()) { + lrat_checker_ = std::make_unique(model); + } + debug_crash_on_error_ = params.debug_crash_if_lrat_check_fails(); +} + +LratMerger::~LratMerger() { + DratChecker::Status status = DratChecker::Status::UNKNOWN; + if (lrat_checker_ != nullptr) { + status = lrat_checker_->Check() ? DratChecker::Status::VALID + : DratChecker::Status::INVALID; + if (status == DratChecker::Status::INVALID && debug_crash_on_error_) { + LOG(FATAL) << "LRAT error: " << lrat_checker_->error_message(); + } + lrat_checker_->AddStats(); + } + proof_status_->NewSubsolverProofStatus(status, lrat_checker_ != nullptr, + /*drat_check_enabled=*/false, + /*num_assumed_clauses=*/0, 0.0); +} + +bool LratMerger::Merge(absl::Span proof_filenames) { + if (proof_filenames.empty()) return true; + merged_proof_filename_ = + absl::StrCat(absl::GetFlag(FLAGS_cp_model_lrat_output_prefix), ".txt"); + merged_proof_file_.open(merged_proof_filename_); + if (!merged_proof_file_.good()) { + return Error(absl::StrCat("failed to open LRAT output file: ", + merged_proof_filename_)); + } + if (!ReadPresolveProof(proof_filenames[0])) return false; + + const int num_workers = proof_filenames.size() - 1; + std::vector inputs(num_workers); + std::vector> readers(num_workers); + last_read_steps_.resize(num_workers); + local_to_global_ids_.resize(num_workers); + for (int i = 0; i < num_workers; ++i) { + const std::string& filename = proof_filenames[i + 1]; + inputs[i].open(filename, std::ios::binary); + if (!inputs[i].good()) { + return Error(absl::StrCat("failed to open LRAT input file: ", filename)); + } + readers[i] = std::make_unique(&inputs[i]); + if (!readers[i]->ReadRecord(&last_read_steps_[i])) { + last_read_steps_[i].Clear(); + } + } + + std::vector clause; + while (true) { + bool 
at_least_one_step_read = false; + int worker_with_missing_import = -1; + for (int i = 0; i < num_workers; ++i) { + const std::string& filename = proof_filenames[i + 1]; + // An empty step means that the reader is at the end of the file. + bool missing_import = false; + while (last_read_steps_[i].step_case() != LratProofStep::STEP_NOT_SET && + !missing_import) { + LratProofStep& step = last_read_steps_[i]; + switch (step.step_case()) { + case LratProofStep::kImportedClause: { + ClauseId local_id(step.imported_clause().clause_id()); + IndicesToLiterals(step.imported_clause().literals(), &clause); + std::sort(clause.begin(), clause.end()); + auto it = shared_clause_ids_.find(clause); + if (it != shared_clause_ids_.end()) { + local_to_global_ids_[i][local_id] = it->second; + } else { + missing_import = true; + } + break; + } + case LratProofStep::kInferredClause: + if (!RemapInferredClause(i, filename, + *step.mutable_inferred_clause())) { + return false; + } + clause.clear(); + IndicesToLiterals(step.inferred_clause().literals(), &clause); + std::sort(clause.begin(), clause.end()); + shared_clause_ids_.insert( + {clause, GlobalId(step.inferred_clause().clause_id())}); + if (!WriteInferredClause(step.inferred_clause())) return false; + // We found the empty clause, we don't need anymore steps. + if (step.inferred_clause().literals().empty()) return true; + break; + case LratProofStep::kDeletedClauses: + // TODO(user): implement this case. 
+ break; + default: + return Error(absl::StrCat("unknown step type ", step.step_case(), + " in ", filename)); + } + if (missing_import) { + worker_with_missing_import = i; + } else { + if (!readers[i]->ReadRecord(&last_read_steps_[i])) { + last_read_steps_[i].Clear(); + } + at_least_one_step_read = true; + } + } + } + if (!at_least_one_step_read) { + if (worker_with_missing_import >= 0) { + const LratImportedClause& missing_import = + last_read_steps_[worker_with_missing_import].imported_clause(); + clause.clear(); + IndicesToLiterals(missing_import.literals(), &clause); + return Error( + absl::StrCat("imported clause not found in ", + proof_filenames[worker_with_missing_import + 1], + ": id=", missing_import.clause_id(), + ", literals=", absl::StrJoin(clause, ","))); + } else { + return true; + } + } + } +} + +bool LratMerger::ReadPresolveProof(const std::string& filename) { + std::ifstream input(filename, std::ios::binary); + if (!input.good()) { + return Error(absl::StrCat("failed to open LRAT input file: ", filename)); + } + RecordReader reader(&input); + LratProofStep step; + std::vector clause; + GlobalId max_global_id(0); + while (reader.ReadRecord(&step)) { + switch (step.step_case()) { + case LratProofStep::kImportedClause: { + GlobalId global_id(step.imported_clause().clause_id()); + max_global_id = std::max(max_global_id, global_id); + IndicesToLiterals(step.imported_clause().literals(), &clause); + SortAndAddSharedClause(global_id, clause); + if (lrat_checker_ != nullptr && + !lrat_checker_->AddProblemClause(ClauseId(global_id.value()), + clause)) { + return LratError(); + } + break; + } + case LratProofStep::kInferredClause: { + GlobalId global_id(step.inferred_clause().clause_id()); + max_global_id = std::max(max_global_id, global_id); + IndicesToLiterals(step.inferred_clause().literals(), &clause); + SortAndAddSharedClause(global_id, clause); + if (!WriteInferredClause(step.inferred_clause())) return false; + break; + } + case 
LratProofStep::kDeletedClauses: + // TODO(user): implement this. + break; + default: + return Error(absl::StrCat("unknown proof step type ", step.step_case(), + " in ", filename)); + } + } + next_global_id_ = ++max_global_id; + return true; +} + +void LratMerger::SortAndAddSharedClause(GlobalId id, + std::vector& literals) { + std::sort(literals.begin(), literals.end()); + shared_clause_ids_.insert({literals, id}); +} + +bool LratMerger::RemapInferredClause(int worker_index, + const std::string& filename, + LratInferredClause& inferred_clause) { + const GlobalId global_id = NextGlobalId(); + ClauseId local_id = ClauseId(inferred_clause.clause_id()); + local_to_global_ids_[worker_index][local_id] = global_id; + + inferred_clause.set_clause_id(global_id.value()); + if (!RemapClauseIds(worker_index, filename, + inferred_clause.mutable_unit_ids())) { + return false; + } + for (LratInferredClause::RatInfo& rat_info : + *inferred_clause.mutable_rat_infos()) { + local_id = ClauseId(rat_info.resolvant_id()); + auto it = local_to_global_ids_[worker_index].find(local_id); + if (it == local_to_global_ids_[worker_index].end()) { + return Error( + absl::StrCat("unknown clause ID ", local_id, " in ", filename)); + } + rat_info.set_resolvant_id(it->second.value()); + if (!RemapClauseIds(worker_index, filename, rat_info.mutable_unit_ids())) { + return false; + } + } + return true; +} + +bool LratMerger::RemapClauseIds( + int worker_index, const std::string& filename, + google::protobuf::RepeatedField* clause_ids) { + for (int i = 0; i < clause_ids->size(); ++i) { + const ClauseId local_id = ClauseId(clause_ids->Get(i)); + auto it = local_to_global_ids_[worker_index].find(local_id); + if (it == local_to_global_ids_[worker_index].end()) { + return Error( + absl::StrCat("unknown clause ID ", local_id, " in ", filename)); + } + clause_ids->Set(i, it->second.value()); + } + return true; +} + +bool LratMerger::WriteInferredClause( + const LratInferredClause& inferred_clause) { + if 
(lrat_checker_ != nullptr) { + // TODO(user): can we optimize away this format conversion? + IndicesToLiterals(inferred_clause.literals(), &tmp_clause_); + tmp_unit_ids_.clear(); + tmp_unit_ids_.reserve(inferred_clause.unit_ids_size()); + for (const int64_t id : inferred_clause.unit_ids()) { + tmp_unit_ids_.push_back(ClauseId(id)); + } + tmp_rat_ids_.clear(); + tmp_rat_ids_.reserve(inferred_clause.rat_infos_size()); + for (const LratInferredClause::RatInfo& rat_info : + inferred_clause.rat_infos()) { + tmp_rat_ids_.push_back( + {ClauseId(rat_info.resolvant_id()), + std::vector(rat_info.unit_ids().begin(), + rat_info.unit_ids().end())}); + } + if (!lrat_checker_->AddInferredClause(ClauseId(inferred_clause.clause_id()), + tmp_clause_, tmp_unit_ids_, + tmp_rat_ids_)) { + return LratError(); + } + } + merged_proof_file_ << inferred_clause.clause_id(); + for (const int lit : inferred_clause.literals()) { + merged_proof_file_ << " " << Literal(LiteralIndex(lit)).SignedValue(); + } + merged_proof_file_ << " 0"; + for (const int unit_id : inferred_clause.unit_ids()) { + merged_proof_file_ << " " << unit_id; + } + for (const LratInferredClause::RatInfo& rat_info : + inferred_clause.rat_infos()) { + merged_proof_file_ << " " << -rat_info.resolvant_id(); + for (const int unit_id : rat_info.unit_ids()) { + merged_proof_file_ << " " << unit_id; + } + } + merged_proof_file_ << " 0\n"; + return true; +} + +bool LratMerger::Error(std::string_view message) const { + if (debug_crash_on_error_) { + LOG(FATAL) << "LRAT merge error: " << message; + } else { + LOG(ERROR) << "LRAT merge error: " << message; + } + return false; +} + +bool LratMerger::LratError() const { + if (debug_crash_on_error_) { + LOG(FATAL) << "LRAT error: " << lrat_checker_->error_message(); + } + return false; +} + namespace { std::vector SortClauseForDrat(absl::Span clause) { // The sorting is such that new variables appear first. 
This is important for @@ -59,18 +414,24 @@ std::vector SortClauseForDrat(absl::Span clause) { std::unique_ptr LratProofHandler::MaybeCreate(Model* model) { const SatParameters& params = *model->GetOrCreate(); - if (!params.check_lrat_proof() && !params.check_drat_proof() && - !params.output_drat_proof()) { + if (!params.check_lrat_proof() && !params.output_lrat_proof() && + !params.check_drat_proof() && !params.output_drat_proof()) { return nullptr; } return std::unique_ptr(new LratProofHandler(model)); } -LratProofHandler::LratProofHandler(Model* model) { +LratProofHandler::LratProofHandler(Model* model) + : id_(model->GetOrCreate()->NewSubSolverId()), + proof_status_(model->GetOrCreate()) { const SatParameters& params = *model->GetOrCreate(); if (params.check_lrat_proof()) { lrat_checker_ = std::make_unique(model); } + if (params.output_lrat_proof()) { + lrat_writer_ = std::make_unique(absl::StrCat( + absl::GetFlag(FLAGS_cp_model_lrat_output_prefix), id_, ".bin")); + } if (params.check_drat_proof()) { drat_checker_ = std::make_unique(); } @@ -93,12 +454,16 @@ bool LratProofHandler::AddProblemClause(ClauseId id, LOG(FATAL) << "LRAT error: problem clauses must not be added after " "EndProblemClauses()"; } - if (lrat_checker_ != nullptr) { - return CheckResult(lrat_checker_->AddProblemClause(id, clause)); + if (lrat_checker_ != nullptr && + !lrat_checker_->AddProblemClause(id, clause)) { + return LratError(); } if (drat_checker_ != nullptr) { drat_checker_->AddProblemClause(SortClauseForDrat(clause)); } + if (lrat_writer_ != nullptr) { + lrat_writer_->AddImportedClause(id, clause); + } return true; } @@ -120,9 +485,9 @@ bool LratProofHandler::AddInferredClause( << " literals=" << absl::StrJoin(clause, ",") << " unit_ids=" << absl::StrJoin(unit_ids, ",") << " rat={" << absl::StrJoin(rat, " ") << "}"; - if (lrat_checker_ != nullptr) { - return CheckResult( - lrat_checker_->AddInferredClause(id, clause, unit_ids, rat)); + if (lrat_checker_ != nullptr && + 
!lrat_checker_->AddInferredClause(id, clause, unit_ids, rat)) { + return LratError(); } if (drat_checker_ != nullptr) { if (all_problem_clauses_loaded_) { @@ -132,23 +497,30 @@ bool LratProofHandler::AddInferredClause( SortClauseForDrat(clause)); } } + if (lrat_writer_ != nullptr) { + lrat_writer_->AddInferredClause(id, clause, unit_ids, rat); + } if (drat_writer_ != nullptr) { drat_writer_->AddClause(clause); } return true; } -bool LratProofHandler::AddSharedClause(ClauseId id, - absl::Span clause) { - VLOG(1) << "AddSharedClause: id=" << id +bool LratProofHandler::AddImportedClause(ClauseId id, + absl::Span clause) { + VLOG(1) << "AddImportedClause: id=" << id << " literals=" << absl::StrJoin(clause, ","); - if (lrat_checker_ != nullptr) { - return CheckResult(lrat_checker_->AddProblemClause(id, clause)); + if (lrat_checker_ != nullptr && + !lrat_checker_->AddProblemClause(id, clause)) { + return LratError(); } if (drat_checker_ != nullptr) { - LOG(ERROR) << "Shared clauses are not supported by the DRAT checker."; + LOG(ERROR) << "Imported clauses are not supported by the DRAT checker."; return false; } + if (lrat_writer_ != nullptr) { + lrat_writer_->AddImportedClause(id, clause); + } return true; } @@ -160,8 +532,9 @@ bool LratProofHandler::AddAssumedClause(ClauseId id, LOG(FATAL) << "LRAT error: assumed clauses are not supposed to happen"; } ++num_assumed_clauses_; - if (lrat_checker_ != nullptr) { - return CheckResult(lrat_checker_->AddProblemClause(id, clause)); + if (lrat_checker_ != nullptr && + !lrat_checker_->AddProblemClause(id, clause)) { + return LratError(); } if (drat_checker_ != nullptr) { // The DRAT checker requires all problem clauses first, followed by inferred @@ -200,21 +573,26 @@ void LratProofHandler::DeleteClause(ClauseId id, } VLOG(1) << "DeleteClause: id=" << id << " literals=" << absl::StrJoin(clause, ","); + if (lrat_checker_ != nullptr) { + lrat_checker_->DeleteClauses({id}); + } if (drat_checker_ != nullptr) { 
drat_checker_->DeleteClause(clause); } + if (lrat_writer_ != nullptr) { + lrat_writer_->DeleteClause(id); + } if (drat_writer_ != nullptr) { drat_writer_->DeleteClause(clause); } - if (lrat_checker_ != nullptr) { - lrat_checker_->DeleteClauses({id}); - } } DratChecker::Status LratProofHandler::Valid() const { if (lrat_checker_ != nullptr) { - return CheckResult(lrat_checker_->Valid()) ? DratChecker::Status::VALID - : DratChecker::Status::INVALID; + if (lrat_checker_->Valid()) { + return DratChecker::Status::VALID; + } + return DratChecker::Status::INVALID; } return DratChecker::Status::UNKNOWN; } @@ -222,8 +600,11 @@ DratChecker::Status LratProofHandler::Valid() const { DratChecker::Status LratProofHandler::Check() { DratChecker::Status status = DratChecker::Status::UNKNOWN; if (lrat_checker_ != nullptr) { - status = CheckResult(lrat_checker_->Check()) ? DratChecker::Status::VALID - : DratChecker::Status::INVALID; + status = lrat_checker_->Check() ? DratChecker::Status::VALID + : DratChecker::Status::INVALID; + if (status == DratChecker::Status::INVALID && debug_crash_on_error_) { + LOG(FATAL) << "LRAT error: " << lrat_checker_->error_message(); + } } if (status != DratChecker::Status::INVALID && drat_checker_ != nullptr) { drat_checker_->Check(max_drat_time_in_seconds_); @@ -234,17 +615,27 @@ DratChecker::Status LratProofHandler::Check() { return status; } -bool LratProofHandler::CheckResult(bool result) const { - if (debug_crash_on_error_ && !result && lrat_checker_ != nullptr) { +bool LratProofHandler::LratError() const { + if (debug_crash_on_error_) { LOG(FATAL) << "LRAT error: " << lrat_checker_->error_message(); } - return result; + return false; } -void LratProofHandler::AddStats() const { +void LratProofHandler::Close(bool model_is_unsat) { + WallTimer timer; + timer.Start(); + const bool valid = model_is_unsat ? Check() : Valid(); + proof_status_->NewSubsolverProofStatus( + valid ? 
DratChecker::Status::VALID : DratChecker::Status::INVALID, + lrat_check_enabled(), drat_check_enabled(), num_assumed_clauses(), + timer.Get()); if (lrat_checker_ != nullptr) { lrat_checker_->AddStats(); } + if (lrat_writer_ != nullptr) { + proof_status_->NewProofFile(lrat_writer_->filename()); + } } } // namespace sat diff --git a/ortools/sat/lrat_proof_handler.h b/ortools/sat/lrat_proof_handler.h index be5d1c76493..3de8ec68e35 100644 --- a/ortools/sat/lrat_proof_handler.h +++ b/ortools/sat/lrat_proof_handler.h @@ -15,35 +15,128 @@ #define ORTOOLS_SAT_LRAT_PROOF_HANDLER_H_ #include +#include #include #include +#include +#include #include +#include "absl/container/flat_hash_map.h" #include "absl/types/span.h" +#include "ortools/base/strong_int.h" #include "ortools/sat/drat_checker.h" #include "ortools/sat/drat_writer.h" +#include "ortools/sat/lrat.pb.h" #include "ortools/sat/lrat_checker.h" #include "ortools/sat/model.h" +#include "ortools/sat/recordio.h" #include "ortools/sat/sat_base.h" +#include "ortools/sat/synchronization.h" namespace operations_research { namespace sat { +// Writes an LRAT proof to a file in "record io" format. +class LratWriter { + public: + explicit LratWriter(std::string_view filename); + ~LratWriter(); + + std::string_view filename() const { return filename_; } + + void AddImportedClause(ClauseId id, absl::Span clause); + + void AddInferredClause(ClauseId id, absl::Span clause, + absl::Span unit_ids, + absl::Span rat = {}); + + void DeleteClause(ClauseId id); + + private: + std::string filename_; + std::ofstream ofstream_; + RecordWriter writer_; +}; + +// Merges separate LRAT proofs into a single LRAT file in ASCII format. +class LratMerger { + public: + explicit LratMerger(Model* model); + ~LratMerger(); + + // Merges the given LRAT proofs in a single one, and writes it to a file in + // ASCII format. The first proof must be the presolve proof. 
Its imported + // clauses must be the input problem clauses, and their ID must be the 1-based + // clause index in the input CNF file. Returns true on success, false + // otherwise. + bool Merge(absl::Span proof_filenames); + + private: + // Clause IDs used in the merged proof. Local clause IDs in individual proof + // files are remapped to global clause IDs (except for the presolve proof: its + // IDs are kept unchanged). This mapping is stored in `local_to_global_ids_` + // (one map per proof file, except for the presolve proof). + DEFINE_STRONG_INT_TYPE(GlobalId, int64_t); + + // Reads the proof of the presolved model and adds its clauses to + // `shared_clause_ids_`. Also checks this proof if lrat_checker_ is not null. + // Returns true on success, false otherwise. + bool ReadPresolveProof(const std::string& filename); + + // Canonicalizes (i.e., sorts) and registers a clause so that it can be + // imported from an individual proof file. + // TODO(user): is the canonicalization really needed? + void SortAndAddSharedClause(GlobalId id, std::vector& literals); + + // Remaps the local clause IDs in the given inferred clause to global IDs, in + // place. Returns true on success, false otherwise. + bool RemapInferredClause(int worker_index, const std::string& filename, + LratInferredClause& inferred_clause); + bool RemapClauseIds(int worker_index, const std::string& filename, + google::protobuf::RepeatedField* clause_ids); + + // Writes the given clause to the merged proof file, in LRAT ASCII file + // format. Also checks it if lrat_checker_ is not null. Returns true on + // success, false otherwise. 
+ bool WriteInferredClause(const LratInferredClause& inferred_clause); + + GlobalId NextGlobalId() { return GlobalId(next_global_id_++); } + + bool Error(std::string_view message) const; + bool LratError() const; + + const int id_; + SharedLratProofStatus* proof_status_; + std::unique_ptr lrat_checker_; + bool debug_crash_on_error_; + + std::string merged_proof_filename_; + std::ofstream merged_proof_file_; + GlobalId next_global_id_; + + absl::flat_hash_map, GlobalId> shared_clause_ids_; + std::vector> local_to_global_ids_; + std::vector last_read_steps_; + + std::vector tmp_clause_; + std::vector tmp_unit_ids_; + std::vector tmp_rat_ids_; +}; + // Handles the LRAT proof of a SAT problem by either checking it incrementally // and/or by saving it to a file. class LratProofHandler { public: - // TODO(user): pass the [presolved] model proto to the handler, so that - // it can map internal problem clause IDs to constraint indices in the - // original model. This will be needed to write the LRAT proof in a file that - // can be checked with an external LRAT checker, expecting the standard LRAT - // ASCII file format (which requires problem clauses IDs between 1 and n). static std::unique_ptr MaybeCreate(Model* model); bool lrat_check_enabled() const { return lrat_checker_ != nullptr; } + bool lrat_output_enabled() const { return lrat_writer_ != nullptr; } bool drat_check_enabled() const { return drat_checker_ != nullptr; } bool drat_output_enabled() const { return drat_writer_ != nullptr; } + int64_t num_assumed_clauses() const { return num_assumed_clauses_; } + // Adds a clause of the problem. See LratChecker for more details. bool AddProblemClause(ClauseId id, absl::Span clause); @@ -59,7 +152,7 @@ class LratProofHandler { // Adds a clause which was inferred by another worker. Returns true if // successful (the operation can fail if LRAT checks are enabled, and the ID // is already used by another clause). 
- bool AddSharedClause(ClauseId id, absl::Span clause); + bool AddImportedClause(ClauseId id, absl::Span clause); // Adds a clause which is assumed to be true, without proof. Returns true if // successful (the operation fails if DRAT checks are enabled, or if LRAT @@ -89,16 +182,17 @@ class LratProofHandler { // with DRAT checks), or if neither LRAT nor DRAT checks were enabled. DratChecker::Status Check(); - void AddStats() const; - - int64_t num_assumed_clauses() const { return num_assumed_clauses_; } + void Close(bool model_is_unsat); private: explicit LratProofHandler(Model* model); - bool CheckResult(bool result) const; + bool LratError() const; + const int id_; + SharedLratProofStatus* proof_status_; std::unique_ptr lrat_checker_; + std::unique_ptr lrat_writer_; std::unique_ptr drat_checker_; std::unique_ptr drat_writer_; double max_drat_time_in_seconds_ = std::numeric_limits::infinity(); diff --git a/ortools/sat/pb_constraint_test.cc b/ortools/sat/pb_constraint_test.cc index b8415dbbdaf..8aa9cc63f57 100644 --- a/ortools/sat/pb_constraint_test.cc +++ b/ortools/sat/pb_constraint_test.cc @@ -290,10 +290,8 @@ TEST(UpperBoundedLinearConstraintTest, Conflict) { EXPECT_EQ(threshold, 0); // Two assignment from other part of the solver. - trail.SetDecisionLevel(1); - trail.Enqueue(Literal(+1), AssignmentType::kSearchDecision); - trail.SetDecisionLevel(2); - trail.Enqueue(Literal(+2), AssignmentType::kSearchDecision); + trail.EnqueueSearchDecision(Literal(+1)); + trail.EnqueueSearchDecision(Literal(+2)); // We propagate only +1. threshold -= 1; @@ -318,12 +316,9 @@ TEST(UpperBoundedLinearConstraintTest, CompactReason) { EXPECT_EQ(threshold, 3); // Two assignment from other part of the solver. 
- trail.SetDecisionLevel(1); - trail.Enqueue(Literal(+1), AssignmentType::kSearchDecision); - trail.SetDecisionLevel(2); - trail.Enqueue(Literal(+2), AssignmentType::kSearchDecision); - trail.SetDecisionLevel(3); - trail.Enqueue(Literal(+3), AssignmentType::kSearchDecision); + trail.EnqueueSearchDecision(Literal(+1)); + trail.EnqueueSearchDecision(Literal(+2)); + trail.EnqueueSearchDecision(Literal(+3)); // We propagate when +3 is processed. threshold = -3; @@ -357,16 +352,11 @@ TEST(UpperBoundedLinearConstraintTest, ConflictAfterEnforcementStatusChange) { EXPECT_EQ(threshold, 3); // Some assignments from other parts of the solver. - trail.SetDecisionLevel(1); - trail.Enqueue(Literal(+1), AssignmentType::kSearchDecision); - trail.SetDecisionLevel(2); - trail.Enqueue(Literal(+2), AssignmentType::kSearchDecision); - trail.SetDecisionLevel(3); - trail.Enqueue(Literal(+3), AssignmentType::kSearchDecision); - trail.SetDecisionLevel(4); - trail.Enqueue(Literal(+4), AssignmentType::kSearchDecision); - trail.SetDecisionLevel(5); - trail.Enqueue(Literal(+9), AssignmentType::kSearchDecision); + trail.EnqueueSearchDecision(Literal(+1)); + trail.EnqueueSearchDecision(Literal(+2)); + trail.EnqueueSearchDecision(Literal(+3)); + trail.EnqueueSearchDecision(Literal(+4)); + trail.EnqueueSearchDecision(Literal(+9)); // We detect a conflict when +9 is processed. threshold = -7; @@ -395,16 +385,11 @@ TEST(UpperBoundedLinearConstraintTest, PropagateEnforcementAfterStatusChange) { EXPECT_EQ(threshold, 3); // Some assignments from other parts of the solver. 
- trail.SetDecisionLevel(1); - trail.Enqueue(Literal(+1), AssignmentType::kSearchDecision); - trail.SetDecisionLevel(2); - trail.Enqueue(Literal(+2), AssignmentType::kSearchDecision); - trail.SetDecisionLevel(3); - trail.Enqueue(Literal(+3), AssignmentType::kSearchDecision); - trail.SetDecisionLevel(4); - trail.Enqueue(Literal(+4), AssignmentType::kSearchDecision); - trail.SetDecisionLevel(5); - trail.Enqueue(Literal(+9), AssignmentType::kSearchDecision); + trail.EnqueueSearchDecision(Literal(+1)); + trail.EnqueueSearchDecision(Literal(+2)); + trail.EnqueueSearchDecision(Literal(+3)); + trail.EnqueueSearchDecision(Literal(+4)); + trail.EnqueueSearchDecision(Literal(+9)); // We should propagate -8 when +9 is processed. threshold = -7; @@ -442,10 +427,8 @@ TEST(UpperBoundedLinearConstraintTest, EXPECT_EQ(threshold, 0); // Some assignments from other parts of the solver. - trail.SetDecisionLevel(1); - trail.Enqueue(Literal(+1), AssignmentType::kSearchDecision); - trail.SetDecisionLevel(2); - trail.Enqueue(Literal(+2), AssignmentType::kSearchDecision); + trail.EnqueueSearchDecision(Literal(+1)); + trail.EnqueueSearchDecision(Literal(+2)); // We should propagate -9 when +2 is processed. const int source_trail_index = trail.Info(Literal(+1).Variable()).trail_index; @@ -497,8 +480,7 @@ TEST(PbConstraintsTest, BasicPropagation) { Trail& trail = *(model.GetOrCreate()); trail.Resize(10); - trail.SetDecisionLevel(1); - trail.Enqueue(Literal(-1), AssignmentType::kSearchDecision); + trail.EnqueueSearchDecision(Literal(-1)); csts.Resize(10); csts.AddConstraint(MakePb({{-1, 1}, {+2, 1}}), Coefficient(1), &trail); @@ -519,7 +501,7 @@ TEST(PbConstraintsTest, BasicPropagation) { // Untrail, and repropagate everything. 
csts.Untrail(trail, 0); trail.Untrail(0); - trail.Enqueue(Literal(-1), AssignmentType::kSearchDecision); + trail.EnqueueSearchDecision(Literal(-1)); while (!csts.PropagationIsDone(trail)) EXPECT_TRUE(csts.Propagate(&trail)); EXPECT_THAT(TrailToVector(trail), LiteralsAre(-1, -2, -3, -4)); } @@ -532,7 +514,6 @@ TEST(PbConstraintsTest, BasicDeletion) { PbConstraintsEnqueueHelper helper; helper.reasons.resize(10); trail.Resize(10); - trail.SetDecisionLevel(0); csts.Resize(10); csts.AddConstraint(MakePb({{-1, 1}, {+2, 1}}), Coefficient(1), &trail); csts.AddConstraint(MakePb({{-1, 7}, {-2, 7}, {+3, 7}}), Coefficient(20), @@ -584,8 +565,7 @@ TEST(PbConstraintsTest, UnsatAtConstruction) { Trail& trail = *(model.GetOrCreate()); trail.Resize(10); - trail.SetDecisionLevel(1); - trail.Enqueue(Literal(+1), AssignmentType::kUnitReason); + trail.EnqueueSearchDecision(Literal(+1)); trail.Enqueue(Literal(+2), AssignmentType::kUnitReason); trail.Enqueue(Literal(+3), AssignmentType::kUnitReason); @@ -610,7 +590,6 @@ TEST(PbConstraintsTest, AddConstraintWithLevel0Propagation) { Trail& trail = *(model.GetOrCreate()); trail.Resize(10); - trail.SetDecisionLevel(0); csts.Resize(10); EXPECT_TRUE(csts.AddConstraint(MakePb({{+1, 1}, {+2, 3}, {+3, 7}}), @@ -637,7 +616,7 @@ TEST(PbConstraintsDeathTest, AddConstraintWithLevel0PropagationInSearch) { Trail& trail = *(model.GetOrCreate()); trail.Resize(10); - trail.SetDecisionLevel(10); + trail.EnqueueSearchDecision(Literal(+10)); csts.Resize(10); // If the decision level is not 0, this will fail. 
@@ -652,11 +631,10 @@ TEST(PbConstraintsDeathTest, AddConstraintPrecondition) { Trail& trail = *(model.GetOrCreate()); trail.Resize(10); - trail.SetDecisionLevel(1); - trail.Enqueue(Literal(+1), AssignmentType::kSearchDecision); + trail.EnqueueSearchDecision(Literal(+1)); trail.Enqueue(Literal(+2), AssignmentType::kUnitReason); - trail.SetDecisionLevel(2); - trail.Enqueue(Literal(+3), AssignmentType::kSearchDecision); + trail.EnqueueSearchDecision(Literal(+4)); // dummy. + trail.Enqueue(Literal(+3), AssignmentType::kUnitReason); csts.Resize(10); // We can't add this constraint since it is conflicting under the current @@ -665,8 +643,8 @@ TEST(PbConstraintsDeathTest, AddConstraintPrecondition) { Coefficient(2), &trail)); trail.Untrail(trail.Index() - 1); // Remove the +3. - EXPECT_EQ(trail.Index(), 2); - csts.Untrail(trail, 2); + EXPECT_EQ(trail.Index(), 3); + csts.Untrail(trail, 3); // Adding this one at a decision level of 2 will also fail because it will // propagate 3 from decision level 1. @@ -676,7 +654,7 @@ TEST(PbConstraintsDeathTest, AddConstraintPrecondition) { // However, adding the same constraint while the decision level is 1 is ok. // It will propagate -3 at the correct decision level. - trail.SetDecisionLevel(1); + trail.Untrail(trail.PrepareBacktrack(1)); EXPECT_TRUE(csts.AddConstraint(MakePb({{+1, 1}, {+2, 1}, {+3, 2}}), Coefficient(3), &trail)); EXPECT_EQ(trail.Index(), 3); diff --git a/ortools/sat/precedences_test.cc b/ortools/sat/precedences_test.cc index 2b7d5819f9b..7d64e243835 100644 --- a/ortools/sat/precedences_test.cc +++ b/ortools/sat/precedences_test.cc @@ -372,7 +372,7 @@ TEST(PrecedencesPropagatorTest, Cycles) { EXPECT_TRUE(propagator->Propagate(trail)); // Cycle of weight zero is fine. 
- trail->SetDecisionLevel(1); + trail->EnqueueSearchDecision(Literal(+7)); // dummy EXPECT_TRUE(integer_trail->Propagate(trail)); trail->Enqueue(Literal(+1), AssignmentType::kUnitReason); trail->Enqueue(Literal(+2), AssignmentType::kUnitReason); @@ -386,16 +386,15 @@ TEST(PrecedencesPropagatorTest, Cycles) { UnorderedElementsAre(Literal(-1), Literal(-3))); // Test the untrail. - trail->SetDecisionLevel(0); + trail->PrepareBacktrack(0); integer_trail->Untrail(*trail, 0); propagator->Untrail(*trail, 0); trail->Untrail(0); EXPECT_TRUE(propagator->Propagate(trail)); // Still fine here. - trail->SetDecisionLevel(1); + trail->EnqueueSearchDecision(Literal(+5)); EXPECT_TRUE(integer_trail->Propagate(trail)); - trail->Enqueue(Literal(+5), AssignmentType::kUnitReason); EXPECT_TRUE(propagator->Propagate(trail)); // But fail there with a different and longer reason. @@ -416,7 +415,8 @@ TEST(PrecedencesPropagatorTest, TrickyCycle) { IntegerTrail* integer_trail = model.GetOrCreate(); PrecedencesPropagator* propagator = model.GetOrCreate(); - trail->Resize(10); + const Literal a(model.Add(NewBooleanVariable()), true); + const Literal b(model.Add(NewBooleanVariable()), true); std::vector vars = AddVariables(integer_trail); propagator->AddPrecedenceWithVariableOffset(vars[0], vars[1], vars[2]); @@ -424,17 +424,17 @@ TEST(PrecedencesPropagatorTest, TrickyCycle) { // This will cause an infinite cycle. propagator->AddConditionalPrecedenceWithOffset(vars[3], vars[0], - IntegerValue(1), Literal(+1)); + IntegerValue(1), a); // So far so good. + trail->EnqueueSearchDecision(b); EXPECT_TRUE(propagator->Propagate(trail)); - trail->SetDecisionLevel(1); EXPECT_TRUE(integer_trail->Propagate(trail)); // Conflict. 
- trail->Enqueue(Literal(+1), AssignmentType::kUnitReason); + trail->Enqueue(a, AssignmentType::kUnitReason); EXPECT_FALSE(propagator->Propagate(trail)); - EXPECT_THAT(trail->FailingClause(), ElementsAre(Literal(-1))); + EXPECT_THAT(trail->FailingClause(), ElementsAre(a.Negated())); // Test that the code detected properly a positive cycle in the dependency // graph instead of just pushing the bounds until the upper bound is reached. diff --git a/ortools/sat/presolve_context.cc b/ortools/sat/presolve_context.cc index 344c892d1c2..19bf5ffcac3 100644 --- a/ortools/sat/presolve_context.cc +++ b/ortools/sat/presolve_context.cc @@ -2019,6 +2019,7 @@ bool PresolveContext::CanonicalizeOneObjectiveVariable(int var) { } bool PresolveContext::CanonicalizeObjective(bool simplify_domain) { + if (ModelIsUnsat()) return false; objective_proto_is_up_to_date_ = false; // We replace each entry by its affine representative. diff --git a/ortools/sat/presolve_util.cc b/ortools/sat/presolve_util.cc index a76d8d8e8c7..292519d00a7 100644 --- a/ortools/sat/presolve_util.cc +++ b/ortools/sat/presolve_util.cc @@ -18,7 +18,6 @@ #include #include #include -#include #include #include #include @@ -26,18 +25,14 @@ #include "absl/container/flat_hash_map.h" #include "absl/container/flat_hash_set.h" #include "absl/log/check.h" -#include "absl/meta/type_traits.h" +#include "absl/log/log.h" #include "absl/random/distributions.h" -#include "absl/strings/str_cat.h" -#include "absl/strings/str_format.h" -#include "absl/strings/str_join.h" #include "absl/types/span.h" #include "ortools/base/strong_vector.h" #include "ortools/sat/cp_model.pb.h" #include "ortools/sat/cp_model_utils.h" #include "ortools/sat/util.h" #include "ortools/util/bitset.h" -#include "ortools/util/logging.h" #include "ortools/util/saturated_arithmetic.h" #include "ortools/util/sorted_interval_list.h" #include "ortools/util/strong_integers.h" @@ -511,7 +506,7 @@ int64_t ActivityBoundHelper::ComputeMaxActivityInternal( return 
max_activity; } -bool ActivityBoundHelper::AppearInTriggeredAmo(int literal) { +bool ActivityBoundHelper::AppearInTriggeredAmo(int literal) const { const Index index = IndexFromLiteral(literal); if (index >= amo_indices_.size()) return false; for (const int amo : amo_indices_[index]) { @@ -582,7 +577,46 @@ bool ActivityBoundHelper::PresolveEnforcement( int ActivityBoundHelper::RemoveEnforcementThatMakesConstraintTrivial( absl::Span> boolean_terms, const Domain& other_terms, const Domain& rhs, ConstraintProto* ct) { + if (boolean_terms.empty()) return 0; tmp_set_.clear(); + triggered_amo_.clear(); + tmp_boolean_terms_in_some_amo_.clear(); + tmp_boolean_terms_in_some_amo_.reserve(boolean_terms.size()); + int num_enforcement_to_check = 0; + for (const int enf_lit : ct->enforcement_literal()) { + const Index negated_index = IndexFromLiteral(NegatedRef(enf_lit)); + if (negated_index >= amo_indices_.size()) continue; + if (amo_indices_[negated_index].empty()) continue; + triggered_amo_.insert(amo_indices_[negated_index].begin(), + amo_indices_[negated_index].end()); + ++num_enforcement_to_check; + } + int non_amo_min_activity = 0; + int non_amo_max_activity = 0; + auto log_work = [&]() { + VLOG(1) << "RemoveEnforcementThatMakesConstraintTrivial: " + "aborting because too expensive: " + << num_enforcement_to_check << " " << boolean_terms.size(); + return 0; + }; + static const int kMaxWork = 1e9; + int work = 0; + for (int i = 0; i < boolean_terms.size(); ++i) { + const int ref = boolean_terms[i].first; + const int64_t coeff = boolean_terms[i].second; + if (AppearInTriggeredAmo(ref) || AppearInTriggeredAmo(NegatedRef(ref))) { + tmp_boolean_terms_in_some_amo_.push_back(i); + } else { + if (coeff > 0) { + non_amo_max_activity += coeff; + } else { + non_amo_min_activity += coeff; + } + } + work += NumAmoForVariable(ref); + if (work > kMaxWork) return log_work(); + } + for (const int enf_lit : ct->enforcement_literal()) { const Index negated_index = 
IndexFromLiteral(NegatedRef(enf_lit)); if (negated_index >= amo_indices_.size()) continue; @@ -593,15 +627,19 @@ int ActivityBoundHelper::RemoveEnforcementThatMakesConstraintTrivial( amo_indices_[negated_index].end()); // Compute min_max activity when enf_lit is false. - int64_t min_activity = 0; - int64_t max_activity = 0; - for (const auto [ref, coeff] : boolean_terms) { + int64_t min_activity = non_amo_min_activity; + int64_t max_activity = non_amo_max_activity; + for (const int i : tmp_boolean_terms_in_some_amo_) { + const int ref = boolean_terms[i].first; + const int64_t coeff = boolean_terms[i].second; // This is not supposed to happen after PresolveEnforcement(), so we // just abort in this case. if (ref == enf_lit || ref == NegatedRef(enf_lit)) break; const bool is_true = AppearInTriggeredAmo(NegatedRef(ref)); const bool is_false = AppearInTriggeredAmo(ref); + work += NumAmoForVariable(ref); + if (work > kMaxWork) return log_work(); // Similarly, this is not supposed to happen after PresolveEnforcement(). 
if (is_true && is_false) break; diff --git a/ortools/sat/presolve_util.h b/ortools/sat/presolve_util.h index 937b986e59e..abcf1157d31 100644 --- a/ortools/sat/presolve_util.h +++ b/ortools/sat/presolve_util.h @@ -16,7 +16,6 @@ #include #include -#include #include #include @@ -25,15 +24,12 @@ #include "absl/random/bit_gen_ref.h" #include "absl/types/span.h" #include "ortools/base/strong_vector.h" -#include "ortools/base/timer.h" #include "ortools/sat/cp_model.pb.h" #include "ortools/sat/cp_model_utils.h" #include "ortools/sat/util.h" #include "ortools/util/bitset.h" -#include "ortools/util/logging.h" #include "ortools/util/sorted_interval_list.h" #include "ortools/util/strong_integers.h" -#include "ortools/util/time_limit.h" namespace operations_research { namespace sat { @@ -229,7 +225,7 @@ class ActivityBoundHelper { absl::flat_hash_set* literals_at_true); // For each enforcement literal enf, if not(enf) implies that the constraint - // is trivial, then we can just remove not(enf) from the list. + // is trivial, then we can just remove enf from the list. // // Actually, we could even "lift" such enforcement so that if it is negative // the constraint is still trivial but tighter. @@ -259,7 +255,7 @@ class ActivityBoundHelper { return Index(ref >= 0 ? 2 * ref : -2 * ref - 1); } - bool AppearInTriggeredAmo(int literal); + bool AppearInTriggeredAmo(int literal) const; int64_t ComputeActivity( bool compute_min, absl::Span> terms, @@ -298,6 +294,7 @@ class ActivityBoundHelper { absl::flat_hash_set triggered_amo_; absl::flat_hash_set tmp_set_; + std::vector tmp_boolean_terms_in_some_amo_; }; // Class to help detects clauses that differ on a single literal. diff --git a/ortools/sat/probing.cc b/ortools/sat/probing.cc index 49cecc4f94e..2b3d6d86f2e 100644 --- a/ortools/sat/probing.cc +++ b/ortools/sat/probing.cc @@ -141,7 +141,7 @@ bool Prober::ProbeOneVariableInternal(BooleanVariable b) { // MarkDescendant() + parent inspection for this. 
if (lrat_proof_handler_ != nullptr) { tmp_clause_ids_.clear(); - sat_solver_->AppendClausesFixing( + clause_manager_->AppendClauseIdsFixing( {l}, &tmp_clause_ids_, decision.Index(), &tmp_binary_clause_ids_); const ClauseId clause_id = clause_id_generator_->GetNextId(); lrat_proof_handler_->AddInferredClause( @@ -592,7 +592,8 @@ bool Prober::FixProbedDnfLiterals( first_false_literal = lit.Index(); first_false_literal_clause_id = clause_id_generator_->GetNextId(); tmp_clause_ids_.clear(); - sat_solver_->AppendClausesFixing({lit.Negated()}, &tmp_clause_ids_); + clause_manager_->AppendClauseIdsFixing({lit.Negated()}, + &tmp_clause_ids_); lrat_proof_handler_->AddInferredClause(first_false_literal_clause_id, tmp_literals_, tmp_clause_ids_); break; @@ -606,7 +607,8 @@ bool Prober::FixProbedDnfLiterals( first_false_literal = lit.Index(); first_false_literal_clause_id = clause_id_generator_->GetNextId(); tmp_clause_ids_.clear(); - sat_solver_->AppendClausesFixing(conflict_clause, &tmp_clause_ids_); + clause_manager_->AppendClauseIdsFixing(conflict_clause, + &tmp_clause_ids_); tmp_clause_ids_.push_back(conflict_id); lrat_proof_handler_->AddInferredClause(first_false_literal_clause_id, tmp_literals_, tmp_clause_ids_); @@ -640,7 +642,8 @@ bool Prober::FixProbedDnfLiterals( // TODO(user): processing the propagated literals in trail order // and reusing the previous proofs to compute new ones // could reduce the algorithmic complexity here. - sat_solver_->AppendClausesFixing({propagated_lit}, &tmp_clause_ids_); + clause_manager_->AppendClauseIdsFixing({propagated_lit}, + &tmp_clause_ids_); } // Add the inferred clause to the LratProofHandler. const ClauseId clause_id = clause_id_generator_->GetNextId(); @@ -1019,8 +1022,7 @@ bool FailedLiteralProbing::DoOneRound(ProbingOptions options) { // literals on the trail so that they do not need to be probed later. 
const int new_level = sat_solver_->CurrentDecisionLevel(); if (new_level == 0) continue; - const Literal last_decision = - sat_solver_->Decisions()[new_level - 1].literal; + const Literal last_decision = trail_.Decisions()[new_level - 1].literal; for (int i = first_new_trail_index; i < trail_.Index(); ++i) { const Literal l = trail_[i]; if (l == last_decision) continue; @@ -1114,7 +1116,7 @@ bool FailedLiteralProbing::ComputeNextDecisionInOrder( // nice for binary extraction), we could try to maximize reusability in // some way. const Literal last_decision = - sat_solver_->Decisions()[sat_solver_->CurrentDecisionLevel() - 1].literal; + trail_.Decisions()[sat_solver_->CurrentDecisionLevel() - 1].literal; // If l => last_decision, then not(last_decision) => not(l). We can thus // find the candidates for the next decision by looking at all the // implications of not(last_decision). @@ -1155,7 +1157,7 @@ bool FailedLiteralProbing::ComputeNextDecisionInOrder( bool FailedLiteralProbing::GetNextDecisionInNoParticularOrder( LiteralIndex& next_decision) { const int level = sat_solver_->CurrentDecisionLevel(); - const Literal last_decision = sat_solver_->Decisions()[level - 1].literal; + const Literal last_decision = trail_.Decisions()[level - 1].literal; const absl::Span list = implication_graph_->Implications(last_decision.Negated()); @@ -1213,8 +1215,8 @@ bool FailedLiteralProbing::EnqueueDecisionAndBackjumpOnConflict( absl::Span conflict_clause) { if (fixed_decision_unit_id != kNoClauseId) return; tmp_clause_ids_.clear(); - sat_solver_->AppendClausesFixing(conflict_clause, &tmp_clause_ids_, - next_decision); + clause_manager_->AppendClauseIdsFixing(conflict_clause, &tmp_clause_ids_, + next_decision); tmp_clause_ids_.push_back(conflict_id); fixed_decision_unit_id = clause_id_generator_->GetNextId(); lrat_proof_handler_->AddInferredClause(fixed_decision_unit_id, @@ -1279,7 +1281,7 @@ bool FailedLiteralProbing::EnqueueDecisionAndBackjumpOnConflict( // no reason? 
it will be backtracked over, but we will still lazily fix // it later. if (sat_solver_->CurrentDecisionLevel() != 0 || - assignment_.LiteralIsFalse(Literal(next_decision))) { + !assignment_.LiteralIsFalse(Literal(next_decision))) { to_fix_.push_back(Literal(next_decision).Negated()); if (lrat_proof_handler_ != nullptr) { to_fix_unit_id_.push_back(fixed_decision_unit_id); @@ -1323,7 +1325,8 @@ void FailedLiteralProbing::MaybeExtractImplication(const Literal last_decision, if (lrat_proof_handler_ != nullptr) { clause_id = clause_id_generator_->GetNextId(); tmp_clause_ids_.clear(); - sat_solver_->AppendClausesFixing({l}, &tmp_clause_ids_, last_decision); + clause_manager_->AppendClauseIdsFixing({l}, &tmp_clause_ids_, + last_decision); lrat_proof_handler_->AddInferredClause( clause_id, {last_decision.Negated(), l}, tmp_clause_ids_); } @@ -1420,8 +1423,8 @@ void FailedLiteralProbing::AddFailedLiteralToFix(const Literal literal) { if (lrat_proof_handler_ == nullptr) return; tmp_clause_ids_.clear(); - sat_solver_->AppendClausesFixing({literal.Negated()}, &tmp_clause_ids_, - literal); + clause_manager_->AppendClauseIdsFixing({literal.Negated()}, &tmp_clause_ids_, + literal); const ClauseId unit_id = clause_id_generator_->GetNextId(); lrat_proof_handler_->AddInferredClause(unit_id, {literal.Negated()}, tmp_clause_ids_); diff --git a/ortools/sat/recordio.cc b/ortools/sat/recordio.cc new file mode 100644 index 00000000000..8b0c79c48f0 --- /dev/null +++ b/ortools/sat/recordio.cc @@ -0,0 +1,50 @@ +// Copyright 2010-2025 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +#include "ortools/sat/recordio.h" + +#include +#include +#include + +#include "google/protobuf/message_lite.h" + +namespace operations_research { +namespace sat { + +RecordReader::RecordReader(std::istream* istream) + : istream_(istream), coded_istream_(&istream_) {} + +bool RecordReader::ReadRecord(google::protobuf::MessageLite* record) { + uint32_t size; + if (!coded_istream_.ReadVarint32(&size)) return false; + auto limit = coded_istream_.PushLimit(size); + if (!record->ParseFromCodedStream(&coded_istream_)) return false; + coded_istream_.PopLimit(limit); + return true; +} + +void RecordReader::Close() {} + +RecordWriter::RecordWriter(std::ostream* ostream) + : ostream_(ostream), coded_ostream_(&ostream_) {} + +bool RecordWriter::WriteRecord(const google::protobuf::MessageLite& record) { + coded_ostream_.WriteVarint32(record.ByteSizeLong()); + return record.SerializeToCodedStream(&coded_ostream_); +} + +void RecordWriter::Close() { coded_ostream_.Trim(); } + +} // namespace sat +} // namespace operations_research diff --git a/ortools/sat/recordio.h b/ortools/sat/recordio.h new file mode 100644 index 00000000000..f3b7e6e197d --- /dev/null +++ b/ortools/sat/recordio.h @@ -0,0 +1,63 @@ +// Copyright 2010-2025 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#ifndef ORTOOLS_SAT_RECORDIO_H_ +#define ORTOOLS_SAT_RECORDIO_H_ + +#include +#include + +#include "google/protobuf/io/coded_stream.h" +#include "google/protobuf/io/zero_copy_stream_impl.h" +#include "google/protobuf/message_lite.h" + +namespace operations_research { +namespace sat { + +// Reads a sequence of serialized protos from a stream, written by a +// RecordWriter. +class RecordReader { + public: + explicit RecordReader(std::istream* istream); + // This class is neither copyable nor movable. + RecordReader(const RecordReader&) = delete; + RecordReader& operator=(const RecordReader&) = delete; + + bool ReadRecord(google::protobuf::MessageLite* record); + void Close(); + + private: + google::protobuf::io::IstreamInputStream istream_; + google::protobuf::io::CodedInputStream coded_istream_; +}; + +// Writes a sequence of serialized protos to a stream. +class RecordWriter { + public: + explicit RecordWriter(std::ostream* ostream); + // This class is neither copyable nor movable. + RecordWriter(const RecordWriter&) = delete; + RecordWriter& operator=(const RecordWriter&) = delete; + + bool WriteRecord(const google::protobuf::MessageLite& record); + void Close(); + + private: + google::protobuf::io::OstreamOutputStream ostream_; + google::protobuf::io::CodedOutputStream coded_ostream_; +}; + +} // namespace sat +} // namespace operations_research + +#endif // ORTOOLS_SAT_RECORDIO_H_ diff --git a/ortools/sat/recordio_test.cc b/ortools/sat/recordio_test.cc new file mode 100644 index 00000000000..9da1461da78 --- /dev/null +++ b/ortools/sat/recordio_test.cc @@ -0,0 +1,66 @@ +// Copyright 2010-2025 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "ortools/sat/recordio.h" + +#include +#include +#include +#include + +#include "gtest/gtest.h" +#include "ortools/base/gmock.h" +#include "ortools/base/path.h" +#include "ortools/sat/cp_model.pb.h" + +namespace operations_research { +namespace sat { +namespace { + +using ::testing::EqualsProto; + +TEST(RecordIoTest, WriteAndReadBack) { + std::string filename = + file::JoinPath(testing::TempDir(), "recordio_test.bin"); + + std::vector cp_models; + std::ofstream output_stream(filename, std::ios::binary); + RecordWriter writer(&output_stream); + for (int i = 0; i < 128; ++i) { + CpModelProto cp_model; + for (int j = 0; j < i % 11; ++j) { + cp_model.add_variables()->add_domain(j); + } + for (int j = 0; j < i % 17; ++j) { + cp_model.add_constraints()->add_enforcement_literal(j); + } + cp_models.push_back(cp_model); + EXPECT_TRUE(writer.WriteRecord(cp_model)); + } + writer.Close(); + output_stream.close(); + + std::ifstream input_stream(filename, std::ios::binary); + RecordReader reader(&input_stream); + CpModelProto cp_model; + int index = 0; + while (reader.ReadRecord(&cp_model)) { + EXPECT_THAT(cp_model, EqualsProto(cp_models[index++])); + } + reader.Close(); + input_stream.close(); +} + +} // namespace +} // namespace sat +} // namespace operations_research diff --git a/ortools/sat/sat_base.h b/ortools/sat/sat_base.h index 423005dd3bd..1ea2bed4d3a 100644 --- a/ortools/sat/sat_base.h +++ b/ortools/sat/sat_base.h @@ -293,6 +293,14 @@ struct AssignmentType { static constexpr int kFirstFreePropagationId = 4; }; +// A Boolean "decision" taken 
by the solver. +struct LiteralWithTrailIndex { + LiteralWithTrailIndex() = default; + LiteralWithTrailIndex(Literal l, int i) : literal(l), trail_index(i) {} + Literal literal; + int trail_index = 0; +}; + // The solver trail stores the assignment made by the solver in order. // This class is responsible for maintaining the assignment of each variable // and the information of each assignment. @@ -389,11 +397,37 @@ class Trail { &assignment_, &unit_clause_id_); } - // Specific Enqueue() version for the search decision. + // Specific Enqueue() for search decisions. void EnqueueSearchDecision(Literal true_literal) { + decisions_[current_decision_level_] = + LiteralWithTrailIndex(true_literal, Index()); + current_info_.level = ++current_decision_level_; Enqueue(true_literal, AssignmentType::kSearchDecision); } + // Specific Enqueue() for assumptions. + void EnqueueAssumption(Literal assumptions) { + if (current_decision_level_ == 0) { + // Special decision. This should never be accessed. + decisions_[0] = LiteralWithTrailIndex(Literal(), Index()); + current_info_.level = ++current_decision_level_; + } + CHECK_EQ(current_decision_level_, 1); + Enqueue(assumptions, AssignmentType::kSearchDecision); + } + + void OverrideDecision(int level, Literal literal) { + decisions_[level].literal = literal; + } + + // Allows to recover the list of decisions. + // Note that the Decisions() vector is always of size NumVariables(), and that + // only the first CurrentDecisionLevel() entries have a meaning. The decision + // made at level l is Decisions()[l - 1] (there are no decisions at level 0). + const std::vector& Decisions() const { + return decisions_; + } + // Specific Enqueue() version for a fixed variable. 
 void EnqueueWithUnitReason(Literal true_literal) {
 EnqueueAtLevel(true_literal, AssignmentType::kUnitReason, 0);
@@ -515,6 +549,17 @@ class Trail {
 old_type_[var] = propagator_id;
 }
 
+ // On backtrack we should always do:
+ //
+ // const int target_trail_index = PrepareBacktrack(level);
+ // ...
+ // Untrail(target_trail_index);
+ int PrepareBacktrack(int level) {
+ current_decision_level_ = level;
+ current_info_.level = level;
+ return decisions_[level].trail_index;
+ }
+
 // Reverts the trail and underlying assignment to the given target trail
 // index. Note that we do not touch the assignment info.
 void Untrail(int target_trail_index) {
@@ -529,8 +574,6 @@ class Trail {
 }
 }
 
- // Changes the decision level used by the next Enqueue().
- void SetDecisionLevel(int level) { current_info_.level = level; }
 int CurrentDecisionLevel() const { return current_info_.level; }
 
 // Generic interface to set the current failing clause.
@@ -678,6 +721,13 @@ class Trail {
 nullptr;
 int last_num_reimplication_ = 0;
+
+ // The stack of decisions taken by the solver. They are stored in [0,
+ // current_decision_level_). The vector is of size num_variables_ so it can
+ // store all the decisions. This is done this way because in some situations we
+ // need to remember the previously taken decisions after a backtrack.
+ int current_decision_level_ = 0;
+ std::vector decisions_;
 };
 
 // Base class for all the SAT constraints.
@@ -809,6 +859,11 @@ inline void Trail::Resize(int num_variables) {
 // dynamically.
 old_type_.resize(num_variables);
 reference_var_with_same_reason_as_.resize(num_variables);
+
+ // The +1 is a bit tricky, it is because in
+ // EnqueueDecisionAndBacktrackOnConflict() we artificially enqueue the
+ // decision before checking if it is not already assigned.
+ decisions_.resize(num_variables + 1);
 }
 
 inline void Trail::RegisterPropagator(SatPropagator* propagator) {
diff --git a/ortools/sat/sat_parameters.proto b/ortools/sat/sat_parameters.proto
index 90871777e35..587d51a164c 100644
--- a/ortools/sat/sat_parameters.proto
+++ b/ortools/sat/sat_parameters.proto
@@ -24,7 +24,7 @@ option java_multiple_files = true;
 // Contains the definitions for all the sat algorithm parameters and their
 // default values.
 //
-// NEXT TAG: 351
+// NEXT TAG: 354
 message SatParameters {
 // In some context, like in a portfolio of search, it makes sense to name a
 // given parameters set for logging purpose.
@@ -138,6 +138,16 @@ message SatParameters {
 // from the problem.
 optional bool subsumption_during_conflict_analysis = 56 [default = true];
 
+ // It is possible that "intermediate" clauses during conflict resolution
+ // subsume some of the clauses that propagated. This is quite cheap to detect
+ // and results in more subsumption/strengthening of clauses.
+ optional bool extra_subsumption_during_conflict_analysis = 351
+ [default = true];
+
+ // Try even more subsumption options during conflict analysis.
+ optional bool decision_subsumption_during_conflict_analysis = 353
+ [default = true];
+
 // If >=0, each time we have a conflict, we try to subsume the last n learned
 // clause with it.
 optional int32 eagerly_subsume_last_n_conflicts = 343 [default = 4];
@@ -785,15 +795,32 @@ message SatParameters {
 // ==========================================================================
 
 // If true, inferred clauses are checked with an LRAT checker as they are
- // learned. As of November 2025, this only works with pure SAT problems, with
+ // learned, in presolve (reduced to trivial simplifications if
+ // cp_model_presolve is false), and in each worker. As of November 2025, this
+ // only works with pure SAT problems, with
 // - cp_model_presolve = false,
 // - linearization_level <= 1,
 // - symmetry_level <= 1,
 // - shared_tree_num_workers = 0.
optional bool check_lrat_proof = 344 [default = false]; - // TODO(user): add and implement an output_lrat_proof field. - reserved 345; + // If true, and if output_lrat_proof is true and the problem is UNSAT, check + // that the merged proof file is valid, i.e., that clause sharing between + // workers is correct. This checks each inferred clause, so you might want to + // disable check_lrat_proof to avoid redundant work. As of November 2025, this + // only works for pure SAT problems, with num_workers = 1. + optional bool check_merged_lrat_proof = 352 [default = false]; + + // If true, an LRAT proof that all the clauses inferred by the solver are + // valid is output to several files (one for presolve -- reduced to trivial + // simplifications if cp_model_presolve is false, one per worker, and one for + // the merged proof). As of November 2025, this only works for pure SAT + // problems, with + // - cp_model_presolve = false, + // - linearization_level <= 1, + // - symmetry_level <= 1, + // - shared_tree_num_workers = 0. + optional bool output_lrat_proof = 345 [default = false]; // If true, and if the problem is UNSAT, a DRAT proof of this UNSAT property // is checked after the solver has finished. As of November 2025, this only diff --git a/ortools/sat/sat_solver.cc b/ortools/sat/sat_solver.cc index 08e2d8f2e56..78f4ac9e1b8 100644 --- a/ortools/sat/sat_solver.cc +++ b/ortools/sat/sat_solver.cc @@ -99,11 +99,6 @@ void SatSolver::SetNumVariables(int num_variables) { decision_policy_->IncreaseNumVariables(num_variables); pb_constraints_->Resize(num_variables); same_reason_identifier_.Resize(num_variables); - - // The +1 is a bit tricky, it is because in - // EnqueueDecisionAndBacktrackOnConflict() we artificially enqueue the - // decision before checking if it is not already assigned. 
- decisions_.resize(num_variables + 1); } int64_t SatSolver::num_branches() const { return counters_.num_branches; } @@ -226,11 +221,29 @@ bool SatSolver::AddProblemClause(absl::Span literals, // Filter already assigned literals. Note that we also remap literal in case // we discovered equivalence later in the search. tmp_literals_.clear(); - for (Literal l : literals) { - l = binary_implication_graph_->RepresentativeOf(l); - if (trail_->Assignment().LiteralIsTrue(l)) return true; - if (trail_->Assignment().LiteralIsFalse(l)) continue; - tmp_literals_.push_back(l); + if (lrat_proof_handler_ != nullptr) { + tmp_clause_ids_.clear(); + for (Literal l : literals) { + const Literal rep = binary_implication_graph_->RepresentativeOf(l); + if (trail_->Assignment().LiteralIsTrue(rep)) return true; + if (trail_->Assignment().LiteralIsFalse(rep)) { + tmp_clause_ids_.push_back(trail_->GetUnitClauseId(rep.Variable())); + } + if (rep != l) { + tmp_clause_ids_.push_back( + binary_implication_graph_->GetClauseId(l.Negated(), rep)); + } + if (!trail_->Assignment().LiteralIsFalse(rep)) { + tmp_literals_.push_back(rep); + } + } + } else { + for (Literal l : literals) { + l = binary_implication_graph_->RepresentativeOf(l); + if (trail_->Assignment().LiteralIsTrue(l)) return true; + if (trail_->Assignment().LiteralIsFalse(l)) continue; + tmp_literals_.push_back(l); + } } // A clause with l and not(l) is trivially true. @@ -240,27 +253,38 @@ bool SatSolver::AddProblemClause(absl::Span literals, return true; } } + ClauseId id = kNoClauseId; + if (lrat_proof_handler_ != nullptr) { + // Add the original problem clause. + id = clause_id_generator_->GetNextId(); + if (shared) { + lrat_proof_handler_->AddImportedClause(id, literals); + } else { + lrat_proof_handler_->AddProblemClause(id, literals); + } + // If the filtered clause is different, add it (with proof), and delete the + // original one. 
+ if (!tmp_clause_ids_.empty()) { + tmp_clause_ids_.push_back(id); + ClauseId new_id = clause_id_generator_->GetNextId(); + lrat_proof_handler_->AddInferredClause(new_id, tmp_literals_, + tmp_clause_ids_); + lrat_proof_handler_->DeleteClause(id, literals); + id = new_id; + } + } - return AddProblemClauseInternal(tmp_literals_, shared); + return AddProblemClauseInternal(id, tmp_literals_); } -bool SatSolver::AddProblemClauseInternal(absl::Span literals, - bool shared) { +bool SatSolver::AddProblemClauseInternal(ClauseId id, + absl::Span literals) { SCOPED_TIME_STAT(&stats_); if (DEBUG_MODE && CurrentDecisionLevel() == 0) { for (const Literal l : literals) { CHECK(!trail_->Assignment().LiteralIsAssigned(l)); } } - ClauseId id = kNoClauseId; - if (lrat_proof_handler_ != nullptr) { - id = clause_id_generator_->GetNextId(); - if (shared) { - lrat_proof_handler_->AddSharedClause(id, literals); - } else { - lrat_proof_handler_->AddProblemClause(id, literals); - } - } if (literals.empty()) return SetModelUnsat(); @@ -310,7 +334,7 @@ bool SatSolver::AddLinearConstraintInternal( for (const Literal& literal : enforcement_literals) { tmp_literals_.push_back(literal.Negated()); } - return AddProblemClauseInternal(tmp_literals_); + return AddProblemClauseInternal(kNoClauseId, tmp_literals_); } } if (rhs >= max_value) return true; // Always satisfied constraint. @@ -328,7 +352,7 @@ bool SatSolver::AddLinearConstraintInternal( for (const LiteralWithCoeff& term : cst) { tmp_literals_.push_back(term.literal.Negated()); } - return AddProblemClauseInternal(tmp_literals_); + return AddProblemClauseInternal(kNoClauseId, tmp_literals_); } else { std::vector literals; for (const Literal& literal : enforcement_literals) { @@ -462,7 +486,15 @@ int SatSolver::AddLearnedClauseAndEnqueueUnitPropagation( int min_lbd_of_subsumed_clauses) { SCOPED_TIME_STAT(&stats_); + // Note that we might learn more than one conflict per "failure" actually. + // TODO(user): this should be called num_conflicts. 
+ ++counters_.num_failures; + if (literals.size() == 1) { + // Corner case where we "learn" more than one conflict. + if (Assignment().LiteralIsTrue(literals[0])) return 1; + CHECK(!Assignment().LiteralIsFalse(literals[0])); + if (!trail_->ChronologicalBacktrackingEnabled()) { // A length 1 clause fix a literal for all the search. // ComputeBacktrackLevel() should have returned 0. @@ -474,7 +506,7 @@ int SatSolver::AddLearnedClauseAndEnqueueUnitPropagation( if (literals.size() == 2) { if (track_binary_clauses_) { - // This clause MUST be knew, otherwise something is wrong. + // This clause MUST be new, otherwise something is wrong. CHECK(binary_clauses_.Add(BinaryClause(literals[0], literals[1]))); } CHECK(binary_implication_graph_->AddBinaryClause(clause_id, literals[0], @@ -525,28 +557,6 @@ UpperBoundedLinearConstraint* SatSolver::ReasonPbConstraintOrNull( return nullptr; } -ClauseId SatSolver::ReasonClauseId(Literal literal) const { - const BooleanVariable var = literal.Variable(); - DCHECK(trail_->Assignment().VariableIsAssigned(var)); - const int assignment_type = trail_->AssignmentType(var); - const int trail_index = trail_->Info(var).trail_index; - if (assignment_type == AssignmentType::kUnitReason) { - return trail_->GetUnitClauseId(var); - } else if (assignment_type == binary_implication_graph_->PropagatorId()) { - absl::Span reason = - binary_implication_graph_->Reason(*trail_, trail_index, - /*conflict_id=*/-1); - CHECK_EQ(reason.size(), 1); - return binary_implication_graph_->GetClauseId(literal, reason[0]); - } else if (assignment_type == clauses_propagator_->PropagatorId()) { - const SatClause* reason = clauses_propagator_->ReasonClause(trail_index); - if (reason != nullptr) { - return clauses_propagator_->GetClauseId(reason); - } - } - return kNoClauseId; -} - void SatSolver::SaveDebugAssignment() { debug_assignment_.Resize(num_variables_.value()); for (BooleanVariable i(0); i < num_variables_; ++i) { @@ -617,7 +627,7 @@ int 
SatSolver::EnqueueDecisionAndBackjumpOnConflict( // We should never enqueue before the assumptions_. if (DEBUG_MODE && !assumptions_.empty()) { - CHECK_GE(current_decision_level_, assumption_level_); + CHECK_GE(trail_->CurrentDecisionLevel(), assumption_level_); } EnqueueNewDecision(true_literal); @@ -630,11 +640,11 @@ bool SatSolver::FinishPropagation(std::optional callback) { if (model_is_unsat_) return false; int num_loop = 0; while (true) { - const int old_decision_level = current_decision_level_; + const int old_decision_level = trail_->CurrentDecisionLevel(); if (!Propagate()) { ProcessCurrentConflict(callback); if (model_is_unsat_) return false; - if (current_decision_level_ == old_decision_level) { + if (trail_->CurrentDecisionLevel() == old_decision_level) { CHECK(!assumptions_.empty()); return false; } @@ -687,12 +697,8 @@ bool SatSolver::ReapplyAssumptionsIfNeeded() { while (CurrentDecisionLevel() == 0 && !assumptions_.empty()) { // When assumptions_ is not empty, the first "decision" actually contains // multiple ones, and we should never use its literal. - CHECK_EQ(current_decision_level_, 0); + CHECK_EQ(trail_->CurrentDecisionLevel(), 0); last_decision_or_backtrack_trail_index_ = trail_->Index(); - decisions_[0] = Decision(trail_->Index(), Literal()); - - ++current_decision_level_; - trail_->SetDecisionLevel(current_decision_level_); // We enqueue all assumptions at once at decision level 1. int num_decisions = 0; @@ -701,21 +707,14 @@ bool SatSolver::ReapplyAssumptionsIfNeeded() { if (Assignment().LiteralIsFalse(lit)) { // See GetLastIncompatibleDecisions(). *trail_->MutableConflict() = {lit.Negated(), lit}; - if (num_decisions == 0) { - // This is needed to avoid an empty level that cause some CHECK fail. - current_decision_level_ = 0; - trail_->SetDecisionLevel(0); - } return false; } ++num_decisions; - trail_->EnqueueSearchDecision(lit); + trail_->EnqueueAssumption(lit); } // Corner case: all assumptions are fixed at level zero, we ignore them. 
if (num_decisions == 0) { - current_decision_level_ = 0; - trail_->SetDecisionLevel(0); return ResetToLevelZero(); } @@ -739,7 +738,6 @@ void SatSolver::ProcessCurrentConflict( SCOPED_TIME_STAT(&stats_); if (model_is_unsat_) return; - ++counters_.num_failures; const int conflict_trail_index = trail_->Index(); // A conflict occurred, compute a nice reason for this failure. @@ -747,7 +745,14 @@ void SatSolver::ProcessCurrentConflict( // If the trail as a registered "higher level conflict resolution", pick // this one instead. learned_conflict_.clear(); + subsumed_clauses_.clear(); same_reason_identifier_.Clear(); + + subsuming_lrat_index_.clear(); + subsuming_clauses_.clear(); + subsuming_groups_.clear(); + subsumed_clauses_.clear(); + if (trail_->GetConflictResolutionFunction() == nullptr) { const int max_trail_index = ComputeMaxTrailIndex(trail_->FailingClause()); if (!assumptions_.empty() && !trail_->FailingClause().empty()) { @@ -766,8 +771,7 @@ void SatSolver::ProcessCurrentConflict( } ComputeFirstUIPConflict(max_trail_index, &learned_conflict_, - &reason_used_to_infer_the_conflict_, - &subsumed_clauses_); + &reason_used_to_infer_the_conflict_); } else { trail_->GetConflictResolutionFunction()(&learned_conflict_, &reason_used_to_infer_the_conflict_, @@ -796,7 +800,10 @@ void SatSolver::ProcessCurrentConflict( std::vector* clause_ids_for_1iup = &tmp_clause_ids_for_1uip_; if (lrat_proof_handler_ != nullptr) { - FillLratProofForLearnedConflict(clause_ids_for_1iup); + clause_ids_for_1iup->clear(); + AppendLratProofFromReasons(reason_used_to_infer_the_conflict_, + clause_ids_for_1iup); + AppendLratProofForFailingClause(clause_ids_for_1iup); } // An empty conflict means that the problem is UNSAT. @@ -1025,7 +1032,8 @@ void SatSolver::ProcessCurrentConflict( // Note that this should happen after the new_conflict "proof", but before // we backtrack and add the new conflict to the clause_propagator_. 
const auto [is_redundant, min_lbd_of_subsumed_clauses] = - SubsumptionsInConflictResolution(learned_conflict_, + SubsumptionsInConflictResolution(learned_conflict_clause_id, + learned_conflict_, reason_used_to_infer_the_conflict_); // Backtrack and add the reason to the set of learned clause. @@ -1043,6 +1051,39 @@ void SatSolver::ProcessCurrentConflict( Backtrack(backtrack_level); DCHECK(ClauseIsValidUnderDebugAssignment(learned_conflict_)); + // Tricky: in case of propagation not at the right level we might need to + // backjump further. + for (const auto& [id, is_redundant, min_lbd, clause] : delayed_to_add_) { + if (clause.empty()) return (void)SetModelUnsat(); + + int num_false = 0; + for (const Literal l : clause) { + if (Assignment().LiteralIsFalse(l)) ++num_false; + } + if (num_false == clause.size()) { + int max_level = 0; + for (const Literal l : clause) { + const int level = AssignmentLevel(l.Variable()); + max_level = std::max(max_level, level); + } + int propag_level = 0; + for (const Literal l : clause) { + const int level = AssignmentLevel(l.Variable()); + if (level < max_level) { + propag_level = std::max(propag_level, level); + } + } + Backtrack(propag_level); + } + } + + // Add any delayed clause before the final conflict. + for (const auto& [id, is_redundant, min_lbd, clause] : delayed_to_add_) { + DCHECK((lrat_proof_handler_ == nullptr) || (id != kNoClauseId)); + AddLearnedClauseAndEnqueueUnitPropagation(id, clause, is_redundant, + min_lbd); + } + // Create and attach the new learned clause. 
const int conflict_lbd = AddLearnedClauseAndEnqueueUnitPropagation( learned_conflict_clause_id, learned_conflict_, is_redundant, @@ -1067,42 +1108,174 @@ bool ClauseSubsumption(absl::Span a, SatClause* b) { } // namespace std::pair SatSolver::SubsumptionsInConflictResolution( - absl::Span conflict, absl::Span reason_used) { + ClauseId learned_conflict_id, absl::Span conflict, + absl::Span reason_used) { + CHECK_NE(CurrentDecisionLevel(), 0); + delayed_to_add_.clear(); + + // This is used to see if the learned conflict subsumes some clauses. // Note that conflict is not yet in the clauses_propagator_. tmp_literal_set_.Resize(Literal(num_variables_, true).Index()); for (const Literal l : conflict) tmp_literal_set_.Set(l); - - bool is_redundant = true; - int min_lbd_of_subsumed_clauses = std::numeric_limits::max(); const auto in_conflict = tmp_literal_set_.const_view(); - const auto maybe_subsume = - [&is_redundant, &min_lbd_of_subsumed_clauses, in_conflict, conflict, - this](SatClause* clause, DeletionSourceForStat source) { - if (clause == nullptr || clause->size() < conflict.size()) return; - const int limit = clause->size() - conflict.size(); - int missing = 0; - for (const Literal l : clause->AsSpan()) { - if (!in_conflict[l]) { - ++missing; - if (missing > limit) break; - } + + // This is used to see if the set of decision that implies the conflict + // (further resolution) subsumes some clauses. + // + // TODO(user): Also consider the ALL UIP conflict ? I know other solver do + // learn this version sometimes, or anything in-between. See the concept of + // conflict "shrinking" in the literature. + std::vector subsumed_by_decisions; + bool decision_is_redundant = true; + int decision_min_lbd = std::numeric_limits::max(); + int decisions_clause_size = 0; + if (assumption_level_ == 0 && + parameters_->decision_subsumption_during_conflict_analysis()) { + if (/* DISABLES CODE */ (false)) { + // This is shorter but more costly... 
Note that if any subsumption occur, + // this is the one we will use. + for (const Literal l : GetDecisionsFixing(conflict)) { + ++decisions_clause_size; + tmp_decision_set_.Set(l.Negated()); + } + } else { + // Add all the decision up to max_non_decision_level + the one after that + // from the conflict. + tmp_decision_set_.Resize(Literal(num_variables_, true).Index()); + int max_non_decision_level = 0; + for (const Literal l : conflict) { + const auto& info = trail_->Info(l.Variable()); + if (info.type != AssignmentType::kSearchDecision) { + max_non_decision_level = + std::max(max_non_decision_level, info.level); } + } - // This algorithm relies of never having duplicate literals in a clause. - // TODO(user): double check that this is always the case. - if (missing <= limit) { - ++counters_.num_subsumed_clauses; - DCHECK(ClauseSubsumption(conflict, clause)); - if (!clauses_propagator_->IsRemovable(clause)) { - is_redundant = false; - } else { - min_lbd_of_subsumed_clauses = - std::min(min_lbd_of_subsumed_clauses, - clauses_propagator_->LbdOrZeroIfNotRemovable(clause)); - } - clauses_propagator_->LazyDelete(clause, source); + for (int i = 0; i < max_non_decision_level; ++i) { + // To act like conflict. + const Literal l = Decisions()[i].literal.Negated(); + ++decisions_clause_size; + tmp_decision_set_.Set(l); + } + for (const Literal l : conflict) { + const auto& info = trail_->Info(l.Variable()); + if (info.type == AssignmentType::kSearchDecision && + !tmp_decision_set_[l]) { + ++decisions_clause_size; + tmp_decision_set_.Set(l); } - }; + } + } + } + + // Deal with subsuming_groups_. + // We need to infer the intermediary clause before we subsume them. + // + // TODO(user): We can use the intermediary step to shorten the conflict proof. 
+  ClauseId last_clause_id = kNoClauseId;
+  int reason_index = 0;
+  for (int i = 0; i < subsuming_groups_.size(); ++i) {
+    // If the conflict subsumes subsumed_clauses_[i], it will subsume all
+    the other clauses too, so that will be covered below, and we don't need
+    to create that intermediary at all.
+    const int limit = subsuming_clauses_[i].size() - conflict.size();
+    int missing = 0;
+    for (const Literal l : subsuming_clauses_[i]) {
+      if (!in_conflict[l]) {
+        ++missing;
+        if (missing > limit) break;
+      }
+    }
+
+    // Intermediary conflict is subsumed, skip.
+    if (missing <= limit) continue;
+
+    // Intermediary proof to reach this step in the conflict resolution.
+    ClauseId new_id = kNoClauseId;
+    if (lrat_proof_handler_ != nullptr) {
+      tmp_clause_ids_.clear();
+      is_marked_.ClearAndResize(num_variables_);  // Make sure not used anymore
+
+      AppendLratProofFromReasons(
+          reason_used.subspan(reason_index,
+                              subsuming_lrat_index_[i] - reason_index),
+          &tmp_clause_ids_);
+      if (last_clause_id == kNoClauseId) {
+        AppendLratProofForFailingClause(&tmp_clause_ids_);
+      } else {
+        tmp_clause_ids_.push_back(last_clause_id);
+      }
+
+      new_id = clause_id_generator_->GetNextId();
+      last_clause_id = new_id;
+      reason_index = subsuming_lrat_index_[i];
+      lrat_proof_handler_->AddInferredClause(new_id, subsuming_clauses_[i],
+                                             tmp_clause_ids_);
+    }
+
+    // Then this clause subsumes all entries in the group.
+    bool new_clause_is_redundant = true;
+    int new_clause_min_lbd = std::numeric_limits::max();
+    for (SatClause* clause : subsuming_groups_[i]) {
+      CHECK_NE(clause->size(), 0);  // Not subsumed yet.
+ if (clauses_propagator_->IsRemovable(clause)) { + new_clause_min_lbd = + std::min(new_clause_min_lbd, + clauses_propagator_->LbdOrZeroIfNotRemovable(clause)); + } else { + new_clause_is_redundant = false; + } + DCHECK(ClauseSubsumption(subsuming_clauses_[i], clause)); + clauses_propagator_->LazyDelete( + clause, DeletionSourceForStat::SUBSUMPTION_CONFLICT_EXTRA); + } + + // We can only add them after backtracking, since these are currently + // conflict. + delayed_to_add_.push_back( + {new_id, new_clause_is_redundant, new_clause_min_lbd, + std::vector(subsuming_clauses_[i].begin(), + subsuming_clauses_[i].end())}); + } + + bool is_redundant = true; + int min_lbd_of_subsumed_clauses = std::numeric_limits::max(); + const auto in_decision = tmp_decision_set_.const_view(); + const auto maybe_subsume = [&is_redundant, &min_lbd_of_subsumed_clauses, + in_conflict, conflict, in_decision, + decisions_clause_size, &subsumed_by_decisions, + &decision_is_redundant, &decision_min_lbd, + this](SatClause* clause, + DeletionSourceForStat source) { + if (clause == nullptr || clause->empty()) return; + + if (IsStrictlyIncluded(in_conflict, conflict.size(), clause->AsSpan())) { + ++counters_.num_subsumed_clauses; + DCHECK(ClauseSubsumption(conflict, clause)); + if (clauses_propagator_->IsRemovable(clause)) { + min_lbd_of_subsumed_clauses = + std::min(min_lbd_of_subsumed_clauses, + clauses_propagator_->LbdOrZeroIfNotRemovable(clause)); + } else { + is_redundant = false; + } + clauses_propagator_->LazyDelete(clause, source); + return; + } + + if (decisions_clause_size > 0 && + IsStrictlyIncluded(in_decision, decisions_clause_size, + clause->AsSpan())) { + if (clauses_propagator_->IsRemovable(clause)) { + decision_min_lbd = + std::min(decision_min_lbd, + clauses_propagator_->LbdOrZeroIfNotRemovable(clause)); + } else { + decision_is_redundant = false; + } + subsumed_by_decisions.push_back(clause); + } + }; // This is faster than conflict analysis, and stronger than the old assumption 
// mecanism we had. This is because once the conflict is minimized, we might
@@ -1112,7 +1285,9 @@ std::pair SatSolver::SubsumptionsInConflictResolution(
   // conflict. See ComputeFirstUIPConflict().
   if (parameters_->subsumption_during_conflict_analysis()) {
     for (const Literal l : reason_used) {
-      maybe_subsume(ReasonClauseOrNull(l.Variable()),
+      // Tricky: these clauses might have been deleted by the subsumption above.
+      // So ReasonClauseOrNull() must handle that case.
+      maybe_subsume(clauses_propagator_->ReasonClauseOrNull(l.Variable()),
                     DeletionSourceForStat::SUBSUMPTION_CONFLICT);
     }
   }
@@ -1124,30 +1299,54 @@ std::pair SatSolver::SubsumptionsInConflictResolution(
     }
   }

+  if (!subsumed_by_decisions.empty()) {
+    // This one should always be a subset of the one we tried.
+    std::vector decision_clause;
+    for (const Literal l : GetDecisionsFixing(conflict)) {
+      DCHECK(in_decision[l.Negated()]);
+      decision_clause.push_back(l.Negated());
+    }
+
+    // Construct the proof.
+    ClauseId new_clause_id = kNoClauseId;
+    if (lrat_proof_handler_ != nullptr) {
+      tmp_clause_ids_.clear();
+      clauses_propagator_->AppendClauseIdsFixing(conflict, &tmp_clause_ids_);
+      tmp_clause_ids_.push_back(learned_conflict_id);
+
+      new_clause_id = clause_id_generator_->GetNextId();
+      lrat_proof_handler_->AddInferredClause(new_clause_id, decision_clause,
+                                             tmp_clause_ids_);
+    }
+
+    // Remove subsumed clauses.
+    for (SatClause* clause : subsumed_by_decisions) {
+      if (clause->empty()) continue;
+      DCHECK(ClauseSubsumption(decision_clause, clause));
+      clauses_propagator_->LazyDelete(
+          clause, DeletionSourceForStat::SUBSUMPTION_DECISIONS);
+    }
+
+    // Also learn the "decision" conflict.
+    delayed_to_add_.push_back({new_clause_id, decision_is_redundant,
+                               decision_min_lbd, decision_clause});
+  }
+
   // Sparse clear.
for (const Literal l : conflict) tmp_literal_set_.Clear(l); + if (decisions_clause_size > 0) { + for (int i = 0; i < CurrentDecisionLevel(); ++i) { + tmp_decision_set_.Clear(Decisions()[i].literal.Negated()); + } + } clauses_propagator_->CleanUpWatchers(); return {is_redundant, min_lbd_of_subsumed_clauses}; } -void SatSolver::FillLratProofForLearnedConflict( - std::vector* clause_ids) { - clause_ids->clear(); - // First add all the unit clauses used in the reasons to infer the conflict. - // They can be added in any order since they don't depend on each other. - for (const Literal literal : reason_used_to_infer_the_conflict_) { - DCHECK_NE(trail_->AssignmentLevel(literal), 0); - for (const Literal reason : trail_->Reason(literal.Variable())) { - const BooleanVariable reason_var = reason.Variable(); - if (!is_marked_[reason_var] && trail_->AssignmentLevel(reason) == 0) { - is_marked_.Set(reason_var); - clause_ids->push_back(trail_->GetUnitClauseId(reason_var)); - DCHECK_NE(clause_ids->back(), kNoClauseId); - } - } - } - for (const Literal literal : trail_->FailingClause()) { +void SatSolver::AppendLratProofForFixedLiterals( + absl::Span literals, std::vector* clause_ids) { + for (const Literal literal : literals) { const BooleanVariable var = literal.Variable(); if (!is_marked_[var] && trail_->AssignmentLevel(literal) == 0) { is_marked_.Set(var); @@ -1155,22 +1354,14 @@ void SatSolver::FillLratProofForLearnedConflict( DCHECK_NE(clause_ids->back(), kNoClauseId); } } - // Then add the clauses which become unit when all the unit clauses above and - // all the literals in learned_conflict_ are assumed to be false, in unit - // propagation order. 
- for (int i = reason_used_to_infer_the_conflict_.size() - 1; i >= 0; --i) { - const Literal literal = reason_used_to_infer_the_conflict_[i]; - ClauseId clause_id = ReasonClauseId(literal); - if (clause_id == kNoClauseId) { - clause_id = clause_id_generator_->GetNextId(); - DCHECK_NE(trail_->AssignmentLevel(literal), 0); - lrat_proof_handler_->AddAssumedClause(clause_id, - trail_->Reason(literal.Variable())); - } - clause_ids->push_back(clause_id); - } - // Finally add the failing SAT clause, which becomes empty when all the - // clause_ids above become unit. +} + +void SatSolver::AppendLratProofForFailingClause( + std::vector* clause_ids) { + // Add all the non-yet marked unit-clause. + AppendLratProofForFixedLiterals(trail_->FailingClause(), clause_ids); + + // Add the failing SAT clause. ClauseId failing_clause_id = kNoClauseId; const SatClause* failing_sat_clause = trail_->FailingSatClause(); if (failing_sat_clause != nullptr) { @@ -1190,17 +1381,44 @@ void SatSolver::FillLratProofForLearnedConflict( clause_ids->push_back(failing_clause_id); } +void SatSolver::AppendLratProofFromReasons(absl::Span reasons, + std::vector* clause_ids) { + // First add all the unit clauses used in the reasons to infer the conflict. + // They can be added in any order since they don't depend on each other. + for (const Literal literal : reasons) { + DCHECK_NE(trail_->AssignmentLevel(literal), 0); + AppendLratProofForFixedLiterals(trail_->Reason(literal.Variable()), + clause_ids); + } + + // Then add the clauses which become unit when all the unit clauses above and + // all the literals in learned_conflict_ are assumed to be false, in unit + // propagation order. 
+ for (int i = reasons.size() - 1; i >= 0; --i) { + const Literal literal = reasons[i]; + ClauseId clause_id = clauses_propagator_->ReasonClauseId(literal); + if (clause_id == kNoClauseId) { + clause_id = clause_id_generator_->GetNextId(); + DCHECK_NE(trail_->AssignmentLevel(literal), 0); + lrat_proof_handler_->AddAssumedClause(clause_id, + trail_->Reason(literal.Variable())); + } + clause_ids->push_back(clause_id); + } +} + SatSolver::Status SatSolver::ReapplyDecisionsUpTo( int max_level, int* first_propagation_index) { SCOPED_TIME_STAT(&stats_); DCHECK(assumptions_.empty()); - int decision_index = current_decision_level_; + int decision_index = trail_->CurrentDecisionLevel(); + const auto& decisions = trail_->Decisions(); while (decision_index <= max_level) { - DCHECK_GE(decision_index, current_decision_level_); - const Literal previous_decision = decisions_[decision_index].literal; + DCHECK_GE(decision_index, trail_->CurrentDecisionLevel()); + const Literal previous_decision = decisions[decision_index].literal; ++decision_index; if (Assignment().LiteralIsTrue(previous_decision)) { - // Note that this particular position in decisions_ will be overridden, + // Note that this particular position in decisions will be overridden, // but that is fine since this is a consequence of the previous decision, // so we will never need to take it into account again. continue; @@ -1213,13 +1431,13 @@ SatSolver::Status SatSolver::ReapplyDecisionsUpTo( } // Not assigned, we try to take it. 
- const int old_level = current_decision_level_; + const int old_level = trail_->CurrentDecisionLevel(); const int index = EnqueueDecisionAndBackjumpOnConflict(previous_decision); if (first_propagation_index != nullptr) { *first_propagation_index = std::min(*first_propagation_index, index); } if (index == kUnsatTrailIndex) return INFEASIBLE; - if (current_decision_level_ <= old_level) { + if (trail_->CurrentDecisionLevel() <= old_level) { // A conflict occurred which backjumped to an earlier decision level. // We potentially backjumped over some valid decisions, so we need to // continue the loop and try to re-enqueue them. @@ -1231,7 +1449,7 @@ SatSolver::Status SatSolver::ReapplyDecisionsUpTo( // it is already propagated to false. There is no guarantee of this // because we learn the first-UIP conflict. If it is not the case, we will // then learn a new conflict, backjump, and continue the loop. - decision_index = current_decision_level_; + decision_index = trail_->CurrentDecisionLevel(); } } return FEASIBLE; @@ -1243,8 +1461,8 @@ SatSolver::Status SatSolver::EnqueueDecisionAndBacktrackOnConflict( CHECK(PropagationIsDone()); CHECK(assumptions_.empty()); if (model_is_unsat_) return SatSolver::INFEASIBLE; - DCHECK_LT(CurrentDecisionLevel(), decisions_.size()); - decisions_[CurrentDecisionLevel()].literal = true_literal; + DCHECK_LT(CurrentDecisionLevel(), trail_->Decisions().size()); + trail_->OverrideDecision(CurrentDecisionLevel(), true_literal); if (first_propagation_index != nullptr) { *first_propagation_index = trail_->Index(); } @@ -1277,22 +1495,16 @@ void SatSolver::Backtrack(int target_level) { // that will cause some problems. Note that we could forbid a user to call // Backtrack() with the current level, but that is annoying when you just // want to reset the solver with Backtrack(0). 
- DCHECK(target_level == 0 || !Decisions().empty()); - if (CurrentDecisionLevel() == target_level || Decisions().empty()) return; DCHECK_GE(target_level, 0); DCHECK_LE(target_level, CurrentDecisionLevel()); + if (CurrentDecisionLevel() == target_level) return; // Any backtrack to the root from a positive one is counted as a restart. counters_.num_backtracks++; if (target_level == 0) counters_.num_restarts++; // Per the SatPropagator interface, this is needed before calling Untrail. - trail_->SetDecisionLevel(target_level); - - current_decision_level_ = target_level; - const int target_trail_index = - decisions_[current_decision_level_].trail_index; - + const int target_trail_index = trail_->PrepareBacktrack(target_level); DCHECK_LT(target_trail_index, trail_->Index()); for (SatPropagator* propagator : propagators_) { if (propagator->IsEmpty()) continue; @@ -1380,7 +1592,7 @@ void SatSolver::KeepAllClausesUsedToInfer(BooleanVariable variable) { --num; const BooleanVariable var = (*trail_)[trail_index].Variable(); - SatClause* clause = ReasonClauseOrNull(var); + SatClause* clause = clauses_propagator_->ReasonClauseOrNull(var); if (clause != nullptr) { clauses_propagator_->KeepClauseForever(clause); } @@ -1422,7 +1634,7 @@ bool SatSolver::SubsumptionIsInteresting(BooleanVariable variable, const int type = trail_->AssignmentType(var); if (type == AssignmentType::kSearchDecision) continue; if (type != binary_id && type != clause_id) return false; - SatClause* clause = ReasonClauseOrNull(var); + SatClause* clause = clauses_propagator_->ReasonClauseOrNull(var); if (clause != nullptr && clauses_propagator_->IsRemovable(clause)) { if (clause->size() > max_size) { return false; @@ -1473,12 +1685,14 @@ bool SatSolver::TryToMinimizeClause(SatClause* clause) { int longest_valid_prefix = 0; if (CurrentDecisionLevel() > 0) { candidate.resize(clause->size()); + // Insert any compatible decisions into their correct place in candidate + const auto& decisions = trail_->Decisions(); 
for (Literal lit : *clause) { if (!Assignment().LiteralIsFalse(lit)) continue; const AssignmentInfo& info = trail_->Info(lit.Variable()); if (info.level <= 0 || info.level > clause->size()) continue; - if (decisions_[info.level - 1].literal == lit.Negated()) { + if (decisions[info.level - 1].literal == lit.Negated()) { candidate[info.level - 1] = lit; } } @@ -1564,7 +1778,8 @@ bool SatSolver::TryToMinimizeClause(SatClause* clause) { // clauses. If we can subsume this clause by making only 1 additional // clause permanent and that clause is no longer than this one, we will // do so. - if (ReasonClauseOrNull(literal.Variable()) != clause && + if (clauses_propagator_->ReasonClauseOrNull(literal.Variable()) != + clause && SubsumptionIsInteresting(literal.Variable(), candidate.size())) { counters_.minimization_num_subsumed++; counters_.minimization_num_removed_literals += clause->size(); @@ -1612,14 +1827,16 @@ bool SatSolver::TryToMinimizeClause(SatClause* clause) { // need the propagating clauses to prove this (assuming that all the // minimized clause literals are false will lead to a conflict on this // 'fixed to true' literal). - AppendClausesFixing({Literal(fixed_true_literal)}, &clause_ids); + clauses_propagator_->AppendClauseIdsFixing({Literal(fixed_true_literal)}, + &clause_ids); } else { // If some literals of the minimized clause fix those that have been // removed to false, the propagating clauses and the original one prove // this (assuming that all the minimized clause literals are false will // lead to all the literals of the original clause fixed to false, which // is a conflict with the original clause). 
- AppendClausesFixing(fixed_false_literals, &clause_ids); + clauses_propagator_->AppendClauseIdsFixing(fixed_false_literals, + &clause_ids); clause_ids.push_back(clauses_propagator_->GetClauseId(clause)); } } @@ -1766,12 +1983,12 @@ SatSolver::Status SatSolver::SolveInternal(TimeLimit* time_limit, next_display = NextMultipleOf(num_failures(), kDisplayFrequency); } - const int old_level = current_decision_level_; + const int old_level = trail_->CurrentDecisionLevel(); if (!Propagate()) { // A conflict occurred, continue the loop. ProcessCurrentConflict(); if (model_is_unsat_) return StatusWithLog(INFEASIBLE); - if (old_level == current_decision_level_) { + if (old_level == trail_->CurrentDecisionLevel()) { CHECK(!assumptions_.empty()); return StatusWithLog(ASSUMPTIONS_UNSAT); } @@ -1879,8 +2096,9 @@ std::vector SatSolver::GetDecisionsFixing( } // We just expand the reasons recursively until we only have decisions. + const auto& decisions = trail_->Decisions(); const int limit = - CurrentDecisionLevel() > 0 ? decisions_[0].trail_index : trail_->Index(); + CurrentDecisionLevel() > 0 ? decisions[0].trail_index : trail_->Index(); CHECK_LT(trail_index, trail_->Index()); while (true) { // Find next marked literal to expand from the trail. @@ -1911,112 +2129,12 @@ std::vector SatSolver::GetDecisionsFixing( return unsat_assumptions; } -void SatSolver::AppendClausesFixing( - absl::Span literals, std::vector* clause_ids, - LiteralIndex decision, - absl::flat_hash_map, ClauseId>* - additional_binary_clause_ids) { - SCOPED_TIME_STAT(&stats_); - - // Mark the literals whose reason must be expanded, and compute their min and - // max trail index. 
- tmp_mark_.ClearAndResize(num_variables_); - int trail_index = 0; - int min_trail_index = trail_->Index(); - for (const Literal lit : literals) { - CHECK(Assignment().LiteralIsAssigned(lit)); - const int var_trail_index = trail_->Info(lit.Variable()).trail_index; - trail_index = std::max(trail_index, var_trail_index); - min_trail_index = std::min(min_trail_index, var_trail_index); - tmp_mark_.Set(lit.Variable()); - } - - const int current_level = CurrentDecisionLevel(); - // The min level of the expanded literals. - int min_level = current_level; - - // Unit clauses must come first. We put them in clause_ids directly. We put - // the others in non_unit_clause_ids and append them to clause_ids at the end. - std::vector& non_unit_clause_ids = - tmp_clause_ids_for_append_clauses_fixing_; - non_unit_clause_ids.clear(); - - while (true) { - // Find next marked literal to expand from the trail. - while (trail_index >= min_trail_index && - !tmp_mark_[(*trail_)[trail_index].Variable()]) { - --trail_index; - } - if (trail_index < min_trail_index) break; - const Literal marked_literal = (*trail_)[trail_index--]; - - // Stop at decisions, at literals fixed at root, and at literals implied by - // the decision at their level. 
- const int level = trail_->Info(marked_literal.Variable()).level; - if (level > 0) min_level = std::min(min_level, level); - if (trail_->AssignmentType(marked_literal.Variable()) == - AssignmentType::kSearchDecision) { - continue; - } - if (level == 0) { - clause_ids->push_back(trail_->GetUnitClauseId(marked_literal.Variable())); - continue; - } - const Literal level_decision = decisions_[level - 1].literal; - ClauseId clause_id = binary_implication_graph_->GetClauseId( - level_decision.Negated(), marked_literal); - if (clause_id == kNoClauseId && additional_binary_clause_ids != nullptr) { - const auto it = additional_binary_clause_ids->find( - std::minmax(level_decision.Negated(), marked_literal)); - if (it != additional_binary_clause_ids->end()) { - clause_id = it->second; - } - } - if (clause_id != kNoClauseId) { - non_unit_clause_ids.push_back(clause_id); - continue; - } - - // Mark all the literals of its reason. - for (const Literal literal : trail_->Reason(marked_literal.Variable())) { - const BooleanVariable var = literal.Variable(); - if (!tmp_mark_[var]) { - tmp_mark_.Set(var); - const AssignmentInfo& info = trail_->Info(var); - if (info.level > 0) { - min_trail_index = std::min(min_trail_index, info.trail_index); - } else { - clause_ids->push_back(trail_->GetUnitClauseId(var)); - } - } - } - non_unit_clause_ids.push_back(ReasonClauseId(marked_literal)); - } - - if (decision != kNoLiteralIndex) { - // Add the implication chain from `decision` to all the decisions found - // during the expansion. - if (Literal(decision) != decisions_[current_level - 1].literal) { - // If `decision` is not the last decision, it must directly imply it. 
- clause_ids->push_back(binary_implication_graph_->GetClauseId( - Literal(decision).Negated(), decisions_[current_level - 1].literal)); - } - for (int level = current_level - 1; level >= min_level; --level) { - clause_ids->push_back(binary_implication_graph_->GetClauseId( - decisions_[level].literal.Negated(), decisions_[level - 1].literal)); - } - } - - clause_ids->insert(clause_ids->end(), non_unit_clause_ids.rbegin(), - non_unit_clause_ids.rend()); -} - void SatSolver::BumpReasonActivities(absl::Span literals) { SCOPED_TIME_STAT(&stats_); for (const Literal literal : literals) { const BooleanVariable var = literal.Variable(); if (AssignmentLevel(var) > 0) { - SatClause* clause = ReasonClauseOrNull(var); + SatClause* clause = clauses_propagator_->ReasonClauseOrNull(var); if (clause != nullptr) { BumpClauseActivity(clause); } else { @@ -2452,9 +2570,6 @@ void SatSolver::EnqueueNewDecision(Literal literal) { counters_.num_branches++; last_decision_or_backtrack_trail_index_ = trail_->Index(); - decisions_[current_decision_level_] = Decision(trail_->Index(), literal); - ++current_decision_level_; - trail_->SetDecisionLevel(current_decision_level_); trail_->EnqueueSearchDecision(literal); } @@ -2489,18 +2604,18 @@ int SatSolver::ComputeMaxTrailIndex(absl::Span clause) const { // http://gauss.ececs.uc.edu/SAT/articles/FAIA185-0131.pdf void SatSolver::ComputeFirstUIPConflict( int max_trail_index, std::vector* conflict, - std::vector* reason_used_to_infer_the_conflict, - std::vector* subsumed_clauses) { + std::vector* reason_used_to_infer_the_conflict) { SCOPED_TIME_STAT(&stats_); const int64_t conflict_id = counters_.num_failures; + Literal previous_literal; + // This will be used to mark all the literals inspected while we process the // conflict and the reasons behind each of its variable assignments. 
is_marked_.ClearAndResize(num_variables_);

   conflict->clear();
   reason_used_to_infer_the_conflict->clear();
-  subsumed_clauses->clear();
   if (max_trail_index == -1) return;

   absl::Span conflict_or_reason_to_expand =
@@ -2510,7 +2625,6 @@ void SatSolver::ComputeFirstUIPConflict(
   // and its level (Which is almost always equals to the CurrentDecisionLevel(),
   // except for symmetry propagation).
   DCHECK_EQ(max_trail_index, ComputeMaxTrailIndex(trail_->FailingClause()));
-  int trail_index = max_trail_index;
   int highest_level = trail_->Info((*trail_)[max_trail_index].Variable()).level;
   if (trail_->ChronologicalBacktrackingEnabled()) {
     for (const Literal literal : conflict_or_reason_to_expand) {
@@ -2520,6 +2634,17 @@
   }
   if (highest_level == 0) return;

+  // We use a max-heap to find the literal to eliminate.
+  struct LiteralWithIndex {
+    Literal literal;
+    int index;
+
+    bool operator<(const LiteralWithIndex& other) const {
+      return index < other.index;
+    }
+  };
+  std::vector last_level_heap;
+
   // To find the 1-UIP conflict clause, we start by the failing_clause, and
   // expand each of its literal using the reason for this literal assignment to
   // false. The is_marked_ set allow us to never expand the same literal twice.
@@ -2528,17 +2653,18 @@
   // decision level below the current one. If the level of such literal is not
   // zero, it is added to the conflict clause.
   //
-  // Now, the trick is that we use the trail to expand the literal of the
-  // current level in a very specific order. Namely the reverse order of the one
-  // in which they were inferred. We stop as soon as
-  // num_literal_at_highest_level_that_needs_to_be_processed is exactly one.
+  // We use a heap to expand the literals of the highest_level by decreasing
+  assignment order, aka trail index. We stop when there is a single literal
+  left at the highest level.
// // This last literal will be the first UIP because by definition all the // propagation done at the current level will pass though it at some point. SatClause* sat_clause = trail_->FailingSatClause(); DCHECK(!conflict_or_reason_to_expand.empty()); - int num_literal_at_highest_level_that_needs_to_be_processed = 0; while (true) { + const int old_conflict_size = conflict->size(); + const int old_heap_size = last_level_heap.size(); + int num_new_vars_at_positive_level = 0; int num_vars_at_positive_level_in_clause_to_expand = 0; for (const Literal literal : conflict_or_reason_to_expand) { @@ -2551,7 +2677,7 @@ void SatSolver::ComputeFirstUIPConflict( is_marked_.Set(var); ++num_new_vars_at_positive_level; if (level == highest_level) { - ++num_literal_at_highest_level_that_needs_to_be_processed; + last_level_heap.push_back({literal, trail_->Info(var).trail_index}); } else { // Note that all these literals are currently false since the clause // to expand was used to infer the value of a literal at this level. @@ -2563,33 +2689,40 @@ void SatSolver::ComputeFirstUIPConflict( // If there is new variables, then all the previously subsumed clauses are // not subsumed by the current conflict anymore. However they are still - // subsumed by the earlier version we saw during the resolution. + // subsumed by the state of the conflict just before. // - // TODO(user): Implement the strenghtening and the subsumption. + // TODO(user): Think about minimization of these intermediate conflicts. if (num_new_vars_at_positive_level > 0) { - if (subsumed_clauses->size() > 1 || - (!subsumed_clauses->empty() && - (*subsumed_clauses)[0] != trail_->FailingSatClause())) { - // The last clause of "subsumed_clauses" can be strenghtened to remove - // the propagated literal. - // - // This will be the new "base conflict" clause for the proof point of - // view. 
- absl::Span strenghtened = - subsumed_clauses->back()->AsSpan().subspan(1); - + if (parameters_->extra_subsumption_during_conflict_analysis() && + !subsumed_clauses_.empty() && + reason_used_to_infer_the_conflict->size() > 1) { + // The "old" conflict should subsume all of that. tmp_literals_.clear(); - for (const Literal l : strenghtened) { - if (trail_->AssignmentLevel(l) > 0) tmp_literals_.push_back(l); + tmp_literals_.push_back(previous_literal.Negated()); + for (int i = 0; i < old_conflict_size; ++i) { + tmp_literals_.push_back((*conflict)[i]); } - - // Then this clause subsumes all earlier entry in subsumed_clauses. - const int left = subsumed_clauses->size() - 1; - for (int i = 0; i < left; ++i) { - DCHECK(ClauseSubsumption(tmp_literals_, (*subsumed_clauses)[i])); + for (int i = 0; i < old_heap_size; ++i) { + tmp_literals_.push_back(last_level_heap[i].literal); } + if (DEBUG_MODE) { + for (SatClause* clause : subsumed_clauses_) { + CHECK(ClauseSubsumption(tmp_literals_, clause)) + << tmp_literals_ << " " << clause->AsSpan(); + } + } + + subsuming_lrat_index_.push_back( + reason_used_to_infer_the_conflict->size() - 1); + subsuming_clauses_.Add(tmp_literals_); + subsuming_groups_.Add(subsumed_clauses_); } - subsumed_clauses->clear(); + subsumed_clauses_.clear(); + } + + // Restore the heap property. + for (int i = old_heap_size + 1; i <= last_level_heap.size(); ++i) { + std::push_heap(last_level_heap.begin(), last_level_heap.begin() + i); } // This check if the new conflict is exactly equal to @@ -2601,33 +2734,25 @@ void SatSolver::ComputeFirstUIPConflict( // literal). if (sat_clause != nullptr && num_vars_at_positive_level_in_clause_to_expand == - conflict->size() + - num_literal_at_highest_level_that_needs_to_be_processed) { - subsumed_clauses->push_back(sat_clause); + conflict->size() + last_level_heap.size()) { + subsumed_clauses_.push_back(sat_clause); } - // Find next marked literal to expand from the trail. 
- DCHECK_GT(num_literal_at_highest_level_that_needs_to_be_processed, 0); - while ( - !is_marked_[(*trail_)[trail_index].Variable()] || - (trail_->ChronologicalBacktrackingEnabled() && - AssignmentLevel((*trail_)[trail_index].Variable()) < highest_level)) { - --trail_index; - DCHECK_GE(trail_index, 0); - } + DCHECK(!last_level_heap.empty()); + const Literal literal = (*trail_)[last_level_heap.front().index]; + DCHECK(is_marked_[literal.Variable()]); - if (num_literal_at_highest_level_that_needs_to_be_processed == 1) { + if (last_level_heap.size() == 1) { // We have the first UIP. Add its negation to the conflict clause. // This way, after backtracking to the proper level, the conflict clause // will be unit, and infer the negation of the UIP that caused the fail. - conflict->push_back((*trail_)[trail_index].Negated()); + conflict->push_back(literal.Negated()); // To respect the function API move the first UIP in the first position. std::swap(conflict->back(), conflict->front()); break; } - const Literal literal = (*trail_)[trail_index]; reason_used_to_infer_the_conflict->push_back(literal); // If we already encountered the same reason, we can just skip this literal @@ -2640,10 +2765,11 @@ void SatSolver::ComputeFirstUIPConflict( conflict_or_reason_to_expand = trail_->Reason(literal.Variable(), conflict_id); } - sat_clause = ReasonClauseOrNull(literal.Variable()); + sat_clause = clauses_propagator_->ReasonClauseOrNull(literal.Variable()); - --num_literal_at_highest_level_that_needs_to_be_processed; - --trail_index; + previous_literal = literal; + absl::c_pop_heap(last_level_heap); + last_level_heap.pop_back(); } } @@ -2907,7 +3033,7 @@ void SatSolver::MinimizeConflictSimple(std::vector* conflict, trail_->Info(b.Variable()).trail_index; }); for (const Literal literal : tmp_literals_) { - clause_ids->push_back(ReasonClauseId(literal)); + clause_ids->push_back(clauses_propagator_->ReasonClauseId(literal)); DCHECK_NE(clause_ids->back(), kNoClauseId); } } @@ -3168,7 
+3294,8 @@ void SatSolver::AppendInferenceChain(BooleanVariable variable, const Literal current_literal = trail_->Assignment().GetTrueLiteralForAssignedVariable(current_var); - clause_ids->push_back(ReasonClauseId(current_literal)); + clause_ids->push_back( + clauses_propagator_->ReasonClauseId(current_literal)); DCHECK_NE(clause_ids->back(), kNoClauseId); is_marked_for_lrat_.Set(current_var); continue; diff --git a/ortools/sat/sat_solver.h b/ortools/sat/sat_solver.h index d1c9b2a9317..3a119ab1546 100644 --- a/ortools/sat/sat_solver.h +++ b/ortools/sat/sat_solver.h @@ -257,32 +257,6 @@ class SatSolver { // `literals` are fixed to their current value. std::vector GetDecisionsFixing(absl::Span literals); - // Appends to `clause_ids` the IDs of the clauses which, by unit propagation - // from some decisions, are sufficient to ensure that all literals in - // `literals` are fixed to their current value. - // - // If `decision` is not `kNoLiteralIndex`, also appends the IDs of the clauses - // proving that `decision` implies all the literals in `literals`. In this - // case, `decision` must either be the last decision on the trail, or must - // directly imply it. Furthermore, each decision must directly imply the - // previous one on the trail. - // - // This method expands the reasons of each literal recursively until a - // decision, or a literal implied by the decision at its decision level, is - // found. The latter criterion avoids a quadratic complexity when implications - // of the form "decision => literal" are added for each newly propagated - // literal after taking a decision (provided these implications are added to - // the binary implication graph right away, in trail index order). - // - // If `additional_binary_clause_ids` is not null, it is used to look for - // existing binary clauses if they are not found in the binary implication - // graph. 
- void AppendClausesFixing( - absl::Span literals, std::vector* clause_ids, - LiteralIndex decision = kNoLiteralIndex, - absl::flat_hash_map, ClauseId>* - additional_binary_clause_ids = nullptr); - // Advanced usage. The next 3 functions allow to drive the search from outside // the solver. @@ -435,18 +409,10 @@ class SatSolver { const std::vector& NewlyAddedBinaryClauses(); void ClearNewlyAddedBinaryClauses(); - struct Decision { - Decision() = default; - Decision(int i, Literal l) : trail_index(i), literal(l) {} - int trail_index = 0; - Literal literal; - }; - - // Note that the Decisions() vector is always of size NumVariables(), and that - // only the first CurrentDecisionLevel() entries have a meaning. The decision - // made at level l is Decisions()[l - 1] (there are no decisions at level 0). - const std::vector& Decisions() const { return decisions_; } - int CurrentDecisionLevel() const { return current_decision_level_; } + const std::vector& Decisions() const { + return trail_->Decisions(); + } + int CurrentDecisionLevel() const { return trail_->CurrentDecisionLevel(); } const Trail& LiteralTrail() const { return *trail_; } const VariablesAssignment& Assignment() const { return trail_->Assignment(); } @@ -543,8 +509,9 @@ class SatSolver { void ProcessNewlyFixedVariables(); int64_t NumFixedVariables() const { - if (!decisions_.empty()) return decisions_[0].trail_index; - CHECK_EQ(CurrentDecisionLevel(), 0); + if (CurrentDecisionLevel() > 0) { + return trail_->Decisions()[0].trail_index; + } return trail_->Index(); } @@ -603,10 +570,10 @@ class SatSolver { // assumption_level of 0 (meaning no assumptions). Status SolveInternal(int assumption_level); - // Applies the previous decisions (which are still on decisions_), in order, - // starting from the one at the current decision level. Stops at the one at - // decisions_[level] or on the first decision already propagated to "false" - // and thus incompatible. 
+ // Applies the previous decisions (which are still on trail_->Decisions()), in + // order, starting from the one at the current decision level. Stops at the + // one at decisions[level] or on the first decision already propagated to + // "false" and thus incompatible. // // Note that during this process, conflicts may arise which will lead to // backjumps. In this case, we will simply keep reapplying decisions from the @@ -635,14 +602,8 @@ class SatSolver { // Returns the relevant pointer if the given variable was propagated by the // constraint in question. This is used to bump the activity of the learned // clauses or pb constraints. - SatClause* ReasonClauseOrNull(BooleanVariable var) const { - return clauses_propagator_->ReasonClauseOrNull(var); - } UpperBoundedLinearConstraint* ReasonPbConstraintOrNull( BooleanVariable var) const; - // Returns the ID of the unit, binary, or general clause that is the reason - // for the given literal, or kNoClauseId if there is none. - ClauseId ReasonClauseId(Literal literal) const; // This does one step of a pseudo-Boolean resolution: // - The variable var has been assigned to l at a given trail_index. @@ -659,8 +620,8 @@ class SatSolver { // Add a problem clause. The clause is assumed to be "cleaned", that is no // duplicate variables (not strictly required) and not empty. - bool AddProblemClauseInternal(absl::Span literals, - bool shared = false); + bool AddProblemClauseInternal(ClauseId id, + absl::Span literals); // This is used by all the Add*LinearConstraint() functions. 
It detects // infeasible/trivial constraints or clause constraints and takes the proper @@ -709,19 +670,23 @@ class SatSolver { // http://www.cs.tau.ac.il/~msagiv/courses/ATP/iccad2001_final.pdf void ComputeFirstUIPConflict( int max_trail_index, std::vector* conflict, - std::vector* reason_used_to_infer_the_conflict, - std::vector* subsumed_clauses); + std::vector* reason_used_to_infer_the_conflict); // Use the learned conflict to subsumes some clause. // // Returns the pair . // A clause will be marked as redundant only if all the subsumed clauses are. std::pair SubsumptionsInConflictResolution( - absl::Span conflict, + ClauseId learned_conflict_id, absl::Span conflict, absl::Span reason_used); - // Fills `clause_ids` with the LRAT proof for the learned conflict. - void FillLratProofForLearnedConflict(std::vector* clause_ids); + // Append the necessary `clause_ids` for the corresponding part of an LRAT + // proof. Note that the first function modify is_marked_. + void AppendLratProofForFixedLiterals(absl::Span literals, + std::vector* clause_ids); + void AppendLratProofForFailingClause(std::vector* clause_ids); + void AppendLratProofFromReasons(absl::Span reasons, + std::vector* clause_ids); // Fills literals with all the literals in the reasons of the literals in the // given input. The output vector will have no duplicates and will not contain @@ -851,13 +816,6 @@ class SatSolver { // Used for debugging only. See SaveDebugAssignment(). VariablesAssignment debug_assignment_; - // The stack of decisions taken by the solver. They are stored in [0, - // current_decision_level_). The vector is of size num_variables_ so it can - // store all the decisions. This is done this way because in some situation we - // need to remember the previously taken decisions after a backtrack. - int current_decision_level_ = 0; - std::vector decisions_; - // The trail index after the last Backtrack() call or before the last // EnqueueNewDecision() call. 
int last_decision_or_backtrack_trail_index_ = 0; @@ -892,6 +850,7 @@ class SatSolver { // Temporary members used during conflict analysis. Bitset64 tmp_literal_set_; + Bitset64 tmp_decision_set_; SparseBitset is_marked_; SparseBitset is_marked_for_lrat_; SparseBitset is_independent_; @@ -905,9 +864,9 @@ class SatSolver { // Temporary member used when adding clauses. std::vector tmp_literals_; // Temporary members used when adding LRAT inferred clauses. + std::vector tmp_clause_ids_; std::vector tmp_clause_ids_for_1uip_; std::vector tmp_clause_ids_for_minimization_; - std::vector tmp_clause_ids_for_append_clauses_fixing_; absl::flat_hash_set tmp_clause_id_set_; // A boolean vector used to temporarily mark decision levels. @@ -918,8 +877,21 @@ class SatSolver { std::vector learned_conflict_; std::vector reason_used_to_infer_the_conflict_; std::vector extra_reason_literals_; + std::vector subsumed_clauses_; + std::vector subsuming_lrat_index_; + CompactVectorVector subsuming_clauses_; + CompactVectorVector subsuming_groups_; + + struct DelayedNewClause { + ClauseId id; + bool is_redundant; + int min_lbd_of_subsumed_clauses; + std::vector clause; + }; + std::vector delayed_to_add_; + // When true, temporarily disable the deletion of clauses that are not needed // anymore. This is a hack for TryToMinimizeClause() because we use // propagation in this function which might trigger a clause database @@ -1128,7 +1100,8 @@ inline std::function Value(BooleanVariable b) { // is no more new solutions. inline std::function ExcludeCurrentSolutionAndBacktrack() { return [=](Model* model) { - SatSolver* sat_solver = model->GetOrCreate(); + const auto& decisions = model->GetOrCreate()->Decisions(); + auto* sat_solver = model->GetOrCreate(); // Note that we only exclude the current decisions, which is an efficient // way to not get the same SAT assignment. 
@@ -1136,8 +1109,7 @@ inline std::function ExcludeCurrentSolutionAndBacktrack() { std::vector clause_to_exclude_solution; clause_to_exclude_solution.reserve(current_level); for (int i = 0; i < current_level; ++i) { - clause_to_exclude_solution.push_back( - sat_solver->Decisions()[i].literal.Negated()); + clause_to_exclude_solution.push_back(decisions[i].literal.Negated()); } sat_solver->Backtrack(0); model->Add(ClauseConstraint(clause_to_exclude_solution)); diff --git a/ortools/sat/solution_crush.cc b/ortools/sat/solution_crush.cc index bb49a23ae92..7ca57d61b14 100644 --- a/ortools/sat/solution_crush.cc +++ b/ortools/sat/solution_crush.cc @@ -238,20 +238,21 @@ void SolutionCrush::SetOrUpdateVarToDomain(int var, const Domain& domain) { void SolutionCrush::SetOrUpdateVarToDomain( int var, const Domain& domain, - const absl::btree_map& encoding, bool has_objective, - bool minimize) { + const absl::btree_map& encoding, + bool push_down_when_repairing_hints) { if (!solution_is_loaded_) return; if (HasValue(var)) { const int64_t old_value = GetVarValue(var); if (domain.Contains(old_value)) return; int64_t new_value = old_value; - if (!has_objective) { - new_value = domain.ClosestValue(old_value); - } else if (minimize) { + if (push_down_when_repairing_hints) { new_value = domain.ValueAtOrBefore(old_value); } else { new_value = domain.ValueAtOrAfter(old_value); } + for (const auto [value, lit] : encoding) { + SetLiteralValue(lit, value == new_value); + } SetVarValue(var, new_value); VLOG(3) << "SetOrUpdateVarToDomain: " << var << ", old_value: " << old_value << ", new_value: " << new_value @@ -259,8 +260,6 @@ void SolutionCrush::SetOrUpdateVarToDomain( DCHECK(encoding.contains(new_value)) << "domain: " << domain.ToString() << "old_value: " << old_value << " new_value: " << new_value; - const int encoding_lit = encoding.at(new_value); - SetLiteralValue(encoding_lit, true); } else if (domain.IsFixed()) { SetVarValue(var, domain.FixedValue()); } diff --git 
a/ortools/sat/solution_crush.h b/ortools/sat/solution_crush.h index dbc95ece36e..462ab26fd93 100644 --- a/ortools/sat/solution_crush.h +++ b/ortools/sat/solution_crush.h @@ -158,7 +158,7 @@ class SolutionCrush { // encoding literals to reflect the new value of `var`. void SetOrUpdateVarToDomain(int var, const Domain& domain, const absl::btree_map& encoding, - bool has_objective, bool minimize); + bool push_down_when_repairing_hints); // Updates the value of the given literals to false if their current values // are different (or does nothing otherwise). diff --git a/ortools/sat/stat_tables.cc b/ortools/sat/stat_tables.cc index 35c79c8280a..626a13351df 100644 --- a/ortools/sat/stat_tables.cc +++ b/ortools/sat/stat_tables.cc @@ -134,8 +134,9 @@ void SharedStatTables::AddClausesStat(absl::string_view name, Model* model) { if (clauses_deletion_table_.empty()) { clauses_deletion_table_.push_back( {"Clause deletion", "at_true", "l_and_not(l)", "to_binary", - "sub_conflict", "sub_eager", "sub_vivify", "sub_probing", "sub_inpro", - "blocked", "eliminated", "forgotten", "promoted", "conflicts"}); + "sub_conflict", "sub_extra", "sub_decisions", "sub_eager", + "sub_vivify", "sub_probing", "sub_inpro", "blocked", "eliminated", + "forgotten", "promoted", "conflicts"}); } absl::Span deletion_by_source = model->GetOrCreate()->DeletionCounters(); @@ -149,6 +150,10 @@ void SharedStatTables::AddClausesStat(absl::string_view name, Model* model) { DeletionSourceForStat::PROMOTED_TO_BINARY)]), FormatCounter(deletion_by_source[static_cast( DeletionSourceForStat::SUBSUMPTION_CONFLICT)]), + FormatCounter(deletion_by_source[static_cast( + DeletionSourceForStat::SUBSUMPTION_CONFLICT_EXTRA)]), + FormatCounter(deletion_by_source[static_cast( + DeletionSourceForStat::SUBSUMPTION_DECISIONS)]), FormatCounter(deletion_by_source[static_cast( DeletionSourceForStat::SUBSUMPTION_EAGER)]), FormatCounter(deletion_by_source[static_cast( diff --git a/ortools/sat/synchronization.cc 
b/ortools/sat/synchronization.cc index f6cef39a762..71c7e64fd90 100644 --- a/ortools/sat/synchronization.cc +++ b/ortools/sat/synchronization.cc @@ -1607,9 +1607,9 @@ SharedLratProofStatus::SharedLratProofStatus() num_assumed_clauses_(0), walltime_in_seconds_(0.0) {} -void SharedLratProofStatus::NewSubSolver() { +int SharedLratProofStatus::NewSubSolverId() { absl::MutexLock mutex_lock(mutex_); - num_subsolvers_++; + return num_subsolvers_++; } void SharedLratProofStatus::NewSubsolverProofStatus( @@ -1632,6 +1632,16 @@ void SharedLratProofStatus::NewSubsolverProofStatus( } } +void SharedLratProofStatus::NewProofFile(absl::string_view filename) { + absl::MutexLock mutex_lock(mutex_); + proof_filenames_.push_back(std::string(filename)); +} + +std::vector SharedLratProofStatus::GetProofFilenames() { + absl::MutexLock mutex_lock(mutex_); + return proof_filenames_; +} + void SharedLratProofStatus::Log(SolverLogger* logger) { absl::MutexLock mutex_lock(mutex_); if (lrat_check_enabled_ || drat_check_enabled_) { diff --git a/ortools/sat/synchronization.h b/ortools/sat/synchronization.h index ca75d9c9171..1a238df925e 100644 --- a/ortools/sat/synchronization.h +++ b/ortools/sat/synchronization.h @@ -1298,13 +1298,17 @@ class SharedLratProofStatus { public: SharedLratProofStatus(); - void NewSubSolver(); + // Each LratProofHandler should call this to get a unique "worker ID". 
+ int NewSubSolverId(); void NewSubsolverProofStatus(DratChecker::Status status, bool lrat_check_enabled, bool drat_check_enabled, int num_assumed_clauses, double walltime_in_seconds); + void NewProofFile(absl::string_view filename); + std::vector GetProofFilenames(); + void Log(SolverLogger* logger); private: @@ -1317,6 +1321,7 @@ class SharedLratProofStatus { bool drat_check_enabled_ ABSL_GUARDED_BY(mutex_); int num_assumed_clauses_ ABSL_GUARDED_BY(mutex_); double walltime_in_seconds_ ABSL_GUARDED_BY(mutex_); + std::vector proof_filenames_ ABSL_GUARDED_BY(mutex_); }; } // namespace sat diff --git a/ortools/sat/util.h b/ortools/sat/util.h index 57254581356..87cf217f9a5 100644 --- a/ortools/sat/util.h +++ b/ortools/sat/util.h @@ -870,6 +870,22 @@ class TopN { std::vector elements_; }; +// Returns true iff subset is strictly included in superset. +// This assumes that superset has no duplicates (otherwise it is wrong). +inline bool IsStrictlyIncluded(Bitset64::ConstView in_subset, + int subset_size, + absl::Span superset) { + if (subset_size >= superset.size()) return false; + int budget = superset.size() - subset_size; + for (const Literal l : superset) { + if (!in_subset[l]) { + --budget; + if (budget < 0) return false; + } + } + return true; +} + // ============================================================================ // Implementation. 
// ============================================================================ diff --git a/ortools/sat/util_test.cc b/ortools/sat/util_test.cc index 990037686b4..b4a4f80b551 100644 --- a/ortools/sat/util_test.cc +++ b/ortools/sat/util_test.cc @@ -45,6 +45,7 @@ #include "ortools/sat/cp_model_utils.h" #include "ortools/sat/sat_base.h" #include "ortools/sat/sat_parameters.pb.h" +#include "ortools/util/bitset.h" #include "ortools/util/random_engine.h" #include "ortools/util/sorted_interval_list.h" @@ -1289,6 +1290,28 @@ TEST(HeuristicallySplitLongLinearTest, BasicExamples) { ElementsAre(Pair(0, 2), Pair(2, 2), Pair(4, 2), Pair(6, 3))); } +bool IsStrictlyIncludedWrapper(absl::Span a, + absl::Span b) { + std::vector a_lits, b_lits; + for (const int i : a) a_lits.push_back(Literal(i)); + for (const int i : b) b_lits.push_back(Literal(i)); + + Bitset64 in_a(LiteralIndex(1000)); + for (const Literal l : a_lits) in_a.Set(l); + return IsStrictlyIncluded(in_a.const_view(), a_lits.size(), b_lits); +} + +TEST(IsStricltyIncludedTest, BasicExamples) { + EXPECT_FALSE(IsStrictlyIncludedWrapper({}, {})); + EXPECT_FALSE(IsStrictlyIncludedWrapper({+3, +1}, {+1, +3})); + EXPECT_FALSE(IsStrictlyIncludedWrapper({+3, +1}, {+2, +3, +5})); + + EXPECT_TRUE(IsStrictlyIncludedWrapper({}, {+1})); + EXPECT_TRUE(IsStrictlyIncludedWrapper({}, {+1, +2, +3})); + EXPECT_TRUE(IsStrictlyIncludedWrapper({+3, +1}, {+1, +2, +3})); + EXPECT_TRUE(IsStrictlyIncludedWrapper({+3, +1, +4}, {+4, +1, +2, +3})); +} + } // namespace } // namespace sat } // namespace operations_research diff --git a/ortools/sat/variable_expand.cc b/ortools/sat/variable_expand.cc index f62db162d13..328a2264b1f 100644 --- a/ortools/sat/variable_expand.cc +++ b/ortools/sat/variable_expand.cc @@ -411,6 +411,46 @@ bool ProcessEncodingConstraints( std::vector>& linear_ones_by_type, std::vector& constraint_indices, bool& var_in_objective, bool& var_has_positive_objective_coefficient) { + // We have a variable that appears only in linear1 
constraints. That means + // that the model should not change feasibility as long as the values that + // the variable take does not the satisfiability of the linear1s. Thus, we + // have "domains of equivalence" of the possible values of the variable. + // Suppose we have a variable `v` in the domain `[0, 15]` that is only used in + // the following linear1: + // (c1) x => v <= 10 + // (c2) y => v == 5 + // (c3) z => 2 <= v <= 7 + // + // We can represent that as intervals: + // + // 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 + // +---------------------------------------------+ + // v <= 10 ...............................* + // v == 5 * + // 2 <= v <= 7 *..............* + // + // If we cut the classes of equivalences for `v` into contiguous intervals, + // they are: + // - [0, 1] (c1, ~c2, ~c3) + // - [2, 4] (c1, ~c2, c3) + // - {5} (c1, c2, c3) + // - [6, 7] (c1, ~c2, c3) + // - [8, 10] (c1, ~c2, ~c3) + // - [11, 15] (~c1, ~c2, ~c3) + // + // We can pick either the lowest or the highest value of each interval + // depending on the sign of the objective coefficient. Thus, a possible + // encoding would be: + // - x == 0 + // - x == 2 + // - x == 5 + // - x == 6 + // - x == 8 + // - x == 11 + // + // TODO(user): Pick a single value for each equivalence class, not one + // per contiguous interval. + // TODO(user): Supports more domains, for now only <= and >= are supported. const Domain& var_domain = context->DomainOf(var); constraint_indices.clear(); var_in_objective = false; @@ -426,6 +466,9 @@ bool ProcessEncodingConstraints( constraint_indices.push_back(c); } + const bool push_down_when_unconstrained = + !var_in_objective || var_has_positive_objective_coefficient; + // Sort the constraint indices to make the encoding deterministic. 
absl::c_sort(constraint_indices); for (const int c : constraint_indices) { @@ -458,9 +501,27 @@ bool ProcessEncodingConstraints( switch (lin.type) { case EncodingLinear1Type::kVarEqValue: values.AddValueToEncode(lin.value); + if (push_down_when_unconstrained) { + if (lin.value < var_domain.Max()) { + values.AddValueToEncode(var_domain.ValueAtOrAfter(lin.value + 1)); + } + } else { + if (lin.value > var_domain.Min()) { + values.AddValueToEncode(var_domain.ValueAtOrBefore(lin.value - 1)); + } + } break; case EncodingLinear1Type::kVarNeValue: values.AddValueToEncode(lin.value); + if (push_down_when_unconstrained) { + if (lin.value < var_domain.Max()) { + values.AddValueToEncode(var_domain.ValueAtOrAfter(lin.value + 1)); + } + } else { + if (lin.value > var_domain.Min()) { + values.AddValueToEncode(var_domain.ValueAtOrBefore(lin.value - 1)); + } + } break; case EncodingLinear1Type::kVarGeValue: { values.AddValueToEncode(lin.value); @@ -480,6 +541,12 @@ bool ProcessEncodingConstraints( } } + if (push_down_when_unconstrained) { + values.AddValueToEncode(var_domain.Min()); + } else { + values.AddValueToEncode(var_domain.Max()); + } + linear_ones_by_type[static_cast(lin.type)].push_back(lin); } values.CanonicalizeEncodedValuesAndAddEscapeValue( @@ -567,9 +634,11 @@ void TryToReplaceVariableByItsEncoding(int var, PresolveContext* context, values.CreateAllValueEncodingLiterals(); // Fix the hinted value if needed. + const bool push_down_when_unconstrained = + !var_in_objective || var_has_positive_objective_coefficient; solution_crush.SetOrUpdateVarToDomain( var, Domain::FromValues(values.encoded_values()), values.encoding(), - var_in_objective, var_has_positive_objective_coefficient); + push_down_when_unconstrained); order.CreateAllOrderEncodingLiterals(values); // Link all Boolean in our linear1 to the encoding literals. 
@@ -711,15 +780,15 @@ void TryToReplaceVariableByItsEncoding(int var, PresolveContext* context, return; } context->UpdateRuleStats( - "variables: only used in objective and in encoding"); + "variables: only used in objective and in encodings"); } else { if ((!lin_eq.empty() || !lin_ne.empty()) && lin_domain.empty()) { context->UpdateRuleStats( "variables: only used in value and order encodings"); } else if (!lin_domain.empty()) { - context->UpdateRuleStats("variables: only used in complex encoding"); + context->UpdateRuleStats("variables: only used in complex encodings"); } else { - context->UpdateRuleStats("variables: only used in value encoding"); + context->UpdateRuleStats("variables: only used in value encodings"); } } if (!values.is_fully_encoded()) { diff --git a/ortools/sat/work_assignment.cc b/ortools/sat/work_assignment.cc index 48eba0f5bb4..8241449c64a 100644 --- a/ortools/sat/work_assignment.cc +++ b/ortools/sat/work_assignment.cc @@ -770,7 +770,7 @@ void SharedTreeWorker::MaybeProposeSplits() { const int max_split_level = std::min(trail_->CurrentDecisionLevel(), manager_->MaxPathDepth()); for (int i = assigned_tree_.MaxLevel(); i < max_split_level; ++i) { - const Literal split_decision = sat_solver_->Decisions()[i].literal; + const Literal split_decision = trail_->Decisions()[i].literal; const std::optional encoded = EncodeDecision(split_decision); if (!encoded.has_value()) break; tmp_splits_.push_back(*encoded); From c8821290d5299f9e584f8ac93f83a1f0bc379dd4 Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Mon, 1 Dec 2025 10:41:49 +0100 Subject: [PATCH 020/111] cmake: cleanup ortools_cxx_test --- cmake/cpp.cmake | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/cmake/cpp.cmake b/cmake/cpp.cmake index c5724511e91..19af6869042 100644 --- a/cmake/cpp.cmake +++ b/cmake/cpp.cmake @@ -219,13 +219,11 @@ function(ortools_cxx_test) INSTALL_RPATH "$ORIGIN/${libdir_relative_path}:$ORIGIN") endif() - if(BUILD_TESTING) - add_test( 
- NAME cxx_${TEST_NAME} - COMMAND ${TEST_NAME} - WORKING_DIRECTORY ${PROJECT_SOURCE_DIR} - ) - endif() + add_test( + NAME cxx_${TEST_NAME} + COMMAND ${TEST_NAME} + WORKING_DIRECTORY ${PROJECT_SOURCE_DIR} + ) message(STATUS "Configuring test ${TEST_NAME} ...DONE") endfunction() From 5255bea64554bbbdabee9620eab592c3a31656ca Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Mon, 1 Dec 2025 09:33:47 +0100 Subject: [PATCH 021/111] bazel(ci): fix almalinux dockerfile --- bazel/docker/almalinux/Dockerfile | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/bazel/docker/almalinux/Dockerfile b/bazel/docker/almalinux/Dockerfile index 527fef22a51..85ec9f5c1ec 100644 --- a/bazel/docker/almalinux/Dockerfile +++ b/bazel/docker/almalinux/Dockerfile @@ -5,14 +5,11 @@ FROM almalinux:latest AS env # Install system build dependencies ENV PATH=/usr/local/bin:$PATH RUN dnf -y update \ -&& dnf -y install git wget zlib-devel gcc-toolset-13 \ +&& dnf -y install git wget zlib-devel \ && dnf -y group install "Development Tools" \ && dnf clean all \ && rm -rf /var/cache/dnf -RUN echo "source /opt/rh/gcc-toolset-13/enable" >> /etc/bashrc -SHELL ["/bin/bash", "--login", "-c"] - # Install Bazelisk ARG TARGETARCH=amd64 RUN wget \ @@ -22,7 +19,7 @@ https://github.com/bazelbuild/bazelisk/releases/download/v1.27.0/bazelisk-linux- # Install Java RUN dnf -y update \ -&& dnf -y install java-11-openjdk java-11-openjdk-devel maven \ +&& dnf -y install java-21-openjdk java-21-openjdk-devel maven \ && dnf clean all ENV JAVA_HOME=/usr/lib/jvm/java-openjdk From 032fad4c94cef80dbeeee3dc043f7814fba5448e Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Mon, 1 Dec 2025 09:34:03 +0100 Subject: [PATCH 022/111] cmake(ci): fix almalinux dockerfile --- cmake/docker/almalinux/java.Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/cmake/docker/almalinux/java.Dockerfile b/cmake/docker/almalinux/java.Dockerfile index ea70a77475c..2ac99b0742f 100644 --- 
a/cmake/docker/almalinux/java.Dockerfile +++ b/cmake/docker/almalinux/java.Dockerfile @@ -1,11 +1,11 @@ FROM ortools/cmake:almalinux_swig AS env RUN dnf -y update \ -&& dnf -y install java-11-openjdk java-11-openjdk-devel maven \ +&& dnf -y install java-21-openjdk java-21-openjdk-devel maven \ && dnf clean all \ && rm -rf /var/cache/dnf -RUN alternatives --set java /usr/lib/jvm/java-11-openjdk*.x86_64/bin/java -ENV JAVA_HOME=/usr/lib/jvm/java-11-openjdk +RUN alternatives --set java /usr/lib/jvm/java-21-openjdk/bin/java +ENV JAVA_HOME=/usr/lib/jvm/java-21-openjdk FROM env AS devel WORKDIR /home/project From 6d3a78e9d50c6805ef15cc7902b714c1ae4ee160 Mon Sep 17 00:00:00 2001 From: Laurent Perron Date: Mon, 1 Dec 2025 11:34:13 +0100 Subject: [PATCH 023/111] [Routing] add search event: at optimal --- ortools/constraint_solver/BUILD.bazel | 1 + ortools/constraint_solver/constraint_solver.h | 12 ++++++++ ortools/constraint_solver/local_search.cc | 21 ++++++------- ortools/constraint_solver/routing.cc | 7 ++++- ortools/constraint_solver/search.cc | 30 ++++++++++++++++++- 5 files changed, 57 insertions(+), 14 deletions(-) diff --git a/ortools/constraint_solver/BUILD.bazel b/ortools/constraint_solver/BUILD.bazel index daef322c02d..97a76963c96 100644 --- a/ortools/constraint_solver/BUILD.bazel +++ b/ortools/constraint_solver/BUILD.bazel @@ -214,6 +214,7 @@ cc_library( "@abseil-cpp//absl/algorithm:container", "@abseil-cpp//absl/base:core_headers", "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/base:nullability", "@abseil-cpp//absl/container:flat_hash_map", "@abseil-cpp//absl/container:flat_hash_set", "@abseil-cpp//absl/flags:flag", diff --git a/ortools/constraint_solver/constraint_solver.h b/ortools/constraint_solver/constraint_solver.h index 9bb4ebe1f2f..590ace73a4a 100644 --- a/ortools/constraint_solver/constraint_solver.h +++ b/ortools/constraint_solver/constraint_solver.h @@ -4663,6 +4663,15 @@ class OptimizeVar : public ObjectiveMonitor { /// Returns the 
variable that is optimized. IntVar* var() const { return Size() == 0 ? nullptr : ObjectiveVar(0); } +#ifndef SWIG + /// Sets a callback to be called when the objective value is found to be + /// optimal. + void SetOnOptimalFoundcallback( + std::function on_optimal_found) { + on_optimal_found_ = std::move(on_optimal_found); + } +#endif // SWIG + /// Internal methods. void BeginNextDecision(DecisionBuilder* db) override; void RefuteDecision(Decision* d) override; @@ -4672,6 +4681,9 @@ class OptimizeVar : public ObjectiveMonitor { std::string DebugString() const override; void ApplyBound(); + + private: + std::function on_optimal_found_; }; /// Base class of all search limits. diff --git a/ortools/constraint_solver/local_search.cc b/ortools/constraint_solver/local_search.cc index 2a0f237af5c..ad30188e7fc 100644 --- a/ortools/constraint_solver/local_search.cc +++ b/ortools/constraint_solver/local_search.cc @@ -25,6 +25,7 @@ #include #include "absl/algorithm/container.h" +#include "absl/base/nullability.h" #include "absl/container/flat_hash_map.h" #include "absl/container/flat_hash_set.h" #include "absl/flags/flag.h" @@ -4372,7 +4373,7 @@ namespace { // ----- NestedSolve decision wrapper ----- // This decision calls a nested Solve on the given DecisionBuilder in its -// left branch; does nothing in the left branch. +// left branch; does nothing in the right branch. // The state of the decision corresponds to the result of the nested Solve: // DECISION_PENDING - Nested Solve not called yet // DECISION_FAILED - Nested Solve failed @@ -4383,9 +4384,9 @@ class NestedSolveDecision : public Decision { // This enum is used internally to tag states in the local search tree. 
enum StateType { DECISION_PENDING, DECISION_FAILED, DECISION_FOUND }; - NestedSolveDecision(DecisionBuilder* db, bool restore, + NestedSolveDecision(DecisionBuilder* absl_nonnull db, bool restore, const std::vector& monitors); - NestedSolveDecision(DecisionBuilder* db, bool restore); + NestedSolveDecision(DecisionBuilder* absl_nonnull db, bool restore); ~NestedSolveDecision() override {} void Apply(Solver* solver) override; void Refute(Solver* solver) override; @@ -4395,25 +4396,21 @@ class NestedSolveDecision : public Decision { private: DecisionBuilder* const db_; const bool restore_; - std::vector monitors_; + const std::vector monitors_; int state_; }; NestedSolveDecision::NestedSolveDecision( - DecisionBuilder* const db, bool restore, + DecisionBuilder* absl_nonnull db, bool restore, const std::vector& monitors) : db_(db), restore_(restore), monitors_(monitors), - state_(DECISION_PENDING) { - CHECK(nullptr != db); -} + state_(DECISION_PENDING) {} -NestedSolveDecision::NestedSolveDecision(DecisionBuilder* const db, +NestedSolveDecision::NestedSolveDecision(DecisionBuilder* absl_nonnull db, bool restore) - : db_(db), restore_(restore), state_(DECISION_PENDING) { - CHECK(nullptr != db); -} + : NestedSolveDecision(db, restore, {}) {} void NestedSolveDecision::Apply(Solver* const solver) { CHECK(nullptr != solver); diff --git a/ortools/constraint_solver/routing.cc b/ortools/constraint_solver/routing.cc index 2f2e3c08d82..17b43e351c8 100644 --- a/ortools/constraint_solver/routing.cc +++ b/ortools/constraint_solver/routing.cc @@ -6410,7 +6410,12 @@ void RoutingModel::SetupMetaheuristics( } default: limit_too_long = false; - optimize = solver_->MakeMinimize(cost_, optimization_step); + OptimizeVar* const minimize = + solver_->MakeMinimize(cost_, optimization_step); + optimize = minimize; + minimize->SetOnOptimalFoundcallback([this](int64_t value) { + objective_lower_bound_ = std::max(objective_lower_bound_, value); + }); } if (limit_too_long) { LOG(WARNING) << 
LocalSearchMetaheuristic::Value_Name(metaheuristic) diff --git a/ortools/constraint_solver/search.cc b/ortools/constraint_solver/search.cc index b3014ebf4e7..4ba1f25210e 100644 --- a/ortools/constraint_solver/search.cc +++ b/ortools/constraint_solver/search.cc @@ -3217,7 +3217,35 @@ void OptimizeVar::ApplyBound() { } } -void OptimizeVar::RefuteDecision(Decision*) { ApplyBound(); } +namespace { +class ApplyBoundDecisionBuilder : public DecisionBuilder { + public: + explicit ApplyBoundDecisionBuilder(OptimizeVar* optimize_var) + : optimize_var_(optimize_var) {} + ~ApplyBoundDecisionBuilder() override = default; + Decision* Next(Solver*) override { + optimize_var_->ApplyBound(); + return nullptr; + } + + private: + OptimizeVar* optimize_var_; +}; +} // namespace + +void OptimizeVar::RefuteDecision(Decision*) { + if (!solver()->SolveAndCommit( + solver()->RevAlloc(new ApplyBoundDecisionBuilder(this)))) { + if (on_optimal_found_) { + // TODO(user): Support multiple objectives. + const int64_t value = CurrentInternalValue(0); + on_optimal_found_(objective_vars()[0] == minimization_vars()[0] + ? 
value + : CapOpp(value)); + } + solver()->Fail(); + } +} bool OptimizeVar::AcceptSolution() { if (!found_initial_solution_ || !is_active()) { From 993ac168d60e558f1785d6d793b444bb148b5fa9 Mon Sep 17 00:00:00 2001 From: Laurent Perron Date: Mon, 1 Dec 2025 11:34:57 +0100 Subject: [PATCH 024/111] [Python] improve support for gil-less python 3.14 --- .../python/model_builder_helper.cc | 22 ++++++++++++++----- ortools/sat/python/cp_model_helper.cc | 22 ++++++++++++++----- 2 files changed, 34 insertions(+), 10 deletions(-) diff --git a/ortools/linear_solver/python/model_builder_helper.cc b/ortools/linear_solver/python/model_builder_helper.cc index 326e52c2e86..5d361c3246c 100644 --- a/ortools/linear_solver/python/model_builder_helper.cc +++ b/ortools/linear_solver/python/model_builder_helper.cc @@ -310,7 +310,7 @@ std::shared_ptr WeightedSumArguments( } #if PY_VERSION_HEX >= 0x030E00A7 && !defined(PYPY_VERSION) -bool check_unique_temporary(PyObject* op) { +bool was_optimized_in_function_call(PyObject* op) { PyFrameObject* frame = PyEval_GetFrame(); if (frame == NULL) { return false; @@ -330,12 +330,24 @@ bool check_unique_temporary(PyObject* op) { return false; } +bool IsOnwedExclusivelyThroughPyBind11(PyObject* op) { +#if !defined(Py_GIL_DISABLED) + return Py_REFCNT(ob) == 3; +#else + // NOTE: the entire ob_ref_shared field must be zero, including flags, to + // ensure that other threads cannot concurrently create new references to + // this object. 
+ return (_Py_IsOwnedByCurrentThread(ob) && + _Py_atomic_load_uint32_relaxed(&ob->ob_ref_local) == 3 && + _Py_atomic_load_ssize_relaxed(&ob->ob_ref_shared) == 0); +#endif +} + template bool IsFree(std::shared_ptr expr) { - PyObject* lhs = py::cast(expr).ptr(); - const int num_uses = Py_REFCNT(lhs); - const bool is_referenced_in_caller_frame = check_unique_temporary(lhs); - return num_uses == 3 && !is_referenced_in_caller_frame; + PyObject* op = py::cast(expr).ptr(); + return IsOnwedExclusivelyThroughPyBind11(op) && + !was_optimized_in_function_call(op); } #else template diff --git a/ortools/sat/python/cp_model_helper.cc b/ortools/sat/python/cp_model_helper.cc index e21a33b6024..0661ba717ef 100644 --- a/ortools/sat/python/cp_model_helper.cc +++ b/ortools/sat/python/cp_model_helper.cc @@ -1115,7 +1115,7 @@ std::shared_ptr CpBaseModel::AddRoutesInternal( } #if PY_VERSION_HEX >= 0x030E00A7 && !defined(PYPY_VERSION) -bool check_unique_temporary(PyObject* op) { +bool was_optimized_in_function_call(PyObject* op) { PyFrameObject* frame = PyEval_GetFrame(); if (frame == NULL) { return false; @@ -1135,12 +1135,24 @@ bool check_unique_temporary(PyObject* op) { return false; } +bool IsOnwedExclusivelyThroughPyBind11(PyObject* op) { +#if !defined(Py_GIL_DISABLED) + return Py_REFCNT(ob) == 3; +#else + // NOTE: the entire ob_ref_shared field must be zero, including flags, to + // ensure that other threads cannot concurrently create new references to + // this object. 
+ return (_Py_IsOwnedByCurrentThread(ob) && + _Py_atomic_load_uint32_relaxed(&ob->ob_ref_local) == 3 && + _Py_atomic_load_ssize_relaxed(&ob->ob_ref_shared) == 0); +#endif +} + template bool IsFree(std::shared_ptr expr) { - PyObject* lhs = py::cast(expr).ptr(); - const int num_uses = Py_REFCNT(lhs); - const bool is_referenced_in_caller_frame = check_unique_temporary(lhs); - return num_uses == 3 && !is_referenced_in_caller_frame; + PyObject* op = py::cast(expr).ptr(); + return IsOnwedExclusivelyThroughPyBind11(op) && + !was_optimized_in_function_call(op); } #else template From 1623b1cb18d416f175b0eb4039f1e092c0904427 Mon Sep 17 00:00:00 2001 From: Laurent Perron Date: Mon, 1 Dec 2025 11:35:55 +0100 Subject: [PATCH 025/111] [CP-SAT] add soft constraint examples --- ortools/sat/BUILD.bazel | 1 + ortools/sat/samples/BUILD.bazel | 7 + ortools/sat/samples/soft_constraints_sat.py | 219 ++++++++++++++++++++ ortools/sat/solution_crush.cc | 2 +- 4 files changed, 228 insertions(+), 1 deletion(-) create mode 100644 ortools/sat/samples/soft_constraints_sat.py diff --git a/ortools/sat/BUILD.bazel b/ortools/sat/BUILD.bazel index 2bde6191da7..c0721320e89 100644 --- a/ortools/sat/BUILD.bazel +++ b/ortools/sat/BUILD.bazel @@ -4382,6 +4382,7 @@ cc_binary( "//ortools/base", "//ortools/base:file", "//ortools/base:path", + "//ortools/base:zipfile", "//ortools/util:file_util", "//ortools/util:logging", "//ortools/util:sigint", diff --git a/ortools/sat/samples/BUILD.bazel b/ortools/sat/samples/BUILD.bazel index a0c458a6855..c3290ba560c 100644 --- a/ortools/sat/samples/BUILD.bazel +++ b/ortools/sat/samples/BUILD.bazel @@ -879,6 +879,13 @@ cc_test( ], ) +py_test( + name = "soft_constraints_sat_py3", + srcs = ["soft_constraints_sat.py"], + main = "soft_constraints_sat.py", + deps = ["//ortools/sat/python:cp_model"], +) + go_binary( name = "solution_hinting_sample_sat_go", srcs = ["solution_hinting_sample_sat.go"], diff --git a/ortools/sat/samples/soft_constraints_sat.py 
b/ortools/sat/samples/soft_constraints_sat.py new file mode 100644 index 00000000000..d55302cee7f --- /dev/null +++ b/ortools/sat/samples/soft_constraints_sat.py @@ -0,0 +1,219 @@ +#!/usr/bin/env python3 +# Copyright 2010-2025 Google LLC +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""The sample shows multiple ways to model soft constraints in CP-SAT.""" + +# [START program] +# [START import] +from ortools.sat.python import cp_model + +# [END import] + + +def infeasible_model() -> None: + """Base model that is infeasible.""" + # Creates the model. + # [START infeasible_model] + model = cp_model.CpModel() + # [END infeasible_model] + + # Creates the variables. + # [START infeasible_model_variables] + x = model.new_int_var(0, 10, "x") + y = model.new_int_var(0, 10, "y") + z = model.new_int_var(0, 10, "z") + # [END infeasible_model_variables] + + # Creates the constraints. + # [START infeasible_model_constraints] + model.add(x > y) + model.add(y > z) + model.add(z > x) + # [END infeasible_model_constraints] + + # Creates a solver and solves. + # [START infeasible_model_solve] + solver = cp_model.CpSolver() + status = solver.solve(model) + # [END infeasible_model_solve] + + # Print solution. + # [START infeasible_model_print_solution] + print(f" Status = {solver.status_name(status)}") + # [END infeasible_model_print_solution] + + +def model_with_enforcement_literals() -> None: + """Adds fixed costs to violated constraints.""" + # Creates the model. 
+ # [START model_with_enforcement_literals]
+ model = cp_model.CpModel()
+ # [END model_with_enforcement_literals]
+
+ # Creates the variables.
+ # [START model_with_enforcement_literals_variables]
+ x = model.new_int_var(0, 10, "x")
+ y = model.new_int_var(0, 10, "y")
+ z = model.new_int_var(0, 10, "z")
+ a = model.new_bool_var("a")
+ b = model.new_bool_var("b")
+ # [END model_with_enforcement_literals_variables]
+
+ # Creates the constraints. Adds enforcement literals to the first two
+ # constraints, we assume the third constraint is always enforced.
+ # [START constraints_with_enforcement_literals]
+ model.add(x > y).only_enforce_if(a)
+ model.add(y > z).only_enforce_if(b)
+ model.add(z > x)
+ # [END constraints_with_enforcement_literals]
+
+ # Adds an objective to maximize the number of enforced constraints.
+ # [START objective_with_enforcement_literals]
+ model.maximize(a + 2 * b)
+ # [END objective_with_enforcement_literals]
+
+ # Creates a solver and solves.
+ # [START solve_with_enforcement_literals]
+ solver = cp_model.CpSolver()
+ status = solver.solve(model)
+ # [END solve_with_enforcement_literals]
+
+ # Print solution.
+ # [START print_solution_with_enforcement_literals]
+ print(f" Status = {solver.status_name(status)}")
+ if status == cp_model.OPTIMAL:
+ print(f" Objective value = {solver.objective_value}")
+ print(f" Value of x = {solver.value(x)}")
+ print(f" Value of y = {solver.value(y)}")
+ print(f" Value of z = {solver.value(z)}")
+ print(f" Value of a = {solver.boolean_value(a)}")
+ print(f" Value of b = {solver.boolean_value(b)}")
+ # [END print_solution_with_enforcement_literals]
+
+
+def model_with_linear_violations() -> None:
+ """Adds linear costs to the violation of constraints."""
+ # Creates the model.
+ # [START model_with_linear_violations]
+ model = cp_model.CpModel()
+ # [END model_with_linear_violations]
+
+ # Creates the variables. 
+ # [START model_with_linear_violations_variables]
+ x = model.new_int_var(0, 10, "x")
+ y = model.new_int_var(0, 10, "y")
+ z = model.new_int_var(0, 10, "z")
+ a = model.new_int_var(0, 10, "a")
+ b = model.new_int_var(0, 10, "b")
+ # [END model_with_linear_violations_variables]
+
+ # Creates the constraints. Relaxes the first two constraints with slack
+ # variables a and b, we assume the third constraint is always enforced.
+ # [START model_with_linear_violations_constraints]
+ model.add(x > y - a)
+ model.add(y > z - b)
+ model.add(z > x)
+ # [END model_with_linear_violations_constraints]
+
+ # Adds an objective to minimize the added slacks.
+ # [START model_with_linear_violations_objective]
+ model.minimize(a + 2 * b)
+ # [END model_with_linear_violations_objective]
+
+ # Creates a solver and solves.
+ # [START model_with_linear_violations_solve]
+ solver = cp_model.CpSolver()
+ status = solver.solve(model)
+ # [END model_with_linear_violations_solve]
+
+ # Print solution.
+ # [START model_with_linear_violations_print_solution]
+ print(f" Status = {solver.status_name(status)}")
+ if status == cp_model.OPTIMAL:
+ print(f" Objective value = {solver.objective_value}")
+ print(f" Value of x = {solver.value(x)}")
+ print(f" Value of y = {solver.value(y)}")
+ print(f" Value of z = {solver.value(z)}")
+ print(f" Value of a = {solver.value(a)}")
+ print(f" Value of b = {solver.value(b)}")
+ # [END model_with_linear_violations_print_solution]
+
+
+def model_with_quadratic_violations() -> None:
+ """Adds quadratic costs to the violation of constraints."""
+ # Creates the model.
+ # [START model_with_quadratic_violations]
+ model = cp_model.CpModel()
+ # [END model_with_quadratic_violations]
+
+ # Creates the variables. 
+ # [START model_with_quadratic_violations_variables]
+ x = model.new_int_var(0, 10, "x")
+ y = model.new_int_var(0, 10, "y")
+ z = model.new_int_var(0, 10, "z")
+ a = model.new_int_var(0, 10, "a")
+ b = model.new_int_var(0, 10, "b")
+ square_a = model.new_int_var(0, 100, "square_a")
+ square_b = model.new_int_var(0, 100, "square_b")
+ # [END model_with_quadratic_violations_variables]
+
+ # Creates the constraints. Relaxes the first two constraints with slack
+ # variables a and b, we assume the third constraint is always enforced.
+ # [START model_with_quadratic_violations_constraints]
+ model.add(x > y - a)
+ model.add(y > z - b)
+ model.add(z > x)
+
+ model.add_multiplication_equality(square_a, a, a)
+ model.add_multiplication_equality(square_b, b, b)
+ # [END model_with_quadratic_violations_constraints]
+
+ # Adds an objective to minimize the added slacks.
+ # [START model_with_quadratic_violations_objective]
+ model.minimize(square_a + 2 * square_b)
+ # [END model_with_quadratic_violations_objective]
+
+ # Creates a solver and solves.
+ # [START model_with_quadratic_violations_solve]
+ solver = cp_model.CpSolver()
+ status = solver.solve(model)
+ # [END model_with_quadratic_violations_solve]
+
+ # Print solution. 
+ # [START print_solution_with_quadratic_violations] + print(f" Status = {solver.status_name(status)}") + if status == cp_model.OPTIMAL: + print(f" Objective value = {solver.objective_value}") + print(f" Value of x = {solver.value(x)}") + print(f" Value of y = {solver.value(y)}") + print(f" Value of z = {solver.value(z)}") + print(f" Value of a = {solver.value(a)}") + print(f" Value of b = {solver.value(b)}") + # [END print_solution_with_quadratic_violations] + + +def main() -> None: + print("Infeasible model:") + infeasible_model() + print("Model with enforcement literals:") + model_with_enforcement_literals() + print("Model with linear violations:") + model_with_linear_violations() + print("Model with quadratic violations:") + model_with_quadratic_violations() + + +if __name__ == "__main__": + main() +# [END program] diff --git a/ortools/sat/solution_crush.cc b/ortools/sat/solution_crush.cc index 7ca57d61b14..c35e9848b8a 100644 --- a/ortools/sat/solution_crush.cc +++ b/ortools/sat/solution_crush.cc @@ -245,7 +245,7 @@ void SolutionCrush::SetOrUpdateVarToDomain( const int64_t old_value = GetVarValue(var); if (domain.Contains(old_value)) return; int64_t new_value = old_value; - if (push_down_when_repairing_hints) { + if (push_down_when_repairing_hints && old_value >= domain.Min()) { new_value = domain.ValueAtOrBefore(old_value); } else { new_value = domain.ValueAtOrAfter(old_value); From cfdbc48ad28f3ae39a05fd19117483abf8336f15 Mon Sep 17 00:00:00 2001 From: Laurent Perron Date: Mon, 1 Dec 2025 16:22:03 +0100 Subject: [PATCH 026/111] fix --- ortools/linear_solver/python/model_builder_helper.cc | 8 ++++---- ortools/sat/python/cp_model_helper.cc | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/ortools/linear_solver/python/model_builder_helper.cc b/ortools/linear_solver/python/model_builder_helper.cc index 5d361c3246c..ed12480e8db 100644 --- a/ortools/linear_solver/python/model_builder_helper.cc +++ 
b/ortools/linear_solver/python/model_builder_helper.cc @@ -332,14 +332,14 @@ bool was_optimized_in_function_call(PyObject* op) { bool IsOnwedExclusivelyThroughPyBind11(PyObject* op) { #if !defined(Py_GIL_DISABLED) - return Py_REFCNT(ob) == 3; + return Py_REFCNT(op) == 3; #else // NOTE: the entire ob_ref_shared field must be zero, including flags, to // ensure that other threads cannot concurrently create new references to // this object. - return (_Py_IsOwnedByCurrentThread(ob) && - _Py_atomic_load_uint32_relaxed(&ob->ob_ref_local) == 3 && - _Py_atomic_load_ssize_relaxed(&ob->ob_ref_shared) == 0); + return (_Py_IsOwnedByCurrentThread(op) && + _Py_atomic_load_uint32_relaxed(&op->ob_ref_local) == 3 && + _Py_atomic_load_ssize_relaxed(&op->ob_ref_shared) == 0); #endif } diff --git a/ortools/sat/python/cp_model_helper.cc b/ortools/sat/python/cp_model_helper.cc index 0661ba717ef..822d6f7392d 100644 --- a/ortools/sat/python/cp_model_helper.cc +++ b/ortools/sat/python/cp_model_helper.cc @@ -1137,14 +1137,14 @@ bool was_optimized_in_function_call(PyObject* op) { bool IsOnwedExclusivelyThroughPyBind11(PyObject* op) { #if !defined(Py_GIL_DISABLED) - return Py_REFCNT(ob) == 3; + return Py_REFCNT(op) == 3; #else // NOTE: the entire ob_ref_shared field must be zero, including flags, to // ensure that other threads cannot concurrently create new references to // this object. 
- return (_Py_IsOwnedByCurrentThread(ob) && - _Py_atomic_load_uint32_relaxed(&ob->ob_ref_local) == 3 && - _Py_atomic_load_ssize_relaxed(&ob->ob_ref_shared) == 0); + return (_Py_IsOwnedByCurrentThread(op) && + _Py_atomic_load_uint32_relaxed(&op->ob_ref_local) == 3 && + _Py_atomic_load_ssize_relaxed(&op->ob_ref_shared) == 0); #endif } From 91e63a7d412024c86b8243baaac5b91b30bf03db Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Mon, 1 Dec 2025 13:47:58 +0100 Subject: [PATCH 027/111] scip: Fix 10.0.0 support --- ortools/linear_solver/proto_solver/scip_proto_solver.cc | 6 ++++++ ortools/math_opt/solvers/gscip/gscip.cc | 5 +++++ 2 files changed, 11 insertions(+) diff --git a/ortools/linear_solver/proto_solver/scip_proto_solver.cc b/ortools/linear_solver/proto_solver/scip_proto_solver.cc index f40a10d4749..d96d74755da 100644 --- a/ortools/linear_solver/proto_solver/scip_proto_solver.cc +++ b/ortools/linear_solver/proto_solver/scip_proto_solver.cc @@ -50,7 +50,13 @@ #include "scip/cons_indicator.h" #include "scip/cons_linear.h" #include "scip/cons_or.h" +#if SCIP_VERSION_MAJOR >= 10 +#include "scip/cons_nonlinear.h" +#define SCIPcreateConsBasicQuadratic SCIPcreateConsBasicQuadraticNonlinear +#define SCIPcreateConsQuadratic SCIPcreateConsQuadraticNonlinear +#else #include "scip/cons_quadratic.h" +#endif // SCIP_VERSION_MAJOR >= 10 #include "scip/cons_sos1.h" #include "scip/cons_sos2.h" #include "scip/def.h" diff --git a/ortools/math_opt/solvers/gscip/gscip.cc b/ortools/math_opt/solvers/gscip/gscip.cc index 872043d23aa..7bcac209d5f 100644 --- a/ortools/math_opt/solvers/gscip/gscip.cc +++ b/ortools/math_opt/solvers/gscip/gscip.cc @@ -47,7 +47,12 @@ #include "scip/cons_indicator.h" #include "scip/cons_linear.h" #include "scip/cons_or.h" +#if SCIP_VERSION_MAJOR >= 10 +#include "scip/cons_nonlinear.h" +#define SCIPcreateConsQuadratic SCIPcreateConsQuadraticNonlinear +#else #include "scip/cons_quadratic.h" +#endif // SCIP_VERSION_MAJOR >= 10 #include "scip/cons_sos1.h" 
#include "scip/cons_sos2.h" #include "scip/def.h" From 6555f4d2e4fe8ae1f609a77cd440c6940eb29cf1 Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Mon, 1 Dec 2025 14:56:45 +0100 Subject: [PATCH 028/111] cmake: migrate to scip 10.0.0 and soplex 8.0.0 --- Dependencies.txt | 3 +- cmake/dependencies/CMakeLists.txt | 8 +- .../{scip-v924.patch => scip-v10.0.0.patch} | 97 ++++--------------- ...oplex-v7.1.6.patch => soplex-v8.0.0.patch} | 96 ++++++++++-------- 4 files changed, 81 insertions(+), 123 deletions(-) rename patches/{scip-v924.patch => scip-v10.0.0.patch} (56%) rename patches/{soplex-v7.1.6.patch => soplex-v8.0.0.patch} (72%) diff --git a/Dependencies.txt b/Dependencies.txt index 30033bd6ff9..e8df0494300 100644 --- a/Dependencies.txt +++ b/Dependencies.txt @@ -10,7 +10,8 @@ Cgl=0.60.9 Cbc=2.10.12 GLPK=5.0 HiGHS=v1.12.0 -Scip=v924 +Scip=v10.0.0 +Soplex=v8.0.0 # Python pybind11=v2.13.6 pybind11_abseil=v202402.0 diff --git a/cmake/dependencies/CMakeLists.txt b/cmake/dependencies/CMakeLists.txt index 2e6c0639560..ca539e46bde 100644 --- a/cmake/dependencies/CMakeLists.txt +++ b/cmake/dependencies/CMakeLists.txt @@ -353,11 +353,11 @@ if(BUILD_soplex) FetchContent_Declare( soplex GIT_REPOSITORY "https://github.com/scipopt/soplex.git" - GIT_TAG "release-716" + GIT_TAG "v8.0.0" GIT_SHALLOW TRUE UPDATE_COMMAND git reset --hard PATCH_COMMAND git apply --ignore-whitespace - "${CMAKE_CURRENT_LIST_DIR}/../../patches/soplex-v7.1.6.patch" + "${CMAKE_CURRENT_LIST_DIR}/../../patches/soplex-v8.0.0.patch" ) set(SHARED ON CACHE BOOL "Soplex param" FORCE) set(ZLIB ON CACHE BOOL "Soplex param" FORCE) @@ -383,11 +383,11 @@ if(BUILD_SCIP) FetchContent_Declare( scip GIT_REPOSITORY "https://github.com/scipopt/scip.git" - GIT_TAG "v924" + GIT_TAG "v10.0.0" GIT_SHALLOW TRUE UPDATE_COMMAND git reset --hard PATCH_COMMAND git apply --ignore-whitespace - "${CMAKE_CURRENT_LIST_DIR}/../../patches/scip-v924.patch" + "${CMAKE_CURRENT_LIST_DIR}/../../patches/scip-v10.0.0.patch" ) set(SHARED ON 
CACHE BOOL "Scip param" FORCE) set(ZLIB ON CACHE BOOL "Scip param" FORCE) diff --git a/patches/scip-v924.patch b/patches/scip-v10.0.0.patch similarity index 56% rename from patches/scip-v924.patch rename to patches/scip-v10.0.0.patch index eb05ff90d1a..dbb15310156 100644 --- a/patches/scip-v924.patch +++ b/patches/scip-v10.0.0.patch @@ -1,8 +1,8 @@ diff --git a/CMakeLists.txt b/CMakeLists.txt -index 38917b8e..a8dff6e9 100644 +index b3c57799..eeeca4c8 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt -@@ -38,9 +38,11 @@ set(CPACK_PACKAGE_VENDOR "Zuse Institute Berlin") +@@ -52,9 +52,11 @@ set(CPACK_PACKAGE_VENDOR "Zuse Institute Berlin") set(CPACK_PACKAGE_CONTACT "http://scipopt.org") include(CPack) @@ -17,34 +17,12 @@ index 38917b8e..a8dff6e9 100644 if(SCIPOptSuite_BINARY_DIR) set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${SCIPOptSuite_BINARY_DIR}/bin) -@@ -239,7 +241,7 @@ if(DEBUGSOL) - endif() - - #set the correct rpath for OS X --set(CMAKE_MACOSX_RPATH ON) -+set(CMAKE_MACOSX_RPATH TRUE) - - #set defines for Windows - if(WIN32) -@@ -412,22 +414,11 @@ endif() - #search the selected LP solver library +@@ -517,10 +519,10 @@ endif() message(STATUS "Finding Solver \"${LPS}\"") if(LPS STREQUAL "spx") -- message(STATUS "Finding Soplex") -- find_package(SOPLEX CONFIG HINTS ${SOPLEX_DIR}) -- if(NOT SOPLEX_FOUND) -- # Utilities to automatically download missing dependencies -- include(cmake/Dependencies.cmake) -- find_or_download_package( -- NAME SOPLEX -- VERSION 7.0.1 -- URL https://github.com/scipopt/soplex/archive/refs/tags/release-701.tar.gz -- URL_HASH SHA256=80cce994dcbe45fd52b60e31a3aeb5d2c60a7ddbaae495e0ce6bf58481675696 -- COMPONENTS soplex) -- find_package(SOPLEX REQUIRED CONFIG HINTS _deps/local) -- endif() + message(STATUS "Finding Soplex") +- find_package(SOPLEX REQUIRED CONFIG HINTS ${SOPLEX_DIR}) - if(NOT SOPLEX_FOUND) -+ message(STATUS "Finding Soplex...") + if(NOT TARGET libsoplex-pic OR NOT TARGET libsoplex) message(FATAL_ERROR "Requested LP solver SoPlex not 
found.") endif() @@ -52,63 +30,26 @@ index 38917b8e..a8dff6e9 100644 if (DEFINED SOPLEX_WITH_PAPILO) message(STATUS "SOPLEX links PAPILO") if((NOT SCIP_WITH_PAPILO)) # TODO not sure how to handle AUTOBUILD -@@ -502,9 +493,9 @@ if(SOPLEX_FOUND) +@@ -593,11 +595,11 @@ if(SOPLEX_FOUND) message(STATUS "Finding SOPLEX - found") # SoPlex headers can be directly included include_directories(${SOPLEX_INCLUDE_DIRS}) - set(LPS_LIBRARIES ${SOPLEX_LIBRARIES}) +- set(LPS_PIC_LIBRARIES ${SOPLEX_PIC_LIBRARIES}) + set(LPS_LIBRARIES libsoplex) - if(SHARED) -- set(LPS_PIC_LIBRARIES ${SOPLEX_PIC_LIBRARIES}) -+ set(LPS_PIC_LIBRARIES libsoplex-pic) - else() - set(LPS_PIC_LIBRARIES ${LPS_LIBRARIES}) - endif() -@@ -514,7 +505,7 @@ if(SOPLEX_FOUND) - set(lpi lpi/lpi_spx2.cpp) - endif() ++ set(LPS_PIC_LIBRARIES libsoplex-pic) + set(lpi lpi/lpi_spx.cpp) else() - message(STATUS "Support SOPLEX: OFF") + message(FATAL_ERROR "SOPLEX not found !") endif() if(CLP_FOUND) -diff --git a/scip-config.cmake.in b/scip-config.cmake.in -index 559552f9..682ac40a 100644 ---- a/scip-config.cmake.in -+++ b/scip-config.cmake.in -@@ -1,17 +1,16 @@ - if(NOT TARGET libscip) -- include("${CMAKE_CURRENT_LIST_DIR}/scip-targets.cmake") --endif() -+ include(CMakeFindDependencyMacro) -+ if(@ZIMPL_NEEDED@) -+ find_dependency(ZIMPL REQUIRED NO_MODULE) -+ endif() -+ if(@SOPLEX_NEEDED@) -+ find_dependency(SOPLEX REQUIRED NO_MODULE) -+ endif() - --if(@ZIMPL_NEEDED@) -- set(ZIMPL_DIR "@CONF_ZIMPL_DIR@") -- find_package(ZIMPL QUIET CONFIG) --endif() -- --if(@SOPLEX_NEEDED@) -- set(SOPLEX_DIR "@CONF_SOPLEX_DIR@") -- find_package(SOPLEX QUIET CONFIG) -+ include("${CMAKE_CURRENT_LIST_DIR}/scip-targets.cmake") - endif() - -+# Legacy - set(SCIP_LIBRARIES libscip) - set(SCIP_INCLUDE_DIRS "@CONF_INCLUDE_DIRS@") - set(SCIP_FOUND TRUE) diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt -index be3760c4..b764f0b4 100644 +index c6ce7283..6b6b1fc8 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt -@@ -1115,6 +1115,13 @@ 
target_link_libraries(scip +@@ -1213,6 +1213,13 @@ target_link_libraries(scip add_dependencies(libscip scip_update_githash) add_dependencies(scip scip_update_githash) @@ -120,10 +61,10 @@ index be3760c4..b764f0b4 100644 + INSTALL_RPATH "$ORIGIN") +endif() set_target_properties(libscip PROPERTIES - VERSION ${SCIP_VERSION_MAJOR}.${SCIP_VERSION_MINOR}.${SCIP_VERSION_PATCH}.${SCIP_VERSION_SUB} + VERSION ${SCIP_VERSION_MAJOR}.${SCIP_VERSION_MINOR}.${SCIP_VERSION_PATCH} SOVERSION ${SCIP_VERSION_MAJOR}.${SCIP_VERSION_MINOR} -@@ -1153,17 +1160,8 @@ install(TARGETS scip libscip EXPORT scip-targets - INCLUDES DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) +@@ -1265,17 +1272,8 @@ install(FILES ${PROJECT_SOURCE_DIR}/LICENSE DESTINATION ${CMAKE_INSTALL_DATADIR} + install(FILES ${PROJECT_SOURCE_DIR}/src/tclique/LICENSE DESTINATION ${CMAKE_INSTALL_DATADIR}/licenses/scip/tclique) # Add all targets to the build-tree export set -export(TARGETS scip libscip @@ -140,11 +81,11 @@ index be3760c4..b764f0b4 100644 +#export(TARGETS scip libscip +# FILE "${PROJECT_BINARY_DIR}/scip-targets.cmake") - # configure the config file for the build tree - set(CONF_INCLUDE_DIRS "${PROJECT_SOURCE_DIR}/src" "${PROJECT_BINARY_DIR}") -@@ -1179,18 +1177,16 @@ ${PROJECT_BINARY_DIR}/scip-config-version.cmake - - #configure the config file for the install + # Configure scip-config.cmake for the build-tree: + # We add the CMake module path from the sources. +@@ -1295,18 +1293,16 @@ ${PROJECT_BINARY_DIR}/scip-config-version.cmake + # We add the current directory of the installed scip-config.cmake as a CMake module path. 
+ set(EXTRA_CMAKE_MODULE_PATH "\${CMAKE_CURRENT_LIST_DIR}") set(CONF_INCLUDE_DIRS "\${CMAKE_CURRENT_LIST_DIR}/../../../include") -if(SOPLEX_NEEDED) - set(CONF_SOPLEX_DIR "\${CMAKE_CURRENT_LIST_DIR}/../soplex") diff --git a/patches/soplex-v7.1.6.patch b/patches/soplex-v8.0.0.patch similarity index 72% rename from patches/soplex-v7.1.6.patch rename to patches/soplex-v8.0.0.patch index 7e4c3839f72..f7f4410a30c 100644 --- a/patches/soplex-v7.1.6.patch +++ b/patches/soplex-v8.0.0.patch @@ -1,8 +1,8 @@ diff --git a/CMakeLists.txt b/CMakeLists.txt -index 96aefab..760b0f8 100644 +index 9511442..58a8a58 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt -@@ -27,6 +27,10 @@ set(CPACK_PACKAGE_VERSION_PATCH "${SOPLEX_VERSION_PATCH}") +@@ -31,6 +31,10 @@ set(CPACK_PACKAGE_VERSION_PATCH "${SOPLEX_VERSION_PATCH}") set(CPACK_PACKAGE_VENDOR "Zuse Institute Berlin") include(CPack) @@ -13,7 +13,7 @@ index 96aefab..760b0f8 100644 option(ZLIB "Use ZLIB" ON) option(GMP "Use GMP" ON) option(EMSCRIPTEN_HTML "Emscripten HTML output" OFF) -@@ -43,11 +47,17 @@ option(SANITIZE_THREAD "should the thread sanitizer be enabled in debug mode if +@@ -44,11 +48,17 @@ option(SANITIZE "should sanitizers be enabled if available" OFF) option(COVERAGE "enable coverage support" OFF) option(PAPILO "should papilo library be linked" ON) @@ -34,12 +34,7 @@ index 96aefab..760b0f8 100644 # for colorized output if(NOT WIN32) -@@ -65,10 +75,12 @@ if(NOT CMAKE_BUILD_TYPE) - endif() - - # set the correct rpath for OS X --set(CMAKE_MACOSX_RPATH ON) -+set(CMAKE_MACOSX_RPATH TRUE) +@@ -70,6 +80,8 @@ set(CMAKE_MACOSX_RPATH ON) # use C++14 standard set(CMAKE_CXX_STANDARD 14) @@ -48,7 +43,7 @@ index 96aefab..760b0f8 100644 # set function visibility default to hidden set(CMAKE_CXX_VISIBILITY_PRESET hidden) -@@ -123,12 +135,11 @@ if(COVERAGE) +@@ -124,12 +136,11 @@ if(COVERAGE) endif() if(ZLIB) @@ -66,7 +61,7 @@ index 96aefab..760b0f8 100644 endif() if(GMP) -@@ -170,39 +181,24 @@ else() +@@ -171,42 +182,24 @@ else() 
set(SOPLEX_WITH_PAPILO off) endif() @@ -76,7 +71,6 @@ index 96aefab..760b0f8 100644 - find_package(Boost ${BOOST_MINIMUM_VERSION}) # PaPILO requires at least Boost 1.65 (on mac 1.72) - if(Boost_FOUND) - set(SOPLEX_WITH_BOOST on) -- include_directories(SYSTEM ${Boost_INCLUDE_DIRS}) - if(NOT Boost_VERSION_MACRO) - set(Boost_VERSION_MACRO ${Boost_VERSION}) - endif() @@ -89,11 +83,15 @@ index 96aefab..760b0f8 100644 - Found Boost version is ${Boost_VERSION_STRING}.") - endif() - endif() +- if(Boost_DIR) +- message(STATUS "Found Boost: ${Boost_DIR}") +- endif() +- set(libs ${libs} Boost::boost) - if(MPFR) # MPFR is used within boost multiprecision, so using it without Boost does not make sense - find_package(MPFR) - endif() - if(MPFR_FOUND) -- message(STATUS "SoPlex with Boost MPFR libraries") +- message(STATUS "SoPlex with Boost MPFR ${MPFR_VERSION} libraries") - set(SOPLEX_WITH_MPFR on) - include_directories(${MPFR_INCLUDE_DIRS}) - set(libs ${libs} ${MPFR_LIBRARIES}) @@ -123,7 +121,7 @@ index 96aefab..760b0f8 100644 endif() # disable fused floating point contraction to enhance reproducibility across compilers and architectures -@@ -244,7 +240,7 @@ configure_file(${CMAKE_CURRENT_SOURCE_DIR}/src/soplex/config.h.in ${PROJECT_BINA +@@ -251,7 +244,7 @@ set(EXTRA_CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake/Modules) configure_file(${PROJECT_SOURCE_DIR}/soplex-config.cmake.in "${PROJECT_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/soplex-config.cmake" @ONLY) add_subdirectory(src) @@ -136,18 +134,18 @@ index 96aefab..760b0f8 100644 + add_subdirectory(check) +endif() diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt -index 16ffb17..b5a0b56 100644 +index d57c5fe..11f914a 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt -@@ -193,25 +193,27 @@ target_link_libraries(libsoplexshared libsoplex ${libs}) +@@ -193,28 +193,32 @@ target_link_libraries(libsoplexshared libsoplex ${libs}) set_target_properties(libsoplexshared PROPERTIES CXX_VISIBILITY_PRESET default) # create soplex 
binary using library without pic -add_executable(soplex soplexmain.cpp) --target_link_libraries(soplex LINK_PUBLIC libsoplex ${Boost_LIBRARIES}) +-target_link_libraries(soplex LINK_PUBLIC libsoplex) +if(SOPLEX_SOPLEX) -+ add_executable(soplex EXCLUDE_FROM_ALL soplexmain.cpp) -+ target_link_libraries(soplex PRIVATE libsoplex ${Boost_LIBRARIES}) ++ add_executable(soplex soplexmain.cpp) ++ target_link_libraries(soplex LINK_PUBLIC libsoplex) -if(EMSCRIPTEN AND EMSCRIPTEN_HTML) + if(EMSCRIPTEN AND EMSCRIPTEN_HTML) @@ -159,40 +157,58 @@ index 16ffb17..b5a0b56 100644 + # set the install rpath to the installed destination + set_target_properties(soplex PROPERTIES INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}") --if(CMAKE_BUILD_TYPE EQUAL "Debug") -- find_package(Sanitizers) -- add_sanitizers(soplex) -+ if(CMAKE_BUILD_TYPE EQUAL "Debug") -+ find_package(Sanitizers) -+ add_sanitizers(soplex) -+ endif() - endif() - - add_executable(example EXCLUDE_FROM_ALL example.cpp) - target_link_libraries(example libsoplex) +-if(SANITIZE) ++ if(SANITIZE) + find_package(Sanitizers) + add_sanitizers(libsoplex) + add_sanitizers(libsoplex-pic) + add_sanitizers(libsoplexshared) + add_sanitizers(soplex) + get_target_property(CONF_SANITIZE_FLAGS libsoplex SANITIZE_FLAGS) +-endif(SANITIZE) +- +-add_executable(example EXCLUDE_FROM_ALL example.cpp) +-target_link_libraries(example libsoplex) ++ endif(SANITIZE) ++endif() -# set the install rpath to the installed destination -set_target_properties(soplex PROPERTIES INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}") -- ++if(SOPLEX_EXAMPLE) ++ add_executable(example EXCLUDE_FROM_ALL example.cpp) ++ target_link_libraries(example libsoplex) ++endif() + # install the header files of soplex install(FILES ${headers} ${PROJECT_BINARY_DIR}/soplex/config.h DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/soplex) - install(FILES soplex.h soplex.hpp soplex_interface.h DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) -@@ -237,15 +239,23 @@ 
install(FILES - DESTINATION include/soplex/external/zstr) +@@ -243,14 +247,23 @@ install(FILES + install(FILES ${PROJECT_SOURCE_DIR}/src/soplex/external/zstr/License.txt DESTINATION ${CMAKE_INSTALL_DATADIR}/licenses/soplex/zstr) # install the binary and the library to appropriate lcoations and add them to an export group -install(TARGETS soplex libsoplex libsoplex-pic libsoplexshared EXPORT soplex-targets +if(SOPLEX_SOPLEX) -+ install(TARGETS soplex -+ EXPORT soplex-targets ++ install(TARGETS soplex EXPORT soplex-targets + RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) ++ ++ if(MSVC) ++ install(FILES $ DESTINATION ${CMAKE_INSTALL_BINDIR} OPTIONAL) ++ endif() +endif() + +install(TARGETS libsoplex libsoplex-pic libsoplexshared EXPORT soplex-targets - LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} - ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} - RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} - INCLUDES DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) + LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} + ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} + RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} + INCLUDES DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) + + if(MSVC) +- install(FILES $ $ DESTINATION ${CMAKE_INSTALL_BINDIR} OPTIONAL) ++ install(FILES $ DESTINATION ${CMAKE_INSTALL_BINDIR} OPTIONAL) + endif() + + # install license files of soplex and fmt +@@ -258,8 +271,10 @@ install(FILES ${PROJECT_SOURCE_DIR}/LICENSE DESTINATION ${CMAKE_INSTALL_DATADIR} + install(FILES ${PROJECT_SOURCE_DIR}/src/soplex/external/fmt/LICENSE.rst DESTINATION ${CMAKE_INSTALL_DATADIR}/licenses/soplex/fmt) # Add library targets to the build-tree export set -export(TARGETS libsoplex libsoplex-pic libsoplexshared From b880e0fb6447ce68bdfad7c83792d20064b1f47e Mon Sep 17 00:00:00 2001 From: Guillaume Chatelet Date: Mon, 1 Dec 2025 10:22:54 +0100 Subject: [PATCH 029/111] Use new bintest framework (#4928) --- .gitignore | 3 +- bazel/BUILD.bazel | 7 - bazel/run_binary_test.bzl | 106 --- cmake/cpp.cmake | 120 +++ cmake/python.cmake | 
41 + examples/cpp/BUILD.bazel | 301 ++++--- examples/cpp/CMakeBazel.txt | 425 ++++++++++ examples/cpp/CMakeLists.txt | 49 +- ...king_2d_sat_class01_instance2_test.bintest | 1 + examples/cpp/cgc.cc | 573 +++++++++++++ examples/cpp/cgc.h | 91 +++ examples/cpp/cgc_data.h | 70 ++ examples/cpp/cgc_main.cc | 81 ++ examples/cpp/cgc_test_solution.bintest | 24 + .../constraint_programming_cp_test.bintest | 1 + .../cpp/costas_array_sat_model1_test.bintest | 1 + .../cpp/costas_array_sat_model2_test.bintest | 1 + .../cpp/costas_array_sat_model3_test.bintest | 1 + examples/cpp/course_scheduling.proto | 188 ----- examples/cpp/cryptarithm_sat_test.bintest | 1 + examples/cpp/dimacs_assignment.cc | 15 +- .../dimacs_assignment_max_cost_test.bintest | 2 + .../dimacs_assignment_min_cost_test.bintest | 2 + examples/cpp/dobble_ls_test.bintest | 1 + examples/cpp/fap_parser.cc | 19 +- examples/cpp/fap_parser.h | 3 +- examples/cpp/flow_api_test.bintest | 1 + examples/cpp/golomb_sat_test.bintest | 1 + examples/cpp/integer_programming_test.bintest | 1 + examples/cpp/jobshop_sat_ft06.bintest | 1 + ...sack_2d_sat_class01_instance2_test.bintest | 1 + .../cpp/linear_assignment_api_test.bintest | 1 + examples/cpp/linear_programming_test.bintest | 1 + ...inear_solver_protocol_buffers_test.bintest | 1 + examples/cpp/magic_sequence_sat_test.bintest | 1 + examples/cpp/magic_square_sat_test.bintest | 1 + examples/cpp/max_flow_test.bintest | 1 + examples/cpp/min_cost_flow_test.bintest | 1 + examples/cpp/mps_driver_test.bintest | 1 + examples/cpp/multi_knapsack_sat_test.bintest | 1 + examples/cpp/network_routing_sat_test.bintest | 1 + examples/cpp/nqueens_test.bintest | 1 + .../pdptw_non_homogenous_fleet_test.bintest | 2 + examples/cpp/pdptw_test.bintest | 2 + examples/cpp/pdptw_with_alternatives.cc | 377 +++++++++ ...rnatives_non_homogenous_fleet_test.bintest | 2 + .../cpp/pdptw_with_alternatives_test.bintest | 2 + .../cpp/shift_minimization_sat_test.bintest | 1 + 
examples/cpp/slitherlink_sat_test.bintest | 1 + .../cpp/sports_scheduling_sat_test.bintest | 1 + ...fields_with_column_generation_test.bintest | 1 + examples/cpp/testdata/cgc/1.in | 6 + examples/cpp/testdata/cgc/2.in | 6 + examples/cpp/testdata/cgc/3.in | 3 + examples/cpp/testdata/cgc/cgcut1.in | 9 + examples/cpp/testdata/cgc/cgcut2.in | 12 + examples/cpp/testdata/cgc/cgcut3.in | 22 + examples/cpp/testdata/dimacs_example.txt | 25 + examples/cpp/testdata/lc102.txt | 108 +++ examples/cpp/testdata/shift_minimization.dat | 69 ++ examples/cpp/testdata/wt40.txt | 751 +++++++++++++++++ .../cpp/variable_intervals_sat_test.bintest | 1 + .../cpp/weighted_tardiness_sat_test.bintest | 1 + examples/python/BUILD.bazel | 217 ++--- examples/python/CMakeBazel.txt | 434 ++++++++++ examples/python/CMakeLists.txt | 12 +- examples/python/appointments_py_test.bintest | 1 + ...nment_with_constraints_sat_py_test.bintest | 1 + .../python/balance_group_sat_py_test.bintest | 1 + .../bus_driver_scheduling_sat_py_test.bintest | 1 + ...equencing_optimization_sat_py_test.bintest | 1 + .../chemical_balance_sat_py_test.bintest | 1 + .../python/clustering_sat_py_test.bintest | 1 + .../cover_rectangle_sat_py_test.bintest | 1 + .../python/cryptarithm_sat_py_test.bintest | 1 + examples/python/cvrptw_plot.py | 753 ------------------ .../flexible_job_shop_sat_py_test.bintest | 1 + examples/python/gate_scheduling_sat.py | 3 +- .../gate_scheduling_sat_py_test.bintest | 1 + examples/python/golomb8_py_test.bintest | 1 + examples/python/golomb_sat_py_test.bintest | 1 + examples/python/hidato_sat_py_test.bintest | 1 + examples/python/horse_jumping_show.py | 297 +++++++ .../python/horse_jumping_show_py_test.bintest | 2 + .../integer_programming_py_test.bintest | 1 + .../jobshop_ft06_distance_sat_py_test.bintest | 1 + .../python/jobshop_ft06_sat_py_test.bintest | 1 + ...bshop_with_maintenance_sat_py_test.bintest | 1 + .../python/knapsack_2d_sat_py_test.bintest | 1 + ...e_balancing_sat_salbp_20_1_py_test.bintest 
| 2 + .../linear_assignment_api_py_test.bintest | 1 + .../python/linear_programming_py_test.bintest | 1 + .../magic_sequence_distribute_py_test.bintest | 1 + ...quence_distribute_with_arg_py_test.bintest | 1 + .../maximize_combinations_sat_py_test.bintest | 1 + .../python/maze_escape_sat_py_test.bintest | 1 + ...yout_and_infeasibility_sat_py_test.bintest | 1 + .../python/music_playlist_sat_py_test.bintest | 1 + ...wait_baking_scheduling_sat_py_test.bintest | 1 + examples/python/nqueens_sat_py_test.bintest | 1 + .../python/pell_equation_sat_py_test.bintest | 1 + .../python/pentominoes_sat_py_test.bintest | 1 + .../prize_collecting_tsp_sat_py_test.bintest | 1 + .../prize_collecting_vrp_sat_py_test.bintest | 1 + .../python/pyflow_example_py_test.bintest | 1 + examples/python/qubo_sat_py_test.bintest | 1 + .../python/rcpsp_sat_c1510_1_py_test.bintest | 2 + .../python/rcpsp_sat_j301_1_py_test.bintest | 2 + .../python/rcpsp_sat_rip1_py_test.bintest | 2 + ...cpsp_sat_testset_mm30_psp3_py_test.bintest | 1 + .../rcpsp_sat_ubo_10_psp2_py_test.bintest | 2 + .../shift_scheduling_sat_py_test.bintest | 1 + ...etup_release_due_dates_sat_py_test.bintest | 1 + examples/python/spillover_sat.py | 360 +++++++++ .../python/spillover_sat_test_py_test.bintest | 2 + .../python/spread_robots_sat_py_test.bintest | 1 + .../steel_mill_slab_sat_py_test.bintest | 1 + examples/python/sudoku_sat_py_test.bintest | 1 + .../task_allocation_sat_py_test.bintest | 1 + ...and_workers_assignment_sat_py_test.bintest | 1 + .../test_scheduling_sat_py_test.bintest | 1 + examples/python/tsp_norandom_py_test.bintest | 1 + examples/python/tsp_py_test.bintest | 1 + examples/python/tsp_sat_py_test.bintest | 1 + .../vendor_scheduling_sat_py_test.bintest | 1 + .../wedding_optimal_chart_sat_py_test.bintest | 1 + ...ighted_latency_problem_sat_py_test.bintest | 1 + examples/python/zebra_sat_py_test.bintest | 1 + ortools/algorithms/samples/BUILD.bazel | 7 +- .../algorithms/samples/KnapsackTest.bintest | 1 + 
ortools/constraint_solver/samples/BUILD.bazel | 402 ++++++++++ ortools/glop/samples/BUILD.bazel | 7 +- .../simple_glop_program_cc_test.bintest | 1 + .../AssignmentLinearSumAssignmentTest.bintest | 1 + .../samples/AssignmentMinFlowTest.bintest | 1 + ortools/graph/samples/BUILD.bazel | 170 ++-- .../graph/samples/BalanceMinFlowTest.bintest | 1 + .../samples/SimpleMaxFlowProgramTest.bintest | 1 + .../SimpleMinCostFlowProgramTest.bintest | 1 + .../graph/samples/bfs_directed_test.bintest | 2 + .../graph/samples/bfs_one_to_all_test.bintest | 2 + .../graph/samples/bfs_undirected_test.bintest | 2 + ...ined_shortest_path_sequential_test.bintest | 5 + ...ple_shortest_paths_one_to_all_test.bintest | 3 + ...ple_shortest_paths_sequential_test.bintest | 11 + .../dag_shortest_path_one_to_all_test.bintest | 2 + .../dag_shortest_path_sequential_test.bintest | 5 + ...ple_constrained_shortest_path_test.bintest | 2 + ...imple_multiple_shortest_paths_test.bintest | 3 + .../dag_simple_shortest_path_test.bintest | 2 + ...stra_all_pairs_shortest_paths_test.bintest | 1 + .../samples/dijkstra_directed_test.bintest | 2 + .../samples/dijkstra_one_to_all_test.bintest | 4 + .../samples/dijkstra_sequential_test.bintest | 5 + .../samples/dijkstra_undirected_test.bintest | 2 + .../graph/samples/root_a_tree_test.bintest | 2 + .../samples/rooted_tree_paths_test.bintest | 2 + .../samples/AssignmentMbTest.bintest | 1 + ortools/linear_solver/samples/BUILD.bazel | 27 +- .../samples/BinPackingMbTest.bintest | 1 + .../samples/CloneModelMbTest.bintest | 1 + .../samples/SimpleLpProgramMbTest.bintest | 1 + .../samples/SimpleMipProgramMbTest.bintest | 1 + ortools/math_opt/core/c_api/BUILD.bazel | 11 +- .../core/c_api/cpp_example_test.bintest | 3 + ortools/routing/samples/BUILD.bazel | 133 ++++ .../samples/cvrp_disjoint_tw_test.bintest | 1 + ortools/routing/samples/cvrptw_test.bintest | 1 + .../cvrptw_with_precedences_test.bintest | 1 + .../cvrptw_with_refueling_test.bintest | 1 + 
.../cvrptw_with_resources_test.bintest | 1 + ...with_stop_times_and_resources_test.bintest | 1 + .../samples/AssignmentGroupsSatTest.bintest | 1 + ortools/sat/samples/AssignmentSatTest.bintest | 1 + .../AssignmentTaskSizesSatTest.bintest | 1 + .../samples/AssignmentTeamsSatTest.bintest | 1 + .../samples/AssumptionsSampleSatTest.bintest | 1 + ortools/sat/samples/BUILD.bazel | 162 ++-- .../samples/BinPackingProblemSatTest.bintest | 1 + .../sat/samples/BoolOrSampleSatTest.bintest | 1 + .../samples/ChannelingSampleSatTest.bintest | 1 + .../samples/CloneModelSampleSatTest.bintest | 1 + ortools/sat/samples/CpIsFunSatTest.bintest | 1 + ortools/sat/samples/CpSatExampleTest.bintest | 1 + ...arlinessTardinessCostSampleSatTest.bintest | 1 + .../sat/samples/IntervalSampleSatTest.bintest | 1 + .../sat/samples/LiteralSampleSatTest.bintest | 1 + .../sat/samples/MinimalJobshopSatTest.bintest | 1 + .../samples/MultipleKnapsackSatTest.bintest | 1 + ortools/sat/samples/NQueensSatTest.bintest | 1 + .../samples/NoOverlapSampleSatTest.bintest | 1 + ortools/sat/samples/NonLinearSatTest.bintest | 1 + ortools/sat/samples/NursesSatTest.bintest | 1 + .../OptionalIntervalSampleSatTest.bintest | 1 + .../RabbitsAndPheasantsSatTest.bintest | 1 + .../sat/samples/RankingSampleSatTest.bintest | 1 + .../sat/samples/ReifiedSampleSatTest.bintest | 1 + .../samples/ScheduleRequestsSatTest.bintest | 1 + ...SearchForAllSolutionsSampleSatTest.bintest | 1 + .../sat/samples/SimpleSatProgramTest.bintest | 1 + .../SolutionHintingSampleSatTest.bintest | 1 + ...IntermediateSolutionsSampleSatTest.bintest | 1 + .../SolveWithTimeLimitSampleSatTest.bintest | 1 + .../samples/StepFunctionSampleSatTest.bintest | 1 + .../StopAfterNSolutionsSampleSatTest.bintest | 1 + ortools/set_cover/samples/BUILD.bazel | 12 +- .../samples/set_cover_cc_test.bintest | 1 + .../samples/set_cover_py_test.bintest | 1 + tools/build/BUILD.bazel | 26 + tools/build/bazel2cmake.py | 273 +++++++ tools/testing/BUILD.bazel | 134 ++++ 
tools/testing/README.md | 230 ++++++ tools/testing/binary_test.py | 131 +++ tools/testing/bintest.bzl | 85 ++ tools/testing/bintest_matchers.py | 222 ++++++ tools/testing/bintest_matchers_test.py | 136 ++++ tools/testing/bintest_run_utils.py | 92 +++ tools/testing/bintest_run_utils_test.py | 117 +++ .../testing/bintest_script_launcher.py | 14 +- tools/testing/bintest_script_runner.py | 108 +++ tools/testing/bintest_script_runner_test.py | 111 +++ tools/testing/echo.cc | 20 + tools/testing/fail.cc | 16 + tools/testing/print_args.bintest | 36 + tools/testing/print_args.cc | 63 ++ tools/testing/print_args_data.txt | 1 + tools/testing/print_args_test.py | 80 ++ 227 files changed, 7091 insertions(+), 1533 deletions(-) delete mode 100644 bazel/run_binary_test.bzl create mode 100644 examples/cpp/CMakeBazel.txt create mode 100644 examples/cpp/binpacking_2d_sat_class01_instance2_test.bintest create mode 100644 examples/cpp/cgc.cc create mode 100644 examples/cpp/cgc.h create mode 100644 examples/cpp/cgc_data.h create mode 100644 examples/cpp/cgc_main.cc create mode 100644 examples/cpp/cgc_test_solution.bintest create mode 100644 examples/cpp/constraint_programming_cp_test.bintest create mode 100644 examples/cpp/costas_array_sat_model1_test.bintest create mode 100644 examples/cpp/costas_array_sat_model2_test.bintest create mode 100644 examples/cpp/costas_array_sat_model3_test.bintest delete mode 100644 examples/cpp/course_scheduling.proto create mode 100644 examples/cpp/cryptarithm_sat_test.bintest create mode 100644 examples/cpp/dimacs_assignment_max_cost_test.bintest create mode 100644 examples/cpp/dimacs_assignment_min_cost_test.bintest create mode 100644 examples/cpp/dobble_ls_test.bintest create mode 100644 examples/cpp/flow_api_test.bintest create mode 100644 examples/cpp/golomb_sat_test.bintest create mode 100644 examples/cpp/integer_programming_test.bintest create mode 100644 examples/cpp/jobshop_sat_ft06.bintest create mode 100644 
examples/cpp/knapsack_2d_sat_class01_instance2_test.bintest create mode 100644 examples/cpp/linear_assignment_api_test.bintest create mode 100644 examples/cpp/linear_programming_test.bintest create mode 100644 examples/cpp/linear_solver_protocol_buffers_test.bintest create mode 100644 examples/cpp/magic_sequence_sat_test.bintest create mode 100644 examples/cpp/magic_square_sat_test.bintest create mode 100644 examples/cpp/max_flow_test.bintest create mode 100644 examples/cpp/min_cost_flow_test.bintest create mode 100644 examples/cpp/mps_driver_test.bintest create mode 100644 examples/cpp/multi_knapsack_sat_test.bintest create mode 100644 examples/cpp/network_routing_sat_test.bintest create mode 100644 examples/cpp/nqueens_test.bintest create mode 100644 examples/cpp/pdptw_non_homogenous_fleet_test.bintest create mode 100644 examples/cpp/pdptw_test.bintest create mode 100644 examples/cpp/pdptw_with_alternatives.cc create mode 100644 examples/cpp/pdptw_with_alternatives_non_homogenous_fleet_test.bintest create mode 100644 examples/cpp/pdptw_with_alternatives_test.bintest create mode 100644 examples/cpp/shift_minimization_sat_test.bintest create mode 100644 examples/cpp/slitherlink_sat_test.bintest create mode 100644 examples/cpp/sports_scheduling_sat_test.bintest create mode 100644 examples/cpp/strawberry_fields_with_column_generation_test.bintest create mode 100644 examples/cpp/testdata/cgc/1.in create mode 100644 examples/cpp/testdata/cgc/2.in create mode 100644 examples/cpp/testdata/cgc/3.in create mode 100644 examples/cpp/testdata/cgc/cgcut1.in create mode 100644 examples/cpp/testdata/cgc/cgcut2.in create mode 100644 examples/cpp/testdata/cgc/cgcut3.in create mode 100644 examples/cpp/testdata/dimacs_example.txt create mode 100644 examples/cpp/testdata/lc102.txt create mode 100644 examples/cpp/testdata/shift_minimization.dat create mode 100644 examples/cpp/testdata/wt40.txt create mode 100644 examples/cpp/variable_intervals_sat_test.bintest create mode 100644 
examples/cpp/weighted_tardiness_sat_test.bintest create mode 100644 examples/python/CMakeBazel.txt create mode 100644 examples/python/appointments_py_test.bintest create mode 100644 examples/python/assignment_with_constraints_sat_py_test.bintest create mode 100644 examples/python/balance_group_sat_py_test.bintest create mode 100644 examples/python/bus_driver_scheduling_sat_py_test.bintest create mode 100644 examples/python/car_sequencing_optimization_sat_py_test.bintest create mode 100644 examples/python/chemical_balance_sat_py_test.bintest create mode 100644 examples/python/clustering_sat_py_test.bintest create mode 100644 examples/python/cover_rectangle_sat_py_test.bintest create mode 100644 examples/python/cryptarithm_sat_py_test.bintest delete mode 100644 examples/python/cvrptw_plot.py create mode 100644 examples/python/flexible_job_shop_sat_py_test.bintest create mode 100644 examples/python/gate_scheduling_sat_py_test.bintest create mode 100644 examples/python/golomb8_py_test.bintest create mode 100644 examples/python/golomb_sat_py_test.bintest create mode 100644 examples/python/hidato_sat_py_test.bintest create mode 100644 examples/python/horse_jumping_show.py create mode 100644 examples/python/horse_jumping_show_py_test.bintest create mode 100644 examples/python/integer_programming_py_test.bintest create mode 100644 examples/python/jobshop_ft06_distance_sat_py_test.bintest create mode 100644 examples/python/jobshop_ft06_sat_py_test.bintest create mode 100644 examples/python/jobshop_with_maintenance_sat_py_test.bintest create mode 100644 examples/python/knapsack_2d_sat_py_test.bintest create mode 100644 examples/python/line_balancing_sat_salbp_20_1_py_test.bintest create mode 100644 examples/python/linear_assignment_api_py_test.bintest create mode 100644 examples/python/linear_programming_py_test.bintest create mode 100644 examples/python/magic_sequence_distribute_py_test.bintest create mode 100644 
examples/python/magic_sequence_distribute_with_arg_py_test.bintest create mode 100644 examples/python/maximize_combinations_sat_py_test.bintest create mode 100644 examples/python/maze_escape_sat_py_test.bintest create mode 100644 examples/python/memory_layout_and_infeasibility_sat_py_test.bintest create mode 100644 examples/python/music_playlist_sat_py_test.bintest create mode 100644 examples/python/no_wait_baking_scheduling_sat_py_test.bintest create mode 100644 examples/python/nqueens_sat_py_test.bintest create mode 100644 examples/python/pell_equation_sat_py_test.bintest create mode 100644 examples/python/pentominoes_sat_py_test.bintest create mode 100644 examples/python/prize_collecting_tsp_sat_py_test.bintest create mode 100644 examples/python/prize_collecting_vrp_sat_py_test.bintest create mode 100644 examples/python/pyflow_example_py_test.bintest create mode 100644 examples/python/qubo_sat_py_test.bintest create mode 100644 examples/python/rcpsp_sat_c1510_1_py_test.bintest create mode 100644 examples/python/rcpsp_sat_j301_1_py_test.bintest create mode 100644 examples/python/rcpsp_sat_rip1_py_test.bintest create mode 100644 examples/python/rcpsp_sat_testset_mm30_psp3_py_test.bintest create mode 100644 examples/python/rcpsp_sat_ubo_10_psp2_py_test.bintest create mode 100644 examples/python/shift_scheduling_sat_py_test.bintest create mode 100644 examples/python/single_machine_scheduling_with_setup_release_due_dates_sat_py_test.bintest create mode 100644 examples/python/spillover_sat.py create mode 100644 examples/python/spillover_sat_test_py_test.bintest create mode 100644 examples/python/spread_robots_sat_py_test.bintest create mode 100644 examples/python/steel_mill_slab_sat_py_test.bintest create mode 100644 examples/python/sudoku_sat_py_test.bintest create mode 100644 examples/python/task_allocation_sat_py_test.bintest create mode 100644 examples/python/tasks_and_workers_assignment_sat_py_test.bintest create mode 100644 
examples/python/test_scheduling_sat_py_test.bintest create mode 100644 examples/python/tsp_norandom_py_test.bintest create mode 100644 examples/python/tsp_py_test.bintest create mode 100644 examples/python/tsp_sat_py_test.bintest create mode 100644 examples/python/vendor_scheduling_sat_py_test.bintest create mode 100644 examples/python/wedding_optimal_chart_sat_py_test.bintest create mode 100644 examples/python/weighted_latency_problem_sat_py_test.bintest create mode 100644 examples/python/zebra_sat_py_test.bintest create mode 100644 ortools/algorithms/samples/KnapsackTest.bintest create mode 100644 ortools/glop/samples/simple_glop_program_cc_test.bintest create mode 100644 ortools/graph/samples/AssignmentLinearSumAssignmentTest.bintest create mode 100644 ortools/graph/samples/AssignmentMinFlowTest.bintest create mode 100644 ortools/graph/samples/BalanceMinFlowTest.bintest create mode 100644 ortools/graph/samples/SimpleMaxFlowProgramTest.bintest create mode 100644 ortools/graph/samples/SimpleMinCostFlowProgramTest.bintest create mode 100644 ortools/graph/samples/bfs_directed_test.bintest create mode 100644 ortools/graph/samples/bfs_one_to_all_test.bintest create mode 100644 ortools/graph/samples/bfs_undirected_test.bintest create mode 100644 ortools/graph/samples/dag_constrained_shortest_path_sequential_test.bintest create mode 100644 ortools/graph/samples/dag_multiple_shortest_paths_one_to_all_test.bintest create mode 100644 ortools/graph/samples/dag_multiple_shortest_paths_sequential_test.bintest create mode 100644 ortools/graph/samples/dag_shortest_path_one_to_all_test.bintest create mode 100644 ortools/graph/samples/dag_shortest_path_sequential_test.bintest create mode 100644 ortools/graph/samples/dag_simple_constrained_shortest_path_test.bintest create mode 100644 ortools/graph/samples/dag_simple_multiple_shortest_paths_test.bintest create mode 100644 ortools/graph/samples/dag_simple_shortest_path_test.bintest create mode 100644 
ortools/graph/samples/dijkstra_all_pairs_shortest_paths_test.bintest create mode 100644 ortools/graph/samples/dijkstra_directed_test.bintest create mode 100644 ortools/graph/samples/dijkstra_one_to_all_test.bintest create mode 100644 ortools/graph/samples/dijkstra_sequential_test.bintest create mode 100644 ortools/graph/samples/dijkstra_undirected_test.bintest create mode 100644 ortools/graph/samples/root_a_tree_test.bintest create mode 100644 ortools/graph/samples/rooted_tree_paths_test.bintest create mode 100644 ortools/linear_solver/samples/AssignmentMbTest.bintest create mode 100644 ortools/linear_solver/samples/BinPackingMbTest.bintest create mode 100644 ortools/linear_solver/samples/CloneModelMbTest.bintest create mode 100644 ortools/linear_solver/samples/SimpleLpProgramMbTest.bintest create mode 100644 ortools/linear_solver/samples/SimpleMipProgramMbTest.bintest create mode 100644 ortools/math_opt/core/c_api/cpp_example_test.bintest create mode 100644 ortools/routing/samples/cvrp_disjoint_tw_test.bintest create mode 100644 ortools/routing/samples/cvrptw_test.bintest create mode 100644 ortools/routing/samples/cvrptw_with_precedences_test.bintest create mode 100644 ortools/routing/samples/cvrptw_with_refueling_test.bintest create mode 100644 ortools/routing/samples/cvrptw_with_resources_test.bintest create mode 100644 ortools/routing/samples/cvrptw_with_stop_times_and_resources_test.bintest create mode 100644 ortools/sat/samples/AssignmentGroupsSatTest.bintest create mode 100644 ortools/sat/samples/AssignmentSatTest.bintest create mode 100644 ortools/sat/samples/AssignmentTaskSizesSatTest.bintest create mode 100644 ortools/sat/samples/AssignmentTeamsSatTest.bintest create mode 100644 ortools/sat/samples/AssumptionsSampleSatTest.bintest create mode 100644 ortools/sat/samples/BinPackingProblemSatTest.bintest create mode 100644 ortools/sat/samples/BoolOrSampleSatTest.bintest create mode 100644 ortools/sat/samples/ChannelingSampleSatTest.bintest create mode 100644 
ortools/sat/samples/CloneModelSampleSatTest.bintest create mode 100644 ortools/sat/samples/CpIsFunSatTest.bintest create mode 100644 ortools/sat/samples/CpSatExampleTest.bintest create mode 100644 ortools/sat/samples/EarlinessTardinessCostSampleSatTest.bintest create mode 100644 ortools/sat/samples/IntervalSampleSatTest.bintest create mode 100644 ortools/sat/samples/LiteralSampleSatTest.bintest create mode 100644 ortools/sat/samples/MinimalJobshopSatTest.bintest create mode 100644 ortools/sat/samples/MultipleKnapsackSatTest.bintest create mode 100644 ortools/sat/samples/NQueensSatTest.bintest create mode 100644 ortools/sat/samples/NoOverlapSampleSatTest.bintest create mode 100644 ortools/sat/samples/NonLinearSatTest.bintest create mode 100644 ortools/sat/samples/NursesSatTest.bintest create mode 100644 ortools/sat/samples/OptionalIntervalSampleSatTest.bintest create mode 100644 ortools/sat/samples/RabbitsAndPheasantsSatTest.bintest create mode 100644 ortools/sat/samples/RankingSampleSatTest.bintest create mode 100644 ortools/sat/samples/ReifiedSampleSatTest.bintest create mode 100644 ortools/sat/samples/ScheduleRequestsSatTest.bintest create mode 100644 ortools/sat/samples/SearchForAllSolutionsSampleSatTest.bintest create mode 100644 ortools/sat/samples/SimpleSatProgramTest.bintest create mode 100644 ortools/sat/samples/SolutionHintingSampleSatTest.bintest create mode 100644 ortools/sat/samples/SolveAndPrintIntermediateSolutionsSampleSatTest.bintest create mode 100644 ortools/sat/samples/SolveWithTimeLimitSampleSatTest.bintest create mode 100644 ortools/sat/samples/StepFunctionSampleSatTest.bintest create mode 100644 ortools/sat/samples/StopAfterNSolutionsSampleSatTest.bintest create mode 100644 ortools/set_cover/samples/set_cover_cc_test.bintest create mode 100644 ortools/set_cover/samples/set_cover_py_test.bintest create mode 100644 tools/build/BUILD.bazel create mode 100644 tools/build/bazel2cmake.py create mode 100644 tools/testing/BUILD.bazel create mode 
100644 tools/testing/README.md create mode 100644 tools/testing/binary_test.py create mode 100644 tools/testing/bintest.bzl create mode 100644 tools/testing/bintest_matchers.py create mode 100644 tools/testing/bintest_matchers_test.py create mode 100644 tools/testing/bintest_run_utils.py create mode 100644 tools/testing/bintest_run_utils_test.py rename bazel/test_runner_template.sh => tools/testing/bintest_script_launcher.py (74%) create mode 100644 tools/testing/bintest_script_runner.py create mode 100644 tools/testing/bintest_script_runner_test.py create mode 100644 tools/testing/echo.cc create mode 100644 tools/testing/fail.cc create mode 100644 tools/testing/print_args.bintest create mode 100644 tools/testing/print_args.cc create mode 100644 tools/testing/print_args_data.txt create mode 100644 tools/testing/print_args_test.py diff --git a/.gitignore b/.gitignore index 2f91a2e2177..fad35df17c8 100644 --- a/.gitignore +++ b/.gitignore @@ -107,8 +107,7 @@ examples/dotnet/obj CMakeCache.txt CMakeFiles DartConfiguration.tcl -*build*/* -build/ +/build*/ # Ignore Bzlmod lock file until it is more stable MODULE.bazel.lock diff --git a/bazel/BUILD.bazel b/bazel/BUILD.bazel index 9c85c963070..f3560a8f7ec 100644 --- a/bazel/BUILD.bazel +++ b/bazel/BUILD.bazel @@ -36,10 +36,3 @@ compile_pip_requirements( ) package(default_visibility = ["//visibility:public"]) - -filegroup( - name = "test_runner_template", - testonly = 1, - srcs = ["test_runner_template.sh"], - visibility = ["//visibility:public"], -) diff --git a/bazel/run_binary_test.bzl b/bazel/run_binary_test.bzl deleted file mode 100644 index 000b4dc7c2e..00000000000 --- a/bazel/run_binary_test.bzl +++ /dev/null @@ -1,106 +0,0 @@ -# Copyright 2010-2025 Google LLC -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""run_binary_test will run a xx_binary as test with the given args.""" - -load("@bazel_skylib//rules:expand_template.bzl", "expand_template") -load("@rules_shell//shell:sh_test.bzl", "sh_test") - -def parse_label(label): - """Parse a label into (package, name). - - Args: - label: string in relative or absolute form. - - Returns: - Pair of strings: package, relative_name - - Raises: - ValueError for malformed label (does not do an exhaustive validation) - """ - if label.startswith("//"): - label = label[2:] # drop the leading // - colon_split = label.split(":") - if len(colon_split) == 1: # no ":" in label - pkg = label - _, _, target = label.rpartition("/") - else: - pkg, target = colon_split # fails if len(colon_split) != 2 - else: - colon_split = label.split(":") - if len(colon_split) == 1: # no ":" in label - pkg, target = native.package_name(), label - else: - pkg2, target = colon_split # fails if len(colon_split) != 2 - pkg = native.package_name() + ("/" + pkg2 if pkg2 else "") - return pkg, target - -def get_check_contains_code(line): - return """ -if ! grep --quiet --fixed-strings "{line}" "${{LOGFILE}}"; then - echo "---------------------------------------------------------------" - cat "${{LOGFILE}}" - echo "---------------------------------------------------------------" - echo "FAILURE: string '{line}' was not found in the output." 
- echo "---------------------------------------------------------------" - exit 1 -fi -""".format(line = line) - -def run_binary_test( - name, - binary, - template = "//bazel:test_runner_template", - args = [], - data = [], - grep_lines = [], - **kwargs): - """Create a sh_test to run the given binary as test. - - Args: - name: name of the test target. - binary: name of the binary target to run. - template: template file for executing the binary target. - args: args to use to run the binary. - data: data files required by this test. - grep_lines: lines to grep for in the log file. - **kwargs: other attributes that are applicable to tests, size, tags, etc. - - """ - shell_script = name + ".sh" - - # Get the path to the binary we want to run. - binary_pkg, binary_name = parse_label(binary) - binary_path = "/".join([binary_pkg, binary_name]) - - # We would like to include args in the generated shell script, so that "blaze-bin/.../test" can - # be run manually. Unfortunately `expand_template` does not resolve $(location) and other Make - # variables so we only pass them in `sh_test` below. - expand_template( - name = name + "_gensh", - template = template, - out = shell_script, - testonly = 1, - substitutions = { - "{{binary_path}}": binary_path, - "{{post_script}}": "\n".join([get_check_contains_code(line) for line in grep_lines]), - }, - ) - sh_test( - name = name, - testonly = 1, - srcs = [shell_script], - data = data + [binary], - args = args, - **kwargs - ) diff --git a/cmake/cpp.cmake b/cmake/cpp.cmake index 19af6869042..285b5dd6b33 100644 --- a/cmake/cpp.cmake +++ b/cmake/cpp.cmake @@ -307,6 +307,126 @@ function(ortools_cxx_library) message(STATUS "Configuring library ${LIBRARY_NAME} ...DONE") endfunction() + +# ortools_cxx_binary() +# CMake function to generate and build C++ library. 
+# Parameters: +# NAME: CMake target name +# SOURCES: List of source files +# [COMPILE_DEFINITIONS]: List of private compile definitions +# [COMPILE_OPTIONS]: List of private compile options +# [LINK_LIBRARIES]: List of **public** libraries to use when linking +# note: ortools::ortools is always linked to the target +# [LINK_OPTIONS]: List of private link options +# e.g.: +# ortools_cxx_binary( +# NAME +# foo_bar_binary +# SOURCES +# bar_binary.cc +# ${PROJECT_SOURCE_DIR}/ortools/foo/bar_binary.cc +# LINK_LIBRARIES +# GTest::gmock +# GTest::gtest_main +# TESTING +# ) +function(ortools_cxx_binary) + set(options "TESTING") + set(oneValueArgs "NAME") + set(multiValueArgs + "SOURCES;COMPILE_DEFINITIONS;COMPILE_OPTIONS;LINK_LIBRARIES;LINK_OPTIONS") + cmake_parse_arguments(BINARY + "${options}" + "${oneValueArgs}" + "${multiValueArgs}" + ${ARGN} + ) + if(BINARY_TESTING AND NOT BUILD_TESTING) + return() + endif() + + if(NOT BINARY_NAME) + message(FATAL_ERROR "no NAME provided") + endif() + if(NOT BINARY_SOURCES) + message(FATAL_ERROR "no SOURCES provided") + endif() + message(STATUS "Configuring binary ${BINARY_NAME} ...") + + add_executable(${BINARY_NAME} ${BINARY_TYPE} "") + target_sources(${BINARY_NAME} PRIVATE ${BINARY_SOURCES}) + target_include_directories(${BINARY_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}) + target_compile_definitions(${BINARY_NAME} PRIVATE ${BINARY_COMPILE_DEFINITIONS}) + target_compile_features(${BINARY_NAME} PRIVATE cxx_std_17) + target_compile_options(${BINARY_NAME} PRIVATE ${BINARY_COMPILE_OPTIONS}) + target_link_libraries(${BINARY_NAME} PRIVATE ${PROJECT_NAMESPACE}::ortools ${BINARY_LINK_LIBRARIES}) + target_link_options(${BINARY_NAME} PRIVATE ${BINARY_LINK_OPTIONS}) + + include(GNUInstallDirs) + if(APPLE) + set_target_properties(${BINARY_NAME} PROPERTIES + INSTALL_RPATH "@loader_path/../${CMAKE_INSTALL_LIBDIR};@loader_path") + elseif(UNIX) + cmake_path(RELATIVE_PATH CMAKE_INSTALL_FULL_LIBDIR + BASE_DIRECTORY ${CMAKE_INSTALL_FULL_BINDIR} + 
 OUTPUT_VARIABLE libdir_relative_path) + set_target_properties(${BINARY_NAME} PROPERTIES + INSTALL_RPATH "$ORIGIN/${libdir_relative_path}:$ORIGIN") + endif() + add_executable(${PROJECT_NAMESPACE}::${BINARY_NAME} ALIAS ${BINARY_NAME}) + message(STATUS "Configuring binary ${BINARY_NAME} ...DONE") +endfunction() + +find_package(Python3 COMPONENTS Interpreter) + +# ortools_cxx_bintest() +# CMake function to generate and build C++ test. +# Parameters: +# NAME: CMake target name +# SCRIPT: The script to run the test. +# e.g.: +# ortools_cxx_bintest( +# NAME +# foo_bar_bintest +# SCRIPT +# foo_bar.bintest +# ENVIRONMENT +# "BINTEST_foo_bar=$" +# "BINTEST_foo_bar_data=$(CMAKE_CURRENT_SOURCE_DIR)/foo_bar_data.txt" +# ) +function(ortools_cxx_bintest) + set(options "") + set(oneValueArgs "NAME;SCRIPT") + set(multiValueArgs "ENVIRONMENT") + cmake_parse_arguments(BINTEST + "${options}" + "${oneValueArgs}" + "${multiValueArgs}" + ${ARGN} + ) + if(NOT BINTEST_NAME) + message(FATAL_ERROR "no NAME provided") + endif() + if(NOT BINTEST_SCRIPT) + message(FATAL_ERROR "no SCRIPT provided") + endif() + if(NOT Python3_Interpreter_FOUND) + message(WARNING "No python3 interpreter found, the bintest ${BINTEST_NAME} is disabled") + return() + endif() + + message(STATUS "Configuring bintest ${BINTEST_NAME} ...") + add_test( + NAME ${BINTEST_NAME} + COMMAND ${Python3_EXECUTABLE} -m tools.testing.bintest_script_launcher ${BINTEST_SCRIPT} + WORKING_DIRECTORY ${PROJECT_SOURCE_DIR} + ) + set_tests_properties(${BINTEST_NAME} PROPERTIES + ENVIRONMENT "${BINTEST_ENVIRONMENT}" + ) + message(STATUS "Configuring bintest ${BINTEST_NAME} ...DONE") +endfunction() + ################## ## PROTO FILE ## ################## diff --git a/cmake/python.cmake b/cmake/python.cmake index d9af4a65a8e..6aa1796b0a2 100644 --- a/cmake/python.cmake +++ b/cmake/python.cmake @@ -1002,3 +1002,44 @@ if(NOT EXAMPLE_FILE_NAME) endif() message(STATUS "Configuring example ${EXAMPLE_FILE_NAME} ...DONE") endfunction() + +# 
add_python_binary() +# CMake function to generate a shell wrapper to execute a Python program. +# Parameters: +# NAME: the target name +# FILE: the Python filename +# e.g.: +# add_python_binary( +# NAME +# foo_bin +# FILE +# ${PROJECT_SOURCE_DIR}/examples/foo/bar.py +# ) +function(add_python_binary) + set(options "") + set(oneValueArgs NAME;FILE) + set(multiValueArgs "") + cmake_parse_arguments(PY_BINARY + "${options}" + "${oneValueArgs}" + "${multiValueArgs}" + ${ARGN} + ) + message(STATUS "Configuring python binary ${PY_BINARY_NAME} ...") + if(NOT PY_BINARY_NAME) + message(FATAL_ERROR "no NAME provided for python binary") + endif() + if(NOT PY_BINARY_FILE) + message(FATAL_ERROR "no FILE provided for python binary") + endif() + file(WRITE "${CMAKE_CURRENT_BINARY_DIR}/${PY_BINARY_NAME}" "#!/usr/bin/env sh\n${VENV_Python3_EXECUTABLE} ${PY_BINARY_FILE} \"$@\"\n") + file(CHMOD "${CMAKE_CURRENT_BINARY_DIR}/${PY_BINARY_NAME}" + FILE_PERMISSIONS + OWNER_READ OWNER_EXECUTE + GROUP_READ GROUP_EXECUTE + WORLD_READ WORLD_EXECUTE + ) + add_executable("${PY_BINARY_NAME}" IMPORTED) + set_target_properties("${PY_BINARY_NAME}" PROPERTIES IMPORTED_LOCATION "${CMAKE_CURRENT_BINARY_DIR}/${PY_BINARY_NAME}") + message(STATUS "Configuring python binary ${PY_BINARY_NAME} ...DONE") +endfunction() diff --git a/examples/cpp/BUILD.bazel b/examples/cpp/BUILD.bazel index 52c50f822fa..f351e4dbd42 100644 --- a/examples/cpp/BUILD.bazel +++ b/examples/cpp/BUILD.bazel @@ -13,7 +13,7 @@ load("@rules_cc//cc:cc_binary.bzl", "cc_binary") load("@rules_cc//cc:cc_library.bzl", "cc_library") -load("//bazel:run_binary_test.bzl", "run_binary_test") +load("//tools/testing:bintest.bzl", "bintest") # Description: # C++ examples for operations_research. 
@@ -44,15 +44,61 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "binpacking_2d_sat_class01_instance2_test", size = "medium", - args = [ - "--input $(rootpath //ortools/packing/testdata:Class_01.2bp)", - "--instance 2", + srcs = [":binpacking_2d_sat_class01_instance2_test.bintest"], + named_data = { + "binpacking_2d_sat": ":binpacking_2d_sat", + "Class_01.2bp": "//ortools/packing/testdata:Class_01.2bp", + }, +) + +cc_library( + name = "cgc", + srcs = ["cgc.cc"], + hdrs = [ + "cgc.h", + "cgc_data.h", ], - binary = ":binpacking_2d_sat", - data = ["//ortools/packing/testdata:Class_01.2bp"], + deps = [ + "//ortools/base", + "//ortools/base:file", + "//ortools/constraint_solver:cp", + "@abseil-cpp//absl/container:btree", + "@abseil-cpp//absl/strings", + "@abseil-cpp//absl/strings:str_format", + "@abseil-cpp//absl/time", + "@abseil-cpp//absl/types:span", + "@re2", + ], +) + +cc_binary( + name = "cgc_main", + srcs = ["cgc_main.cc"], + deps = [ + ":cgc", + "//ortools/base", + "@abseil-cpp//absl/flags:flag", + "@abseil-cpp//absl/strings:str_format", + "@abseil-cpp//absl/time", + ], +) + +bintest( + name = "cgc_test_solution", + size = "small", + srcs = [":cgc_test_solution.bintest"], + named_data = { + "cgc_main": ":cgc_main", + "1.in": "testdata/cgc/1.in", + "2.in": "testdata/cgc/2.in", + "3.in": "testdata/cgc/3.in", + "cgcut1.in": "testdata/cgc/cgcut1.in", + "cgcut2.in": "testdata/cgc/cgcut2.in", + "cgcut3.in": "testdata/cgc/cgcut3.in", + }, ) cc_binary( @@ -67,10 +113,11 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "constraint_programming_cp_test", size = "small", - binary = ":constraint_programming_cp", + srcs = [":constraint_programming_cp_test.bintest"], + named_data = {"constraint_programming_cp": ":constraint_programming_cp"}, ) cc_binary( @@ -91,37 +138,25 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "costas_array_sat_model1_test", size = "medium", - args = [ - "--minsize=6", - "--maxsize=6", - "--model=1", - ], - binary = 
":costas_array_sat", + srcs = [":costas_array_sat_model1_test.bintest"], + named_data = {"costas_array_sat": ":costas_array_sat"}, ) -run_binary_test( +bintest( name = "costas_array_sat_model2_test", size = "medium", - args = [ - "--minsize=6", - "--maxsize=6", - "--model=2", - ], - binary = ":costas_array_sat", + srcs = [":costas_array_sat_model2_test.bintest"], + named_data = {"costas_array_sat": ":costas_array_sat"}, ) -run_binary_test( +bintest( name = "costas_array_sat_model3_test", size = "medium", - args = [ - "--minsize=6", - "--maxsize=6", - "--model=3", - ], - binary = ":costas_array_sat", + srcs = [":costas_array_sat_model3_test.bintest"], + named_data = {"costas_array_sat": ":costas_array_sat"}, ) cc_binary( @@ -130,10 +165,11 @@ cc_binary( deps = ["//ortools/sat:cp_model"], ) -run_binary_test( +bintest( name = "cryptarithm_sat_test", size = "small", - binary = ":cryptarithm_sat", + srcs = [":cryptarithm_sat_test.bintest"], + named_data = {"cryptarithm_sat": ":cryptarithm_sat"}, ) cc_binary( @@ -151,11 +187,11 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "dobble_ls_test", size = "medium", - args = ["--time_limit_in_ms=10000"], - binary = ":dobble_ls", + srcs = [":dobble_ls_test.bintest"], + named_data = {"dobble_ls": ":dobble_ls"}, ) cc_binary( @@ -176,11 +212,11 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "golomb_sat_test", size = "medium", - args = ["--size 5"], - binary = ":golomb_sat", + srcs = [":golomb_sat_test.bintest"], + named_data = {"golomb_sat": ":golomb_sat"}, ) cc_binary( @@ -203,15 +239,14 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "knapsack_2d_sat_class01_instance2_test", size = "medium", - args = [ - "--input $(rootpath //ortools/packing/testdata:Class_01.2bp)", - "--instance 2", - ], - binary = ":knapsack_2d_sat", - data = ["//ortools/packing/testdata:Class_01.2bp"], + srcs = [":knapsack_2d_sat_class01_instance2_test.bintest"], + named_data = { + "knapsack_2d_sat": ":knapsack_2d_sat", + "Class_01.2bp": 
"//ortools/packing/testdata:Class_01.2bp", + }, ) cc_binary( @@ -241,12 +276,14 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "jobshop_sat_ft06", size = "small", - args = ["--input $(rootpath //ortools/scheduling/testdata:ft06)"], - binary = ":jobshop_sat", - data = ["//ortools/scheduling/testdata:ft06"], + srcs = [":jobshop_sat_ft06.bintest"], + named_data = { + "jobshop_sat": ":jobshop_sat", + "ft06": "//ortools/scheduling/testdata:ft06", + }, ) cc_binary( @@ -264,10 +301,11 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "magic_sequence_sat_test", size = "medium", - binary = ":magic_sequence_sat", + srcs = [":magic_sequence_sat_test.bintest"], + named_data = {"magic_sequence_sat": ":magic_sequence_sat"}, ) cc_binary( @@ -284,10 +322,11 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "multi_knapsack_sat_test", size = "medium", - binary = ":multi_knapsack_sat", + srcs = [":multi_knapsack_sat_test.bintest"], + named_data = {"multi_knapsack_sat": ":multi_knapsack_sat"}, ) cc_binary( @@ -309,12 +348,14 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "shift_minimization_sat_test", size = "medium", - args = ["--input $(rootpath :shift_minimization.dat)"], - binary = ":shift_minimization_sat", - data = [":shift_minimization.dat"], + srcs = [":shift_minimization_sat_test.bintest"], + named_data = { + "shift_minimization_sat": ":shift_minimization_sat", + "shift_minimization.dat": "testdata/shift_minimization.dat", + }, ) cc_binary( @@ -339,12 +380,14 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "weighted_tardiness_sat_test", size = "medium", - args = ["--input $(rootpath :wt40.txt)"], - binary = ":weighted_tardiness_sat", - data = [":wt40.txt"], + srcs = [":weighted_tardiness_sat_test.bintest"], + named_data = { + "weighted_tardiness_sat": ":weighted_tardiness_sat", + "wt40.txt": "testdata/wt40.txt", + }, ) cc_binary( @@ -363,10 +406,11 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "magic_square_sat_test", size = "medium", - 
binary = ":magic_square_sat", + srcs = [":magic_square_sat_test.bintest"], + named_data = {"magic_square_sat": ":magic_square_sat"}, ) cc_binary( @@ -394,23 +438,11 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "network_routing_sat_test", size = "medium", - args = [ - "--clients=10", - "--backbones=5", - "--demands=10", - "--traffic_min=5", - "--traffic_max=10", - "--min_client_degree=2", - "--max_client_degree=5", - "--min_backbone_degree=3", - "--max_backbone_degree=5", - "--max_capacity=20", - "--fixed_charge_cost=10", - ], - binary = ":network_routing_sat", + srcs = [":network_routing_sat_test.bintest"], + named_data = {"network_routing_sat": ":network_routing_sat"}, ) cc_binary( @@ -426,10 +458,11 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "nqueens_test", size = "small", - binary = ":nqueens", + srcs = [":nqueens_test.bintest"], + named_data = {"nqueens": ":nqueens"}, ) cc_binary( @@ -451,10 +484,11 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "sports_scheduling_sat_test", size = "medium", - binary = ":sports_scheduling_sat", + srcs = [":sports_scheduling_sat_test.bintest"], + named_data = {"sports_scheduling_sat": ":sports_scheduling_sat"}, ) cc_binary( @@ -480,6 +514,26 @@ cc_binary( ], ) +bintest( + name = "pdptw_test", + size = "medium", + srcs = [":pdptw_test.bintest"], + named_data = { + "pdptw": ":pdptw", + "lc102.txt": "testdata/lc102.txt", + }, +) + +bintest( + name = "pdptw_non_homogenous_fleet_test", + size = "medium", + srcs = [":pdptw_non_homogenous_fleet_test.bintest"], + named_data = { + "pdptw": ":pdptw", + "lc102.txt": "testdata/lc102.txt", + }, +) + # Routing examples. 
cc_binary( name = "random_tsp", @@ -515,10 +569,11 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "integer_programming_test", size = "small", - binary = ":integer_programming", + srcs = [":integer_programming_test.bintest"], + named_data = {"integer_programming": ":integer_programming"}, ) cc_binary( @@ -535,10 +590,11 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "linear_programming_test", size = "small", - binary = ":linear_programming", + srcs = [":linear_programming_test.bintest"], + named_data = {"linear_programming": ":linear_programming"}, ) cc_binary( @@ -551,10 +607,11 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "linear_solver_protocol_buffers_test", size = "small", - binary = ":linear_solver_protocol_buffers", + srcs = [":linear_solver_protocol_buffers_test.bintest"], + named_data = {"linear_solver_protocol_buffers": ":linear_solver_protocol_buffers"}, ) cc_binary( @@ -571,11 +628,11 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "strawberry_fields_with_column_generation_test", size = "large", - args = ["--colgen_instance=4"], - binary = ":strawberry_fields_with_column_generation", + srcs = [":strawberry_fields_with_column_generation_test.bintest"], + named_data = {"strawberry_fields_with_column_generation": ":strawberry_fields_with_column_generation"}, ) # Dimacs assignment problems @@ -617,10 +674,30 @@ cc_binary( "//ortools/graph:linear_assignment", "@abseil-cpp//absl/container:flat_hash_map", "@abseil-cpp//absl/flags:flag", - "@abseil-cpp//absl/strings:str_format", + "@abseil-cpp//absl/strings:string_view", ], ) +bintest( + name = "dimacs_assignment_min_cost_test", + size = "small", + srcs = [":dimacs_assignment_min_cost_test.bintest"], + named_data = { + "dimacs_assignment": ":dimacs_assignment", + "dimacs_example.txt": "testdata/dimacs_example.txt", + }, +) + +bintest( + name = "dimacs_assignment_max_cost_test", + size = "small", + srcs = [":dimacs_assignment_max_cost_test.bintest"], + named_data = { + 
"dimacs_assignment": ":dimacs_assignment", + "dimacs_example.txt": "testdata/dimacs_example.txt", + }, +) + # MPS driver for LP and MIP. cc_binary( name = "mps_driver", @@ -650,12 +727,14 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "mps_driver_test", size = "small", - args = ["--input $(rootpath //ortools/linear_solver/testdata:maximization.mps)"], - binary = ":mps_driver", - data = ["//ortools/linear_solver/testdata:maximization.mps"], + srcs = [":mps_driver_test.bintest"], + named_data = { + "mps_driver": ":mps_driver", + "maximization.mps": "//ortools/linear_solver/testdata:maximization.mps", + }, ) # Linear Assignment C++ Example @@ -669,10 +748,11 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "linear_assignment_api_test", size = "small", - binary = ":linear_assignment_api", + srcs = [":linear_assignment_api_test.bintest"], + named_data = {"linear_assignment_api": ":linear_assignment_api"}, ) # Flow C++ Example @@ -687,10 +767,11 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "flow_api_test", size = "small", - binary = ":flow_api", + srcs = [":flow_api_test.bintest"], + named_data = {"flow_api": ":flow_api"}, ) cc_binary( @@ -706,10 +787,11 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "max_flow_test", size = "small", - binary = ":max_flow", + srcs = [":max_flow_test.bintest"], + named_data = {"max_flow": ":max_flow"}, ) cc_binary( @@ -724,10 +806,11 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "min_cost_flow_test", size = "small", - binary = ":min_cost_flow", + srcs = [":min_cost_flow_test.bintest"], + named_data = {"min_cost_flow": ":min_cost_flow"}, ) # Frequency Assignment Problem @@ -819,10 +902,11 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "slitherlink_sat_test", size = "small", - binary = ":slitherlink_sat", + srcs = [":slitherlink_sat_test.bintest"], + named_data = {"slitherlink_sat": ":slitherlink_sat"}, ) cc_binary( @@ -854,10 +938,11 @@ cc_binary( ], ) -run_binary_test( +bintest( name = 
"variable_intervals_sat_test", size = "small", - binary = ":variable_intervals_sat", + srcs = [":variable_intervals_sat_test.bintest"], + named_data = {"variable_intervals_sat": ":variable_intervals_sat"}, ) cc_binary( diff --git a/examples/cpp/CMakeBazel.txt b/examples/cpp/CMakeBazel.txt new file mode 100644 index 00000000000..063affbafd9 --- /dev/null +++ b/examples/cpp/CMakeBazel.txt @@ -0,0 +1,425 @@ +# This file is auto generated by bazel2cmake.py from examples/cpp/BUILD.bazel +# Don't edit manually, your changes will be lost. +# You can update this file by running: +# python3 tools/build/bazel2cmake.py examples/cpp/BUILD.bazel + + +ortools_cxx_binary( + NAME bzl_cc_example_binpacking_2d_sat + SOURCES binpacking_2d_sat.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_binpacking_2d_sat_class01_instance2_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/binpacking_2d_sat_class01_instance2_test.bintest + ENVIRONMENT BINTEST_binpacking_2d_sat=$ BINTEST_Class_01.2bp=${CMAKE_SOURCE_DIR}/ortools/packing/testdata/Class_01.2bp +) + +ortools_cxx_library( + NAME bzl_cc_example_cgc + SOURCES cgc.cc cgc.h cgc_data.h + TYPE SHARED +) + +ortools_cxx_binary( + NAME bzl_cc_example_cgc_main + SOURCES cgc_main.cc + LINK_LIBRARIES bzl_cc_example_cgc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_cgc_test_solution + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/cgc_test_solution.bintest + ENVIRONMENT BINTEST_cgc_main=$ BINTEST_1.in=${CMAKE_CURRENT_SOURCE_DIR}/testdata/cgc/1.in BINTEST_2.in=${CMAKE_CURRENT_SOURCE_DIR}/testdata/cgc/2.in BINTEST_3.in=${CMAKE_CURRENT_SOURCE_DIR}/testdata/cgc/3.in BINTEST_cgcut1.in=${CMAKE_CURRENT_SOURCE_DIR}/testdata/cgc/cgcut1.in BINTEST_cgcut2.in=${CMAKE_CURRENT_SOURCE_DIR}/testdata/cgc/cgcut2.in BINTEST_cgcut3.in=${CMAKE_CURRENT_SOURCE_DIR}/testdata/cgc/cgcut3.in +) + +ortools_cxx_binary( + NAME bzl_cc_example_constraint_programming_cp + SOURCES constraint_programming_cp.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_constraint_programming_cp_test + SCRIPT 
${CMAKE_CURRENT_SOURCE_DIR}/constraint_programming_cp_test.bintest + ENVIRONMENT BINTEST_constraint_programming_cp=$ +) + +ortools_cxx_binary( + NAME bzl_cc_example_costas_array_sat + SOURCES costas_array_sat.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_costas_array_sat_model1_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/costas_array_sat_model1_test.bintest + ENVIRONMENT BINTEST_costas_array_sat=$ +) + +ortools_cxx_bintest( + NAME bzl_cc_example_costas_array_sat_model2_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/costas_array_sat_model2_test.bintest + ENVIRONMENT BINTEST_costas_array_sat=$ +) + +ortools_cxx_bintest( + NAME bzl_cc_example_costas_array_sat_model3_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/costas_array_sat_model3_test.bintest + ENVIRONMENT BINTEST_costas_array_sat=$ +) + +ortools_cxx_binary( + NAME bzl_cc_example_cryptarithm_sat + SOURCES cryptarithm_sat.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_cryptarithm_sat_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/cryptarithm_sat_test.bintest + ENVIRONMENT BINTEST_cryptarithm_sat=$ +) + +ortools_cxx_binary( + NAME bzl_cc_example_dobble_ls + SOURCES dobble_ls.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_dobble_ls_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/dobble_ls_test.bintest + ENVIRONMENT BINTEST_dobble_ls=$ +) + +ortools_cxx_binary( + NAME bzl_cc_example_golomb_sat + SOURCES golomb_sat.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_golomb_sat_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/golomb_sat_test.bintest + ENVIRONMENT BINTEST_golomb_sat=$ +) + +ortools_cxx_binary( + NAME bzl_cc_example_knapsack_2d_sat + SOURCES knapsack_2d_sat.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_knapsack_2d_sat_class01_instance2_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/knapsack_2d_sat_class01_instance2_test.bintest + ENVIRONMENT BINTEST_knapsack_2d_sat=$ BINTEST_Class_01.2bp=${CMAKE_SOURCE_DIR}/ortools/packing/testdata/Class_01.2bp +) + +ortools_cxx_binary( + NAME bzl_cc_example_jobshop_sat + SOURCES 
jobshop_sat.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_jobshop_sat_ft06 + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/jobshop_sat_ft06.bintest + ENVIRONMENT BINTEST_jobshop_sat=$ BINTEST_ft06=${CMAKE_SOURCE_DIR}/ortools/scheduling/testdata/ft06 +) + +ortools_cxx_binary( + NAME bzl_cc_example_magic_sequence_sat + SOURCES magic_sequence_sat.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_magic_sequence_sat_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/magic_sequence_sat_test.bintest + ENVIRONMENT BINTEST_magic_sequence_sat=$ +) + +ortools_cxx_binary( + NAME bzl_cc_example_multi_knapsack_sat + SOURCES multi_knapsack_sat.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_multi_knapsack_sat_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/multi_knapsack_sat_test.bintest + ENVIRONMENT BINTEST_multi_knapsack_sat=$ +) + +ortools_cxx_binary( + NAME bzl_cc_example_shift_minimization_sat + SOURCES shift_minimization_sat.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_shift_minimization_sat_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/shift_minimization_sat_test.bintest + ENVIRONMENT BINTEST_shift_minimization_sat=$ BINTEST_shift_minimization.dat=${CMAKE_CURRENT_SOURCE_DIR}/testdata/shift_minimization.dat +) + +ortools_cxx_binary( + NAME bzl_cc_example_weighted_tardiness_sat + SOURCES weighted_tardiness_sat.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_weighted_tardiness_sat_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/weighted_tardiness_sat_test.bintest + ENVIRONMENT BINTEST_weighted_tardiness_sat=$ BINTEST_wt40.txt=${CMAKE_CURRENT_SOURCE_DIR}/testdata/wt40.txt +) + +ortools_cxx_binary( + NAME bzl_cc_example_magic_square_sat + SOURCES magic_square_sat.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_magic_square_sat_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/magic_square_sat_test.bintest + ENVIRONMENT BINTEST_magic_square_sat=$ +) + +ortools_cxx_binary( + NAME bzl_cc_example_network_routing_sat + SOURCES network_routing_sat.cc +) + +ortools_cxx_bintest( + NAME 
bzl_cc_example_network_routing_sat_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/network_routing_sat_test.bintest + ENVIRONMENT BINTEST_network_routing_sat=$ +) + +ortools_cxx_binary( + NAME bzl_cc_example_nqueens + SOURCES nqueens.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_nqueens_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/nqueens_test.bintest + ENVIRONMENT BINTEST_nqueens=$ +) + +ortools_cxx_binary( + NAME bzl_cc_example_sports_scheduling_sat + SOURCES sports_scheduling_sat.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_sports_scheduling_sat_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/sports_scheduling_sat_test.bintest + ENVIRONMENT BINTEST_sports_scheduling_sat=$ +) + +ortools_cxx_binary( + NAME bzl_cc_example_pdptw + SOURCES pdptw.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_pdptw_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/pdptw_test.bintest + ENVIRONMENT BINTEST_pdptw=$ BINTEST_lc102.txt=${CMAKE_CURRENT_SOURCE_DIR}/testdata/lc102.txt +) + +ortools_cxx_bintest( + NAME bzl_cc_example_pdptw_non_homogenous_fleet_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/pdptw_non_homogenous_fleet_test.bintest + ENVIRONMENT BINTEST_pdptw=$ BINTEST_lc102.txt=${CMAKE_CURRENT_SOURCE_DIR}/testdata/lc102.txt +) + +ortools_cxx_binary( + NAME bzl_cc_example_random_tsp + SOURCES random_tsp.cc +) + +ortools_cxx_binary( + NAME bzl_cc_example_integer_programming + SOURCES integer_programming.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_integer_programming_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/integer_programming_test.bintest + ENVIRONMENT BINTEST_integer_programming=$ +) + +ortools_cxx_binary( + NAME bzl_cc_example_linear_programming + SOURCES linear_programming.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_linear_programming_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/linear_programming_test.bintest + ENVIRONMENT BINTEST_linear_programming=$ +) + +ortools_cxx_binary( + NAME bzl_cc_example_linear_solver_protocol_buffers + SOURCES linear_solver_protocol_buffers.cc 
+) + +ortools_cxx_bintest( + NAME bzl_cc_example_linear_solver_protocol_buffers_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/linear_solver_protocol_buffers_test.bintest + ENVIRONMENT BINTEST_linear_solver_protocol_buffers=$ +) + +ortools_cxx_binary( + NAME bzl_cc_example_strawberry_fields_with_column_generation + SOURCES strawberry_fields_with_column_generation.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_strawberry_fields_with_column_generation_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/strawberry_fields_with_column_generation_test.bintest + ENVIRONMENT BINTEST_strawberry_fields_with_column_generation=$ +) + +ortools_cxx_library( + NAME bzl_cc_example_print_dimacs_assignment + SOURCES print_dimacs_assignment.h + TYPE INTERFACE +) + +ortools_cxx_library( + NAME bzl_cc_example_parse_dimacs_assignment + SOURCES parse_dimacs_assignment.cc parse_dimacs_assignment.h + TYPE SHARED +) + +ortools_cxx_binary( + NAME bzl_cc_example_dimacs_assignment + SOURCES dimacs_assignment.cc + LINK_LIBRARIES bzl_cc_example_parse_dimacs_assignment bzl_cc_example_print_dimacs_assignment +) + +ortools_cxx_bintest( + NAME bzl_cc_example_dimacs_assignment_min_cost_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/dimacs_assignment_min_cost_test.bintest + ENVIRONMENT BINTEST_dimacs_assignment=$ BINTEST_dimacs_example.txt=${CMAKE_CURRENT_SOURCE_DIR}/testdata/dimacs_example.txt +) + +ortools_cxx_bintest( + NAME bzl_cc_example_dimacs_assignment_max_cost_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/dimacs_assignment_max_cost_test.bintest + ENVIRONMENT BINTEST_dimacs_assignment=$ BINTEST_dimacs_example.txt=${CMAKE_CURRENT_SOURCE_DIR}/testdata/dimacs_example.txt +) + +ortools_cxx_binary( + NAME bzl_cc_example_mps_driver + SOURCES mps_driver.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_mps_driver_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/mps_driver_test.bintest + ENVIRONMENT BINTEST_mps_driver=$ BINTEST_maximization.mps=${CMAKE_SOURCE_DIR}/ortools/linear_solver/testdata/maximization.mps +) + 
+ortools_cxx_binary( + NAME bzl_cc_example_linear_assignment_api + SOURCES linear_assignment_api.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_linear_assignment_api_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/linear_assignment_api_test.bintest + ENVIRONMENT BINTEST_linear_assignment_api=$ +) + +ortools_cxx_binary( + NAME bzl_cc_example_flow_api + SOURCES flow_api.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_flow_api_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/flow_api_test.bintest + ENVIRONMENT BINTEST_flow_api=$ +) + +ortools_cxx_binary( + NAME bzl_cc_example_max_flow + SOURCES max_flow.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_max_flow_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/max_flow_test.bintest + ENVIRONMENT BINTEST_max_flow=$ +) + +ortools_cxx_binary( + NAME bzl_cc_example_min_cost_flow + SOURCES min_cost_flow.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_min_cost_flow_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/min_cost_flow_test.bintest + ENVIRONMENT BINTEST_min_cost_flow=$ +) + +ortools_cxx_library( + NAME bzl_cc_example_fap_parser + SOURCES fap_parser.cc fap_parser.h + TYPE SHARED +) + +ortools_cxx_library( + NAME bzl_cc_example_fap_model_printer + SOURCES fap_model_printer.cc fap_model_printer.h + LINK_LIBRARIES bzl_cc_example_fap_parser + TYPE SHARED +) + +ortools_cxx_library( + NAME bzl_cc_example_fap_utilities + SOURCES fap_utilities.cc fap_utilities.h + LINK_LIBRARIES bzl_cc_example_fap_parser + TYPE SHARED +) + +ortools_cxx_binary( + NAME bzl_cc_example_frequency_assignment_problem + SOURCES frequency_assignment_problem.cc + LINK_LIBRARIES bzl_cc_example_fap_model_printer bzl_cc_example_fap_parser bzl_cc_example_fap_utilities +) + +ortools_cxx_binary( + NAME bzl_cc_example_qap_sat + SOURCES qap_sat.cc +) + +ortools_cxx_binary( + NAME bzl_cc_example_slitherlink_sat + SOURCES slitherlink_sat.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_slitherlink_sat_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/slitherlink_sat_test.bintest 
+ ENVIRONMENT BINTEST_slitherlink_sat=$ +) + +ortools_cxx_binary( + NAME bzl_cc_example_uncapacitated_facility_location + SOURCES uncapacitated_facility_location.cc +) + +ortools_cxx_binary( + NAME bzl_cc_example_variable_intervals_sat + SOURCES variable_intervals_sat.cc +) + +ortools_cxx_bintest( + NAME bzl_cc_example_variable_intervals_sat_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/variable_intervals_sat_test.bintest + ENVIRONMENT BINTEST_variable_intervals_sat=$ +) + +ortools_cxx_binary( + NAME bzl_cc_example_pdlp_solve + SOURCES pdlp_solve.cc +) \ No newline at end of file diff --git a/examples/cpp/CMakeLists.txt b/examples/cpp/CMakeLists.txt index 3f4f6515589..522f06f068c 100644 --- a/examples/cpp/CMakeLists.txt +++ b/examples/cpp/CMakeLists.txt @@ -15,51 +15,4 @@ if(NOT BUILD_CXX_EXAMPLES) return() endif() -#file(GLOB_RECURSE proto_files RELATIVE ${PROJECT_SOURCE_DIR} "*.proto") -#foreach(PROTO_FILE IN LISTS proto_files) -# message(STATUS "protoc proto(cc): ${PROTO_FILE}") -# get_filename_component(PROTO_DIR ${PROTO_FILE} DIRECTORY) -# get_filename_component(PROTO_NAME ${PROTO_FILE} NAME_WE) -# set(PROTO_HDR ${PROJECT_BINARY_DIR}/${PROTO_DIR}/${PROTO_NAME}.pb.h) -# set(PROTO_SRC ${PROJECT_BINARY_DIR}/${PROTO_DIR}/${PROTO_NAME}.pb.cc) -# message(STATUS "protoc hdr: ${PROTO_HDR}") -# message(STATUS "protoc src: ${PROTO_SRC}") -# add_custom_command( -# OUTPUT ${PROTO_SRC} ${PROTO_HDR} -# COMMAND ${PROTOC_PRG} -# "--proto_path=${PROJECT_SOURCE_DIR}" -# ${PROTO_DIRS} -# "--cpp_out=${PROJECT_BINARY_DIR}" -# ${PROTO_FILE} -# DEPENDS ${PROTO_FILE} ${PROTOC_PRG} -# COMMENT "Generate C++ protocol buffer for ${PROTO_FILE}" -# VERBATIM) -# list(APPEND PROTO_HDRS ${PROTO_HDR}) -# list(APPEND PROTO_SRCS ${PROTO_SRC}) -#endforeach() - -file(GLOB CXX_SRCS "*.cc") -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/binpacking_2d_sat.cc") -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/course_scheduling_run.cc") # missing proto -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/course_scheduling.cc") # 
missing proto -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/dimacs_assignment.cc") # crash -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/dobble_ls.cc") # Too long -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/fap_model_printer.cc") # lib -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/fap_parser.cc") # lib -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/fap_utilities.cc") # lib -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/frequency_assignment_problem.cc") # crash -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/jobshop_sat.cc") # crash -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/knapsack_2d_sat.cc") -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/mps_driver.cc") # crash -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/multi_knapsack_sat.cc") # crash -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/network_routing_sat.cc") -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/parse_dimacs_assignment.cc") # lib -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/pdlp_solve.cc") -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/pdptw.cc") -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/shift_minimization_sat.cc") -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/strawberry_fields_with_column_generation.cc") # Too long -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/vector_bin_packing_solver.cc") -list(FILTER CXX_SRCS EXCLUDE REGEX ".*/weighted_tardiness_sat.cc") -foreach(EXAMPLE IN LISTS CXX_SRCS) - add_cxx_example(FILE_NAME ${EXAMPLE}) -endforeach() +include("CMakeBazel.txt") diff --git a/examples/cpp/binpacking_2d_sat_class01_instance2_test.bintest b/examples/cpp/binpacking_2d_sat_class01_instance2_test.bintest new file mode 100644 index 00000000000..1c18fa902f8 --- /dev/null +++ b/examples/cpp/binpacking_2d_sat_class01_instance2_test.bintest @@ -0,0 +1 @@ +RUN: $(binpacking_2d_sat) --input $(Class_01.2bp) --instance 2 diff --git a/examples/cpp/cgc.cc b/examples/cpp/cgc.cc new file mode 100644 index 00000000000..c23ce870ad3 --- /dev/null +++ b/examples/cpp/cgc.cc @@ -0,0 +1,573 @@ +// Copyright 2010-2025 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use 
this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Two-Dimensional Constrained Guillotine Cutting + +#include "examples/cpp/cgc.h" + +#include +#include +#include +#include + +#include "absl/container/btree_set.h" +#include "absl/strings/str_format.h" +#include "absl/strings/str_split.h" +#include "absl/time/time.h" +#include "absl/types/span.h" +#include "examples/cpp/cgc_data.h" +#include "ortools/base/helpers.h" +#include "ortools/base/logging.h" +#include "ortools/base/options.h" +#include "ortools/constraint_solver/constraint_solver.h" +#include "re2/re2.h" + +namespace operations_research { + +bool ConstrainedGuillotineCuttingData::LoadFromFile( + const std::string& input_file) { + std::string buffer; + + if (!file::GetContents(input_file, &buffer, file::Defaults()).ok()) { + LOG(ERROR) << "Could not read from file " << input_file; + return false; + } + const std::vector lines = + absl::StrSplit(buffer, '\n', absl::SkipEmpty()); + + if (lines.empty()) { + LOG(ERROR) << "Empty file: " << input_file; + return false; + } + + int num_pieces; + if (!RE2::FullMatch(lines[0], "\\s*(\\d+)\\s*", &num_pieces)) { + LOG(ERROR) << "Could not parse number of pieces"; + return false; + } + + if (0 >= num_pieces) { + LOG(ERROR) << "There are no pieces in the problem specification"; + return false; + } + + if (lines.size() != num_pieces + 2) { + LOG(ERROR) << "File: " << input_file << " does not respect the format"; + return false; + } + + if (!RE2::FullMatch(lines[1], "\\s*(\\d+)\\s+(\\d+)\\s*", &root_length_, + &root_width_)) { + LOG(ERROR) << "Could 
not parse the size of the main rectangle"; + return false; + } + + pieces_.resize(num_pieces); + for (int i = 0; i < num_pieces; ++i) { + if (!RE2::FullMatch(lines[i + 2], + "\\s*(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s*", + &pieces_[i].length, &pieces_[i].width, + &pieces_[i].max_appearances, &pieces_[i].value)) { + LOG(ERROR) << "Could not parse piece on line " << lines[i + 2] + << ", line number " << i + 3; + return false; + } + } + + return true; +} + +namespace { + +// Depending on the part that was cut, returns an IntVar representing +// if the cut that was made on the (index / 2 + 1) rectangle is +// a guillotine cut. +IntVar* IsAGuillotineCut(int index, + const std::vector& size_currently_cut, + const std::vector& size_not_cut, + const std::vector& parent_index, + const std::vector& pieces_size, Solver* solver) { + CHECK(solver != nullptr); + // The size of the cut must be >= 1 in order for the cut to be a valid one. + IntVar* const condition_var = + solver->MakeIsGreaterOrEqualCstVar(size_currently_cut[index], 1); + + // The part that is not cut should remain the same. + IntVar* const same_uncut_size_as_sibling = + solver->MakeIsEqualVar(size_not_cut[index], size_not_cut[index + 1]); + + // The part that is not cut should remain the same with the parent. + IntVar* const same_uncut_size_as_parent = solver->MakeIsEqualVar( + size_not_cut[index], + solver->MakeElement(size_not_cut, parent_index[index / 2 + 1])); + + // We make a cut if the size of the cut matches at least one of the pieces. + IntVar* const cut_equals_piece_size = solver->MakeIsEqualCstVar( + solver->MakeElement(pieces_size, size_currently_cut[index]), 1); + + // The sum of the sizes that were cut should equal the parent size. 
+ IntVar* const sum_of_sizes = solver->MakeIsEqualVar( + solver->MakeSum(size_currently_cut[index], size_currently_cut[index + 1]), + solver->MakeElement(size_currently_cut, parent_index[index / 2 + 1])); + + const std::vector cut_implications = { + same_uncut_size_as_sibling, same_uncut_size_as_parent, + cut_equals_piece_size, sum_of_sizes}; + + return solver + ->MakeConditionalExpression( + condition_var, + solver->MakeIsEqualCstVar(solver->MakeSum(cut_implications), + cut_implications.size()), + 0) + ->Var(); +} + +// Sets initial variables: +// stores piece sizes related variables and sets maximum value +// and maximum number of elements in a cutting path. +void SetInitialElements(const ConstrainedGuillotineCuttingData& data, + std::vector* sizes_to_pieces, + std::vector* piece_length, + std::vector* piece_width, int* maximum_value, + int* maximum_elements) { + CHECK(sizes_to_pieces != nullptr); + CHECK(piece_length != nullptr); + CHECK(piece_width != nullptr); + CHECK(maximum_value != nullptr); + CHECK(maximum_elements != nullptr); + sizes_to_pieces->resize((data.root_length() + 1) * (data.root_width() + 1), + data.pieces().size()); + piece_length->resize(data.root_length() + 1, 0); + piece_width->resize(data.root_width() + 1, 0); + + const int main_rectangle_area = data.root_length() * data.root_width(); + + // Number of elements in the path should dependent on the number + // of end pieces. Considering that at every point we could in 2 cuts + // get to an end piece, which means maximum 4 new pieces, + // a limit of 4 * number_of_end_pieces should fit the path. 
+ static const int kMultiplyNumOfEndPiecesBy = 4; + *maximum_value = 0; + *maximum_elements = 1; + + int index = 0; + for (const ConstrainedGuillotineCuttingData::Piece& piece : data.pieces()) { + if (piece.length <= data.root_length() && + piece.width <= data.root_width()) { + (*sizes_to_pieces)[piece.length * data.root_width() + piece.width] = + index; + (*piece_length)[piece.length] = 1; + (*piece_width)[piece.width] = 1; + } + + const int number_of_appearances = + std::min(piece.max_appearances, + main_rectangle_area / (piece.length * piece.width)); + + *maximum_value += piece.value * number_of_appearances; + *maximum_elements += kMultiplyNumOfEndPiecesBy * number_of_appearances; + index++; + } + + // TODO(user): a better upper bound for value and for + // maximum_elements. +} + +void SetRectanglesVariablesAndAddConstraints( + const std::vector& piece_length, const std::vector& piece_width, + const int maximum_elements, const int root_length, const int root_width, + std::vector* parent_index, std::vector* rectangle_length, + std::vector* rectangle_width, Solver* solver) { + CHECK(parent_index != nullptr); + CHECK(rectangle_length != nullptr); + CHECK(rectangle_width != nullptr); + CHECK(solver != nullptr); + + static const int kMainRectangleIndex = 0; + + solver->MakeIntVarArray(maximum_elements / 2 + 2, -1, maximum_elements, + "parent_index_", parent_index); + solver->MakeIntVarArray(maximum_elements, 0, root_length, "length_", + rectangle_length); + solver->MakeIntVarArray(maximum_elements, 0, root_width, "width_", + rectangle_width); + + (*parent_index)[kMainRectangleIndex]->SetValue(-1); + (*rectangle_length)[kMainRectangleIndex]->SetValue(root_length); + (*rectangle_width)[kMainRectangleIndex]->SetValue(root_width); + + // Any rectangle can be cut just once. 
+ solver->AddConstraint(solver->MakeAllDifferent(*parent_index)); + + std::vector x_guillotine_cut; + std::vector y_guillotine_cut; + + // Every 2 consecutive cuts are from the same rectangle starting with + // position 1, since at index 0 we keep information regarding the + // main rectangle. + for (int i = 1; i < maximum_elements; i += 2) { + // The rectangle from which we cut needs to be < i. In case we do not cut + // anything (the elements are all 0) the parent_index will be i. + solver->AddConstraint( + solver->MakeLessOrEqual((*parent_index)[i / 2 + 1], i)); + + // If one of the sizes is 0, then all are 0 and the parent does not point + // to a real parent, but to itself, since we cannot have a valid cut + // that leaves one size 0. + IntVar* length_is_zero = + solver->MakeIsEqualCstVar((*rectangle_length)[i], 0); + + solver->AddConstraint(solver->MakeEquality( + length_is_zero, + solver->MakeIsEqualCstVar((*rectangle_length)[i + 1], 0))); + + solver->AddConstraint(solver->MakeEquality( + length_is_zero, solver->MakeIsEqualCstVar((*rectangle_width)[i], 0))); + + solver->AddConstraint(solver->MakeEquality( + length_is_zero, + solver->MakeIsEqualCstVar((*rectangle_width)[i + 1], 0))); + + solver->AddConstraint(solver->MakeEquality( + length_is_zero, + solver->MakeIsEqualCstVar((*parent_index)[i / 2 + 1], i))); + + // Group 0-cuts together at the beginning. So after a normal cut there + // will not be any 0-cuts. + if (i > 1) { + solver->AddConstraint(solver->MakeLessOrEqual( + solver->MakeIsGreaterOrEqualCstVar((*rectangle_length)[i - 1], 1), + solver->MakeIsGreaterOrEqualCstVar((*rectangle_length)[i], 1))); + } + + // If it is an x-guillotine cut. + x_guillotine_cut.push_back(IsAGuillotineCut(i, *rectangle_length, + *rectangle_width, *parent_index, + piece_length, solver)); + + // If it is an y-guillotine cut. 
+ y_guillotine_cut.push_back( + IsAGuillotineCut(i, *rectangle_width, *rectangle_length, *parent_index, + piece_width, solver)); + + // Every pair of rectangles should correspond to a guillotine cut + // on one of the axis or they could be 0 if there was no cut made. + solver->AddConstraint(solver->MakeEquality( + solver->MakeSum( + length_is_zero, + solver->MakeSum( + solver->MakeIsEqualCstVar(x_guillotine_cut[i / 2], 1), + solver->MakeIsEqualCstVar(y_guillotine_cut[i / 2], 1))), + 1)); + } +} + +void AddAdditionalConstraints(const std::vector& parent_index, + const std::vector& rectangle_length, + const std::vector& rectangle_width, + const std::vector& sizes_to_pieces, + const ConstrainedGuillotineCuttingData& data, + int maximum_elements, + std::vector* is_end_piece, + std::vector* was_cut, IntVar* value, + Solver* solver) { + CHECK(is_end_piece != nullptr); + CHECK(was_cut != nullptr); + CHECK(value != nullptr); + CHECK(solver != nullptr); + solver->MakeIntVarArray(maximum_elements, 0, 1, "", was_cut); + + for (int i = 0; i < maximum_elements; ++i) { + solver->AddConstraint(solver->MakeCount(parent_index, i, (*was_cut)[i])); + + is_end_piece->push_back( + solver + ->MakeConditionalExpression( + solver->MakeIsEqualCstVar((*was_cut)[i], 0), + solver->MakeElement( + sizes_to_pieces, + solver + ->MakeSum(solver->MakeProd(rectangle_length[i], + data.root_width()), + rectangle_width[i]) + ->Var()), + data.pieces().size()) + ->Var()); + } + + int index = 0; + std::vector values; + + for (const ConstrainedGuillotineCuttingData::Piece& piece : data.pieces()) { + // Number of appearances of every type should be less or equal + // to the maximum number of times a piece can appear. + IntVar* const appearances = + solver->MakeIntVar(0, (data.root_length() * data.root_width()) / + (piece.length * piece.width)); + + // The number of appearances of every piece should be equal + // to the number of times that piece appears in a path as an end piece. 
+ solver->AddConstraint(solver->MakeCount(*is_end_piece, index, appearances)); + + // Values will contain for every piece: + // number_of_time_the_piece_appears * its value. + values.push_back( + solver + ->MakeProd(solver->MakeMin(appearances, piece.max_appearances), + piece.value) + ->Var()); + + index++; + } + + solver->AddConstraint(solver->MakeEquality(value, solver->MakeSum(values))); +} + +void CreateAdditionalMonitors(absl::Duration time_limit, + std::vector* monitors, + OptimizeVar* objective_value, Solver* solver) { + CHECK(monitors != nullptr); + CHECK(objective_value != nullptr); + CHECK(solver != nullptr); + monitors->push_back(objective_value); + + static const int kLogFrequency = 100000; + SearchMonitor* const log = + solver->MakeSearchLog(kLogFrequency, objective_value); + monitors->push_back(log); + + if (time_limit != absl::InfiniteDuration()) { + SearchLimit* const limit = solver->MakeTimeLimit(time_limit); + monitors->push_back(limit); + } +} + +DecisionBuilder* CreateDecisionBuilder( + const std::vector& parent_index, + const std::vector& rectangle_length, + const std::vector& rectangle_width, + const std::vector& was_cut, Solver* solver) { + CHECK(solver != nullptr); + std::vector decision_variables; + for (int i = 1; i < rectangle_length.size() / 2 + 1; ++i) { + decision_variables.push_back(parent_index[i]); + decision_variables.push_back(rectangle_length[2 * (i - 1) + 1]); + decision_variables.push_back(rectangle_width[2 * (i - 1) + 1]); + } + for (int i = 0; i < rectangle_length.size(); ++i) { + decision_variables.push_back(was_cut[i]); + } + + return solver->MakePhase(decision_variables, Solver::CHOOSE_FIRST_UNBOUND, + Solver::ASSIGN_MAX_VALUE); +} + +void FillSolution( + const std::vector& parent_index, + const std::vector& rectangle_length, + const std::vector& rectangle_width, + const SolutionCollector* collector, IntVar* value, int* maximum_value, + std::vector* solution) { + CHECK(collector != nullptr); + CHECK(value != nullptr); + 
CHECK(maximum_value != nullptr); + CHECK(solution != nullptr); + + int number_of_zero_cuts = 0; + int parent = -1; + for (int i = 0; i < rectangle_length.size(); ++i) { + if (!collector->Value(0, rectangle_length[i])) { + number_of_zero_cuts++; + continue; + } + + if (i % 2 == 1) { + parent = std::max( + collector->Value(0, parent_index[i / 2 + 1]) - number_of_zero_cuts, + int64_t{0}); + } + + solution->emplace_back(parent, collector->Value(0, rectangle_length[i]), + collector->Value(0, rectangle_width[i])); + } + + *maximum_value = collector->Value(0, value); +} + +void ValidateSolution(int num_pieces, int root_width, + const std::vector& parent_index, + const std::vector& rectangle_length, + const std::vector& rectangle_width, + const std::vector& is_end_piece, + absl::Span sizes_to_pieces, + const SolutionCollector* collector) { + CHECK(collector != nullptr); + absl::btree_set parent_ids; + for (int i = 0; i < parent_index.size(); ++i) { + parent_ids.insert(collector->Value(0, parent_index[i])); + // The rectangle from which the rectangles were cut needs to be + // <= current position. For every pair of rectangles we keep + // their parent index once. + CHECK(collector->Value(0, parent_index[i]) <= i * 2 - 1); + } + // Every piece should be cut just once. 
+ CHECK_EQ(parent_ids.size(), parent_index.size()); + + bool guillotine_cut = false; + for (int i = 1; i < rectangle_length.size(); i += 2) { + const int parent = collector->Value(0, parent_index[i / 2 + 1]); + const int length_left_rectangle = collector->Value(0, rectangle_length[i]); + const int length_rigth_rectangle = + collector->Value(0, rectangle_length[i + 1]); + const int length_parent = collector->Value(0, rectangle_length[parent]); + const int width_parent = collector->Value(0, rectangle_width[parent]); + const int width_left_rectangle = collector->Value(0, rectangle_width[i]); + const int width_right_rectangle = + collector->Value(0, rectangle_width[i + 1]); + + const bool is_a_x_guillotine_cut = + length_left_rectangle + length_rigth_rectangle == length_parent && + length_left_rectangle && length_rigth_rectangle && + width_left_rectangle == width_right_rectangle && + width_left_rectangle == width_parent; + + const bool is_a_y_guillotine_cut = + width_left_rectangle + width_right_rectangle == width_parent && + width_left_rectangle && width_right_rectangle && + length_left_rectangle == length_rigth_rectangle && + length_left_rectangle == length_parent; + + const bool is_a_zero_cut = !length_left_rectangle && + !length_rigth_rectangle && + !width_left_rectangle && !width_right_rectangle; + + // Every cut is a guillotine cut or all elements are 0. + CHECK(is_a_x_guillotine_cut || is_a_y_guillotine_cut || is_a_zero_cut); + + // Check if it is a piece. + const int it_is_piece1 = + parent_ids.contains(i) + ? num_pieces + : sizes_to_pieces[length_left_rectangle * root_width + + width_left_rectangle]; + + const int it_is_piece2 = + parent_ids.contains(i + 1) + ? 
num_pieces + : sizes_to_pieces[length_rigth_rectangle * root_width + + width_right_rectangle]; + + CHECK_EQ(it_is_piece1, collector->Value(0, is_end_piece[i])); + CHECK_EQ(it_is_piece2, collector->Value(0, is_end_piece[i + 1])); + + // Check that all 0-cuts (both rectangles are 0x0) are grouped together. + CHECK_LE(guillotine_cut, is_a_x_guillotine_cut || is_a_y_guillotine_cut); + guillotine_cut |= is_a_x_guillotine_cut || is_a_y_guillotine_cut; + } +} + +} // namespace + +void ConstrainedGuillotineCutting::PrintSolution() const { + CHECK(solved_); + + absl::PrintF("Maximum value: %d\n", maximum_value_); + absl::PrintF("Main rectangle 0 sizes: %dx%d\n", data_->root_length(), + data_->root_width()); + for (int i = 1; i < solution_.size(); ++i) { + if (i % 2 == 1) { + absl::PrintF("\nRectangle %d was cut in: \n", solution_[i].parent_index); + } + absl::PrintF("Rectangle %d sizes: %dx%d\n", i, solution_[i].length, + solution_[i].width); + } +} + +void ConstrainedGuillotineCutting::Solve(absl::Duration time_limit) { + const std::vector& pieces = + data_->pieces(); + + // Depending on the size of a rectangle, it represents the index of + // the piece to which it corresponds. If it does not correspond to + // any piece, than it will remain pieces.size(). + std::vector sizes_to_pieces; + + // Depending on the length(width) of a rectangle, it will be 1 if + // there exists a piece that has that length(width). + std::vector piece_length; + std::vector piece_width; + + int maximum_value; + int maximum_elements; + SetInitialElements(*data_, &sizes_to_pieces, &piece_length, &piece_width, + &maximum_value, &maximum_elements); + + // For every pair of rectangles the index of the rectangle + // that the rectangles were cut of. 
+ std::vector parent_index; + + // sizes of the rectangles + std::vector rectangle_length; + std::vector rectangle_width; + SetRectanglesVariablesAndAddConstraints( + piece_length, piece_width, maximum_elements, data_->root_length(), + data_->root_width(), &parent_index, &rectangle_length, &rectangle_width, + &solver_); + + // Contains the piece that this rectangle equals to if it is + // an end piece (it was not cut). + std::vector is_end_piece; + // For every piece it is true if the corresponding rectangle + // was cut. + std::vector was_cut; + IntVar* const value = solver_.MakeIntVar(0, maximum_value); + AddAdditionalConstraints(parent_index, rectangle_length, rectangle_width, + sizes_to_pieces, *data_, maximum_elements, + &is_end_piece, &was_cut, value, &solver_); + // Objective: maximize the value of the end pieces. + OptimizeVar* const objective_value = solver_.MakeMaximize(value, 1); + + DecisionBuilder* const db = CreateDecisionBuilder( + parent_index, rectangle_length, rectangle_width, was_cut, &solver_); + std::vector monitors; + + SolutionCollector* const collector = solver_.MakeLastSolutionCollector(); + collector->Add(parent_index); + collector->Add(rectangle_length); + collector->Add(rectangle_width); + collector->Add(is_end_piece); + collector->Add(value); + monitors.push_back(collector); + + CreateAdditionalMonitors(time_limit, &monitors, objective_value, &solver_); + + const int64_t start_time = solver_.wall_time(); + solver_.Solve(db, monitors); + const int64_t end_time = solver_.wall_time(); + + LOG(INFO) << "The process took: " << (end_time - start_time) / 1000.0 + << " seconds."; + + if (collector->solution_count()) { + ValidateSolution(pieces.size(), data_->root_width(), parent_index, + rectangle_length, rectangle_width, is_end_piece, + sizes_to_pieces, collector); + + solved_ = true; + FillSolution(parent_index, rectangle_length, rectangle_width, collector, + value, &maximum_value_, &solution_); + } +} + +} // namespace operations_research 
diff --git a/examples/cpp/cgc.h b/examples/cpp/cgc.h new file mode 100644 index 00000000000..63faed008cb --- /dev/null +++ b/examples/cpp/cgc.h @@ -0,0 +1,91 @@ +// Copyright 2010-2025 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Two-Dimensional Constrained Guillotine Cutting +// +// This file contains a solver for the Two-Dimensional Constrained +// Guillotine Cutting Problem. The problem requires cutting a plane +// rectangle into smaller rectangular pieces of given sizes and values +// in order to maximize the sum of the values of the cut pieces in which +// there is a constraint on the maximum number of each type of piece that +// is to be produced and all cuts go from one edge of the rectangle to be +// cut to the opposite edge. +// +// If cgc_time_limit_in_ms is defined, it provides the best value +// achieved in that amount of time. 
+// +// Example usage: +// +// std::unique_ptr +// data(new operations_research::ConstrainedGuillotineCuttingData()); +// data->LoadFromFile(file_path); +// operations_research::ConstrainedGuillotineCutting cgc(std::move(data)); +// cgc.Solve(absl::Milliseconds(absl::GetFlag(FLAGS_time_limit_in_ms))); +// if (cgc.Solved()) { +// cgc.PrintSolution(); +// } + +#ifndef ORTOOLS_EXAMPLES_CGC_H_ +#define ORTOOLS_EXAMPLES_CGC_H_ + +#include +#include +#include +#include +#include + +#include "examples/cpp/cgc_data.h" +#include "ortools/constraint_solver/constraint_solver.h" + +namespace operations_research { + +class ConstrainedGuillotineCutting { + public: + struct CutRectangle { + CutRectangle(int parent_index, int length, int width) + : parent_index(parent_index), length(length), width(width) {} + + int parent_index; + int length; + int width; + }; + + explicit ConstrainedGuillotineCutting( + std::unique_ptr data) + : data_(std::move(data)), + solver_("ConstrainedGuillotineCutting"), + solved_(false), + maximum_value_(0) {} + + int MaximumValue() const { + DCHECK(solved_); + return maximum_value_; + } + bool Solved() const { return solved_; } + + void PrintSolution() const; + void Solve(absl::Duration time_limit); + + private: + // Contains the problem parameters. + std::unique_ptr data_; + Solver solver_; + + bool solved_; + int maximum_value_; + std::vector solution_; +}; + +} // namespace operations_research + +#endif // ORTOOLS_EXAMPLES_CGC_H_ diff --git a/examples/cpp/cgc_data.h b/examples/cpp/cgc_data.h new file mode 100644 index 00000000000..34cb86e4e49 --- /dev/null +++ b/examples/cpp/cgc_data.h @@ -0,0 +1,70 @@ +// Copyright 2010-2025 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Two-Dimensional Constrained Guillotine Cutting +// +// The file contains code to load the problem, in the format detailed below. +// +// Input (on different lines): +// - number of pieces +// - length and width for the plane rectangle +// - for each piece (one line for every piece): +// - length +// - width +// - maximum number of pieces of that type that can be cut +// - value of the piece +// +// For more details and sample input (and format) see: +// - http://people.brunel.ac.uk/~mastjjb/jeb/orlib/cgcutinfo.html +// - //ortools/examples/testdata/cgc contains examples +// of input files. + +#ifndef ORTOOLS_EXAMPLES_CGC_DATA_H_ +#define ORTOOLS_EXAMPLES_CGC_DATA_H_ + +#include +#include + +namespace operations_research { + +class ConstrainedGuillotineCuttingData { + public: + // Each rectangular piece from the input is represented + // as an instance of this structure. 
+ struct Piece { + int length; + int width; + int max_appearances; + int value; + }; + + ConstrainedGuillotineCuttingData() : root_length_(0), root_width_(0) {} + + bool LoadFromFile(const std::string& input_file); + + // Accessors for problem specification data + int root_length() const { return root_length_; } + int root_width() const { return root_width_; } + const std::vector& pieces() const { return pieces_; } + + private: + // main rectangle size + int root_length_; + int root_width_; + + std::vector pieces_; +}; + +} // namespace operations_research + +#endif // ORTOOLS_EXAMPLES_CGC_DATA_H_ diff --git a/examples/cpp/cgc_main.cc b/examples/cpp/cgc_main.cc new file mode 100644 index 00000000000..8ab189e3aaa --- /dev/null +++ b/examples/cpp/cgc_main.cc @@ -0,0 +1,81 @@ +// Copyright 2010-2025 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This file implements the main function for the Two-Dimensional +// Constrained Guillotine Cutting solver. It reads the problem +// specification from an input file specified via command-line flags, +// and prints the solution to standard output. +// +// Example usage: +// ./cgc_main --input_file=testdata/cgc/my_input_file.in +// Other examples of input files in testdata/cgc/. 
+ +#include +#include +#include + +#include "absl/flags/flag.h" +#include "absl/strings/str_format.h" +#include "absl/time/time.h" +#include "examples/cpp/cgc.h" +#include "examples/cpp/cgc_data.h" +#include "ortools/base/init_google.h" +#include "ortools/base/logging.h" + +ABSL_FLAG(std::string, input_file, "", "Input data file"); +ABSL_FLAG(int, time_limit_in_ms, 0, + "Time limit in milliseconds. 0 means no time limit. " + "If different, the solver will provide the best solution " + "that was found in that amount of time."); +ABSL_FLAG(bool, print_maximum_value, false, + "If true, it prints the maximum value found."); +ABSL_FLAG(bool, print_solution, false, + "If true, it prints the maximum value and the cutting pattern."); + +using operations_research::ConstrainedGuillotineCutting; +using operations_research::ConstrainedGuillotineCuttingData; + +int main(int argc, char** argv) { + InitGoogle(argv[0], &argc, &argv, true); + + if (absl::GetFlag(FLAGS_input_file).empty()) { + LOG(QFATAL) << "Please supply an input file with --input_file="; + } + LOG(INFO) << "Processing file " << absl::GetFlag(FLAGS_input_file); + + auto data = std::make_unique(); + + if (!data->LoadFromFile(absl::GetFlag(FLAGS_input_file))) { + LOG(QFATAL) << "Input file " << absl::GetFlag(FLAGS_input_file) + << " was not loaded."; + } + + ConstrainedGuillotineCutting cgc(std::move(data)); + const absl::Duration time_limit = + absl::GetFlag(FLAGS_time_limit_in_ms) == 0 + ? 
absl::InfiniteDuration() + : absl::Milliseconds(absl::GetFlag(FLAGS_time_limit_in_ms)); + cgc.Solve(time_limit); + + if (cgc.Solved()) { + if (absl::GetFlag(FLAGS_print_solution)) { + cgc.PrintSolution(); + } else if (absl::GetFlag(FLAGS_print_maximum_value)) { + absl::PrintF("%d", cgc.MaximumValue()); + } else { + LOG(INFO) << "The maximum value found is: " << cgc.MaximumValue(); + } + } else { + absl::PrintF("There was no solution found in %v ms.\n", time_limit); + } +} diff --git a/examples/cpp/cgc_test_solution.bintest b/examples/cpp/cgc_test_solution.bintest new file mode 100644 index 00000000000..25189556b48 --- /dev/null +++ b/examples/cpp/cgc_test_solution.bintest @@ -0,0 +1,24 @@ +# Tests the Two-Dimensional Constrained Guillotine Cutting solver +# by loading example input files from the testdata directory. + +# The following tests are too big and don't converge within a second so we don't +# check for a particular solution. Instead we make sure that the value is +# greater than zero. 
+ +RUN: $(cgc_main) --input_file=$(1.in) --time_limit_in_ms=1000 --print_maximum_value=true +CHECK: "@num(>0)" + +RUN: $(cgc_main) --input_file=$(2.in) --time_limit_in_ms=1000 --print_maximum_value=true +CHECK: "@num(>0)" + +RUN: $(cgc_main) --input_file=$(3.in) --time_limit_in_ms=1000 --print_maximum_value=true +CHECK: "@num(>0)" + +RUN: $(cgc_main) --input_file=$(cgcut1.in) --time_limit_in_ms=1000 --print_maximum_value=true +CHECK: "@num(>0)" + +RUN: $(cgc_main) --input_file=$(cgcut2.in) --time_limit_in_ms=1000 --print_maximum_value=true +CHECK: "@num(>0)" + +RUN: $(cgc_main) --input_file=$(cgcut3.in) --time_limit_in_ms=1000 --print_maximum_value=true +CHECK: "@num(>0)" diff --git a/examples/cpp/constraint_programming_cp_test.bintest b/examples/cpp/constraint_programming_cp_test.bintest new file mode 100644 index 00000000000..50a49955fa5 --- /dev/null +++ b/examples/cpp/constraint_programming_cp_test.bintest @@ -0,0 +1 @@ +RUN: $(constraint_programming_cp) diff --git a/examples/cpp/costas_array_sat_model1_test.bintest b/examples/cpp/costas_array_sat_model1_test.bintest new file mode 100644 index 00000000000..10978aa7c10 --- /dev/null +++ b/examples/cpp/costas_array_sat_model1_test.bintest @@ -0,0 +1 @@ +RUN: $(costas_array_sat) --minsize=6 --maxsize=6 --model=1 diff --git a/examples/cpp/costas_array_sat_model2_test.bintest b/examples/cpp/costas_array_sat_model2_test.bintest new file mode 100644 index 00000000000..279cbe8b28f --- /dev/null +++ b/examples/cpp/costas_array_sat_model2_test.bintest @@ -0,0 +1 @@ +RUN: $(costas_array_sat) --minsize=6 --maxsize=6 --model=2 diff --git a/examples/cpp/costas_array_sat_model3_test.bintest b/examples/cpp/costas_array_sat_model3_test.bintest new file mode 100644 index 00000000000..45de6bb2807 --- /dev/null +++ b/examples/cpp/costas_array_sat_model3_test.bintest @@ -0,0 +1 @@ +RUN: $(costas_array_sat) --minsize=6 --maxsize=6 --model=3 diff --git a/examples/cpp/course_scheduling.proto b/examples/cpp/course_scheduling.proto 
deleted file mode 100644 index 167c83523c4..00000000000 --- a/examples/cpp/course_scheduling.proto +++ /dev/null @@ -1,188 +0,0 @@ -// Copyright 2010-2025 Google LLC -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package operations_research; - -// Information required to create a schedule for a school system. -message CourseSchedulingModel { - // Schedule name, used only for logging purposes. - string display_name = 1; - - // The number of days in a schedule rotation. If the school system uses a - // block schedule, this value should be 1. - int32 days_count = 2; - - // The number of time slots each day in a schedule rotation. If the school - // system uses a block schedule, this value is the number of blocks. - int32 daily_time_slot_count = 3; - - // List of courses that need to be scheduled. - repeated Course courses = 4; - - // List of teachers. - repeated Teacher teachers = 5; - - // List of students that need to be assigned to these courses. - repeated Student students = 6; - - // List of rooms that the courses can be assigned to. - repeated Room rooms = 7; -} - -// Holds the solution to the course scheduling problem. -message CourseSchedulingResult { - // Human readable message about the solver or given model. - string message = 1; - - // Status of the solver. - CourseSchedulingResultStatus solver_status = 2; - - // List of the time slot and room assignments for each section of a course. 
- repeated ClassAssignment class_assignments = 3; - - // List of course and section assignments for each student. - repeated StudentAssignment student_assignments = 4; -} - -message ClassAssignment { - // Index of the course in the CourseSchedulingModel. - int32 course_index = 1; - - // Specific section of the course in the CourseSchedulingModel. - int32 section_number = 2; - - // Time slots that this class has been assigned to in the - // CourseSchedulingModel. - repeated int32 time_slots = 3; - - // Indices of the rooms that the class is assigned to in the - // CourseSchedulingModel. If this is not empty, then the number of indices - // must match the number of time slots. - repeated int32 room_indices = 4; -} - -message StudentAssignment { - // Index of the student in the CourseSchedulingModel. - int32 student_index = 1; - - // Course indices in the CourseSchedulingModel that this student has been - // assigned to. The number of indices must match the number of section - // indices. - repeated int32 course_indices = 2; - - // Section indices for each Course in the CourseSchedulingModel this this - // student has been assigned to. The number of indices must match the number - // of course indices. - repeated int32 section_indices = 3; -} - -message Course { - // Course name, used only for logging purposes. - string display_name = 1; - - // The number of times each section of this course needs to meet during a - // schedule rotation. Each section of the course meets no more than once a - // day. If the school system uses a block schedule, then this value should - // be 1. - int32 meetings_count = 2; - - // The maximum number of students for this course. This value can be equal to - // +Infinity to encode a course has no maximum capacity. - int32 max_capacity = 3; - - // The minimum number of students for this course. - int32 min_capacity = 4; - - // The number of consecutive time slots that each section of this course needs - // to be scheduled for. 
This value can only be 1 or 2. If the value is 2, then - // 2 consecutive time slots in a day counts as 1 meeting time for the section. - int32 consecutive_slots_count = 5; - - // List of indices for the teachers of this course. We are assuming that each - // teacher teaches separately. Must have the same number of elements as the - // number of sections list. - repeated int32 teacher_indices = 6; - - // The number of sections each teacher teaches of this course. Must have the - // same number of elements as the teacher index list. - repeated int32 teacher_section_counts = 7; - - // List of the possible rooms that this course can be assigned to. This can - // be empty. - repeated int32 room_indices = 8; -} - -message Teacher { - // Teacher name, used only for logging purposes. - string display_name = 1; - - // List of time slots that the teacher cannot be scheduled for. These time - // slot values index to the accumulative number of time slots starting at 0. - // For example, if a schedule rotation has 5 days and 8 time slots per day, - // and a teacher cannot be scheduled for the last time slot of the fourth - // day, the number here would be 31. - repeated int32 restricted_time_slots = 2; -} - -message Student { - // Student name, used only for logging purposes. - string display_name = 1; - - // List of course indices that the student needs to be enrolled in. - repeated int32 course_indices = 2; -} - -message Room { - // Room name, used only for logging purposes. - string display_name = 1; - - // Maximum number of students that can fit into this room. - int32 capacity = 2; -} - -// Status returned by the solver. -enum CourseSchedulingResultStatus { - COURSE_SCHEDULING_RESULT_STATUS_UNSPECIFIED = 0; - - // The solver had enough time to find some solution that satisfies all - // constraints, but it did not prove optimality (which means it may or may - // not have reached the optimal). 
- // - // This can happen for large LP models (linear programming), and is a frequent - // response for time-limited MIPs (mixed integer programming). This is also - // what the CP (constraint programming) solver will return if there is no - // objective specified. - SOLVER_FEASIBLE = 1; - - // The solver found the proven optimal solution. - SOLVER_OPTIMAL = 2; - - // The model does not have any solution, according to the solver (which - // "proved" it, with the caveat that numerical proofs aren't actual proofs), - // or based on trivial considerations (eg. a variable whose lower bound is - // strictly greater than its upper bound). - SOLVER_INFEASIBLE = 3; - - // Model errors. These are always deterministic and repeatable. - // They should be accompanied with a string description of the error. - SOLVER_MODEL_INVALID = 4; - - // The model has not been solved in the given time or the solver was not able - // to solve the model given. - SOLVER_NOT_SOLVED = 5; - - // An error (either numerical or from a bug in the code) occurred. 
- ABNORMAL = 6; -} diff --git a/examples/cpp/cryptarithm_sat_test.bintest b/examples/cpp/cryptarithm_sat_test.bintest new file mode 100644 index 00000000000..b919bcc24c7 --- /dev/null +++ b/examples/cpp/cryptarithm_sat_test.bintest @@ -0,0 +1 @@ +RUN: $(cryptarithm_sat) diff --git a/examples/cpp/dimacs_assignment.cc b/examples/cpp/dimacs_assignment.cc index bcf43712abc..b8f1917f8c9 100644 --- a/examples/cpp/dimacs_assignment.cc +++ b/examples/cpp/dimacs_assignment.cc @@ -20,7 +20,7 @@ #include "absl/container/flat_hash_map.h" #include "absl/flags/flag.h" -#include "absl/strings/str_format.h" +#include "absl/strings/string_view.h" #include "examples/cpp/parse_dimacs_assignment.h" #include "examples/cpp/print_dimacs_assignment.h" #include "ortools/algorithms/hungarian.h" @@ -156,25 +156,18 @@ int SolveDimacsAssignment(int argc, char* argv[]) { } } // namespace operations_research -static const char* const kUsageTemplate = "usage: %s "; - using ::operations_research::ArcIndex; using ::operations_research::NodeIndex; using ::operations_research::SolveDimacsAssignment; int main(int argc, char* argv[]) { - std::string usage; - if (argc < 1) { - usage = absl::StrFormat(kUsageTemplate, "solve_dimacs_assignment"); - } else { - usage = absl::StrFormat(kUsageTemplate, argv[0]); - } - InitGoogle(usage.c_str(), &argc, &argv, true); + InitGoogle(argv[0], &argc, &argv, true); if (argc < 2) { - LOG(FATAL) << usage; + LOG(FATAL) << "Missing input file."; } + absl::SetStderrThreshold(absl::LogSeverityAtLeast::kInfo); if (absl::GetFlag(FLAGS_assignment_static_graph)) { return SolveDimacsAssignment<::util::StaticGraph>( argc, argv); diff --git a/examples/cpp/dimacs_assignment_max_cost_test.bintest b/examples/cpp/dimacs_assignment_max_cost_test.bintest new file mode 100644 index 00000000000..3352b7ad851 --- /dev/null +++ b/examples/cpp/dimacs_assignment_max_cost_test.bintest @@ -0,0 +1,2 @@ +RUN: $(dimacs_assignment) $(dimacs_example.txt) --assignment_maximize_cost 2>&1 +CHECK: "Cost 
of optimum assignment: -110" diff --git a/examples/cpp/dimacs_assignment_min_cost_test.bintest b/examples/cpp/dimacs_assignment_min_cost_test.bintest new file mode 100644 index 00000000000..0a816928ce8 --- /dev/null +++ b/examples/cpp/dimacs_assignment_min_cost_test.bintest @@ -0,0 +1,2 @@ +RUN: $(dimacs_assignment) $(dimacs_example.txt) 2>&1 +CHECK: "Cost of optimum assignment: 84" diff --git a/examples/cpp/dobble_ls_test.bintest b/examples/cpp/dobble_ls_test.bintest new file mode 100644 index 00000000000..65ba1b023b6 --- /dev/null +++ b/examples/cpp/dobble_ls_test.bintest @@ -0,0 +1 @@ +RUN: $(dobble_ls) --time_limit_in_ms=10000 diff --git a/examples/cpp/fap_parser.cc b/examples/cpp/fap_parser.cc index 4eb1a8f4237..5dc61e2b76e 100644 --- a/examples/cpp/fap_parser.cc +++ b/examples/cpp/fap_parser.cc @@ -21,6 +21,7 @@ #include "absl/strings/numbers.h" #include "absl/strings/str_split.h" #include "absl/strings/string_view.h" +#include "absl/types/span.h" #include "ortools/base/helpers.h" #include "ortools/base/map_util.h" @@ -94,7 +95,7 @@ void DomainParser::Parse() { } if (!domain.empty()) { - gtl::InsertOrUpdate(&domains_, key, domain); + domains_.insert_or_assign(key, domain); } } } @@ -198,7 +199,7 @@ void ParametersParser::Parse() { } // TODO(user): Make FindComponents linear instead of quadratic. -void FindComponents(const std::vector& constraints, +void FindComponents(absl::Span constraints, const absl::btree_map& variables, const int maximum_variable_id, absl::flat_hash_map* components) { @@ -216,20 +217,20 @@ void FindComponents(const std::vector& constraints, // Create a new one. 
FapComponent component; const int component_index = constraint_index; - gtl::InsertOrUpdate(&(component.variables), variable_id1, variable1); - gtl::InsertOrUpdate(&(component.variables), variable_id2, variable2); + (component.variables).insert_or_assign(variable_id1, variable1); + (component.variables).insert_or_assign(variable_id2, variable2); in_component[variable_id1] = component_index; in_component[variable_id2] = component_index; component.constraints.push_back(constraint); - gtl::InsertOrUpdate(components, component_index, component); + components->insert_or_assign(component_index, component); } else if (in_component[variable_id1] >= 0 && in_component[variable_id2] < 0) { // If variable1 belongs to an existing component, variable2 should // also be included in the same component. const int component_index = in_component[variable_id1]; CHECK(components->contains(component_index)); - gtl::InsertOrUpdate(&((*components)[component_index].variables), - variable_id2, variable2); + ((*components)[component_index].variables) + .insert_or_assign(variable_id2, variable2); in_component[variable_id2] = component_index; (*components)[component_index].constraints.push_back(constraint); } else if (in_component[variable_id1] < 0 && @@ -238,8 +239,8 @@ void FindComponents(const std::vector& constraints, // also be included in the same component. 
const int component_index = in_component[variable_id2]; CHECK(components->contains(component_index)); - gtl::InsertOrUpdate(&((*components)[component_index].variables), - variable_id1, variable1); + ((*components)[component_index].variables) + .insert_or_assign(variable_id1, variable1); in_component[variable_id1] = component_index; (*components)[component_index].constraints.push_back(constraint); } else { diff --git a/examples/cpp/fap_parser.h b/examples/cpp/fap_parser.h index 66e9f1706e9..acdb63e3b06 100644 --- a/examples/cpp/fap_parser.h +++ b/examples/cpp/fap_parser.h @@ -23,6 +23,7 @@ #include "absl/container/btree_map.h" #include "absl/container/flat_hash_map.h" #include "absl/strings/string_view.h" +#include "absl/types/span.h" namespace operations_research { @@ -214,7 +215,7 @@ class ParametersParser { }; // Function that finds the disjoint sub-graphs of the graph of the instance. -void FindComponents(const std::vector& constraints, +void FindComponents(absl::Span constraints, const absl::btree_map& variables, int maximum_variable_id, absl::flat_hash_map* components); diff --git a/examples/cpp/flow_api_test.bintest b/examples/cpp/flow_api_test.bintest new file mode 100644 index 00000000000..6bca712df4c --- /dev/null +++ b/examples/cpp/flow_api_test.bintest @@ -0,0 +1 @@ +RUN: $(flow_api) diff --git a/examples/cpp/golomb_sat_test.bintest b/examples/cpp/golomb_sat_test.bintest new file mode 100644 index 00000000000..7fd6596d7a1 --- /dev/null +++ b/examples/cpp/golomb_sat_test.bintest @@ -0,0 +1 @@ +RUN: $(golomb_sat) --size 5 diff --git a/examples/cpp/integer_programming_test.bintest b/examples/cpp/integer_programming_test.bintest new file mode 100644 index 00000000000..bad463547b3 --- /dev/null +++ b/examples/cpp/integer_programming_test.bintest @@ -0,0 +1 @@ +RUN: $(integer_programming) diff --git a/examples/cpp/jobshop_sat_ft06.bintest b/examples/cpp/jobshop_sat_ft06.bintest new file mode 100644 index 00000000000..366a359b4ee --- /dev/null +++ 
b/examples/cpp/jobshop_sat_ft06.bintest @@ -0,0 +1 @@ +RUN: $(jobshop_sat) --input $(ft06) diff --git a/examples/cpp/knapsack_2d_sat_class01_instance2_test.bintest b/examples/cpp/knapsack_2d_sat_class01_instance2_test.bintest new file mode 100644 index 00000000000..a7695406e71 --- /dev/null +++ b/examples/cpp/knapsack_2d_sat_class01_instance2_test.bintest @@ -0,0 +1 @@ +RUN: $(knapsack_2d_sat) --input $(Class_01.2bp) --instance 2 diff --git a/examples/cpp/linear_assignment_api_test.bintest b/examples/cpp/linear_assignment_api_test.bintest new file mode 100644 index 00000000000..3cf36e00c4e --- /dev/null +++ b/examples/cpp/linear_assignment_api_test.bintest @@ -0,0 +1 @@ +RUN: $(linear_assignment_api) diff --git a/examples/cpp/linear_programming_test.bintest b/examples/cpp/linear_programming_test.bintest new file mode 100644 index 00000000000..dc7d4189d75 --- /dev/null +++ b/examples/cpp/linear_programming_test.bintest @@ -0,0 +1 @@ +RUN: $(linear_programming) diff --git a/examples/cpp/linear_solver_protocol_buffers_test.bintest b/examples/cpp/linear_solver_protocol_buffers_test.bintest new file mode 100644 index 00000000000..a8997d98780 --- /dev/null +++ b/examples/cpp/linear_solver_protocol_buffers_test.bintest @@ -0,0 +1 @@ +RUN: $(linear_solver_protocol_buffers) diff --git a/examples/cpp/magic_sequence_sat_test.bintest b/examples/cpp/magic_sequence_sat_test.bintest new file mode 100644 index 00000000000..0e9b6973889 --- /dev/null +++ b/examples/cpp/magic_sequence_sat_test.bintest @@ -0,0 +1 @@ +RUN: $(magic_sequence_sat) diff --git a/examples/cpp/magic_square_sat_test.bintest b/examples/cpp/magic_square_sat_test.bintest new file mode 100644 index 00000000000..4830bec5343 --- /dev/null +++ b/examples/cpp/magic_square_sat_test.bintest @@ -0,0 +1 @@ +RUN: $(magic_square_sat) diff --git a/examples/cpp/max_flow_test.bintest b/examples/cpp/max_flow_test.bintest new file mode 100644 index 00000000000..530459b6823 --- /dev/null +++ b/examples/cpp/max_flow_test.bintest 
@@ -0,0 +1 @@ +RUN: $(max_flow) diff --git a/examples/cpp/min_cost_flow_test.bintest b/examples/cpp/min_cost_flow_test.bintest new file mode 100644 index 00000000000..322e309acf8 --- /dev/null +++ b/examples/cpp/min_cost_flow_test.bintest @@ -0,0 +1 @@ +RUN: $(min_cost_flow) diff --git a/examples/cpp/mps_driver_test.bintest b/examples/cpp/mps_driver_test.bintest new file mode 100644 index 00000000000..37e2f8c20de --- /dev/null +++ b/examples/cpp/mps_driver_test.bintest @@ -0,0 +1 @@ +RUN: $(mps_driver) --input $(maximization.mps) diff --git a/examples/cpp/multi_knapsack_sat_test.bintest b/examples/cpp/multi_knapsack_sat_test.bintest new file mode 100644 index 00000000000..1a51c1921b8 --- /dev/null +++ b/examples/cpp/multi_knapsack_sat_test.bintest @@ -0,0 +1 @@ +RUN: $(multi_knapsack_sat) diff --git a/examples/cpp/network_routing_sat_test.bintest b/examples/cpp/network_routing_sat_test.bintest new file mode 100644 index 00000000000..37387090b09 --- /dev/null +++ b/examples/cpp/network_routing_sat_test.bintest @@ -0,0 +1 @@ +RUN: $(network_routing_sat) --clients=10 --backbones=5 --demands=10 --traffic_min=5 --traffic_max=10 --min_client_degree=2 --max_client_degree=5 --min_backbone_degree=3 --max_backbone_degree=5 --max_capacity=20 --fixed_charge_cost=10 diff --git a/examples/cpp/nqueens_test.bintest b/examples/cpp/nqueens_test.bintest new file mode 100644 index 00000000000..d0c8332ca3b --- /dev/null +++ b/examples/cpp/nqueens_test.bintest @@ -0,0 +1 @@ +RUN: $(nqueens) diff --git a/examples/cpp/pdptw_non_homogenous_fleet_test.bintest b/examples/cpp/pdptw_non_homogenous_fleet_test.bintest new file mode 100644 index 00000000000..96762b4ccf0 --- /dev/null +++ b/examples/cpp/pdptw_non_homogenous_fleet_test.bintest @@ -0,0 +1,2 @@ +RUN: $(pdptw) --pdp_file=$(lc102.txt) --reduce_vehicle_cost_model=false --routing_search_parameters=first_solution_strategy:BEST_INSERTION 2>&1 +CHECK: "Cost: 1000828.936870" diff --git a/examples/cpp/pdptw_test.bintest 
b/examples/cpp/pdptw_test.bintest new file mode 100644 index 00000000000..d3a23a46c78 --- /dev/null +++ b/examples/cpp/pdptw_test.bintest @@ -0,0 +1,2 @@ +RUN: $(pdptw) --pdp_file=$(lc102.txt) 2>&1 +CHECK: "Cost: 1000828.936870" diff --git a/examples/cpp/pdptw_with_alternatives.cc b/examples/cpp/pdptw_with_alternatives.cc new file mode 100644 index 00000000000..1d485ede2cd --- /dev/null +++ b/examples/cpp/pdptw_with_alternatives.cc @@ -0,0 +1,377 @@ +// Copyright 2010-2025 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Pickup and Delivery Problem with Time Windows and Alternatives. +// This is a variant of the model in pdptw.cc (see that file for more details +// on pickup and delivery models). In this model both pickups and deliveries +// have alternative locations, of which one of each has to be selected. As in +// the standard pickup and delivery problem, pickups must happen before +// deliveries and must be on the same route.
+ +#include +#include +#include +#include +#include +#include + +#include "absl/base/log_severity.h" +#include "absl/flags/flag.h" +#include "absl/functional/bind_front.h" +#include "absl/log/check.h" +#include "absl/log/globals.h" +#include "absl/log/log.h" +#include "absl/strings/numbers.h" +#include "absl/strings/str_format.h" +#include "absl/strings/str_split.h" +#include "absl/strings/string_view.h" +#include "google/protobuf/text_format.h" +#include "ortools/base/helpers.h" +#include "ortools/base/init_google.h" +#include "ortools/base/options.h" +#include "ortools/constraint_solver/constraint_solver.h" +#include "ortools/routing/index_manager.h" +#include "ortools/routing/parameters.h" +#include "ortools/routing/parameters.pb.h" +#include "ortools/routing/routing.h" + +ABSL_FLAG(std::string, pdp_file, "", + "File containing the Pickup and Delivery Problem to solve."); +ABSL_FLAG(int, pdp_force_vehicles, 0, + "Force the number of vehicles used (maximum number of routes."); +ABSL_FLAG(bool, reduce_vehicle_cost_model, true, + "Overrides the homonymous field of " + "DefaultRoutingModelParameters()."); +ABSL_FLAG(std::string, routing_search_parameters, + "first_solution_strategy:ALL_UNPERFORMED " + "local_search_operators { use_node_pair_swap_active:BOOL_FALSE }", + "Text proto RoutingSearchParameters (possibly partial) that will " + "override the DefaultRoutingSearchParameters()"); + +using ::absl::bind_front; + +namespace operations_research::routing { + +// Scaling factor used to scale up distances, allowing a bit more precision +// from Euclidean distances. +const int64_t kScalingFactor = 1000; + +// Vector of (x,y) node coordinates, *unscaled*, in some imaginary planar, +// metric grid. +typedef std::vector> Coordinates; + +// Returns the scaled Euclidean distance between two nodes, coords holding the +// coordinates of the nodes. 
+int64_t Travel(const Coordinates* const coords, + RoutingIndexManager::NodeIndex from, + RoutingIndexManager::NodeIndex to) { + DCHECK(coords != nullptr); + const int xd = coords->at(from.value()).first - coords->at(to.value()).first; + const int yd = + coords->at(from.value()).second - coords->at(to.value()).second; + return static_cast(kScalingFactor * + std::sqrt(1.0L * xd * xd + yd * yd)); +} + +// Returns the scaled service time at a given node, service_times holding the +// service times. +int64_t ServiceTime(const std::vector* const service_times, + RoutingIndexManager::NodeIndex node) { + return kScalingFactor * service_times->at(node.value()); +} + +// Returns the scaled (distance plus service time) between two indices, coords +// holding the coordinates of the nodes and service_times holding the service +// times. +// The service time is the time spent to execute a delivery or a pickup. +int64_t TravelPlusServiceTime(const RoutingIndexManager& manager, + const Coordinates* const coords, + const std::vector* const service_times, + int64_t from_index, int64_t to_index) { + const RoutingIndexManager::NodeIndex from = manager.IndexToNode(from_index); + const RoutingIndexManager::NodeIndex to = manager.IndexToNode(to_index); + return ServiceTime(service_times, from) + Travel(coords, from, to); +} + +// Returns the demand (quantity picked up or delivered) of an index, demands +// holds the demand of each node. +int64_t Demand(const RoutingIndexManager& manager, + const std::vector* const demands, int64_t from_index, + int64_t to_index) { + (void)to_index; + return demands->at(manager.IndexToNode(from_index).value()); +} + +// Outputs a solution to the current model in a string. 
+std::string VerboseOutput(const RoutingModel& routing, + const RoutingIndexManager& manager, + const Assignment& assignment, + const Coordinates& coords, + const std::vector& service_times) { + std::string output; + const RoutingDimension& time_dimension = routing.GetDimensionOrDie("time"); + const RoutingDimension& load_dimension = routing.GetDimensionOrDie("demand"); + for (int i = 0; i < routing.vehicles(); ++i) { + absl::StrAppendFormat(&output, "Vehicle %d: ", i); + int64_t index = routing.Start(i); + if (routing.IsEnd(assignment.Value(routing.NextVar(index)))) { + output.append("empty"); + } else { + while (!routing.IsEnd(index)) { + absl::StrAppendFormat(&output, "%d ", + manager.IndexToNode(index).value()); + const IntVar* vehicle = routing.VehicleVar(index); + absl::StrAppendFormat(&output, "Vehicle(%d) ", + assignment.Value(vehicle)); + const IntVar* arrival = time_dimension.CumulVar(index); + absl::StrAppendFormat(&output, "Time(%d..%d) ", assignment.Min(arrival), + assignment.Max(arrival)); + const IntVar* load = load_dimension.CumulVar(index); + absl::StrAppendFormat(&output, "Load(%d..%d) ", assignment.Min(load), + assignment.Max(load)); + const int64_t next_index = assignment.Value(routing.NextVar(index)); + absl::StrAppendFormat( + &output, "Transit(%d) ", + TravelPlusServiceTime(manager, &coords, &service_times, index, + next_index)); + index = next_index; + } + output.append("Route end "); + const IntVar* vehicle = routing.VehicleVar(index); + absl::StrAppendFormat(&output, "Vehicle(%d) ", assignment.Value(vehicle)); + const IntVar* arrival = time_dimension.CumulVar(index); + absl::StrAppendFormat(&output, "Time(%d..%d) ", assignment.Min(arrival), + assignment.Max(arrival)); + const IntVar* load = load_dimension.CumulVar(index); + absl::StrAppendFormat(&output, "Load(%d..%d) ", assignment.Min(load), + assignment.Max(load)); + } + output.append("\n"); + } + return output; +} + +namespace { +// An inefficient but convenient method to parse a 
whitespace-separated list +// of integers. Returns true iff the input string was entirely valid and parsed. +bool SafeParseInt64Array(const std::string& str, + std::vector* parsed_int) { + static const char kWhiteSpaces[] = " \t\n\v\f\r"; + parsed_int->clear(); + for (absl::string_view token : + absl::StrSplit(str, absl::ByAnyChar(kWhiteSpaces), absl::SkipEmpty())) { + int value; + if (!absl::SimpleAtoi(token, &value)) return false; + parsed_int->push_back(value); + } + return true; +} +} // namespace + +// Builds and solves a model from a file in the format defined by Li & Lim +// (https://www.sintef.no/projectweb/top/pdptw/li-lim-benchmark/documentation/). +bool LoadAndSolve(const std::string& pdp_file, + const RoutingModelParameters& model_parameters, + const RoutingSearchParameters& search_parameters) { + // Load all the lines of the file in RAM (it shouldn't be too large anyway). + std::vector lines; + { + std::string contents; + CHECK_OK(file::GetContents(pdp_file, &contents, file::Defaults())); + const int64_t kMaxInputFileSize = 1 << 30; // 1GB + if (contents.size() >= kMaxInputFileSize) { + LOG(WARNING) << "Input file '" << pdp_file << "' is too large (>" + << kMaxInputFileSize << " bytes)."; + return false; + } + lines = absl::StrSplit(contents, '\n', absl::SkipEmpty()); + } + // Reading header. + if (lines.empty()) { + LOG(WARNING) << "Empty file: " << pdp_file; + return false; + } + // Parse file header. + std::vector parsed_int; + if (!SafeParseInt64Array(lines[0], &parsed_int) || parsed_int.size() != 3 || + parsed_int[0] < 0 || parsed_int[1] < 0 || parsed_int[2] < 0) { + LOG(WARNING) << "Malformed header: " << lines[0]; + return false; + } + const int num_vehicles = absl::GetFlag(FLAGS_pdp_force_vehicles) > 0 + ? absl::GetFlag(FLAGS_pdp_force_vehicles) + : parsed_int[0]; + const int64_t capacity = parsed_int[1]; + // We do not care about the 'speed' field, in third position. + + // Parse order data. 
+ std::vector customer_ids; + std::vector> coords; + std::vector demands; + std::vector open_times; + std::vector close_times; + std::vector service_times; + std::vector pickups; + std::vector deliveries; + int64_t horizon = 0; + RoutingIndexManager::NodeIndex depot(0); + for (int line_index = 1; line_index < lines.size(); ++line_index) { + if (!SafeParseInt64Array(lines[line_index], &parsed_int) || + parsed_int.size() != 9 || parsed_int[0] < 0 || parsed_int[4] < 0 || + parsed_int[5] < 0 || parsed_int[6] < 0 || parsed_int[7] < 0 || + parsed_int[8] < 0) { + LOG(WARNING) << "Malformed line #" << line_index << ": " + << lines[line_index]; + return false; + } + const int customer_id = parsed_int[0]; + const int x = parsed_int[1]; + const int y = parsed_int[2]; + const int64_t demand = parsed_int[3]; + const int64_t open_time = parsed_int[4]; + const int64_t close_time = parsed_int[5]; + const int64_t service_time = parsed_int[6]; + const int pickup = parsed_int[7]; + const int delivery = parsed_int[8]; + customer_ids.push_back(customer_id); + coords.push_back(std::make_pair(x, y)); + demands.push_back(demand); + open_times.push_back(open_time); + close_times.push_back(close_time); + service_times.push_back(service_time); + pickups.push_back(RoutingIndexManager::NodeIndex(pickup)); + deliveries.push_back(RoutingIndexManager::NodeIndex(delivery)); + if (pickup == 0 && delivery == 0) { + depot = RoutingIndexManager::NodeIndex(pickups.size() - 1); + } + horizon = std::max(horizon, close_time); + } + + // Build pickup and delivery model. 
+ const int num_nodes = customer_ids.size(); + RoutingIndexManager manager(num_nodes, num_vehicles, depot); + RoutingModel routing(manager, model_parameters); + const int vehicle_cost = routing.RegisterTransitCallback( + [&coords, &manager](int64_t i, int64_t j) { + return Travel(const_cast(&coords), + manager.IndexToNode(i), manager.IndexToNode(j)); + }); + routing.SetArcCostEvaluatorOfAllVehicles(vehicle_cost); + routing.AddDimension( + routing.RegisterTransitCallback(absl::bind_front( + TravelPlusServiceTime, manager, + const_cast(&coords), + const_cast*>(&service_times))), + kScalingFactor * horizon, kScalingFactor * horizon, + /*fix_start_cumul_to_zero=*/true, "time"); + const RoutingDimension& time_dimension = routing.GetDimensionOrDie("time"); + Solver* const solver = routing.solver(); + + // Collect pickup and delivery pairs and set time windows. + std::vector> pickup_delivery_pairs; + for (RoutingIndexManager::NodeIndex order(0); order < routing.nodes(); + ++order) { + const int64_t index = manager.NodeToIndex(order); + IntVar* const cumul = time_dimension.CumulVar(index); + cumul->SetMin(kScalingFactor * open_times[order.value()]); + cumul->SetMax(kScalingFactor * close_times[order.value()]); + RoutingIndexManager::NodeIndex delivery = deliveries[order.value()]; + if (pickups[order.value()] == 0 && delivery != 0) { + pickup_delivery_pairs.push_back({index, manager.NodeToIndex(delivery)}); + } + } + + // Build groups of pickup and delivery pairs representing the alternatives of + // pickup and delivery locations for a given shipment, and add the + // corresponding constraints. + const int kGroupSize = 4; + const int64_t kPenalty = 10000000; + // Collecting demands per group computed as the average demand for the group. 
+ std::vector group_demands(demands.size()); + for (int pair_index = 0; pair_index < pickup_delivery_pairs.size();) { + std::vector pickup_indices; + std::vector delivery_indices; + std::vector pickup_vehicle_variables; + std::vector delivery_vehicle_variables; + int64_t demand_sum = 0; + int pair_start = pair_index; + for (int i = 0; i < kGroupSize && pair_index < pickup_delivery_pairs.size(); + ++i, ++pair_index) { + const int64_t pickup = pickup_delivery_pairs[pair_index].first; + const int64_t delivery = pickup_delivery_pairs[pair_index].second; + pickup_indices.push_back(pickup); + delivery_indices.push_back(delivery); + pickup_vehicle_variables.push_back(routing.VehicleVar(pickup)); + delivery_vehicle_variables.push_back(routing.VehicleVar(delivery)); + demand_sum += demands[manager.IndexToNode(pickup).value()]; + } + // Computing demand average. + int64_t demand_avg = demand_sum / (pair_index - pair_start); + for (int i = pair_start; i < pair_index; ++i) { + group_demands[pickup_delivery_pairs[i].first] = demand_avg; + group_demands[pickup_delivery_pairs[i].second] = -demand_avg; + } + // Unperformed pickups or deliveries will have their vehicle variable set + // to -1. Therefore the vehicle performing the performed pickup (resp. the + // performed delivery) is the maximum of the vehicle variables of the + // pickups (resp. deliveries). Using this to ensure the performed pickup + // and delivery are on the same route. + solver->AddConstraint( + solver->MakeEquality(solver->MakeMax(pickup_vehicle_variables), + solver->MakeMax(delivery_vehicle_variables))); + // Only one pickup and one delivery must be performed and notify the solver + // about the pickup and delivery alternatives. + routing.AddPickupAndDeliverySets( + routing.AddDisjunction(pickup_indices, kPenalty), + routing.AddDisjunction(delivery_indices, kPenalty)); + } + // Add demand dimension where the demand corresponds to the average demand + // of the group. 
+ routing.AddDimension( + routing.RegisterTransitCallback(absl::bind_front( + Demand, manager, + const_cast*>(&group_demands))), + 0, capacity, /*fix_start_cumul_to_zero=*/true, "demand"); + + // Solve pickup and delivery problem. + const Assignment* assignment = routing.SolveWithParameters(search_parameters); + LOG(INFO) << routing.solver()->LocalSearchProfile(); + if (nullptr != assignment) { + LOG(INFO) << "Cost: " << assignment->ObjectiveValue(); + LOG(INFO) << VerboseOutput(routing, manager, *assignment, coords, + service_times); + return true; + } + return false; +} + +} // namespace operations_research::routing + +int main(int argc, char** argv) { + absl::SetStderrThreshold(absl::LogSeverityAtLeast::kInfo); + InitGoogle(argv[0], &argc, &argv, true); + // Set up model and search parameters. + operations_research::routing::RoutingModelParameters model_parameters = + operations_research::routing::DefaultRoutingModelParameters(); + model_parameters.set_reduce_vehicle_cost_model( + absl::GetFlag(FLAGS_reduce_vehicle_cost_model)); + operations_research::routing::RoutingSearchParameters search_parameters = + operations_research::routing::DefaultRoutingSearchParameters(); + CHECK(google::protobuf::TextFormat::MergeFromString( + absl::GetFlag(FLAGS_routing_search_parameters), &search_parameters)); + if (!operations_research::routing::LoadAndSolve( + absl::GetFlag(FLAGS_pdp_file), model_parameters, search_parameters)) { + LOG(INFO) << "Error solving " << absl::GetFlag(FLAGS_pdp_file); + } + return 0; +} diff --git a/examples/cpp/pdptw_with_alternatives_non_homogenous_fleet_test.bintest b/examples/cpp/pdptw_with_alternatives_non_homogenous_fleet_test.bintest new file mode 100644 index 00000000000..54406d4f3c1 --- /dev/null +++ b/examples/cpp/pdptw_with_alternatives_non_homogenous_fleet_test.bintest @@ -0,0 +1,2 @@ +RUN: $(pdptw_with_alternatives) --pdp_file=$(lc102.txt) --reduce_vehicle_cost_model=false 
--routing_search_parameters='first_solution_strategy:BEST_INSERTION local_search_operators { use_node_pair_swap_active:BOOL_FALSE }' 2>&1 +CHECK: "Cost: 362934" diff --git a/examples/cpp/pdptw_with_alternatives_test.bintest b/examples/cpp/pdptw_with_alternatives_test.bintest new file mode 100644 index 00000000000..09fb54c4258 --- /dev/null +++ b/examples/cpp/pdptw_with_alternatives_test.bintest @@ -0,0 +1,2 @@ +RUN: $(pdptw_with_alternatives) --pdp_file=$(lc102.txt) 2>&1 +CHECK: "Cost: 361237" diff --git a/examples/cpp/shift_minimization_sat_test.bintest b/examples/cpp/shift_minimization_sat_test.bintest new file mode 100644 index 00000000000..3694cdf27c3 --- /dev/null +++ b/examples/cpp/shift_minimization_sat_test.bintest @@ -0,0 +1 @@ +RUN: $(shift_minimization_sat) --input $(shift_minimization.dat) diff --git a/examples/cpp/slitherlink_sat_test.bintest b/examples/cpp/slitherlink_sat_test.bintest new file mode 100644 index 00000000000..3561a08a6c1 --- /dev/null +++ b/examples/cpp/slitherlink_sat_test.bintest @@ -0,0 +1 @@ +RUN: $(slitherlink_sat) diff --git a/examples/cpp/sports_scheduling_sat_test.bintest b/examples/cpp/sports_scheduling_sat_test.bintest new file mode 100644 index 00000000000..8cb0012f867 --- /dev/null +++ b/examples/cpp/sports_scheduling_sat_test.bintest @@ -0,0 +1 @@ +RUN: $(sports_scheduling_sat) diff --git a/examples/cpp/strawberry_fields_with_column_generation_test.bintest b/examples/cpp/strawberry_fields_with_column_generation_test.bintest new file mode 100644 index 00000000000..6668965b9c7 --- /dev/null +++ b/examples/cpp/strawberry_fields_with_column_generation_test.bintest @@ -0,0 +1 @@ +RUN: $(strawberry_fields_with_column_generation) --colgen_instance=4 diff --git a/examples/cpp/testdata/cgc/1.in b/examples/cpp/testdata/cgc/1.in new file mode 100644 index 00000000000..00776d701ab --- /dev/null +++ b/examples/cpp/testdata/cgc/1.in @@ -0,0 +1,6 @@ +4 +4 4 +1 2 6 100 +1 3 2 2 +1 4 2 2 +3 4 2 10 diff --git a/examples/cpp/testdata/cgc/2.in 
b/examples/cpp/testdata/cgc/2.in new file mode 100644 index 00000000000..b009f8917f5 --- /dev/null +++ b/examples/cpp/testdata/cgc/2.in @@ -0,0 +1,6 @@ +4 +4 4 +1 1 1 1 +2 1 1 1 +3 1 1 1 +4 1 1 1 diff --git a/examples/cpp/testdata/cgc/3.in b/examples/cpp/testdata/cgc/3.in new file mode 100644 index 00000000000..184b23bfbcc --- /dev/null +++ b/examples/cpp/testdata/cgc/3.in @@ -0,0 +1,3 @@ +1 +4 4 +2 2 2 10 diff --git a/examples/cpp/testdata/cgc/cgcut1.in b/examples/cpp/testdata/cgc/cgcut1.in new file mode 100644 index 00000000000..9a3483b2198 --- /dev/null +++ b/examples/cpp/testdata/cgc/cgcut1.in @@ -0,0 +1,9 @@ +7 +15 10 +8 4 2 66 +3 7 1 35 +8 2 3 24 +3 4 5 17 +3 3 2 11 +3 2 2 8 +2 1 1 2 diff --git a/examples/cpp/testdata/cgc/cgcut2.in b/examples/cpp/testdata/cgc/cgcut2.in new file mode 100644 index 00000000000..3929f108177 --- /dev/null +++ b/examples/cpp/testdata/cgc/cgcut2.in @@ -0,0 +1,12 @@ +10 +40 70 +21 22 1 582 +31 13 1 403 +9 35 3 315 +9 24 3 216 +30 7 2 210 +11 13 3 143 +10 14 1 140 +14 8 3 110 +12 8 3 94 +13 7 3 90 diff --git a/examples/cpp/testdata/cgc/cgcut3.in b/examples/cpp/testdata/cgc/cgcut3.in new file mode 100644 index 00000000000..30d4d75c0e4 --- /dev/null +++ b/examples/cpp/testdata/cgc/cgcut3.in @@ -0,0 +1,22 @@ +20 +40 70 +31 43 4 500 +30 41 2 480 +29 39 4 460 +28 38 4 440 +27 37 3 420 +26 36 4 410 +25 35 3 400 +24 34 4 380 +33 23 4 360 +22 32 3 340 +31 21 3 320 +29 18 3 300 +17 27 2 280 +15 24 2 240 +16 25 4 260 +15 24 1 240 +23 14 4 220 +21 12 3 180 +19 11 4 160 +9 17 1 140 diff --git a/examples/cpp/testdata/dimacs_example.txt b/examples/cpp/testdata/dimacs_example.txt new file mode 100644 index 00000000000..09f209c3fd4 --- /dev/null +++ b/examples/cpp/testdata/dimacs_example.txt @@ -0,0 +1,25 @@ +c Simple example file demonstrating the DIMACS data format, and for testing. +c Source: Invented from scratch by viger@google.com on 2019-05-22. +c Lines starting with 'c' (like this one) are comments. 
+c +c Graph description: (number of nodes) (number of arcs) +p asn 6 9 +c Note that the problems are 'perfect assignment' problems, where +c there are as many 'left' nodes as 'right' nodes, and we want to assign +c each 'left' node to exactly one 'right' node. +c +c "Left" nodes. +n 1 +n 2 +n 3 +c +c Arcs: left node, right node, cost of assigning left to right. +a 1 4 12 +a 1 5 53 +a 1 6 36 +a 2 4 14 +a 2 5 37 +a 2 6 46 +a 3 4 11 +a 3 5 52 +a 3 6 35 diff --git a/examples/cpp/testdata/lc102.txt b/examples/cpp/testdata/lc102.txt new file mode 100644 index 00000000000..0fde4b188d7 --- /dev/null +++ b/examples/cpp/testdata/lc102.txt @@ -0,0 +1,108 @@ +25 200 1 +0 40 50 0 0 1236 0 0 0 +1 45 68 10 0 1127 90 0 75 +2 45 70 -20 0 1125 90 8 0 +3 42 66 10 0 1129 90 0 10 +4 42 68 -20 727 782 90 6 0 +5 42 65 10 0 1130 90 0 9 +6 40 69 20 621 702 90 0 4 +7 40 66 20 0 1130 90 0 11 +8 38 68 20 255 324 90 0 2 +9 38 70 -10 534 605 90 5 0 +10 35 66 -10 357 410 90 3 0 +11 35 69 -20 448 505 90 7 0 +12 25 85 -30 0 1107 90 13 0 +13 22 75 30 30 92 90 0 12 +14 22 85 -40 567 620 90 16 0 +15 20 80 -20 384 429 90 18 0 +16 20 85 40 475 528 90 0 14 +17 18 75 20 99 148 90 0 19 +18 15 75 20 179 254 90 0 15 +19 15 80 -20 278 345 90 17 0 +20 30 50 10 10 73 90 0 22 +21 30 52 -10 0 1135 90 23 0 +22 28 52 -10 812 883 90 20 0 +23 28 55 10 732 777 90 0 21 +24 25 50 10 65 144 90 0 25 +25 25 52 -10 169 224 90 24 0 +26 25 55 -10 0 1130 90 29 0 +27 23 52 10 261 316 90 0 30 +28 23 55 20 546 593 0 0 103 +29 20 50 10 358 405 90 0 26 +30 20 55 -10 449 504 90 27 0 +31 10 35 20 0 1112 90 0 35 +32 10 40 30 31 100 90 0 33 +33 8 40 -30 87 158 90 32 0 +34 8 45 -20 0 1113 90 37 0 +35 5 35 -20 283 344 90 31 0 +36 5 45 -30 665 716 90 38 0 +37 2 40 20 0 1106 90 0 34 +38 0 40 30 479 522 90 0 36 +39 0 45 20 567 624 0 0 104 +40 35 30 -10 264 321 90 43 0 +41 35 32 10 166 235 90 0 51 +42 33 32 20 68 149 90 0 48 +43 33 35 10 16 80 90 0 40 +44 32 30 10 359 412 90 0 47 +45 30 30 -30 541 600 90 46 0 +46 30 32 30 448 509 90 0 45 
+47 30 35 -10 1054 1127 90 44 0 +48 28 30 -20 0 1122 90 42 0 +49 28 35 -10 1001 1066 90 52 0 +50 26 32 10 0 1123 0 0 106 +51 25 30 -10 725 786 90 41 0 +52 25 35 10 0 1124 90 0 49 +53 44 5 -10 286 347 90 55 0 +54 42 10 40 186 257 90 0 56 +55 42 15 10 95 158 90 0 53 +56 40 5 -40 385 436 90 54 0 +57 40 15 40 35 87 90 0 60 +58 38 5 30 471 534 90 0 59 +59 38 15 -30 0 1110 90 58 0 +60 35 5 -40 562 629 90 57 0 +61 50 30 -10 531 610 90 67 0 +62 50 35 20 262 317 90 0 66 +63 50 40 50 171 218 90 0 69 +64 48 30 -50 632 693 90 74 0 +65 48 40 10 76 129 90 0 72 +66 47 35 -20 826 875 90 62 0 +67 47 40 10 12 77 90 0 61 +68 45 30 10 734 777 0 0 102 +69 45 35 -50 916 969 90 63 0 +70 95 30 -30 387 456 90 81 0 +71 95 35 20 293 360 90 0 77 +72 53 30 -10 0 1122 90 65 0 +73 92 30 -10 478 551 90 76 0 +74 53 35 50 353 412 90 0 64 +75 45 65 -10 0 1130 90 1 0 +76 90 35 10 203 260 90 0 73 +77 88 30 -20 574 643 90 71 0 +78 88 35 20 109 170 0 0 105 +79 87 30 10 668 731 90 0 80 +80 85 25 -10 769 820 90 79 0 +81 85 35 30 47 124 90 0 70 +82 75 55 20 0 1110 90 0 85 +83 72 55 10 0 1113 90 0 84 +84 70 58 -10 458 523 90 83 0 +85 68 60 -20 0 1116 90 82 0 +86 66 55 -10 173 238 90 90 0 +87 65 55 20 85 144 90 0 89 +88 65 60 30 645 708 90 0 91 +89 63 58 -20 737 802 90 87 0 +90 60 55 10 20 84 90 0 86 +91 60 60 -30 0 1123 90 88 0 +92 67 85 -10 368 441 90 96 0 +93 65 85 40 475 518 90 0 99 +94 65 82 -20 0 1105 90 98 0 +95 62 80 30 0 1108 90 0 100 +96 60 80 10 0 1109 90 0 92 +97 60 85 30 561 622 0 0 101 +98 58 75 20 0 1115 90 0 94 +99 55 80 -40 743 820 90 93 0 +100 55 85 -30 647 726 90 95 0 +101 60 85 -30 561 622 90 97 0 +102 45 30 -10 734 777 90 68 0 +103 23 55 -20 546 593 90 28 0 +104 0 45 -20 567 624 90 39 0 +105 88 35 -20 109 170 90 78 0 +106 26 32 -10 0 1123 90 50 0 diff --git a/examples/cpp/testdata/shift_minimization.dat b/examples/cpp/testdata/shift_minimization.dat new file mode 100644 index 00000000000..ddb76610469 --- /dev/null +++ b/examples/cpp/testdata/shift_minimization.dat @@ -0,0 +1,69 @@ +# 
Randomly generated data for apersonnel scheduling problem +# ./datagen tightness = 90 Multi-skilling level = 66 +# Random number generator seed = 0 +Type = 1 +Jobs = 40 + 43 516 + 164 746 + 75 591 + 230 718 + 839 1354 + 96 637 + 1 593 + 179 713 + 130 765 + 119 688 + 194 783 + 270 818 + 102 618 + 774 1291 + 28 550 + 56 630 + 758 1350 + 804 1320 + 16 557 + 16 565 + 30 536 + 186 752 + 677 1259 + 739 1244 + 834 1313 + 724 1346 + 761 1304 + 823 1396 + 569 1040 + 804 1340 + 125 740 + 764 1256 + 159 656 + 712 1278 + 726 1291 + 651 1235 + 750 1278 + 725 1363 + 867 1376 + 844 1319 +Qualifications = 23 + 26: 6 13 0 1 2 3 4 8 11 12 14 15 16 17 20 21 23 24 25 26 29 30 31 33 37 39 + 24: 14 16 1 4 6 8 12 15 17 18 19 20 22 23 25 27 28 31 34 35 36 37 38 39 + 28: 30 36 1 2 3 4 6 9 10 11 12 13 15 16 17 18 21 22 23 24 26 27 32 33 34 35 38 39 + 24: 7 25 0 2 4 5 6 10 13 15 16 17 21 23 26 27 29 30 31 32 34 35 36 38 + 29: 8 29 0 1 2 6 7 10 12 13 14 15 17 18 20 21 22 24 25 28 30 31 32 33 34 35 36 37 39 + 32: 5 37 0 1 2 3 4 7 9 10 11 12 13 14 15 16 19 20 22 23 24 25 26 27 28 31 32 33 35 36 38 39 + 26: 18 28 0 1 2 3 4 6 8 10 11 15 16 17 19 20 21 22 23 24 25 29 30 31 36 38 + 29: 0 26 1 2 3 4 6 7 8 10 12 13 14 15 16 17 18 19 20 23 24 27 28 31 32 34 35 37 38 + 30: 11 39 2 4 6 7 8 9 10 13 14 15 16 17 18 20 21 23 25 26 27 29 30 31 32 34 35 36 37 38 + 24: 19 24 0 1 2 5 8 9 10 11 15 17 20 21 26 27 28 29 30 31 34 36 37 38 + 30: 3 34 0 2 4 6 7 8 9 10 11 13 14 15 16 17 19 23 24 25 27 29 31 32 33 35 36 37 38 39 + 28: 21 31 0 1 3 5 8 9 12 13 16 17 18 19 20 22 24 25 26 27 28 30 32 34 35 36 37 39 + 29: 32 33 0 1 2 3 4 5 6 7 9 10 11 12 13 16 17 18 20 21 23 24 28 29 31 34 35 36 37 + 35: 9 23 1 2 4 5 6 7 8 10 11 12 14 15 16 17 18 19 20 22 24 25 26 27 28 29 30 31 32 33 34 35 36 37 39 + 31: 20 27 1 2 3 4 5 6 8 9 10 12 14 15 16 17 18 19 21 24 25 26 29 31 33 34 35 36 37 38 39 + 23: 15 38 0 1 4 5 7 11 12 13 14 16 17 18 19 20 21 25 27 31 33 35 37 + 24: 1 4 0 2 7 8 11 13 16 17 21 22 23 24 25 27 28 29 31 33 34 36 
38 39 + 28: 2 22 0 1 4 5 6 7 8 10 11 12 13 15 16 18 20 21 24 26 27 30 31 33 35 37 38 39 + 29: 12 35 0 1 2 3 5 6 7 8 11 13 15 16 17 18 19 20 22 23 25 26 30 31 33 36 37 38 39 + 28: 10 17 0 2 3 5 7 8 9 13 14 15 16 19 20 22 23 24 25 27 29 30 31 32 35 36 38 39 + 33: 18 29 26 27 23 31 12 2 1 35 10 19 30 14 11 13 17 21 3 0 33 22 7 15 16 25 4 32 28 39 34 20 5 + 31: 6 5 10 30 23 34 36 20 12 8 39 16 0 18 3 17 22 21 35 25 15 37 13 2 27 29 31 11 38 33 32 + 33: 11 7 17 19 6 32 1 15 8 18 24 35 16 5 26 28 34 33 38 37 14 0 3 12 22 21 13 39 25 2 36 10 4 diff --git a/examples/cpp/testdata/wt40.txt b/examples/cpp/testdata/wt40.txt new file mode 100644 index 00000000000..c4dbb7ab177 --- /dev/null +++ b/examples/cpp/testdata/wt40.txt @@ -0,0 +1,751 @@ + 26 24 79 46 32 35 73 74 14 67 86 46 78 40 29 94 64 27 90 55 + 35 52 36 69 85 95 14 78 37 86 44 28 39 12 30 68 70 9 49 50 + 1 10 9 10 10 4 3 2 10 3 7 3 1 3 10 4 7 7 4 7 + 5 3 5 4 9 5 2 8 10 4 7 4 9 5 7 7 5 10 1 3 + 1588 1620 1731 1773 1694 1487 1566 1844 1727 1636 1599 1539 1855 1645 1709 1660 1582 1836 1484 1559 + 1772 1510 1512 1795 1522 1509 1598 1658 1826 1628 1650 1833 1627 1528 1541 1497 1481 1446 1579 1814 + 56 25 76 35 28 52 21 32 64 67 48 100 94 87 39 18 78 80 56 72 + 4 70 36 46 85 31 96 30 66 92 33 18 19 34 18 4 42 94 4 89 + 1 9 9 9 5 1 4 3 2 8 2 3 7 5 3 5 2 2 2 4 + 9 9 6 8 9 7 5 2 1 6 4 6 1 2 10 10 6 5 4 3 + 1687 1738 1663 1480 1504 1826 1722 1660 1594 1445 1704 1660 1715 1701 1679 1516 1658 1611 1502 1685 + 1614 1647 1689 1615 1524 1800 1654 1752 1456 1452 1801 1713 1761 1513 1759 1484 1821 1448 1666 1611 + 1 49 35 83 75 64 20 84 31 88 27 88 21 32 12 20 26 64 6 11 + 54 2 21 94 44 19 45 6 61 41 45 86 98 45 66 77 76 64 31 25 + 10 4 7 3 6 7 5 10 5 10 2 1 7 7 2 8 3 8 8 8 + 10 1 1 3 6 7 2 4 6 5 7 4 4 9 5 6 1 9 9 4 + 1452 1565 1588 1319 1436 1434 1573 1427 1593 1432 1428 1549 1565 1312 1614 1362 1643 1536 1372 1490 + 1631 1338 1336 1487 1361 1363 1583 1652 1396 1376 1319 1369 1341 1434 1319 1296 1644 1418 1421 1338 + 71 58 89 62 8 
31 74 52 71 85 1 77 35 30 96 12 4 29 64 34 + 8 98 86 22 6 6 24 61 86 76 17 36 63 83 81 37 80 56 11 57 + 6 3 9 3 8 8 3 8 10 4 7 8 10 5 5 6 5 10 3 6 + 9 4 3 2 7 10 6 10 9 3 2 6 6 9 10 9 7 5 5 6 + 1419 1682 1683 1617 1703 1549 1741 1634 1580 1588 1694 1574 1548 1730 1535 1438 1501 1504 1587 1687 + 1472 1507 1389 1454 1404 1522 1526 1681 1506 1584 1720 1767 1621 1677 1487 1513 1591 1620 1771 1712 + 7 70 52 52 86 66 70 60 65 70 27 41 42 88 21 15 40 80 28 7 + 13 58 4 43 41 89 80 54 34 92 66 72 29 40 53 35 91 58 6 82 + 5 10 1 2 2 8 8 5 6 4 8 4 1 1 6 5 10 7 2 7 + 5 8 7 7 6 2 6 6 1 8 8 5 7 6 6 4 8 6 5 7 + 1471 1794 1514 1473 1702 1583 1640 1683 1491 1519 1702 1527 1701 1675 1421 1718 1639 1742 1749 1653 + 1441 1722 1691 1612 1424 1615 1809 1533 1648 1702 1450 1614 1675 1435 1441 1485 1746 1732 1727 1612 + 76 54 14 32 100 37 69 36 27 7 39 52 74 67 93 49 89 73 79 98 + 45 36 24 71 47 19 32 47 25 37 86 5 37 60 46 28 72 11 52 4 + 1 7 6 3 10 7 2 8 1 8 8 10 4 3 1 10 10 8 2 10 + 6 7 3 3 8 10 1 2 3 2 8 2 3 5 6 9 10 6 4 9 + 1145 1091 1222 1290 1047 1011 1058 1313 1351 1076 1163 1004 1141 1061 1259 1306 985 1173 1154 1325 + 1352 1169 1189 1241 1210 1209 1331 1128 1102 983 1232 1131 1358 1339 1260 1074 1283 1068 1008 1023 + 73 32 44 87 67 63 13 10 94 11 50 90 93 79 96 39 33 20 72 77 + 11 40 75 24 52 100 83 4 7 12 49 45 59 42 6 75 16 10 65 29 + 8 8 10 1 2 9 3 6 1 10 7 10 2 3 10 9 2 3 4 8 + 6 4 3 4 6 4 7 5 4 7 7 10 10 5 9 8 3 3 1 1 + 1034 1260 1317 1095 1026 1187 1138 1281 1063 1006 982 1169 1051 1310 1189 1125 1097 1212 1091 1063 + 1084 1336 1016 1025 1219 1186 990 1167 1216 977 1184 1155 1235 1335 1008 1329 1064 1242 1108 1328 + 22 83 90 4 33 16 21 17 34 54 51 33 88 93 94 13 85 84 41 21 + 43 30 64 25 10 89 10 96 59 70 5 8 93 37 54 37 44 4 39 65 + 7 7 10 1 5 7 1 8 10 1 6 7 6 1 6 8 6 2 4 1 + 1 6 6 6 10 10 2 6 2 7 6 3 3 9 5 1 8 9 9 5 + 1192 1284 1086 1269 1008 1002 1202 943 1153 960 1201 1204 1074 983 1115 1066 1034 981 1199 1174 + 965 1222 1242 1020 993 1298 1186 1148 1032 1132 1182 1115 
1299 1062 1094 949 1175 993 1293 959 + 75 33 77 73 71 24 90 94 44 57 11 67 77 27 27 100 83 81 1 5 + 3 20 75 97 4 98 70 92 84 20 83 12 67 48 90 80 5 94 47 24 + 8 2 8 1 7 6 8 5 3 7 10 6 7 3 5 7 7 9 9 2 + 3 6 9 6 10 10 6 5 2 5 7 6 3 8 9 7 4 5 4 6 + 1472 1343 1218 1338 1495 1463 1444 1249 1121 1325 1366 1368 1186 1545 1396 1517 1141 1538 1405 1508 + 1238 1175 1238 1351 1376 1324 1450 1237 1504 1498 1148 1307 1353 1291 1404 1188 1292 1140 1315 1540 + 12 67 9 86 83 64 94 80 46 97 77 78 5 54 14 36 96 82 41 12 + 79 77 71 68 47 89 97 67 16 22 89 78 34 34 94 72 75 33 39 22 + 10 6 10 6 2 1 3 7 10 10 8 1 1 10 9 4 1 4 1 4 + 4 4 6 1 3 4 5 5 9 7 6 5 2 10 8 5 10 6 5 7 + 1366 1541 1552 1418 1301 1363 1194 1349 1480 1311 1631 1335 1378 1443 1181 1258 1205 1521 1412 1251 + 1398 1347 1620 1449 1410 1327 1466 1510 1539 1412 1515 1297 1239 1201 1213 1332 1416 1205 1285 1614 + 41 18 66 42 100 71 89 19 92 3 75 57 46 2 53 57 17 9 30 25 + 90 19 93 69 76 79 5 100 16 89 7 32 78 4 21 85 60 29 43 77 + 9 10 5 5 3 1 5 9 1 8 1 10 5 8 9 1 9 4 6 3 + 2 5 4 1 5 6 7 7 5 7 3 6 8 6 4 6 9 4 6 2 + 928 623 690 630 796 811 728 670 618 788 609 629 984 841 918 809 613 644 724 764 + 667 713 797 663 951 920 716 892 677 774 894 652 988 696 872 713 971 719 956 836 + 98 77 8 38 79 28 4 57 63 57 7 96 2 48 18 3 72 39 17 31 + 80 21 60 33 60 38 84 69 73 88 2 1 11 22 6 92 92 42 66 10 + 10 9 5 9 3 10 8 2 5 5 6 10 5 2 4 9 4 1 2 4 + 7 1 3 4 2 6 3 5 2 5 2 8 10 5 2 2 8 7 2 8 + 754 682 557 827 647 880 631 783 875 806 619 648 631 540 590 658 876 696 739 570 + 738 886 724 548 870 882 619 789 553 860 606 622 709 771 861 640 646 598 890 722 + 48 83 74 78 6 75 51 45 67 95 57 93 3 25 64 18 13 100 25 19 + 25 8 69 99 94 90 3 61 6 40 13 15 72 5 62 72 32 86 93 46 + 7 6 5 6 1 9 10 1 8 2 10 9 4 9 3 9 5 2 1 3 + 4 8 6 1 6 10 8 5 9 6 8 8 4 1 10 5 8 7 3 10 + 832 907 759 997 708 929 831 1001 763 891 871 686 838 624 690 635 630 997 724 707 + 870 961 874 637 648 881 777 836 620 803 680 955 739 945 892 687 624 774 636 765 + 82 33 79 67 96 3 16 
33 40 6 82 46 7 19 22 48 18 76 59 84 + 24 59 96 5 2 9 10 61 59 15 59 8 28 23 80 5 71 29 85 12 + 3 8 6 8 7 6 1 1 6 4 7 6 3 6 1 7 1 3 7 7 + 9 10 1 3 8 10 2 1 4 1 3 7 4 6 10 9 1 5 3 8 + 756 774 770 620 720 667 808 562 728 743 794 657 817 611 562 599 799 748 517 673 + 567 749 691 590 554 724 634 790 716 540 780 627 664 556 691 660 622 541 752 503 + 23 75 17 14 92 58 65 79 46 30 21 58 100 68 1 42 97 100 1 22 + 9 8 93 95 36 26 29 60 6 42 38 18 74 98 29 75 25 88 85 39 + 8 9 9 7 7 3 6 8 2 3 1 2 3 3 5 5 8 2 1 6 + 5 6 8 4 9 5 6 9 5 7 9 4 3 4 6 9 7 5 8 9 + 872 724 826 789 878 974 680 847 796 662 639 800 717 952 742 884 735 900 987 680 + 919 909 609 884 674 830 710 924 688 649 760 724 712 966 836 689 719 905 893 712 + 17 8 61 32 63 36 43 41 77 64 90 36 96 65 89 53 85 63 9 32 + 38 38 93 16 76 22 65 51 49 99 26 84 35 7 56 70 40 38 11 62 + 10 9 10 6 7 9 2 10 9 7 3 7 2 7 5 9 2 7 5 7 + 3 1 10 5 7 9 5 8 4 7 4 1 4 8 8 1 3 9 9 3 + 602 476 347 346 321 320 277 473 492 244 595 307 293 221 208 249 406 521 497 259 + 539 384 509 450 415 541 472 269 532 264 235 304 306 288 345 244 465 270 486 290 + 44 88 60 35 10 90 72 81 55 54 83 87 38 52 53 37 47 6 21 17 + 96 90 4 17 30 34 98 30 19 19 75 51 63 80 86 78 91 5 61 16 + 8 4 9 8 1 8 9 4 6 10 4 4 8 1 6 4 7 8 3 1 + 5 5 6 2 9 1 8 6 7 1 8 1 5 7 7 9 6 4 8 1 + 442 276 351 442 237 620 484 503 402 408 473 293 252 381 379 331 506 370 377 507 + 354 422 414 276 511 480 475 293 301 259 237 545 286 582 578 338 590 371 335 556 + 6 22 44 42 58 22 52 64 28 3 72 24 97 30 36 53 75 23 54 83 + 99 54 16 51 33 49 89 97 72 86 16 63 37 97 23 95 78 67 9 42 + 6 4 9 10 1 1 3 9 4 1 10 6 6 3 5 9 5 4 5 10 + 8 7 5 10 10 8 8 1 5 4 5 7 5 3 8 3 10 3 3 3 + 498 605 344 361 429 599 436 351 308 263 570 415 274 225 586 311 501 237 518 217 + 334 289 245 523 513 292 587 581 255 371 487 538 303 541 575 510 415 600 346 355 + 37 71 18 74 62 92 61 59 73 63 7 63 72 48 60 62 90 62 2 38 + 88 75 94 73 51 9 74 54 96 39 61 71 65 95 48 15 31 57 9 84 + 1 7 6 6 8 6 2 5 9 6 2 6 10 9 3 1 6 9 5 5 + 10 
7 1 4 4 2 10 4 8 6 2 3 1 9 1 10 10 5 5 3 + 655 263 510 495 668 392 574 325 588 554 666 634 397 356 649 241 429 290 687 533 + 410 686 402 633 562 431 548 601 643 521 332 267 586 482 466 600 468 541 489 247 + 100 47 68 56 6 8 57 36 94 43 17 20 88 11 25 30 41 25 36 95 + 34 52 81 43 76 10 71 8 5 71 96 27 85 62 22 39 10 61 93 87 + 3 8 10 3 2 3 6 3 7 7 2 9 2 8 5 8 8 1 7 9 + 6 6 4 1 2 10 4 6 5 4 10 4 9 8 8 9 8 6 6 3 + 197 314 578 420 325 474 260 200 227 456 435 438 369 504 493 483 234 469 535 520 + 219 481 275 206 242 252 454 202 484 517 202 420 383 415 367 405 529 469 500 281 + 82 18 55 14 1 36 73 72 26 3 8 18 2 77 11 26 5 66 7 68 + 37 35 100 21 29 98 73 67 41 26 87 87 59 41 81 69 99 17 71 2 + 5 5 4 9 10 2 1 5 3 1 6 2 4 8 8 2 1 7 1 7 + 10 8 2 1 6 6 1 9 2 2 3 7 1 4 4 10 2 5 5 7 + 0 0 123 0 5 168 104 41 0 0 0 62 170 61 163 59 0 113 76 0 + 30 55 0 136 68 179 0 0 54 0 0 0 0 0 154 33 0 28 130 0 + 1 3 22 81 86 90 22 42 28 57 66 82 96 55 73 20 86 92 43 76 + 22 28 52 75 58 76 53 43 75 2 79 11 81 25 42 11 14 17 29 81 + 5 3 5 3 9 8 5 3 10 2 4 1 5 1 4 1 9 9 1 2 + 5 3 1 1 4 10 9 6 6 8 8 7 10 4 8 6 2 1 2 2 + 0 103 0 44 0 0 38 166 0 0 57 1 0 0 159 0 71 0 0 87 + 193 0 0 0 0 0 146 148 0 159 165 36 11 19 83 0 0 0 54 0 + 4 75 87 90 29 42 96 27 92 70 52 38 81 9 47 87 17 64 52 41 + 45 90 14 71 40 97 60 51 5 50 94 59 71 62 98 74 97 5 34 80 + 3 4 3 3 2 10 9 1 8 8 9 3 2 5 8 10 7 3 2 2 + 2 9 10 1 4 7 6 1 1 6 7 4 7 1 4 7 6 5 10 3 + 88 167 228 0 0 0 56 205 0 66 0 0 0 0 0 0 0 117 0 146 + 0 0 0 77 69 185 0 105 0 0 159 0 52 66 0 0 113 20 0 179 + 63 61 47 77 25 14 63 13 33 64 7 18 98 57 45 4 60 94 17 86 + 89 30 43 81 80 69 23 10 59 73 31 97 78 55 23 70 18 80 31 57 + 4 6 1 1 4 4 8 3 7 6 9 7 2 5 8 9 7 4 4 2 + 9 7 8 3 2 9 5 9 9 7 4 7 1 7 7 6 8 7 9 7 + 154 159 70 34 60 142 172 127 0 0 163 2 0 144 0 0 189 123 95 0 + 0 0 34 0 177 0 187 0 0 13 85 0 0 0 161 81 0 0 188 47 + 81 39 78 84 99 82 71 85 98 10 52 56 12 67 58 53 5 51 1 40 + 65 11 75 80 11 52 48 41 91 31 70 94 78 57 66 13 76 92 40 75 + 4 10 6 8 5 4 1 
5 5 6 2 3 9 2 6 2 2 10 1 10 + 5 7 1 7 2 4 3 5 10 8 7 4 8 5 7 9 3 2 9 2 + 0 179 19 2 119 0 40 99 24 0 0 69 0 0 0 0 151 128 171 0 + 12 147 0 0 0 0 29 145 0 110 227 45 189 0 0 0 0 10 0 0 + 87 43 33 53 1 76 44 34 60 36 82 88 21 63 54 18 68 53 46 33 + 12 52 21 45 95 60 21 69 85 32 66 21 78 75 55 23 99 47 64 98 + 5 7 10 7 6 6 1 3 9 6 4 2 6 10 6 5 9 7 9 5 + 6 2 8 8 6 4 10 6 4 2 8 8 6 8 6 8 3 9 3 6 + 1267 1914 1785 1385 1653 1344 1888 2075 1804 1297 1990 1709 1561 1719 1947 2024 2095 2021 1591 1836 + 1616 1683 1703 1655 1612 1285 2041 1964 1720 1424 1465 1940 1747 2059 1868 1946 1597 1424 1564 1404 + 41 56 43 30 12 77 87 53 22 19 74 54 47 21 82 84 95 73 70 99 + 99 21 72 58 21 60 4 20 51 41 55 52 9 69 98 20 40 100 79 96 + 1 1 5 9 5 6 2 5 4 9 9 4 6 4 7 2 8 7 8 7 + 7 2 9 7 6 10 8 1 9 6 10 9 7 3 1 1 9 6 1 1 + 1494 1558 1707 1373 1538 1603 1783 2142 1765 1811 1878 1852 1675 1785 1921 1456 1879 1505 1979 1990 + 1387 1958 1519 1470 1475 1815 1739 1907 2090 1578 2190 1797 2202 2017 1352 1372 1485 2162 2054 2134 + 27 19 18 66 40 22 81 39 12 94 80 99 43 70 67 44 60 39 70 76 + 20 41 99 1 6 70 37 26 30 75 50 64 33 54 78 29 84 63 42 38 + 10 3 8 1 9 2 2 9 2 4 10 1 6 10 2 10 5 6 2 5 + 10 5 10 10 2 7 5 7 2 6 3 10 6 3 2 4 10 5 3 6 + 1829 1980 1263 1532 1527 1377 1304 1295 1535 1844 1586 1958 1596 1809 1815 1897 1413 1694 1977 1622 + 1381 1288 1656 1747 1310 1524 1365 1364 1946 1848 1869 1620 1300 2001 1507 1693 1773 1790 1920 1566 + 60 22 47 13 21 98 85 91 90 44 32 68 89 93 11 88 66 28 23 3 + 99 88 47 62 95 65 41 21 88 2 40 40 40 97 59 78 41 100 89 1 + 1 10 2 7 2 2 6 4 7 6 9 9 7 2 5 6 4 7 8 2 + 7 2 9 2 2 6 10 4 6 1 7 5 2 9 1 5 4 5 5 3 + 1442 1639 1483 2221 2264 1743 2047 1442 2193 2139 2239 1543 1725 1378 2034 1385 1939 2135 1892 1846 + 1690 1896 1993 1711 1610 2215 1922 2179 2199 2115 1718 1850 1487 1917 1566 2132 2188 2070 1935 2228 + 83 11 94 26 20 48 38 11 42 9 40 10 92 24 97 15 41 73 80 23 + 89 93 42 31 64 70 12 42 22 46 96 62 47 16 82 98 51 26 32 61 + 3 6 9 3 9 7 7 10 7 6 5 9 5 2 5 9 
10 10 6 10 + 8 2 8 10 1 3 7 3 4 8 9 9 7 5 7 2 8 9 4 6 + 1756 1615 1772 1197 1866 1894 1703 1596 1847 1700 1347 1676 1734 1532 1441 1741 1818 1588 1715 1927 + 1794 1548 1811 1788 1861 1520 1781 1838 1863 1825 1437 1566 1608 1607 1644 1763 1581 1734 1313 1758 + 66 83 4 64 29 66 54 6 82 80 92 79 88 52 84 24 44 60 75 83 + 68 36 88 2 13 64 25 29 54 84 65 17 99 85 65 22 81 11 62 100 + 9 8 7 5 2 4 6 8 3 1 7 9 5 4 5 8 4 10 10 1 + 1 7 4 1 6 3 3 4 9 9 6 6 7 3 7 5 5 9 2 7 + 1480 1684 1474 1438 1532 931 1041 1110 1219 1826 1694 998 1607 1400 1174 1018 1033 961 1708 1405 + 1424 1264 1014 1314 1350 1797 1122 1531 1424 1441 1208 1252 1022 1410 938 1703 933 1338 1518 1090 + 9 90 99 90 69 100 29 84 51 3 53 30 43 20 10 17 61 27 16 32 + 46 54 66 70 72 81 34 25 37 43 71 97 70 29 6 87 27 80 47 1 + 5 1 8 3 2 8 5 1 10 3 10 3 1 1 10 3 6 7 7 1 + 3 10 4 4 9 4 8 2 10 2 1 4 9 9 7 3 9 10 5 1 + 1503 881 1491 888 1415 1099 1158 1567 1376 1157 824 1580 1318 850 1077 1164 936 1348 1000 1277 + 1198 1023 1454 811 1510 1152 861 1300 1213 955 851 978 1493 931 1252 1068 1350 1048 1010 1416 + 37 35 80 69 88 52 35 57 22 74 68 23 49 21 90 100 6 66 91 24 + 59 35 36 35 100 4 71 76 51 8 18 93 63 29 80 34 12 7 36 51 + 2 6 5 5 6 9 10 2 5 1 2 8 5 1 10 5 5 9 8 4 + 10 5 10 7 2 2 6 7 5 1 9 10 9 8 3 10 2 4 10 4 + 833 940 1046 1433 1128 829 1582 1431 1398 1217 1180 1109 1076 994 1096 921 1488 1278 1226 903 + 1028 1217 1018 1047 1027 1341 1586 1273 920 958 1066 1013 953 1499 1494 1561 1308 1536 984 894 + 7 77 25 68 63 75 21 41 93 45 59 18 26 5 20 81 23 22 66 13 + 70 20 35 1 82 74 44 33 4 12 88 2 6 16 41 5 89 22 53 40 + 2 10 8 1 4 1 10 4 1 1 3 4 8 6 7 6 8 8 7 6 + 4 4 10 10 6 7 5 6 2 6 8 2 9 7 6 7 3 1 10 2 + 1137 1078 737 846 972 844 691 1087 852 955 895 1209 955 994 746 1032 873 1092 1208 1034 + 824 1117 1104 743 643 811 708 1259 985 880 1203 1109 1052 1234 666 667 1068 803 1135 1180 + 100 25 90 60 100 77 16 53 90 21 25 82 23 71 71 74 81 93 85 60 + 72 5 80 72 34 81 42 47 32 45 41 25 59 77 19 48 37 7 2 62 + 4 5 6 1 10 
6 8 2 10 2 8 8 8 8 8 10 1 3 2 5 + 9 5 1 5 9 6 5 2 5 4 2 8 1 3 9 7 10 3 10 2 + 1009 1009 1062 897 1643 1700 1012 1627 1509 1341 1380 1530 1392 1639 1415 1246 1002 1617 940 1067 + 1506 1738 1359 1345 1528 1473 999 1593 1422 1677 1351 891 1015 1285 1535 1349 1262 1433 1055 1542 + 29 43 10 19 31 18 27 15 68 59 46 82 17 75 93 93 80 29 71 25 + 87 38 64 57 97 70 32 75 99 48 13 63 7 48 26 11 65 98 64 13 + 5 10 5 8 9 3 1 2 5 2 5 1 2 1 10 9 7 9 4 8 + 5 3 6 5 8 2 10 3 8 5 5 10 6 5 9 4 1 1 9 3 + 747 1111 1041 1032 401 1148 906 789 747 893 629 996 1100 700 893 556 1138 918 656 906 + 832 858 478 780 766 577 649 999 461 649 505 832 1106 740 563 1186 673 983 1040 963 + 32 47 97 22 17 12 39 52 12 68 78 90 13 23 14 56 45 35 23 73 + 62 19 60 69 43 40 56 75 98 17 22 22 65 23 47 48 8 84 99 92 + 2 9 3 4 3 4 9 1 4 2 5 4 8 7 9 9 10 7 7 5 + 10 5 1 10 8 4 9 5 4 8 7 9 3 1 8 4 1 1 9 3 + 538 436 960 472 537 888 1115 1066 574 817 643 599 663 834 448 886 923 831 701 509 + 755 531 960 464 660 401 665 1115 657 693 772 1029 587 843 467 745 803 780 1062 851 + 77 9 44 50 48 26 13 92 31 13 56 81 44 75 42 85 33 68 21 74 + 74 3 17 23 70 41 37 9 50 99 61 11 75 46 50 14 32 5 45 3 + 10 1 2 3 5 8 4 6 4 2 4 7 9 4 9 6 9 2 9 5 + 2 10 7 7 5 6 1 6 2 7 10 6 8 4 10 3 3 5 6 4 + 591 869 702 434 766 972 820 735 547 426 436 623 989 657 358 824 935 380 850 873 + 440 989 1044 831 357 422 792 566 874 645 777 778 404 827 876 887 403 632 968 751 + 60 49 18 87 42 79 92 64 65 11 92 31 71 47 62 4 27 40 20 99 + 29 96 66 37 4 77 75 43 3 94 2 43 8 15 79 68 28 57 17 50 + 1 5 7 8 3 2 3 6 2 5 8 4 8 5 10 6 4 8 1 9 + 7 6 5 9 10 9 1 5 3 1 1 5 7 3 1 8 10 10 9 6 + 1159 986 1117 761 428 909 599 1123 1070 960 780 891 464 484 683 507 1064 521 431 1069 + 981 473 585 811 530 612 1098 526 601 1077 660 817 634 495 1104 516 890 426 512 939 + 5 46 67 20 44 44 93 3 82 31 78 23 83 3 34 40 36 12 55 21 + 60 61 11 32 1 7 49 44 35 59 95 32 63 80 55 71 17 88 11 43 + 7 1 6 7 5 6 7 1 3 3 1 8 5 9 7 7 4 3 8 9 + 10 4 2 3 4 10 8 2 3 7 9 9 4 1 3 7 1 10 1 1 + 
732 933 691 670 499 629 561 816 810 910 964 509 546 494 490 412 802 1034 911 780 + 677 691 363 404 1008 520 728 736 854 812 819 684 539 553 983 733 895 691 446 515 + 63 27 17 40 27 48 15 46 78 96 81 39 99 65 2 26 2 3 40 19 + 28 10 38 33 44 39 15 85 49 99 70 96 66 47 79 90 48 98 97 94 + 7 8 1 4 9 8 9 1 10 10 6 4 2 3 5 8 3 1 9 7 + 3 5 8 5 6 2 2 8 8 4 7 3 1 1 2 9 7 6 7 3 + 625 448 143 797 452 717 316 387 491 464 203 778 536 518 462 512 296 316 756 578 + 131 129 691 685 575 431 369 57 607 17 505 289 453 292 509 382 690 111 254 425 + 14 75 70 39 94 69 32 33 75 82 51 66 63 4 83 94 91 81 64 74 + 21 9 9 67 45 39 8 39 80 75 32 58 72 73 30 93 98 43 26 18 + 1 10 8 8 8 10 5 7 3 10 3 8 5 6 4 9 8 2 5 7 + 7 9 6 2 4 1 9 4 2 2 1 4 9 7 3 3 4 2 8 4 + 513 228 419 398 232 147 101 102 756 38 39 135 116 555 465 823 347 49 714 338 + 411 440 230 354 50 419 16 773 580 437 149 139 874 291 353 852 773 606 132 835 + 66 26 8 14 75 40 99 7 27 18 11 96 49 48 75 81 99 70 98 68 + 58 79 65 66 41 22 41 85 88 1 67 40 65 67 63 50 72 29 28 33 + 5 6 5 7 7 3 7 2 4 10 4 5 1 6 9 9 10 4 2 4 + 1 1 2 4 5 4 6 3 9 3 1 7 9 4 9 10 9 10 5 1 + 259 201 384 841 310 629 132 431 815 125 254 707 381 705 201 296 405 481 414 469 + 722 251 748 425 813 273 177 672 570 21 348 17 650 81 159 661 683 206 166 59 + 56 68 92 64 27 47 46 15 12 88 66 21 60 74 53 28 49 46 87 91 + 85 93 26 63 2 59 4 3 62 43 36 72 56 94 14 36 2 17 7 61 + 5 4 6 9 7 10 8 9 10 5 3 4 5 7 6 10 1 7 6 3 + 5 4 8 10 3 10 6 9 9 7 1 10 6 9 5 3 5 8 6 8 + 355 429 511 168 486 286 141 481 493 478 421 411 77 117 635 178 447 236 337 512 + 394 488 233 703 456 83 383 120 534 657 320 108 205 276 322 115 193 149 768 364 + 48 43 58 92 38 28 83 7 39 25 96 37 25 17 71 78 94 38 59 30 + 75 76 88 95 22 72 67 76 21 21 82 84 84 70 95 29 51 20 49 26 + 3 4 4 6 8 2 5 2 9 5 10 2 1 8 5 4 2 1 2 10 + 4 3 5 10 1 6 3 7 2 9 6 3 8 6 9 3 6 9 1 2 + 786 140 199 360 561 183 810 489 856 46 765 378 333 798 428 72 870 468 631 702 + 416 854 171 480 411 655 793 850 408 521 278 722 195 597 71 861 595 
276 134 768 + 34 46 22 4 20 74 98 34 45 19 62 44 2 60 60 78 14 28 15 81 + 29 8 27 2 26 38 77 93 10 9 22 10 16 65 97 10 7 5 17 55 + 2 5 1 8 10 7 10 7 4 5 7 8 1 2 10 6 10 9 3 3 + 4 2 2 6 10 3 5 1 6 10 7 4 9 7 2 5 2 3 1 2 + 119 181 156 0 129 127 0 0 0 215 211 0 0 125 0 0 0 0 228 56 + 2 0 0 0 0 271 265 156 0 0 0 0 62 0 105 129 0 114 0 0 + 36 24 79 9 17 31 57 52 93 12 2 45 78 43 30 76 16 52 37 38 + 78 21 97 27 65 85 59 43 3 32 54 86 49 3 90 62 87 92 51 80 + 3 1 7 7 8 8 5 9 1 8 9 4 3 3 9 8 3 2 10 10 + 7 1 8 10 10 1 6 8 8 8 7 5 2 6 10 3 4 4 5 6 + 0 0 0 93 0 179 6 99 0 0 129 0 270 46 311 0 0 81 0 0 + 0 0 0 0 0 0 184 180 0 327 0 0 156 333 0 0 0 231 0 338 + 1 79 1 18 100 96 4 21 76 98 30 45 67 57 68 25 37 99 86 2 + 80 1 28 18 18 96 17 19 1 69 54 53 6 58 20 73 39 54 71 35 + 5 10 10 7 9 3 10 6 2 1 6 9 9 5 2 6 5 5 3 1 + 5 4 8 6 1 2 10 6 3 6 4 6 9 8 1 2 9 5 2 2 + 0 61 361 0 0 0 327 0 25 76 323 152 1 190 0 126 278 0 277 227 + 248 35 93 0 45 0 248 68 0 0 0 49 0 0 0 231 297 0 0 0 + 7 64 38 47 67 21 61 3 15 25 42 53 11 47 41 82 41 9 99 75 + 22 86 30 23 96 7 31 17 34 54 75 54 38 61 13 33 34 16 74 55 + 4 9 8 10 7 7 4 6 6 7 1 6 3 3 4 7 1 9 10 5 + 10 1 9 7 8 7 6 2 3 3 8 9 10 9 6 9 3 2 5 3 + 195 0 74 0 103 0 0 0 41 69 271 202 159 329 29 0 192 68 0 6 + 80 0 7 0 242 0 0 144 153 125 0 0 0 0 0 0 0 83 0 165 + 58 88 19 14 45 9 28 62 58 95 9 82 81 12 93 5 21 79 91 64 + 17 68 92 95 32 6 63 38 62 70 48 36 26 93 72 35 76 59 35 83 + 8 9 6 3 5 10 3 7 3 8 9 8 2 4 2 7 2 4 8 3 + 8 1 1 9 7 3 6 6 1 3 2 9 3 3 2 1 3 8 2 9 + 325 384 0 0 0 0 0 139 168 0 0 226 195 0 0 0 37 0 103 0 + 0 0 70 134 0 0 0 0 0 0 47 23 0 84 0 0 0 399 369 377 + 89 11 49 41 88 19 85 67 83 61 82 46 76 1 45 56 97 55 57 76 + 7 92 79 90 93 34 49 84 68 58 84 48 78 90 79 75 94 14 7 60 + 10 8 8 1 2 9 3 9 10 8 1 9 1 4 6 4 9 3 1 9 + 9 6 9 8 1 10 6 9 7 9 5 9 4 1 8 7 10 3 3 2 + 1445 1249 2181 1877 1517 1516 1253 1762 2246 2550 1652 1729 1912 2530 1272 1847 1408 2483 1939 1595 + 2624 1437 2244 2031 1653 2654 1478 1312 1962 1388 1334 1298 1683 1477 
2448 1596 2347 1610 1669 1726 + 59 46 15 51 81 59 86 89 94 31 89 64 22 20 98 100 19 39 70 24 + 65 55 35 67 69 87 5 61 69 8 69 43 30 41 93 53 46 68 22 33 + 3 9 5 10 8 7 8 4 3 9 8 4 10 3 3 8 1 5 5 3 + 6 4 10 1 3 6 6 1 9 5 6 5 9 9 5 6 7 1 2 6 + 1567 1721 1314 1352 2126 1136 1462 1138 1789 1507 2085 2152 1967 1862 2260 1711 2058 1220 2164 1254 + 1929 2127 1977 1741 1146 1824 1435 2305 1458 2306 1332 2166 2106 1478 1777 1899 2321 1919 2390 1435 + 58 59 77 73 34 95 68 3 24 65 36 57 75 25 31 6 62 86 78 23 + 88 10 82 87 83 13 26 77 7 65 70 76 47 88 52 71 18 57 90 52 + 8 2 9 8 2 2 10 3 7 8 7 8 2 8 8 7 6 6 9 2 + 5 10 5 6 4 7 7 4 6 7 9 2 3 8 9 7 5 4 6 5 + 1934 1175 1115 1502 2129 1792 1230 1803 1832 2123 1134 1253 2297 1666 1659 2101 2335 1304 2360 2293 + 2138 1620 1730 1340 1179 1868 2003 1600 2038 2238 1471 2313 1730 1505 1920 2194 2097 1926 2265 1361 + 4 72 79 38 86 43 42 86 34 77 9 49 17 28 100 44 32 2 12 92 + 3 59 22 16 82 12 84 44 52 27 30 7 20 24 72 36 25 75 99 69 + 5 2 9 4 9 9 10 8 4 10 8 9 5 2 5 4 9 7 5 9 + 7 5 9 2 1 10 6 2 1 2 4 8 6 7 10 2 1 8 6 5 + 1241 1177 952 1274 1716 1866 1388 1739 1257 1384 1903 1703 1428 1738 980 1550 1451 1819 1662 1977 + 1347 1417 915 1253 1283 1459 1145 1672 1486 1548 1721 1418 1591 1275 1845 1007 1624 994 1800 1640 + 43 56 37 79 6 72 1 20 15 34 71 69 61 91 39 90 62 48 77 51 + 65 60 31 23 44 39 28 79 3 46 73 96 72 99 48 56 10 66 13 40 + 1 10 3 7 3 7 7 3 5 2 4 3 8 2 6 8 4 7 2 10 + 3 6 9 8 3 2 3 8 10 4 9 1 9 9 9 3 3 7 6 10 + 1056 1538 1837 1610 1103 1848 1441 1934 1860 1421 2026 1948 1602 1425 1406 1502 1642 2039 1501 1472 + 1393 2113 1295 1910 1267 1838 1246 2010 1360 1208 1058 1876 1937 1486 1682 1281 2119 1245 1888 1436 + 48 38 51 33 88 6 12 88 83 55 61 47 44 32 62 21 13 37 57 25 + 57 24 67 77 73 1 88 98 86 26 98 11 49 13 51 43 42 27 79 25 + 4 3 5 8 9 3 8 3 10 1 2 10 6 3 2 5 5 8 7 2 + 4 5 7 7 6 4 9 7 5 5 1 9 2 3 4 4 7 8 4 9 + 1443 1541 1612 1229 1652 1580 1612 1310 1555 975 1395 1652 1522 837 1425 1406 997 1398 1699 1366 + 715 1681 763 
1024 852 917 1117 860 914 1693 1277 1722 1370 934 1176 1311 1226 1169 1181 1686 + 20 97 94 40 57 20 23 77 84 13 99 16 36 19 59 18 75 36 17 43 + 91 15 64 55 3 58 70 23 58 23 84 18 29 20 31 12 21 11 17 84 + 9 5 2 9 7 10 10 3 7 8 2 9 4 6 3 6 3 1 4 2 + 5 10 9 8 3 4 2 4 6 2 6 3 1 6 3 7 6 7 10 10 + 1171 1010 649 1448 751 1185 648 1100 676 672 1499 988 622 1362 1373 636 1191 1309 1333 577 + 701 666 1341 768 797 871 937 554 1163 1245 1482 1234 984 1293 607 837 920 1451 682 835 + 91 65 21 44 64 45 59 76 91 35 83 15 30 77 100 64 64 66 96 34 + 6 48 35 34 95 25 58 80 100 49 87 89 18 28 17 82 73 21 27 83 + 7 4 5 1 8 4 6 9 3 1 7 10 6 6 4 6 6 9 1 10 + 5 6 1 5 8 6 2 8 3 6 6 3 4 9 9 8 3 4 9 10 + 930 1059 1244 1337 1643 1557 941 1854 1158 1123 1435 1016 1771 1883 960 1119 1116 1010 1800 1102 + 1580 1245 959 1274 1313 1648 1962 1911 917 1313 1434 1166 1276 1021 904 1845 695 1499 1562 1137 + 17 28 62 91 89 61 24 81 8 99 87 82 52 81 31 48 28 98 14 32 + 69 94 71 94 38 67 16 95 24 67 41 99 48 97 16 74 54 14 90 91 + 3 6 8 8 9 2 2 10 2 1 4 6 2 6 5 1 4 5 6 4 + 6 3 5 2 2 8 1 4 4 6 7 1 3 10 9 2 9 6 9 1 + 957 2105 1120 1956 1943 1859 1043 2018 1305 1088 763 1805 895 1461 1409 896 1531 990 1491 1856 + 1429 736 1391 948 1056 863 1444 886 1564 1650 751 1549 1117 964 942 1360 1852 968 720 790 + 86 25 28 31 40 5 99 86 63 38 65 62 53 61 78 58 39 60 68 75 + 76 76 41 19 49 5 84 99 70 93 61 25 80 41 13 12 46 2 60 78 + 5 2 7 4 8 1 5 3 8 8 5 7 2 4 6 8 3 6 4 6 + 3 7 7 3 4 6 6 3 4 7 5 9 2 1 3 7 3 6 1 3 + 1484 864 1710 954 1928 1196 1211 933 1452 924 1210 759 768 901 1076 1204 1819 1324 1910 1314 + 1208 1369 1594 1594 1047 763 793 1387 1089 713 1866 771 1836 1608 686 1804 1837 1362 1029 1395 + 12 46 7 16 55 65 44 92 14 38 49 100 12 98 44 88 99 30 33 2 + 51 70 93 97 75 12 82 26 5 98 36 48 4 26 69 25 61 47 72 20 + 2 8 3 4 9 9 9 7 3 8 2 8 5 5 4 3 7 5 2 6 + 7 1 6 3 2 1 7 3 9 8 3 2 2 3 3 10 3 1 10 7 + 1309 524 939 577 378 358 220 611 813 508 587 1209 762 1001 688 452 299 1344 972 211 + 1322 1359 349 824 973 269 
486 1054 933 1070 1286 993 849 209 492 851 569 1356 914 332 + 37 37 64 2 96 82 96 1 49 37 47 47 39 29 12 34 38 40 62 58 + 45 5 51 17 89 13 6 13 25 4 36 52 37 72 26 42 38 66 40 94 + 2 7 2 5 7 7 9 3 7 1 2 3 9 7 7 5 10 1 10 7 + 7 3 2 10 4 1 8 4 2 7 6 3 2 10 7 9 4 6 5 7 + 846 751 1016 643 363 1089 392 1154 1120 512 995 509 1025 290 627 731 328 302 831 616 + 521 763 1016 216 727 431 1126 820 624 347 793 391 841 1044 1005 572 873 243 232 1130 + 31 96 43 48 56 53 73 18 80 26 49 55 69 41 39 97 14 76 88 88 + 39 63 18 73 92 88 32 1 53 87 7 24 25 53 3 98 45 29 54 80 +10 8 9 3 4 4 3 1 8 4 8 7 7 8 7 3 4 9 1 2 + 3 6 2 10 3 8 6 10 3 5 2 9 1 5 4 5 3 2 7 5 + 1126 453 366 995 1142 312 616 1308 212 487 895 670 909 1450 454 1011 783 749 944 922 + 1070 213 320 1287 902 1188 1227 1457 970 394 1109 1109 253 296 607 1465 941 1175 1230 834 + 94 74 66 15 57 24 66 53 52 51 20 79 66 76 65 78 14 77 20 41 + 40 81 78 61 16 69 13 32 40 78 76 54 42 94 97 30 77 59 83 63 + 10 6 6 6 10 2 6 5 4 10 2 6 3 8 1 8 9 9 4 6 + 10 2 5 1 3 2 1 10 3 10 4 5 5 3 8 7 1 5 1 7 + 604 331 1144 648 499 636 1080 1434 342 930 1131 741 1202 353 779 1191 1026 406 996 1204 + 1313 498 376 1079 866 956 1469 230 1104 1291 1471 544 1176 486 1280 900 551 703 265 520 + 52 75 35 70 47 97 50 18 34 9 4 78 23 30 5 84 52 34 45 71 + 86 45 88 80 17 42 81 53 69 57 45 36 89 75 84 77 57 33 75 30 + 4 1 8 4 4 2 2 6 10 8 2 7 1 7 7 10 4 9 8 4 + 5 5 2 3 6 7 7 10 1 7 6 10 9 9 6 7 10 6 5 8 + 1213 263 497 1051 1192 956 820 1466 1323 1104 1339 737 529 998 1364 392 789 680 564 1290 + 631 854 449 381 763 619 1372 893 1394 932 1453 1431 1481 1067 1252 1044 1159 802 1409 1105 + 61 57 26 79 2 26 74 27 25 52 22 15 61 51 54 23 70 92 29 33 + 39 99 87 34 83 45 45 14 45 61 97 65 97 27 16 84 39 78 55 70 + 4 10 3 6 3 9 2 8 10 2 10 9 10 6 8 1 6 2 8 1 + 10 8 9 6 3 5 1 3 3 5 8 10 7 1 8 7 1 5 2 4 + 1005 694 0 268 301 136 86 41 179 0 319 0 826 0 433 269 64 0 917 376 + 629 506 560 78 810 704 973 0 497 177 606 0 448 752 495 791 946 0 753 554 + 3 29 97 16 27 17 66 100 
52 83 76 55 29 38 83 7 40 50 18 87 + 93 36 92 28 27 70 93 66 6 77 90 41 23 36 26 96 1 79 26 77 + 9 6 1 3 4 4 8 8 7 3 10 9 9 7 9 5 10 7 2 5 + 9 8 6 1 2 3 4 4 4 10 2 6 10 4 8 1 10 5 6 3 + 892 585 889 788 887 0 182 643 596 743 0 571 975 136 0 1015 225 0 40 694 + 0 330 728 874 197 113 729 404 399 28 763 0 48 574 666 912 344 431 815 702 + 89 55 83 53 57 33 33 57 69 60 44 65 2 47 42 51 59 57 32 93 + 69 5 95 70 92 62 92 66 15 49 63 68 54 42 23 64 53 69 69 47 + 6 6 8 7 1 6 6 9 1 8 9 8 6 5 1 5 1 7 10 4 + 1 9 5 1 9 3 10 10 8 5 3 8 8 10 4 3 7 6 8 3 + 867 0 0 892 753 0 0 690 318 858 0 922 935 401 623 580 552 0 319 229 + 296 346 1073 0 978 25 506 559 281 729 32 384 33 43 350 419 698 380 309 1011 + 60 53 96 70 51 86 60 23 61 94 83 18 87 85 61 75 54 19 72 18 + 14 8 69 74 49 18 85 74 66 70 31 50 5 34 95 39 29 38 45 81 + 1 6 2 10 6 8 6 10 3 9 9 7 6 2 7 6 10 4 7 7 + 5 2 2 7 2 4 7 7 8 8 1 7 10 5 6 8 4 4 8 3 + 204 197 546 1039 103 484 11 443 266 0 147 0 549 0 1044 1024 997 426 195 581 + 399 100 625 0 784 661 0 445 700 729 761 542 1027 149 382 632 893 941 862 0 + 82 92 27 46 100 42 10 26 38 48 26 87 17 12 46 98 81 92 16 29 + 7 97 64 54 96 90 94 49 79 88 88 96 16 60 8 78 95 27 91 80 + 10 5 4 6 10 9 2 7 8 4 6 7 5 4 2 5 9 5 1 3 + 10 1 7 5 2 1 6 4 3 1 9 4 4 2 9 2 7 2 3 3 + 173 171 697 157 534 174 1070 590 659 0 0 547 1098 126 0 979 737 722 130 401 + 934 1089 80 0 404 758 666 0 773 361 257 11 53 513 0 0 1032 0 433 297 + 51 95 45 65 20 23 23 96 40 39 87 51 52 42 80 46 23 88 21 98 + 51 81 9 28 60 83 20 59 3 5 8 19 61 15 5 38 42 32 23 50 + 4 10 10 9 4 4 10 10 5 9 6 1 4 6 7 7 7 1 4 4 + 10 8 2 8 4 4 1 9 2 4 1 3 4 9 9 2 7 6 2 5 + 374 413 0 0 0 0 422 0 51 0 0 134 25 500 511 0 0 337 0 0 + 281 399 0 0 276 0 0 211 0 0 461 509 518 0 0 0 89 528 265 364 + 91 37 97 15 58 100 18 67 34 36 57 90 81 42 28 46 91 69 23 99 + 80 20 11 17 96 52 41 71 14 75 39 34 36 52 9 84 93 99 35 100 + 7 3 8 6 6 5 10 7 7 2 2 3 5 6 4 10 2 1 9 10 + 10 10 4 3 8 10 10 2 4 4 9 8 5 2 8 6 5 10 8 1 + 0 444 471 485 348 0 219 221 359 185 
0 96 291 505 260 0 0 589 0 0 + 0 400 651 0 361 135 258 0 299 0 0 424 122 0 237 551 0 0 0 334 + 53 49 10 85 87 100 93 76 15 63 5 88 24 31 30 29 23 52 32 11 + 55 27 53 47 88 9 13 56 17 5 10 59 68 75 21 79 94 31 100 43 + 7 2 10 1 1 8 5 10 8 2 8 7 7 1 10 4 10 1 6 5 + 3 2 3 7 3 1 4 4 4 2 4 4 9 2 10 4 10 5 6 3 + 288 199 0 0 320 67 355 554 0 289 0 0 203 448 0 216 220 472 0 0 + 84 314 0 0 393 201 0 239 0 316 499 130 116 0 424 125 360 354 516 0 + 19 12 85 69 9 2 24 64 8 95 78 33 24 82 5 30 89 8 41 13 + 66 69 99 45 46 17 72 90 46 55 29 15 14 40 18 91 24 81 67 93 + 4 1 2 4 5 8 4 10 8 8 4 8 9 1 6 7 6 9 5 3 + 10 2 2 7 7 7 9 7 6 5 3 6 8 3 4 4 10 6 6 5 + 316 351 0 0 308 14 512 0 0 419 0 330 0 0 357 0 275 0 0 0 + 536 0 0 53 139 235 199 423 0 0 0 0 423 208 550 0 297 450 372 0 + 95 62 4 64 53 38 62 74 64 62 1 13 56 22 50 83 83 59 18 90 + 30 67 90 41 10 94 30 49 62 40 79 27 56 59 58 93 87 57 65 68 + 3 1 2 2 1 3 5 8 8 1 2 4 7 3 5 10 2 2 3 8 + 1 6 7 1 6 1 2 4 3 7 6 1 10 10 10 8 9 4 1 3 + 395 0 588 300 333 31 340 0 512 537 457 0 0 166 0 0 578 0 571 25 + 0 515 0 0 0 552 0 222 216 0 454 0 85 0 606 0 0 353 0 498 + 17 88 28 68 79 94 40 52 64 46 77 23 21 29 41 31 29 81 11 29 + 93 92 45 70 47 8 78 14 54 25 24 22 4 94 75 58 16 19 100 90 + 10 7 2 4 1 8 4 10 7 4 6 1 6 1 3 4 10 2 7 5 + 4 5 8 9 5 3 5 7 7 6 6 6 3 1 4 7 3 8 7 1 + 1338 2062 1669 1534 1564 1391 2193 2223 1373 1713 1026 1743 1537 2199 1243 1534 1179 1214 1727 921 + 2359 2189 2155 1015 1245 1339 871 1508 2175 1422 1456 1406 926 1038 1128 2146 1942 2078 2042 2013 + 52 31 94 85 72 9 68 47 15 83 89 59 50 36 31 17 79 3 45 19 + 28 72 60 69 53 83 85 70 40 31 4 31 99 64 81 65 72 81 58 84 + 9 1 9 10 6 3 6 7 10 6 5 8 10 4 2 2 10 9 9 10 + 5 6 10 3 7 1 3 2 6 10 9 2 3 9 4 5 5 10 4 8 + 1349 2078 1523 2126 1871 1271 1006 1749 1928 1774 1537 1783 2157 1532 1854 1665 2563 1234 1640 1226 + 2350 2052 1793 1525 2415 2488 2060 1254 1641 1408 1302 1105 1150 1980 2200 1949 2375 2320 914 2119 + 90 86 18 19 47 73 33 12 53 61 96 20 1 29 51 86 12 9 23 55 + 37 85 
12 77 52 95 98 3 65 31 38 89 9 36 68 93 45 9 1 77 + 2 1 1 2 3 7 4 7 4 5 2 4 9 7 5 8 1 4 5 2 + 9 3 9 2 3 5 5 4 9 8 7 6 5 6 10 4 4 9 5 10 + 1747 2082 2255 1152 1211 1532 819 880 2087 890 916 2090 1131 1782 1415 2247 1650 2229 2271 1521 + 1258 2268 1744 1983 1566 1261 1928 1502 1991 2006 1416 2025 1878 1992 840 1960 1881 2049 1593 1427 + 52 1 94 81 46 32 2 3 44 22 35 9 60 46 12 88 38 32 27 32 + 50 63 53 38 96 73 42 4 3 50 24 32 27 35 4 28 53 23 70 100 + 10 10 2 1 10 2 1 4 2 1 3 9 5 10 9 4 9 10 4 8 + 9 4 5 10 3 5 2 10 3 8 1 9 2 8 9 10 2 2 8 6 + 1839 685 1442 1414 1747 761 1405 1888 1750 1448 1292 737 1821 1696 897 1655 653 1330 1194 1001 + 738 695 1941 1384 1101 929 1236 1250 1072 1012 912 668 816 1936 897 1301 1214 1155 716 1817 + 88 77 95 82 40 84 17 79 78 60 23 32 41 78 1 76 92 62 16 50 + 26 92 16 98 64 99 22 65 56 68 36 100 79 37 38 55 40 97 84 86 + 10 9 6 1 3 6 4 6 6 2 6 4 6 1 6 7 9 1 8 8 + 8 5 2 9 6 4 1 6 2 3 10 3 6 6 9 7 6 9 1 5 + 2380 2123 2763 2365 1172 1171 1943 1878 1732 1367 1370 986 1583 1114 1156 2097 1554 2193 1594 1622 + 1725 1454 1528 2535 2205 2885 1562 2664 1016 1537 2429 1427 2647 1422 2377 1712 2335 1377 1354 1078 + 73 68 69 37 27 80 3 2 78 24 54 97 86 71 83 5 39 41 29 12 + 50 11 9 7 65 9 18 83 53 17 29 37 16 70 93 66 81 56 40 73 + 2 3 4 6 5 4 5 1 6 4 5 2 9 3 8 3 5 9 3 2 + 5 6 8 5 8 2 9 2 10 3 3 1 6 9 2 3 2 8 8 9 + 426 1677 602 778 1508 480 1314 481 1390 1584 1019 718 1699 586 1048 758 1673 1716 1667 576 + 992 1056 823 1550 387 1735 1067 466 1497 1620 859 626 1525 1071 1501 1388 537 1036 1416 1843 + 14 82 47 64 34 24 58 37 92 15 99 18 68 22 3 35 70 63 81 23 + 37 11 21 17 34 91 90 58 54 83 33 88 19 17 74 53 78 80 13 1 + 1 3 8 10 9 7 8 8 2 4 5 5 10 3 5 8 10 3 7 10 + 10 4 4 2 5 5 10 5 8 1 5 4 1 1 2 4 6 2 7 8 + 512 426 483 1102 835 1669 1676 870 657 766 1767 1700 1635 420 1302 1545 990 1611 1546 1456 + 478 1646 1676 1729 935 573 1300 1865 611 751 695 1276 1866 1105 1798 765 391 1258 856 1706 + 43 79 34 85 17 73 61 53 96 87 10 8 83 82 84 27 8 76 66 92 + 
30 37 17 85 37 88 76 92 44 4 59 4 94 14 62 13 60 100 57 14 + 10 8 5 9 6 2 1 7 2 8 5 6 2 8 6 1 4 5 4 6 + 9 2 2 6 10 6 8 8 7 9 3 9 8 2 10 8 1 9 8 5 + 1109 506 2070 665 2085 1505 533 1899 1501 724 1976 1522 581 557 2018 690 1868 1554 844 1590 + 467 1940 1763 1369 1936 1425 2013 679 1495 724 1630 1037 1915 1932 1138 1338 1323 1165 1816 2105 + 21 72 15 25 58 39 69 20 96 47 14 86 59 57 25 48 99 81 57 32 + 98 87 42 69 65 93 92 92 37 72 95 37 79 49 17 44 85 2 4 57 + 4 10 5 10 6 10 5 8 10 10 9 2 9 7 7 1 5 7 9 1 + 8 10 2 4 6 1 4 9 5 5 7 9 1 8 8 6 8 6 4 3 + 1806 1799 1583 1636 1853 2134 780 1173 490 552 782 1959 1216 2203 1886 1435 1192 1493 539 905 + 1803 1248 2125 1012 2231 586 696 1683 1072 1571 695 1184 641 663 2045 1416 1443 728 2213 1614 + 37 29 53 10 69 83 16 13 25 55 18 73 51 2 4 63 22 34 3 57 + 42 13 18 31 40 56 32 65 76 32 5 43 38 63 76 51 40 26 63 79 + 2 8 1 10 5 1 6 3 1 4 4 8 1 7 5 6 2 3 2 10 + 1 10 4 4 2 6 9 3 10 10 2 2 5 1 10 7 9 2 4 6 + 661 1410 923 401 1076 1203 573 1355 1202 647 449 846 818 1337 1200 589 1170 1086 952 664 + 1001 1267 637 1396 923 672 993 756 723 1567 1476 1369 1178 994 1102 1502 1361 678 1217 788 + 10 69 88 73 89 24 16 9 43 26 6 95 42 29 79 47 81 39 34 86 + 86 17 45 36 10 76 68 24 32 81 34 81 63 52 56 39 40 61 21 35 + 5 10 8 8 10 8 10 6 8 6 7 10 7 1 1 9 10 4 6 8 + 10 5 2 6 9 9 10 1 7 4 5 3 2 9 7 2 1 4 7 10 + 420 1309 525 1430 1497 131 131 1514 296 1456 762 1278 426 1374 41 693 575 814 596 175 + 574 446 977 1315 476 1021 1343 618 409 1383 594 1099 1208 364 492 1369 1164 995 1251 1014 + 85 65 41 46 85 31 59 91 41 26 43 78 43 1 85 72 10 59 55 39 + 88 59 1 52 51 8 50 84 3 71 5 12 75 64 50 16 26 55 46 79 + 10 4 9 5 6 3 1 8 3 5 4 7 5 7 1 1 3 10 7 1 + 6 1 5 9 3 5 5 8 10 1 7 1 2 5 10 4 2 7 8 1 + 655 170 1030 673 453 1100 1370 1455 621 209 1163 288 292 1547 506 1074 972 78 128 1240 + 143 1531 81 269 1265 886 760 1212 1146 673 29 500 1497 377 482 374 981 691 333 256 + 35 43 7 66 53 26 13 41 36 74 67 53 94 34 32 16 69 7 51 43 + 29 47 43 52 70 60 7 64 17 
6 18 2 23 99 31 94 69 3 80 70 + 5 9 5 9 5 1 3 6 4 6 1 6 1 9 2 4 4 2 6 5 + 9 10 3 4 1 8 9 8 2 2 3 5 4 9 6 5 7 7 4 5 + 892 1198 538 525 1016 650 884 1099 634 992 1092 255 10 253 148 633 1093 974 662 1268 + 411 66 711 1030 787 261 342 1365 60 1361 382 1375 15 539 575 564 675 314 1190 881 + 62 83 63 75 18 87 28 46 73 44 99 55 62 99 14 61 48 50 86 24 + 20 46 6 9 98 92 45 84 27 44 51 19 83 11 49 89 68 58 46 71 + 4 8 4 3 10 4 5 4 1 9 7 8 1 8 6 3 5 10 4 7 + 6 1 2 2 6 3 7 7 3 4 10 5 5 6 4 9 10 1 6 1 + 236 261 1737 227 383 1094 776 1307 1372 671 88 172 684 1525 1628 615 870 1468 1711 1129 + 623 98 52 983 751 941 916 167 499 441 1124 218 23 609 1714 654 223 1524 1209 1551 + 97 16 62 14 29 41 26 97 26 62 36 20 43 19 99 89 64 7 99 12 + 2 73 2 1 98 17 49 69 18 94 60 35 61 9 33 20 51 23 44 46 + 10 10 7 8 10 1 8 9 8 3 6 2 6 10 9 1 5 10 9 7 + 4 8 9 6 1 5 10 2 2 8 1 2 2 9 8 1 4 8 10 8 + 553 1235 689 933 678 1103 595 145 126 988 741 1137 1095 1150 537 118 962 956 754 1100 + 611 1301 941 1056 768 686 337 1151 475 464 913 357 465 914 1257 1046 221 851 1011 1192 + 97 29 35 31 69 73 13 86 97 7 53 88 60 69 33 89 64 51 11 43 + 42 96 23 8 58 3 82 47 1 20 80 24 89 53 88 6 70 57 53 64 + 6 10 7 9 4 6 8 8 2 1 5 1 9 7 9 4 2 3 10 5 + 7 2 7 1 8 3 9 1 9 4 9 6 9 5 3 7 10 5 2 5 + 0 883 280 474 0 1053 782 2 185 0 779 873 232 0 0 332 497 45 629 734 + 360 1082 434 425 151 821 973 288 1004 616 993 1209 581 675 530 742 1155 0 1174 465 + 49 59 92 74 44 16 70 11 16 11 3 4 65 80 43 50 20 9 18 52 + 35 40 37 16 96 68 85 49 14 39 25 63 29 55 100 73 11 68 30 97 + 1 9 2 10 8 8 4 10 5 1 5 6 9 8 7 9 2 6 3 9 + 7 8 1 9 6 3 3 2 7 6 3 2 9 8 10 4 6 5 5 3 + 437 311 882 962 0 0 410 619 899 691 625 0 0 0 631 137 493 411 39 157 + 0 521 635 0 395 909 84 790 62 936 860 0 736 495 1040 0 607 1050 337 0 + 67 98 20 98 91 60 55 71 41 60 18 12 39 32 31 65 35 59 96 81 + 5 80 91 10 58 86 76 8 25 22 85 34 71 83 31 49 84 22 8 17 + 4 3 1 4 7 5 8 3 10 9 10 1 4 6 9 3 1 2 2 3 + 8 9 8 8 4 5 10 5 4 4 4 2 2 5 1 8 2 5 2 4 + 966 151 262 477 980 
323 0 1172 0 848 1200 187 0 1148 1030 826 522 0 0 135 + 0 1206 150 0 0 36 0 243 1007 0 0 0 1244 413 503 0 0 882 192 603 + 94 92 86 56 74 45 8 68 59 92 8 62 89 62 80 31 38 34 97 71 + 5 46 12 81 17 84 53 62 31 90 28 21 31 57 23 64 28 34 1 4 + 3 6 10 10 6 6 6 9 7 5 6 1 3 1 5 4 1 10 10 8 + 8 3 2 3 7 6 5 10 10 4 9 1 9 4 3 10 2 2 2 1 + 628 713 0 822 0 413 488 833 0 0 736 1019 439 211 171 794 447 941 1064 845 + 310 83 654 655 30 0 263 72 497 0 712 62 916 54 155 1043 267 0 781 422 + 99 38 34 9 87 68 44 8 83 100 59 8 52 57 48 48 88 20 54 71 + 13 19 2 22 75 40 92 51 54 19 3 9 55 50 94 52 12 75 99 38 + 2 7 6 3 6 2 7 8 4 7 5 9 9 10 5 1 5 6 9 7 + 6 4 6 1 10 1 5 8 8 9 4 5 7 5 6 8 1 4 7 7 + 956 600 1039 413 130 759 56 968 702 442 413 360 985 0 986 389 870 1005 507 290 + 135 0 929 567 627 667 648 0 0 93 989 234 1137 680 989 1121 655 639 760 0 + 82 87 86 18 48 76 57 100 56 27 40 5 91 61 74 18 71 55 77 28 + 37 35 42 29 63 86 45 17 47 40 34 90 17 87 41 51 30 67 73 4 + 6 9 10 3 9 4 3 1 8 7 10 3 6 4 7 6 4 9 6 5 + 8 2 1 1 8 7 10 3 8 6 3 10 2 6 4 4 3 9 2 7 + 303 0 403 0 0 508 114 0 0 273 111 164 0 338 0 0 0 251 0 272 + 184 328 0 116 0 347 0 0 0 750 54 120 60 0 0 0 0 780 100 804 + 70 43 42 48 8 70 18 57 49 72 92 11 100 68 74 46 1 30 52 97 + 16 84 93 3 38 8 44 8 89 34 26 57 19 48 41 99 6 87 68 86 + 8 4 6 6 2 3 1 3 6 4 9 10 7 1 9 7 4 8 4 8 + 1 4 9 3 2 1 3 6 8 1 10 3 1 9 4 7 9 7 6 5 + 76 782 0 0 231 340 381 3 0 730 93 0 273 114 0 0 0 0 0 0 + 624 0 588 0 157 0 0 0 0 21 0 760 506 69 0 0 0 127 0 184 + 18 65 81 68 43 4 80 92 60 86 39 90 100 25 41 42 21 90 46 32 + 11 5 93 45 86 45 99 72 89 37 58 6 91 65 45 41 92 63 10 78 + 7 8 3 4 3 5 10 6 10 2 3 6 3 5 4 9 5 5 5 10 + 7 10 4 6 1 2 3 4 5 2 4 2 6 3 6 1 9 8 9 10 + 0 637 0 0 578 572 0 0 0 390 396 0 358 0 253 0 0 94 0 0 + 152 878 0 237 296 0 0 714 0 588 0 677 0 362 0 0 386 0 81 388 + 9 61 58 50 58 86 69 25 17 73 81 5 37 33 48 18 1 34 3 92 + 17 95 57 62 23 88 97 63 83 38 92 36 91 24 86 23 69 78 3 70 + 5 4 2 8 9 1 3 7 4 3 7 3 1 6 7 3 10 9 7 9 + 9 9 7 1 6 
8 9 9 4 6 8 8 9 5 4 9 2 6 9 8 + 257 0 558 0 0 516 0 585 169 0 513 0 576 0 0 653 134 0 168 0 + 633 0 391 87 711 694 251 0 627 0 501 222 0 637 717 0 783 797 0 0 + 53 96 10 66 86 76 93 77 74 90 96 26 91 48 45 88 41 13 45 40 + 54 1 78 62 89 82 5 36 35 77 65 51 5 63 74 95 44 68 27 60 + 7 10 6 8 3 8 6 10 8 3 9 8 10 8 9 5 5 4 2 4 + 8 4 10 10 2 2 6 10 9 4 8 9 10 4 5 5 3 10 3 6 + 0 0 414 395 895 454 0 0 0 0 0 562 0 0 0 0 0 208 502 719 + 492 689 205 0 0 0 0 0 0 369 0 387 62 0 802 592 115 492 512 0 + 91 87 47 96 88 88 35 72 14 48 41 23 65 76 61 72 66 30 73 40 + 70 25 31 57 57 25 67 51 37 51 93 56 47 74 96 8 53 20 44 48 + 1 1 6 9 9 10 9 3 2 4 9 3 6 5 1 7 1 1 4 2 + 1 6 9 8 6 2 2 2 2 5 2 8 8 7 10 5 9 6 3 6 + 1702 2117 2171 2067 2477 2866 1026 1652 1803 1835 1758 2600 1385 1375 2016 1910 1018 1725 1750 2637 + 2831 2622 1326 1596 1075 1559 771 2629 1913 2043 2335 2093 2105 761 845 2121 2131 2497 1824 2775 + 90 3 63 19 54 87 60 38 49 11 50 72 93 77 2 90 32 44 64 62 + 42 20 64 62 24 69 4 68 11 6 69 59 61 2 18 19 61 78 90 35 + 10 6 2 2 7 7 6 2 4 5 3 10 2 2 1 7 10 6 10 5 + 8 10 3 6 1 4 7 5 4 2 3 5 10 8 3 4 2 7 7 6 + 1077 2349 748 1107 612 1111 2393 2411 592 757 1063 1644 1522 1852 2348 1293 1657 1341 728 2475 + 668 1616 1020 1993 2345 1324 2337 1887 959 596 2234 725 1269 1468 1814 1230 1037 1569 2184 1571 + 45 68 25 21 1 95 8 6 18 95 36 49 36 66 52 67 22 64 76 75 + 75 94 6 96 18 16 18 54 5 44 67 74 16 44 84 87 60 34 82 28 + 9 3 9 3 7 4 2 7 9 1 9 1 1 5 9 1 1 3 3 2 + 3 7 6 1 10 6 6 6 3 5 7 5 2 4 2 9 6 10 2 8 + 2017 780 2268 1232 1486 1892 1167 1097 1071 1186 1160 1453 1512 1372 1281 1940 691 1653 1349 1911 + 1161 1034 2267 1328 1293 663 1519 708 878 1372 1157 2024 1908 2167 1378 942 689 1616 816 1200 + 80 57 81 87 75 84 97 9 88 69 90 36 83 9 75 30 48 53 43 56 + 3 7 64 81 94 25 86 76 96 20 35 14 11 52 17 74 72 39 13 35 + 8 3 6 8 8 1 8 7 2 1 8 1 6 1 7 7 2 9 6 5 + 1 5 3 7 3 8 8 5 7 10 4 1 7 2 8 9 1 9 1 4 + 1406 2620 1708 868 2449 1982 2382 2807 2593 1857 767 2280 2118 941 2734 1902 1750 
2773 2456 850 + 2453 2152 2229 2372 1984 750 902 734 1181 2180 710 1712 2678 1651 2201 882 2048 2665 2413 1948 + 47 99 55 8 49 80 25 85 56 58 82 16 51 25 11 11 48 30 78 81 + 86 73 22 40 34 7 65 6 88 39 4 99 74 30 16 45 44 92 55 69 + 8 5 5 2 5 7 8 3 6 1 6 2 4 7 1 2 8 9 8 8 + 5 8 4 3 1 10 2 4 1 8 6 8 9 1 5 2 5 3 1 6 + 2131 2372 2386 1464 1665 2256 633 2265 2299 664 1505 1065 1060 2075 2130 791 708 1656 1194 2338 + 1625 2549 1222 1537 634 2421 1959 977 725 843 676 1033 1948 1700 1397 1779 1233 748 1337 1940 + 27 77 52 78 26 63 68 74 44 36 58 77 88 88 80 43 90 63 71 31 + 14 58 43 1 50 24 89 18 16 64 79 50 59 35 60 68 53 48 60 45 + 7 2 1 3 3 4 8 6 3 5 1 2 5 4 4 8 4 3 1 3 + 5 4 4 4 10 1 4 4 1 6 10 1 7 8 1 9 6 9 10 9 + 237 1217 1587 1994 1991 1195 1477 1525 968 1952 2371 822 413 1314 2139 1235 492 2238 755 940 + 259 1356 635 294 701 295 1559 1768 2369 726 1007 2290 1925 507 2085 1849 606 1743 859 2344 + 26 27 54 92 35 64 78 18 5 34 43 8 65 24 79 64 70 76 92 90 + 5 83 69 95 81 93 72 90 40 7 45 91 49 78 6 14 5 13 55 80 + 1 4 4 10 7 1 10 9 3 10 6 6 5 8 6 4 4 8 10 5 + 2 9 7 1 8 6 3 7 9 9 6 8 2 5 7 9 1 7 2 8 + 894 2007 1726 868 1628 2120 505 575 2001 1808 1630 2012 2263 248 1103 2307 1114 1751 1595 914 + 1848 367 2220 600 535 1488 2027 1040 610 2228 334 1923 1054 340 228 769 422 2079 441 264 + 82 36 18 50 34 69 88 66 19 33 42 4 85 12 82 13 12 44 88 45 + 4 20 17 56 95 85 5 4 49 36 64 50 55 23 89 58 32 50 47 97 + 7 8 5 1 7 4 6 4 4 10 4 2 9 4 7 2 9 6 3 10 + 4 5 5 2 8 5 10 8 6 5 7 6 10 1 9 6 2 6 4 8 + 674 456 1540 1236 1816 2008 878 1173 422 1376 244 1143 883 602 1167 285 1934 783 241 823 + 194 1543 568 1513 211 1447 1829 1291 1282 1186 572 2010 1924 275 722 947 1558 241 867 963 + 58 47 84 55 10 17 6 65 93 69 35 59 79 84 44 33 66 97 85 86 + 84 96 86 21 88 22 55 42 62 69 79 57 18 85 20 78 97 70 21 20 + 7 6 10 7 4 3 4 6 3 6 6 8 9 9 5 4 9 4 2 4 + 6 8 1 9 3 4 9 1 1 10 9 3 9 1 4 10 4 8 4 7 + 1786 896 2051 2195 254 2157 2573 2021 795 341 585 1241 1619 1215 1151 568 2437 2473 1213 1246 + 
1907 2446 1672 1456 922 613 1896 929 2394 1623 2428 2047 490 1764 1840 397 1497 2348 1784 2266 + 48 54 68 13 77 100 22 69 65 31 83 74 3 26 96 72 14 17 85 7 + 48 89 50 30 82 80 72 91 23 81 7 47 4 35 91 55 87 41 39 76 + 3 4 8 9 7 5 4 10 5 3 4 3 9 10 6 3 2 8 3 4 + 5 2 1 2 6 5 10 1 3 4 2 10 2 4 4 7 7 4 5 1 + 1579 287 1273 2303 1093 2152 1853 299 1321 1075 1570 957 1683 718 697 2018 2311 1762 1056 764 + 1694 1301 2055 1484 1854 1925 2096 344 1691 351 1064 1454 2161 807 1741 1745 1769 1515 1533 580 + 72 6 63 32 78 62 99 33 97 60 22 52 45 76 47 78 63 40 70 4 + 26 32 63 26 30 88 79 43 6 96 46 56 8 96 72 71 21 83 42 97 + 7 8 2 9 3 4 2 2 3 2 3 8 8 7 6 4 5 6 5 10 + 3 6 7 3 6 7 4 5 9 6 1 7 3 7 4 10 1 8 6 9 + 1023 0 0 660 228 0 732 1145 1008 1912 1104 1101 861 1191 372 1551 1685 958 0 0 + 846 969 1185 1958 798 141 767 0 143 302 1448 1685 1068 123 655 649 414 1046 1680 251 + 78 3 41 24 83 100 84 16 84 51 69 97 18 27 86 4 50 54 100 76 + 75 11 57 56 13 30 80 89 14 95 71 25 56 44 8 15 45 69 50 15 + 7 6 8 10 7 7 4 9 1 3 2 3 4 6 5 2 4 5 9 9 + 2 4 2 7 1 3 8 10 2 9 2 4 4 2 3 6 4 2 5 1 + 94 1227 63 140 429 625 1510 1616 370 1643 1336 103 1152 1814 1462 1506 1339 195 0 1240 + 754 912 1291 1690 1319 1245 0 1841 677 1349 330 299 1271 589 483 1420 0 1076 39 1839 + 43 100 53 5 90 67 98 80 45 52 19 20 65 22 78 6 39 1 13 26 + 82 63 41 69 37 39 58 27 76 9 36 97 98 8 26 77 31 24 59 69 + 1 5 5 5 4 10 1 10 8 8 4 1 6 1 9 3 1 7 1 2 + 2 8 3 6 8 5 9 4 7 1 5 5 4 8 3 9 2 3 7 8 + 422 532 1715 1540 598 781 1339 326 1439 39 296 800 938 1552 89 1275 1747 389 823 989 + 1257 426 1513 1014 182 1139 1359 307 425 947 51 514 28 1541 999 64 779 1484 0 929 + 73 4 28 92 28 67 40 91 29 63 42 32 57 60 67 43 22 75 24 60 + 61 82 3 18 39 5 71 56 50 24 72 18 14 70 29 49 83 28 45 98 + 4 5 7 10 5 3 9 3 3 8 6 4 8 1 4 3 9 10 2 1 + 2 5 2 8 3 6 10 8 5 7 4 3 1 8 8 9 10 4 8 1 + 422 192 488 1319 1672 163 137 427 1141 674 1429 1364 676 587 993 839 363 417 179 1366 + 263 0 1537 691 1188 241 511 1292 697 0 1032 1714 1521 666 1478 335 
0 1297 710 83 + 100 75 71 8 49 56 90 17 13 41 90 48 93 92 76 77 21 25 28 86 + 55 10 9 58 84 70 57 79 22 79 88 38 13 66 99 28 29 13 61 46 + 2 5 9 10 2 2 4 8 4 7 6 7 5 10 2 2 9 7 2 6 + 10 8 2 7 9 1 6 10 7 9 7 2 3 2 9 3 2 4 8 1 + 1246 152 965 1255 353 1193 1701 1284 1061 960 1505 1466 318 1651 1082 282 1048 985 400 1832 + 1151 243 1182 1721 861 411 1850 0 382 1578 17 0 766 1244 394 1199 1550 200 0 1856 + 86 27 68 77 39 46 5 28 2 91 91 86 99 21 4 73 83 52 97 18 + 35 47 3 49 64 28 32 1 8 65 74 9 31 84 94 14 28 65 39 58 + 5 8 2 3 4 1 8 5 7 8 7 4 9 9 7 7 10 7 9 2 + 10 4 10 10 9 5 8 1 1 3 5 7 4 6 6 10 10 8 2 3 + 0 0 152 1288 16 275 1087 0 462 1130 1134 805 798 1273 370 1290 0 0 495 0 + 199 751 1299 334 1309 735 0 0 1240 1183 1096 78 319 0 459 10 767 0 879 160 + 20 27 81 26 8 99 18 100 29 73 52 68 30 7 31 99 86 9 81 69 + 99 44 7 52 100 27 91 31 49 27 93 88 32 67 54 9 66 22 89 85 + 7 3 8 6 8 7 7 8 6 9 6 1 10 4 7 9 9 8 2 3 + 7 9 8 6 2 3 7 5 5 2 10 4 9 2 7 1 4 8 7 1 + 1338 923 0 770 1335 300 0 251 1167 1117 326 518 198 951 909 1494 241 0 7 360 + 1072 795 1055 1145 1299 0 728 0 303 785 0 0 1254 1142 1225 1178 194 467 0 1221 + 20 95 51 86 95 33 43 11 55 3 56 19 70 58 12 27 34 5 8 58 + 73 4 21 33 40 68 5 82 16 63 92 33 9 8 93 20 58 76 97 79 + 6 5 4 6 2 6 7 7 4 1 3 5 9 9 3 8 1 10 3 2 + 4 4 1 4 8 6 9 4 3 5 2 1 9 9 9 6 1 9 3 10 + 0 407 1105 76 733 1032 363 397 0 403 65 811 529 525 835 607 0 0 82 0 + 1011 710 0 747 0 1073 880 1134 393 864 882 0 1175 0 0 378 113 887 473 1207 + 30 3 97 91 68 17 70 40 71 46 68 65 29 38 83 31 61 100 86 6 + 14 85 24 60 61 6 24 68 7 24 91 5 82 56 6 54 45 71 31 96 + 1 7 1 5 9 7 1 7 2 6 10 4 3 10 1 7 2 10 9 6 + 10 5 6 3 4 7 4 2 1 7 1 5 5 9 7 8 1 5 5 5 + 1068 1151 1235 0 0 14 0 283 0 0 1263 269 229 0 0 23 907 79 0 0 + 191 683 698 1288 278 486 0 538 822 0 958 0 0 0 82 0 1142 0 359 34 + 26 77 17 78 22 74 68 72 92 21 27 38 47 65 56 89 28 6 98 38 + 3 98 73 4 69 48 78 7 51 15 91 71 100 13 65 24 100 7 34 94 + 7 4 1 3 3 6 5 2 10 8 5 6 8 3 8 7 4 3 10 1 + 6 5 4 4 6 10 
5 3 9 7 1 5 7 2 4 9 7 5 10 5 + 557 1428 0 0 0 759 0 0 0 0 0 0 904 909 0 500 0 1016 0 1431 + 0 916 719 290 1306 0 19 543 64 599 891 67 150 548 609 0 1338 493 1399 739 + 8 97 86 87 82 81 86 93 65 3 53 72 92 8 22 81 24 92 8 83 + 53 6 1 40 96 54 73 22 100 92 71 99 40 31 21 45 19 33 28 37 + 10 10 2 10 4 7 5 9 5 8 8 10 8 9 6 3 8 8 9 6 + 8 1 1 9 9 1 8 4 10 4 1 1 10 5 2 4 5 3 9 6 + 378 913 0 0 534 0 91 0 0 461 0 191 0 48 690 945 0 0 0 0 + 0 140 0 0 261 719 927 0 0 0 0 881 241 0 0 0 0 0 0 675 + 7 95 56 62 98 66 47 95 63 17 43 4 58 89 50 92 67 76 56 69 + 2 26 23 46 3 38 93 37 40 67 86 59 7 19 1 35 31 54 14 88 + 4 5 5 9 4 6 5 2 4 4 1 8 7 2 10 9 4 9 2 9 + 4 9 4 1 5 4 3 1 3 9 1 4 2 8 3 7 6 9 7 9 + 266 139 686 373 572 37 0 376 716 0 558 0 0 0 0 0 0 0 63 0 + 473 0 819 0 316 0 634 151 0 574 52 0 0 0 796 0 665 0 140 401 + 81 57 60 48 52 13 56 70 89 43 72 73 71 86 88 8 3 32 61 32 + 19 27 28 1 81 66 1 78 80 2 73 27 34 36 13 16 90 27 70 16 + 10 3 2 4 10 9 10 10 2 9 2 10 7 7 8 10 10 8 1 9 + 8 9 6 3 5 8 4 2 5 1 7 5 7 9 3 5 2 2 2 2 + 0 350 627 0 383 122 936 804 0 875 357 252 0 0 0 0 111 0 935 0 + 871 0 0 0 0 0 0 131 282 0 0 416 848 0 0 542 247 0 376 0 + 15 21 8 96 50 55 94 15 72 12 94 23 81 26 60 20 4 86 22 7 + 98 32 86 91 46 59 38 78 60 14 42 80 82 98 41 13 79 63 53 93 + 8 5 2 9 9 4 9 1 3 2 2 9 6 9 3 4 2 3 7 6 + 3 7 10 9 10 8 6 5 2 3 7 8 7 2 2 3 1 2 4 7 + 0 740 0 0 571 272 0 0 0 0 0 870 845 765 668 208 0 281 745 0 + 0 0 0 799 555 537 595 752 572 0 41 0 258 61 0 0 933 1021 0 778 + 26 78 87 57 31 82 20 28 71 10 41 1 69 94 71 21 74 17 12 37 + 63 19 43 1 44 89 23 69 49 74 86 99 41 69 47 52 10 25 97 93 + 7 8 5 8 6 9 3 4 8 8 4 8 4 5 1 6 5 7 8 3 + 10 5 7 6 6 7 9 10 9 8 2 6 3 6 4 10 2 9 9 5 + 506 0 0 197 0 0 772 818 660 0 801 420 0 593 0 249 0 0 292 0 + 469 122 0 719 42 152 0 824 241 0 0 966 0 0 0 0 430 0 814 0 + diff --git a/examples/cpp/variable_intervals_sat_test.bintest b/examples/cpp/variable_intervals_sat_test.bintest new file mode 100644 index 00000000000..51d3dbf2eb0 --- /dev/null +++ 
b/examples/cpp/variable_intervals_sat_test.bintest @@ -0,0 +1 @@ +RUN: $(variable_intervals_sat) diff --git a/examples/cpp/weighted_tardiness_sat_test.bintest b/examples/cpp/weighted_tardiness_sat_test.bintest new file mode 100644 index 00000000000..5c95404e752 --- /dev/null +++ b/examples/cpp/weighted_tardiness_sat_test.bintest @@ -0,0 +1 @@ +RUN: $(weighted_tardiness_sat) --input $(wt40.txt) diff --git a/examples/python/BUILD.bazel b/examples/python/BUILD.bazel index 03313e71d66..3798011fd23 100644 --- a/examples/python/BUILD.bazel +++ b/examples/python/BUILD.bazel @@ -13,7 +13,7 @@ load("@pip_deps//:requirements.bzl", "requirement") load("@rules_python//python:py_binary.bzl", "py_binary") -load("//bazel:run_binary_test.bzl", "run_binary_test") +load("//tools/testing:bintest.bzl", "bintest") package(default_visibility = ["//visibility:public"]) @@ -39,10 +39,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "assignment_with_constraints_sat_py_test", size = "medium", - binary = ":assignment_with_constraints_sat_py3", + srcs = [":assignment_with_constraints_sat_py_test.bintest"], + named_data = {"assignment_with_constraints_sat_py3": ":assignment_with_constraints_sat_py3"}, ) py_binary( @@ -55,10 +56,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "balance_group_sat_py_test", size = "medium", - binary = ":balance_group_sat_py3", + srcs = [":balance_group_sat_py_test.bintest"], + named_data = {"balance_group_sat_py3": ":balance_group_sat_py3"}, ) py_binary( @@ -71,11 +73,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "bus_driver_scheduling_sat_py_test", size = "medium", - args = ["--params=max_time_in_seconds:40"], - binary = ":bus_driver_scheduling_sat_py3", + srcs = [":bus_driver_scheduling_sat_py_test.bintest"], + named_data = {"bus_driver_scheduling_sat_py3": ":bus_driver_scheduling_sat_py3"}, ) py_binary( @@ -88,10 +90,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "car_sequencing_optimization_sat_py_test", size = "small", - 
binary = ":car_sequencing_optimization_sat_py3", + srcs = [":car_sequencing_optimization_sat_py_test.bintest"], + named_data = {"car_sequencing_optimization_sat_py3": ":car_sequencing_optimization_sat_py3"}, ) py_binary( @@ -104,10 +107,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "chemical_balance_sat_py_test", size = "medium", - binary = ":chemical_balance_sat_py3", + srcs = [":chemical_balance_sat_py_test.bintest"], + named_data = {"chemical_balance_sat_py3": ":chemical_balance_sat_py3"}, ) py_binary( @@ -120,10 +124,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "clustering_sat_py_test", size = "medium", - binary = ":clustering_sat_py3", + srcs = [":clustering_sat_py_test.bintest"], + named_data = {"clustering_sat_py3": ":clustering_sat_py3"}, ) py_binary( @@ -136,10 +141,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "cover_rectangle_sat_py_test", size = "medium", - binary = ":cover_rectangle_sat_py3", + srcs = [":cover_rectangle_sat_py_test.bintest"], + named_data = {"cover_rectangle_sat_py3": ":cover_rectangle_sat_py3"}, ) py_binary( @@ -149,9 +155,10 @@ py_binary( deps = ["//ortools/sat/python:cp_model"], ) -run_binary_test( +bintest( name = "flexible_job_shop_sat_py_test", - binary = ":flexible_job_shop_sat_py3", + srcs = [":flexible_job_shop_sat_py_test.bintest"], + named_data = {"flexible_job_shop_sat_py3": ":flexible_job_shop_sat_py3"}, ) py_binary( @@ -165,9 +172,10 @@ py_binary( ], ) -run_binary_test( +bintest( name = "gate_scheduling_sat_py_test", - binary = ":gate_scheduling_sat_py3", + srcs = [":gate_scheduling_sat_py_test.bintest"], + named_data = {"gate_scheduling_sat_py3": ":gate_scheduling_sat_py3"}, ) py_binary( @@ -180,10 +188,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "golomb_sat_py_test", size = "medium", - binary = ":golomb_sat_py3", + srcs = [":golomb_sat_py_test.bintest"], + named_data = {"golomb_sat_py3": ":golomb_sat_py3"}, ) py_binary( @@ -197,9 +206,10 @@ py_binary( ], ) -run_binary_test( 
+bintest( name = "hidato_sat_py_test", - binary = ":hidato_sat_py3", + srcs = [":hidato_sat_py_test.bintest"], + named_data = {"hidato_sat_py3": ":hidato_sat_py3"}, ) py_binary( @@ -209,9 +219,10 @@ py_binary( deps = ["//ortools/sat/python:cp_model"], ) -run_binary_test( +bintest( name = "jobshop_ft06_distance_sat_py_test", - binary = ":jobshop_ft06_distance_sat_py3", + srcs = [":jobshop_ft06_distance_sat_py_test.bintest"], + named_data = {"jobshop_ft06_distance_sat_py3": ":jobshop_ft06_distance_sat_py3"}, ) py_binary( @@ -224,9 +235,10 @@ py_binary( ], ) -run_binary_test( +bintest( name = "jobshop_ft06_sat_py_test", - binary = ":jobshop_ft06_sat_py3", + srcs = [":jobshop_ft06_sat_py_test.bintest"], + named_data = {"jobshop_ft06_sat_py3": ":jobshop_ft06_sat_py3"}, ) py_binary( @@ -239,10 +251,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "jobshop_with_maintenance_sat_py_test", size = "medium", - binary = ":jobshop_with_maintenance_sat_py3", + srcs = [":jobshop_with_maintenance_sat_py_test.bintest"], + named_data = {"jobshop_with_maintenance_sat_py3": ":jobshop_with_maintenance_sat_py3"}, ) py_binary( @@ -257,10 +270,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "knapsack_2d_sat_py_test", size = "medium", - binary = ":knapsack_2d_sat_py3", + srcs = [":knapsack_2d_sat_py_test.bintest"], + named_data = {"knapsack_2d_sat_py3": ":knapsack_2d_sat_py3"}, ) py_binary( @@ -273,12 +287,13 @@ py_binary( ], ) -run_binary_test( +bintest( name = "line_balancing_sat_salbp_20_1_py_test", - args = ["--input=$(rootpath //examples/python/testdata:salbp_20_1.alb)"], - binary = ":line_balancing_sat_py3", - data = ["//examples/python/testdata:salbp_20_1.alb"], - grep_lines = ["objective: 3"], + srcs = [":line_balancing_sat_salbp_20_1_py_test.bintest"], + named_data = { + "line_balancing_sat_py3": ":line_balancing_sat_py3", + "salbp_20_1.alb": "//examples/python/testdata:salbp_20_1.alb", + }, ) py_binary( @@ -291,10 +306,11 @@ py_binary( ], ) -run_binary_test( 
+bintest( name = "maximize_combinations_sat_py_test", size = "medium", - binary = ":maximize_combinations_sat_py3", + srcs = [":maximize_combinations_sat_py_test.bintest"], + named_data = {"maximize_combinations_sat_py3": ":maximize_combinations_sat_py3"}, ) py_binary( @@ -307,9 +323,10 @@ py_binary( ], ) -run_binary_test( +bintest( name = "maze_escape_sat_py_test", - binary = ":maze_escape_sat_py3", + srcs = [":maze_escape_sat_py_test.bintest"], + named_data = {"maze_escape_sat_py3": ":maze_escape_sat_py3"}, ) py_binary( @@ -322,9 +339,10 @@ py_binary( ], ) -run_binary_test( +bintest( name = "music_playlist_sat_py_test", - binary = ":music_playlist_sat_py3", + srcs = [":music_playlist_sat_py_test.bintest"], + named_data = {"music_playlist_sat_py3": ":music_playlist_sat_py3"}, ) py_binary( @@ -337,10 +355,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "no_wait_baking_scheduling_sat_py_test", size = "medium", - binary = ":no_wait_baking_scheduling_sat_py3", + srcs = [":no_wait_baking_scheduling_sat_py_test.bintest"], + named_data = {"no_wait_baking_scheduling_sat_py3": ":no_wait_baking_scheduling_sat_py3"}, ) py_binary( @@ -353,10 +372,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "pell_equation_sat_py_test", size = "medium", - binary = ":pell_equation_sat_py3", + srcs = [":pell_equation_sat_py_test.bintest"], + named_data = {"pell_equation_sat_py3": ":pell_equation_sat_py3"}, ) py_binary( @@ -369,10 +389,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "pentominoes_sat_py_test", size = "medium", - binary = ":pentominoes_sat_py3", + srcs = [":pentominoes_sat_py_test.bintest"], + named_data = {"pentominoes_sat_py3": ":pentominoes_sat_py3"}, ) py_binary( @@ -385,10 +406,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "prize_collecting_tsp_sat_py_test", size = "medium", - binary = ":prize_collecting_tsp_sat_py3", + srcs = [":prize_collecting_tsp_sat_py_test.bintest"], + named_data = {"prize_collecting_tsp_sat_py3": 
":prize_collecting_tsp_sat_py3"}, ) py_binary( @@ -401,10 +423,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "prize_collecting_vrp_sat_py_test", size = "medium", - binary = ":prize_collecting_vrp_sat_py3", + srcs = [":prize_collecting_vrp_sat_py_test.bintest"], + named_data = {"prize_collecting_vrp_sat_py3": ":prize_collecting_vrp_sat_py3"}, ) py_binary( @@ -417,26 +440,29 @@ py_binary( ], ) -run_binary_test( +bintest( name = "qubo_sat_py_test", size = "medium", - binary = ":qubo_sat_py3", + srcs = [":qubo_sat_py_test.bintest"], + named_data = {"qubo_sat_py3": ":qubo_sat_py3"}, ) -run_binary_test( +bintest( name = "rcpsp_sat_c1510_1_py_test", - args = ["--input=$(rootpath //ortools/scheduling/testdata:c1510_1.mm.txt)"], - binary = ":rcpsp_sat_py3", - data = ["//ortools/scheduling/testdata:c1510_1.mm.txt"], - grep_lines = ["objective: 21"], + srcs = [":rcpsp_sat_c1510_1_py_test.bintest"], + named_data = { + "rcpsp_sat_py3": ":rcpsp_sat_py3", + "c1510_1.mm.txt": "//ortools/scheduling/testdata:c1510_1.mm.txt", + }, ) -run_binary_test( +bintest( name = "rcpsp_sat_j301_1_py_test", - args = ["--input=$(rootpath //ortools/scheduling/testdata:j301_1.sm)"], - binary = ":rcpsp_sat_py3", - data = ["//ortools/scheduling/testdata:j301_1.sm"], - grep_lines = ["objective: 43"], + srcs = [":rcpsp_sat_j301_1_py_test.bintest"], + named_data = { + "rcpsp_sat_py3": ":rcpsp_sat_py3", + "j301_1.sm": "//ortools/scheduling/testdata:j301_1.sm", + }, ) py_binary( @@ -461,10 +487,10 @@ py_binary( ], ) -run_binary_test( +bintest( name = "shift_scheduling_sat_py_test", - args = ["--params=max_time_in_seconds:10"], - binary = ":shift_scheduling_sat_py3", + srcs = [":shift_scheduling_sat_py_test.bintest"], + named_data = {"shift_scheduling_sat_py3": ":shift_scheduling_sat_py3"}, ) py_binary( @@ -477,10 +503,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "single_machine_scheduling_with_setup_release_due_dates_sat_py_test", size = "medium", - binary = 
":single_machine_scheduling_with_setup_release_due_dates_sat_py3", + srcs = [":single_machine_scheduling_with_setup_release_due_dates_sat_py_test.bintest"], + named_data = {"single_machine_scheduling_with_setup_release_due_dates_sat_py3": ":single_machine_scheduling_with_setup_release_due_dates_sat_py3"}, ) py_binary( @@ -493,9 +520,10 @@ py_binary( ], ) -run_binary_test( +bintest( name = "spread_robots_sat_py_test", - binary = ":spread_robots_sat_py3", + srcs = [":spread_robots_sat_py_test.bintest"], + named_data = {"spread_robots_sat_py3": ":spread_robots_sat_py3"}, ) py_binary( @@ -508,9 +536,10 @@ py_binary( ], ) -run_binary_test( +bintest( name = "steel_mill_slab_sat_py_test", - binary = ":steel_mill_slab_sat_py3", + srcs = [":steel_mill_slab_sat_py_test.bintest"], + named_data = {"steel_mill_slab_sat_py3": ":steel_mill_slab_sat_py3"}, ) py_binary( @@ -520,9 +549,10 @@ py_binary( deps = ["//ortools/sat/python:cp_model"], ) -run_binary_test( +bintest( name = "sudoku_sat_py_test", - binary = ":sudoku_sat_py3", + srcs = [":sudoku_sat_py_test.bintest"], + named_data = {"sudoku_sat_py3": ":sudoku_sat_py3"}, ) py_binary( @@ -535,10 +565,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "task_allocation_sat_py_test", size = "medium", - binary = ":task_allocation_sat_py3", + srcs = [":task_allocation_sat_py_test.bintest"], + named_data = {"task_allocation_sat_py3": ":task_allocation_sat_py3"}, ) py_binary( @@ -551,10 +582,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "tasks_and_workers_assignment_sat_py_test", size = "medium", - binary = ":tasks_and_workers_assignment_sat_py3", + srcs = [":tasks_and_workers_assignment_sat_py_test.bintest"], + named_data = {"tasks_and_workers_assignment_sat_py3": ":tasks_and_workers_assignment_sat_py3"}, ) py_binary( @@ -568,10 +600,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "test_scheduling_sat_py_test", size = "medium", - binary = ":test_scheduling_sat_py3", + srcs = 
[":test_scheduling_sat_py_test.bintest"], + named_data = {"test_scheduling_sat_py3": ":test_scheduling_sat_py3"}, ) py_binary( @@ -581,10 +614,11 @@ py_binary( deps = ["//ortools/sat/python:cp_model"], ) -run_binary_test( +bintest( name = "tsp_sat_py_test", size = "medium", - binary = ":tsp_sat_py3", + srcs = [":tsp_sat_py_test.bintest"], + named_data = {"tsp_sat_py3": ":tsp_sat_py3"}, ) py_binary( @@ -597,10 +631,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "vendor_scheduling_sat_py_test", size = "medium", - binary = ":vendor_scheduling_sat_py3", + srcs = [":vendor_scheduling_sat_py_test.bintest"], + named_data = {"vendor_scheduling_sat_py3": ":vendor_scheduling_sat_py3"}, ) py_binary( @@ -613,10 +648,11 @@ py_binary( ], ) -run_binary_test( +bintest( name = "wedding_optimal_chart_sat_py_test", size = "medium", - binary = ":wedding_optimal_chart_sat_py3", + srcs = [":wedding_optimal_chart_sat_py_test.bintest"], + named_data = {"wedding_optimal_chart_sat_py3": ":wedding_optimal_chart_sat_py3"}, ) py_binary( @@ -626,7 +662,8 @@ py_binary( deps = ["//ortools/sat/python:cp_model"], ) -run_binary_test( +bintest( name = "zebra_sat_py_test", - binary = ":zebra_sat_py3", + srcs = [":zebra_sat_py_test.bintest"], + named_data = {"zebra_sat_py3": ":zebra_sat_py3"}, ) diff --git a/examples/python/CMakeBazel.txt b/examples/python/CMakeBazel.txt new file mode 100644 index 00000000000..5aacc949071 --- /dev/null +++ b/examples/python/CMakeBazel.txt @@ -0,0 +1,434 @@ +# This file is auto generated by bazel2cmake.py from examples/python/BUILD.bazel +# Don't edit manually, your changes will be lost. 
+# You can update this file by running: +# python3 tools/build/bazel2cmake.py examples/python/BUILD.bazel + + +add_python_binary( + NAME bzl_py_example_arc_flow_cutting_stock_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/arc_flow_cutting_stock_sat.py +) + +add_python_binary( + NAME bzl_py_example_assignment_with_constraints_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/assignment_with_constraints_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_assignment_with_constraints_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/assignment_with_constraints_sat_py_test.bintest + ENVIRONMENT BINTEST_assignment_with_constraints_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_balance_group_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/balance_group_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_balance_group_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/balance_group_sat_py_test.bintest + ENVIRONMENT BINTEST_balance_group_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_bus_driver_scheduling_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/bus_driver_scheduling_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_bus_driver_scheduling_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/bus_driver_scheduling_sat_py_test.bintest + ENVIRONMENT BINTEST_bus_driver_scheduling_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_car_sequencing_optimization_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/car_sequencing_optimization_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_car_sequencing_optimization_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/car_sequencing_optimization_sat_py_test.bintest + ENVIRONMENT BINTEST_car_sequencing_optimization_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_chemical_balance_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/chemical_balance_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_chemical_balance_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/chemical_balance_sat_py_test.bintest + ENVIRONMENT 
BINTEST_chemical_balance_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_clustering_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/clustering_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_clustering_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/clustering_sat_py_test.bintest + ENVIRONMENT BINTEST_clustering_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_cover_rectangle_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/cover_rectangle_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_cover_rectangle_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/cover_rectangle_sat_py_test.bintest + ENVIRONMENT BINTEST_cover_rectangle_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_flexible_job_shop_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/flexible_job_shop_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_flexible_job_shop_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/flexible_job_shop_sat_py_test.bintest + ENVIRONMENT BINTEST_flexible_job_shop_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_gate_scheduling_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/gate_scheduling_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_gate_scheduling_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/gate_scheduling_sat_py_test.bintest + ENVIRONMENT BINTEST_gate_scheduling_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_golomb_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/golomb_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_golomb_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/golomb_sat_py_test.bintest + ENVIRONMENT BINTEST_golomb_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_hidato_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/hidato_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_hidato_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/hidato_sat_py_test.bintest + ENVIRONMENT BINTEST_hidato_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_jobshop_ft06_distance_sat_py3 + FILE 
${CMAKE_CURRENT_SOURCE_DIR}/jobshop_ft06_distance_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_jobshop_ft06_distance_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/jobshop_ft06_distance_sat_py_test.bintest + ENVIRONMENT BINTEST_jobshop_ft06_distance_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_jobshop_ft06_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/jobshop_ft06_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_jobshop_ft06_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/jobshop_ft06_sat_py_test.bintest + ENVIRONMENT BINTEST_jobshop_ft06_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_jobshop_with_maintenance_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/jobshop_with_maintenance_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_jobshop_with_maintenance_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/jobshop_with_maintenance_sat_py_test.bintest + ENVIRONMENT BINTEST_jobshop_with_maintenance_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_knapsack_2d_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/knapsack_2d_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_knapsack_2d_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/knapsack_2d_sat_py_test.bintest + ENVIRONMENT BINTEST_knapsack_2d_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_line_balancing_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/line_balancing_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_line_balancing_sat_salbp_20_1_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/line_balancing_sat_salbp_20_1_py_test.bintest + ENVIRONMENT BINTEST_line_balancing_sat_py3=$ BINTEST_salbp_20_1.alb=${CMAKE_SOURCE_DIR}/examples/python/testdata/salbp_20_1.alb +) + +add_python_binary( + NAME bzl_py_example_maximize_combinations_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/maximize_combinations_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_maximize_combinations_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/maximize_combinations_sat_py_test.bintest + 
ENVIRONMENT BINTEST_maximize_combinations_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_maze_escape_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/maze_escape_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_maze_escape_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/maze_escape_sat_py_test.bintest + ENVIRONMENT BINTEST_maze_escape_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_music_playlist_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/music_playlist_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_music_playlist_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/music_playlist_sat_py_test.bintest + ENVIRONMENT BINTEST_music_playlist_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_no_wait_baking_scheduling_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/no_wait_baking_scheduling_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_no_wait_baking_scheduling_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/no_wait_baking_scheduling_sat_py_test.bintest + ENVIRONMENT BINTEST_no_wait_baking_scheduling_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_pell_equation_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/pell_equation_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_pell_equation_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/pell_equation_sat_py_test.bintest + ENVIRONMENT BINTEST_pell_equation_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_pentominoes_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/pentominoes_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_pentominoes_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/pentominoes_sat_py_test.bintest + ENVIRONMENT BINTEST_pentominoes_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_prize_collecting_tsp_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/prize_collecting_tsp_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_prize_collecting_tsp_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/prize_collecting_tsp_sat_py_test.bintest + ENVIRONMENT 
BINTEST_prize_collecting_tsp_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_prize_collecting_vrp_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/prize_collecting_vrp_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_prize_collecting_vrp_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/prize_collecting_vrp_sat_py_test.bintest + ENVIRONMENT BINTEST_prize_collecting_vrp_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_qubo_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/qubo_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_qubo_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/qubo_sat_py_test.bintest + ENVIRONMENT BINTEST_qubo_sat_py3=$ +) + +ortools_cxx_bintest( + NAME bzl_py_example_rcpsp_sat_c1510_1_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/rcpsp_sat_c1510_1_py_test.bintest + ENVIRONMENT BINTEST_rcpsp_sat_py3=$ BINTEST_c1510_1.mm.txt=${CMAKE_SOURCE_DIR}/ortools/scheduling/testdata/c1510_1.mm.txt +) + +ortools_cxx_bintest( + NAME bzl_py_example_rcpsp_sat_j301_1_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/rcpsp_sat_j301_1_py_test.bintest + ENVIRONMENT BINTEST_rcpsp_sat_py3=$ BINTEST_j301_1.sm=${CMAKE_SOURCE_DIR}/ortools/scheduling/testdata/j301_1.sm +) + +add_python_binary( + NAME bzl_py_example_rcpsp_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/rcpsp_sat.py +) + +add_python_binary( + NAME bzl_py_example_shift_scheduling_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/shift_scheduling_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_shift_scheduling_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/shift_scheduling_sat_py_test.bintest + ENVIRONMENT BINTEST_shift_scheduling_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_single_machine_scheduling_with_setup_release_due_dates_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/single_machine_scheduling_with_setup_release_due_dates_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_single_machine_scheduling_with_setup_release_due_dates_sat_py_test + SCRIPT 
${CMAKE_CURRENT_SOURCE_DIR}/single_machine_scheduling_with_setup_release_due_dates_sat_py_test.bintest + ENVIRONMENT BINTEST_single_machine_scheduling_with_setup_release_due_dates_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_spread_robots_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/spread_robots_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_spread_robots_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/spread_robots_sat_py_test.bintest + ENVIRONMENT BINTEST_spread_robots_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_steel_mill_slab_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/steel_mill_slab_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_steel_mill_slab_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/steel_mill_slab_sat_py_test.bintest + ENVIRONMENT BINTEST_steel_mill_slab_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_sudoku_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/sudoku_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_sudoku_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/sudoku_sat_py_test.bintest + ENVIRONMENT BINTEST_sudoku_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_task_allocation_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/task_allocation_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_task_allocation_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/task_allocation_sat_py_test.bintest + ENVIRONMENT BINTEST_task_allocation_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_tasks_and_workers_assignment_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/tasks_and_workers_assignment_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_tasks_and_workers_assignment_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/tasks_and_workers_assignment_sat_py_test.bintest + ENVIRONMENT BINTEST_tasks_and_workers_assignment_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_test_scheduling_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/test_scheduling_sat.py +) + +ortools_cxx_bintest( + NAME 
bzl_py_example_test_scheduling_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/test_scheduling_sat_py_test.bintest + ENVIRONMENT BINTEST_test_scheduling_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_tsp_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/tsp_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_tsp_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/tsp_sat_py_test.bintest + ENVIRONMENT BINTEST_tsp_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_vendor_scheduling_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/vendor_scheduling_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_vendor_scheduling_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/vendor_scheduling_sat_py_test.bintest + ENVIRONMENT BINTEST_vendor_scheduling_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_wedding_optimal_chart_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/wedding_optimal_chart_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_wedding_optimal_chart_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/wedding_optimal_chart_sat_py_test.bintest + ENVIRONMENT BINTEST_wedding_optimal_chart_sat_py3=$ +) + +add_python_binary( + NAME bzl_py_example_zebra_sat_py3 + FILE ${CMAKE_CURRENT_SOURCE_DIR}/zebra_sat.py +) + +ortools_cxx_bintest( + NAME bzl_py_example_zebra_sat_py_test + SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/zebra_sat_py_test.bintest + ENVIRONMENT BINTEST_zebra_sat_py3=$ +) \ No newline at end of file diff --git a/examples/python/CMakeLists.txt b/examples/python/CMakeLists.txt index 85f6eff8165..14c812cd906 100644 --- a/examples/python/CMakeLists.txt +++ b/examples/python/CMakeLists.txt @@ -15,12 +15,6 @@ if(NOT BUILD_PYTHON_EXAMPLES) return() endif() -file(GLOB PYTHON_SRCS "*.py") -# Remove too long examples -list(FILTER PYTHON_SRCS EXCLUDE REGEX ".*/line_balancing_sat.py") # need input file -list(FILTER PYTHON_SRCS EXCLUDE REGEX ".*/bus_driver_scheduling_sat.py") # too long -list(FILTER PYTHON_SRCS EXCLUDE REGEX ".*/cvrptw_plot.py") # depend on numpy - 
-foreach(FILE_NAME IN LISTS PYTHON_SRCS) - add_python_example(FILE_NAME ${FILE_NAME}) -endforeach() +if(NOT WIN32) +include("CMakeBazel.txt") +endif() diff --git a/examples/python/appointments_py_test.bintest b/examples/python/appointments_py_test.bintest new file mode 100644 index 00000000000..367623a5cf7 --- /dev/null +++ b/examples/python/appointments_py_test.bintest @@ -0,0 +1 @@ +RUN: $(appointments_py3) diff --git a/examples/python/assignment_with_constraints_sat_py_test.bintest b/examples/python/assignment_with_constraints_sat_py_test.bintest new file mode 100644 index 00000000000..340ba4e3099 --- /dev/null +++ b/examples/python/assignment_with_constraints_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(assignment_with_constraints_sat_py3) diff --git a/examples/python/balance_group_sat_py_test.bintest b/examples/python/balance_group_sat_py_test.bintest new file mode 100644 index 00000000000..97692558c5e --- /dev/null +++ b/examples/python/balance_group_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(balance_group_sat_py3) diff --git a/examples/python/bus_driver_scheduling_sat_py_test.bintest b/examples/python/bus_driver_scheduling_sat_py_test.bintest new file mode 100644 index 00000000000..229e11c4102 --- /dev/null +++ b/examples/python/bus_driver_scheduling_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(bus_driver_scheduling_sat_py3) --params=max_time_in_seconds:40 diff --git a/examples/python/car_sequencing_optimization_sat_py_test.bintest b/examples/python/car_sequencing_optimization_sat_py_test.bintest new file mode 100644 index 00000000000..6a95ef22b14 --- /dev/null +++ b/examples/python/car_sequencing_optimization_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(car_sequencing_optimization_sat_py3) diff --git a/examples/python/chemical_balance_sat_py_test.bintest b/examples/python/chemical_balance_sat_py_test.bintest new file mode 100644 index 00000000000..470ce4c8558 --- /dev/null +++ b/examples/python/chemical_balance_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: 
$(chemical_balance_sat_py3) diff --git a/examples/python/clustering_sat_py_test.bintest b/examples/python/clustering_sat_py_test.bintest new file mode 100644 index 00000000000..8f29b62d31d --- /dev/null +++ b/examples/python/clustering_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(clustering_sat_py3) diff --git a/examples/python/cover_rectangle_sat_py_test.bintest b/examples/python/cover_rectangle_sat_py_test.bintest new file mode 100644 index 00000000000..0022f144797 --- /dev/null +++ b/examples/python/cover_rectangle_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(cover_rectangle_sat_py3) diff --git a/examples/python/cryptarithm_sat_py_test.bintest b/examples/python/cryptarithm_sat_py_test.bintest new file mode 100644 index 00000000000..29accdda2b1 --- /dev/null +++ b/examples/python/cryptarithm_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(cryptarithm_sat_py3) diff --git a/examples/python/cvrptw_plot.py b/examples/python/cvrptw_plot.py deleted file mode 100644 index 074cd5ea346..00000000000 --- a/examples/python/cvrptw_plot.py +++ /dev/null @@ -1,753 +0,0 @@ -# This Python file uses the following encoding: utf-8 -# Copyright 2015 Tin Arm Engineering AB -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Capacitated Vehicle Routing Problem with Time Windows (and optional orders). - - This is a sample using the routing library python wrapper to solve a - CVRPTW problem. - A description of the problem can be found here: - http://en.wikipedia.org/wiki/Vehicle_routing_problem. 
- The variant which is tackled by this model includes a capacity dimension, - time windows and optional orders, with a penalty cost if orders are not - performed. - To help explore the problem, two classes are provided Customers() and - Vehicles(): used to randomly locate orders and depots, and to randomly - generate demands, time-window constraints and vehicles. - Distances are computed using the Great Circle distances. Distances are in km - and times in seconds. - - A function for the displaying of the vehicle plan - display_vehicle_output - - The optimization engine uses local search to improve solutions, first - solutions being generated using a cheapest addition heuristic. - Numpy and Matplotlib are required for the problem creation and display. - -""" -import os -import numpy as np -from matplotlib import pyplot as plt -from collections import namedtuple -from ortools.constraint_solver import pywrapcp -from ortools.constraint_solver import routing_enums_pb2 -from datetime import datetime, timedelta - - -class Customers(): - """ - A class that generates and holds customers information. - - Randomly normally distribute a number of customers and locations within - a region described by a rectangle. Generate a random demand for each - customer. Generate a random time window for each customer. - May either be initiated with the extents, as a dictionary describing - two corners of a rectangle in latitude and longitude OR as a center - point (lat, lon), and box_size in km. The default arguments are for a - 10 x 10 km square centered in Sheffield). - - Args: extents (Optional[Dict]): A dictionary describing a rectangle in - latitude and longitude with the keys 'llcrnrlat', 'llcrnrlon' & - 'urcrnrlat' & 'urcrnrlat' center (Optional(Tuple): A tuple of - (latitude, longitude) describing the centre of the rectangle. box_size - (Optional float: The length in km of the box's sides. 
num_stops (int): - The number of customers, including the depots that are placed normally - distributed in the rectangle. min_demand (int): Lower limit on the - randomly generated demand at each customer. max_demand (int): Upper - limit on the randomly generated demand at each customer. - min_tw: shortest random time window for a customer, in hours. - max_tw: longest random time window for a customer, in hours. - Examples: To place 100 customers randomly within 100 km x 100 km - rectangle, centered in the default location, with a random demand of - between 5 and 10 units: >>> customers = Customers(num_stops=100, - box_size=100, ... min_demand=5, max_demand=10) - alternatively, to place 75 customers in the same area with default - arguments for demand: >>> extents = {'urcrnrlon': 0.03403, 'llcrnrlon': - -2.98325, ... 'urcrnrlat': 54.28127, 'llcrnrlat': 52.48150} >>> - customers = Customers(num_stops=75, extents=extents) - """ - - def __init__(self, - extents=None, - center=(53.381393, -1.474611), - box_size=10, - num_stops=100, - min_demand=0, - max_demand=25, - min_tw=1, - max_tw=5): - self.number = num_stops #: The number of customers and depots - #: Location, a named tuple for locations. - Location = namedtuple('Location', ['lat', 'lon']) - if extents is not None: - self.extents = extents #: The lower left and upper right points - #: Location[lat,lon]: the centre point of the area. - self.center = Location( - extents['urcrnrlat'] - 0.5 * - (extents['urcrnrlat'] - extents['llcrnrlat']), - extents['urcrnrlon'] - 0.5 * - (extents['urcrnrlon'] - extents['llcrnrlon'])) - else: - #: Location[lat,lon]: the centre point of the area. 
- (clat, clon) = self.center = Location(center[0], center[1]) - rad_earth = 6367 # km - circ_earth = np.pi * rad_earth - #: The lower left and upper right points - self.extents = { - 'llcrnrlon': (clon - 180 * box_size / - (circ_earth * np.cos(np.deg2rad(clat)))), - 'llcrnrlat': - clat - 180 * box_size / circ_earth, - 'urcrnrlon': (clon + 180 * box_size / - (circ_earth * np.cos(np.deg2rad(clat)))), - 'urcrnrlat': - clat + 180 * box_size / circ_earth - } - # The 'name' of the stop, indexed from 0 to num_stops-1 - stops = np.array(range(0, num_stops)) - # normaly distributed random distribution of stops within the box - stdv = 6 # the number of standard deviations 99.9% will be within +-3 - lats = (self.extents['llcrnrlat'] + np.random.randn(num_stops) * - (self.extents['urcrnrlat'] - self.extents['llcrnrlat']) / stdv) - lons = (self.extents['llcrnrlon'] + np.random.randn(num_stops) * - (self.extents['urcrnrlon'] - self.extents['llcrnrlon']) / stdv) - # uniformly distributed integer demands. - demands = np.random.randint(min_demand, max_demand, num_stops) - - self.time_horizon = 24 * 60**2 # A 24 hour period. - - # The customers demand min_tw to max_tw hour time window for each - # delivery - time_windows = np.random.randint(min_tw * 3600, max_tw * 3600, - num_stops) - # The last time a delivery window can start - latest_time = self.time_horizon - time_windows - start_times = [None for o in time_windows] - stop_times = [None for o in time_windows] - # Make random timedeltas, nominally from the start of the day. 
- for idx in range(self.number): - stime = int(np.random.randint(0, latest_time[idx])) - start_times[idx] = timedelta(seconds=stime) - stop_times[idx] = ( - start_times[idx] + timedelta(seconds=int(time_windows[idx]))) - # A named tuple for the customer - Customer = namedtuple( - 'Customer', - [ - 'index', # the index of the stop - 'demand', # the demand for the stop - 'lat', # the latitude of the stop - 'lon', # the longitude of the stop - 'tw_open', # timedelta window open - 'tw_close' - ]) # timedelta window cls - - self.customers = [ - Customer(idx, dem, lat, lon, tw_open, tw_close) - for idx, dem, lat, lon, tw_open, tw_close in zip( - stops, demands, lats, lons, start_times, stop_times) - ] - - # The number of seconds needed to 'unload' 1 unit of goods. - self.service_time_per_dem = 300 # seconds - - def set_manager(self, manager): - self.manager = manager - - def central_start_node(self, invert=False): - """ - Return a random starting node, with probability weighted by distance - from the centre of the extents, so that a central starting node is - likely. - - Args: invert (Optional bool): When True, a peripheral starting node is - most likely. - - Returns: - int: a node index. - - Examples: - >>> customers.central_start_node(invert=True) - 42 - """ - num_nodes = len(self.customers) - dist = np.empty((num_nodes, 1)) - for idx_to in range(num_nodes): - dist[idx_to] = self._haversine(self.center.lon, self.center.lat, - self.customers[idx_to].lon, - self.customers[idx_to].lat) - furthest = np.max(dist) - - if invert: - prob = dist * 1.0 / sum(dist) - else: - prob = (furthest - dist * 1.0) / sum(furthest - dist) - indexes = np.array([range(num_nodes)]) - start_node = np.random.choice( - indexes.flatten(), size=1, replace=True, p=prob.flatten()) - return start_node[0] - - def make_distance_mat(self, method='haversine'): - """ - Return a distance matrix and make it a member of Customer, using the - method given in the call. 
Currently only Haversine (GC distance) is - implemented, but Manhattan, or using a maps API could be added here. - Raises an AssertionError for all other methods. - - Args: method (Optional[str]): method of distance calculation to use. The - Haversine formula is the only method implemented. - - Returns: - Numpy array of node to node distances. - - Examples: - >>> dist_mat = customers.make_distance_mat(method='haversine') - >>> dist_mat = customers.make_distance_mat(method='manhattan') - AssertionError - """ - self.distmat = np.zeros((self.number, self.number)) - methods = {'haversine': self._haversine} - assert (method in methods) - for frm_idx in range(self.number): - for to_idx in range(self.number): - if frm_idx != to_idx: - frm_c = self.customers[frm_idx] - to_c = self.customers[to_idx] - self.distmat[frm_idx, to_idx] = self._haversine( - frm_c.lon, frm_c.lat, to_c.lon, to_c.lat) - return (self.distmat) - - def _haversine(self, lon1, lat1, lon2, lat2): - """ - Calculate the great circle distance between two points - on the earth specified in decimal degrees of latitude and longitude. - https://en.wikipedia.org/wiki/Haversine_formula - - Args: - lon1: longitude of pt 1, - lat1: latitude of pt 1, - lon2: longitude of pt 2, - lat2: latitude of pt 2 - - Returns: - the distace in km between pt1 and pt2 - """ - # convert decimal degrees to radians - lon1, lat1, lon2, lat2 = map(np.radians, [lon1, lat1, lon2, lat2]) - - # haversine formula - dlon = lon2 - lon1 - dlat = lat2 - lat1 - a = (np.sin(dlat / 2)**2 + - np.cos(lat1) * np.cos(lat2) * np.sin(dlon / 2)**2) - c = 2 * np.arcsin(np.sqrt(a)) - - # 6367 km is the radius of the Earth - km = 6367 * c - return km - - def get_total_demand(self): - """ - Return the total demand of all customers. - """ - return (sum([c.demand for c in self.customers])) - - def return_dist_callback(self, **kwargs): - """ - Return a callback function for the distance matrix. 
- - Args: **kwargs: Arbitrary keyword arguments passed on to - make_distance_mat() - - Returns: - function: dist_return(a,b) A function that takes the 'from' node - index and the 'to' node index and returns the distance in km. - """ - self.make_distance_mat(**kwargs) - - def dist_return(from_index, to_index): - # Convert from routing variable Index to distance matrix NodeIndex. - from_node = self.manager.IndexToNode(from_index) - to_node = self.manager.IndexToNode(to_index) - return (self.distmat[from_node][to_node]) - - return dist_return - - def return_dem_callback(self): - """ - Return a callback function that gives the demands. - - Returns: - function: dem_return(a) A function that takes the 'from' node - index and returns the distance in km. - """ - - def dem_return(from_index): - # Convert from routing variable Index to distance matrix NodeIndex. - from_node = self.manager.IndexToNode(from_index) - return (self.customers[from_node].demand) - - return dem_return - - def zero_depot_demands(self, depot): - """ - Zero out the demands and time windows of depot. The Depots do not have - demands or time windows so this function clears them. - - Args: depot (int): index of the stop to modify into a depot. - Examples: >>> customers.zero_depot_demands(5) >>> - customers.customers[5].demand == 0 True - """ - start_depot = self.customers[depot] - self.customers[depot] = start_depot._replace( - demand=0, tw_open=None, tw_close=None) - - def make_service_time_call_callback(self): - """ - Return a callback function that provides the time spent servicing the - customer. Here is it proportional to the demand given by - self.service_time_per_dem, default 300 seconds per unit demand. 
- - Returns: - function [dem_return(a, b)]: A function that takes the from/a node - index and the to/b node index and returns the service time at a - - """ - - def service_time_return(a, b): - return (self.customers[a].demand * self.service_time_per_dem) - - return service_time_return - - def make_transit_time_callback(self, speed_kmph=10): - """ - Creates a callback function for transit time. Assuming an average - speed of speed_kmph - Args: - speed_kmph: the average speed in km/h - - Returns: - function [transit_time_return(a, b)]: A function that takes the - from/a node index and the to/b node index and returns the - transit time from a to b. - """ - - def transit_time_return(a, b): - return (self.distmat[a][b] / (speed_kmph * 1.0 / 60**2)) - - return transit_time_return - - -class Vehicles(): - """ - A Class to create and hold vehicle information. - - The Vehicles in a CVRPTW problem service the customers and belong to a - depot. The class Vehicles creates a list of named tuples describing the - Vehicles. The main characteristics are the vehicle capacity, fixed cost, - and cost per km. The fixed cost of using a certain type of vehicles can be - higher or lower than others. If a vehicle is used, i.e. this vehicle serves - at least one node, then this cost is added to the objective function. - - Note: - If numpy arrays are given for capacity and cost, then they must be of - the same length, and the number of vehicles are inferred from them. - If scalars are given, the fleet is homogeneous, and the number of - vehicles is determined by number. - - Args: capacity (scalar or numpy array): The integer capacity of demand - units. cost (scalar or numpy array): The fixed cost of the vehicle. number - (Optional [int]): The number of vehicles in a homogeneous fleet. 
- """ - - def __init__(self, capacity=100, cost=100, number=None): - - Vehicle = namedtuple('Vehicle', ['index', 'capacity', 'cost']) - - if number is None: - self.number = np.size(capacity) - else: - self.number = number - idxs = np.array(range(0, self.number)) - - if np.isscalar(capacity): - capacities = capacity * np.ones_like(idxs) - elif np.size(capacity) != self.number: - print('capacity is neither scalar, nor the same size as num!') - else: - capacities = capacity - - if np.isscalar(cost): - costs = cost * np.ones_like(idxs) - elif np.size(cost) != self.number: - print(np.size(cost)) - print('cost is neither scalar, nor the same size as num!') - else: - costs = cost - - self.vehicles = [ - Vehicle(idx, capacity, cost) - for idx, capacity, cost in zip(idxs, capacities, costs) - ] - - def get_total_capacity(self): - return (sum([c.capacity for c in self.vehicles])) - - def return_starting_callback(self, customers, sameStartFinish=False): - # create a different starting and finishing depot for each vehicle - self.starts = [ - int(customers.central_start_node()) for o in range(self.number) - ] - if sameStartFinish: - self.ends = self.starts - else: - self.ends = [ - int(customers.central_start_node(invert=True)) - for o in range(self.number) - ] - # the depots will not have demands, so zero them. 
- for depot in self.starts: - customers.zero_depot_demands(depot) - for depot in self.ends: - customers.zero_depot_demands(depot) - - def start_return(v): - return (self.starts[v]) - - return start_return - - -def discrete_cmap(N, base_cmap=None): - """ - Create an N-bin discrete colormap from the specified input map - """ - # Note that if base_cmap is a string or None, you can simply do - # return plt.cm.get_cmap(base_cmap, N) - # The following works for string, None, or a colormap instance: - - base = plt.cm.get_cmap(base_cmap) - color_list = base(np.linspace(0, 1, N)) - cmap_name = base.name + str(N) - return base.from_list(cmap_name, color_list, N) - - -def vehicle_output_string(manager, routing, plan): - """ - Return a string displaying the output of the routing instance and - assignment (plan). - - Args: routing (ortools.constraint_solver.pywrapcp.RoutingModel): routing. - plan (ortools.constraint_solver.pywrapcp.Assignment): the assignment. - - Returns: - (string) plan_output: describing each vehicle's plan. - - (List) dropped: list of dropped orders. 
- - """ - dropped = [] - for order in range(routing.Size()): - if (plan.Value(routing.NextVar(order)) == order): - dropped.append(str(order)) - - capacity_dimension = routing.GetDimensionOrDie('Capacity') - time_dimension = routing.GetDimensionOrDie('Time') - plan_output = '' - - for route_number in range(routing.vehicles()): - order = routing.Start(route_number) - plan_output += 'Route {0}:'.format(route_number) - if routing.IsEnd(plan.Value(routing.NextVar(order))): - plan_output += ' Empty \n' - else: - while True: - load_var = capacity_dimension.CumulVar(order) - time_var = time_dimension.CumulVar(order) - node = manager.IndexToNode(order) - plan_output += \ - ' {node} Load({load}) Time({tmin}, {tmax}) -> '.format( - node=node, - load=plan.Value(load_var), - tmin=str(timedelta(seconds=plan.Min(time_var))), - tmax=str(timedelta(seconds=plan.Max(time_var)))) - - if routing.IsEnd(order): - plan_output += ' EndRoute {0}. \n'.format(route_number) - break - order = plan.Value(routing.NextVar(order)) - plan_output += '\n' - - return (plan_output, dropped) - - -def build_vehicle_route(manager, routing, plan, customers, veh_number): - """ - Build a route for a vehicle by starting at the strat node and - continuing to the end node. - - Args: routing (ortools.constraint_solver.pywrapcp.RoutingModel): routing. - plan (ortools.constraint_solver.pywrapcp.Assignment): the assignment. - customers (Customers): the customers instance. 
veh_number (int): index of - the vehicle - - Returns: - (List) route: indexes of the customers for vehicle veh_number - """ - veh_used = routing.IsVehicleUsed(plan, veh_number) - print('Vehicle {0} is used {1}'.format(veh_number, veh_used)) - if veh_used: - route = [] - node = routing.Start(veh_number) # Get the starting node index - route.append(customers.customers[manager.IndexToNode(node)]) - while not routing.IsEnd(node): - route.append(customers.customers[manager.IndexToNode(node)]) - node = plan.Value(routing.NextVar(node)) - - route.append(customers.customers[manager.IndexToNode(node)]) - return route - else: - return None - - -def plot_vehicle_routes(veh_route, ax1, customers, vehicles): - """ - Plot the vehicle routes on matplotlib axis ax1. - - Args: veh_route (dict): a dictionary of routes keyed by vehicle idx. ax1 - (matplotlib.axes._subplots.AxesSubplot): Matplotlib axes customers - (Customers): the customers instance. vehicles (Vehicles): the vehicles - instance. - """ - veh_used = [v for v in veh_route if veh_route[v] is not None] - - cmap = discrete_cmap(vehicles.number + 2, 'nipy_spectral') - - for veh_number in veh_used: - - lats, lons = zip(*[(c.lat, c.lon) for c in veh_route[veh_number]]) - lats = np.array(lats) - lons = np.array(lons) - s_dep = customers.customers[vehicles.starts[veh_number]] - s_fin = customers.customers[vehicles.ends[veh_number]] - ax1.annotate( - 'v({veh}) S @ {node}'.format( - veh=veh_number, node=vehicles.starts[veh_number]), - xy=(s_dep.lon, s_dep.lat), - xytext=(10, 10), - xycoords='data', - textcoords='offset points', - arrowprops=dict( - arrowstyle='->', - connectionstyle='angle3,angleA=90,angleB=0', - shrinkA=0.05), - ) - ax1.annotate( - 'v({veh}) F @ {node}'.format( - veh=veh_number, node=vehicles.ends[veh_number]), - xy=(s_fin.lon, s_fin.lat), - xytext=(10, -20), - xycoords='data', - textcoords='offset points', - arrowprops=dict( - arrowstyle='->', - connectionstyle='angle3,angleA=-90,angleB=0', - shrinkA=0.05), - ) 
- ax1.plot(lons, lats, 'o', mfc=cmap(veh_number + 1)) - ax1.quiver( - lons[:-1], - lats[:-1], - lons[1:] - lons[:-1], - lats[1:] - lats[:-1], - scale_units='xy', - angles='xy', - scale=1, - color=cmap(veh_number + 1)) - - -def main(): - # Create a set of customer, (and depot) stops. - customers = Customers( - num_stops=50, - min_demand=1, - max_demand=15, - box_size=40, - min_tw=3, - max_tw=6) - - # Create a list of inhomgenious vehicle capacities as integer units. - capacity = [50, 75, 100, 125, 150, 175, 200, 250] - - # Create a list of inhomogeneous fixed vehicle costs. - cost = [int(100 + 2 * np.sqrt(c)) for c in capacity] - - # Create a set of vehicles, the number set by the length of capacity. - vehicles = Vehicles(capacity=capacity, cost=cost) - - # check to see that the problem is feasible, if we don't have enough - # vehicles to cover the demand, there is no point in going further. - assert (customers.get_total_demand() < vehicles.get_total_capacity()) - - # Set the starting nodes, and create a callback fn for the starting node. - start_fn = vehicles.return_starting_callback( - customers, sameStartFinish=False) - - # Create the routing index manager. - manager = pywrapcp.RoutingIndexManager( - customers.number, # int number - vehicles.number, # int number - vehicles.starts, # List of int start depot - vehicles.ends) # List of int end depot - - customers.set_manager(manager) - - # Set model parameters - model_parameters = pywrapcp.DefaultRoutingModelParameters() - - # The solver parameters can be accessed from the model parameters. For example : - # model_parameters.solver_parameters.CopyFrom( - # pywrapcp.Solver.DefaultSolverParameters()) - # model_parameters.solver_parameters.trace_propagation = True - - # Make the routing model instance. - routing = pywrapcp.RoutingModel(manager, model_parameters) - - parameters = pywrapcp.DefaultRoutingSearchParameters() - # Setting first solution heuristic (cheapest addition). 
- parameters.first_solution_strategy = ( - routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC) - # Routing: forbids use of TSPOpt neighborhood, (this is the default behaviour) - parameters.local_search_operators.use_tsp_opt = pywrapcp.BOOL_FALSE - # Disabling Large Neighborhood Search, (this is the default behaviour) - parameters.local_search_operators.use_path_lns = pywrapcp.BOOL_FALSE - parameters.local_search_operators.use_inactive_lns = pywrapcp.BOOL_FALSE - - parameters.time_limit.seconds = 10 - parameters.use_full_propagation = True - #parameters.log_search = True - - # Create callback fns for distances, demands, service and transit-times. - dist_fn = customers.return_dist_callback() - dist_fn_index = routing.RegisterTransitCallback(dist_fn) - - dem_fn = customers.return_dem_callback() - dem_fn_index = routing.RegisterUnaryTransitCallback(dem_fn) - - # Create and register a transit callback. - serv_time_fn = customers.make_service_time_call_callback() - transit_time_fn = customers.make_transit_time_callback() - def tot_time_fn(from_index, to_index): - """ - The time function we want is both transit time and service time. - """ - # Convert from routing variable Index to distance matrix NodeIndex. - from_node = manager.IndexToNode(from_index) - to_node = manager.IndexToNode(to_index) - return serv_time_fn(from_node, to_node) + transit_time_fn(from_node, to_node) - - tot_time_fn_index = routing.RegisterTransitCallback(tot_time_fn) - - # Set the cost function (distance callback) for each arc, homogeneous for - # all vehicles. - routing.SetArcCostEvaluatorOfAllVehicles(dist_fn_index) - - # Set vehicle costs for each vehicle, not homogeneous. 
- for veh in vehicles.vehicles: - routing.SetFixedCostOfVehicle(veh.cost, int(veh.index)) - - # Add a dimension for vehicle capacities - null_capacity_slack = 0 - routing.AddDimensionWithVehicleCapacity( - dem_fn_index, # demand callback - null_capacity_slack, - capacity, # capacity array - True, - 'Capacity') - # Add a dimension for time and a limit on the total time_horizon - routing.AddDimension( - tot_time_fn_index, # total time function callback - customers.time_horizon, - customers.time_horizon, - True, - 'Time') - - time_dimension = routing.GetDimensionOrDie('Time') - for cust in customers.customers: - if cust.tw_open is not None: - time_dimension.CumulVar(manager.NodeToIndex(cust.index)).SetRange( - cust.tw_open.seconds, cust.tw_close.seconds) - """ - To allow the dropping of orders, we add disjunctions to all the customer - nodes. Each disjunction is a list of 1 index, which allows that customer to - be active or not, with a penalty if not. The penalty should be larger - than the cost of servicing that customer, or it will always be dropped! - """ - # To add disjunctions just to the customers, make a list of non-depots. - non_depot = set(range(customers.number)) - non_depot.difference_update(vehicles.starts) - non_depot.difference_update(vehicles.ends) - penalty = 400000 # The cost for dropping a node from the plan. - nodes = [routing.AddDisjunction([manager.NodeToIndex(c)], penalty) for c in non_depot] - - # This is how you would implement partial routes if you already knew part - # of a feasible solution for example: - # partial = np.random.choice(list(non_depot), size=(4,5), replace=False) - - # routing.CloseModel() - # partial_list = [partial[0,:].tolist(), - # partial[1,:].tolist(), - # partial[2,:].tolist(), - # partial[3,:].tolist(), - # [],[],[],[]] - # print(routing.ApplyLocksToAllVehicles(partial_list, False)) - - # Solve the problem ! 
- assignment = routing.SolveWithParameters(parameters) - - # The rest is all optional for saving, printing or plotting the solution. - if assignment: - ## save the assignment, (Google Protobuf format) - #save_file_base = os.path.realpath(__file__).split('.')[0] - #if routing.WriteAssignment(save_file_base + '_assignment.ass'): - # print('succesfully wrote assignment to file ' + save_file_base + - # '_assignment.ass') - - print('The Objective Value is {0}'.format(assignment.ObjectiveValue())) - - plan_output, dropped = vehicle_output_string(manager, routing, assignment) - print(plan_output) - print('dropped nodes: ' + ', '.join(dropped)) - - # you could print debug information like this: - # print(routing.DebugOutputAssignment(assignment, 'Capacity')) - - vehicle_routes = {} - for veh in range(vehicles.number): - vehicle_routes[veh] = build_vehicle_route(manager, routing, assignment, - customers, veh) - - # Plotting of the routes in matplotlib. - fig = plt.figure() - ax = fig.add_subplot(111) - # Plot all the nodes as black dots. 
- clon, clat = zip(*[(c.lon, c.lat) for c in customers.customers]) - ax.plot(clon, clat, 'k.') - # plot the routes as arrows - plot_vehicle_routes(vehicle_routes, ax, customers, vehicles) - plt.show() - - else: - print('No assignment') - - -if __name__ == '__main__': - main() diff --git a/examples/python/flexible_job_shop_sat_py_test.bintest b/examples/python/flexible_job_shop_sat_py_test.bintest new file mode 100644 index 00000000000..eed68ac56e1 --- /dev/null +++ b/examples/python/flexible_job_shop_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(flexible_job_shop_sat_py3) diff --git a/examples/python/gate_scheduling_sat.py b/examples/python/gate_scheduling_sat.py index 9cea61deb76..84c6a083d38 100644 --- a/examples/python/gate_scheduling_sat.py +++ b/examples/python/gate_scheduling_sat.py @@ -24,9 +24,8 @@ """ from absl import app - -from ortools.sat.colab import visualization from ortools.sat.python import cp_model +from ortools.sat.colab import visualization def main(_) -> None: diff --git a/examples/python/gate_scheduling_sat_py_test.bintest b/examples/python/gate_scheduling_sat_py_test.bintest new file mode 100644 index 00000000000..279fc094b0d --- /dev/null +++ b/examples/python/gate_scheduling_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(gate_scheduling_sat_py3) diff --git a/examples/python/golomb8_py_test.bintest b/examples/python/golomb8_py_test.bintest new file mode 100644 index 00000000000..e65bf5dc1ec --- /dev/null +++ b/examples/python/golomb8_py_test.bintest @@ -0,0 +1 @@ +RUN: $(golomb8_py3) diff --git a/examples/python/golomb_sat_py_test.bintest b/examples/python/golomb_sat_py_test.bintest new file mode 100644 index 00000000000..3ab2199d13a --- /dev/null +++ b/examples/python/golomb_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(golomb_sat_py3) diff --git a/examples/python/hidato_sat_py_test.bintest b/examples/python/hidato_sat_py_test.bintest new file mode 100644 index 00000000000..a5e01c6f508 --- /dev/null +++ b/examples/python/hidato_sat_py_test.bintest @@ 
-0,0 +1 @@ +RUN: $(hidato_sat_py3) diff --git a/examples/python/horse_jumping_show.py b/examples/python/horse_jumping_show.py new file mode 100644 index 00000000000..bff393a6fe3 --- /dev/null +++ b/examples/python/horse_jumping_show.py @@ -0,0 +1,297 @@ +#!/usr/bin/env python3 +# Copyright 2010-2025 Google LLC +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Horse Jumping Show. + +A major three-day horse jumping competition is scheduled next winter in Geneva. +The show features riders and horses from all over the world, competing in +several different competitions throughout the show. Six months before the show, +riders submit the entries (i.e., rider name, horse, competition) to the +organizers. Riders can submit multiple entries, for example, to compete in the +same competition with multiple horses, or to compete in several competitions. + +There are additional space limitations. For example, the venue has 100 stalls, +4 arenas (where competitions can be scheduled), and 6 paddocks (where riders +warm up before their turn). It is also ideal that paddocks are not overloaded by +riders from multiple competitions. + +The organizer's goal is find a schedule in which competitions don't overlap, and +the times at which they happen are scattered throughout the day (and hopefully +not that early in the morning). The starting times of the competitions should be +at the hour or 30 minutes past the hour (e.g. 9:30, 10:00, 10:30, etc.). 
+Competitions can only be scheduled while there is daylight, except for +competitions scheduled in the Main Stage arena, which is covered and has proper +lighting. Also, beginner competitions (1.10m or less) are scheduled on the first +day, and advanced competitions (1.50m or more) are scheduled on the last day. + +The information for next winter's show is as follows: +Available stalls: 100 +Number of riders: 100 +Number of horses: 130 +Number of requested Entries: 200 +Number of competitions: 15 + +Venue: +- Main Stage arena: Covered (9AM-11PM) +- Highlands arena: Daylight Only (9AM-5PM) +- Sawdust arena: Daylight Only (9AM-5PM) +- Paddock1 has capacity for 10 riders and serves Main Stage +- Paddock2 has capacity for 6 riders and serves Main Stage +- Paddock3 has capacity for 8 riders and serves Main Stage, Highlands +- Paddock4 has capacity for 8 riders and serves Highlands, Sawdust +- Paddock5 has capacity for 9 riders and serves Sawdust +- Paddock6 has capacity for 7 riders and serves Sawdust + +competitions: +- C_5_1.10m_Year_Olds 1.10m - 60 minutes +- C_6_1.25m_Year_Olds 1.25m - 90 minutes +- C_7_1.35m_Year_Olds 1.35m - 120 minutes +- C_0.8m_Jumpers 0.80m - 240 minutes +- C_1.0m_Jumpers 1.00m - 180 minutes +- C_1.10m_Jumpers 1.10m - 180 minutes +- C_1.20m_Jumpers 1.20m - 120 minutes +- C_1.30m_Jumpers 1.30m - 120 minutes +- C_1.40m_Jumpers 1.40m - 120 minutes +- C_1.20m_Derby 1.20m - 180 minutes +- C_1.35m_Derby 1.35m - 180 minutes +- C_1.45m_Derby 1.45m - 180 minutes +- C_1.40m_Open 1.40m - 120 minutes +- C_1.50m_Open 1.50m - 180 minutes +- C_1.60m_Grand_Prix 1.60m - 240 minutes +""" + +import dataclasses +from absl import app +import numpy as np +from ortools.sat.python import cp_model + + +@dataclasses.dataclass(frozen=True) +class Arena: + """Data for an arena.""" + + id: str + hours: str + + +@dataclasses.dataclass(frozen=True) +class Competition: + """Data for a competition.""" + + id: str + height: float + duration: int + + 
+@dataclasses.dataclass(frozen=True) +class HorseJumpingShowData: + """Horse Jumping Show Data.""" + + num_days: int + competitions: list[Competition] + arenas: list[Arena] + + +@dataclasses.dataclass(frozen=True) +class ScheduledCompetition: + """Horse Jumping Show Schedule.""" + + completion: str + day: int + arena: str + start_time: str + end_time: str + + +def generate_horse_jumping_show_data() -> HorseJumpingShowData: + """Generates the horse jumping show data.""" + arenas = [ + Arena(id="Main Stage", hours="9AM-9PM"), + Arena(id="Highlands", hours="9AM-5PM"), + Arena(id="Sawdust", hours="9AM-5PM"), + ] + competitions = [ + Competition(id="C_5_1.10m_Year_Olds", height=1.1, duration=60), + Competition(id="C_6_1.25m_Year_Olds", height=1.25, duration=90), + Competition(id="C_7_1.35m_Year_Olds", height=1.35, duration=120), + Competition(id="C_0.8m_Jumpers", height=0.8, duration=240), + Competition(id="C_1.0m_Jumpers", height=1.0, duration=180), + Competition(id="C_1.10m_Jumpers", height=1.10, duration=180), + Competition(id="C_1.20m_Jumpers", height=1.20, duration=120), + Competition(id="C_1.30m_Jumpers", height=1.30, duration=120), + Competition(id="C_1.40m_Jumpers", height=1.40, duration=120), + Competition(id="C_1.20m_Derby", height=1.20, duration=180), + Competition(id="C_1.35m_Derby", height=1.35, duration=180), + Competition(id="C_1.45m_Derby", height=1.45, duration=180), + Competition(id="C_1.40m_Open", height=1.40, duration=120), + Competition(id="C_1.50m_Open", height=1.50, duration=180), + Competition(id="C_1.60m_Grand_Prix", height=1.60, duration=240), + ] + return HorseJumpingShowData(num_days=3, competitions=competitions, arenas=arenas) + + +def solve() -> list[ScheduledCompetition]: + """Solves the horse jumping show problem.""" + data = generate_horse_jumping_show_data() + num_days = data.num_days + competitions = data.competitions + arenas = data.arenas + day_index = list(range(num_days)) + + # Time parser. 
+ def parse_time(t_str): + hour = int(t_str[:-2]) + if "PM" in t_str and hour != 12: + hour += 12 + if "AM" in t_str and hour == 12: + hour = 0 + return hour * 60 + + # Schedule time intervals for each arena. + schedule_interval_by_arena = {} + for arena in arenas: + start_h_str, end_h_str = arena.hours.split("-") + start_time = parse_time(start_h_str) + end_time = parse_time(end_h_str) + schedule_interval_by_arena[arena.id] = (start_time, end_time) + + # Map time to 30-minute intervals and back. + time_slot_size = 30 + + def time_to_slot(time_in_minutes: int): + return time_in_minutes // time_slot_size + + def slot_to_time(slot_index: int): + return slot_index * time_slot_size + + # --- Model Creation --- + model = cp_model.CpModel() + + # --- Variables --- + # Competition scheduling variables per arena and day. + competition_assignments = np.empty( + (len(competitions), len(arenas), num_days), dtype=object + ) + for c, comp in enumerate(competitions): + for a, arena in enumerate(arenas): + for d in day_index: + competition_assignments[c, a, d] = model.new_bool_var( + f"competition_scheduled_{comp.id}_{arena.id}_{d}" + ) + # Time intervals and start times for each competition. We model time steps + # 0,1,2,... to represent the start times in 30 minutes intervals, as opposed + # to represent the start times in minutes. 
+ competition_start_times = np.empty( + (len(competitions), len(arenas), num_days), dtype=object + ) + competition_intervals = np.empty( + (len(competitions), len(arenas), num_days), dtype=object + ) + for c, comp in enumerate(competitions): + for a, arena in enumerate(arenas): + earliest_start_time, latest_end_time = schedule_interval_by_arena[arena.id] + latest_start_time = latest_end_time - comp.duration + for d in day_index: + competition_start_times[c, a, d] = model.new_int_var( + time_to_slot(earliest_start_time), + time_to_slot(latest_start_time), + f"start_time_{comp.id}_{arena.id}_{d}", + ) + competition_intervals[c, a, d] = ( + model.new_optional_fixed_size_interval_var( + competition_start_times[c, a, d], + time_to_slot(comp.duration), + competition_assignments[c, a, d], + f"task_{comp.id}_{arena.id}_{d}", + ) + ) + + # --- Constraints --- + # Every competition must be scheduled, enforcing that beginner competitions + # are on day 1, and advanced competitions are on day 3. + for c, comp in enumerate(competitions): + model.add(np.sum(competition_assignments[c, :, :]) == 1) + # Beginner competitions are on the first day. + if comp.height <= 1.10: + beginners_day = 0 + model.add(np.sum(competition_assignments[c, :, beginners_day]) == 1) + # Advanced competitions are on the last day. + if comp.height >= 1.50: + advanced_day = num_days - 1 + model.add(np.sum(competition_assignments[c, :, advanced_day]) == 1) + + # Competitions scheduled on the same arena and on the same day can't overlap. + for a, _ in enumerate(arenas): + for day in range(num_days): + model.add_no_overlap(competition_intervals[:, a, day]) + + # Start times should be scattered across the day. 
+ for a, _ in enumerate(arenas): + for day in day_index: + model.add_all_different(competition_start_times[:, a, day]) + + # --- Objective --- + model.maximize(np.sum(competition_start_times)) + + # --- Solve --- + solver = cp_model.CpSolver() + solver.parameters.max_time_in_seconds = 30.0 + solver.parameters.log_search_progress = True + solver.parameters.num_workers = 16 + status = solver.solve(model) + + # --- Print Solution --- + if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE: + schedule = [] + for day in range(num_days): + for c, comp in enumerate(competitions): + for a, arena in enumerate(arenas): + if solver.value(competition_assignments[c, a, day]): + start_time_minutes = slot_to_time( + solver.value(competition_start_times[c, a, day]) + ) + start_h, start_m = divmod(start_time_minutes, 60) + end_h, end_m = divmod(start_time_minutes + comp.duration, 60) + schedule.append( + ScheduledCompetition( + completion=comp.id, + day=day + 1, + arena=arena.id, + start_time=f"{start_h:02d}:{start_m:02d}", + end_time=f"{end_h:02d}:{end_m:02d}", + ) + ) + # Sort and print schedule for readability. + schedule.sort(key=lambda x: (x.day, x.start_time)) + print("Schedule:") + for item in schedule: + print( + f"Day {item.day}: {item.completion} in {item.arena} from" + f" {item.start_time} to {item.end_time}." + ) + return schedule + elif status == cp_model.INFEASIBLE: + print("Problem is infeasible.") + else: + print("No solution found.") + # Return an empty schedule if no solution is found. 
+ return [] + + +def main(_): + solve() + + +if __name__ == "__main__": + app.run(main) diff --git a/examples/python/horse_jumping_show_py_test.bintest b/examples/python/horse_jumping_show_py_test.bintest new file mode 100644 index 00000000000..d8ed39bb39d --- /dev/null +++ b/examples/python/horse_jumping_show_py_test.bintest @@ -0,0 +1,2 @@ +RUN: $(horse_jumping_show_py3) +CHECK: "Day 3: C_1.60m_Grand_Prix" diff --git a/examples/python/integer_programming_py_test.bintest b/examples/python/integer_programming_py_test.bintest new file mode 100644 index 00000000000..b6741b163f4 --- /dev/null +++ b/examples/python/integer_programming_py_test.bintest @@ -0,0 +1 @@ +RUN: $(integer_programming_py3) diff --git a/examples/python/jobshop_ft06_distance_sat_py_test.bintest b/examples/python/jobshop_ft06_distance_sat_py_test.bintest new file mode 100644 index 00000000000..311b9f8eff9 --- /dev/null +++ b/examples/python/jobshop_ft06_distance_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(jobshop_ft06_distance_sat_py3) diff --git a/examples/python/jobshop_ft06_sat_py_test.bintest b/examples/python/jobshop_ft06_sat_py_test.bintest new file mode 100644 index 00000000000..3a2dd1304d3 --- /dev/null +++ b/examples/python/jobshop_ft06_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(jobshop_ft06_sat_py3) diff --git a/examples/python/jobshop_with_maintenance_sat_py_test.bintest b/examples/python/jobshop_with_maintenance_sat_py_test.bintest new file mode 100644 index 00000000000..c8fe05b4a15 --- /dev/null +++ b/examples/python/jobshop_with_maintenance_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(jobshop_with_maintenance_sat_py3) diff --git a/examples/python/knapsack_2d_sat_py_test.bintest b/examples/python/knapsack_2d_sat_py_test.bintest new file mode 100644 index 00000000000..4ab15a54c1c --- /dev/null +++ b/examples/python/knapsack_2d_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(knapsack_2d_sat_py3) diff --git a/examples/python/line_balancing_sat_salbp_20_1_py_test.bintest 
b/examples/python/line_balancing_sat_salbp_20_1_py_test.bintest new file mode 100644 index 00000000000..10d11bef994 --- /dev/null +++ b/examples/python/line_balancing_sat_salbp_20_1_py_test.bintest @@ -0,0 +1,2 @@ +RUN: $(line_balancing_sat_py3) --input=$(salbp_20_1.alb) +CHECK: "objective: 3" diff --git a/examples/python/linear_assignment_api_py_test.bintest b/examples/python/linear_assignment_api_py_test.bintest new file mode 100644 index 00000000000..aa508f3bceb --- /dev/null +++ b/examples/python/linear_assignment_api_py_test.bintest @@ -0,0 +1 @@ +RUN: $(linear_assignment_api_py3) diff --git a/examples/python/linear_programming_py_test.bintest b/examples/python/linear_programming_py_test.bintest new file mode 100644 index 00000000000..6c7b0be1bea --- /dev/null +++ b/examples/python/linear_programming_py_test.bintest @@ -0,0 +1 @@ +RUN: $(linear_programming_py3) diff --git a/examples/python/magic_sequence_distribute_py_test.bintest b/examples/python/magic_sequence_distribute_py_test.bintest new file mode 100644 index 00000000000..8ab9383e2be --- /dev/null +++ b/examples/python/magic_sequence_distribute_py_test.bintest @@ -0,0 +1 @@ +RUN: $(magic_sequence_distribute_py3) diff --git a/examples/python/magic_sequence_distribute_with_arg_py_test.bintest b/examples/python/magic_sequence_distribute_with_arg_py_test.bintest new file mode 100644 index 00000000000..b200a93a358 --- /dev/null +++ b/examples/python/magic_sequence_distribute_with_arg_py_test.bintest @@ -0,0 +1 @@ +RUN: $(magic_sequence_distribute_py3) 5 diff --git a/examples/python/maximize_combinations_sat_py_test.bintest b/examples/python/maximize_combinations_sat_py_test.bintest new file mode 100644 index 00000000000..cb4b60da6b8 --- /dev/null +++ b/examples/python/maximize_combinations_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(maximize_combinations_sat_py3) diff --git a/examples/python/maze_escape_sat_py_test.bintest b/examples/python/maze_escape_sat_py_test.bintest new file mode 100644 index 
00000000000..201abe1c108 --- /dev/null +++ b/examples/python/maze_escape_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(maze_escape_sat_py3) diff --git a/examples/python/memory_layout_and_infeasibility_sat_py_test.bintest b/examples/python/memory_layout_and_infeasibility_sat_py_test.bintest new file mode 100644 index 00000000000..98284d635a2 --- /dev/null +++ b/examples/python/memory_layout_and_infeasibility_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(memory_layout_and_infeasibility_sat_py3) diff --git a/examples/python/music_playlist_sat_py_test.bintest b/examples/python/music_playlist_sat_py_test.bintest new file mode 100644 index 00000000000..d0947fd3058 --- /dev/null +++ b/examples/python/music_playlist_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(music_playlist_sat_py3) diff --git a/examples/python/no_wait_baking_scheduling_sat_py_test.bintest b/examples/python/no_wait_baking_scheduling_sat_py_test.bintest new file mode 100644 index 00000000000..1e8edde5a0f --- /dev/null +++ b/examples/python/no_wait_baking_scheduling_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(no_wait_baking_scheduling_sat_py3) diff --git a/examples/python/nqueens_sat_py_test.bintest b/examples/python/nqueens_sat_py_test.bintest new file mode 100644 index 00000000000..5d7cc4f6e3e --- /dev/null +++ b/examples/python/nqueens_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(nqueens_sat_py3) diff --git a/examples/python/pell_equation_sat_py_test.bintest b/examples/python/pell_equation_sat_py_test.bintest new file mode 100644 index 00000000000..7d83a74a24a --- /dev/null +++ b/examples/python/pell_equation_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(pell_equation_sat_py3) diff --git a/examples/python/pentominoes_sat_py_test.bintest b/examples/python/pentominoes_sat_py_test.bintest new file mode 100644 index 00000000000..3ce95b060ca --- /dev/null +++ b/examples/python/pentominoes_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(pentominoes_sat_py3) diff --git a/examples/python/prize_collecting_tsp_sat_py_test.bintest 
b/examples/python/prize_collecting_tsp_sat_py_test.bintest new file mode 100644 index 00000000000..1ed6cb26485 --- /dev/null +++ b/examples/python/prize_collecting_tsp_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(prize_collecting_tsp_sat_py3) diff --git a/examples/python/prize_collecting_vrp_sat_py_test.bintest b/examples/python/prize_collecting_vrp_sat_py_test.bintest new file mode 100644 index 00000000000..1789eec0fde --- /dev/null +++ b/examples/python/prize_collecting_vrp_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(prize_collecting_vrp_sat_py3) diff --git a/examples/python/pyflow_example_py_test.bintest b/examples/python/pyflow_example_py_test.bintest new file mode 100644 index 00000000000..a9af12919e7 --- /dev/null +++ b/examples/python/pyflow_example_py_test.bintest @@ -0,0 +1 @@ +RUN: $(pyflow_example_py3) diff --git a/examples/python/qubo_sat_py_test.bintest b/examples/python/qubo_sat_py_test.bintest new file mode 100644 index 00000000000..6dc654a1216 --- /dev/null +++ b/examples/python/qubo_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(qubo_sat_py3) diff --git a/examples/python/rcpsp_sat_c1510_1_py_test.bintest b/examples/python/rcpsp_sat_c1510_1_py_test.bintest new file mode 100644 index 00000000000..48a07a05544 --- /dev/null +++ b/examples/python/rcpsp_sat_c1510_1_py_test.bintest @@ -0,0 +1,2 @@ +RUN: $(rcpsp_sat_py3) --input=$(c1510_1.mm.txt) +CHECK: "objective: 21" diff --git a/examples/python/rcpsp_sat_j301_1_py_test.bintest b/examples/python/rcpsp_sat_j301_1_py_test.bintest new file mode 100644 index 00000000000..9df595ccd46 --- /dev/null +++ b/examples/python/rcpsp_sat_j301_1_py_test.bintest @@ -0,0 +1,2 @@ +RUN: $(rcpsp_sat_py3) --input=$(j301_1.sm) +CHECK: "objective: 43" diff --git a/examples/python/rcpsp_sat_rip1_py_test.bintest b/examples/python/rcpsp_sat_rip1_py_test.bintest new file mode 100644 index 00000000000..3b3f0c7dfb2 --- /dev/null +++ b/examples/python/rcpsp_sat_rip1_py_test.bintest @@ -0,0 +1,2 @@ +RUN: $(rcpsp_sat_py3) --input=$(rip1.sch) 
+CHECK: "objective: 100" diff --git a/examples/python/rcpsp_sat_testset_mm30_psp3_py_test.bintest b/examples/python/rcpsp_sat_testset_mm30_psp3_py_test.bintest new file mode 100644 index 00000000000..801590ce88e --- /dev/null +++ b/examples/python/rcpsp_sat_testset_mm30_psp3_py_test.bintest @@ -0,0 +1 @@ +RUN: $(rcpsp_sat_py3) --input=$(testset_mm30_psp3.sch) --params=max_time_in_seconds:8.0 diff --git a/examples/python/rcpsp_sat_ubo_10_psp2_py_test.bintest b/examples/python/rcpsp_sat_ubo_10_psp2_py_test.bintest new file mode 100644 index 00000000000..47fad4820be --- /dev/null +++ b/examples/python/rcpsp_sat_ubo_10_psp2_py_test.bintest @@ -0,0 +1,2 @@ +RUN: $(rcpsp_sat_py3) --input=$(ubo_10_psp2.sch) +CHECK: "objective: 45" diff --git a/examples/python/shift_scheduling_sat_py_test.bintest b/examples/python/shift_scheduling_sat_py_test.bintest new file mode 100644 index 00000000000..a54e4fa4b57 --- /dev/null +++ b/examples/python/shift_scheduling_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(shift_scheduling_sat_py3) --params=max_time_in_seconds:10 diff --git a/examples/python/single_machine_scheduling_with_setup_release_due_dates_sat_py_test.bintest b/examples/python/single_machine_scheduling_with_setup_release_due_dates_sat_py_test.bintest new file mode 100644 index 00000000000..62c965b85c5 --- /dev/null +++ b/examples/python/single_machine_scheduling_with_setup_release_due_dates_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(single_machine_scheduling_with_setup_release_due_dates_sat_py3) diff --git a/examples/python/spillover_sat.py b/examples/python/spillover_sat.py new file mode 100644 index 00000000000..02609f30b76 --- /dev/null +++ b/examples/python/spillover_sat.py @@ -0,0 +1,360 @@ +#!/usr/bin/env python3 +# Copyright 2010-2025 Google LLC +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Solves the problem of buying physical machines to meet VM demand. + +The Spillover problem is defined as follows: + +You have M types of physical machines and V types of Virtual Machines (VMs). You +can use a physical machine of type m to get n_mv copies of VM v. Each physical +machine m has a cost of c_m. Each VM has a demand of d_v. VMs are assigned to +physical machines by the following rule. The demand for each VM type arrives +equally spaced out over the interval [0, 1]. For each VM type, there is a +priority order over the physical machine types that you must follow. When a +demand arrives, if there are any machines of the highest priority type +available, you use them first, then you move on to the second priority machine +type, and so on. Each VM type has a list of compatible physical machine types, +and when the list is exhausted, the remaining demand is not met. Your goal is +to pick quantities of the physical machines to buy (minimizing cost) so that at +least some target service level (e.g. 95%) of the total demand of all VM is met. + +The number of machines bought of each type and the number of VMs demanded of +each type is large enough that you can solve an approximate problem instead, +where the number of machines purchased and the assignment of machines to VMs is +fractional, if it is helpful to do so. + +The problem is not particularly interesting in isolation, it is more interesting +to embed this LP inside a larger optimization problem (e.g. 
consider a two stage +problem where in stage one, you buy machines, then in stage two, you realize VM +demand). + +The continuous approximation of this problem can be solved by LP (see the +MathOpt python examples). Doing this, instead of using MIP, is nontrivial. +Below, we show that continuous relaxation can be approximately solved by CP-SAT +as well, despite not having continuous variables. If you were solving the +problem in isolation, you should just use an LP solver, but if you were to add +side constraints or embed this within a more complex model, using CP-SAT could +be appropriate. + +If for each VM type, the physical machines that are most cost effective are the +highest priority, AND the target service level is 100%, then the problem has a +trivial optimal solution: + 1. Rank the VMs by lowest cost to meet a unit of demand with the #1 preferred + machine type. + 2. For each VM type in the order above, buy machines from #1 preferred machine + type, until either you have met all demand for the VM type. + +MOE:begin_strip +This example is motivated by the Cloudy problem, see go/fluid-model. +MOE:end_strip +""" + +from collections.abc import Sequence +import dataclasses +import math +import random + +from absl import app +from absl import flags +from ortools.sat.python import cp_model + +_MACHINE_TYPES = flags.DEFINE_integer( + "machine_types", + 100, + "How many types of machines we can fulfill demand with.", +) + +_VM_TYPES = flags.DEFINE_integer( + "vm_types", 500, "How many types of VMs we need to supply." +) + +_FUNGIBILITY = flags.DEFINE_integer( + "fungibility", + 10, + "Each VM type can be satisfied with this many machine types, selected" + " uniformly at random.", +) + +_MAX_DEMAND = flags.DEFINE_integer( + "max_demand", + 100, + "Demand for each VM type is in [max_demand//2, max_demand], uniformly at" + " random.", +) + +_TEST_DATA = flags.DEFINE_bool( + "test_data", False, "Use small test instance instead of random data." 
+) + +_SEED = flags.DEFINE_integer("seed", 13, "RNG seed for instance creation.") + +_TIME_STEPS = flags.DEFINE_integer("time_steps", 100, "How much to discretize time.") + + +@dataclasses.dataclass(frozen=True) +class MachineUse: + machine_type: int + vms_per_machine: int + + +@dataclasses.dataclass(frozen=True) +class VmDemand: + compatible_machines: tuple[MachineUse, ...] + vm_quantity: int + + +@dataclasses.dataclass(frozen=True) +class SpilloverProblem: + machine_cost: tuple[float, ...] + machine_limit: tuple[int, ...] + vm_demands: tuple[VmDemand, ...] + service_level: float + time_horizon: int + + +def _random_spillover_problem( + num_machines: int, + num_vms: int, + fungibility: int, + max_vm_demand: int, + horizon: int, +) -> SpilloverProblem: + """Generates a random SpilloverProblem.""" + machine_costs = tuple(random.random() for _ in range(num_machines)) + vm_demands = [] + all_machines = list(range(num_machines)) + min_vm_demand = max_vm_demand // 2 + for _ in range(num_vms): + vm_use = [] + for machine in random.sample(all_machines, fungibility): + vm_use.append( + MachineUse(machine_type=machine, vms_per_machine=random.randint(1, 10)) + ) + vm_demands.append( + VmDemand( + compatible_machines=tuple(vm_use), + vm_quantity=random.randint(min_vm_demand, max_vm_demand), + ) + ) + machine_need_ub = num_vms * max_vm_demand + machine_limit = (machine_need_ub,) * num_machines + return SpilloverProblem( + machine_cost=machine_costs, + machine_limit=machine_limit, + vm_demands=tuple(vm_demands), + service_level=0.95, + time_horizon=horizon, + ) + + +def _test_problem() -> SpilloverProblem: + """Creates a small SpilloverProblem with optimal objective of 360.""" + # To avoid machine type 2, ensure we buy enough of 1 to not stock out, cost + # 20 + vm_a = VmDemand( + vm_quantity=10, + compatible_machines=( + MachineUse(machine_type=1, vms_per_machine=1), + MachineUse(machine_type=2, vms_per_machine=1), + ), + ) + # machine type 0 is cheaper, but we don't want to 
stock out of machine type 1, + # so use all machine type 1, cost 40. + vm_b = VmDemand( + vm_quantity=20, + compatible_machines=( + MachineUse(machine_type=1, vms_per_machine=1), + MachineUse(machine_type=0, vms_per_machine=1), + ), + ) + # Will use 3 copies of machine type 2, cost 300 + vm_c = VmDemand( + vm_quantity=30, + compatible_machines=(MachineUse(machine_type=2, vms_per_machine=10),), + ) + return SpilloverProblem( + machine_cost=(1.0, 2.0, 100.0), + machine_limit=(60, 60, 60), + vm_demands=(vm_a, vm_b, vm_c), + service_level=1.0, + time_horizon=100, + ) + + +# Indices: +# * i in I, the VM demands +# * j in J, the machines supplied +# +# Data: +# * c_j: cost of a machine of type j +# * l_j: a limit of how many machines of type j you can buy. +# * n_ij: how many VMs of type i you get from a machine of type j +# * d_i: the total demand for VMs of type i +# * service_level: the target fraction of demand that is met. +# * P_i subset J: the compatible machine types for VM demand i. +# * UP_i(j) subset P_i, for j in P_i: for VM demand type i, the machines of +# priority higher than j +# * T: the number of integer time steps. +# +# Note: when d_i/n_ij is not integer, some approximation error is introduced in +# constraint 6 below. +# +# Decision variables: +# * s_j: the supply of machine type j +# * w_j: the time we run out of machine j, or 1 if we never run out +# * v_ij: when we start using supply j to meet demand i, or w_j if we never use +# this machine type for this demand. +# * o_i: the time we start failing to meet vm demand i +# * m_i: the total demand met for vm type i. +# +# Model the problem: +# min sum_{j in J} c_j s_j +# s.t. 
+# 1: sum_i m_i >= service_level * sum_{i in I} d_i +# 2: T * m_i <= o_i * d_i for all i in I +# 3: v_ij >= w_r for all i in I, j in C_i, r in UP_i(j) +# 4: v_ij <= w_j for all i in I, j in C_i +# 5: o_i = sum_{j in P_i} (w_j - v_ij) for all i in I +# 6: sum_{i in I: j in P_i}ceil(d_i/n_ij)(w_j - v_ij)<=T*s_j for all j in J +# o_i, w_j, v_ij in [0, T] +# 0 <= m_i <= d_i +# 0 <= s_j <= l_j +# +# The constraints say: +# 1. The amount of demand served must be at least 95% of total demand. +# 2. The demand served for VM type i is linear in the time we fail to keep +# serving demand. +# 3. Don't start using machine type j for demand i until all higher priority +# machine types r are used up. +# 4. The time we run out of machine type j must be after we start using it for +# VM demand type i. +# 5. The time we are unable to serve further VM demand i is the sum of the +# time spent serving the demand with each eligible machine type. +# 6. The total use of machine type j to serve demand does not exceed the +# supply. The ceil function above introduces some approximation error when +# d_i/n_ij is not integer. 
+def _solve_spillover_problem(problem: SpilloverProblem) -> None: + """Solves the spillover problem and prints the optimal objective.""" + model = cp_model.CpModel() + num_machines = len(problem.machine_cost) + num_vms = len(problem.vm_demands) + horizon = problem.time_horizon + s = [ + model.new_int_var(lb=0, ub=problem.machine_limit[j], name=f"s_{j}") + for j in range(num_machines) + ] + w = [ + model.new_int_var(lb=0, ub=horizon, name=f"w_{i}") for i in range(num_machines) + ] + o = [model.new_int_var(lb=0, ub=horizon, name=f"o_{j}") for j in range(num_vms)] + m = [ + model.new_int_var(lb=0, ub=problem.vm_demands[j].vm_quantity, name=f"m_{j}") + for j in range(num_vms) + ] + v = [ + { + compat.machine_type: model.new_int_var( + lb=0, ub=horizon, name=f"v_{i}_{compat.machine_type}" + ) + for compat in vm_demand.compatible_machines + } + for i, vm_demand in enumerate(problem.vm_demands) + ] + + obj = 0 + for j in range(num_machines): + obj += s[j] * problem.machine_cost[j] + model.minimize(obj) + + # Constraint 1: demand served is at least service_level fraction of total. + total_vm_demand = sum(vm_demand.vm_quantity for vm_demand in problem.vm_demands) + model.add(sum(m) >= int(math.ceil(problem.service_level * total_vm_demand))) + + # Constraint 2: demand served is linear in time we stop serving. + for i in range(num_vms): + model.add( + problem.time_horizon * m[i] <= o[i] * problem.vm_demands[i].vm_quantity + ) + + # Constraint 3: use machine type j for demand i after all higher priority + # machine types r are used up. + for i in range(num_vms): + for k, meet_demand in enumerate(problem.vm_demands[i].compatible_machines): + j = meet_demand.machine_type + for l in range(k): + r = problem.vm_demands[i].compatible_machines[l].machine_type + model.add(v[i][j] >= w[r]) + + # Constraint 4: outage time of machine j is after start time for using j to + # meet VM demand i. 
+ for i in range(num_vms): + for meet_demand in problem.vm_demands[i].compatible_machines: + j = meet_demand.machine_type + model.add(v[i][j] <= w[j]) + + # Constraint 5: For VM demand i, time service ends is the sum of the time + # spent serving with each eligible machine type. + for i in range(num_vms): + sum_serving = 0 + for meet_demand in problem.vm_demands[i].compatible_machines: + j = meet_demand.machine_type + sum_serving += w[j] - v[i][j] + model.add(o[i] == sum_serving) + + # Constraint 6: Total use of machine type j is at most the supply. + # + # We build the constraints in bulk because our data is transposed. + total_machine_use = [0 for _ in range(num_machines)] + for i in range(num_vms): + for meet_demand in problem.vm_demands[i].compatible_machines: + j = meet_demand.machine_type + nij = meet_demand.vms_per_machine + vm_quantity = problem.vm_demands[i].vm_quantity + # Want vm_quantity/nij, over estimate with ceil(vm_quantity/nij) to use + # integer coefficients. + rate = (vm_quantity + nij - 1) // nij + total_machine_use[j] += rate * (w[j] - v[i][j]) + for j in range(num_machines): + model.add(total_machine_use[j] <= horizon * s[j]) + + solver = cp_model.CpSolver() + solver.parameters.num_workers = 16 + solver.parameters.log_search_progress = True + solver.max_time_in_seconds = 30.0 + status = solver.solve(model) + if status != cp_model.OPTIMAL: + raise RuntimeError(f"expected optimal, found: {status}") + print(f"objective: {solver.objective_value}") + + +def main(argv: Sequence[str]) -> None: + del argv # Unused. 
+ random.seed(_SEED.value) + if _TEST_DATA.value: + problem = _test_problem() + else: + problem = _random_spillover_problem( + _MACHINE_TYPES.value, + _VM_TYPES.value, + _FUNGIBILITY.value, + _MAX_DEMAND.value, + _TIME_STEPS.value, + ) + print(problem) + + _solve_spillover_problem(problem) + + +if __name__ == "__main__": + app.run(main) diff --git a/examples/python/spillover_sat_test_py_test.bintest b/examples/python/spillover_sat_test_py_test.bintest new file mode 100644 index 00000000000..9a0b29ef22c --- /dev/null +++ b/examples/python/spillover_sat_test_py_test.bintest @@ -0,0 +1,2 @@ +RUN: $(spillover_sat) --test_data +CHECK: "objective: 360.0" diff --git a/examples/python/spread_robots_sat_py_test.bintest b/examples/python/spread_robots_sat_py_test.bintest new file mode 100644 index 00000000000..038d178ca81 --- /dev/null +++ b/examples/python/spread_robots_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(spread_robots_sat_py3) diff --git a/examples/python/steel_mill_slab_sat_py_test.bintest b/examples/python/steel_mill_slab_sat_py_test.bintest new file mode 100644 index 00000000000..0f4fdecb94b --- /dev/null +++ b/examples/python/steel_mill_slab_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(steel_mill_slab_sat_py3) diff --git a/examples/python/sudoku_sat_py_test.bintest b/examples/python/sudoku_sat_py_test.bintest new file mode 100644 index 00000000000..d65f3ef7012 --- /dev/null +++ b/examples/python/sudoku_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(sudoku_sat_py3) diff --git a/examples/python/task_allocation_sat_py_test.bintest b/examples/python/task_allocation_sat_py_test.bintest new file mode 100644 index 00000000000..b2510011e77 --- /dev/null +++ b/examples/python/task_allocation_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(task_allocation_sat_py3) diff --git a/examples/python/tasks_and_workers_assignment_sat_py_test.bintest b/examples/python/tasks_and_workers_assignment_sat_py_test.bintest new file mode 100644 index 00000000000..e5713b78e4b --- /dev/null +++ 
b/examples/python/tasks_and_workers_assignment_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(tasks_and_workers_assignment_sat_py3) diff --git a/examples/python/test_scheduling_sat_py_test.bintest b/examples/python/test_scheduling_sat_py_test.bintest new file mode 100644 index 00000000000..71bd923c0fd --- /dev/null +++ b/examples/python/test_scheduling_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(test_scheduling_sat_py3) diff --git a/examples/python/tsp_norandom_py_test.bintest b/examples/python/tsp_norandom_py_test.bintest new file mode 100644 index 00000000000..13fc0ce75da --- /dev/null +++ b/examples/python/tsp_norandom_py_test.bintest @@ -0,0 +1 @@ +RUN: $(tsp_py3) --tsp_use_random_matrix=false diff --git a/examples/python/tsp_py_test.bintest b/examples/python/tsp_py_test.bintest new file mode 100644 index 00000000000..b60c8f2cb33 --- /dev/null +++ b/examples/python/tsp_py_test.bintest @@ -0,0 +1 @@ +RUN: $(tsp_py3) diff --git a/examples/python/tsp_sat_py_test.bintest b/examples/python/tsp_sat_py_test.bintest new file mode 100644 index 00000000000..538bc482ea1 --- /dev/null +++ b/examples/python/tsp_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(tsp_sat_py3) diff --git a/examples/python/vendor_scheduling_sat_py_test.bintest b/examples/python/vendor_scheduling_sat_py_test.bintest new file mode 100644 index 00000000000..7fbd519cd67 --- /dev/null +++ b/examples/python/vendor_scheduling_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(vendor_scheduling_sat_py3) diff --git a/examples/python/wedding_optimal_chart_sat_py_test.bintest b/examples/python/wedding_optimal_chart_sat_py_test.bintest new file mode 100644 index 00000000000..c33c3fd0611 --- /dev/null +++ b/examples/python/wedding_optimal_chart_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(wedding_optimal_chart_sat_py3) diff --git a/examples/python/weighted_latency_problem_sat_py_test.bintest b/examples/python/weighted_latency_problem_sat_py_test.bintest new file mode 100644 index 00000000000..97347db8dc4 --- /dev/null +++ 
b/examples/python/weighted_latency_problem_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(weighted_latency_problem_sat_py3) diff --git a/examples/python/zebra_sat_py_test.bintest b/examples/python/zebra_sat_py_test.bintest new file mode 100644 index 00000000000..92374872f19 --- /dev/null +++ b/examples/python/zebra_sat_py_test.bintest @@ -0,0 +1 @@ +RUN: $(zebra_sat_py3) diff --git a/ortools/algorithms/samples/BUILD.bazel b/ortools/algorithms/samples/BUILD.bazel index a845ec9aec4..4152d7ced52 100644 --- a/ortools/algorithms/samples/BUILD.bazel +++ b/ortools/algorithms/samples/BUILD.bazel @@ -14,7 +14,7 @@ load("@rules_cc//cc:cc_test.bzl", "cc_test") load("@rules_java//java:java_binary.bzl", "java_binary") load("@rules_python//python:py_test.bzl", "py_test") -load("//bazel:run_binary_test.bzl", "run_binary_test") +load("//tools/testing:bintest.bzl", "bintest") package(default_visibility = ["//visibility:public"]) @@ -65,7 +65,8 @@ java_binary( ], ) -run_binary_test( +bintest( name = "KnapsackTest", - binary = ":Knapsack", + srcs = [":KnapsackTest.bintest"], + named_data = {"Knapsack": ":Knapsack"}, ) diff --git a/ortools/algorithms/samples/KnapsackTest.bintest b/ortools/algorithms/samples/KnapsackTest.bintest new file mode 100644 index 00000000000..759db770f5c --- /dev/null +++ b/ortools/algorithms/samples/KnapsackTest.bintest @@ -0,0 +1 @@ +RUN: $(Knapsack) diff --git a/ortools/constraint_solver/samples/BUILD.bazel b/ortools/constraint_solver/samples/BUILD.bazel index ad8757479f2..fcfe067b7c2 100644 --- a/ortools/constraint_solver/samples/BUILD.bazel +++ b/ortools/constraint_solver/samples/BUILD.bazel @@ -98,3 +98,405 @@ cc_test( "@abseil-cpp//absl/log:globals", ], ) + +cc_test( + name = "simple_routing_program_cc", + size = "medium", + srcs = ["simple_routing_program.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + 
"//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + ], +) + +cc_test( + name = "tsp_cc", + size = "medium", + srcs = ["tsp.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + ], +) + +cc_test( + name = "tsp_circuit_board_cc", + size = "medium", + srcs = ["tsp_circuit_board.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + ], +) + +cc_test( + name = "tsp_cities_cc", + size = "medium", + srcs = ["tsp_cities.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + ], +) + +cc_test( + name = "tsp_cities_routes_cc", + size = "medium", + srcs = ["tsp_cities_routes.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + 
"//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + ], +) + +cc_test( + name = "tsp_distance_matrix_cc", + size = "medium", + srcs = ["tsp_distance_matrix.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + ], +) + +cc_test( + name = "vrp_cc", + size = "medium", + srcs = ["vrp.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + ], +) + +cc_test( + name = "vrp_breaks_cc", + size = "medium", + srcs = ["vrp_breaks.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + "@abseil-cpp//absl/strings", + ], +) + +cc_test( + name = "vrp_capacity_cc", + size = "medium", + srcs = ["vrp_capacity.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + 
"//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + "@protobuf//:duration_cc_proto", + ], +) + +cc_test( + name = "vrp_drop_nodes_cc", + size = "medium", + srcs = ["vrp_drop_nodes.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + "@protobuf//:duration_cc_proto", + ], +) + +cc_test( + name = "vrp_global_span_cc", + size = "medium", + srcs = ["vrp_global_span.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + ], +) + +cc_test( + name = "vrp_initial_routes_cc", + size = "medium", + srcs = ["vrp_initial_routes.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + "@protobuf//:duration_cc_proto", + ], +) + +cc_test( + 
name = "vrp_starts_ends_cc", + size = "medium", + srcs = ["vrp_starts_ends.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + ], +) + +cc_test( + name = "vrp_pickup_delivery_cc", + size = "medium", + srcs = ["vrp_pickup_delivery.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + ], +) + +cc_test( + name = "vrp_pickup_delivery_fifo_cc", + size = "medium", + srcs = ["vrp_pickup_delivery_fifo.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + ], +) + +cc_test( + name = "vrp_pickup_delivery_lifo_cc", + size = "medium", + srcs = ["vrp_pickup_delivery_lifo.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + 
"@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + ], +) + +cc_test( + name = "vrp_resources_cc", + size = "medium", + srcs = ["vrp_resources.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + ], +) + +cc_test( + name = "vrp_routes_cc", + size = "medium", + srcs = ["vrp_routes.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + ], +) + +cc_test( + name = "vrp_solution_callback_cc", + size = "medium", + srcs = ["vrp_solution_callback.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + "@protobuf//:duration_cc_proto", + ], +) + +cc_test( + name = "vrp_time_windows_cc", + size = "medium", + srcs = ["vrp_time_windows.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + 
"//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + ], +) + +cc_test( + name = "vrp_with_time_limit_cc", + size = "medium", + srcs = ["vrp_with_time_limit.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + "@protobuf//:duration_cc_proto", + ], +) + +cc_test( + name = "vrptw_store_solution_data_cc", + size = "medium", + srcs = ["vrptw_store_solution_data.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:flags", + "@abseil-cpp//absl/log:globals", + ], +) diff --git a/ortools/glop/samples/BUILD.bazel b/ortools/glop/samples/BUILD.bazel index cddf4f30b16..7afa51de2e6 100644 --- a/ortools/glop/samples/BUILD.bazel +++ b/ortools/glop/samples/BUILD.bazel @@ -12,7 +12,7 @@ # limitations under the License. 
load("@rules_cc//cc:cc_binary.bzl", "cc_binary") -load("//bazel:run_binary_test.bzl", "run_binary_test") +load("//tools/testing:bintest.bzl", "bintest") package(default_visibility = ["//visibility:public"]) @@ -28,8 +28,9 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "simple_glop_program_cc_test", size = "small", - binary = ":simple_glop_program_cc", + srcs = [":simple_glop_program_cc_test.bintest"], + named_data = {"simple_glop_program_cc": ":simple_glop_program_cc"}, ) diff --git a/ortools/glop/samples/simple_glop_program_cc_test.bintest b/ortools/glop/samples/simple_glop_program_cc_test.bintest new file mode 100644 index 00000000000..d17178feb41 --- /dev/null +++ b/ortools/glop/samples/simple_glop_program_cc_test.bintest @@ -0,0 +1 @@ +RUN: $(simple_glop_program_cc) diff --git a/ortools/graph/samples/AssignmentLinearSumAssignmentTest.bintest b/ortools/graph/samples/AssignmentLinearSumAssignmentTest.bintest new file mode 100644 index 00000000000..d6d7060d7ae --- /dev/null +++ b/ortools/graph/samples/AssignmentLinearSumAssignmentTest.bintest @@ -0,0 +1 @@ +RUN: $(AssignmentLinearSumAssignment) diff --git a/ortools/graph/samples/AssignmentMinFlowTest.bintest b/ortools/graph/samples/AssignmentMinFlowTest.bintest new file mode 100644 index 00000000000..4d3c0911d2a --- /dev/null +++ b/ortools/graph/samples/AssignmentMinFlowTest.bintest @@ -0,0 +1 @@ +RUN: $(AssignmentMinFlow) diff --git a/ortools/graph/samples/BUILD.bazel b/ortools/graph/samples/BUILD.bazel index 37d8c63ff0b..8f96ea4c25c 100644 --- a/ortools/graph/samples/BUILD.bazel +++ b/ortools/graph/samples/BUILD.bazel @@ -16,7 +16,7 @@ load("@rules_cc//cc:cc_binary.bzl", "cc_binary") load("@rules_cc//cc:cc_test.bzl", "cc_test") load("@rules_java//java:java_binary.bzl", "java_binary") load("@rules_python//python:py_test.bzl", "py_test") -load("//bazel:run_binary_test.bzl", "run_binary_test") +load("//tools/testing:bintest.bzl", "bintest") package(default_visibility = ["//visibility:public"]) @@ -52,9 
+52,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "AssignmentLinearSumAssignmentTest", - binary = ":AssignmentLinearSumAssignment", + srcs = [":AssignmentLinearSumAssignmentTest.bintest"], + named_data = {"AssignmentLinearSumAssignment": ":AssignmentLinearSumAssignment"}, ) cc_test( @@ -86,9 +87,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "AssignmentMinFlowTest", - binary = ":AssignmentMinFlow", + srcs = [":AssignmentMinFlowTest.bintest"], + named_data = {"AssignmentMinFlow": ":AssignmentMinFlow"}, ) cc_test( @@ -120,9 +122,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "BalanceMinFlowTest", - binary = ":BalanceMinFlow", + srcs = [":BalanceMinFlowTest.bintest"], + named_data = {"BalanceMinFlow": ":BalanceMinFlow"}, ) cc_test( @@ -157,9 +160,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "SimpleMaxFlowProgramTest", - binary = ":SimpleMaxFlowProgram", + srcs = [":SimpleMaxFlowProgramTest.bintest"], + named_data = {"SimpleMaxFlowProgram": ":SimpleMaxFlowProgram"}, ) cc_test( @@ -194,9 +198,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "SimpleMinCostFlowProgramTest", - binary = ":SimpleMinCostFlowProgram", + srcs = [":SimpleMinCostFlowProgramTest.bintest"], + named_data = {"SimpleMinCostFlowProgram": ":SimpleMinCostFlowProgram"}, ) cc_binary( @@ -209,10 +214,10 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "dijkstra_directed_test", - binary = ":dijkstra_directed", - grep_lines = ["Shortest path length: 8"], + srcs = [":dijkstra_directed_test.bintest"], + named_data = {"dijkstra_directed": ":dijkstra_directed"}, ) cc_binary( @@ -225,10 +230,10 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "dijkstra_undirected_test", - binary = ":dijkstra_undirected", - grep_lines = ["Shortest path length: 4"], + srcs = [":dijkstra_undirected_test.bintest"], + named_data = {"dijkstra_undirected": ":dijkstra_undirected"}, ) cc_binary( @@ -242,14 +247,10 @@ cc_binary( ], ) -run_binary_test( +bintest( name = 
"dijkstra_one_to_all_test", - binary = ":dijkstra_one_to_all", - grep_lines = [ - "Distance to 1: 2", - "Distance to 2: 6", - "Distance to 3: 2", - ], + srcs = [":dijkstra_one_to_all_test.bintest"], + named_data = {"dijkstra_one_to_all": ":dijkstra_one_to_all"}, ) cc_binary( @@ -264,15 +265,10 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "dijkstra_sequential_test", - binary = ":dijkstra_sequential", - grep_lines = [ - "Initial distance: 200", - "Distance_2_4: 2", - "Distance_8_1: 3", - "Distance_3_7: 4", - ], + srcs = [":dijkstra_sequential_test.bintest"], + named_data = {"dijkstra_sequential": ":dijkstra_sequential"}, ) cc_binary( @@ -290,9 +286,10 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "dijkstra_all_pairs_shortest_paths_test", - binary = ":dijkstra_all_pairs_shortest_paths", + srcs = [":dijkstra_all_pairs_shortest_paths_test.bintest"], + named_data = {"dijkstra_all_pairs_shortest_paths": ":dijkstra_all_pairs_shortest_paths"}, ) cc_binary( @@ -305,10 +302,10 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "dag_simple_shortest_path_test", - binary = ":dag_simple_shortest_path", - grep_lines = ["Shortest path length: 2"], + srcs = [":dag_simple_shortest_path_test.bintest"], + named_data = {"dag_simple_shortest_path": ":dag_simple_shortest_path"}, ) cc_binary( @@ -326,10 +323,10 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "dag_shortest_path_one_to_all_test", - binary = ":dag_shortest_path_one_to_all", - grep_lines = ["Length of shortest path to node 4: 2"], + srcs = [":dag_shortest_path_one_to_all_test.bintest"], + named_data = {"dag_shortest_path_one_to_all": ":dag_shortest_path_one_to_all"}, ) cc_binary( @@ -343,15 +340,10 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "dag_shortest_path_sequential_test", - binary = ":dag_shortest_path_sequential", - grep_lines = [ - "Initial distance: 200", - "Distance_2_4: 2", - "Distance_8_1: 100", - "Distance_3_7: 4", - ], + srcs = [":dag_shortest_path_sequential_test.bintest"], + 
named_data = {"dag_shortest_path_sequential": ":dag_shortest_path_sequential"}, ) cc_binary( @@ -367,10 +359,10 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "bfs_directed_test", - binary = ":bfs_directed", - grep_lines = ["Shortest path length (in arcs): 2"], + srcs = [":bfs_directed_test.bintest"], + named_data = {"bfs_directed": ":bfs_directed"}, ) cc_binary( @@ -386,10 +378,10 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "bfs_undirected_test", - binary = ":bfs_undirected", - grep_lines = ["Shortest path length (in arcs): 2"], + srcs = [":bfs_undirected_test.bintest"], + named_data = {"bfs_undirected": ":bfs_undirected"}, ) cc_binary( @@ -405,10 +397,10 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "bfs_one_to_all_test", - binary = ":bfs_one_to_all", - grep_lines = ["Shortest path from 0 to 2 has length: 2"], + srcs = [":bfs_one_to_all_test.bintest"], + named_data = {"bfs_one_to_all": ":bfs_one_to_all"}, ) cc_binary( @@ -425,10 +417,10 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "root_a_tree_test", - binary = ":root_a_tree", - grep_lines = ["Depths:\n 0 -> 2"], + srcs = [":root_a_tree_test.bintest"], + named_data = {"root_a_tree": ":root_a_tree"}, ) cc_binary( @@ -444,10 +436,10 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "rooted_tree_paths_test", - binary = ":rooted_tree_paths", - grep_lines = ["0 -> 4 [0, 1, 4]"], + srcs = [":rooted_tree_paths_test.bintest"], + named_data = {"rooted_tree_paths": ":rooted_tree_paths"}, ) cc_binary( @@ -460,13 +452,10 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "dag_simple_multiple_shortest_paths_test", - binary = ":dag_simple_multiple_shortest_paths", - grep_lines = [ - "#1 shortest path has length: 2", - "#2 shortest path has length: 3", - ], + srcs = [":dag_simple_multiple_shortest_paths_test.bintest"], + named_data = {"dag_simple_multiple_shortest_paths": ":dag_simple_multiple_shortest_paths"}, ) cc_binary( @@ -484,13 +473,10 @@ cc_binary( ], ) -run_binary_test( 
+bintest( name = "dag_multiple_shortest_paths_one_to_all_test", - binary = ":dag_multiple_shortest_paths_one_to_all", - grep_lines = [ - "\t#1 shortest path to node 4 has length: 2", - "\t#2 shortest path to node 4 has length: 3", - ], + srcs = [":dag_multiple_shortest_paths_one_to_all_test.bintest"], + named_data = {"dag_multiple_shortest_paths_one_to_all": ":dag_multiple_shortest_paths_one_to_all"}, ) cc_binary( @@ -504,21 +490,10 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "dag_multiple_shortest_paths_sequential_test", - binary = ":dag_multiple_shortest_paths_sequential", - grep_lines = [ - "\t#1 shortest path has length: 200", - "\t#2 shortest path has length: 202", - "\t#1 shortest path (2, 4) has length: 20", - "\t#2 shortest path (2, 4) has length: 102", - "\t#1 shortest path (8, 1) has length: 101", - "\t#2 shortest path (8, 1) has length: 108", - "\t#1 shortest path (3, 3) has length: 0", - "\t#2 shortest path (3, 3) has length: 112", - "\t#1 shortest path (0, 0) has length: 0", - "\t#2 shortest path (0, 0) has length: 111", - ], + srcs = [":dag_multiple_shortest_paths_sequential_test.bintest"], + named_data = {"dag_multiple_shortest_paths_sequential": ":dag_multiple_shortest_paths_sequential"}, ) cc_binary( @@ -532,10 +507,10 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "dag_simple_constrained_shortest_path_test", - binary = ":dag_simple_constrained_shortest_path", - grep_lines = ["Constrained shortest path length: 4"], + srcs = [":dag_simple_constrained_shortest_path_test.bintest"], + named_data = {"dag_simple_constrained_shortest_path": ":dag_simple_constrained_shortest_path"}, ) cc_binary( @@ -549,13 +524,8 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "dag_constrained_shortest_path_sequential_test", - binary = ":dag_constrained_shortest_path_sequential", - grep_lines = [ - "Initial distance: 200", - "Distance_2_3: 1", - "Distance_8_1: 100", - "Distance_3_7: 100", - ], + srcs = 
[":dag_constrained_shortest_path_sequential_test.bintest"], + named_data = {"dag_constrained_shortest_path_sequential": ":dag_constrained_shortest_path_sequential"}, ) diff --git a/ortools/graph/samples/BalanceMinFlowTest.bintest b/ortools/graph/samples/BalanceMinFlowTest.bintest new file mode 100644 index 00000000000..42a9ecd4e98 --- /dev/null +++ b/ortools/graph/samples/BalanceMinFlowTest.bintest @@ -0,0 +1 @@ +RUN: $(BalanceMinFlow) diff --git a/ortools/graph/samples/SimpleMaxFlowProgramTest.bintest b/ortools/graph/samples/SimpleMaxFlowProgramTest.bintest new file mode 100644 index 00000000000..0634bc265ae --- /dev/null +++ b/ortools/graph/samples/SimpleMaxFlowProgramTest.bintest @@ -0,0 +1 @@ +RUN: $(SimpleMaxFlowProgram) diff --git a/ortools/graph/samples/SimpleMinCostFlowProgramTest.bintest b/ortools/graph/samples/SimpleMinCostFlowProgramTest.bintest new file mode 100644 index 00000000000..f3ca754ec83 --- /dev/null +++ b/ortools/graph/samples/SimpleMinCostFlowProgramTest.bintest @@ -0,0 +1 @@ +RUN: $(SimpleMinCostFlowProgram) diff --git a/ortools/graph/samples/bfs_directed_test.bintest b/ortools/graph/samples/bfs_directed_test.bintest new file mode 100644 index 00000000000..0c243abb199 --- /dev/null +++ b/ortools/graph/samples/bfs_directed_test.bintest @@ -0,0 +1,2 @@ +RUN: $(bfs_directed) +CHECK: "Shortest path length (in arcs): 2" diff --git a/ortools/graph/samples/bfs_one_to_all_test.bintest b/ortools/graph/samples/bfs_one_to_all_test.bintest new file mode 100644 index 00000000000..2831ab5fed7 --- /dev/null +++ b/ortools/graph/samples/bfs_one_to_all_test.bintest @@ -0,0 +1,2 @@ +RUN: $(bfs_one_to_all) +CHECK: "Shortest path from 0 to 2 has length: 2" diff --git a/ortools/graph/samples/bfs_undirected_test.bintest b/ortools/graph/samples/bfs_undirected_test.bintest new file mode 100644 index 00000000000..e8c2534c4cd --- /dev/null +++ b/ortools/graph/samples/bfs_undirected_test.bintest @@ -0,0 +1,2 @@ +RUN: $(bfs_undirected) +CHECK: "Shortest path length (in 
arcs): 2" diff --git a/ortools/graph/samples/dag_constrained_shortest_path_sequential_test.bintest b/ortools/graph/samples/dag_constrained_shortest_path_sequential_test.bintest new file mode 100644 index 00000000000..9af86fd995c --- /dev/null +++ b/ortools/graph/samples/dag_constrained_shortest_path_sequential_test.bintest @@ -0,0 +1,5 @@ +RUN: $(dag_constrained_shortest_path_sequential) +CHECK: "Initial distance: 200" +CHECK: "Distance_2_3: 1" +CHECK: "Distance_8_1: 100" +CHECK: "Distance_3_7: 100" diff --git a/ortools/graph/samples/dag_multiple_shortest_paths_one_to_all_test.bintest b/ortools/graph/samples/dag_multiple_shortest_paths_one_to_all_test.bintest new file mode 100644 index 00000000000..cc9d8cfa7b8 --- /dev/null +++ b/ortools/graph/samples/dag_multiple_shortest_paths_one_to_all_test.bintest @@ -0,0 +1,3 @@ +RUN: $(dag_multiple_shortest_paths_one_to_all) +CHECK: "#1 shortest path to node 4 has length: 2" +CHECK: "#2 shortest path to node 4 has length: 3" diff --git a/ortools/graph/samples/dag_multiple_shortest_paths_sequential_test.bintest b/ortools/graph/samples/dag_multiple_shortest_paths_sequential_test.bintest new file mode 100644 index 00000000000..3f10b3e45fe --- /dev/null +++ b/ortools/graph/samples/dag_multiple_shortest_paths_sequential_test.bintest @@ -0,0 +1,11 @@ +RUN: $(dag_multiple_shortest_paths_sequential) +CHECK: "#1 shortest path has length: 200" +CHECK: "#2 shortest path has length: 202" +CHECK: "#1 shortest path (2, 4) has length: 20" +CHECK: "#2 shortest path (2, 4) has length: 102" +CHECK: "#1 shortest path (8, 1) has length: 101" +CHECK: "#2 shortest path (8, 1) has length: 108" +CHECK: "#1 shortest path (3, 3) has length: 0" +CHECK: "#2 shortest path (3, 3) has length: 112" +CHECK: "#1 shortest path (0, 0) has length: 0" +CHECK: "#2 shortest path (0, 0) has length: 111" diff --git a/ortools/graph/samples/dag_shortest_path_one_to_all_test.bintest b/ortools/graph/samples/dag_shortest_path_one_to_all_test.bintest new file mode 100644 
index 00000000000..d7792066615 --- /dev/null +++ b/ortools/graph/samples/dag_shortest_path_one_to_all_test.bintest @@ -0,0 +1,2 @@ +RUN: $(dag_shortest_path_one_to_all) +CHECK: "Length of shortest path to node 4: 2" diff --git a/ortools/graph/samples/dag_shortest_path_sequential_test.bintest b/ortools/graph/samples/dag_shortest_path_sequential_test.bintest new file mode 100644 index 00000000000..e10fe2767bb --- /dev/null +++ b/ortools/graph/samples/dag_shortest_path_sequential_test.bintest @@ -0,0 +1,5 @@ +RUN: $(dag_shortest_path_sequential) +CHECK: "Initial distance: 200" +CHECK: "Distance_2_4: 2" +CHECK: "Distance_8_1: 100" +CHECK: "Distance_3_7: 4" diff --git a/ortools/graph/samples/dag_simple_constrained_shortest_path_test.bintest b/ortools/graph/samples/dag_simple_constrained_shortest_path_test.bintest new file mode 100644 index 00000000000..40a291cdae1 --- /dev/null +++ b/ortools/graph/samples/dag_simple_constrained_shortest_path_test.bintest @@ -0,0 +1,2 @@ +RUN: $(dag_simple_constrained_shortest_path) +CHECK: "Constrained shortest path length: 4" diff --git a/ortools/graph/samples/dag_simple_multiple_shortest_paths_test.bintest b/ortools/graph/samples/dag_simple_multiple_shortest_paths_test.bintest new file mode 100644 index 00000000000..89e39b96031 --- /dev/null +++ b/ortools/graph/samples/dag_simple_multiple_shortest_paths_test.bintest @@ -0,0 +1,3 @@ +RUN: $(dag_simple_multiple_shortest_paths) +CHECK: "#1 shortest path has length: 2" +CHECK: "#2 shortest path has length: 3" diff --git a/ortools/graph/samples/dag_simple_shortest_path_test.bintest b/ortools/graph/samples/dag_simple_shortest_path_test.bintest new file mode 100644 index 00000000000..37b5c4daf52 --- /dev/null +++ b/ortools/graph/samples/dag_simple_shortest_path_test.bintest @@ -0,0 +1,2 @@ +RUN: $(dag_simple_shortest_path) +CHECK: "Shortest path length: 2" diff --git a/ortools/graph/samples/dijkstra_all_pairs_shortest_paths_test.bintest 
b/ortools/graph/samples/dijkstra_all_pairs_shortest_paths_test.bintest new file mode 100644 index 00000000000..fff8e2c486b --- /dev/null +++ b/ortools/graph/samples/dijkstra_all_pairs_shortest_paths_test.bintest @@ -0,0 +1 @@ +RUN: $(dijkstra_all_pairs_shortest_paths) diff --git a/ortools/graph/samples/dijkstra_directed_test.bintest b/ortools/graph/samples/dijkstra_directed_test.bintest new file mode 100644 index 00000000000..0966f4e0aab --- /dev/null +++ b/ortools/graph/samples/dijkstra_directed_test.bintest @@ -0,0 +1,2 @@ +RUN: $(dijkstra_directed) +CHECK: "Shortest path length: 8" diff --git a/ortools/graph/samples/dijkstra_one_to_all_test.bintest b/ortools/graph/samples/dijkstra_one_to_all_test.bintest new file mode 100644 index 00000000000..7939cd1b476 --- /dev/null +++ b/ortools/graph/samples/dijkstra_one_to_all_test.bintest @@ -0,0 +1,4 @@ +RUN: $(dijkstra_one_to_all) +CHECK: "Distance to 1: 2" +CHECK: "Distance to 2: 6" +CHECK: "Distance to 3: 2" diff --git a/ortools/graph/samples/dijkstra_sequential_test.bintest b/ortools/graph/samples/dijkstra_sequential_test.bintest new file mode 100644 index 00000000000..d32c3dcd283 --- /dev/null +++ b/ortools/graph/samples/dijkstra_sequential_test.bintest @@ -0,0 +1,5 @@ +RUN: $(dijkstra_sequential) +CHECK: "Initial distance: 200" +CHECK: "Distance_2_4: 2" +CHECK: "Distance_8_1: 3" +CHECK: "Distance_3_7: 4" diff --git a/ortools/graph/samples/dijkstra_undirected_test.bintest b/ortools/graph/samples/dijkstra_undirected_test.bintest new file mode 100644 index 00000000000..9daf3d2cde6 --- /dev/null +++ b/ortools/graph/samples/dijkstra_undirected_test.bintest @@ -0,0 +1,2 @@ +RUN: $(dijkstra_undirected) +CHECK: "Shortest path length: 4" diff --git a/ortools/graph/samples/root_a_tree_test.bintest b/ortools/graph/samples/root_a_tree_test.bintest new file mode 100644 index 00000000000..cabeefcf2b5 --- /dev/null +++ b/ortools/graph/samples/root_a_tree_test.bintest @@ -0,0 +1,2 @@ +RUN: $(root_a_tree) +CHECK: "Depths:" "0 -> 2" 
diff --git a/ortools/graph/samples/rooted_tree_paths_test.bintest b/ortools/graph/samples/rooted_tree_paths_test.bintest new file mode 100644 index 00000000000..0e5d8dd32d9 --- /dev/null +++ b/ortools/graph/samples/rooted_tree_paths_test.bintest @@ -0,0 +1,2 @@ +RUN: $(rooted_tree_paths) +CHECK: "0 -> 4 [0, 1, 4]" diff --git a/ortools/linear_solver/samples/AssignmentMbTest.bintest b/ortools/linear_solver/samples/AssignmentMbTest.bintest new file mode 100644 index 00000000000..81907f9146e --- /dev/null +++ b/ortools/linear_solver/samples/AssignmentMbTest.bintest @@ -0,0 +1 @@ +RUN: $(AssignmentMb) diff --git a/ortools/linear_solver/samples/BUILD.bazel b/ortools/linear_solver/samples/BUILD.bazel index c2498d96d9e..dce7e576528 100644 --- a/ortools/linear_solver/samples/BUILD.bazel +++ b/ortools/linear_solver/samples/BUILD.bazel @@ -15,7 +15,7 @@ load("@pip_deps//:requirements.bzl", "requirement") load("@rules_cc//cc:cc_test.bzl", "cc_test") load("@rules_java//java:java_binary.bzl", "java_binary") load("@rules_python//python:py_test.bzl", "py_test") -load("//bazel:run_binary_test.bzl", "run_binary_test") +load("//tools/testing:bintest.bzl", "bintest") package(default_visibility = ["//visibility:public"]) @@ -224,9 +224,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "AssignmentMbTest", - binary = ":AssignmentMb", + srcs = [":AssignmentMbTest.bintest"], + named_data = {"AssignmentMb": ":AssignmentMb"}, ) java_binary( @@ -240,9 +241,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "BinPackingMbTest", - binary = ":BinPackingMb", + srcs = [":BinPackingMbTest.bintest"], + named_data = {"BinPackingMb": ":BinPackingMb"}, ) java_binary( @@ -256,9 +258,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "CloneModelMbTest", - binary = ":CloneModelMb", + srcs = [":CloneModelMbTest.bintest"], + named_data = {"CloneModelMb": ":CloneModelMb"}, ) java_binary( @@ -272,9 +275,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "SimpleLpProgramMbTest", 
- binary = ":SimpleLpProgramMb", + srcs = [":SimpleLpProgramMbTest.bintest"], + named_data = {"SimpleLpProgramMb": ":SimpleLpProgramMb"}, ) java_binary( @@ -288,7 +292,8 @@ java_binary( ], ) -run_binary_test( +bintest( name = "SimpleMipProgramMbTest", - binary = ":SimpleMipProgramMb", + srcs = [":SimpleMipProgramMbTest.bintest"], + named_data = {"SimpleMipProgramMb": ":SimpleMipProgramMb"}, ) diff --git a/ortools/linear_solver/samples/BinPackingMbTest.bintest b/ortools/linear_solver/samples/BinPackingMbTest.bintest new file mode 100644 index 00000000000..cdd153dcbc0 --- /dev/null +++ b/ortools/linear_solver/samples/BinPackingMbTest.bintest @@ -0,0 +1 @@ +RUN: $(BinPackingMb) diff --git a/ortools/linear_solver/samples/CloneModelMbTest.bintest b/ortools/linear_solver/samples/CloneModelMbTest.bintest new file mode 100644 index 00000000000..378e330a0cd --- /dev/null +++ b/ortools/linear_solver/samples/CloneModelMbTest.bintest @@ -0,0 +1 @@ +RUN: $(CloneModelMb) diff --git a/ortools/linear_solver/samples/SimpleLpProgramMbTest.bintest b/ortools/linear_solver/samples/SimpleLpProgramMbTest.bintest new file mode 100644 index 00000000000..663992df3a3 --- /dev/null +++ b/ortools/linear_solver/samples/SimpleLpProgramMbTest.bintest @@ -0,0 +1 @@ +RUN: $(SimpleLpProgramMb) diff --git a/ortools/linear_solver/samples/SimpleMipProgramMbTest.bintest b/ortools/linear_solver/samples/SimpleMipProgramMbTest.bintest new file mode 100644 index 00000000000..332f65ea24d --- /dev/null +++ b/ortools/linear_solver/samples/SimpleMipProgramMbTest.bintest @@ -0,0 +1 @@ +RUN: $(SimpleMipProgramMb) diff --git a/ortools/math_opt/core/c_api/BUILD.bazel b/ortools/math_opt/core/c_api/BUILD.bazel index e77505e7feb..ee31a18d862 100644 --- a/ortools/math_opt/core/c_api/BUILD.bazel +++ b/ortools/math_opt/core/c_api/BUILD.bazel @@ -14,7 +14,7 @@ load("@rules_cc//cc:cc_binary.bzl", "cc_binary") load("@rules_cc//cc:cc_library.bzl", "cc_library") load("@rules_cc//cc:cc_test.bzl", "cc_test") 
-load("//bazel:run_binary_test.bzl", "run_binary_test") +load("//tools/testing:bintest.bzl", "bintest") cc_library( name = "solver", @@ -75,11 +75,8 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "cpp_example_test", - binary = ":cpp_example", - grep_lines = [ - "Termination is optimal: 1", - "Objective value: 1", - ], + srcs = [":cpp_example_test.bintest"], + named_data = {"cpp_example": ":cpp_example"}, ) diff --git a/ortools/math_opt/core/c_api/cpp_example_test.bintest b/ortools/math_opt/core/c_api/cpp_example_test.bintest new file mode 100644 index 00000000000..92cf27cd7b7 --- /dev/null +++ b/ortools/math_opt/core/c_api/cpp_example_test.bintest @@ -0,0 +1,3 @@ +RUN: $(cpp_example) +CHECK: "Termination is optimal: 1" +CHECK: "Objective value: 1" diff --git a/ortools/routing/samples/BUILD.bazel b/ortools/routing/samples/BUILD.bazel index a32d5c89ed2..3fc67f443af 100644 --- a/ortools/routing/samples/BUILD.bazel +++ b/ortools/routing/samples/BUILD.bazel @@ -11,35 +11,117 @@ # See the License for the specific language governing permissions and # limitations under the License. +# Routing utilities, mostly parsers for routing problems. 
+ +load("@rules_cc//cc:cc_binary.bzl", "cc_binary") +load("@rules_cc//cc:cc_test.bzl", "cc_test") +load("//tools/testing:bintest.bzl", "bintest") + +package(default_visibility = ["//visibility:public"]) + +# cvrptw samples +cc_binary( + name = "cvrptw_soft_capacity", + srcs = ["cvrptw_soft_capacity.cc"], + deps = [ + "//ortools/base", + "//ortools/constraint_solver:cp", + "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "//ortools/constraint_solver:routing_parameters_cc_proto", + "//ortools/constraint_solver:routing_types", + "//ortools/routing/parsers:cvrptw_lib", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/flags:flag", + "@abseil-cpp//absl/log:globals", + "@abseil-cpp//absl/random", + "@protobuf", + ], +) + cc_binary( name = "cvrptw", srcs = ["cvrptw.cc"], deps = [ "//ortools/base", + "//ortools/constraint_solver:cp", "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "//ortools/constraint_solver:routing_parameters_cc_proto", + "//ortools/constraint_solver:routing_types", "//ortools/routing/parsers:cvrptw_lib", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/flags:flag", + "@abseil-cpp//absl/log:globals", + "@abseil-cpp//absl/random", + "@protobuf", ], ) +bintest( + name = "cvrptw_test", + size = "large", + srcs = ["cvrptw_test.bintest"], + named_data = {"cvrptw": ":cvrptw"}, +) + cc_binary( name = "cvrp_disjoint_tw", srcs = ["cvrp_disjoint_tw.cc"], deps = [ "//ortools/base", + "//ortools/constraint_solver:cp", "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "//ortools/constraint_solver:routing_parameters_cc_proto", + "//ortools/constraint_solver:routing_types", "//ortools/routing/parsers:cvrptw_lib", + "@abseil-cpp//absl/base:log_severity", 
+ "@abseil-cpp//absl/flags:flag", + "@abseil-cpp//absl/log:globals", + "@abseil-cpp//absl/random", + "@protobuf", ], ) +bintest( + name = "cvrp_disjoint_tw_test", + size = "large", + srcs = ["cvrp_disjoint_tw_test.bintest"], + named_data = {"cvrp_disjoint_tw": ":cvrp_disjoint_tw"}, +) + +# This test is temporarily down because the time dependent functionality is +# being revised. +# bintest( +# name = "cvrptw_with_time_dependent_costs_test", +# srcs = ["cvrptw_with_time_dependent_costs_test.bintest"], +# size = "large", +# named_data = {"cvrptw_with_time_dependent_costs": ":cvrptw_with_time_dependent_costs"}, +# ) + cc_binary( name = "cvrptw_with_breaks", srcs = ["cvrptw_with_breaks.cc"], deps = [ "//ortools/base", + "//ortools/constraint_solver:cp", "//ortools/constraint_solver:routing", "//ortools/constraint_solver:routing_enums_cc_proto", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "//ortools/constraint_solver:routing_parameters_cc_proto", + "//ortools/constraint_solver:routing_types", "//ortools/routing/parsers:cvrptw_lib", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/flags:flag", + "@abseil-cpp//absl/log:globals", + "@abseil-cpp//absl/random", "@abseil-cpp//absl/strings", + "@protobuf", ], ) @@ -48,28 +130,79 @@ cc_binary( srcs = ["cvrptw_with_resources.cc"], deps = [ "//ortools/base", + "//ortools/constraint_solver:cp", "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "//ortools/constraint_solver:routing_parameters_cc_proto", + "//ortools/constraint_solver:routing_types", "//ortools/routing/parsers:cvrptw_lib", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/flags:flag", + "@abseil-cpp//absl/log:globals", + "@abseil-cpp//absl/random", + "@protobuf", ], ) +bintest( + name = "cvrptw_with_resources_test", + size = "large", + srcs = ["cvrptw_with_resources_test.bintest"], 
+ named_data = {"cvrptw_with_resources": ":cvrptw_with_resources"}, +) + cc_binary( name = "cvrptw_with_stop_times_and_resources", srcs = ["cvrptw_with_stop_times_and_resources.cc"], deps = [ "//ortools/base", + "//ortools/constraint_solver:cp", "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "//ortools/constraint_solver:routing_parameters_cc_proto", + "//ortools/constraint_solver:routing_types", "//ortools/routing/parsers:cvrptw_lib", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/flags:flag", + "@abseil-cpp//absl/log:globals", + "@abseil-cpp//absl/random", "@abseil-cpp//absl/strings", + "@protobuf", ], ) +bintest( + name = "cvrptw_with_stop_times_and_resources_test", + size = "large", + srcs = ["cvrptw_with_stop_times_and_resources_test.bintest"], + named_data = {"cvrptw_with_stop_times_and_resources": ":cvrptw_with_stop_times_and_resources"}, +) + cc_binary( name = "cvrptw_with_refueling", srcs = ["cvrptw_with_refueling.cc"], deps = [ "//ortools/base", + "//ortools/constraint_solver:cp", "//ortools/constraint_solver:routing", + "//ortools/constraint_solver:routing_index_manager", + "//ortools/constraint_solver:routing_parameters", + "//ortools/constraint_solver:routing_parameters_cc_proto", + "//ortools/constraint_solver:routing_types", "//ortools/routing/parsers:cvrptw_lib", + "@abseil-cpp//absl/base:log_severity", + "@abseil-cpp//absl/flags:flag", + "@abseil-cpp//absl/log:globals", + "@abseil-cpp//absl/random", + "@protobuf", ], ) + +bintest( + name = "cvrptw_with_refueling_test", + size = "large", + srcs = ["cvrptw_with_refueling_test.bintest"], + named_data = {"cvrptw_with_refueling": ":cvrptw_with_refueling"}, +) diff --git a/ortools/routing/samples/cvrp_disjoint_tw_test.bintest b/ortools/routing/samples/cvrp_disjoint_tw_test.bintest new file mode 100644 index 00000000000..bcd3d72e9e4 --- /dev/null +++ 
b/ortools/routing/samples/cvrp_disjoint_tw_test.bintest @@ -0,0 +1 @@ +RUN: $(cvrp_disjoint_tw) --vrp_use_deterministic_random_seed diff --git a/ortools/routing/samples/cvrptw_test.bintest b/ortools/routing/samples/cvrptw_test.bintest new file mode 100644 index 00000000000..f7c5020d13c --- /dev/null +++ b/ortools/routing/samples/cvrptw_test.bintest @@ -0,0 +1 @@ +RUN: $(cvrptw) --vrp_use_deterministic_random_seed diff --git a/ortools/routing/samples/cvrptw_with_precedences_test.bintest b/ortools/routing/samples/cvrptw_with_precedences_test.bintest new file mode 100644 index 00000000000..4fcb6348339 --- /dev/null +++ b/ortools/routing/samples/cvrptw_with_precedences_test.bintest @@ -0,0 +1 @@ +RUN: $(cvrptw_with_precedences) --vrp_use_deterministic_random_seed diff --git a/ortools/routing/samples/cvrptw_with_refueling_test.bintest b/ortools/routing/samples/cvrptw_with_refueling_test.bintest new file mode 100644 index 00000000000..6fb876f5c82 --- /dev/null +++ b/ortools/routing/samples/cvrptw_with_refueling_test.bintest @@ -0,0 +1 @@ +RUN: $(cvrptw_with_refueling) --vrp_use_deterministic_random_seed --cp_random_seed=144 diff --git a/ortools/routing/samples/cvrptw_with_resources_test.bintest b/ortools/routing/samples/cvrptw_with_resources_test.bintest new file mode 100644 index 00000000000..b699662c6a8 --- /dev/null +++ b/ortools/routing/samples/cvrptw_with_resources_test.bintest @@ -0,0 +1 @@ +RUN: $(cvrptw_with_resources) --vrp_use_deterministic_random_seed diff --git a/ortools/routing/samples/cvrptw_with_stop_times_and_resources_test.bintest b/ortools/routing/samples/cvrptw_with_stop_times_and_resources_test.bintest new file mode 100644 index 00000000000..fe75f294fa6 --- /dev/null +++ b/ortools/routing/samples/cvrptw_with_stop_times_and_resources_test.bintest @@ -0,0 +1 @@ +RUN: $(cvrptw_with_stop_times_and_resources) --vrp_use_deterministic_random_seed diff --git a/ortools/sat/samples/AssignmentGroupsSatTest.bintest 
b/ortools/sat/samples/AssignmentGroupsSatTest.bintest new file mode 100644 index 00000000000..f625fc9c303 --- /dev/null +++ b/ortools/sat/samples/AssignmentGroupsSatTest.bintest @@ -0,0 +1 @@ +RUN: $(AssignmentGroupsSat) diff --git a/ortools/sat/samples/AssignmentSatTest.bintest b/ortools/sat/samples/AssignmentSatTest.bintest new file mode 100644 index 00000000000..5aa64bfc6ca --- /dev/null +++ b/ortools/sat/samples/AssignmentSatTest.bintest @@ -0,0 +1 @@ +RUN: $(AssignmentSat) diff --git a/ortools/sat/samples/AssignmentTaskSizesSatTest.bintest b/ortools/sat/samples/AssignmentTaskSizesSatTest.bintest new file mode 100644 index 00000000000..5f312b1ebc1 --- /dev/null +++ b/ortools/sat/samples/AssignmentTaskSizesSatTest.bintest @@ -0,0 +1 @@ +RUN: $(AssignmentTaskSizesSat) diff --git a/ortools/sat/samples/AssignmentTeamsSatTest.bintest b/ortools/sat/samples/AssignmentTeamsSatTest.bintest new file mode 100644 index 00000000000..cf7d2a1abb1 --- /dev/null +++ b/ortools/sat/samples/AssignmentTeamsSatTest.bintest @@ -0,0 +1 @@ +RUN: $(AssignmentTeamsSat) diff --git a/ortools/sat/samples/AssumptionsSampleSatTest.bintest b/ortools/sat/samples/AssumptionsSampleSatTest.bintest new file mode 100644 index 00000000000..7fad3a5dbd8 --- /dev/null +++ b/ortools/sat/samples/AssumptionsSampleSatTest.bintest @@ -0,0 +1 @@ +RUN: $(AssumptionsSampleSat) diff --git a/ortools/sat/samples/BUILD.bazel b/ortools/sat/samples/BUILD.bazel index c3290ba560c..1402370f79c 100644 --- a/ortools/sat/samples/BUILD.bazel +++ b/ortools/sat/samples/BUILD.bazel @@ -16,7 +16,7 @@ load("@rules_cc//cc:cc_test.bzl", "cc_test") load("@rules_go//go:def.bzl", "go_binary") load("@rules_java//java:java_binary.bzl", "java_binary") load("@rules_python//python:py_test.bzl", "py_test") -load("//bazel:run_binary_test.bzl", "run_binary_test") +load("//tools/testing:bintest.bzl", "bintest") py_test( name = "all_different_except_zero_sample_sat_py3", @@ -1053,9 +1053,10 @@ java_binary( ], ) -run_binary_test( +bintest( name 
= "AssignmentGroupsSatTest", - binary = ":AssignmentGroupsSat", + srcs = ["AssignmentGroupsSatTest.bintest"], + named_data = {"AssignmentGroupsSat": ":AssignmentGroupsSat"}, ) java_binary( @@ -1069,9 +1070,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "AssignmentSatTest", - binary = ":AssignmentSat", + srcs = ["AssignmentSatTest.bintest"], + named_data = {"AssignmentSat": ":AssignmentSat"}, ) java_binary( @@ -1085,9 +1087,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "AssignmentTaskSizesSatTest", - binary = ":AssignmentTaskSizesSat", + srcs = ["AssignmentTaskSizesSatTest.bintest"], + named_data = {"AssignmentTaskSizesSat": ":AssignmentTaskSizesSat"}, ) java_binary( @@ -1101,9 +1104,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "AssignmentTeamsSatTest", - binary = ":AssignmentTeamsSat", + srcs = ["AssignmentTeamsSatTest.bintest"], + named_data = {"AssignmentTeamsSat": ":AssignmentTeamsSat"}, ) java_binary( @@ -1117,9 +1121,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "AssumptionsSampleSatTest", - binary = ":AssumptionsSampleSat", + srcs = ["AssumptionsSampleSatTest.bintest"], + named_data = {"AssumptionsSampleSat": ":AssumptionsSampleSat"}, ) java_binary( @@ -1133,9 +1138,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "BinPackingProblemSatTest", - binary = ":BinPackingProblemSat", + srcs = ["BinPackingProblemSatTest.bintest"], + named_data = {"BinPackingProblemSat": ":BinPackingProblemSat"}, ) java_binary( @@ -1148,9 +1154,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "BoolOrSampleSatTest", - binary = ":BoolOrSampleSat", + srcs = ["BoolOrSampleSatTest.bintest"], + named_data = {"BoolOrSampleSat": ":BoolOrSampleSat"}, ) java_binary( @@ -1165,9 +1172,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "ChannelingSampleSatTest", - binary = ":ChannelingSampleSat", + srcs = ["ChannelingSampleSatTest.bintest"], + named_data = {"ChannelingSampleSat": ":ChannelingSampleSat"}, ) java_binary( 
@@ -1181,9 +1189,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "CloneModelSampleSatTest", - binary = ":CloneModelSampleSat", + srcs = ["CloneModelSampleSatTest.bintest"], + named_data = {"CloneModelSampleSat": ":CloneModelSampleSat"}, ) java_binary( @@ -1197,9 +1206,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "CpIsFunSatTest", - binary = ":CpIsFunSat", + srcs = ["CpIsFunSatTest.bintest"], + named_data = {"CpIsFunSat": ":CpIsFunSat"}, ) java_binary( @@ -1213,9 +1223,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "CpSatExampleTest", - binary = ":CpSatExample", + srcs = ["CpSatExampleTest.bintest"], + named_data = {"CpSatExample": ":CpSatExample"}, ) java_binary( @@ -1230,9 +1241,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "EarlinessTardinessCostSampleSatTest", - binary = ":EarlinessTardinessCostSampleSat", + srcs = ["EarlinessTardinessCostSampleSatTest.bintest"], + named_data = {"EarlinessTardinessCostSampleSat": ":EarlinessTardinessCostSampleSat"}, ) java_binary( @@ -1245,9 +1257,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "IntervalSampleSatTest", - binary = ":IntervalSampleSat", + srcs = ["IntervalSampleSatTest.bintest"], + named_data = {"IntervalSampleSat": ":IntervalSampleSat"}, ) java_binary( @@ -1260,9 +1273,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "LiteralSampleSatTest", - binary = ":LiteralSampleSat", + srcs = ["LiteralSampleSatTest.bintest"], + named_data = {"LiteralSampleSat": ":LiteralSampleSat"}, ) java_binary( @@ -1276,9 +1290,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "MinimalJobshopSatTest", - binary = ":MinimalJobshopSat", + srcs = ["MinimalJobshopSatTest.bintest"], + named_data = {"MinimalJobshopSat": ":MinimalJobshopSat"}, ) java_binary( @@ -1292,9 +1307,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "MultipleKnapsackSatTest", - binary = ":MultipleKnapsackSat", + srcs = ["MultipleKnapsackSatTest.bintest"], + named_data = 
{"MultipleKnapsackSat": ":MultipleKnapsackSat"}, ) java_binary( @@ -1308,9 +1324,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "NQueensSatTest", - binary = ":NQueensSat", + srcs = ["NQueensSatTest.bintest"], + named_data = {"NQueensSat": ":NQueensSat"}, ) java_binary( @@ -1324,9 +1341,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "NoOverlapSampleSatTest", - binary = ":NoOverlapSampleSat", + srcs = ["NoOverlapSampleSatTest.bintest"], + named_data = {"NoOverlapSampleSat": ":NoOverlapSampleSat"}, ) java_binary( @@ -1340,9 +1358,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "NonLinearSatTest", - binary = ":NonLinearSat", + srcs = ["NonLinearSatTest.bintest"], + named_data = {"NonLinearSat": ":NonLinearSat"}, ) java_binary( @@ -1356,9 +1375,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "NursesSatTest", - binary = ":NursesSat", + srcs = ["NursesSatTest.bintest"], + named_data = {"NursesSat": ":NursesSat"}, ) java_binary( @@ -1371,9 +1391,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "OptionalIntervalSampleSatTest", - binary = ":OptionalIntervalSampleSat", + srcs = ["OptionalIntervalSampleSatTest.bintest"], + named_data = {"OptionalIntervalSampleSat": ":OptionalIntervalSampleSat"}, ) java_binary( @@ -1387,9 +1408,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "RabbitsAndPheasantsSatTest", - binary = ":RabbitsAndPheasantsSat", + srcs = ["RabbitsAndPheasantsSatTest.bintest"], + named_data = {"RabbitsAndPheasantsSat": ":RabbitsAndPheasantsSat"}, ) java_binary( @@ -1403,9 +1425,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "RankingSampleSatTest", - binary = ":RankingSampleSat", + srcs = ["RankingSampleSatTest.bintest"], + named_data = {"RankingSampleSat": ":RankingSampleSat"}, ) java_binary( @@ -1418,9 +1441,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "ReifiedSampleSatTest", - binary = ":ReifiedSampleSat", + srcs = ["ReifiedSampleSatTest.bintest"], + named_data = 
{"ReifiedSampleSat": ":ReifiedSampleSat"}, ) java_binary( @@ -1434,9 +1458,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "ScheduleRequestsSatTest", - binary = ":ScheduleRequestsSat", + srcs = ["ScheduleRequestsSatTest.bintest"], + named_data = {"ScheduleRequestsSat": ":ScheduleRequestsSat"}, ) java_binary( @@ -1450,9 +1475,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "SearchForAllSolutionsSampleSatTest", - binary = ":SearchForAllSolutionsSampleSat", + srcs = ["SearchForAllSolutionsSampleSatTest.bintest"], + named_data = {"SearchForAllSolutionsSampleSat": ":SearchForAllSolutionsSampleSat"}, ) java_binary( @@ -1466,9 +1492,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "SimpleSatProgramTest", - binary = ":SimpleSatProgram", + srcs = ["SimpleSatProgramTest.bintest"], + named_data = {"SimpleSatProgram": ":SimpleSatProgram"}, ) java_binary( @@ -1482,9 +1509,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "SolutionHintingSampleSatTest", - binary = ":SolutionHintingSampleSat", + srcs = ["SolutionHintingSampleSatTest.bintest"], + named_data = {"SolutionHintingSampleSat": ":SolutionHintingSampleSat"}, ) java_binary( @@ -1498,9 +1526,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "SolveAndPrintIntermediateSolutionsSampleSatTest", - binary = ":SolveAndPrintIntermediateSolutionsSampleSat", + srcs = ["SolveAndPrintIntermediateSolutionsSampleSatTest.bintest"], + named_data = {"SolveAndPrintIntermediateSolutionsSampleSat": ":SolveAndPrintIntermediateSolutionsSampleSat"}, ) java_binary( @@ -1514,9 +1543,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "SolveWithTimeLimitSampleSatTest", - binary = ":SolveWithTimeLimitSampleSat", + srcs = ["SolveWithTimeLimitSampleSatTest.bintest"], + named_data = {"SolveWithTimeLimitSampleSat": ":SolveWithTimeLimitSampleSat"}, ) java_binary( @@ -1532,9 +1562,10 @@ java_binary( ], ) -run_binary_test( +bintest( name = "StepFunctionSampleSatTest", - binary = 
":StepFunctionSampleSat", + srcs = ["StepFunctionSampleSatTest.bintest"], + named_data = {"StepFunctionSampleSat": ":StepFunctionSampleSat"}, ) java_binary( @@ -1548,7 +1579,8 @@ java_binary( ], ) -run_binary_test( +bintest( name = "StopAfterNSolutionsSampleSatTest", - binary = ":StopAfterNSolutionsSampleSat", + srcs = ["StopAfterNSolutionsSampleSatTest.bintest"], + named_data = {"StopAfterNSolutionsSampleSat": ":StopAfterNSolutionsSampleSat"}, ) diff --git a/ortools/sat/samples/BinPackingProblemSatTest.bintest b/ortools/sat/samples/BinPackingProblemSatTest.bintest new file mode 100644 index 00000000000..9669ea85301 --- /dev/null +++ b/ortools/sat/samples/BinPackingProblemSatTest.bintest @@ -0,0 +1 @@ +RUN: $(BinPackingProblemSat) diff --git a/ortools/sat/samples/BoolOrSampleSatTest.bintest b/ortools/sat/samples/BoolOrSampleSatTest.bintest new file mode 100644 index 00000000000..dbc32c3c743 --- /dev/null +++ b/ortools/sat/samples/BoolOrSampleSatTest.bintest @@ -0,0 +1 @@ +RUN: $(BoolOrSampleSat) diff --git a/ortools/sat/samples/ChannelingSampleSatTest.bintest b/ortools/sat/samples/ChannelingSampleSatTest.bintest new file mode 100644 index 00000000000..9d92868476d --- /dev/null +++ b/ortools/sat/samples/ChannelingSampleSatTest.bintest @@ -0,0 +1 @@ +RUN: $(ChannelingSampleSat) diff --git a/ortools/sat/samples/CloneModelSampleSatTest.bintest b/ortools/sat/samples/CloneModelSampleSatTest.bintest new file mode 100644 index 00000000000..94c93cd09f7 --- /dev/null +++ b/ortools/sat/samples/CloneModelSampleSatTest.bintest @@ -0,0 +1 @@ +RUN: $(CloneModelSampleSat) diff --git a/ortools/sat/samples/CpIsFunSatTest.bintest b/ortools/sat/samples/CpIsFunSatTest.bintest new file mode 100644 index 00000000000..f9162043c9d --- /dev/null +++ b/ortools/sat/samples/CpIsFunSatTest.bintest @@ -0,0 +1 @@ +RUN: $(CpIsFunSat) diff --git a/ortools/sat/samples/CpSatExampleTest.bintest b/ortools/sat/samples/CpSatExampleTest.bintest new file mode 100644 index 00000000000..88f9dd7842a --- 
/dev/null +++ b/ortools/sat/samples/CpSatExampleTest.bintest @@ -0,0 +1 @@ +RUN: $(CpSatExample) diff --git a/ortools/sat/samples/EarlinessTardinessCostSampleSatTest.bintest b/ortools/sat/samples/EarlinessTardinessCostSampleSatTest.bintest new file mode 100644 index 00000000000..776b428f2cb --- /dev/null +++ b/ortools/sat/samples/EarlinessTardinessCostSampleSatTest.bintest @@ -0,0 +1 @@ +RUN: $(EarlinessTardinessCostSampleSat) diff --git a/ortools/sat/samples/IntervalSampleSatTest.bintest b/ortools/sat/samples/IntervalSampleSatTest.bintest new file mode 100644 index 00000000000..41ec2dce98b --- /dev/null +++ b/ortools/sat/samples/IntervalSampleSatTest.bintest @@ -0,0 +1 @@ +RUN: $(IntervalSampleSat) diff --git a/ortools/sat/samples/LiteralSampleSatTest.bintest b/ortools/sat/samples/LiteralSampleSatTest.bintest new file mode 100644 index 00000000000..1c4892f5c9f --- /dev/null +++ b/ortools/sat/samples/LiteralSampleSatTest.bintest @@ -0,0 +1 @@ +RUN: $(LiteralSampleSat) diff --git a/ortools/sat/samples/MinimalJobshopSatTest.bintest b/ortools/sat/samples/MinimalJobshopSatTest.bintest new file mode 100644 index 00000000000..46e31b0b0e8 --- /dev/null +++ b/ortools/sat/samples/MinimalJobshopSatTest.bintest @@ -0,0 +1 @@ +RUN: $(MinimalJobshopSat) diff --git a/ortools/sat/samples/MultipleKnapsackSatTest.bintest b/ortools/sat/samples/MultipleKnapsackSatTest.bintest new file mode 100644 index 00000000000..606d4b4fc05 --- /dev/null +++ b/ortools/sat/samples/MultipleKnapsackSatTest.bintest @@ -0,0 +1 @@ +RUN: $(MultipleKnapsackSat) diff --git a/ortools/sat/samples/NQueensSatTest.bintest b/ortools/sat/samples/NQueensSatTest.bintest new file mode 100644 index 00000000000..8c1aab0fed0 --- /dev/null +++ b/ortools/sat/samples/NQueensSatTest.bintest @@ -0,0 +1 @@ +RUN: $(NQueensSat) diff --git a/ortools/sat/samples/NoOverlapSampleSatTest.bintest b/ortools/sat/samples/NoOverlapSampleSatTest.bintest new file mode 100644 index 00000000000..7937a6d4c6f --- /dev/null +++ 
b/ortools/sat/samples/NoOverlapSampleSatTest.bintest @@ -0,0 +1 @@ +RUN: $(NoOverlapSampleSat) diff --git a/ortools/sat/samples/NonLinearSatTest.bintest b/ortools/sat/samples/NonLinearSatTest.bintest new file mode 100644 index 00000000000..2e6af3f6b73 --- /dev/null +++ b/ortools/sat/samples/NonLinearSatTest.bintest @@ -0,0 +1 @@ +RUN: $(NonLinearSat) diff --git a/ortools/sat/samples/NursesSatTest.bintest b/ortools/sat/samples/NursesSatTest.bintest new file mode 100644 index 00000000000..6509325ad06 --- /dev/null +++ b/ortools/sat/samples/NursesSatTest.bintest @@ -0,0 +1 @@ +RUN: $(NursesSat) diff --git a/ortools/sat/samples/OptionalIntervalSampleSatTest.bintest b/ortools/sat/samples/OptionalIntervalSampleSatTest.bintest new file mode 100644 index 00000000000..d48ffd7ad59 --- /dev/null +++ b/ortools/sat/samples/OptionalIntervalSampleSatTest.bintest @@ -0,0 +1 @@ +RUN: $(OptionalIntervalSampleSat) diff --git a/ortools/sat/samples/RabbitsAndPheasantsSatTest.bintest b/ortools/sat/samples/RabbitsAndPheasantsSatTest.bintest new file mode 100644 index 00000000000..0732a387ddb --- /dev/null +++ b/ortools/sat/samples/RabbitsAndPheasantsSatTest.bintest @@ -0,0 +1 @@ +RUN: $(RabbitsAndPheasantsSat) diff --git a/ortools/sat/samples/RankingSampleSatTest.bintest b/ortools/sat/samples/RankingSampleSatTest.bintest new file mode 100644 index 00000000000..ea24bdd25e5 --- /dev/null +++ b/ortools/sat/samples/RankingSampleSatTest.bintest @@ -0,0 +1 @@ +RUN: $(RankingSampleSat) diff --git a/ortools/sat/samples/ReifiedSampleSatTest.bintest b/ortools/sat/samples/ReifiedSampleSatTest.bintest new file mode 100644 index 00000000000..2c8de55e8ad --- /dev/null +++ b/ortools/sat/samples/ReifiedSampleSatTest.bintest @@ -0,0 +1 @@ +RUN: $(ReifiedSampleSat) diff --git a/ortools/sat/samples/ScheduleRequestsSatTest.bintest b/ortools/sat/samples/ScheduleRequestsSatTest.bintest new file mode 100644 index 00000000000..bfe90f49c9d --- /dev/null +++ b/ortools/sat/samples/ScheduleRequestsSatTest.bintest 
@@ -0,0 +1 @@ +RUN: $(ScheduleRequestsSat) diff --git a/ortools/sat/samples/SearchForAllSolutionsSampleSatTest.bintest b/ortools/sat/samples/SearchForAllSolutionsSampleSatTest.bintest new file mode 100644 index 00000000000..783ddf6d490 --- /dev/null +++ b/ortools/sat/samples/SearchForAllSolutionsSampleSatTest.bintest @@ -0,0 +1 @@ +RUN: $(SearchForAllSolutionsSampleSat) diff --git a/ortools/sat/samples/SimpleSatProgramTest.bintest b/ortools/sat/samples/SimpleSatProgramTest.bintest new file mode 100644 index 00000000000..e55bf14ef70 --- /dev/null +++ b/ortools/sat/samples/SimpleSatProgramTest.bintest @@ -0,0 +1 @@ +RUN: $(SimpleSatProgram) diff --git a/ortools/sat/samples/SolutionHintingSampleSatTest.bintest b/ortools/sat/samples/SolutionHintingSampleSatTest.bintest new file mode 100644 index 00000000000..fef6413c5dc --- /dev/null +++ b/ortools/sat/samples/SolutionHintingSampleSatTest.bintest @@ -0,0 +1 @@ +RUN: $(SolutionHintingSampleSat) diff --git a/ortools/sat/samples/SolveAndPrintIntermediateSolutionsSampleSatTest.bintest b/ortools/sat/samples/SolveAndPrintIntermediateSolutionsSampleSatTest.bintest new file mode 100644 index 00000000000..891ed37e691 --- /dev/null +++ b/ortools/sat/samples/SolveAndPrintIntermediateSolutionsSampleSatTest.bintest @@ -0,0 +1 @@ +RUN: $(SolveAndPrintIntermediateSolutionsSampleSat) diff --git a/ortools/sat/samples/SolveWithTimeLimitSampleSatTest.bintest b/ortools/sat/samples/SolveWithTimeLimitSampleSatTest.bintest new file mode 100644 index 00000000000..049deecb0fe --- /dev/null +++ b/ortools/sat/samples/SolveWithTimeLimitSampleSatTest.bintest @@ -0,0 +1 @@ +RUN: $(SolveWithTimeLimitSampleSat) diff --git a/ortools/sat/samples/StepFunctionSampleSatTest.bintest b/ortools/sat/samples/StepFunctionSampleSatTest.bintest new file mode 100644 index 00000000000..fc98cf7a59c --- /dev/null +++ b/ortools/sat/samples/StepFunctionSampleSatTest.bintest @@ -0,0 +1 @@ +RUN: $(StepFunctionSampleSat) diff --git 
a/ortools/sat/samples/StopAfterNSolutionsSampleSatTest.bintest b/ortools/sat/samples/StopAfterNSolutionsSampleSatTest.bintest new file mode 100644 index 00000000000..75c8a4132ab --- /dev/null +++ b/ortools/sat/samples/StopAfterNSolutionsSampleSatTest.bintest @@ -0,0 +1 @@ +RUN: $(StopAfterNSolutionsSampleSat) diff --git a/ortools/set_cover/samples/BUILD.bazel b/ortools/set_cover/samples/BUILD.bazel index b4515ce2640..ba4284be4a9 100644 --- a/ortools/set_cover/samples/BUILD.bazel +++ b/ortools/set_cover/samples/BUILD.bazel @@ -13,7 +13,7 @@ load("@rules_cc//cc:cc_binary.bzl", "cc_binary") load("@rules_python//python:py_binary.bzl", "py_binary") -load("//bazel:run_binary_test.bzl", "run_binary_test") +load("//tools/testing:bintest.bzl", "bintest") package(default_visibility = ["//visibility:public"]) @@ -31,10 +31,11 @@ cc_binary( ], ) -run_binary_test( +bintest( name = "set_cover_cc_test", size = "small", - binary = ":set_cover_cc", + srcs = [":set_cover_cc_test.bintest"], + named_data = {"set_cover_cc": ":set_cover_cc"}, ) py_binary( @@ -47,8 +48,9 @@ py_binary( ], ) -run_binary_test( +bintest( name = "set_cover_py_test", size = "small", - binary = ":set_cover_py3", + srcs = [":set_cover_py_test.bintest"], + named_data = {"set_cover_py3": ":set_cover_py3"}, ) diff --git a/ortools/set_cover/samples/set_cover_cc_test.bintest b/ortools/set_cover/samples/set_cover_cc_test.bintest new file mode 100644 index 00000000000..a0555f56891 --- /dev/null +++ b/ortools/set_cover/samples/set_cover_cc_test.bintest @@ -0,0 +1 @@ +RUN: $(set_cover_cc) diff --git a/ortools/set_cover/samples/set_cover_py_test.bintest b/ortools/set_cover/samples/set_cover_py_test.bintest new file mode 100644 index 00000000000..5da2026573b --- /dev/null +++ b/ortools/set_cover/samples/set_cover_py_test.bintest @@ -0,0 +1 @@ +RUN: $(set_cover_py3) diff --git a/tools/build/BUILD.bazel b/tools/build/BUILD.bazel new file mode 100644 index 00000000000..6b8d65e59e0 --- /dev/null +++ b/tools/build/BUILD.bazel 
@@ -0,0 +1,26 @@ +# Copyright 2010-2025 Google LLC +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("@pip_deps//:requirements.bzl", "requirement") +load("@rules_python//python:py_binary.bzl", "py_binary") + +package( + default_applicable_licenses = ["//third_party/ortools:license"], + default_visibility = ["//visibility:public"], +) + +py_binary( + name = "bazel2cmake", + srcs = ["bazel2cmake.py"], + deps = [requirement("absl-py")], +) diff --git a/tools/build/bazel2cmake.py b/tools/build/bazel2cmake.py new file mode 100644 index 00000000000..dfaef7f25d3 --- /dev/null +++ b/tools/build/bazel2cmake.py @@ -0,0 +1,273 @@ +#!/usr/bin/env python3 +# Copyright 2010-2025 Google LLC +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Converts bazel BUILD files to CMakeBazel.txt snippets. + +This script processes BUILD.bazel files and generates CMakeBazel.txt files +containing CMake commands that mirror the bazel build rules. 
+ +Starlark is a subset of Python which allows "executing" them as Python code. +This script "executes" the BUILD.bazel file but provides different definitions +for the target we wants to export to CMake. These new definitions are +responsible for exporting writing the CMake equivalent of Bazel commands. +""" + +from collections.abc import Sequence +import dataclasses +import os + +from absl import app + +# The following global variables are used to interact with the "exec" call. +CURRENT_CMAKE_PIECES = None # The generated cmake is appended to this list. +ROOT_FOLDER = os.getcwd() # The project root. +CURRENT_FOLDER = None # The folder of currently processed BUILD.bazel file. +CURRENT_TARGET_PREFIX = None # The prefix to use for exported targets. + + +@dataclasses.dataclass +class Label: + """Helper class to manipulate bazel labels.""" + + path: str + root: str + cmake_root: str + + def __init__(self, label: str): + """Creates a Label object from a string.""" + if label.startswith("//"): + self.path = label[2:].replace(":", "/") + self.root = ROOT_FOLDER + self.cmake_root = "${CMAKE_SOURCE_DIR}" + elif label.startswith(":"): + self.path = label[1:] + self.root = CURRENT_FOLDER + self.cmake_root = "${CMAKE_CURRENT_SOURCE_DIR}" + else: + assert not label.startswith("/") + self.path = label + self.root = CURRENT_FOLDER + self.cmake_root = "${CMAKE_CURRENT_SOURCE_DIR}" + + def is_file(self) -> bool: + """Returns true if the label is a file.""" + return os.path.isfile(os.path.join(self.root, self.path)) + + def is_target(self) -> bool: + """Returns true if the label is a target.""" + return not self.is_file() + + def as_cmake_target(self) -> str: + """Returns the label as a cmake target.""" + assert self.is_target() + return f"$" + + def as_cmake_file(self) -> str: + """Returns the label as a cmake file.""" + assert self.is_file() + return os.path.join(self.cmake_root, self.path) + + def as_cmake(self) -> str: + """Returns the label as a cmake string.""" + return 
self.as_cmake_file() if self.is_file() else self.as_cmake_target() + + def as_target_name(self) -> str: + """Returns the label as a target name.""" + assert self.is_target() + return CURRENT_TARGET_PREFIX + self.path + + +@dataclasses.dataclass +class Attr: + """Helper class to manipulate cmake attributes.""" + + name: str + values: list[str] + + def __init__(self, name: str, *values: str): + """Creates an Attr object from a name and a list of values.""" + self.name = name + self.values = values + + def __str__(self): + return f" {self.name} {" ".join(self.values)}" + + def __bool__(self): + return bool(self.values) + + +def name_attr(name: str) -> Attr: + """Returns a NAME attribute.""" + return Attr("NAME", Label(name).as_target_name()) + + +def sources_attr(srcs: Sequence[str], hdrs: Sequence[str]) -> Attr: + """Returns a SOURCES attribute.""" + values = sorted(srcs + hdrs) + return Attr("SOURCES", *values) + + +def link_libraries_attr(deps: Sequence[str]) -> Attr: + """Returns a LINK_LIBRARIES attribute.""" + values = [] + for dep in deps: + if not dep.startswith(":"): + continue + label = Label(dep) + if label.is_target(): + values.append(label.as_target_name()) + return Attr("LINK_LIBRARIES", *values) + + +def type_attr(value: str) -> Attr: + """Returns a TYPE attribute.""" + return Attr("TYPE", value) + + +def env_attr(named_data: dict[str, str]) -> Attr: + """Returns an ENVIRONMENT attribute.""" + values = [] + for key, target in named_data.items(): + label = Label(target) + values.append(f"BINTEST_{key}={label.as_cmake()}") + return Attr("ENVIRONMENT", *values) + + +def script_attr(script: str) -> Attr: + """Returns a SCRIPT attribute.""" + label = Label(script) + assert label.is_file() + return Attr("SCRIPT", label.as_cmake_file()) + + +def add_call(call: str, attrs: Sequence[Attr]) -> str: + """Adds a cmake call to the current cmake pieces.""" + CURRENT_CMAKE_PIECES.append( + f"""{call}( +{'\n'.join(str(a) for a in filter(None, attrs))} +)""" + ) + + +# 
The functions below are the one replacing the bazel functions. + + +def cc_library( + name, srcs=[], hdrs=[], deps=[], **kwargs +): # pylint: disable=dangerous-default-value + """Adds a cc_library to the current cmake pieces.""" + del kwargs + add_call( + "ortools_cxx_library", + [ + name_attr(name), + sources_attr(srcs, hdrs), + link_libraries_attr(deps), + type_attr("INTERFACE" if not srcs else "SHARED"), + ], + ) + + +def cc_test( + name, srcs=[], hdrs=[], deps=[], **kwargs +): # pylint: disable=dangerous-default-value + """Adds a cc_test to the current cmake pieces.""" + del kwargs + add_call( + "ortools_cxx_test", + [name_attr(name), sources_attr(srcs, hdrs), link_libraries_attr(deps)], + ) + + +def cc_binary( + name, srcs=[], hdrs=[], deps=[], **kwargs +): # pylint: disable=dangerous-default-value + """Adds a cc_binary to the current cmake pieces.""" + del kwargs + add_call( + "ortools_cxx_binary", + [name_attr(name), sources_attr(srcs, hdrs), link_libraries_attr(deps)], + ) + + +def bintest( + name, srcs=[], named_data={}, **kwargs +): # pylint: disable=dangerous-default-value + """Adds a bintest to the current cmake pieces.""" + del kwargs + add_call( + "ortools_cxx_bintest", + [name_attr(name), script_attr(srcs[0]), env_attr(named_data)], + ) + + +# The functions above are the only one accessible when executing the bazel file. +EXEC_GLOBALS = { + "bintest": bintest, + "cc_binary": cc_binary, + "cc_library": cc_library, + "cc_test": cc_test, +} | { + # The function below are ignored and doesn't produce any CMake commands. 
+ name: lambda *kargs, **kwargs: None + for name in [ + # keep sorted go/buildifier#keep-sorted + "build_test", + "cc_proto_library", + "cc_stubby_library", + "java_proto_library", + "java_stubby_library", + "load", + "package", + "proto_library", + "sh_binary", + "sh_test", + ] +} + + +def process_file(prefix: str, file: str): + """Processes a BUILD file and generates a CMakeBazel.txt file.""" + assert os.path.isfile(file) + assert os.path.basename(file) == "BUILD.bazel" + with open(file, "r") as f: + lines = f.read() + global CURRENT_CMAKE_PIECES + CURRENT_CMAKE_PIECES = [] + CURRENT_CMAKE_PIECES.append( + f"# This file is auto generated by bazel2cmake.py from {file}\n" + "# Don't edit manually, your changes will be lost.\n" + "# You can update this file by running:\n" + f"# python3 tools/build/bazel2cmake.py {file}\n" + ) + global CURRENT_FOLDER + CURRENT_FOLDER = os.path.dirname(file) + global CURRENT_TARGET_PREFIX + CURRENT_TARGET_PREFIX = prefix + exec(lines, EXEC_GLOBALS) # pylint: disable=exec-used + output_file = os.path.join(os.path.dirname(file), "CMakeBazel.txt") + with open(output_file, "w") as f: + f.write("\n\n".join(CURRENT_CMAKE_PIECES)) + + +def main(argv: Sequence[str]) -> None: + if len(argv) > 1: + print("bazel2cmake takes no arguments") + return + # TODO: Add more bazel files to autogenerate. + process_file("bzl_cc_example_", "examples/cpp/BUILD.bazel") + + +if __name__ == "__main__": + app.run(main) diff --git a/tools/testing/BUILD.bazel b/tools/testing/BUILD.bazel new file mode 100644 index 00000000000..93fc2058792 --- /dev/null +++ b/tools/testing/BUILD.bazel @@ -0,0 +1,134 @@ +# Copyright 2010-2025 Google LLC +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("@pip_deps//:requirements.bzl", "requirement") +load("@rules_cc//cc:cc_binary.bzl", "cc_binary") +load("@rules_python//python:py_binary.bzl", "py_binary") +load("@rules_python//python:py_library.bzl", "py_library") +load("@rules_python//python:py_test.bzl", "py_test") +load("//tools/testing:bintest.bzl", "bintest", "py_bintest") + +package(default_visibility = ["//visibility:public"]) + +exports_files(["bintest_script_launcher.py"]) + +# The following libraries are used to implement the `bintest` and `py_bintest` rules. + +py_library( + name = "bintest_run_utils", + srcs = ["bintest_run_utils.py"], +) + +py_test( + name = "bintest_run_utils_test", + srcs = ["bintest_run_utils_test.py"], + deps = [ + ":bintest_run_utils", + requirement("absl-py"), + ], +) + +py_library( + name = "bintest_matchers", + srcs = ["bintest_matchers.py"], +) + +py_test( + name = "bintest_matchers_test", + srcs = ["bintest_matchers_test.py"], + deps = [ + ":bintest_matchers", + requirement("absl-py"), + ], +) + +py_library( + name = "binary_test", + srcs = ["binary_test.py"], + deps = [ + ":bintest_matchers", + ":bintest_run_utils", + requirement("absl-py"), + ], +) + +py_library( + name = "bintest_script_runner", + srcs = ["bintest_script_runner.py"], + deps = [ + ":bintest_matchers", + ":bintest_run_utils", + requirement("absl-py"), + ], +) + +py_binary( + name = "bintest_script_launcher", + srcs = ["bintest_script_launcher.py"], + deps = [":bintest_script_runner"], +) + +cc_binary( + name = "echo", + testonly = True, + srcs = ["echo.cc"], +) + +cc_binary( + name = "fail", 
+ testonly = True, + srcs = ["fail.cc"], +) + +py_test( + name = "bintest_script_runner_test", + srcs = ["bintest_script_runner_test.py"], + data = [ + ":echo", + ":fail", + ], + env = { + "BINTEST_ECHO": "$(rootpath :echo)", + "BINTEST_FAIL": "$(rootpath :fail)", + }, + deps = [ + ":bintest_script_runner", + requirement("absl-py"), + ], +) + +# The following targets demonstrate the usage of the `bintest` and `py_bintest` rules. + +cc_binary( + name = "print_args", + srcs = ["print_args.cc"], +) + +bintest( + name = "print_args_bintest", + srcs = ["print_args.bintest"], + named_data = { + "print_args": ":print_args", + "data_file": ":print_args_data.txt", + }, +) + +py_bintest( + name = "print_args_test", + srcs = ["print_args_test.py"], + named_data = { + "print_args": ":print_args", + "data_file": ":print_args_data.txt", + }, + deps = [requirement("absl-py")], +) diff --git a/tools/testing/README.md b/tools/testing/README.md new file mode 100644 index 00000000000..0e2dbfba324 --- /dev/null +++ b/tools/testing/README.md @@ -0,0 +1,230 @@ +# Binary testing + +This folder contains facilities to **test executable files**. +We offer two APIs: + +* `bintest`: A simple scripting language to write simple tests such as + checking execution success or asserting the presence of text or numbers + within bounds, +* `py_bintest`: An extension of the unit testing framework that makes it easy + to invoke the binary under test, extract values from its output, and check + them within the unittest framework. + +## `bintest` + +It offers two commands `RUN:` and `CHECK:`. + +* The `RUN:` command executes the binary and asserts it ran successfully. \ + The passed arguments can use the `$(

-/// Fill a 60x50 rectangle exactly using a minimum number of non-overlapping squares.""" -/// -class CoverRectangleSat -{ - static int sizeX = 60; - static int sizeY = 50; - - static bool CoverRectangle(int numSquares) - { - CpModel model = new CpModel(); - - var areas = new List(); - var sizes = new List(); - var xIntervals = new List(); - var yIntervals = new List(); - var xStarts = new List(); - var yStarts = new List(); - - // Creates intervals for the NoOverlap2D and size variables. - foreach (var i in Enumerable.Range(0, numSquares)) - { - var size = model.NewIntVar(1, sizeY, String.Format("size_{0}", i)); - var startX = model.NewIntVar(0, sizeX, String.Format("startX_{0}", i)); - var endX = model.NewIntVar(0, sizeX, String.Format("endX_{0}", i)); - var startY = model.NewIntVar(0, sizeY, String.Format("startY_{0}", i)); - var endY = model.NewIntVar(0, sizeY, String.Format("endY_{0}", i)); - - var intervalX = model.NewIntervalVar(startX, size, endX, String.Format("intervalX_{0}", i)); - var intervalY = model.NewIntervalVar(startY, size, endY, String.Format("intervalY_{0}", i)); - - var area = model.NewIntVar(1, sizeY * sizeY, String.Format("area_{0}", i)); - model.AddMultiplicationEquality(area, size, size); - - areas.Add(area); - xIntervals.Add(intervalX); - yIntervals.Add(intervalY); - sizes.Add(size); - xStarts.Add(startX); - yStarts.Add(startY); - } - - // Main constraint. - NoOverlap2dConstraint noOverlap2d = model.AddNoOverlap2D(); - foreach (var i in Enumerable.Range(0, numSquares)) - { - noOverlap2d.AddRectangle(xIntervals[i], yIntervals[i]); - } - - // Redundant constraints. - model.AddCumulative(sizeY).AddDemands(xIntervals, sizes); - model.AddCumulative(sizeX).AddDemands(yIntervals, sizes); - - // Forces the rectangle to be exactly covered. - model.Add(LinearExpr.Sum(areas) == sizeX * sizeY); - - // Symmetry breaking 1: sizes are ordered. 
- foreach (var i in Enumerable.Range(0, numSquares - 1)) - { - model.Add(sizes[i] <= sizes[i + 1]); - - // Define same to be true iff sizes[i] == sizes[i + 1] - var same = model.NewBoolVar(""); - model.Add(sizes[i] == sizes[i + 1]).OnlyEnforceIf(same); - model.Add(sizes[i] < sizes[i + 1]).OnlyEnforceIf(same.Not()); - - // Tie break with starts. - model.Add(xStarts[i] <= xStarts[i + 1]).OnlyEnforceIf(same); - } - - // Symmetry breaking 2: first square in one quadrant. - model.Add(xStarts[0] < (sizeX + 1) / 2); - model.Add(yStarts[0] < (sizeY + 1) / 2); - - // Creates a solver and solves. - var solver = new CpSolver(); - solver.StringParameters = "num_search_workers:16, log_search_progress: false, max_time_in_seconds:10"; - var status = solver.Solve(model); - Console.WriteLine(string.Format("{0} found in {1:0.00}s", status, solver.WallTime())); - - // Prints solution. - bool solution_found = status == CpSolverStatus.Optimal || status == CpSolverStatus.Feasible; - if (solution_found) - { - char[][] output = new char [sizeY][]; - foreach (var y in Enumerable.Range(0, sizeY)) - { - - output[y] = new char[sizeX]; - foreach (var x in Enumerable.Range(0, sizeX)) - { - output[y][x] = ' '; - } - } - - foreach (var s in Enumerable.Range(0, numSquares)) - { - int startX = (int)solver.Value(xStarts[s]); - int startY = (int)solver.Value(yStarts[s]); - int size = (int)solver.Value(sizes[s]); - char c = (char)(65 + s); - foreach (var x in Enumerable.Range(startX, size)) - { - foreach (var y in Enumerable.Range(startY, size)) - { - if (output[y][x] != ' ') - { - Console.WriteLine( - string.Format("Error at position x={0} y{1}, found {2}", x, y, output[y][x])); - } - output[y][x] = c; - } - } - } - foreach (var y in Enumerable.Range(0, sizeY)) - { - Console.WriteLine(new String(output[y], 0, sizeX)); - } - } - return solution_found; - } - - static void Main() - { - foreach (int numSquares in Enumerable.Range(1, 15)) - { - Console.WriteLine("Trying with size = {0}", numSquares); - if 
(CoverRectangle(numSquares)) - break; - } - } -} +// Copyright 2010-2025 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +using System; +using System.Collections.Generic; +using System.Linq; +using Google.OrTools.Sat; + +/// +/// Fill a 60x50 rectangle exactly using a minimum number of non-overlapping squares.""" +/// +class CoverRectangleSat +{ + static int sizeX = 60; + static int sizeY = 50; + + static bool CoverRectangle(int numSquares) + { + CpModel model = new CpModel(); + + var areas = new List(); + var sizes = new List(); + var xIntervals = new List(); + var yIntervals = new List(); + var xStarts = new List(); + var yStarts = new List(); + + // Creates intervals for the NoOverlap2D and size variables. 
+ foreach (var i in Enumerable.Range(0, numSquares)) + { + var size = model.NewIntVar(1, sizeY, String.Format("size_{0}", i)); + var startX = model.NewIntVar(0, sizeX, String.Format("startX_{0}", i)); + var endX = model.NewIntVar(0, sizeX, String.Format("endX_{0}", i)); + var startY = model.NewIntVar(0, sizeY, String.Format("startY_{0}", i)); + var endY = model.NewIntVar(0, sizeY, String.Format("endY_{0}", i)); + + var intervalX = model.NewIntervalVar(startX, size, endX, String.Format("intervalX_{0}", i)); + var intervalY = model.NewIntervalVar(startY, size, endY, String.Format("intervalY_{0}", i)); + + var area = model.NewIntVar(1, sizeY * sizeY, String.Format("area_{0}", i)); + model.AddMultiplicationEquality(area, size, size); + + areas.Add(area); + xIntervals.Add(intervalX); + yIntervals.Add(intervalY); + sizes.Add(size); + xStarts.Add(startX); + yStarts.Add(startY); + } + + // Main constraint. + NoOverlap2dConstraint noOverlap2d = model.AddNoOverlap2D(); + foreach (var i in Enumerable.Range(0, numSquares)) + { + noOverlap2d.AddRectangle(xIntervals[i], yIntervals[i]); + } + + // Redundant constraints. + model.AddCumulative(sizeY).AddDemands(xIntervals, sizes); + model.AddCumulative(sizeX).AddDemands(yIntervals, sizes); + + // Forces the rectangle to be exactly covered. + model.Add(LinearExpr.Sum(areas) == sizeX * sizeY); + + // Symmetry breaking 1: sizes are ordered. + foreach (var i in Enumerable.Range(0, numSquares - 1)) + { + model.Add(sizes[i] <= sizes[i + 1]); + + // Define same to be true iff sizes[i] == sizes[i + 1] + var same = model.NewBoolVar(""); + model.Add(sizes[i] == sizes[i + 1]).OnlyEnforceIf(same); + model.Add(sizes[i] < sizes[i + 1]).OnlyEnforceIf(same.Not()); + + // Tie break with starts. + model.Add(xStarts[i] <= xStarts[i + 1]).OnlyEnforceIf(same); + } + + // Symmetry breaking 2: first square in one quadrant. + model.Add(xStarts[0] < (sizeX + 1) / 2); + model.Add(yStarts[0] < (sizeY + 1) / 2); + + // Creates a solver and solves. 
+ var solver = new CpSolver(); + solver.StringParameters = "num_search_workers:16, log_search_progress: false, max_time_in_seconds:10"; + var status = solver.Solve(model); + Console.WriteLine(string.Format("{0} found in {1:0.00}s", status, solver.WallTime())); + + // Prints solution. + bool solution_found = status == CpSolverStatus.Optimal || status == CpSolverStatus.Feasible; + if (solution_found) + { + char[][] output = new char [sizeY][]; + foreach (var y in Enumerable.Range(0, sizeY)) + { + + output[y] = new char[sizeX]; + foreach (var x in Enumerable.Range(0, sizeX)) + { + output[y][x] = ' '; + } + } + + foreach (var s in Enumerable.Range(0, numSquares)) + { + int startX = (int)solver.Value(xStarts[s]); + int startY = (int)solver.Value(yStarts[s]); + int size = (int)solver.Value(sizes[s]); + char c = (char)(65 + s); + foreach (var x in Enumerable.Range(startX, size)) + { + foreach (var y in Enumerable.Range(startY, size)) + { + if (output[y][x] != ' ') + { + Console.WriteLine( + string.Format("Error at position x={0} y{1}, found {2}", x, y, output[y][x])); + } + output[y][x] = c; + } + } + } + foreach (var y in Enumerable.Range(0, sizeY)) + { + Console.WriteLine(new String(output[y], 0, sizeX)); + } + } + return solution_found; + } + + static void Main() + { + foreach (int numSquares in Enumerable.Range(1, 15)) + { + Console.WriteLine("Trying with size = {0}", numSquares); + if (CoverRectangle(numSquares)) + break; + } + } +} diff --git a/examples/dotnet/TaskSchedulingSat.cs b/examples/dotnet/TaskSchedulingSat.cs index bc020f0db71..dddd2bb3975 100644 --- a/examples/dotnet/TaskSchedulingSat.cs +++ b/examples/dotnet/TaskSchedulingSat.cs @@ -1,199 +1,199 @@ -// Copyright 2010-2025 Google LLC -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -using System; -using System.Collections.Generic; -using Google.OrTools.Sat; - -class Job -{ - public Job(List tasks) - { - AlternativeTasks = tasks; - } - public Job Successor { get; set; } - public List AlternativeTasks { get; set; } -} - -class Task -{ - public Task(string name, long duration, long equipment) - { - Name = name; - Duration = duration; - Equipment = equipment; - } - - public string Name { get; set; } - public long StartTime { get; set; } - public long EndTime - { - get { - return StartTime + Duration; - } - } - public long Duration { get; set; } - public long Equipment { get; set; } - - public override string ToString() - { - return Name + " [ " + Equipment + " ]\tstarts: " + StartTime + " ends:" + EndTime + ", duration: " + Duration; - } -} - -class TaskSchedulingSat -{ - public static List myJobList = new List(); - public static Dictionary> tasksToEquipment = new Dictionary>(); - public static Dictionary taskIndexes = new Dictionary(); - - public static void InitTaskList() - { - List taskList = new List(); - taskList.Add(new Task("Job1Task0a", 15, 0)); - taskList.Add(new Task("Job1Task0b", 25, 1)); - taskList.Add(new Task("Job1Task0c", 10, 2)); - myJobList.Add(new Job(taskList)); - - taskList = new List(); - taskList.Add(new Task("Job1Task1a", 25, 0)); - taskList.Add(new Task("Job1Task1b", 30, 1)); - taskList.Add(new Task("Job1Task1c", 40, 2)); - myJobList.Add(new Job(taskList)); - - taskList = new List(); - taskList.Add(new Task("Job1Task2a", 20, 0)); - taskList.Add(new Task("Job1Task2b", 35, 1)); - taskList.Add(new 
Task("Job1Task2c", 10, 2)); - myJobList.Add(new Job(taskList)); - - taskList = new List(); - taskList.Add(new Task("Job2Task0a", 15, 0)); - taskList.Add(new Task("Job2Task0b", 25, 1)); - taskList.Add(new Task("Job2Task0c", 10, 2)); - myJobList.Add(new Job(taskList)); - - taskList = new List(); - taskList.Add(new Task("Job2Task1a", 25, 0)); - taskList.Add(new Task("Job2Task1b", 30, 1)); - taskList.Add(new Task("Job2Task1c", 40, 2)); - myJobList.Add(new Job(taskList)); - - taskList = new List(); - taskList.Add(new Task("Job2Task2a", 20, 0)); - taskList.Add(new Task("Job2Task2b", 35, 1)); - taskList.Add(new Task("Job2Task2c", 10, 2)); - myJobList.Add(new Job(taskList)); - - taskList = new List(); - taskList.Add(new Task("Job3Task0a", 10, 0)); - taskList.Add(new Task("Job3Task0b", 15, 1)); - taskList.Add(new Task("Job3Task0c", 50, 2)); - myJobList.Add(new Job(taskList)); - - taskList = new List(); - taskList.Add(new Task("Job3Task1a", 50, 0)); - taskList.Add(new Task("Job3Task1b", 10, 1)); - taskList.Add(new Task("Job3Task1c", 20, 2)); - myJobList.Add(new Job(taskList)); - - taskList = new List(); - taskList.Add(new Task("Job3Task2a", 65, 0)); - taskList.Add(new Task("Job3Task2b", 5, 1)); - taskList.Add(new Task("Job3Task2c", 15, 2)); - myJobList.Add(new Job(taskList)); - - myJobList[0].Successor = myJobList[1]; - myJobList[1].Successor = myJobList[2]; - myJobList[2].Successor = null; - - myJobList[3].Successor = myJobList[4]; - myJobList[4].Successor = myJobList[5]; - myJobList[5].Successor = null; - - myJobList[6].Successor = myJobList[7]; - myJobList[7].Successor = myJobList[8]; - myJobList[8].Successor = null; - } - - public static int GetTaskCount() - { - int c = 0; - foreach (Job j in myJobList) - foreach (Task t in j.AlternativeTasks) - { - taskIndexes[t.Name] = c; - c++; - } - - return c; - } - - public static int GetEndTaskCount() - { - int c = 0; - foreach (Job j in myJobList) - if (j.Successor == null) - c += j.AlternativeTasks.Count; - return c; - } - - 
static void Main() - { - InitTaskList(); - int taskCount = GetTaskCount(); - - CpModel model = new CpModel(); - - IntervalVar[] tasks = new IntervalVar[taskCount]; - BoolVar[] taskChoosed = new BoolVar[taskCount]; - IntVar[] allEnds = new IntVar[GetEndTaskCount()]; - - int endJobCounter = 0; - foreach (Job j in myJobList) - { - BoolVar[] tmp = new BoolVar[j.AlternativeTasks.Count]; - int i = 0; - foreach (Task t in j.AlternativeTasks) - { - long ti = taskIndexes[t.Name]; - taskChoosed[ti] = model.NewBoolVar(t.Name + "_choose"); - tmp[i++] = taskChoosed[ti]; - IntVar start = model.NewIntVar(0, 10000, t.Name + "_start"); - IntVar end = model.NewIntVar(0, 10000, t.Name + "_end"); - tasks[ti] = model.NewIntervalVar(start, t.Duration, end, t.Name + "_interval"); - if (j.Successor == null) - allEnds[endJobCounter++] = end; - if (!tasksToEquipment.ContainsKey(t.Equipment)) - tasksToEquipment[t.Equipment] = new List(); - tasksToEquipment[t.Equipment].Add(tasks[ti]); - } - model.AddExactlyOne(tmp); - } - - foreach (KeyValuePair> pair in tasksToEquipment) - { - model.AddNoOverlap(pair.Value); - } - - IntVar makespan = model.NewIntVar(0, 100000, "makespan"); - model.AddMaxEquality(makespan, allEnds); - model.Minimize(makespan); - - // Create the solver. - CpSolver solver = new CpSolver(); - // Solve the problem. - solver.Solve(model); - Console.WriteLine(solver.ResponseStats()); - } -} +// Copyright 2010-2025 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +using System; +using System.Collections.Generic; +using Google.OrTools.Sat; + +class Job +{ + public Job(List tasks) + { + AlternativeTasks = tasks; + } + public Job Successor { get; set; } + public List AlternativeTasks { get; set; } +} + +class Task +{ + public Task(string name, long duration, long equipment) + { + Name = name; + Duration = duration; + Equipment = equipment; + } + + public string Name { get; set; } + public long StartTime { get; set; } + public long EndTime + { + get { + return StartTime + Duration; + } + } + public long Duration { get; set; } + public long Equipment { get; set; } + + public override string ToString() + { + return Name + " [ " + Equipment + " ]\tstarts: " + StartTime + " ends:" + EndTime + ", duration: " + Duration; + } +} + +class TaskSchedulingSat +{ + public static List myJobList = new List(); + public static Dictionary> tasksToEquipment = new Dictionary>(); + public static Dictionary taskIndexes = new Dictionary(); + + public static void InitTaskList() + { + List taskList = new List(); + taskList.Add(new Task("Job1Task0a", 15, 0)); + taskList.Add(new Task("Job1Task0b", 25, 1)); + taskList.Add(new Task("Job1Task0c", 10, 2)); + myJobList.Add(new Job(taskList)); + + taskList = new List(); + taskList.Add(new Task("Job1Task1a", 25, 0)); + taskList.Add(new Task("Job1Task1b", 30, 1)); + taskList.Add(new Task("Job1Task1c", 40, 2)); + myJobList.Add(new Job(taskList)); + + taskList = new List(); + taskList.Add(new Task("Job1Task2a", 20, 0)); + taskList.Add(new Task("Job1Task2b", 35, 1)); + taskList.Add(new Task("Job1Task2c", 10, 2)); + myJobList.Add(new Job(taskList)); + + taskList = new List(); + taskList.Add(new Task("Job2Task0a", 15, 0)); + taskList.Add(new Task("Job2Task0b", 25, 1)); + taskList.Add(new Task("Job2Task0c", 10, 2)); + myJobList.Add(new Job(taskList)); + + taskList = new List(); + taskList.Add(new Task("Job2Task1a", 25, 0)); + taskList.Add(new Task("Job2Task1b", 30, 1)); + taskList.Add(new Task("Job2Task1c", 40, 
2)); + myJobList.Add(new Job(taskList)); + + taskList = new List(); + taskList.Add(new Task("Job2Task2a", 20, 0)); + taskList.Add(new Task("Job2Task2b", 35, 1)); + taskList.Add(new Task("Job2Task2c", 10, 2)); + myJobList.Add(new Job(taskList)); + + taskList = new List(); + taskList.Add(new Task("Job3Task0a", 10, 0)); + taskList.Add(new Task("Job3Task0b", 15, 1)); + taskList.Add(new Task("Job3Task0c", 50, 2)); + myJobList.Add(new Job(taskList)); + + taskList = new List(); + taskList.Add(new Task("Job3Task1a", 50, 0)); + taskList.Add(new Task("Job3Task1b", 10, 1)); + taskList.Add(new Task("Job3Task1c", 20, 2)); + myJobList.Add(new Job(taskList)); + + taskList = new List(); + taskList.Add(new Task("Job3Task2a", 65, 0)); + taskList.Add(new Task("Job3Task2b", 5, 1)); + taskList.Add(new Task("Job3Task2c", 15, 2)); + myJobList.Add(new Job(taskList)); + + myJobList[0].Successor = myJobList[1]; + myJobList[1].Successor = myJobList[2]; + myJobList[2].Successor = null; + + myJobList[3].Successor = myJobList[4]; + myJobList[4].Successor = myJobList[5]; + myJobList[5].Successor = null; + + myJobList[6].Successor = myJobList[7]; + myJobList[7].Successor = myJobList[8]; + myJobList[8].Successor = null; + } + + public static int GetTaskCount() + { + int c = 0; + foreach (Job j in myJobList) + foreach (Task t in j.AlternativeTasks) + { + taskIndexes[t.Name] = c; + c++; + } + + return c; + } + + public static int GetEndTaskCount() + { + int c = 0; + foreach (Job j in myJobList) + if (j.Successor == null) + c += j.AlternativeTasks.Count; + return c; + } + + static void Main() + { + InitTaskList(); + int taskCount = GetTaskCount(); + + CpModel model = new CpModel(); + + IntervalVar[] tasks = new IntervalVar[taskCount]; + BoolVar[] taskChoosed = new BoolVar[taskCount]; + IntVar[] allEnds = new IntVar[GetEndTaskCount()]; + + int endJobCounter = 0; + foreach (Job j in myJobList) + { + BoolVar[] tmp = new BoolVar[j.AlternativeTasks.Count]; + int i = 0; + foreach (Task t in 
j.AlternativeTasks) + { + long ti = taskIndexes[t.Name]; + taskChoosed[ti] = model.NewBoolVar(t.Name + "_choose"); + tmp[i++] = taskChoosed[ti]; + IntVar start = model.NewIntVar(0, 10000, t.Name + "_start"); + IntVar end = model.NewIntVar(0, 10000, t.Name + "_end"); + tasks[ti] = model.NewIntervalVar(start, t.Duration, end, t.Name + "_interval"); + if (j.Successor == null) + allEnds[endJobCounter++] = end; + if (!tasksToEquipment.ContainsKey(t.Equipment)) + tasksToEquipment[t.Equipment] = new List(); + tasksToEquipment[t.Equipment].Add(tasks[ti]); + } + model.AddExactlyOne(tmp); + } + + foreach (KeyValuePair> pair in tasksToEquipment) + { + model.AddNoOverlap(pair.Value); + } + + IntVar makespan = model.NewIntVar(0, 100000, "makespan"); + model.AddMaxEquality(makespan, allEnds); + model.Minimize(makespan); + + // Create the solver. + CpSolver solver = new CpSolver(); + // Solve the problem. + solver.Solve(model); + Console.WriteLine(solver.ResponseStats()); + } +} diff --git a/examples/python/testdata/salbp_20_1.alb b/examples/python/testdata/salbp_20_1.alb index 6b780dae89b..e3d93785827 100644 --- a/examples/python/testdata/salbp_20_1.alb +++ b/examples/python/testdata/salbp_20_1.alb @@ -1,51 +1,51 @@ - -20 - - -1000 - - -0,268 - - - -1 142 -2 34 -3 140 -4 214 -5 121 -6 279 -7 50 -8 282 -9 129 -10 175 -11 97 -12 132 -13 107 -14 132 -15 69 -16 169 -17 73 -18 231 -19 120 -20 186 - - -1,6 -2,7 -4,8 -5,9 -6,10 -7,11 -8,12 -10,13 -11,13 -12,14 -12,15 -13,16 -13,17 -13,18 -14,20 -15,19 - - + +20 + + +1000 + + +0,268 + + + +1 142 +2 34 +3 140 +4 214 +5 121 +6 279 +7 50 +8 282 +9 129 +10 175 +11 97 +12 132 +13 107 +14 132 +15 69 +16 169 +17 73 +18 231 +19 120 +20 186 + + +1,6 +2,7 +4,8 +5,9 +6,10 +7,11 +8,12 +10,13 +11,13 +12,14 +12,15 +13,16 +13,17 +13,18 +14,20 +15,19 + + diff --git a/examples/tests/issue33.cs b/examples/tests/issue33.cs index 5c78383a347..2e888ca2072 100644 --- a/examples/tests/issue33.cs +++ b/examples/tests/issue33.cs @@ -1,676 +1,676 @@ -// Authors: 
Johan Wessén -// Copyright 2010-2025 Google LLC -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -using Google.OrTools.ConstraintSolver; -using System.Collections.Generic; -using System.Diagnostics; -using System.IO; -using System.Linq; -using System.Text; -using System.Threading.Tasks; -using System; - -public class Task -{ - public int Id { get; private set; } - public int TaskType { get; private set; } - public int LocationId { get; private set; } - public Dictionary Durations { get; private set; } - public int TaskPosition { get; private set; } - - public Task(int id, int taskType, int locationIndex, int taskPosition, Dictionary durations) - { - Id = id; - TaskType = taskType; - LocationId = locationIndex; - Durations = durations; - TaskPosition = taskPosition; - } - - public Task(int id, int taskType, int locationIndex, int taskPosition) - { - Id = id; - TaskType = taskType; - LocationId = locationIndex; - TaskPosition = taskPosition; - Durations = new Dictionary(); - } -} - -public class WorkLocation -{ - public int Id { get; private set; } - public int NbTasks - { - get { - Debug.Assert(Tasks != null); - return Tasks.Length; - } - set { - Debug.Assert(Tasks == null); - Tasks = new Task[value]; - } - } - public Task[] Tasks { get; private set; } - - public WorkLocation(int index) - { - Id = index; - } -} - -public class Tool -{ - public int Id { get; private set; } - public HashSet TaskTypes { get; set; } - public int[,] TravellingTime { get; set; } - 
public int InitialLocationId { get; set; } - - public Tool(int index, int initialLocation = 0) - { - Id = index; - InitialLocationId = initialLocation; - TaskTypes = new HashSet(); - } - - public void AddTaskType(int t) - { - TaskTypes.Add(t); - } - - public bool CanPerformTaskType(int taskType) - { - return TaskTypes.Contains(taskType); - } -} - -public class FactoryDescription -{ - public Tool[] Tools { get; private set; } - public WorkLocation[] Locations { get; private set; } - - public int NbWorkLocations - { - get { - return Locations.Length; - } - } - public int NbTools - { - get { - return Tools.Length; - } - } - - public int NbTaskPerCycle { get; private set; } - // TaskType go typically from 0 to 6. InspectionType indicates which - // is the TaskType that correspond to Inspection. - public int Inspection { get; private set; } - // All the time within the schedule horizon in which the blast can start. - public long[] InspectionStarts { get; private set; } - - public int Horizon { get; private set; } - - // horizon equal to 2 weeks (in minutes). 
- public FactoryDescription(int nbTools, int nbLocations, int nbTaskPerCycle, int horizon = 14 * 24 * 60) - { - Debug.Assert(nbTools > 0); - Debug.Assert(nbLocations > 0); - Debug.Assert(nbTaskPerCycle > 0); - Debug.Assert(horizon > 0); - NbTaskPerCycle = nbTaskPerCycle; - Inspection = NbTaskPerCycle - 1; - Tools = new Tool[nbTools]; - Horizon = horizon; - for (int i = 0; i < nbTools; i++) - Tools[i] = new Tool(i); - Locations = new WorkLocation[nbLocations]; - for (int i = 0; i < nbLocations; i++) - Locations[i] = new WorkLocation(i); - - InspectionStarts = new long[] { -1, 600, 1200, 1800, 2400, 2800 }; - } - - public Tool[] getToolPerTaskType(int taskType) - { - var elements = from tool in Tools - where tool.CanPerformTaskType(taskType) select tool; - return elements.ToArray(); - } - - public Task[] getFlatTaskList() - { - return (from location in Locations from task in location.Tasks orderby task.Id select task).ToArray(); - } - - public int[] getTaskTypes() - { - return (from location in Locations from task in location.Tasks select task.TaskType).Distinct().ToArray(); - } - - // TODO: This should be enhanced - public void SanityCheck() - { - foreach (Tool tool in Tools) - { - Debug.Assert(tool.TravellingTime.GetLength(0) == NbWorkLocations); - Debug.Assert(tool.TravellingTime.GetLength(1) == NbWorkLocations); - for (int i = 0; i < NbWorkLocations; i++) - Debug.Assert(tool.TravellingTime[i, i] == 0); - } - } -} - -interface DataReader -{ - FactoryDescription FetchData(); -} - -public class SmallSyntheticData : DataReader -{ - public SmallSyntheticData() - { - } - - public FactoryDescription FetchData() - { - // deterministic seed for result reproducibility - Random randomDuration = new Random(2); - - // FactoryDescription(nbTools, nblocations, nbTasks per cycle) - FactoryDescription factoryDescription = new FactoryDescription(5, 4, 3); - - // Travelling time and distance are temporarily identical and they - // are no different for different tools - int[,] 
travellingTime = new int[factoryDescription.NbWorkLocations, factoryDescription.NbWorkLocations]; - for (int i = 0; i < travellingTime.GetLength(0); i++) - { - for (int j = 0; j < travellingTime.GetLength(1); j++) - { - if (i == j) - travellingTime[i, j] = 0; - else - travellingTime[i, j] = (5 * Math.Abs(i - j)) * 10; - } - } - - factoryDescription.Tools[0].AddTaskType(0); - factoryDescription.Tools[1].AddTaskType(0); - factoryDescription.Tools[2].AddTaskType(1); - factoryDescription.Tools[3].AddTaskType(1); - factoryDescription.Tools[4].AddTaskType(2); - factoryDescription.Tools[1].AddTaskType(1); - - foreach (Tool tool in factoryDescription.Tools) - tool.TravellingTime = travellingTime; - - int c = 0; - int nbCyclePerWorkLocation = 2; - int[] boll = new int[100]; - for (int i = 0; i < factoryDescription.NbWorkLocations; i++) - { - factoryDescription.Locations[i].NbTasks = nbCyclePerWorkLocation * factoryDescription.NbTaskPerCycle; - for (int j = 0; j < nbCyclePerWorkLocation; j++) - { - for (int k = 0; k < factoryDescription.NbTaskPerCycle; k++) - { - Task t = new Task(c, k, i, k + j * factoryDescription.NbTaskPerCycle); - - // Filling in tool-dependent durations - Tool[] compatibleTools = factoryDescription.getToolPerTaskType(k); - foreach (Tool tool in compatibleTools) - { - boll[c] = randomDuration.Next(13, 17) * 10; - ; - t.Durations[tool.Id] = boll[c]; - } - factoryDescription.Locations[i].Tasks[t.TaskPosition] = t; - c++; - } - } - } - - factoryDescription.SanityCheck(); - return factoryDescription; - } -} - -public class RandomSelectToolHeuristic : NetDecisionBuilder -{ - private FactoryScheduling factoryScheduling; - private Random rnd; - - public RandomSelectToolHeuristic(FactoryScheduling factoryScheduling, int seed) - { - this.factoryScheduling = factoryScheduling; - // deterministic seed for result reproducibility - this.rnd = new Random(seed); - } - - public override Decision Next(Solver solver) - { - foreach (IntVar var in 
factoryScheduling.SelectedTool) - { - if (!var.Bound()) - { - int min = (int)var.Min(); - int max = (int)var.Max(); - int rndVal = rnd.Next(min, max + 1); - while (!var.Contains(rndVal)) - rndVal = rnd.Next(min, max + 1); - return solver.MakeAssignVariableValue(var, rndVal); - } - } - return null; - } -} - -class TaskAlternative -{ - public Task Task { get; private set; } - public IntVar ToolVar { get; set; } - public List Intervals { get; private set; } - - public TaskAlternative(Task t) - { - Task = t; - Intervals = new List(); - } -} - -public class FactoryScheduling -{ - private FactoryDescription factoryData; - private Solver solver; - - private Task[] tasks; - private int[] taskTypes; - - /* Flat list of all the tasks */ - private TaskAlternative[] taskStructures; - - /* Task per WorkLocation: location2Task[d][i]: the i-th task of the - * d-th location */ - private TaskAlternative[][] location2Task; - - /* Task per Tool: tool2Task[t][i]: the i-th task of the t-th tool. - Note that it does NOT imply that the it will be the i-th - executed. In other words, it should be considered as an unordered - set. Furthermore, tool2Task[t][i] can also be *unperformed* */ - private List[] tool2Task; - - /* All the transition times for the tools. - tool2TransitionTimes[t][i]: the transition time of the t-th tool - from the i-th task to the next */ - private List[] tool2TransitionTimes; - - /* Map between the interval var of a tool to its related task id. 
- toolIntervalVar2TaskId[t][k] = i: in the t-th tool, the k-th - interval var correspond to tasks[i] */ - private List[] toolIntervalVar2TaskId; - - /* Tools per task type: taskType2Tool[tt][t]: the t-th tool capable - * of doing the tt-th task type */ - private List[] taskType2Tool; - - /* For each task which tools is performed upon */ - private List selectedTool; - public List SelectedTool - { - get { - return selectedTool; - } - } - - /* Sequence of task for each tool */ - private SequenceVar[] allToolSequences; - public SequenceVar[] AllToolSequences - { - get { - return allToolSequences; - } - } - - /* Makespan var */ - private IntVar makespan; - - /* Objective */ - private OptimizeVar objective; - - /* maximum horizon */ - private int horizon; - - /* Start & End times of IntervalVars*/ - IntVar[][] startingTimes; - IntVar[][] endTimes; - - public FactoryScheduling(FactoryDescription data) - { - factoryData = data; - } - - private void Init() - { - horizon = factoryData.Horizon; - solver = new Solver("Factory Scheduling"); - tasks = factoryData.getFlatTaskList(); - taskTypes = factoryData.getTaskTypes(); - taskStructures = new TaskAlternative[tasks.Length]; - location2Task = new TaskAlternative[factoryData.NbWorkLocations][]; - tool2Task = new List[factoryData.NbTools]; - toolIntervalVar2TaskId = new List[factoryData.NbTools]; - tool2TransitionTimes = new List[factoryData.NbTools]; - - taskType2Tool = new List[taskTypes.Length]; - selectedTool = new List(); - for (int tt = 0; tt < taskTypes.Length; tt++) - taskType2Tool[tt] = new List(); - - foreach (Tool tool in factoryData.Tools) - foreach (int taskType in tool.TaskTypes) - taskType2Tool[taskType].Add(tool); - for (int d = 0; d < factoryData.NbWorkLocations; d++) - location2Task[d] = new TaskAlternative[factoryData.Locations[d].NbTasks]; - for (int t = 0; t < factoryData.NbTools; t++) - { - tool2Task[t] = new List(); - toolIntervalVar2TaskId[t] = new List(); - tool2TransitionTimes[t] = new List(); - } - - 
allToolSequences = new SequenceVar[factoryData.NbTools - 1]; - - startingTimes = new IntVar[factoryData.NbTools - 1][]; - endTimes = new IntVar[factoryData.NbTools - 1][]; - } - - private void PostTransitionTimeConstraints(int t, bool postTransitionsConstraint = true) - { - Tool tool = factoryData.Tools[t]; - // if it is a inspection, we make sure there are no transitiontimes - if (tool.CanPerformTaskType(factoryData.Inspection)) - tool2TransitionTimes[t].Add(null); - else - { - int[,] tt = tool.TravellingTime; - - SequenceVar seq = allToolSequences[t]; - long s = seq.Size(); - IntVar[] nextLocation = new IntVar[s + 1]; - - // The seq.Next(i) represents the task performed after the i-th - // task in the sequence seq.Next(0) represents the first task - // performed for extracting travelling times we need to get the - // related location In case a task is not performed (seq.Next(i) - // == i), i.e. it's pointing to itself The last performed task - // (or pre-start task, if no tasks are performed) will have - // seq.Next(i) == s + 1 therefore we add a virtual location - // whose travelling time is equal to 0 - // - // NOTE: The index of a SequenceVar are 0..n, but the domain - // range is 1..(n+1), this is due to that the start node = 0 is - // a dummy node, and the node where seq.Next(i) == n+1 is the - // end node - - // Extra elements for the unreachable start node (0), and the - // end node whose next task takes place in a virtual location - int[] taskIndex2locationId = new int[s + 2]; - taskIndex2locationId[0] = -10; - for (int i = 0; i < s; i++) - taskIndex2locationId[i + 1] = tasks[toolIntervalVar2TaskId[t][i]].LocationId; - - // this is the virtual location for unperformed tasks - taskIndex2locationId[s + 1] = factoryData.NbWorkLocations; - - // Build the travelling time matrix with the additional virtual location - int[][] ttWithVirtualLocation = new int [factoryData.NbWorkLocations + 1][]; - for (int d1 = 0; d1 < ttWithVirtualLocation.Length; d1++) - { - 
ttWithVirtualLocation[d1] = new int[factoryData.NbWorkLocations + 1]; - for (int d2 = 0; d2 < ttWithVirtualLocation.Length; d2++) - if (d1 == factoryData.NbWorkLocations) - { - ttWithVirtualLocation[d1][d2] = 0; - } - else - { - ttWithVirtualLocation[d1][d2] = (d2 == factoryData.NbWorkLocations) ? 0 : tt[d1, d2]; - } - } - - for (int i = 0; i < nextLocation.Length; i++) - { - // this is the next-location associated with the i-th task - nextLocation[i] = solver.MakeElement(taskIndex2locationId, seq.Next(i)).Var(); - - int d = (i == 0) ? tool.InitialLocationId : tasks[toolIntervalVar2TaskId[t][i - 1]].LocationId; - if (i == 0) - { - // To be changed - right now we don't have meaningful indata - // of previous location Ugly way of setting initial travel - // time to = 0, as this is how we find common grounds - // between benchmark algorithm and this - tool2TransitionTimes[t].Add( - solver.MakeElement(new int[ttWithVirtualLocation[d].Length], nextLocation[i]).Var()); - } - else - { - tool2TransitionTimes[t].Add(solver.MakeElement(ttWithVirtualLocation[d], nextLocation[i]).Var()); - } - } - - // Extra elements for the unreachable start node (0), and the - // end node whose next task takes place in a virtual location - startingTimes[t] = new IntVar[s + 2]; - endTimes[t] = new IntVar[s + 2]; - - startingTimes[t][0] = solver.MakeIntConst(0); - // Tbd: Set this endtime to the estimated time of finishing - // previous task for the current tool - endTimes[t][0] = solver.MakeIntConst(0); - - for (int i = 0; i < s; i++) - { - startingTimes[t][i + 1] = tool2Task[t][i].SafeStartExpr(-1).Var(); - endTimes[t][i + 1] = tool2Task[t][i].SafeEndExpr(-1).Var(); - } - startingTimes[t][s + 1] = solver.MakeIntConst(factoryData.Horizon); - endTimes[t][s + 1] = solver.MakeIntConst(factoryData.Horizon); - - // Enforce (or not) that each task is separated by the - // transition time to the next task - for (int i = 0; i < nextLocation.Length; i++) - { - IntVar nextStart = 
solver.MakeElement(startingTimes[t], seq.Next(i).Var()).Var(); - if (postTransitionsConstraint) - solver.Add(endTimes[t][i] + tool2TransitionTimes[t][i] <= nextStart); - } - } - } - - private void Model() - { - /* Building basic task data structures */ - for (int i = 0; i < tasks.Length; i++) - { - /* Create a new set of possible IntervalVars & IntVar to decide - * which one (and only 1) is performed */ - taskStructures[i] = new TaskAlternative(tasks[i]); - - /* Container to use when posting constraints */ - location2Task[tasks[i].LocationId][tasks[i].TaskPosition] = taskStructures[i]; - - /* Get task type */ - int taskType = tasks[i].TaskType; - - /* Possible tool for this task */ - List tools = taskType2Tool[taskType]; - bool optional = tools.Count > 1; - - /* List of boolean variables. If performedOnTool[t] == true then - * the task is performed on tool t */ - List performedOnTool = new List(); - for (int t = 0; t < tools.Count; t++) - { - /* Creating an IntervalVar. If tools.Count > 1 the intervalVar - * is *OPTIONAL* */ - int toolId = tools[t].Id; - Debug.Assert(tasks[i].Durations.ContainsKey(toolId)); - int duration = tasks[i].Durations[toolId]; - string name = "J " + tasks[i].Id + " [" + toolId + "]"; - - IntervalVar intervalVar; - if (taskType == factoryData.Inspection) - { - /* We set a 0 time if the task is an inspection */ - duration = 0; - intervalVar = solver.MakeFixedDurationIntervalVar(0, horizon, duration, optional, name); - IntVar start = intervalVar.SafeStartExpr(-1).Var(); - - intervalVar.SafeStartExpr(-1).Var().SetValues(factoryData.InspectionStarts); - } - else - { - intervalVar = solver.MakeFixedDurationIntervalVar(0, horizon, duration, optional, name); - } - - taskStructures[i].Intervals.Add(intervalVar); - tool2Task[toolId].Add(intervalVar); - toolIntervalVar2TaskId[toolId].Add(i); - - /* Collecting all the bool vars, even if they are optional */ - performedOnTool.Add(intervalVar.PerformedExpr().Var()); - } - - /* Linking the bool var to a 
single integer variable: */ - /* if alternativeToolVar == t <=> performedOnTool[t] == true */ - string alternativeName = "J " + tasks[i].Id; - IntVar alternativeToolVar = solver.MakeIntVar(0, tools.Count - 1, alternativeName); - taskStructures[i].ToolVar = alternativeToolVar; - - solver.Add(solver.MakeMapDomain(alternativeToolVar, performedOnTool.ToArray())); - Debug.Assert(performedOnTool.ToArray().Length == alternativeToolVar.Max() + 1); - - selectedTool.Add(alternativeToolVar); - } - - /* Creates precedences on a work Location in order to enforce a - * fully ordered set within the same location - */ - for (int d = 0; d < location2Task.Length; d++) - { - for (int i = 0; i < location2Task[d].Length - 1; i++) - { - TaskAlternative task1 = location2Task[d][i]; - TaskAlternative task2 = location2Task[d][i + 1]; - /* task1 must end before task2 starts */ - /* Adding precedence for each possible alternative pair */ - for (int t1 = 0; t1 < task1.Intervals.Count(); t1++) - { - IntervalVar task1Alternative = task1.Intervals[t1]; - for (int t2 = 0; t2 < task2.Intervals.Count(); t2++) - { - IntervalVar task2Alternative = task2.Intervals[t2]; - Constraint precedence = - solver.MakeIntervalVarRelation(task2Alternative, Solver.STARTS_AFTER_END, task1Alternative); - solver.Add(precedence); - } - } - } - } - - /* Adds disjunctive constraints on unary resources, and creates - * sequence variables. 
*/ - for (int t = 0; t < factoryData.NbTools; t++) - { - string name = "Tool " + t; - - if (!factoryData.Tools[t].CanPerformTaskType(factoryData.Inspection)) - { - DisjunctiveConstraint ct = solver.MakeDisjunctiveConstraint(tool2Task[t].ToArray(), name); - solver.Add(ct); - allToolSequences[t] = ct.SequenceVar(); - } - PostTransitionTimeConstraints(t, true); - } - - /* Collecting all tasks end for makespan objective function */ - List intervalEnds = new List(); - for (int i = 0; i < tasks.Length; i++) - foreach (IntervalVar var in taskStructures[i].Intervals) - intervalEnds.Add(var.SafeEndExpr(-1).Var()); - - /* Objective: minimize the makespan (maximum end times of all tasks) */ - makespan = solver.MakeMax(intervalEnds.ToArray()).Var(); - objective = solver.MakeMinimize(makespan, 1); - } - - private void Search() - { - int seed = 2; // This is a good seed to show the crash - - /* Assigning first tools */ - DecisionBuilder myToolAssignmentPhase = new RandomSelectToolHeuristic(this, seed); - - /* Ranking of the tools */ - DecisionBuilder sequencingPhase = solver.MakePhase(allToolSequences, Solver.SEQUENCE_DEFAULT); - - /* Then fixing time of tasks as early as possible */ - DecisionBuilder timingPhase = solver.MakePhase(makespan, Solver.CHOOSE_FIRST_UNBOUND, Solver.ASSIGN_MIN_VALUE); - - /* Overall phase */ - DecisionBuilder mainPhase = solver.Compose(myToolAssignmentPhase, sequencingPhase, timingPhase); - - /* Logging */ - const int logFrequency = 1000000; - SearchMonitor searchLog = solver.MakeSearchLog(logFrequency, objective); - - /* Restarts */ - SearchMonitor searchRestart = solver.MakeLubyRestart(100); - - /* Search Limit in ms */ - SearchLimit limit = solver.MakeTimeLimit(180 * 1000); - - /* Collecting best solution */ - SolutionCollector collector = solver.MakeLastSolutionCollector(); - collector.AddObjective(makespan); - - // collector.Add( pile.ToArray() ); - solver.NewSearch(mainPhase, searchLog, searchRestart, objective, limit); - while 
(solver.NextSolution()) - { - Console.WriteLine("MAKESPAN: " + makespan.Value()); - } - } - - public void Solve() - { - Init(); - Model(); - Search(); - } -} - -public class Issue33Test -{ - public static void FactorySchedulingTest() - { - FactoryScheduling scheduling = new FactoryScheduling(new SmallSyntheticData().FetchData()); - scheduling.Solve(); - } - static void Main() - { - FactorySchedulingTest(); - } -} +// Authors: Johan Wessén +// Copyright 2010-2025 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +using Google.OrTools.ConstraintSolver; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading.Tasks; +using System; + +public class Task +{ + public int Id { get; private set; } + public int TaskType { get; private set; } + public int LocationId { get; private set; } + public Dictionary Durations { get; private set; } + public int TaskPosition { get; private set; } + + public Task(int id, int taskType, int locationIndex, int taskPosition, Dictionary durations) + { + Id = id; + TaskType = taskType; + LocationId = locationIndex; + Durations = durations; + TaskPosition = taskPosition; + } + + public Task(int id, int taskType, int locationIndex, int taskPosition) + { + Id = id; + TaskType = taskType; + LocationId = locationIndex; + TaskPosition = taskPosition; + Durations = new Dictionary(); + } +} + +public class WorkLocation +{ + public int Id { get; private set; } + public int NbTasks + { + get { + Debug.Assert(Tasks != null); + return Tasks.Length; + } + set { + Debug.Assert(Tasks == null); + Tasks = new Task[value]; + } + } + public Task[] Tasks { get; private set; } + + public WorkLocation(int index) + { + Id = index; + } +} + +public class Tool +{ + public int Id { get; private set; } + public HashSet TaskTypes { get; set; } + public int[,] TravellingTime { get; set; } + public int InitialLocationId { get; set; } + + public Tool(int index, int initialLocation = 0) + { + Id = index; + InitialLocationId = initialLocation; + TaskTypes = new HashSet(); + } + + public void AddTaskType(int t) + { + TaskTypes.Add(t); + } + + public bool CanPerformTaskType(int taskType) + { + return TaskTypes.Contains(taskType); + } +} + +public class FactoryDescription +{ + public Tool[] Tools { get; private set; } + public WorkLocation[] Locations { get; private set; } + + public int NbWorkLocations + { + get { + return Locations.Length; + } + } + public int NbTools + { + get { + return 
Tools.Length; + } + } + + public int NbTaskPerCycle { get; private set; } + // TaskType go typically from 0 to 6. InspectionType indicates which + // is the TaskType that correspond to Inspection. + public int Inspection { get; private set; } + // All the time within the schedule horizon in which the blast can start. + public long[] InspectionStarts { get; private set; } + + public int Horizon { get; private set; } + + // horizon equal to 2 weeks (in minutes). + public FactoryDescription(int nbTools, int nbLocations, int nbTaskPerCycle, int horizon = 14 * 24 * 60) + { + Debug.Assert(nbTools > 0); + Debug.Assert(nbLocations > 0); + Debug.Assert(nbTaskPerCycle > 0); + Debug.Assert(horizon > 0); + NbTaskPerCycle = nbTaskPerCycle; + Inspection = NbTaskPerCycle - 1; + Tools = new Tool[nbTools]; + Horizon = horizon; + for (int i = 0; i < nbTools; i++) + Tools[i] = new Tool(i); + Locations = new WorkLocation[nbLocations]; + for (int i = 0; i < nbLocations; i++) + Locations[i] = new WorkLocation(i); + + InspectionStarts = new long[] { -1, 600, 1200, 1800, 2400, 2800 }; + } + + public Tool[] getToolPerTaskType(int taskType) + { + var elements = from tool in Tools + where tool.CanPerformTaskType(taskType) select tool; + return elements.ToArray(); + } + + public Task[] getFlatTaskList() + { + return (from location in Locations from task in location.Tasks orderby task.Id select task).ToArray(); + } + + public int[] getTaskTypes() + { + return (from location in Locations from task in location.Tasks select task.TaskType).Distinct().ToArray(); + } + + // TODO: This should be enhanced + public void SanityCheck() + { + foreach (Tool tool in Tools) + { + Debug.Assert(tool.TravellingTime.GetLength(0) == NbWorkLocations); + Debug.Assert(tool.TravellingTime.GetLength(1) == NbWorkLocations); + for (int i = 0; i < NbWorkLocations; i++) + Debug.Assert(tool.TravellingTime[i, i] == 0); + } + } +} + +interface DataReader +{ + FactoryDescription FetchData(); +} + +public class 
SmallSyntheticData : DataReader +{ + public SmallSyntheticData() + { + } + + public FactoryDescription FetchData() + { + // deterministic seed for result reproducibility + Random randomDuration = new Random(2); + + // FactoryDescription(nbTools, nblocations, nbTasks per cycle) + FactoryDescription factoryDescription = new FactoryDescription(5, 4, 3); + + // Travelling time and distance are temporarily identical and they + // are no different for different tools + int[,] travellingTime = new int[factoryDescription.NbWorkLocations, factoryDescription.NbWorkLocations]; + for (int i = 0; i < travellingTime.GetLength(0); i++) + { + for (int j = 0; j < travellingTime.GetLength(1); j++) + { + if (i == j) + travellingTime[i, j] = 0; + else + travellingTime[i, j] = (5 * Math.Abs(i - j)) * 10; + } + } + + factoryDescription.Tools[0].AddTaskType(0); + factoryDescription.Tools[1].AddTaskType(0); + factoryDescription.Tools[2].AddTaskType(1); + factoryDescription.Tools[3].AddTaskType(1); + factoryDescription.Tools[4].AddTaskType(2); + factoryDescription.Tools[1].AddTaskType(1); + + foreach (Tool tool in factoryDescription.Tools) + tool.TravellingTime = travellingTime; + + int c = 0; + int nbCyclePerWorkLocation = 2; + int[] boll = new int[100]; + for (int i = 0; i < factoryDescription.NbWorkLocations; i++) + { + factoryDescription.Locations[i].NbTasks = nbCyclePerWorkLocation * factoryDescription.NbTaskPerCycle; + for (int j = 0; j < nbCyclePerWorkLocation; j++) + { + for (int k = 0; k < factoryDescription.NbTaskPerCycle; k++) + { + Task t = new Task(c, k, i, k + j * factoryDescription.NbTaskPerCycle); + + // Filling in tool-dependent durations + Tool[] compatibleTools = factoryDescription.getToolPerTaskType(k); + foreach (Tool tool in compatibleTools) + { + boll[c] = randomDuration.Next(13, 17) * 10; + ; + t.Durations[tool.Id] = boll[c]; + } + factoryDescription.Locations[i].Tasks[t.TaskPosition] = t; + c++; + } + } + } + + factoryDescription.SanityCheck(); + return 
factoryDescription; + } +} + +public class RandomSelectToolHeuristic : NetDecisionBuilder +{ + private FactoryScheduling factoryScheduling; + private Random rnd; + + public RandomSelectToolHeuristic(FactoryScheduling factoryScheduling, int seed) + { + this.factoryScheduling = factoryScheduling; + // deterministic seed for result reproducibility + this.rnd = new Random(seed); + } + + public override Decision Next(Solver solver) + { + foreach (IntVar var in factoryScheduling.SelectedTool) + { + if (!var.Bound()) + { + int min = (int)var.Min(); + int max = (int)var.Max(); + int rndVal = rnd.Next(min, max + 1); + while (!var.Contains(rndVal)) + rndVal = rnd.Next(min, max + 1); + return solver.MakeAssignVariableValue(var, rndVal); + } + } + return null; + } +} + +class TaskAlternative +{ + public Task Task { get; private set; } + public IntVar ToolVar { get; set; } + public List Intervals { get; private set; } + + public TaskAlternative(Task t) + { + Task = t; + Intervals = new List(); + } +} + +public class FactoryScheduling +{ + private FactoryDescription factoryData; + private Solver solver; + + private Task[] tasks; + private int[] taskTypes; + + /* Flat list of all the tasks */ + private TaskAlternative[] taskStructures; + + /* Task per WorkLocation: location2Task[d][i]: the i-th task of the + * d-th location */ + private TaskAlternative[][] location2Task; + + /* Task per Tool: tool2Task[t][i]: the i-th task of the t-th tool. + Note that it does NOT imply that the it will be the i-th + executed. In other words, it should be considered as an unordered + set. Furthermore, tool2Task[t][i] can also be *unperformed* */ + private List[] tool2Task; + + /* All the transition times for the tools. + tool2TransitionTimes[t][i]: the transition time of the t-th tool + from the i-th task to the next */ + private List[] tool2TransitionTimes; + + /* Map between the interval var of a tool to its related task id. 
+ toolIntervalVar2TaskId[t][k] = i: in the t-th tool, the k-th + interval var correspond to tasks[i] */ + private List[] toolIntervalVar2TaskId; + + /* Tools per task type: taskType2Tool[tt][t]: the t-th tool capable + * of doing the tt-th task type */ + private List[] taskType2Tool; + + /* For each task which tools is performed upon */ + private List selectedTool; + public List SelectedTool + { + get { + return selectedTool; + } + } + + /* Sequence of task for each tool */ + private SequenceVar[] allToolSequences; + public SequenceVar[] AllToolSequences + { + get { + return allToolSequences; + } + } + + /* Makespan var */ + private IntVar makespan; + + /* Objective */ + private OptimizeVar objective; + + /* maximum horizon */ + private int horizon; + + /* Start & End times of IntervalVars*/ + IntVar[][] startingTimes; + IntVar[][] endTimes; + + public FactoryScheduling(FactoryDescription data) + { + factoryData = data; + } + + private void Init() + { + horizon = factoryData.Horizon; + solver = new Solver("Factory Scheduling"); + tasks = factoryData.getFlatTaskList(); + taskTypes = factoryData.getTaskTypes(); + taskStructures = new TaskAlternative[tasks.Length]; + location2Task = new TaskAlternative[factoryData.NbWorkLocations][]; + tool2Task = new List[factoryData.NbTools]; + toolIntervalVar2TaskId = new List[factoryData.NbTools]; + tool2TransitionTimes = new List[factoryData.NbTools]; + + taskType2Tool = new List[taskTypes.Length]; + selectedTool = new List(); + for (int tt = 0; tt < taskTypes.Length; tt++) + taskType2Tool[tt] = new List(); + + foreach (Tool tool in factoryData.Tools) + foreach (int taskType in tool.TaskTypes) + taskType2Tool[taskType].Add(tool); + for (int d = 0; d < factoryData.NbWorkLocations; d++) + location2Task[d] = new TaskAlternative[factoryData.Locations[d].NbTasks]; + for (int t = 0; t < factoryData.NbTools; t++) + { + tool2Task[t] = new List(); + toolIntervalVar2TaskId[t] = new List(); + tool2TransitionTimes[t] = new List(); + } + + 
allToolSequences = new SequenceVar[factoryData.NbTools - 1]; + + startingTimes = new IntVar[factoryData.NbTools - 1][]; + endTimes = new IntVar[factoryData.NbTools - 1][]; + } + + private void PostTransitionTimeConstraints(int t, bool postTransitionsConstraint = true) + { + Tool tool = factoryData.Tools[t]; + // if it is a inspection, we make sure there are no transitiontimes + if (tool.CanPerformTaskType(factoryData.Inspection)) + tool2TransitionTimes[t].Add(null); + else + { + int[,] tt = tool.TravellingTime; + + SequenceVar seq = allToolSequences[t]; + long s = seq.Size(); + IntVar[] nextLocation = new IntVar[s + 1]; + + // The seq.Next(i) represents the task performed after the i-th + // task in the sequence seq.Next(0) represents the first task + // performed for extracting travelling times we need to get the + // related location In case a task is not performed (seq.Next(i) + // == i), i.e. it's pointing to itself The last performed task + // (or pre-start task, if no tasks are performed) will have + // seq.Next(i) == s + 1 therefore we add a virtual location + // whose travelling time is equal to 0 + // + // NOTE: The index of a SequenceVar are 0..n, but the domain + // range is 1..(n+1), this is due to that the start node = 0 is + // a dummy node, and the node where seq.Next(i) == n+1 is the + // end node + + // Extra elements for the unreachable start node (0), and the + // end node whose next task takes place in a virtual location + int[] taskIndex2locationId = new int[s + 2]; + taskIndex2locationId[0] = -10; + for (int i = 0; i < s; i++) + taskIndex2locationId[i + 1] = tasks[toolIntervalVar2TaskId[t][i]].LocationId; + + // this is the virtual location for unperformed tasks + taskIndex2locationId[s + 1] = factoryData.NbWorkLocations; + + // Build the travelling time matrix with the additional virtual location + int[][] ttWithVirtualLocation = new int [factoryData.NbWorkLocations + 1][]; + for (int d1 = 0; d1 < ttWithVirtualLocation.Length; d1++) + { + 
ttWithVirtualLocation[d1] = new int[factoryData.NbWorkLocations + 1]; + for (int d2 = 0; d2 < ttWithVirtualLocation.Length; d2++) + if (d1 == factoryData.NbWorkLocations) + { + ttWithVirtualLocation[d1][d2] = 0; + } + else + { + ttWithVirtualLocation[d1][d2] = (d2 == factoryData.NbWorkLocations) ? 0 : tt[d1, d2]; + } + } + + for (int i = 0; i < nextLocation.Length; i++) + { + // this is the next-location associated with the i-th task + nextLocation[i] = solver.MakeElement(taskIndex2locationId, seq.Next(i)).Var(); + + int d = (i == 0) ? tool.InitialLocationId : tasks[toolIntervalVar2TaskId[t][i - 1]].LocationId; + if (i == 0) + { + // To be changed - right now we don't have meaningful indata + // of previous location Ugly way of setting initial travel + // time to = 0, as this is how we find common grounds + // between benchmark algorithm and this + tool2TransitionTimes[t].Add( + solver.MakeElement(new int[ttWithVirtualLocation[d].Length], nextLocation[i]).Var()); + } + else + { + tool2TransitionTimes[t].Add(solver.MakeElement(ttWithVirtualLocation[d], nextLocation[i]).Var()); + } + } + + // Extra elements for the unreachable start node (0), and the + // end node whose next task takes place in a virtual location + startingTimes[t] = new IntVar[s + 2]; + endTimes[t] = new IntVar[s + 2]; + + startingTimes[t][0] = solver.MakeIntConst(0); + // Tbd: Set this endtime to the estimated time of finishing + // previous task for the current tool + endTimes[t][0] = solver.MakeIntConst(0); + + for (int i = 0; i < s; i++) + { + startingTimes[t][i + 1] = tool2Task[t][i].SafeStartExpr(-1).Var(); + endTimes[t][i + 1] = tool2Task[t][i].SafeEndExpr(-1).Var(); + } + startingTimes[t][s + 1] = solver.MakeIntConst(factoryData.Horizon); + endTimes[t][s + 1] = solver.MakeIntConst(factoryData.Horizon); + + // Enforce (or not) that each task is separated by the + // transition time to the next task + for (int i = 0; i < nextLocation.Length; i++) + { + IntVar nextStart = 
solver.MakeElement(startingTimes[t], seq.Next(i).Var()).Var(); + if (postTransitionsConstraint) + solver.Add(endTimes[t][i] + tool2TransitionTimes[t][i] <= nextStart); + } + } + } + + private void Model() + { + /* Building basic task data structures */ + for (int i = 0; i < tasks.Length; i++) + { + /* Create a new set of possible IntervalVars & IntVar to decide + * which one (and only 1) is performed */ + taskStructures[i] = new TaskAlternative(tasks[i]); + + /* Container to use when posting constraints */ + location2Task[tasks[i].LocationId][tasks[i].TaskPosition] = taskStructures[i]; + + /* Get task type */ + int taskType = tasks[i].TaskType; + + /* Possible tool for this task */ + List tools = taskType2Tool[taskType]; + bool optional = tools.Count > 1; + + /* List of boolean variables. If performedOnTool[t] == true then + * the task is performed on tool t */ + List performedOnTool = new List(); + for (int t = 0; t < tools.Count; t++) + { + /* Creating an IntervalVar. If tools.Count > 1 the intervalVar + * is *OPTIONAL* */ + int toolId = tools[t].Id; + Debug.Assert(tasks[i].Durations.ContainsKey(toolId)); + int duration = tasks[i].Durations[toolId]; + string name = "J " + tasks[i].Id + " [" + toolId + "]"; + + IntervalVar intervalVar; + if (taskType == factoryData.Inspection) + { + /* We set a 0 time if the task is an inspection */ + duration = 0; + intervalVar = solver.MakeFixedDurationIntervalVar(0, horizon, duration, optional, name); + IntVar start = intervalVar.SafeStartExpr(-1).Var(); + + intervalVar.SafeStartExpr(-1).Var().SetValues(factoryData.InspectionStarts); + } + else + { + intervalVar = solver.MakeFixedDurationIntervalVar(0, horizon, duration, optional, name); + } + + taskStructures[i].Intervals.Add(intervalVar); + tool2Task[toolId].Add(intervalVar); + toolIntervalVar2TaskId[toolId].Add(i); + + /* Collecting all the bool vars, even if they are optional */ + performedOnTool.Add(intervalVar.PerformedExpr().Var()); + } + + /* Linking the bool var to a 
single integer variable: */ + /* if alternativeToolVar == t <=> performedOnTool[t] == true */ + string alternativeName = "J " + tasks[i].Id; + IntVar alternativeToolVar = solver.MakeIntVar(0, tools.Count - 1, alternativeName); + taskStructures[i].ToolVar = alternativeToolVar; + + solver.Add(solver.MakeMapDomain(alternativeToolVar, performedOnTool.ToArray())); + Debug.Assert(performedOnTool.ToArray().Length == alternativeToolVar.Max() + 1); + + selectedTool.Add(alternativeToolVar); + } + + /* Creates precedences on a work Location in order to enforce a + * fully ordered set within the same location + */ + for (int d = 0; d < location2Task.Length; d++) + { + for (int i = 0; i < location2Task[d].Length - 1; i++) + { + TaskAlternative task1 = location2Task[d][i]; + TaskAlternative task2 = location2Task[d][i + 1]; + /* task1 must end before task2 starts */ + /* Adding precedence for each possible alternative pair */ + for (int t1 = 0; t1 < task1.Intervals.Count(); t1++) + { + IntervalVar task1Alternative = task1.Intervals[t1]; + for (int t2 = 0; t2 < task2.Intervals.Count(); t2++) + { + IntervalVar task2Alternative = task2.Intervals[t2]; + Constraint precedence = + solver.MakeIntervalVarRelation(task2Alternative, Solver.STARTS_AFTER_END, task1Alternative); + solver.Add(precedence); + } + } + } + } + + /* Adds disjunctive constraints on unary resources, and creates + * sequence variables. 
*/ + for (int t = 0; t < factoryData.NbTools; t++) + { + string name = "Tool " + t; + + if (!factoryData.Tools[t].CanPerformTaskType(factoryData.Inspection)) + { + DisjunctiveConstraint ct = solver.MakeDisjunctiveConstraint(tool2Task[t].ToArray(), name); + solver.Add(ct); + allToolSequences[t] = ct.SequenceVar(); + } + PostTransitionTimeConstraints(t, true); + } + + /* Collecting all tasks end for makespan objective function */ + List intervalEnds = new List(); + for (int i = 0; i < tasks.Length; i++) + foreach (IntervalVar var in taskStructures[i].Intervals) + intervalEnds.Add(var.SafeEndExpr(-1).Var()); + + /* Objective: minimize the makespan (maximum end times of all tasks) */ + makespan = solver.MakeMax(intervalEnds.ToArray()).Var(); + objective = solver.MakeMinimize(makespan, 1); + } + + private void Search() + { + int seed = 2; // This is a good seed to show the crash + + /* Assigning first tools */ + DecisionBuilder myToolAssignmentPhase = new RandomSelectToolHeuristic(this, seed); + + /* Ranking of the tools */ + DecisionBuilder sequencingPhase = solver.MakePhase(allToolSequences, Solver.SEQUENCE_DEFAULT); + + /* Then fixing time of tasks as early as possible */ + DecisionBuilder timingPhase = solver.MakePhase(makespan, Solver.CHOOSE_FIRST_UNBOUND, Solver.ASSIGN_MIN_VALUE); + + /* Overall phase */ + DecisionBuilder mainPhase = solver.Compose(myToolAssignmentPhase, sequencingPhase, timingPhase); + + /* Logging */ + const int logFrequency = 1000000; + SearchMonitor searchLog = solver.MakeSearchLog(logFrequency, objective); + + /* Restarts */ + SearchMonitor searchRestart = solver.MakeLubyRestart(100); + + /* Search Limit in ms */ + SearchLimit limit = solver.MakeTimeLimit(180 * 1000); + + /* Collecting best solution */ + SolutionCollector collector = solver.MakeLastSolutionCollector(); + collector.AddObjective(makespan); + + // collector.Add( pile.ToArray() ); + solver.NewSearch(mainPhase, searchLog, searchRestart, objective, limit); + while 
(solver.NextSolution()) + { + Console.WriteLine("MAKESPAN: " + makespan.Value()); + } + } + + public void Solve() + { + Init(); + Model(); + Search(); + } +} + +public class Issue33Test +{ + public static void FactorySchedulingTest() + { + FactoryScheduling scheduling = new FactoryScheduling(new SmallSyntheticData().FetchData()); + scheduling.Solve(); + } + static void Main() + { + FactorySchedulingTest(); + } +} diff --git a/ortools/dotnet/Google.OrTools.sln b/ortools/dotnet/Google.OrTools.sln index c2ef81d8af4..d906c3e4eca 100644 --- a/ortools/dotnet/Google.OrTools.sln +++ b/ortools/dotnet/Google.OrTools.sln @@ -1,37 +1,37 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio 15 -VisualStudioVersion = 15.0.26124.0 -MinimumVisualStudioVersion = 15.0.26124.0 -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Google.OrTools.runtime.linux-x64", "Google.OrTools.runtime.linux-x64\Google.OrTools.runtime.linux-x64.csproj", "{FC646C34-8541-427D-B9F6-1247798F4574}" -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Google.OrTools.runtime.osx-x64", "Google.OrTools.runtime.osx-x64\Google.OrTools.runtime.osx-x64.csproj", "{FC646C34-8541-427D-B9F6-1247798F4574}" -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Google.OrTools.runtime.win-x64", "Google.OrTools.runtime.win-x64\Google.OrTools.runtime.win-x64.csproj", "{FC646C34-8541-427D-B9F6-1247798F4574}" -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Google.OrTools", "Google.OrTools\Google.OrTools.csproj", "{FC646C34-8541-427D-B9F6-1247798F4574}" -EndProject -Global -GlobalSection(SolutionConfigurationPlatforms) = preSolution -Debug|Any CPU = Debug|Any CPU -Debug|x64 = Debug|x64 -Debug|x86 = Debug|x86 -Release|Any CPU = Release|Any CPU -Release|x64 = Release|x64 -Release|x86 = Release|x86 -EndGlobalSection -GlobalSection(SolutionProperties) = preSolution -HideSolutionNode = FALSE -EndGlobalSection -GlobalSection(ProjectConfigurationPlatforms) = postSolution 
-{FC646C34-8541-427D-B9F6-1247798F4574}.Debug|Any CPU.ActiveCfg = Debug|Any CPU -{FC646C34-8541-427D-B9F6-1247798F4574}.Debug|Any CPU.Build.0 = Debug|Any CPU -{FC646C34-8541-427D-B9F6-1247798F4574}.Debug|x64.ActiveCfg = Debug|Any CPU -{FC646C34-8541-427D-B9F6-1247798F4574}.Debug|x64.Build.0 = Debug|Any CPU -{FC646C34-8541-427D-B9F6-1247798F4574}.Debug|x86.ActiveCfg = Debug|Any CPU -{FC646C34-8541-427D-B9F6-1247798F4574}.Debug|x86.Build.0 = Debug|Any CPU -{FC646C34-8541-427D-B9F6-1247798F4574}.Release|Any CPU.ActiveCfg = Release|Any CPU -{FC646C34-8541-427D-B9F6-1247798F4574}.Release|Any CPU.Build.0 = Release|Any CPU -{FC646C34-8541-427D-B9F6-1247798F4574}.Release|x64.ActiveCfg = Release|Any CPU -{FC646C34-8541-427D-B9F6-1247798F4574}.Release|x64.Build.0 = Release|Any CPU -{FC646C34-8541-427D-B9F6-1247798F4574}.Release|x86.ActiveCfg = Release|Any CPU -{FC646C34-8541-427D-B9F6-1247798F4574}.Release|x86.Build.0 = Release|Any CPU -EndGlobalSection -EndGlobal + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 15 +VisualStudioVersion = 15.0.26124.0 +MinimumVisualStudioVersion = 15.0.26124.0 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Google.OrTools.runtime.linux-x64", "Google.OrTools.runtime.linux-x64\Google.OrTools.runtime.linux-x64.csproj", "{FC646C34-8541-427D-B9F6-1247798F4574}" +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Google.OrTools.runtime.osx-x64", "Google.OrTools.runtime.osx-x64\Google.OrTools.runtime.osx-x64.csproj", "{FC646C34-8541-427D-B9F6-1247798F4574}" +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Google.OrTools.runtime.win-x64", "Google.OrTools.runtime.win-x64\Google.OrTools.runtime.win-x64.csproj", "{FC646C34-8541-427D-B9F6-1247798F4574}" +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Google.OrTools", "Google.OrTools\Google.OrTools.csproj", "{FC646C34-8541-427D-B9F6-1247798F4574}" +EndProject +Global +GlobalSection(SolutionConfigurationPlatforms) = preSolution +Debug|Any CPU = Debug|Any 
CPU +Debug|x64 = Debug|x64 +Debug|x86 = Debug|x86 +Release|Any CPU = Release|Any CPU +Release|x64 = Release|x64 +Release|x86 = Release|x86 +EndGlobalSection +GlobalSection(SolutionProperties) = preSolution +HideSolutionNode = FALSE +EndGlobalSection +GlobalSection(ProjectConfigurationPlatforms) = postSolution +{FC646C34-8541-427D-B9F6-1247798F4574}.Debug|Any CPU.ActiveCfg = Debug|Any CPU +{FC646C34-8541-427D-B9F6-1247798F4574}.Debug|Any CPU.Build.0 = Debug|Any CPU +{FC646C34-8541-427D-B9F6-1247798F4574}.Debug|x64.ActiveCfg = Debug|Any CPU +{FC646C34-8541-427D-B9F6-1247798F4574}.Debug|x64.Build.0 = Debug|Any CPU +{FC646C34-8541-427D-B9F6-1247798F4574}.Debug|x86.ActiveCfg = Debug|Any CPU +{FC646C34-8541-427D-B9F6-1247798F4574}.Debug|x86.Build.0 = Debug|Any CPU +{FC646C34-8541-427D-B9F6-1247798F4574}.Release|Any CPU.ActiveCfg = Release|Any CPU +{FC646C34-8541-427D-B9F6-1247798F4574}.Release|Any CPU.Build.0 = Release|Any CPU +{FC646C34-8541-427D-B9F6-1247798F4574}.Release|x64.ActiveCfg = Release|Any CPU +{FC646C34-8541-427D-B9F6-1247798F4574}.Release|x64.Build.0 = Release|Any CPU +{FC646C34-8541-427D-B9F6-1247798F4574}.Release|x86.ActiveCfg = Release|Any CPU +{FC646C34-8541-427D-B9F6-1247798F4574}.Release|x86.Build.0 = Release|Any CPU +EndGlobalSection +EndGlobal diff --git a/ortools/routing/parsers/testdata/pdtsp_prob10b.txt b/ortools/routing/parsers/testdata/pdtsp_prob10b.txt index d1194cb91c2..69abc76207d 100644 --- a/ortools/routing/parsers/testdata/pdtsp_prob10b.txt +++ b/ortools/routing/parsers/testdata/pdtsp_prob10b.txt @@ -1,23 +1,23 @@ -21 -1 682 266 -2 129 265 0 12 -3 298 495 0 13 -4 720 160 0 14 -5 93 10 0 15 -6 891 782 0 16 -7 888 533 0 17 -8 414 290 0 18 -9 61 22 0 19 -10 485 352 0 20 -11 817 619 0 21 -12 669 775 1 2 -13 628 117 1 3 -14 178 31 1 4 -15 733 97 1 5 -16 985 320 1 6 -17 319 0 1 7 -18 545 283 1 8 -19 331 664 1 9 -20 598 785 1 10 -21 245 810 1 11 --999 +21 +1 682 266 +2 129 265 0 12 +3 298 495 0 13 +4 720 160 0 14 +5 93 10 0 15 +6 891 782 0 16 +7 
888 533 0 17 +8 414 290 0 18 +9 61 22 0 19 +10 485 352 0 20 +11 817 619 0 21 +12 669 775 1 2 +13 628 117 1 3 +14 178 31 1 4 +15 733 97 1 5 +16 985 320 1 6 +17 319 0 1 7 +18 545 283 1 8 +19 331 664 1 9 +20 598 785 1 10 +21 245 810 1 11 +-999 From e066455f2950b4adfd0e4a7282a456c0d7116df0 Mon Sep 17 00:00:00 2001 From: Mizux Seiha Date: Tue, 16 Dec 2025 12:25:51 +0100 Subject: [PATCH 075/111] algorithms: fix windows build when SCIP support is off --- ortools/algorithms/knapsack_solver_test.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ortools/algorithms/knapsack_solver_test.cc b/ortools/algorithms/knapsack_solver_test.cc index 2c7572aead4..771037b351c 100644 --- a/ortools/algorithms/knapsack_solver_test.cc +++ b/ortools/algorithms/knapsack_solver_test.cc @@ -133,7 +133,9 @@ int64_t SolveKnapsackProblem( return kInvalidSolution; } #else // !defined(USE_SCIP) +#if !defined(_MSC_VER) #warning SCIP support disable +#endif // !defined(_MSC_VER) #endif // !defined(USE_SCIP) const int64_t cpsat_profit = SolveKnapsackProblemUsingSpecificSolver( From b0391683a6b606b4835ae2e30e9d9626a7d485ea Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Mon, 15 Dec 2025 18:20:41 +0100 Subject: [PATCH 076/111] cmake: fix python build when BUILD_TESTING=OFF --- ortools/python/setup.py.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ortools/python/setup.py.in b/ortools/python/setup.py.in index ce713f09505..24f5ea0a72d 100644 --- a/ortools/python/setup.py.in +++ b/ortools/python/setup.py.in @@ -128,7 +128,7 @@ setup( '@PYTHON_PROJECT@.util.python':[ '$', '$', - '$', + $<$:'$',> '*.pyi' ], }, From 447ab9bfbb77cdf9dd75529a6b14841c69a316dc Mon Sep 17 00:00:00 2001 From: Mizux Seiha Date: Tue, 16 Dec 2025 18:39:36 +0100 Subject: [PATCH 077/111] algorithms: disable knapsack_solver_test.py if SCIP no unavailable --- ortools/algorithms/python/CMakeLists.txt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/ortools/algorithms/python/CMakeLists.txt 
b/ortools/algorithms/python/CMakeLists.txt index 87e29473d17..1289cee6ca9 100644 --- a/ortools/algorithms/python/CMakeLists.txt +++ b/ortools/algorithms/python/CMakeLists.txt @@ -33,6 +33,9 @@ add_library(${PROJECT_NAMESPACE}::knapsack_solver_pybind11 ALIAS knapsack_solver if(BUILD_TESTING) file(GLOB PYTHON_SRCS "*_test.py") + if(NOT USE_SCIP) + list(FILTER PYTHON_SRCS EXCLUDE REGEX "knapsack_solver_test.py$") + endif() foreach(FILE_NAME IN LISTS PYTHON_SRCS) add_python_test(FILE_NAME ${FILE_NAME}) endforeach() From ad91b2ddd348e7df31ee4ff8e6aa0f9b3c7e39e3 Mon Sep 17 00:00:00 2001 From: AliReZa Sabouri <7004080+alirezanet@users.noreply.github.com> Date: Wed, 17 Dec 2025 00:32:15 +0100 Subject: [PATCH 078/111] Remove sealed modifier and implement standard dispose pattern for CpSolver The sealed modifier was unnecessarily breaking inheritance for users who need to extend CpSolver. The standard IDisposable pattern with protected virtual Dispose(bool) allows safe inheritance while maintaining proper resource cleanup. This change: - Removes the sealed modifier from CpSolver class - Implements the standard dispose pattern with protected virtual Dispose(bool) - Allows derived classes to override disposal behavior safely - Maintains backward compatibility for existing non-inheriting code --- ortools/sat/csharp/CpSolver.cs | 29 +++++++++++++++++++++++------ 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/ortools/sat/csharp/CpSolver.cs b/ortools/sat/csharp/CpSolver.cs index c2a1449ee01..9a5a89dc422 100644 --- a/ortools/sat/csharp/CpSolver.cs +++ b/ortools/sat/csharp/CpSolver.cs @@ -26,7 +26,7 @@ namespace Google.OrTools.Sat * variables in the best solution, as well as general statistics of the search. * */ -public sealed class CpSolver : IDisposable +public class CpSolver : IDisposable { private LogCallback? _log_callback; private BestBoundCallback? 
_best_bound_callback; @@ -207,19 +207,36 @@ public bool BooleanValue(ILiteral literal) public string SolutionInfo() => Response!.SolutionInfo; - public void Dispose() + /// + /// Releases unmanaged resources and optionally releases managed resources. + /// + /// true to release both managed and unmanaged resources; false to release only unmanaged resources. + protected virtual void Dispose(bool disposing) { if (_disposed) { return; } - _best_bound_callback?.Dispose(); - _log_callback?.Dispose(); - ReleaseSolveWrapper(); + if (disposing) + { + _best_bound_callback?.Dispose(); + _log_callback?.Dispose(); + ReleaseSolveWrapper(); + } + _disposed = true; } + /// + /// Releases all resources used by the CpSolver. + /// + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + [MethodImpl(MethodImplOptions.Synchronized)] private void CreateSolveWrapper() { @@ -253,4 +270,4 @@ class BestBoundCallbackDelegate : BestBoundCallback public override void NewBestBound(double bound) => _delegate(bound); } -} // namespace Google.OrTools.Sat +} // namespace Google.OrTools.Sat \ No newline at end of file From 2f299d5d51402682e34f76ab89eec92d174d2561 Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Wed, 17 Dec 2025 13:04:07 +0100 Subject: [PATCH 079/111] tools/release: Fix maven install note: dlcdn have uploaded 3.9.12 on December 16th and removed the previous one... 
--- tools/release/amd64.Dockerfile | 7 ++++--- tools/release/arm64.Dockerfile | 7 ++++--- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/tools/release/amd64.Dockerfile b/tools/release/amd64.Dockerfile index 9520bc7596d..4a18f989f1d 100644 --- a/tools/release/amd64.Dockerfile +++ b/tools/release/amd64.Dockerfile @@ -36,10 +36,11 @@ RUN dnf -y update \ ENV JAVA_HOME=/usr/lib/jvm/java # Update maven -ADD https://dlcdn.apache.org/maven/maven-3/3.9.11/binaries/apache-maven-3.9.11-bin.tar.gz /usr/local +ARG MAVEN_VERSION=3.9.12 +ADD https://dlcdn.apache.org/maven/maven-3/${MAVEN_VERSION}/binaries/apache-maven-${MAVEN_VERSION}-bin.tar.gz /usr/local RUN mkdir -p /usr/local/maven \ - && tar xzvf /usr/local/apache-maven-3.9.11-bin.tar.gz --strip-components=1 -C /usr/local/maven \ - && rm /usr/local/apache-maven-3.9.11-bin.tar.gz + && tar xzvf /usr/local/apache-maven-${MAVEN_VERSION}-bin.tar.gz --strip-components=1 -C /usr/local/maven \ + && rm /usr/local/apache-maven-${MAVEN_VERSION}-bin.tar.gz ENV PATH=/usr/local/maven/bin:$PATH ENV TZ=America/Los_Angeles diff --git a/tools/release/arm64.Dockerfile b/tools/release/arm64.Dockerfile index dba7ce957d8..27ec9547fb5 100644 --- a/tools/release/arm64.Dockerfile +++ b/tools/release/arm64.Dockerfile @@ -41,10 +41,11 @@ RUN dnf -y update \ ENV JAVA_HOME=/usr/lib/jvm/java # Update maven -ADD https://dlcdn.apache.org/maven/maven-3/3.9.11/binaries/apache-maven-3.9.11-bin.tar.gz /usr/local +ARG MAVEN_VERSION=3.9.12 +ADD https://dlcdn.apache.org/maven/maven-3/${MAVEN_VERSION}/binaries/apache-maven-${MAVEN_VERSION}-bin.tar.gz /usr/local RUN mkdir -p /usr/local/maven \ - && tar xzvf /usr/local/apache-maven-3.9.11-bin.tar.gz --strip-components=1 -C /usr/local/maven \ - && rm /usr/local/apache-maven-3.9.11-bin.tar.gz + && tar xzvf /usr/local/apache-maven-${MAVEN_VERSION}-bin.tar.gz --strip-components=1 -C /usr/local/maven \ + && rm /usr/local/apache-maven-${MAVEN_VERSION}-bin.tar.gz ENV PATH=/usr/local/maven/bin:$PATH ENV 
TZ=America/Los_Angeles From c3164316fbd49a6f3b5f3924873737ff7b27c2bb Mon Sep 17 00:00:00 2001 From: Mizux Seiha Date: Fri, 19 Dec 2025 15:01:21 +0100 Subject: [PATCH 080/111] examples: regenerate notebook --- examples/notebook/algorithms/knapsack.ipynb | 1 + .../algorithms/simple_knapsack_program.ipynb | 1 + .../contrib/permutation_flow_shop.ipynb | 3 +- .../scheduling_with_transitions_sat.ipynb | 3 +- examples/notebook/examples/appointments.ipynb | 2 +- .../examples/arc_flow_cutting_stock_sat.ipynb | 3 +- .../notebook/examples/balance_group_sat.ipynb | 3 +- .../examples/bus_driver_scheduling_sat.ipynb | 11 +- .../car_sequencing_optimization_sat.ipynb | 349 ++++++++ .../notebook/examples/cryptarithm_sat.ipynb | 3 +- examples/notebook/examples/cvrptw_plot.ipynb | 833 ------------------ .../examples/gate_scheduling_sat.ipynb | 2 +- examples/notebook/examples/golomb8.ipynb | 2 +- examples/notebook/examples/golomb_sat.ipynb | 4 +- .../examples/horse_jumping_show.ipynb | 376 ++++++++ .../notebook/examples/knapsack_2d_sat.ipynb | 8 +- .../examples/line_balancing_sat.ipynb | 6 +- .../examples/linear_assignment_api.ipynb | 13 +- .../examples/maximize_combinations_sat.ipynb | 1 + .../notebook/examples/maze_escape_sat.ipynb | 6 +- .../memory_layout_and_infeasibility_sat.ipynb | 5 +- .../examples/music_playlist_sat.ipynb | 388 ++++++++ .../no_wait_baking_scheduling_sat.ipynb | 3 +- .../notebook/examples/pentominoes_sat.ipynb | 3 +- .../examples/prize_collecting_vrp.ipynb | 2 + .../notebook/examples/pyflow_example.ipynb | 7 +- examples/notebook/examples/rcpsp_sat.ipynb | 3 +- .../examples/shift_scheduling_sat.ipynb | 3 +- ...ing_with_setup_release_due_dates_sat.ipynb | 3 +- .../notebook/examples/spillover_sat.ipynb | 439 +++++++++ .../notebook/examples/spread_robots_sat.ipynb | 3 +- .../examples/steel_mill_slab_sat.ipynb | 7 +- .../examples/test_scheduling_sat.ipynb | 3 +- examples/notebook/examples/transit_time.ipynb | 1 - .../weighted_latency_problem_sat.ipynb | 8 +- 
.../assignment_linear_sum_assignment.ipynb | 1 + .../notebook/graph/assignment_min_flow.ipynb | 1 + .../notebook/graph/balance_min_flow.ipynb | 1 + .../graph/simple_max_flow_program.ipynb | 1 + .../graph/simple_min_cost_flow_program.ipynb | 1 + .../linear_solver/assignment_groups_mip.ipynb | 1 + .../linear_solver/assignment_mb.ipynb | 1 + .../linear_solver/assignment_mip.ipynb | 1 + .../assignment_task_sizes_mip.ipynb | 1 + .../linear_solver/assignment_teams_mip.ipynb | 1 + .../linear_solver/basic_example.ipynb | 1 + .../linear_solver/bin_packing_mb.ipynb | 1 + .../linear_solver/bin_packing_mip.ipynb | 2 + .../linear_solver/clone_model_mb.ipynb | 1 + .../integer_programming_example.ipynb | 1 + .../linear_programming_example.ipynb | 1 + .../linear_solver/mip_var_array.ipynb | 2 + .../linear_solver/multiple_knapsack_mip.ipynb | 1 + .../linear_solver/simple_lp_program.ipynb | 1 + .../linear_solver/simple_lp_program_mb.ipynb | 1 + .../linear_solver/simple_mip_program.ipynb | 1 + .../linear_solver/simple_mip_program_mb.ipynb | 1 + .../notebook/linear_solver/stigler_diet.ipynb | 1 + .../sat/ranking_circuit_sample_sat.ipynb | 8 +- .../sequences_in_no_overlap_sample_sat.ipynb | 10 +- .../notebook/sat/soft_constraints_sat.ipynb | 249 ++++++ ...transitions_in_no_overlap_sample_sat.ipynb | 11 +- examples/notebook/set_cover/set_cover.ipynb | 1 + 63 files changed, 1903 insertions(+), 909 deletions(-) create mode 100644 examples/notebook/examples/car_sequencing_optimization_sat.ipynb delete mode 100644 examples/notebook/examples/cvrptw_plot.ipynb create mode 100644 examples/notebook/examples/horse_jumping_show.ipynb create mode 100644 examples/notebook/examples/music_playlist_sat.ipynb create mode 100644 examples/notebook/examples/spillover_sat.ipynb create mode 100644 examples/notebook/sat/soft_constraints_sat.ipynb diff --git a/examples/notebook/algorithms/knapsack.ipynb b/examples/notebook/algorithms/knapsack.ipynb index c0d34c4ad76..553a09d2ac9 100644 --- 
a/examples/notebook/algorithms/knapsack.ipynb +++ b/examples/notebook/algorithms/knapsack.ipynb @@ -86,6 +86,7 @@ "from ortools.algorithms.python import knapsack_solver\n", "\n", "\n", + "\n", "def main():\n", " # Create the solver.\n", " solver = knapsack_solver.KnapsackSolver(\n", diff --git a/examples/notebook/algorithms/simple_knapsack_program.ipynb b/examples/notebook/algorithms/simple_knapsack_program.ipynb index 820c62baa06..824f0a67c55 100644 --- a/examples/notebook/algorithms/simple_knapsack_program.ipynb +++ b/examples/notebook/algorithms/simple_knapsack_program.ipynb @@ -86,6 +86,7 @@ "from ortools.algorithms.python import knapsack_solver\n", "\n", "\n", + "\n", "def main():\n", " # Create the solver.\n", " solver = knapsack_solver.KnapsackSolver(\n", diff --git a/examples/notebook/contrib/permutation_flow_shop.ipynb b/examples/notebook/contrib/permutation_flow_shop.ipynb index 607f1e48473..6f99533cc09 100644 --- a/examples/notebook/contrib/permutation_flow_shop.ipynb +++ b/examples/notebook/contrib/permutation_flow_shop.ipynb @@ -95,7 +95,6 @@ "import numpy as np\n", "\n", "from ortools.sat.colab import flags\n", - "from google.protobuf import text_format\n", "from ortools.sat.python import cp_model\n", "\n", "_PARAMS = flags.define_string(\n", @@ -217,7 +216,7 @@ "\n", " solver = cp_model.CpSolver()\n", " if params:\n", - " text_format.Parse(params, solver.parameters)\n", + " solver.parameters.parse_text_format(params)\n", " solver.parameters.log_search_progress = log\n", " solver.parameters.max_time_in_seconds = time_limit\n", "\n", diff --git a/examples/notebook/contrib/scheduling_with_transitions_sat.ipynb b/examples/notebook/contrib/scheduling_with_transitions_sat.ipynb index f1676beeeb3..2471e770506 100644 --- a/examples/notebook/contrib/scheduling_with_transitions_sat.ipynb +++ b/examples/notebook/contrib/scheduling_with_transitions_sat.ipynb @@ -90,7 +90,6 @@ "import collections\n", "\n", "from ortools.sat.python import cp_model\n", - "from 
google.protobuf import text_format\n", "\n", "#----------------------------------------------------------------------------\n", "# Command line arguments.\n", @@ -376,7 +375,7 @@ " solver = cp_model.CpSolver()\n", " solver.parameters.max_time_in_seconds = 60 * 60 * 2\n", " if parameters:\n", - " text_format.Merge(parameters, solver.parameters)\n", + " solver.parameters.merge_text_format(parameters)\n", " solution_printer = SolutionPrinter(makespan)\n", " status = solver.Solve(model, solution_printer)\n", "\n", diff --git a/examples/notebook/examples/appointments.ipynb b/examples/notebook/examples/appointments.ipynb index f4c85fec33f..b2e075e1788 100644 --- a/examples/notebook/examples/appointments.ipynb +++ b/examples/notebook/examples/appointments.ipynb @@ -244,7 +244,7 @@ "\n", "\n", "def get_optimal_schedule(\n", - " demand: list[tuple[float, str, int]]\n", + " demand: list[tuple[float, str, int]],\n", ") -> list[tuple[int, list[tuple[int, str]]]]:\n", " \"\"\"Computes the optimal schedule for the installation input.\n", "\n", diff --git a/examples/notebook/examples/arc_flow_cutting_stock_sat.ipynb b/examples/notebook/examples/arc_flow_cutting_stock_sat.ipynb index 89c9809afca..94e93cd8e57 100644 --- a/examples/notebook/examples/arc_flow_cutting_stock_sat.ipynb +++ b/examples/notebook/examples/arc_flow_cutting_stock_sat.ipynb @@ -89,7 +89,6 @@ "from ortools.sat.colab import flags\n", "import numpy as np\n", "\n", - "from google.protobuf import text_format\n", "from ortools.linear_solver.python import model_builder as mb\n", "from ortools.sat.python import cp_model\n", "\n", @@ -387,7 +386,7 @@ " # Solve model.\n", " solver = cp_model.CpSolver()\n", " if params:\n", - " text_format.Parse(params, solver.parameters)\n", + " solver.parameters.parse_text_format(params)\n", " solver.parameters.log_search_progress = True\n", " solver.Solve(model)\n", "\n", diff --git a/examples/notebook/examples/balance_group_sat.ipynb 
b/examples/notebook/examples/balance_group_sat.ipynb index aa8477b300e..3cea13a81ab 100644 --- a/examples/notebook/examples/balance_group_sat.ipynb +++ b/examples/notebook/examples/balance_group_sat.ipynb @@ -78,7 +78,8 @@ "Each item has a color and a value. We want the sum of values of each group to\n", "be as close to the average as possible.\n", "Furthermore, if one color is an a group, at least k items with this color must\n", - "be in that group.\n" + "be in that group.\n", + "\n" ] }, { diff --git a/examples/notebook/examples/bus_driver_scheduling_sat.ipynb b/examples/notebook/examples/bus_driver_scheduling_sat.ipynb index 896dce73aba..ae52c1adc6d 100644 --- a/examples/notebook/examples/bus_driver_scheduling_sat.ipynb +++ b/examples/notebook/examples/bus_driver_scheduling_sat.ipynb @@ -97,7 +97,6 @@ "import math\n", "\n", "from ortools.sat.colab import flags\n", - "from google.protobuf import text_format\n", "from ortools.sat.python import cp_model\n", "\n", "_OUTPUT_PROTO = flags.define_string(\n", @@ -149,7 +148,7 @@ " [25, \"15:40\", \"15:56\", 940, 956, 16],\n", " [26, \"15:58\", \"16:45\", 958, 1005, 47],\n", " [27, \"16:04\", \"17:30\", 964, 1050, 86],\n", - "] # yapf:disable\n", + "]\n", "\n", "SAMPLE_SHIFTS_SMALL = [\n", " #\n", @@ -211,7 +210,7 @@ " [47, \"18:34\", \"19:58\", 1114, 1198, 84],\n", " [48, \"19:56\", \"20:34\", 1196, 1234, 38],\n", " [49, \"20:05\", \"20:48\", 1205, 1248, 43],\n", - "] # yapf:disable\n", + "]\n", "\n", "SAMPLE_SHIFTS_MEDIUM = [\n", " [0, \"04:30\", \"04:53\", 270, 293, 23],\n", @@ -414,7 +413,7 @@ " [197, \"00:02\", \"00:12\", 1442, 1452, 10],\n", " [198, \"00:07\", \"00:39\", 1447, 1479, 32],\n", " [199, \"00:25\", \"01:12\", 1465, 1512, 47],\n", - "] # yapf:disable\n", + "]\n", "\n", "SAMPLE_SHIFTS_LARGE = [\n", " [0, \"04:18\", \"05:00\", 258, 300, 42],\n", @@ -1773,7 +1772,7 @@ " [1353, \"00:47\", \"01:26\", 1487, 1526, 39],\n", " [1354, \"00:54\", \"01:04\", 1494, 1504, 10],\n", " [1355, \"00:57\", \"01:07\", 1497, 
1507, 10],\n", - "] # yapf:disable\n", + "]\n", "\n", "\n", "def bus_driver_scheduling(minimize_drivers: bool, max_num_drivers: int) -> int:\n", @@ -2049,7 +2048,7 @@ " # Solve model.\n", " solver = cp_model.CpSolver()\n", " if _PARAMS.value:\n", - " text_format.Parse(_PARAMS.value, solver.parameters)\n", + " solver.parameters.parse_text_format(_PARAMS.value)\n", "\n", " status = solver.solve(model)\n", "\n", diff --git a/examples/notebook/examples/car_sequencing_optimization_sat.ipynb b/examples/notebook/examples/car_sequencing_optimization_sat.ipynb new file mode 100644 index 00000000000..0e0ff138be1 --- /dev/null +++ b/examples/notebook/examples/car_sequencing_optimization_sat.ipynb @@ -0,0 +1,349 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "google", + "metadata": {}, + "source": [ + "##### Copyright 2025 Google LLC." + ] + }, + { + "cell_type": "markdown", + "id": "apache", + "metadata": {}, + "source": [ + "Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "you may not use this file except in compliance with the License.\n", + "You may obtain a copy of the License at\n", + "\n", + " http://www.apache.org/licenses/LICENSE-2.0\n", + "\n", + "Unless required by applicable law or agreed to in writing, software\n", + "distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "See the License for the specific language governing permissions and\n", + "limitations under the License.\n" + ] + }, + { + "cell_type": "markdown", + "id": "basename", + "metadata": {}, + "source": [ + "# car_sequencing_optimization_sat" + ] + }, + { + "cell_type": "markdown", + "id": "link", + "metadata": {}, + "source": [ + "\n", + "\n", + "\n", + "
\n", + "Run in Google Colab\n", + "\n", + "View source on GitHub\n", + "
" + ] + }, + { + "cell_type": "markdown", + "id": "doc", + "metadata": {}, + "source": [ + "First, you must install [ortools](https://pypi.org/project/ortools/) package in this colab." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "install", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install ortools" + ] + }, + { + "cell_type": "markdown", + "id": "description", + "metadata": {}, + "source": [ + "\n", + "Solve the car sequencing problem as an optimization problem.\n", + "\n", + "Problem Description: The Car Sequencing Problem with Optimization\n", + "-----------------------------------------------------------------\n", + "\n", + "See https://en.wikipedia.org/wiki/Car_sequencing_problem for more details.\n", + "\n", + "We are tasked with determining the optimal production sequence for a set of cars\n", + "on an assembly line. This is a classic and challenging combinatorial\n", + "optimization problem with the following characteristics:\n", + "\n", + "Fixed Production Demand: There is a specific, non-negotiable number of cars of\n", + "different types (or 'classes') that must be produced. In our case, we have 6\n", + "distinct classes of cars, and we must produce exactly 5 of each, for a total of\n", + "30 'real' cars.\n", + "\n", + "Diverse Car Configurations: Each car class is defined by a unique combination of\n", + "optional features. For example, 'Class 1' might require a sunroof (Option 1) and\n", + "a special engine (Option 4), while 'Class 3' only requires air conditioning\n", + "(Option 2).\n", + "\n", + "Specialized Assembly Stations: The assembly line is composed of a series of\n", + "specialized stations. Each station is responsible for installing one specific\n", + "option. For example, there is one station for sunroofs, one for special engines,\n", + "and so on.\n", + "\n", + "Capacity-Limited Stations: The core challenge of the problem lies here. 
The\n", + "stations cannot handle an unlimited, dense flow of cars requiring their specific\n", + "option. Their capacity is defined by a 'sliding window' constraint. For example,\n", + "the sunroof station might have a constraint of 'at most 1 car with a sunroof in\n", + "any sequence of 3 consecutive cars'. This means sequences like [Sunroof, No, No,\n", + "Sunroof] are valid, but [Sunroof, No, Sunroof, No] are not.\n", + "\n", + "The Need for Spacing (Optimization): The combination of high demand for certain\n", + "options and tight capacity constraints may make it impossible to produce the 30\n", + "real cars consecutively. To create a valid sequence, we may need to insert\n", + "'dummy' or 'filler' cars into the production line. These dummy cars have no\n", + "options and therefore do not consume capacity at any station. They serve purely\n", + "as spacers to break up dense sequences of option-heavy cars.\n", + "\n", + "The Goal: The objective is to find a production sequence that fulfills the\n", + "demand for all 30 real cars while using the minimum number of dummy cars. This\n", + "is equivalent to finding the shortest possible total production schedule (real\n", + "cars + dummy cars).\n", + "\n", + "Modeling and Solution Approach with CP-SAT\n", + "------------------------------------------\n", + "\n", + "To solve this problem, we use the CP-SAT solver from Google's OR-Tools library.\n", + "This is a constraint programming approach, which works by defining variables,\n", + "constraints, and an objective function.\n", + "\n", + "1. Decision Variables\n", + "The fundamental decision the solver must make is: 'Which class of car should be\n", + "placed in each production slot?'\n", + "We define a large number of boolean variables: produces[c][s]. This variable is\n", + "True if a car of class c is scheduled in slot s, and False otherwise. 
We create\n", + "these for all car classes (including the dummy class) and for an extended number\n", + "of slots (30 real + a buffer of 20 for dummies).\n", + "We introduce a key integer variable: makespan. This variable represents the\n", + "total length of the 'meaningful' part of our schedule. It's the slot number\n", + "where the first dummy car appears, after which all subsequent cars are also\n", + "dummies.\n", + "\n", + "2. Constraints (The Rules of the Game)\n", + "We translate the problem's rules into mathematical constraints that the solver\n", + "must obey:\n", + "\n", + "One Car Per Slot: For every production slot s, exactly one car class can be\n", + "assigned. We enforce this using an AddExactlyOne constraint over all\n", + "produces[c][s] variables for that slot.\n", + "\n", + "Fulfill Real Car Demand: The total number of times each real car class c appears\n", + "across all slots must equal its required demand (5 in our case). This is a\n", + "simple Add(sum(...) == 5) constraint.\n", + "\n", + "Station Capacity (Sliding Window): This is the most critical constraint. For\n", + "each option (e.g., 'sunroof') and its capacity rule (e.g., '1 in 3'), we create\n", + "constraints for every possible sliding window. For every subsequence of 3 slots,\n", + "we sum up the produces variables corresponding to car classes that require that\n", + "option and constrain this sum to be less than or equal to 1.\n", + "\n", + "Makespan Definition: This is the clever part of the model. We link our makespan\n", + "objective variable to the placement of dummy cars using logical equivalences for\n", + "each slot s:\n", + "(makespan <= s) is equivalent to (slot s contains a dummy car)\n", + "This ensures that if the solver chooses a makespan of 32, for example, it is\n", + "forced to place dummy cars in slots 32, 33, 34, and so on. 
Conversely, if the\n", + "solver is forced to place a dummy car in slot 32 to satisfy a capacity\n", + "constraint, the makespan must be at most 32.\n", + "\n", + "3. The Objective Function\n", + "\n", + "The objective is simple and directly tied to our goal:\n", + "\n", + "Minimize makespan: By instructing the solver to find a solution with the\n", + "smallest possible value for the makespan variable, we are asking it to find the\n", + "shortest possible production schedule that satisfies all the rules. This\n", + "inherently minimizes the number of dummy cars used.\n", + "\n", + "By defining the problem in this way, we let the CP-SAT solver explore the vast\n", + "search space of possible sequences efficiently, using its powerful constraint\n", + "propagation and search techniques to find an optimal arrangement that meets all\n", + "our complex requirements.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "code", + "metadata": {}, + "outputs": [], + "source": [ + "from collections.abc import Sequence\n", + "\n", + "from ortools.sat.python import cp_model\n", + "\n", + "\n", + "def solve_car_sequencing_optimization() -> None:\n", + " \"\"\"Solves the car sequencing problem with an optimization approach.\"\"\"\n", + "\n", + " # --------------------\n", + " # 1. 
Data\n", + " # --------------------\n", + " num_real_cars: int = 30\n", + " max_dummy_cars: int = 20\n", + " num_slots = num_real_cars + max_dummy_cars\n", + " all_slots = range(num_slots)\n", + "\n", + " class_options = [\n", + " # Options: 1 2 3 4 5\n", + " [0, 0, 0, 0, 0], # Class 0 (Dummy)\n", + " [1, 0, 0, 1, 0], # Class 1\n", + " [0, 1, 0, 0, 1], # Class 2\n", + " [0, 1, 0, 0, 0], # Class 3\n", + " [0, 0, 1, 1, 0], # Class 4\n", + " [0, 0, 1, 0, 0], # Class 5\n", + " [0, 0, 0, 0, 1], # Class 6\n", + " ]\n", + " num_classes = len(class_options)\n", + " all_classes = range(num_classes)\n", + " real_classes = range(1, num_classes)\n", + " dummy_class = 0\n", + "\n", + " demands = [5, 5, 5, 5, 5, 5]\n", + "\n", + " capacity_constraints = [(1, 3), (1, 2), (1, 3), (2, 5), (1, 5)]\n", + " num_options = len(capacity_constraints)\n", + " all_options = range(num_options)\n", + "\n", + " classes_with_option = [\n", + " [c for c in real_classes if class_options[c][o] == 1] for o in all_options\n", + " ]\n", + "\n", + " # --------------------\n", + " # 2. Model Creation\n", + " # --------------------\n", + " model = cp_model.CpModel()\n", + "\n", + " # --------------------\n", + " # 3. Decision Variables\n", + " # --------------------\n", + " produces = {}\n", + " for c in all_classes:\n", + " for s in all_slots:\n", + " produces[(c, s)] = model.new_bool_var(f\"produces_c{c}_s{s}\")\n", + "\n", + " makespan = model.new_int_var(num_real_cars, num_slots, \"makespan\")\n", + "\n", + " # --------------------\n", + " # 4. 
Constraints\n", + " # --------------------\n", + "\n", + " # Constraint 1: Only one car produced per slot.\n", + " for s in all_slots:\n", + " model.add_exactly_one([produces[(c, s)] for c in all_classes])\n", + "\n", + " # Constraint 2: Meet the demand of real cars.\n", + " for i, c in enumerate(real_classes):\n", + " model.add(sum(produces[(c, s)] for s in all_slots) == demands[i])\n", + "\n", + " # Constraint 3: Enforce the capacity constraints on options.\n", + " for o in all_options:\n", + " max_cars, subsequence_len = capacity_constraints[o]\n", + " for start in range(num_slots - subsequence_len + 1):\n", + " window = range(start, start + subsequence_len)\n", + " cars_with_option_in_window = []\n", + " for c in classes_with_option[o]:\n", + " for s in window:\n", + " cars_with_option_in_window.append(produces[(c, s)])\n", + " model.add(sum(cars_with_option_in_window) <= max_cars)\n", + "\n", + " # Constraint 4 (Link objective and dummy cars at the end of the schedule)\n", + " for s in all_slots:\n", + " makespan_le_s = model.new_bool_var(f\"makespan_le_{s}\")\n", + "\n", + " # Enforce makespan_le_s <=> (makespan <= s)\n", + " model.add(makespan <= s).only_enforce_if(makespan_le_s)\n", + " # Use ~ for negation\n", + " model.add(makespan > s).only_enforce_if(~makespan_le_s)\n", + "\n", + " # Enforce makespan_le_s => produces[dummy_class, s]\n", + " model.add_implication(makespan_le_s, produces[dummy_class, s])\n", + "\n", + " # --------------------\n", + " # 5. Objective\n", + " # --------------------\n", + " model.minimize(makespan)\n", + "\n", + " # --------------------\n", + " # 6. 
Solve and Print Solution\n", + " # --------------------\n", + " solver = cp_model.CpSolver()\n", + " solver.parameters.max_time_in_seconds = 30.0\n", + " solver.parameters.num_search_workers = 1 # The problem is easy to solve.\n", + " # solver.parameters.log_search_progress = True # uncomment to see the log.\n", + "\n", + " status = solver.Solve(model)\n", + "\n", + " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", + " final_makespan = int(solver.ObjectiveValue())\n", + " num_dummies_needed = final_makespan - num_real_cars\n", + "\n", + " print(\n", + " f'\\n{\"Optimal\" if status == cp_model.OPTIMAL else \"Feasible\"}'\n", + " f\" solution found with a makespan of {final_makespan}.\"\n", + " )\n", + " print(\n", + " f\"This requires the conceptual equivalent of {num_dummies_needed} dummy\"\n", + " \" car(s) to be used as spacers.\"\n", + " )\n", + "\n", + " sequence = [-1] * num_slots\n", + " for s in all_slots:\n", + " for c in all_classes:\n", + " if solver.Value(produces[(c, s)]) == 1:\n", + " sequence[s] = c\n", + " break\n", + "\n", + " print(\"\\nFull Production Sequence (Class 0 is dummy):\")\n", + " print(\"Slot: | \" + \" | \".join(f\"{i:2}\" for i in range(num_slots)) + \" |\")\n", + " print(\"-------|-\" + \"--|-\" * num_slots)\n", + " print(\"Class: | \" + \" | \".join(f\"{c:2}\" for c in sequence) + \" |\")\n", + "\n", + " elif status == cp_model.INFEASIBLE:\n", + " print(\"\\nNo solution found.\")\n", + "\n", + " else:\n", + " print(f\"\\nSomething went wrong. 
Solver status: {status}\")\n", + "\n", + " print(\"\\nSolver statistics:\")\n", + " print(solver.response_stats())\n", + "\n", + "\n", + "def main(argv: Sequence[str]) -> None:\n", + " if len(argv) > 1:\n", + " raise app.UsageError(\"Too many command-line arguments.\")\n", + " solve_car_sequencing_optimization()\n", + "\n", + "\n", + "main()\n", + "\n" + ] + } + ], + "metadata": { + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/notebook/examples/cryptarithm_sat.ipynb b/examples/notebook/examples/cryptarithm_sat.ipynb index d04c6c890f1..2849b51f379 100644 --- a/examples/notebook/examples/cryptarithm_sat.ipynb +++ b/examples/notebook/examples/cryptarithm_sat.ipynb @@ -73,8 +73,7 @@ "metadata": {}, "source": [ "\n", - "Use CP-SAT to solve a simple cryptarithmetic problem: SEND+MORE=MONEY.\n", - "\n" + "Use CP-SAT to solve a simple cryptarithmetic problem: SEND+MORE=MONEY.\n" ] }, { diff --git a/examples/notebook/examples/cvrptw_plot.ipynb b/examples/notebook/examples/cvrptw_plot.ipynb deleted file mode 100644 index 5f7928f82fd..00000000000 --- a/examples/notebook/examples/cvrptw_plot.ipynb +++ /dev/null @@ -1,833 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "google", - "metadata": {}, - "source": [ - "##### Copyright 2025 Google LLC." 
- ] - }, - { - "cell_type": "markdown", - "id": "apache", - "metadata": {}, - "source": [ - "Licensed under the Apache License, Version 2.0 (the \"License\");\n", - "you may not use this file except in compliance with the License.\n", - "You may obtain a copy of the License at\n", - "\n", - " http://www.apache.org/licenses/LICENSE-2.0\n", - "\n", - "Unless required by applicable law or agreed to in writing, software\n", - "distributed under the License is distributed on an \"AS IS\" BASIS,\n", - "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", - "See the License for the specific language governing permissions and\n", - "limitations under the License.\n" - ] - }, - { - "cell_type": "markdown", - "id": "basename", - "metadata": {}, - "source": [ - "# cvrptw_plot" - ] - }, - { - "cell_type": "markdown", - "id": "link", - "metadata": {}, - "source": [ - "\n", - "\n", - "\n", - "
\n", - "Run in Google Colab\n", - "\n", - "View source on GitHub\n", - "
" - ] - }, - { - "cell_type": "markdown", - "id": "doc", - "metadata": {}, - "source": [ - "First, you must install [ortools](https://pypi.org/project/ortools/) package in this colab." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "install", - "metadata": {}, - "outputs": [], - "source": [ - "%pip install ortools" - ] - }, - { - "cell_type": "markdown", - "id": "description", - "metadata": {}, - "source": [ - "Capacitated Vehicle Routing Problem with Time Windows (and optional orders).\n", - "\n", - " This is a sample using the routing library python wrapper to solve a\n", - " CVRPTW problem.\n", - " A description of the problem can be found here:\n", - " http://en.wikipedia.org/wiki/Vehicle_routing_problem.\n", - " The variant which is tackled by this model includes a capacity dimension,\n", - " time windows and optional orders, with a penalty cost if orders are not\n", - " performed.\n", - " To help explore the problem, two classes are provided Customers() and\n", - " Vehicles(): used to randomly locate orders and depots, and to randomly\n", - " generate demands, time-window constraints and vehicles.\n", - " Distances are computed using the Great Circle distances. 
Distances are in km\n", - " and times in seconds.\n", - "\n", - " A function for the displaying of the vehicle plan\n", - " display_vehicle_output\n", - "\n", - " The optimization engine uses local search to improve solutions, first\n", - " solutions being generated using a cheapest addition heuristic.\n", - " Numpy and Matplotlib are required for the problem creation and display.\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "code", - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "import numpy as np\n", - "from matplotlib import pyplot as plt\n", - "from collections import namedtuple\n", - "from ortools.constraint_solver import pywrapcp\n", - "from ortools.constraint_solver import routing_enums_pb2\n", - "from datetime import datetime, timedelta\n", - "\n", - "\n", - "class Customers():\n", - " \"\"\"\n", - " A class that generates and holds customers information.\n", - "\n", - " Randomly normally distribute a number of customers and locations within\n", - " a region described by a rectangle. Generate a random demand for each\n", - " customer. Generate a random time window for each customer.\n", - " May either be initiated with the extents, as a dictionary describing\n", - " two corners of a rectangle in latitude and longitude OR as a center\n", - " point (lat, lon), and box_size in km. The default arguments are for a\n", - " 10 x 10 km square centered in Sheffield).\n", - "\n", - " Args: extents (Optional[Dict]): A dictionary describing a rectangle in\n", - " latitude and longitude with the keys 'llcrnrlat', 'llcrnrlon' &\n", - " 'urcrnrlat' & 'urcrnrlat' center (Optional(Tuple): A tuple of\n", - " (latitude, longitude) describing the centre of the rectangle. box_size\n", - " (Optional float: The length in km of the box's sides. num_stops (int):\n", - " The number of customers, including the depots that are placed normally\n", - " distributed in the rectangle. 
min_demand (int): Lower limit on the\n", - " randomly generated demand at each customer. max_demand (int): Upper\n", - " limit on the randomly generated demand at each customer.\n", - " min_tw: shortest random time window for a customer, in hours.\n", - " max_tw: longest random time window for a customer, in hours.\n", - " Examples: To place 100 customers randomly within 100 km x 100 km\n", - " rectangle, centered in the default location, with a random demand of\n", - " between 5 and 10 units: >>> customers = Customers(num_stops=100,\n", - " box_size=100, ... min_demand=5, max_demand=10)\n", - " alternatively, to place 75 customers in the same area with default\n", - " arguments for demand: >>> extents = {'urcrnrlon': 0.03403, 'llcrnrlon':\n", - " -2.98325, ... 'urcrnrlat': 54.28127, 'llcrnrlat': 52.48150} >>>\n", - " customers = Customers(num_stops=75, extents=extents)\n", - " \"\"\"\n", - "\n", - " def __init__(self,\n", - " extents=None,\n", - " center=(53.381393, -1.474611),\n", - " box_size=10,\n", - " num_stops=100,\n", - " min_demand=0,\n", - " max_demand=25,\n", - " min_tw=1,\n", - " max_tw=5):\n", - " self.number = num_stops #: The number of customers and depots\n", - " #: Location, a named tuple for locations.\n", - " Location = namedtuple('Location', ['lat', 'lon'])\n", - " if extents is not None:\n", - " self.extents = extents #: The lower left and upper right points\n", - " #: Location[lat,lon]: the centre point of the area.\n", - " self.center = Location(\n", - " extents['urcrnrlat'] - 0.5 *\n", - " (extents['urcrnrlat'] - extents['llcrnrlat']),\n", - " extents['urcrnrlon'] - 0.5 *\n", - " (extents['urcrnrlon'] - extents['llcrnrlon']))\n", - " else:\n", - " #: Location[lat,lon]: the centre point of the area.\n", - " (clat, clon) = self.center = Location(center[0], center[1])\n", - " rad_earth = 6367 # km\n", - " circ_earth = np.pi * rad_earth\n", - " #: The lower left and upper right points\n", - " self.extents = {\n", - " 'llcrnrlon': (clon - 180 * 
box_size /\n", - " (circ_earth * np.cos(np.deg2rad(clat)))),\n", - " 'llcrnrlat':\n", - " clat - 180 * box_size / circ_earth,\n", - " 'urcrnrlon': (clon + 180 * box_size /\n", - " (circ_earth * np.cos(np.deg2rad(clat)))),\n", - " 'urcrnrlat':\n", - " clat + 180 * box_size / circ_earth\n", - " }\n", - " # The 'name' of the stop, indexed from 0 to num_stops-1\n", - " stops = np.array(range(0, num_stops))\n", - " # normaly distributed random distribution of stops within the box\n", - " stdv = 6 # the number of standard deviations 99.9% will be within +-3\n", - " lats = (self.extents['llcrnrlat'] + np.random.randn(num_stops) *\n", - " (self.extents['urcrnrlat'] - self.extents['llcrnrlat']) / stdv)\n", - " lons = (self.extents['llcrnrlon'] + np.random.randn(num_stops) *\n", - " (self.extents['urcrnrlon'] - self.extents['llcrnrlon']) / stdv)\n", - " # uniformly distributed integer demands.\n", - " demands = np.random.randint(min_demand, max_demand, num_stops)\n", - "\n", - " self.time_horizon = 24 * 60**2 # A 24 hour period.\n", - "\n", - " # The customers demand min_tw to max_tw hour time window for each\n", - " # delivery\n", - " time_windows = np.random.randint(min_tw * 3600, max_tw * 3600,\n", - " num_stops)\n", - " # The last time a delivery window can start\n", - " latest_time = self.time_horizon - time_windows\n", - " start_times = [None for o in time_windows]\n", - " stop_times = [None for o in time_windows]\n", - " # Make random timedeltas, nominally from the start of the day.\n", - " for idx in range(self.number):\n", - " stime = int(np.random.randint(0, latest_time[idx]))\n", - " start_times[idx] = timedelta(seconds=stime)\n", - " stop_times[idx] = (\n", - " start_times[idx] + timedelta(seconds=int(time_windows[idx])))\n", - " # A named tuple for the customer\n", - " Customer = namedtuple(\n", - " 'Customer',\n", - " [\n", - " 'index', # the index of the stop\n", - " 'demand', # the demand for the stop\n", - " 'lat', # the latitude of the stop\n", - " 'lon', # 
the longitude of the stop\n", - " 'tw_open', # timedelta window open\n", - " 'tw_close'\n", - " ]) # timedelta window cls\n", - "\n", - " self.customers = [\n", - " Customer(idx, dem, lat, lon, tw_open, tw_close)\n", - " for idx, dem, lat, lon, tw_open, tw_close in zip(\n", - " stops, demands, lats, lons, start_times, stop_times)\n", - " ]\n", - "\n", - " # The number of seconds needed to 'unload' 1 unit of goods.\n", - " self.service_time_per_dem = 300 # seconds\n", - "\n", - " def set_manager(self, manager):\n", - " self.manager = manager\n", - "\n", - " def central_start_node(self, invert=False):\n", - " \"\"\"\n", - " Return a random starting node, with probability weighted by distance\n", - " from the centre of the extents, so that a central starting node is\n", - " likely.\n", - "\n", - " Args: invert (Optional bool): When True, a peripheral starting node is\n", - " most likely.\n", - "\n", - " Returns:\n", - " int: a node index.\n", - "\n", - " Examples:\n", - " >>> customers.central_start_node(invert=True)\n", - " 42\n", - " \"\"\"\n", - " num_nodes = len(self.customers)\n", - " dist = np.empty((num_nodes, 1))\n", - " for idx_to in range(num_nodes):\n", - " dist[idx_to] = self._haversine(self.center.lon, self.center.lat,\n", - " self.customers[idx_to].lon,\n", - " self.customers[idx_to].lat)\n", - " furthest = np.max(dist)\n", - "\n", - " if invert:\n", - " prob = dist * 1.0 / sum(dist)\n", - " else:\n", - " prob = (furthest - dist * 1.0) / sum(furthest - dist)\n", - " indexes = np.array([range(num_nodes)])\n", - " start_node = np.random.choice(\n", - " indexes.flatten(), size=1, replace=True, p=prob.flatten())\n", - " return start_node[0]\n", - "\n", - " def make_distance_mat(self, method='haversine'):\n", - " \"\"\"\n", - " Return a distance matrix and make it a member of Customer, using the\n", - " method given in the call. 
Currently only Haversine (GC distance) is\n", - " implemented, but Manhattan, or using a maps API could be added here.\n", - " Raises an AssertionError for all other methods.\n", - "\n", - " Args: method (Optional[str]): method of distance calculation to use. The\n", - " Haversine formula is the only method implemented.\n", - "\n", - " Returns:\n", - " Numpy array of node to node distances.\n", - "\n", - " Examples:\n", - " >>> dist_mat = customers.make_distance_mat(method='haversine')\n", - " >>> dist_mat = customers.make_distance_mat(method='manhattan')\n", - " AssertionError\n", - " \"\"\"\n", - " self.distmat = np.zeros((self.number, self.number))\n", - " methods = {'haversine': self._haversine}\n", - " assert (method in methods)\n", - " for frm_idx in range(self.number):\n", - " for to_idx in range(self.number):\n", - " if frm_idx != to_idx:\n", - " frm_c = self.customers[frm_idx]\n", - " to_c = self.customers[to_idx]\n", - " self.distmat[frm_idx, to_idx] = self._haversine(\n", - " frm_c.lon, frm_c.lat, to_c.lon, to_c.lat)\n", - " return (self.distmat)\n", - "\n", - " def _haversine(self, lon1, lat1, lon2, lat2):\n", - " \"\"\"\n", - " Calculate the great circle distance between two points\n", - " on the earth specified in decimal degrees of latitude and longitude.\n", - " https://en.wikipedia.org/wiki/Haversine_formula\n", - "\n", - " Args:\n", - " lon1: longitude of pt 1,\n", - " lat1: latitude of pt 1,\n", - " lon2: longitude of pt 2,\n", - " lat2: latitude of pt 2\n", - "\n", - " Returns:\n", - " the distace in km between pt1 and pt2\n", - " \"\"\"\n", - " # convert decimal degrees to radians\n", - " lon1, lat1, lon2, lat2 = map(np.radians, [lon1, lat1, lon2, lat2])\n", - "\n", - " # haversine formula\n", - " dlon = lon2 - lon1\n", - " dlat = lat2 - lat1\n", - " a = (np.sin(dlat / 2)**2 +\n", - " np.cos(lat1) * np.cos(lat2) * np.sin(dlon / 2)**2)\n", - " c = 2 * np.arcsin(np.sqrt(a))\n", - "\n", - " # 6367 km is the radius of the Earth\n", - " km = 6367 * 
c\n", - " return km\n", - "\n", - " def get_total_demand(self):\n", - " \"\"\"\n", - " Return the total demand of all customers.\n", - " \"\"\"\n", - " return (sum([c.demand for c in self.customers]))\n", - "\n", - " def return_dist_callback(self, **kwargs):\n", - " \"\"\"\n", - " Return a callback function for the distance matrix.\n", - "\n", - " Args: **kwargs: Arbitrary keyword arguments passed on to\n", - " make_distance_mat()\n", - "\n", - " Returns:\n", - " function: dist_return(a,b) A function that takes the 'from' node\n", - " index and the 'to' node index and returns the distance in km.\n", - " \"\"\"\n", - " self.make_distance_mat(**kwargs)\n", - "\n", - " def dist_return(from_index, to_index):\n", - " # Convert from routing variable Index to distance matrix NodeIndex.\n", - " from_node = self.manager.IndexToNode(from_index)\n", - " to_node = self.manager.IndexToNode(to_index)\n", - " return (self.distmat[from_node][to_node])\n", - "\n", - " return dist_return\n", - "\n", - " def return_dem_callback(self):\n", - " \"\"\"\n", - " Return a callback function that gives the demands.\n", - "\n", - " Returns:\n", - " function: dem_return(a) A function that takes the 'from' node\n", - " index and returns the distance in km.\n", - " \"\"\"\n", - "\n", - " def dem_return(from_index):\n", - " # Convert from routing variable Index to distance matrix NodeIndex.\n", - " from_node = self.manager.IndexToNode(from_index)\n", - " return (self.customers[from_node].demand)\n", - "\n", - " return dem_return\n", - "\n", - " def zero_depot_demands(self, depot):\n", - " \"\"\"\n", - " Zero out the demands and time windows of depot. 
The Depots do not have\n", - " demands or time windows so this function clears them.\n", - "\n", - " Args: depot (int): index of the stop to modify into a depot.\n", - " Examples: >>> customers.zero_depot_demands(5) >>>\n", - " customers.customers[5].demand == 0 True\n", - " \"\"\"\n", - " start_depot = self.customers[depot]\n", - " self.customers[depot] = start_depot._replace(\n", - " demand=0, tw_open=None, tw_close=None)\n", - "\n", - " def make_service_time_call_callback(self):\n", - " \"\"\"\n", - " Return a callback function that provides the time spent servicing the\n", - " customer. Here is it proportional to the demand given by\n", - " self.service_time_per_dem, default 300 seconds per unit demand.\n", - "\n", - " Returns:\n", - " function [dem_return(a, b)]: A function that takes the from/a node\n", - " index and the to/b node index and returns the service time at a\n", - "\n", - " \"\"\"\n", - "\n", - " def service_time_return(a, b):\n", - " return (self.customers[a].demand * self.service_time_per_dem)\n", - "\n", - " return service_time_return\n", - "\n", - " def make_transit_time_callback(self, speed_kmph=10):\n", - " \"\"\"\n", - " Creates a callback function for transit time. Assuming an average\n", - " speed of speed_kmph\n", - " Args:\n", - " speed_kmph: the average speed in km/h\n", - "\n", - " Returns:\n", - " function [transit_time_return(a, b)]: A function that takes the\n", - " from/a node index and the to/b node index and returns the\n", - " transit time from a to b.\n", - " \"\"\"\n", - "\n", - " def transit_time_return(a, b):\n", - " return (self.distmat[a][b] / (speed_kmph * 1.0 / 60**2))\n", - "\n", - " return transit_time_return\n", - "\n", - "\n", - "class Vehicles():\n", - " \"\"\"\n", - " A Class to create and hold vehicle information.\n", - "\n", - " The Vehicles in a CVRPTW problem service the customers and belong to a\n", - " depot. The class Vehicles creates a list of named tuples describing the\n", - " Vehicles. 
The main characteristics are the vehicle capacity, fixed cost,\n", - " and cost per km. The fixed cost of using a certain type of vehicles can be\n", - " higher or lower than others. If a vehicle is used, i.e. this vehicle serves\n", - " at least one node, then this cost is added to the objective function.\n", - "\n", - " Note:\n", - " If numpy arrays are given for capacity and cost, then they must be of\n", - " the same length, and the number of vehicles are inferred from them.\n", - " If scalars are given, the fleet is homogeneous, and the number of\n", - " vehicles is determined by number.\n", - "\n", - " Args: capacity (scalar or numpy array): The integer capacity of demand\n", - " units. cost (scalar or numpy array): The fixed cost of the vehicle. number\n", - " (Optional [int]): The number of vehicles in a homogeneous fleet.\n", - " \"\"\"\n", - "\n", - " def __init__(self, capacity=100, cost=100, number=None):\n", - "\n", - " Vehicle = namedtuple('Vehicle', ['index', 'capacity', 'cost'])\n", - "\n", - " if number is None:\n", - " self.number = np.size(capacity)\n", - " else:\n", - " self.number = number\n", - " idxs = np.array(range(0, self.number))\n", - "\n", - " if np.isscalar(capacity):\n", - " capacities = capacity * np.ones_like(idxs)\n", - " elif np.size(capacity) != self.number:\n", - " print('capacity is neither scalar, nor the same size as num!')\n", - " else:\n", - " capacities = capacity\n", - "\n", - " if np.isscalar(cost):\n", - " costs = cost * np.ones_like(idxs)\n", - " elif np.size(cost) != self.number:\n", - " print(np.size(cost))\n", - " print('cost is neither scalar, nor the same size as num!')\n", - " else:\n", - " costs = cost\n", - "\n", - " self.vehicles = [\n", - " Vehicle(idx, capacity, cost)\n", - " for idx, capacity, cost in zip(idxs, capacities, costs)\n", - " ]\n", - "\n", - " def get_total_capacity(self):\n", - " return (sum([c.capacity for c in self.vehicles]))\n", - "\n", - " def return_starting_callback(self, customers, 
sameStartFinish=False):\n", - " # create a different starting and finishing depot for each vehicle\n", - " self.starts = [\n", - " int(customers.central_start_node()) for o in range(self.number)\n", - " ]\n", - " if sameStartFinish:\n", - " self.ends = self.starts\n", - " else:\n", - " self.ends = [\n", - " int(customers.central_start_node(invert=True))\n", - " for o in range(self.number)\n", - " ]\n", - " # the depots will not have demands, so zero them.\n", - " for depot in self.starts:\n", - " customers.zero_depot_demands(depot)\n", - " for depot in self.ends:\n", - " customers.zero_depot_demands(depot)\n", - "\n", - " def start_return(v):\n", - " return (self.starts[v])\n", - "\n", - " return start_return\n", - "\n", - "\n", - "def discrete_cmap(N, base_cmap=None):\n", - " \"\"\"\n", - " Create an N-bin discrete colormap from the specified input map\n", - " \"\"\"\n", - " # Note that if base_cmap is a string or None, you can simply do\n", - " # return plt.cm.get_cmap(base_cmap, N)\n", - " # The following works for string, None, or a colormap instance:\n", - "\n", - " base = plt.cm.get_cmap(base_cmap)\n", - " color_list = base(np.linspace(0, 1, N))\n", - " cmap_name = base.name + str(N)\n", - " return base.from_list(cmap_name, color_list, N)\n", - "\n", - "\n", - "def vehicle_output_string(manager, routing, plan):\n", - " \"\"\"\n", - " Return a string displaying the output of the routing instance and\n", - " assignment (plan).\n", - "\n", - " Args: routing (ortools.constraint_solver.pywrapcp.RoutingModel): routing.\n", - " plan (ortools.constraint_solver.pywrapcp.Assignment): the assignment.\n", - "\n", - " Returns:\n", - " (string) plan_output: describing each vehicle's plan.\n", - "\n", - " (List) dropped: list of dropped orders.\n", - "\n", - " \"\"\"\n", - " dropped = []\n", - " for order in range(routing.Size()):\n", - " if (plan.Value(routing.NextVar(order)) == order):\n", - " dropped.append(str(order))\n", - "\n", - " capacity_dimension = 
routing.GetDimensionOrDie('Capacity')\n", - " time_dimension = routing.GetDimensionOrDie('Time')\n", - " plan_output = ''\n", - "\n", - " for route_number in range(routing.vehicles()):\n", - " order = routing.Start(route_number)\n", - " plan_output += 'Route {0}:'.format(route_number)\n", - " if routing.IsEnd(plan.Value(routing.NextVar(order))):\n", - " plan_output += ' Empty \\n'\n", - " else:\n", - " while True:\n", - " load_var = capacity_dimension.CumulVar(order)\n", - " time_var = time_dimension.CumulVar(order)\n", - " node = manager.IndexToNode(order)\n", - " plan_output += \\\n", - " ' {node} Load({load}) Time({tmin}, {tmax}) -> '.format(\n", - " node=node,\n", - " load=plan.Value(load_var),\n", - " tmin=str(timedelta(seconds=plan.Min(time_var))),\n", - " tmax=str(timedelta(seconds=plan.Max(time_var))))\n", - "\n", - " if routing.IsEnd(order):\n", - " plan_output += ' EndRoute {0}. \\n'.format(route_number)\n", - " break\n", - " order = plan.Value(routing.NextVar(order))\n", - " plan_output += '\\n'\n", - "\n", - " return (plan_output, dropped)\n", - "\n", - "\n", - "def build_vehicle_route(manager, routing, plan, customers, veh_number):\n", - " \"\"\"\n", - " Build a route for a vehicle by starting at the strat node and\n", - " continuing to the end node.\n", - "\n", - " Args: routing (ortools.constraint_solver.pywrapcp.RoutingModel): routing.\n", - " plan (ortools.constraint_solver.pywrapcp.Assignment): the assignment.\n", - " customers (Customers): the customers instance. 
veh_number (int): index of\n", - " the vehicle\n", - "\n", - " Returns:\n", - " (List) route: indexes of the customers for vehicle veh_number\n", - " \"\"\"\n", - " veh_used = routing.IsVehicleUsed(plan, veh_number)\n", - " print('Vehicle {0} is used {1}'.format(veh_number, veh_used))\n", - " if veh_used:\n", - " route = []\n", - " node = routing.Start(veh_number) # Get the starting node index\n", - " route.append(customers.customers[manager.IndexToNode(node)])\n", - " while not routing.IsEnd(node):\n", - " route.append(customers.customers[manager.IndexToNode(node)])\n", - " node = plan.Value(routing.NextVar(node))\n", - "\n", - " route.append(customers.customers[manager.IndexToNode(node)])\n", - " return route\n", - " else:\n", - " return None\n", - "\n", - "\n", - "def plot_vehicle_routes(veh_route, ax1, customers, vehicles):\n", - " \"\"\"\n", - " Plot the vehicle routes on matplotlib axis ax1.\n", - "\n", - " Args: veh_route (dict): a dictionary of routes keyed by vehicle idx. ax1\n", - " (matplotlib.axes._subplots.AxesSubplot): Matplotlib axes customers\n", - " (Customers): the customers instance. 
vehicles (Vehicles): the vehicles\n", - " instance.\n", - " \"\"\"\n", - " veh_used = [v for v in veh_route if veh_route[v] is not None]\n", - "\n", - " cmap = discrete_cmap(vehicles.number + 2, 'nipy_spectral')\n", - "\n", - " for veh_number in veh_used:\n", - "\n", - " lats, lons = zip(*[(c.lat, c.lon) for c in veh_route[veh_number]])\n", - " lats = np.array(lats)\n", - " lons = np.array(lons)\n", - " s_dep = customers.customers[vehicles.starts[veh_number]]\n", - " s_fin = customers.customers[vehicles.ends[veh_number]]\n", - " ax1.annotate(\n", - " 'v({veh}) S @ {node}'.format(\n", - " veh=veh_number, node=vehicles.starts[veh_number]),\n", - " xy=(s_dep.lon, s_dep.lat),\n", - " xytext=(10, 10),\n", - " xycoords='data',\n", - " textcoords='offset points',\n", - " arrowprops=dict(\n", - " arrowstyle='->',\n", - " connectionstyle='angle3,angleA=90,angleB=0',\n", - " shrinkA=0.05),\n", - " )\n", - " ax1.annotate(\n", - " 'v({veh}) F @ {node}'.format(\n", - " veh=veh_number, node=vehicles.ends[veh_number]),\n", - " xy=(s_fin.lon, s_fin.lat),\n", - " xytext=(10, -20),\n", - " xycoords='data',\n", - " textcoords='offset points',\n", - " arrowprops=dict(\n", - " arrowstyle='->',\n", - " connectionstyle='angle3,angleA=-90,angleB=0',\n", - " shrinkA=0.05),\n", - " )\n", - " ax1.plot(lons, lats, 'o', mfc=cmap(veh_number + 1))\n", - " ax1.quiver(\n", - " lons[:-1],\n", - " lats[:-1],\n", - " lons[1:] - lons[:-1],\n", - " lats[1:] - lats[:-1],\n", - " scale_units='xy',\n", - " angles='xy',\n", - " scale=1,\n", - " color=cmap(veh_number + 1))\n", - "\n", - "\n", - "def main():\n", - " # Create a set of customer, (and depot) stops.\n", - " customers = Customers(\n", - " num_stops=50,\n", - " min_demand=1,\n", - " max_demand=15,\n", - " box_size=40,\n", - " min_tw=3,\n", - " max_tw=6)\n", - "\n", - " # Create a list of inhomgenious vehicle capacities as integer units.\n", - " capacity = [50, 75, 100, 125, 150, 175, 200, 250]\n", - "\n", - " # Create a list of inhomogeneous fixed 
vehicle costs.\n", - " cost = [int(100 + 2 * np.sqrt(c)) for c in capacity]\n", - "\n", - " # Create a set of vehicles, the number set by the length of capacity.\n", - " vehicles = Vehicles(capacity=capacity, cost=cost)\n", - "\n", - " # check to see that the problem is feasible, if we don't have enough\n", - " # vehicles to cover the demand, there is no point in going further.\n", - " assert (customers.get_total_demand() < vehicles.get_total_capacity())\n", - "\n", - " # Set the starting nodes, and create a callback fn for the starting node.\n", - " start_fn = vehicles.return_starting_callback(\n", - " customers, sameStartFinish=False)\n", - "\n", - " # Create the routing index manager.\n", - " manager = pywrapcp.RoutingIndexManager(\n", - " customers.number, # int number\n", - " vehicles.number, # int number\n", - " vehicles.starts, # List of int start depot\n", - " vehicles.ends) # List of int end depot\n", - "\n", - " customers.set_manager(manager)\n", - "\n", - " # Set model parameters\n", - " model_parameters = pywrapcp.DefaultRoutingModelParameters()\n", - "\n", - " # The solver parameters can be accessed from the model parameters. 
For example :\n", - " # model_parameters.solver_parameters.CopyFrom(\n", - " # pywrapcp.Solver.DefaultSolverParameters())\n", - " # model_parameters.solver_parameters.trace_propagation = True\n", - "\n", - " # Make the routing model instance.\n", - " routing = pywrapcp.RoutingModel(manager, model_parameters)\n", - "\n", - " parameters = pywrapcp.DefaultRoutingSearchParameters()\n", - " # Setting first solution heuristic (cheapest addition).\n", - " parameters.first_solution_strategy = (\n", - " routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC)\n", - " # Routing: forbids use of TSPOpt neighborhood, (this is the default behaviour)\n", - " parameters.local_search_operators.use_tsp_opt = pywrapcp.BOOL_FALSE\n", - " # Disabling Large Neighborhood Search, (this is the default behaviour)\n", - " parameters.local_search_operators.use_path_lns = pywrapcp.BOOL_FALSE\n", - " parameters.local_search_operators.use_inactive_lns = pywrapcp.BOOL_FALSE\n", - "\n", - " parameters.time_limit.seconds = 10\n", - " parameters.use_full_propagation = True\n", - " #parameters.log_search = True\n", - "\n", - " # Create callback fns for distances, demands, service and transit-times.\n", - " dist_fn = customers.return_dist_callback()\n", - " dist_fn_index = routing.RegisterTransitCallback(dist_fn)\n", - "\n", - " dem_fn = customers.return_dem_callback()\n", - " dem_fn_index = routing.RegisterUnaryTransitCallback(dem_fn)\n", - "\n", - " # Create and register a transit callback.\n", - " serv_time_fn = customers.make_service_time_call_callback()\n", - " transit_time_fn = customers.make_transit_time_callback()\n", - " def tot_time_fn(from_index, to_index):\n", - " \"\"\"\n", - " The time function we want is both transit time and service time.\n", - " \"\"\"\n", - " # Convert from routing variable Index to distance matrix NodeIndex.\n", - " from_node = manager.IndexToNode(from_index)\n", - " to_node = manager.IndexToNode(to_index)\n", - " return serv_time_fn(from_node, to_node) + 
transit_time_fn(from_node, to_node)\n", - "\n", - " tot_time_fn_index = routing.RegisterTransitCallback(tot_time_fn)\n", - "\n", - " # Set the cost function (distance callback) for each arc, homogeneous for\n", - " # all vehicles.\n", - " routing.SetArcCostEvaluatorOfAllVehicles(dist_fn_index)\n", - "\n", - " # Set vehicle costs for each vehicle, not homogeneous.\n", - " for veh in vehicles.vehicles:\n", - " routing.SetFixedCostOfVehicle(veh.cost, int(veh.index))\n", - "\n", - " # Add a dimension for vehicle capacities\n", - " null_capacity_slack = 0\n", - " routing.AddDimensionWithVehicleCapacity(\n", - " dem_fn_index, # demand callback\n", - " null_capacity_slack,\n", - " capacity, # capacity array\n", - " True,\n", - " 'Capacity')\n", - " # Add a dimension for time and a limit on the total time_horizon\n", - " routing.AddDimension(\n", - " tot_time_fn_index, # total time function callback\n", - " customers.time_horizon,\n", - " customers.time_horizon,\n", - " True,\n", - " 'Time')\n", - "\n", - " time_dimension = routing.GetDimensionOrDie('Time')\n", - " for cust in customers.customers:\n", - " if cust.tw_open is not None:\n", - " time_dimension.CumulVar(manager.NodeToIndex(cust.index)).SetRange(\n", - " cust.tw_open.seconds, cust.tw_close.seconds)\n", - " \"\"\"\n", - " To allow the dropping of orders, we add disjunctions to all the customer\n", - " nodes. Each disjunction is a list of 1 index, which allows that customer to\n", - " be active or not, with a penalty if not. 
The penalty should be larger\n", - " than the cost of servicing that customer, or it will always be dropped!\n", - " \"\"\"\n", - " # To add disjunctions just to the customers, make a list of non-depots.\n", - " non_depot = set(range(customers.number))\n", - " non_depot.difference_update(vehicles.starts)\n", - " non_depot.difference_update(vehicles.ends)\n", - " penalty = 400000 # The cost for dropping a node from the plan.\n", - " nodes = [routing.AddDisjunction([manager.NodeToIndex(c)], penalty) for c in non_depot]\n", - "\n", - " # This is how you would implement partial routes if you already knew part\n", - " # of a feasible solution for example:\n", - " # partial = np.random.choice(list(non_depot), size=(4,5), replace=False)\n", - "\n", - " # routing.CloseModel()\n", - " # partial_list = [partial[0,:].tolist(),\n", - " # partial[1,:].tolist(),\n", - " # partial[2,:].tolist(),\n", - " # partial[3,:].tolist(),\n", - " # [],[],[],[]]\n", - " # print(routing.ApplyLocksToAllVehicles(partial_list, False))\n", - "\n", - " # Solve the problem !\n", - " assignment = routing.SolveWithParameters(parameters)\n", - "\n", - " # The rest is all optional for saving, printing or plotting the solution.\n", - " if assignment:\n", - " ## save the assignment, (Google Protobuf format)\n", - " #save_file_base = os.path.realpath(__file__).split('.')[0]\n", - " #if routing.WriteAssignment(save_file_base + '_assignment.ass'):\n", - " # print('succesfully wrote assignment to file ' + save_file_base +\n", - " # '_assignment.ass')\n", - "\n", - " print('The Objective Value is {0}'.format(assignment.ObjectiveValue()))\n", - "\n", - " plan_output, dropped = vehicle_output_string(manager, routing, assignment)\n", - " print(plan_output)\n", - " print('dropped nodes: ' + ', '.join(dropped))\n", - "\n", - " # you could print debug information like this:\n", - " # print(routing.DebugOutputAssignment(assignment, 'Capacity'))\n", - "\n", - " vehicle_routes = {}\n", - " for veh in 
range(vehicles.number):\n", - " vehicle_routes[veh] = build_vehicle_route(manager, routing, assignment,\n", - " customers, veh)\n", - "\n", - " # Plotting of the routes in matplotlib.\n", - " fig = plt.figure()\n", - " ax = fig.add_subplot(111)\n", - " # Plot all the nodes as black dots.\n", - " clon, clat = zip(*[(c.lon, c.lat) for c in customers.customers])\n", - " ax.plot(clon, clat, 'k.')\n", - " # plot the routes as arrows\n", - " plot_vehicle_routes(vehicle_routes, ax, customers, vehicles)\n", - " plt.show()\n", - "\n", - " else:\n", - " print('No assignment')\n", - "\n", - "\n", - "main()\n", - "\n" - ] - } - ], - "metadata": { - "language_info": { - "name": "python" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/examples/notebook/examples/gate_scheduling_sat.ipynb b/examples/notebook/examples/gate_scheduling_sat.ipynb index 134683752d7..97121aaf8e8 100644 --- a/examples/notebook/examples/gate_scheduling_sat.ipynb +++ b/examples/notebook/examples/gate_scheduling_sat.ipynb @@ -92,8 +92,8 @@ "metadata": {}, "outputs": [], "source": [ - "from ortools.sat.colab import visualization\n", "from ortools.sat.python import cp_model\n", + "from ortools.sat.colab import visualization\n", "\n", "\n", "def main(_) -> None:\n", diff --git a/examples/notebook/examples/golomb8.ipynb b/examples/notebook/examples/golomb8.ipynb index bf943ae0def..1c4d8d66b3b 100644 --- a/examples/notebook/examples/golomb8.ipynb +++ b/examples/notebook/examples/golomb8.ipynb @@ -137,7 +137,7 @@ " branches = collector.Branches(i)\n", " failures = collector.Failures(i)\n", " print(\n", - " (\"Solution #%i: value = %i, failures = %i, branches = %i,\" \"time = %i ms\")\n", + " \"Solution #%i: value = %i, failures = %i, branches = %i,time = %i ms\"\n", " % (i, obj_value, failures, branches, time)\n", " )\n", " time = solver.WallTime()\n", diff --git a/examples/notebook/examples/golomb_sat.ipynb b/examples/notebook/examples/golomb_sat.ipynb index 410ab758785..47476ffe2ba 100644 --- 
a/examples/notebook/examples/golomb_sat.ipynb +++ b/examples/notebook/examples/golomb_sat.ipynb @@ -93,8 +93,8 @@ "outputs": [], "source": [ "from typing import Sequence\n", + "\n", "from ortools.sat.colab import flags\n", - "from google.protobuf import text_format\n", "from ortools.sat.python import cp_model\n", "\n", "_ORDER = flags.define_integer(\"order\", 8, \"Order of the ruler.\")\n", @@ -137,7 +137,7 @@ " # Solve the model.\n", " solver = cp_model.CpSolver()\n", " if params:\n", - " text_format.Parse(params, solver.parameters)\n", + " solver.parameters.parse_text_format(params)\n", " solution_printer = cp_model.ObjectiveSolutionPrinter()\n", " print(f\"Golomb ruler(order={order})\")\n", " status = solver.solve(model, solution_printer)\n", diff --git a/examples/notebook/examples/horse_jumping_show.ipynb b/examples/notebook/examples/horse_jumping_show.ipynb new file mode 100644 index 00000000000..ac5ed19094b --- /dev/null +++ b/examples/notebook/examples/horse_jumping_show.ipynb @@ -0,0 +1,376 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "google", + "metadata": {}, + "source": [ + "##### Copyright 2025 Google LLC." 
+ ] + }, + { + "cell_type": "markdown", + "id": "apache", + "metadata": {}, + "source": [ + "Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "you may not use this file except in compliance with the License.\n", + "You may obtain a copy of the License at\n", + "\n", + " http://www.apache.org/licenses/LICENSE-2.0\n", + "\n", + "Unless required by applicable law or agreed to in writing, software\n", + "distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "See the License for the specific language governing permissions and\n", + "limitations under the License.\n" + ] + }, + { + "cell_type": "markdown", + "id": "basename", + "metadata": {}, + "source": [ + "# horse_jumping_show" + ] + }, + { + "cell_type": "markdown", + "id": "link", + "metadata": {}, + "source": [ + "\n", + "\n", + "\n", + "
\n", + "Run in Google Colab\n", + "\n", + "View source on GitHub\n", + "
"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "doc",
+ "metadata": {},
+ "source": [
+ "First, you must install [ortools](https://pypi.org/project/ortools/) package in this colab."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "install",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "%pip install ortools"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "description",
+ "metadata": {},
+ "source": [
+ "\n",
+ "Horse Jumping Show.\n",
+ "\n",
+ "A major three-day horse jumping competition is scheduled next winter in Geneva.\n",
+ "The show features riders and horses from all over the world, competing in\n",
+ "several different competitions throughout the show. Six months before the show,\n",
+ "riders submit the entries (i.e., rider name, horse, competition) to the\n",
+ "organizers. Riders can submit multiple entries, for example, to compete in the\n",
+ "same competition with multiple horses, or to compete in several competitions.\n",
+ "\n",
+ "There are additional space limitations. For example, the venue has 100 stalls,\n",
+ "4 arenas (where competitions can be scheduled), and 6 paddocks (where riders\n",
+ "warm up before their turn). It is also ideal that paddocks are not overloaded by\n",
+ "riders from multiple competitions.\n",
+ "\n",
+ "The organizer's goal is to find a schedule in which competitions don't overlap, and\n",
+ "the times at which they happen are scattered throughout the day (and hopefully\n",
+ "not that early in the morning). The starting times of the competitions should be\n",
+ "at the hour or 30 minutes past the hour (e.g. 9:30, 10:00, 10:30, etc.).\n",
+ "Competitions can only be scheduled while there is daylight, except for\n",
+ "competitions scheduled in the Main Stage arena, which is covered and has proper\n",
+ "lighting. 
Also, beginner competitions (1.10m or less) are scheduled on the first\n", + "day, and advanced competitions (1.50m or more) are scheduled on the last day.\n", + "\n", + "The information for next winter's show is as follows:\n", + "Available stalls: 100\n", + "Number of riders: 100\n", + "Number of horses: 130\n", + "Number of requested Entries: 200\n", + "Number of competitions: 15\n", + "\n", + "Venue:\n", + "- Main Stage arena: Covered (9AM-11PM)\n", + "- Highlands arena: Daylight Only (9AM-5PM)\n", + "- Sawdust arena: Daylight Only (9AM-5PM)\n", + "- Paddock1 has capacity for 10 riders and serves Main Stage\n", + "- Paddock2 has capacity for 6 riders and serves Main Stage\n", + "- Paddock3 has capacity for 8 riders and serves Main Stage, Highlands\n", + "- Paddock4 has capacity for 8 riders and serves Highlands, Sawdust\n", + "- Paddock5 has capacity for 9 riders and serves Sawdust\n", + "- Paddock6 has capacity for 7 riders and serves Sawdust\n", + "\n", + "competitions:\n", + "- C_5_1.10m_Year_Olds 1.10m - 60 minutes\n", + "- C_6_1.25m_Year_Olds 1.25m - 90 minutes\n", + "- C_7_1.35m_Year_Olds 1.35m - 120 minutes\n", + "- C_0.8m_Jumpers 0.80m - 240 minutes\n", + "- C_1.0m_Jumpers 1.00m - 180 minutes\n", + "- C_1.10m_Jumpers 1.10m - 180 minutes\n", + "- C_1.20m_Jumpers 1.20m - 120 minutes\n", + "- C_1.30m_Jumpers 1.30m - 120 minutes\n", + "- C_1.40m_Jumpers 1.40m - 120 minutes\n", + "- C_1.20m_Derby 1.20m - 180 minutes\n", + "- C_1.35m_Derby 1.35m - 180 minutes\n", + "- C_1.45m_Derby 1.45m - 180 minutes\n", + "- C_1.40m_Open 1.40m - 120 minutes\n", + "- C_1.50m_Open 1.50m - 180 minutes\n", + "- C_1.60m_Grand_Prix 1.60m - 240 minutes\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "code", + "metadata": {}, + "outputs": [], + "source": [ + "import dataclasses\n", + "import numpy as np\n", + "from ortools.sat.python import cp_model\n", + "\n", + "\n", + "@dataclasses.dataclass(frozen=True)\n", + "class Arena:\n", + " \"\"\"Data 
for an arena.\"\"\"\n", + "\n", + " id: str\n", + " hours: str\n", + "\n", + "\n", + "@dataclasses.dataclass(frozen=True)\n", + "class Competition:\n", + " \"\"\"Data for a competition.\"\"\"\n", + "\n", + " id: str\n", + " height: float\n", + " duration: int\n", + "\n", + "\n", + "@dataclasses.dataclass(frozen=True)\n", + "class HorseJumpingShowData:\n", + " \"\"\"Horse Jumping Show Data.\"\"\"\n", + "\n", + " num_days: int\n", + " competitions: list[Competition]\n", + " arenas: list[Arena]\n", + "\n", + "\n", + "@dataclasses.dataclass(frozen=True)\n", + "class ScheduledCompetition:\n", + " \"\"\"Horse Jumping Show Schedule.\"\"\"\n", + "\n", + " completion: str\n", + " day: int\n", + " arena: str\n", + " start_time: str\n", + " end_time: str\n", + "\n", + "\n", + "def generate_horse_jumping_show_data() -> HorseJumpingShowData:\n", + " \"\"\"Generates the horse jumping show data.\"\"\"\n", + " arenas = [\n", + " Arena(id=\"Main Stage\", hours=\"9AM-9PM\"),\n", + " Arena(id=\"Highlands\", hours=\"9AM-5PM\"),\n", + " Arena(id=\"Sawdust\", hours=\"9AM-5PM\"),\n", + " ]\n", + " competitions = [\n", + " Competition(id=\"C_5_1.10m_Year_Olds\", height=1.1, duration=60),\n", + " Competition(id=\"C_6_1.25m_Year_Olds\", height=1.25, duration=90),\n", + " Competition(id=\"C_7_1.35m_Year_Olds\", height=1.35, duration=120),\n", + " Competition(id=\"C_0.8m_Jumpers\", height=0.8, duration=240),\n", + " Competition(id=\"C_1.0m_Jumpers\", height=1.0, duration=180),\n", + " Competition(id=\"C_1.10m_Jumpers\", height=1.10, duration=180),\n", + " Competition(id=\"C_1.20m_Jumpers\", height=1.20, duration=120),\n", + " Competition(id=\"C_1.30m_Jumpers\", height=1.30, duration=120),\n", + " Competition(id=\"C_1.40m_Jumpers\", height=1.40, duration=120),\n", + " Competition(id=\"C_1.20m_Derby\", height=1.20, duration=180),\n", + " Competition(id=\"C_1.35m_Derby\", height=1.35, duration=180),\n", + " Competition(id=\"C_1.45m_Derby\", height=1.45, duration=180),\n", + " 
Competition(id=\"C_1.40m_Open\", height=1.40, duration=120),\n", + " Competition(id=\"C_1.50m_Open\", height=1.50, duration=180),\n", + " Competition(id=\"C_1.60m_Grand_Prix\", height=1.60, duration=240),\n", + " ]\n", + " return HorseJumpingShowData(num_days=3, competitions=competitions, arenas=arenas)\n", + "\n", + "\n", + "def solve() -> list[ScheduledCompetition]:\n", + " \"\"\"Solves the horse jumping show problem.\"\"\"\n", + " data = generate_horse_jumping_show_data()\n", + " num_days = data.num_days\n", + " competitions = data.competitions\n", + " arenas = data.arenas\n", + " day_index = list(range(num_days))\n", + "\n", + " # Time parser.\n", + " def parse_time(t_str):\n", + " hour = int(t_str[:-2])\n", + " if \"PM\" in t_str and hour != 12:\n", + " hour += 12\n", + " if \"AM\" in t_str and hour == 12:\n", + " hour = 0\n", + " return hour * 60\n", + "\n", + " # Schedule time intervals for each arena.\n", + " schedule_interval_by_arena = {}\n", + " for arena in arenas:\n", + " start_h_str, end_h_str = arena.hours.split(\"-\")\n", + " start_time = parse_time(start_h_str)\n", + " end_time = parse_time(end_h_str)\n", + " schedule_interval_by_arena[arena.id] = (start_time, end_time)\n", + "\n", + " # Map time to 30-minute intervals and back.\n", + " time_slot_size = 30\n", + "\n", + " def time_to_slot(time_in_minutes: int):\n", + " return time_in_minutes // time_slot_size\n", + "\n", + " def slot_to_time(slot_index: int):\n", + " return slot_index * time_slot_size\n", + "\n", + " # --- Model Creation ---\n", + " model = cp_model.CpModel()\n", + "\n", + " # --- Variables ---\n", + " # Competition scheduling variables per arena and day.\n", + " competition_assignments = np.empty(\n", + " (len(competitions), len(arenas), num_days), dtype=object\n", + " )\n", + " for c, comp in enumerate(competitions):\n", + " for a, arena in enumerate(arenas):\n", + " for d in day_index:\n", + " competition_assignments[c, a, d] = model.new_bool_var(\n", + " 
f\"competition_scheduled_{comp.id}_{arena.id}_{d}\"\n", + " )\n", + " # Time intervals and start times for each competition. We model time steps\n", + " # 0,1,2,... to represent the start times in 30 minutes intervals, as opposed\n", + " # to represent the start times in minutes.\n", + " competition_start_times = np.empty(\n", + " (len(competitions), len(arenas), num_days), dtype=object\n", + " )\n", + " competition_intervals = np.empty(\n", + " (len(competitions), len(arenas), num_days), dtype=object\n", + " )\n", + " for c, comp in enumerate(competitions):\n", + " for a, arena in enumerate(arenas):\n", + " earliest_start_time, latest_end_time = schedule_interval_by_arena[arena.id]\n", + " latest_start_time = latest_end_time - comp.duration\n", + " for d in day_index:\n", + " competition_start_times[c, a, d] = model.new_int_var(\n", + " time_to_slot(earliest_start_time),\n", + " time_to_slot(latest_start_time),\n", + " f\"start_time_{comp.id}_{arena.id}_{d}\",\n", + " )\n", + " competition_intervals[c, a, d] = (\n", + " model.new_optional_fixed_size_interval_var(\n", + " competition_start_times[c, a, d],\n", + " time_to_slot(comp.duration),\n", + " competition_assignments[c, a, d],\n", + " f\"task_{comp.id}_{arena.id}_{d}\",\n", + " )\n", + " )\n", + "\n", + " # --- Constraints ---\n", + " # Every competition must be scheduled, enforcing that beginner competitions\n", + " # are on day 1, and advanced competitions are on day 3.\n", + " for c, comp in enumerate(competitions):\n", + " model.add(np.sum(competition_assignments[c, :, :]) == 1)\n", + " # Beginner competitions are on the first day.\n", + " if comp.height <= 1.10:\n", + " beginners_day = 0\n", + " model.add(np.sum(competition_assignments[c, :, beginners_day]) == 1)\n", + " # Advanced competitions are on the last day.\n", + " if comp.height >= 1.50:\n", + " advanced_day = num_days - 1\n", + " model.add(np.sum(competition_assignments[c, :, advanced_day]) == 1)\n", + "\n", + " # Competitions scheduled on the 
same arena and on the same day can't overlap.\n", + " for a, _ in enumerate(arenas):\n", + " for day in range(num_days):\n", + " model.add_no_overlap(competition_intervals[:, a, day])\n", + "\n", + " # Start times should be scattered across the day.\n", + " for a, _ in enumerate(arenas):\n", + " for day in day_index:\n", + " model.add_all_different(competition_start_times[:, a, day])\n", + "\n", + " # --- Objective ---\n", + " model.maximize(np.sum(competition_start_times))\n", + "\n", + " # --- Solve ---\n", + " solver = cp_model.CpSolver()\n", + " solver.parameters.max_time_in_seconds = 30.0\n", + " solver.parameters.log_search_progress = True\n", + " solver.parameters.num_workers = 16\n", + " status = solver.solve(model)\n", + "\n", + " # --- Print Solution ---\n", + " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", + " schedule = []\n", + " for day in range(num_days):\n", + " for c, comp in enumerate(competitions):\n", + " for a, arena in enumerate(arenas):\n", + " if solver.value(competition_assignments[c, a, day]):\n", + " start_time_minutes = slot_to_time(\n", + " solver.value(competition_start_times[c, a, day])\n", + " )\n", + " start_h, start_m = divmod(start_time_minutes, 60)\n", + " end_h, end_m = divmod(start_time_minutes + comp.duration, 60)\n", + " schedule.append(\n", + " ScheduledCompetition(\n", + " completion=comp.id,\n", + " day=day + 1,\n", + " arena=arena.id,\n", + " start_time=f\"{start_h:02d}:{start_m:02d}\",\n", + " end_time=f\"{end_h:02d}:{end_m:02d}\",\n", + " )\n", + " )\n", + " # Sort and print schedule for readability.\n", + " schedule.sort(key=lambda x: (x.day, x.start_time))\n", + " print(\"Schedule:\")\n", + " for item in schedule:\n", + " print(\n", + " f\"Day {item.day}: {item.completion} in {item.arena} from\"\n", + " f\" {item.start_time} to {item.end_time}.\"\n", + " )\n", + " return schedule\n", + " elif status == cp_model.INFEASIBLE:\n", + " print(\"Problem is infeasible.\")\n", + " else:\n", + " print(\"No 
solution found.\")\n", + " # Return an empty schedule if no solution is found.\n", + " return []\n", + "\n", + "\n", + "def main(_):\n", + " solve()\n", + "\n", + "\n", + "main()\n", + "\n" + ] + } + ], + "metadata": { + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/notebook/examples/knapsack_2d_sat.ipynb b/examples/notebook/examples/knapsack_2d_sat.ipynb index 297bc816119..5cb51585962 100644 --- a/examples/notebook/examples/knapsack_2d_sat.ipynb +++ b/examples/notebook/examples/knapsack_2d_sat.ipynb @@ -93,8 +93,6 @@ "import numpy as np\n", "import pandas as pd\n", "\n", - "from google.protobuf import text_format\n", - "\n", "from ortools.sat.python import cp_model\n", "\n", "\n", @@ -227,7 +225,7 @@ " # Solve model.\n", " solver = cp_model.CpSolver()\n", " if _PARAMS.value:\n", - " text_format.Parse(_PARAMS.value, solver.parameters)\n", + " solver.parameters.parse_text_format(_PARAMS.value)\n", "\n", " status = solver.solve(model)\n", "\n", @@ -329,7 +327,7 @@ " # solve model.\n", " solver = cp_model.CpSolver()\n", " if _PARAMS.value:\n", - " text_format.Parse(_PARAMS.value, solver.parameters)\n", + " solver.parameters.parse_text_format(_PARAMS.value)\n", "\n", " status = solver.solve(model)\n", "\n", @@ -450,7 +448,7 @@ " # solve model.\n", " solver = cp_model.CpSolver()\n", " if _PARAMS.value:\n", - " text_format.Parse(_PARAMS.value, solver.parameters)\n", + " solver.parameters.parse_text_format(_PARAMS.value)\n", "\n", " status = solver.solve(model)\n", "\n", diff --git a/examples/notebook/examples/line_balancing_sat.ipynb b/examples/notebook/examples/line_balancing_sat.ipynb index a95f9c24438..f3d2220ec8b 100644 --- a/examples/notebook/examples/line_balancing_sat.ipynb +++ b/examples/notebook/examples/line_balancing_sat.ipynb @@ -101,8 +101,6 @@ "from typing import Dict, Sequence\n", "\n", "from ortools.sat.colab import flags\n", - "from google.protobuf import text_format\n", - "\n", "from 
ortools.sat.python import cp_model\n", "\n", "_INPUT = flags.define_string(\"input\", \"\", \"Input file to parse and solve.\")\n", @@ -340,7 +338,7 @@ " # solve model.\n", " solver = cp_model.CpSolver()\n", " if _PARAMS.value:\n", - " text_format.Parse(_PARAMS.value, solver.parameters)\n", + " solver.parameters.parse_text_format(_PARAMS.value)\n", " solver.parameters.log_search_progress = True\n", " solver.solve(model)\n", "\n", @@ -407,7 +405,7 @@ " # solve model.\n", " solver = cp_model.CpSolver()\n", " if _PARAMS.value:\n", - " text_format.Parse(_PARAMS.value, solver.parameters)\n", + " solver.parameters.parse_text_format(_PARAMS.value)\n", " solver.parameters.log_search_progress = True\n", " solver.solve(model)\n", "\n", diff --git a/examples/notebook/examples/linear_assignment_api.ipynb b/examples/notebook/examples/linear_assignment_api.ipynb index 4b02415e9db..fdfbd4b0947 100644 --- a/examples/notebook/examples/linear_assignment_api.ipynb +++ b/examples/notebook/examples/linear_assignment_api.ipynb @@ -75,9 +75,9 @@ "\n", "Test linear sum assignment on a 4x4 matrix.\n", "\n", - " Example taken from:\n", - " http://www.ee.oulu.fi/~mpa/matreng/eem1_2-1.htm with kCost[0][1]\n", - " modified so the optimum solution is unique.\n", + "Example taken from:\n", + "http://www.ee.oulu.fi/~mpa/matreng/eem1_2-1.htm with kCost[0][1]\n", + "modified so the optimum solution is unique.\n", "\n" ] }, @@ -96,7 +96,12 @@ " \"\"\"Test linear sum assignment on a 4x4 matrix.\"\"\"\n", " num_sources = 4\n", " num_targets = 4\n", - " cost = [[90, 76, 75, 80], [35, 85, 55, 65], [125, 95, 90, 105], [45, 110, 95, 115]]\n", + " cost = [\n", + " [90, 76, 75, 80],\n", + " [35, 85, 55, 65],\n", + " [125, 95, 90, 105],\n", + " [45, 110, 95, 115],\n", + " ]\n", " expected_cost = cost[0][3] + cost[1][2] + cost[2][1] + cost[3][0]\n", "\n", " assignment = linear_sum_assignment.SimpleLinearSumAssignment()\n", diff --git a/examples/notebook/examples/maximize_combinations_sat.ipynb 
b/examples/notebook/examples/maximize_combinations_sat.ipynb index 40651039542..ff177562ff7 100644 --- a/examples/notebook/examples/maximize_combinations_sat.ipynb +++ b/examples/notebook/examples/maximize_combinations_sat.ipynb @@ -84,6 +84,7 @@ "outputs": [], "source": [ "from typing import Sequence\n", + "\n", "from ortools.sat.python import cp_model\n", "\n", "\n", diff --git a/examples/notebook/examples/maze_escape_sat.ipynb b/examples/notebook/examples/maze_escape_sat.ipynb index a1be0ae6b3e..47238ef6524 100644 --- a/examples/notebook/examples/maze_escape_sat.ipynb +++ b/examples/notebook/examples/maze_escape_sat.ipynb @@ -79,7 +79,8 @@ "visit all boxes in order, and walk on each block in a 4x4x4 map exactly once.\n", "\n", "Admissible moves are one step in one of the 6 directions:\n", - " x+, x-, y+, y-, z+(up), z-(down)\n" + " x+, x-, y+, y-, z+(up), z-(down)\n", + "\n" ] }, { @@ -92,7 +93,6 @@ "from typing import Dict, Sequence, Tuple\n", "\n", "from ortools.sat.colab import flags\n", - "from google.protobuf import text_format\n", "from ortools.sat.python import cp_model\n", "\n", "_OUTPUT_PROTO = flags.define_string(\n", @@ -207,7 +207,7 @@ " # Solve model.\n", " solver = cp_model.CpSolver()\n", " if params:\n", - " text_format.Parse(params, solver.parameters)\n", + " solver.parameters.parse_text_format(params)\n", " solver.parameters.log_search_progress = True\n", " result = solver.solve(model)\n", "\n", diff --git a/examples/notebook/examples/memory_layout_and_infeasibility_sat.ipynb b/examples/notebook/examples/memory_layout_and_infeasibility_sat.ipynb index 71b8e27a30c..cf369a58403 100644 --- a/examples/notebook/examples/memory_layout_and_infeasibility_sat.ipynb +++ b/examples/notebook/examples/memory_layout_and_infeasibility_sat.ipynb @@ -87,7 +87,6 @@ "from typing import List\n", "\n", "from ortools.sat.colab import flags\n", - "from google.protobuf import text_format\n", "from ortools.sat.python import cp_model\n", "\n", "\n", @@ -139,7 +138,7 @@ 
"\n", " solver = cp_model.CpSolver()\n", " if params:\n", - " text_format.Parse(params, solver.parameters)\n", + " solver.parameters.parse_text_format(params)\n", " status = solver.solve(model)\n", " print(solver.response_stats())\n", "\n", @@ -225,7 +224,7 @@ "\n", " solver = cp_model.CpSolver()\n", " if params:\n", - " text_format.Parse(params, solver.parameters)\n", + " solver.parameters.parse_text_format(params)\n", " status = solver.solve(model)\n", " print(solver.response_stats())\n", " if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:\n", diff --git a/examples/notebook/examples/music_playlist_sat.ipynb b/examples/notebook/examples/music_playlist_sat.ipynb new file mode 100644 index 00000000000..5b27bbb50b0 --- /dev/null +++ b/examples/notebook/examples/music_playlist_sat.ipynb @@ -0,0 +1,388 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "google", + "metadata": {}, + "source": [ + "##### Copyright 2025 Google LLC." + ] + }, + { + "cell_type": "markdown", + "id": "apache", + "metadata": {}, + "source": [ + "Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "you may not use this file except in compliance with the License.\n", + "You may obtain a copy of the License at\n", + "\n", + " http://www.apache.org/licenses/LICENSE-2.0\n", + "\n", + "Unless required by applicable law or agreed to in writing, software\n", + "distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "See the License for the specific language governing permissions and\n", + "limitations under the License.\n" + ] + }, + { + "cell_type": "markdown", + "id": "basename", + "metadata": {}, + "source": [ + "# music_playlist_sat" + ] + }, + { + "cell_type": "markdown", + "id": "link", + "metadata": {}, + "source": [ + "\n", + "\n", + "\n", + "
\n", + "Run in Google Colab\n", + "\n", + "View source on GitHub\n", + "
" + ] + }, + { + "cell_type": "markdown", + "id": "doc", + "metadata": {}, + "source": [ + "First, you must install [ortools](https://pypi.org/project/ortools/) package in this colab." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "install", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install ortools" + ] + }, + { + "cell_type": "markdown", + "id": "description", + "metadata": {}, + "source": [ + "\n", + "Create a balanced music playlist.\n", + "\n", + "Create a music playlist by selecting tunes from a list of tunes.\n", + "\n", + "Each tune has a duration in seconds and a music genre (e.g. Rock, Disco, Techno,\n", + "etc).\n", + "\n", + "The total playlist duration must be as close as possible to a given total\n", + "duration. Each tune can appear at most once in the playlist. All existing\n", + "genres must appear at least once in the playlist. Two consecutive tunes must be\n", + "of different genres. There is a positive cost to go from a genre to another, and\n", + "the playlist must minimize this cost overall.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "code", + "metadata": {}, + "outputs": [], + "source": [ + "from collections.abc import Sequence\n", + "\n", + "from ortools.sat.python import cp_model\n", + "\n", + "\n", + "def Solve():\n", + " \"\"\"Solves the music playlist problem.\"\"\"\n", + "\n", + " # --------------------\n", + " # 1. 
Data\n", + " # --------------------\n", + " tunes = [\n", + " (\"Song 01\", 202, \"Pop\"),\n", + " (\"Song 02\", 233, \"Techno\"),\n", + " (\"Song 03\", 108, \"Disco\"),\n", + " (\"Song 04\", 281, \"Disco\"),\n", + " (\"Song 05\", 129, \"Techno\"),\n", + " (\"Song 06\", 122, \"Techno\"),\n", + " (\"Song 07\", 244, \"Pop\"),\n", + " (\"Song 08\", 178, \"Techno\"),\n", + " (\"Song 09\", 213, \"Techno\"),\n", + " (\"Song 10\", 124, \"Rock\"),\n", + " (\"Song 11\", 120, \"Disco\"),\n", + " (\"Song 12\", 196, \"Rock\"),\n", + " (\"Song 13\", 249, \"Disco\"),\n", + " (\"Song 14\", 294, \"Disco\"),\n", + " (\"Song 15\", 103, \"Techno\"),\n", + " (\"Song 16\", 179, \"Disco\"),\n", + " (\"Song 17\", 146, \"Disco\"),\n", + " (\"Song 18\", 126, \"Techno\"),\n", + " (\"Song 19\", 100, \"Pop\"),\n", + " (\"Song 20\", 122, \"Disco\"),\n", + " (\"Song 21\", 190, \"Disco\"),\n", + " (\"Song 22\", 181, \"Techno\"),\n", + " (\"Song 23\", 273, \"Pop\"),\n", + " (\"Song 24\", 121, \"Disco\"),\n", + " (\"Song 25\", 159, \"Pop\"),\n", + " (\"Song 26\", 234, \"Rock\"),\n", + " (\"Song 27\", 169, \"Rock\"),\n", + " (\"Song 28\", 151, \"Rock\"),\n", + " (\"Song 29\", 142, \"Techno\"),\n", + " (\"Song 30\", 245, \"Pop\"),\n", + " (\"Song 31\", 281, \"Techno\"),\n", + " (\"Song 32\", 154, \"Rock\"),\n", + " (\"Song 33\", 148, \"Disco\"),\n", + " (\"Song 34\", 120, \"Pop\"),\n", + " (\"Song 35\", 163, \"Disco\"),\n", + " (\"Song 36\", 158, \"Pop\"),\n", + " (\"Song 37\", 235, \"Rock\"),\n", + " (\"Song 38\", 106, \"Techno\"),\n", + " (\"Song 39\", 117, \"Disco\"),\n", + " (\"Song 40\", 110, \"Pop\"),\n", + " (\"Song 41\", 144, \"Rock\"),\n", + " (\"Song 42\", 156, \"Disco\"),\n", + " (\"Song 43\", 204, \"Rock\"),\n", + " (\"Song 44\", 108, \"Pop\"),\n", + " (\"Song 45\", 255, \"Pop\"),\n", + " (\"Song 46\", 165, \"Rock\"),\n", + " (\"Song 47\", 290, \"Disco\"),\n", + " (\"Song 48\", 242, \"Pop\"),\n", + " (\"Song 49\", 272, \"Rock\"),\n", + " (\"Song 50\", 212, \"Pop\"),\n", + " ]\n", + "\n", 
+ " # Genre transition costs. A higher cost means a less desirable transition.\n", + " genre_transition_costs = {\n", + " \"Rock\": {\"Pop\": 3, \"Disco\": 5, \"Techno\": 7},\n", + " \"Pop\": {\"Rock\": 3, \"Disco\": 6, \"Techno\": 8},\n", + " \"Disco\": {\"Rock\": 5, \"Pop\": 6, \"Techno\": 9},\n", + " \"Techno\": {\"Rock\": 7, \"Pop\": 8, \"Disco\": 9},\n", + " }\n", + "\n", + " num_tunes = len(tunes)\n", + " all_tunes = range(num_tunes)\n", + "\n", + " # Playlist target duration in seconds.\n", + " target_duration = 60 * 60 # 1 hour\n", + "\n", + " # We use a circuit constraint to model the playlist. In the circuit constraint\n", + " # graph, each node is a tune, and each arc represents a pair of consecutive\n", + " # tunes in the playlist. We introduce a dummy node to represent the start and\n", + " # the end of the playlist.\n", + " #\n", + " # The constraint that two consecutive tunes must be of different genres is\n", + " # encoded by not creating an arc between two tunes that are of the same genre.\n", + " # This is crucial in the modelisation of this problem: it reduces the number\n", + " # of variables in the model, and it avoids additional constraints to ensure\n", + " # two consecutive tunes are of different genres.\n", + "\n", + " # Dummy node representing the start and end of the playlist.\n", + " dummy_node = num_tunes\n", + "\n", + " # `possible_successors[i]` contains the list of nodes that can be reached\n", + " # after node `i`.\n", + " possible_successors = {}\n", + " possible_successors[dummy_node] = [dummy_node]\n", + " for i in all_tunes:\n", + " # Any node can be the first tune in the playlist.\n", + " possible_successors[dummy_node].append(i)\n", + " # Any node can be the last tune in the playlist.\n", + " possible_successors[i] = [dummy_node]\n", + " genre_i = tunes[i][2]\n", + " for j in all_tunes:\n", + " genre_j = tunes[j][2]\n", + " # If `i` and `j` are of different genres, we can go from `i` to `j`.\n", + " if genre_i != genre_j:\n", 
+ " possible_successors[i].append(j)\n", + "\n", + " # --------------------\n", + " # 2. Model\n", + " # --------------------\n", + " model = cp_model.CpModel()\n", + "\n", + " # --------------------\n", + " # 3. Decision Variables\n", + " # --------------------\n", + " # `literals[i][j]` is true if tune `j` follows tune `i` in the playlist.\n", + " literals = {}\n", + "\n", + " # --------------------\n", + " # 4. Constraints\n", + " # --------------------\n", + "\n", + " # 4.1 Two consecutive tunes must be of different genres.\n", + " # This is encoded in possible_successors, which doesn't contain any arcs\n", + " # between two tunes that are of the same genre. Now we just have to add a\n", + " # circuit constraint.\n", + "\n", + " # `arcs` contains the list of possible arcs in the circuit graph, each arc\n", + " # is a tuple (i, j, literals[i][j]).\n", + " arcs = []\n", + "\n", + " def AddArc(i, j):\n", + " literals[(i, j)] = model.new_bool_var(f\"lit_{i}_{j}\")\n", + " arcs.append((i, j, literals[(i, j)]))\n", + "\n", + " # Add all possible arcs between different nodes.\n", + " for i, successors in possible_successors.items():\n", + " for j in successors:\n", + " AddArc(i, j)\n", + "\n", + " # Add self-arcs to let tunes not be in the playlist.\n", + " for i in all_tunes:\n", + " AddArc(i, i)\n", + "\n", + " # Add a circuit constraint with the arcs.\n", + " model.add_circuit(arcs)\n", + "\n", + " # 4.2 All genres must appear at least once.\n", + " # This is encoded by adding a constraint that the sum of all literals for a\n", + " # given genre is at least 1.\n", + "\n", + " # `is_active[i]` is true iff tune `i` is in the playlist, i.e. 
if its self-arc\n", + " # is not active in the circuit.\n", + " is_active = {}\n", + " for i in all_tunes:\n", + " is_active[i] = literals[(i, i)].Not()\n", + "\n", + " # `genre_tunes[genre]` contains the list of tunes that are of genre `genre`.\n", + " genre_tunes = {}\n", + " for genre in genre_transition_costs:\n", + " genre_tunes[genre] = []\n", + " for i in all_tunes:\n", + " genre_tunes[tunes[i][2]].append(i)\n", + "\n", + " # For each genre, at least one tune must be active: the sum of all literals\n", + " # for this genre is at least 1.\n", + " for t in genre_tunes.values():\n", + " model.add(sum(is_active[i] for i in t) >= 1)\n", + "\n", + " # --------------------\n", + " # 5. Objective\n", + " # --------------------\n", + "\n", + " # 5.1. Minimize genre transition costs.\n", + "\n", + " # Add a total_transition_cost variable representing the sum of all transition\n", + " # costs in the playlist.\n", + " max_transition_cost = 0\n", + " for genre_costs in genre_transition_costs.values():\n", + " for cost in genre_costs.values():\n", + " max_transition_cost = max(cost, max_transition_cost)\n", + " total_transition_cost_upper_bound = (num_tunes - 1) * max_transition_cost\n", + " total_transition_cost = model.new_int_var(\n", + " 0, total_transition_cost_upper_bound, \"total_transition_cost\"\n", + " )\n", + "\n", + " transition_cost_terms = []\n", + " for i, successors in possible_successors.items():\n", + " if i == dummy_node:\n", + " continue\n", + " genre_i = tunes[i][2]\n", + " for j in successors:\n", + " if j == dummy_node:\n", + " continue\n", + " genre_j = tunes[j][2]\n", + " cost = genre_transition_costs[genre_i][genre_j]\n", + " transition_cost_terms.append(cost * literals[(i, j)])\n", + " model.add(total_transition_cost == sum(transition_cost_terms))\n", + "\n", + " # 5.2. 
Minimize the deviation between the target duration and the actual total\n", + " # duration.\n", + "\n", + " # Add a total_duration variable representing the duration of all active tunes.\n", + " total_duration_upper_bound = sum([t[1] for t in tunes])\n", + " total_duration = model.new_int_var(0, total_duration_upper_bound, \"total_duration\")\n", + " model.add(total_duration == sum(tunes[i][1] * is_active[i] for i in all_tunes))\n", + "\n", + " # Minimize the absolute difference from the target duration.\n", + " deviation = model.new_int_var(0, target_duration, \"deviation\")\n", + " model.add_abs_equality(deviation, total_duration - target_duration)\n", + "\n", + " # 5.3 Combine the objectives.\n", + " #\n", + " # You can add a weight to prioritize one over the other.\n", + " # For example, `model.minimize(10 * total_transition_cost + deviation)`\n", + " model.minimize(total_transition_cost + deviation)\n", + "\n", + " # --------------------\n", + " # 6. Solve\n", + " # --------------------\n", + " solver = cp_model.CpSolver()\n", + " # Set a time limit for the solver\n", + " solver.parameters.max_time_in_seconds = 30.0\n", + " status = solver.solve(model)\n", + "\n", + " # -----------------------\n", + " # 7. 
Print the solution\n", + " # -----------------------\n", + " if status == cp_model.OPTIMAL:\n", + " print(\"Found Optimal Playlist:\")\n", + " elif status == cp_model.FEASIBLE:\n", + " print(\"Found Feasible Playlist:\")\n", + " else:\n", + " print(\"No solution found.\")\n", + " return\n", + "\n", + " print(f\" Total Transition Cost: {solver.value(total_transition_cost)}\")\n", + " print(\n", + " f\" Playlist Duration: {solver.value(total_duration)} seconds \"\n", + " f\"({solver.value(total_duration) / 60:.2f} minutes)\"\n", + " )\n", + " print(\n", + " f\" Deviation from target duration ({target_duration}):\"\n", + " f\" {solver.value(deviation)} seconds\"\n", + " )\n", + " print(\"-\" * 30)\n", + "\n", + " # Reconstruct the playlist sequence by starting from the dummy node.\n", + " playlist = []\n", + " current_node = dummy_node\n", + " while True:\n", + " # Find the successor of the current node.\n", + " next_node = dummy_node\n", + " for next_node in possible_successors[current_node]:\n", + " if solver.value(literals[(current_node, next_node)]):\n", + " break\n", + "\n", + " if next_node == dummy_node:\n", + " break # We've completed the loop back to the start.\n", + "\n", + " playlist.append(next_node)\n", + " current_node = next_node\n", + "\n", + " if not playlist:\n", + " print(\"Empty playlist.\")\n", + " else:\n", + " for i in playlist:\n", + " (name, duration, genre) = tunes[i]\n", + " print(f\"{i+1}. 
{name} ({genre}) - {duration}s\")\n", + "\n", + "\n", + "def main(argv: Sequence[str]) -> None:\n", + " if len(argv) > 1:\n", + " raise app.UsageError(\"Too many command-line arguments.\")\n", + " Solve()\n", + "\n", + "\n", + "main()\n", + "\n" + ] + } + ], + "metadata": { + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/notebook/examples/no_wait_baking_scheduling_sat.ipynb b/examples/notebook/examples/no_wait_baking_scheduling_sat.ipynb index 93e6e8c8117..24bf4726670 100644 --- a/examples/notebook/examples/no_wait_baking_scheduling_sat.ipynb +++ b/examples/notebook/examples/no_wait_baking_scheduling_sat.ipynb @@ -93,7 +93,6 @@ "from typing import List, Sequence, Tuple\n", "\n", "from ortools.sat.colab import flags\n", - "from google.protobuf import text_format\n", "from ortools.sat.python import cp_model\n", "\n", "_PARAMS = flags.define_string(\n", @@ -354,7 +353,7 @@ " # Solve model.\n", " solver = cp_model.CpSolver()\n", " if _PARAMS.value:\n", - " text_format.Parse(_PARAMS.value, solver.parameters)\n", + " solver.parameters.parse_text_format(_PARAMS.value)\n", " solver.parameters.log_search_progress = True\n", " status = solver.solve(model)\n", "\n", diff --git a/examples/notebook/examples/pentominoes_sat.ipynb b/examples/notebook/examples/pentominoes_sat.ipynb index da74b189c83..7a0cc3df4b3 100644 --- a/examples/notebook/examples/pentominoes_sat.ipynb +++ b/examples/notebook/examples/pentominoes_sat.ipynb @@ -98,7 +98,6 @@ "from typing import Dict, List\n", "\n", "from ortools.sat.colab import flags\n", - "from google.protobuf import text_format\n", "from ortools.sat.python import cp_model\n", "\n", "\n", @@ -211,7 +210,7 @@ " # Solve the model.\n", " solver = cp_model.CpSolver()\n", " if _PARAMS.value:\n", - " text_format.Parse(_PARAMS.value, solver.parameters)\n", + " solver.parameters.parse_text_format(_PARAMS.value)\n", " status = solver.solve(model)\n", "\n", " print(\n", diff --git 
a/examples/notebook/examples/prize_collecting_vrp.ipynb b/examples/notebook/examples/prize_collecting_vrp.ipynb index 1d0da49a882..4bf531de111 100644 --- a/examples/notebook/examples/prize_collecting_vrp.ipynb +++ b/examples/notebook/examples/prize_collecting_vrp.ipynb @@ -151,6 +151,8 @@ " total_distance = 0\n", " total_value_collected = 0\n", " for v in range(manager.GetNumberOfVehicles()):\n", + " if not routing.IsVehicleUsed(assignment, v):\n", + " continue\n", " index = routing.Start(v)\n", " plan_output = f'Route for vehicle {v}:\\n'\n", " route_distance = 0\n", diff --git a/examples/notebook/examples/pyflow_example.ipynb b/examples/notebook/examples/pyflow_example.ipynb index ef8a5b42f50..2f2a5389b6b 100644 --- a/examples/notebook/examples/pyflow_example.ipynb +++ b/examples/notebook/examples/pyflow_example.ipynb @@ -120,7 +120,12 @@ " print(\"MinCostFlow on 4x4 matrix.\")\n", " num_sources = 4\n", " num_targets = 4\n", - " costs = [[90, 75, 75, 80], [35, 85, 55, 65], [125, 95, 90, 105], [45, 110, 95, 115]]\n", + " costs = [\n", + " [90, 75, 75, 80],\n", + " [35, 85, 55, 65],\n", + " [125, 95, 90, 105],\n", + " [45, 110, 95, 115],\n", + " ]\n", " expected_cost = 275\n", " smcf = min_cost_flow.SimpleMinCostFlow()\n", " for source in range(0, num_sources):\n", diff --git a/examples/notebook/examples/rcpsp_sat.ipynb b/examples/notebook/examples/rcpsp_sat.ipynb index 2dba41feebb..c8f3a1d7b57 100644 --- a/examples/notebook/examples/rcpsp_sat.ipynb +++ b/examples/notebook/examples/rcpsp_sat.ipynb @@ -93,7 +93,6 @@ "import collections\n", "\n", "from ortools.sat.colab import flags\n", - "from google.protobuf import text_format\n", "from ortools.sat.python import cp_model\n", "from ortools.scheduling import rcpsp_pb2\n", "from ortools.scheduling.python import rcpsp\n", @@ -428,7 +427,7 @@ "\n", " # Parse user specified parameters.\n", " if params:\n", - " text_format.Parse(params, solver.parameters)\n", + " solver.parameters.parse_text_format(params)\n", "\n", " # 
Favor objective_shaving over objective_lb_search.\n", " if solver.parameters.num_workers >= 16 and solver.parameters.num_workers < 24:\n", diff --git a/examples/notebook/examples/shift_scheduling_sat.ipynb b/examples/notebook/examples/shift_scheduling_sat.ipynb index e61259c2e2f..9da86581baf 100644 --- a/examples/notebook/examples/shift_scheduling_sat.ipynb +++ b/examples/notebook/examples/shift_scheduling_sat.ipynb @@ -84,7 +84,6 @@ "outputs": [], "source": [ "from ortools.sat.colab import flags\n", - "from google.protobuf import text_format\n", "from ortools.sat.python import cp_model\n", "\n", "_OUTPUT_PROTO = flags.define_string(\n", @@ -477,7 +476,7 @@ " # Solve the model.\n", " solver = cp_model.CpSolver()\n", " if params:\n", - " text_format.Parse(params, solver.parameters)\n", + " solver.parameters.parse_text_format(params)\n", " solution_printer = cp_model.ObjectiveSolutionPrinter()\n", " status = solver.solve(model, solution_printer)\n", "\n", diff --git a/examples/notebook/examples/single_machine_scheduling_with_setup_release_due_dates_sat.ipynb b/examples/notebook/examples/single_machine_scheduling_with_setup_release_due_dates_sat.ipynb index a01f93c92e1..0126a9bce44 100644 --- a/examples/notebook/examples/single_machine_scheduling_with_setup_release_due_dates_sat.ipynb +++ b/examples/notebook/examples/single_machine_scheduling_with_setup_release_due_dates_sat.ipynb @@ -85,7 +85,6 @@ "source": [ "from typing import Sequence\n", "from ortools.sat.colab import flags\n", - "from google.protobuf import text_format\n", "from ortools.sat.python import cp_model\n", "\n", "# ----------------------------------------------------------------------------\n", @@ -566,7 +565,7 @@ " # Solve.\n", " solver = cp_model.CpSolver()\n", " if parameters:\n", - " text_format.Parse(parameters, solver.parameters)\n", + " solver.parameters.parse_text_format(parameters)\n", " solution_printer = SolutionPrinter()\n", " solver.best_bound_callback = lambda a: print(f\"New objective 
lower bound: {a}\")\n", " solver.solve(model, solution_printer)\n", diff --git a/examples/notebook/examples/spillover_sat.ipynb b/examples/notebook/examples/spillover_sat.ipynb new file mode 100644 index 00000000000..8b1e9fe6924 --- /dev/null +++ b/examples/notebook/examples/spillover_sat.ipynb @@ -0,0 +1,439 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "google", + "metadata": {}, + "source": [ + "##### Copyright 2025 Google LLC." + ] + }, + { + "cell_type": "markdown", + "id": "apache", + "metadata": {}, + "source": [ + "Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "you may not use this file except in compliance with the License.\n", + "You may obtain a copy of the License at\n", + "\n", + " http://www.apache.org/licenses/LICENSE-2.0\n", + "\n", + "Unless required by applicable law or agreed to in writing, software\n", + "distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "See the License for the specific language governing permissions and\n", + "limitations under the License.\n" + ] + }, + { + "cell_type": "markdown", + "id": "basename", + "metadata": {}, + "source": [ + "# spillover_sat" + ] + }, + { + "cell_type": "markdown", + "id": "link", + "metadata": {}, + "source": [ + "\n", + "\n", + "\n", + "
\n", + "Run in Google Colab\n", + "\n", + "View source on GitHub\n", + "
" + ] + }, + { + "cell_type": "markdown", + "id": "doc", + "metadata": {}, + "source": [ + "First, you must install [ortools](https://pypi.org/project/ortools/) package in this colab." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "install", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install ortools" + ] + }, + { + "cell_type": "markdown", + "id": "description", + "metadata": {}, + "source": [ + "\n", + "Solves the problem of buying physical machines to meet VM demand.\n", + "\n", + "The Spillover problem is defined as follows:\n", + "\n", + "You have M types of physical machines and V types of Virtual Machines (VMs). You\n", + "can use a physical machine of type m to get n_mv copies of VM v. Each physical\n", + "machine m has a cost of c_m. Each VM has a demand of d_v. VMs are assigned to\n", + "physical machines by the following rule. The demand for each VM type arrives\n", + "equally spaced out over the interval [0, 1]. For each VM type, there is a\n", + "priority order over the physical machine types that you must follow. When a\n", + "demand arrives, if there are any machines of the highest priority type\n", + "available, you use them first, then you move on to the second priority machine\n", + "type, and so on. Each VM type has a list of compatible physical machine types,\n", + "and when the list is exhausted, the remaining demand is not met. Your goal is\n", + "to pick quantities of the physical machines to buy (minimizing cost) so that at\n", + "least some target service level (e.g. 
95%) of the total demand of all VM is met.\n", + "\n", + "The number of machines bought of each type and the number of VMs demanded of\n", + "each type is large enough that you can solve an approximate problem instead,\n", + "where the number of machines purchased and the assignment of machines to VMs is\n", + "fractional, if it is helpful to do so.\n", + "\n", + "The problem is not particularly interesting in isolation, it is more interesting\n", + "to embed this LP inside a larger optimization problem (e.g. consider a two stage\n", + "problem where in stage one, you buy machines, then in stage two, you realize VM\n", + "demand).\n", + "\n", + "The continuous approximation of this problem can be solved by LP (see the\n", + "MathOpt python examples). Doing this, instead of using MIP, is nontrivial.\n", + "Below, we show that continuous relaxation can be approximately solved by CP-SAT\n", + "as well, despite not having continuous variables. If you were solving the\n", + "problem in isolation, you should just use an LP solver, but if you were to add\n", + "side constraints or embed this within a more complex model, using CP-SAT could\n", + "be appropriate.\n", + "\n", + "If for each VM type, the physical machines that are most cost effective are the\n", + "highest priority, AND the target service level is 100%, then the problem has a\n", + "trivial optimal solution:\n", + " 1. Rank the VMs by lowest cost to meet a unit of demand with the #1 preferred\n", + " machine type.\n", + " 2. 
For each VM type in the order above, buy machines from #1 preferred machine\n", + " type, until either you have met all demand for the VM type.\n", + "\n", + "MOE:begin_strip\n", + "This example is motivated by the Cloudy problem, see go/fluid-model.\n", + "MOE:end_strip\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "code", + "metadata": {}, + "outputs": [], + "source": [ + "from collections.abc import Sequence\n", + "import dataclasses\n", + "import math\n", + "import random\n", + "\n", + "from ortools.sat.colab import flags\n", + "from ortools.sat.python import cp_model\n", + "\n", + "_MACHINE_TYPES = flags.define_integer(\n", + " \"machine_types\",\n", + " 100,\n", + " \"How many types of machines we can fulfill demand with.\",\n", + ")\n", + "\n", + "_VM_TYPES = flags.define_integer(\n", + " \"vm_types\", 500, \"How many types of VMs we need to supply.\"\n", + ")\n", + "\n", + "_FUNGIBILITY = flags.define_integer(\n", + " \"fungibility\",\n", + " 10,\n", + " \"Each VM type can be satisfied with this many machine types, selected\"\n", + " \" uniformly at random.\",\n", + ")\n", + "\n", + "_MAX_DEMAND = flags.define_integer(\n", + " \"max_demand\",\n", + " 100,\n", + " \"Demand for each VM type is in [max_demand//2, max_demand], uniformly at\"\n", + " \" random.\",\n", + ")\n", + "\n", + "_TEST_DATA = flags.define_bool(\n", + " \"test_data\", False, \"Use small test instance instead of random data.\"\n", + ")\n", + "\n", + "_SEED = flags.define_integer(\"seed\", 13, \"RNG seed for instance creation.\")\n", + "\n", + "_TIME_STEPS = flags.define_integer(\"time_steps\", 100, \"How much to discretize time.\")\n", + "\n", + "\n", + "@dataclasses.dataclass(frozen=True)\n", + "class MachineUse:\n", + " machine_type: int\n", + " vms_per_machine: int\n", + "\n", + "\n", + "@dataclasses.dataclass(frozen=True)\n", + "class VmDemand:\n", + " compatible_machines: tuple[MachineUse, ...]\n", + " vm_quantity: int\n", + "\n", + "\n", + 
"@dataclasses.dataclass(frozen=True)\n", + "class SpilloverProblem:\n", + " machine_cost: tuple[float, ...]\n", + " machine_limit: tuple[int, ...]\n", + " vm_demands: tuple[VmDemand, ...]\n", + " service_level: float\n", + " time_horizon: int\n", + "\n", + "\n", + "def _random_spillover_problem(\n", + " num_machines: int,\n", + " num_vms: int,\n", + " fungibility: int,\n", + " max_vm_demand: int,\n", + " horizon: int,\n", + ") -> SpilloverProblem:\n", + " \"\"\"Generates a random SpilloverProblem.\"\"\"\n", + " machine_costs = tuple(random.random() for _ in range(num_machines))\n", + " vm_demands = []\n", + " all_machines = list(range(num_machines))\n", + " min_vm_demand = max_vm_demand // 2\n", + " for _ in range(num_vms):\n", + " vm_use = []\n", + " for machine in random.sample(all_machines, fungibility):\n", + " vm_use.append(\n", + " MachineUse(machine_type=machine, vms_per_machine=random.randint(1, 10))\n", + " )\n", + " vm_demands.append(\n", + " VmDemand(\n", + " compatible_machines=tuple(vm_use),\n", + " vm_quantity=random.randint(min_vm_demand, max_vm_demand),\n", + " )\n", + " )\n", + " machine_need_ub = num_vms * max_vm_demand\n", + " machine_limit = (machine_need_ub,) * num_machines\n", + " return SpilloverProblem(\n", + " machine_cost=machine_costs,\n", + " machine_limit=machine_limit,\n", + " vm_demands=tuple(vm_demands),\n", + " service_level=0.95,\n", + " time_horizon=horizon,\n", + " )\n", + "\n", + "\n", + "def _test_problem() -> SpilloverProblem:\n", + " \"\"\"Creates a small SpilloverProblem with optimal objective of 360.\"\"\"\n", + " # To avoid machine type 2, ensure we buy enough of 1 to not stock out, cost\n", + " # 20\n", + " vm_a = VmDemand(\n", + " vm_quantity=10,\n", + " compatible_machines=(\n", + " MachineUse(machine_type=1, vms_per_machine=1),\n", + " MachineUse(machine_type=2, vms_per_machine=1),\n", + " ),\n", + " )\n", + " # machine type 0 is cheaper, but we don't want to stock out of machine type 1,\n", + " # so use all machine 
type 1, cost 40.\n", + " vm_b = VmDemand(\n", + " vm_quantity=20,\n", + " compatible_machines=(\n", + " MachineUse(machine_type=1, vms_per_machine=1),\n", + " MachineUse(machine_type=0, vms_per_machine=1),\n", + " ),\n", + " )\n", + " # Will use 3 copies of machine type 2, cost 300\n", + " vm_c = VmDemand(\n", + " vm_quantity=30,\n", + " compatible_machines=(MachineUse(machine_type=2, vms_per_machine=10),),\n", + " )\n", + " return SpilloverProblem(\n", + " machine_cost=(1.0, 2.0, 100.0),\n", + " machine_limit=(60, 60, 60),\n", + " vm_demands=(vm_a, vm_b, vm_c),\n", + " service_level=1.0,\n", + " time_horizon=100,\n", + " )\n", + "\n", + "\n", + "# Indices:\n", + "# * i in I, the VM demands\n", + "# * j in J, the machines supplied\n", + "#\n", + "# Data:\n", + "# * c_j: cost of a machine of type j\n", + "# * l_j: a limit of how many machines of type j you can buy.\n", + "# * n_ij: how many VMs of type i you get from a machine of type j\n", + "# * d_i: the total demand for VMs of type i\n", + "# * service_level: the target fraction of demand that is met.\n", + "# * P_i subset J: the compatible machine types for VM demand i.\n", + "# * UP_i(j) subset P_i, for j in P_i: for VM demand type i, the machines of\n", + "# priority higher than j\n", + "# * T: the number of integer time steps.\n", + "#\n", + "# Note: when d_i/n_ij is not integer, some approximation error is introduced in\n", + "# constraint 6 below.\n", + "#\n", + "# Decision variables:\n", + "# * s_j: the supply of machine type j\n", + "# * w_j: the time we run out of machine j, or 1 if we never run out\n", + "# * v_ij: when we start using supply j to meet demand i, or w_j if we never use\n", + "# this machine type for this demand.\n", + "# * o_i: the time we start failing to meet vm demand i\n", + "# * m_i: the total demand met for vm type i.\n", + "#\n", + "# Model the problem:\n", + "# min sum_{j in J} c_j s_j\n", + "# s.t.\n", + "# 1: sum_i m_i >= service_level * sum_{i in I} d_i\n", + "# 2: T * m_i <= 
o_i * d_i for all i in I\n", + "# 3: v_ij >= w_r for all i in I, j in C_i, r in UP_i(j)\n", + "# 4: v_ij <= w_j for all i in I, j in C_i\n", + "# 5: o_i = sum_{j in P_i} (w_j - v_ij) for all i in I\n", + "# 6: sum_{i in I: j in P_i}ceil(d_i/n_ij)(w_j - v_ij)<=T*s_j for all j in J\n", + "# o_i, w_j, v_ij in [0, T]\n", + "# 0 <= m_i <= d_i\n", + "# 0 <= s_j <= l_j\n", + "#\n", + "# The constraints say:\n", + "# 1. The amount of demand served must be at least 95% of total demand.\n", + "# 2. The demand served for VM type i is linear in the time we fail to keep\n", + "# serving demand.\n", + "# 3. Don't start using machine type j for demand i until all higher priority\n", + "# machine types r are used up.\n", + "# 4. The time we run out of machine type j must be after we start using it for\n", + "# VM demand type i.\n", + "# 5. The time we are unable to serve further VM demand i is the sum of the\n", + "# time spent serving the demand with each eligible machine type.\n", + "# 6. The total use of machine type j to serve demand does not exceed the\n", + "# supply. 
The ceil function above introduces some approximation error when\n", + "# d_i/n_ij is not integer.\n", + "def _solve_spillover_problem(problem: SpilloverProblem) -> None:\n", + " \"\"\"Solves the spillover problem and prints the optimal objective.\"\"\"\n", + " model = cp_model.CpModel()\n", + " num_machines = len(problem.machine_cost)\n", + " num_vms = len(problem.vm_demands)\n", + " horizon = problem.time_horizon\n", + " s = [\n", + " model.new_int_var(lb=0, ub=problem.machine_limit[j], name=f\"s_{j}\")\n", + " for j in range(num_machines)\n", + " ]\n", + " w = [\n", + " model.new_int_var(lb=0, ub=horizon, name=f\"w_{i}\") for i in range(num_machines)\n", + " ]\n", + " o = [model.new_int_var(lb=0, ub=horizon, name=f\"o_{j}\") for j in range(num_vms)]\n", + " m = [\n", + " model.new_int_var(lb=0, ub=problem.vm_demands[j].vm_quantity, name=f\"m_{j}\")\n", + " for j in range(num_vms)\n", + " ]\n", + " v = [\n", + " {\n", + " compat.machine_type: model.new_int_var(\n", + " lb=0, ub=horizon, name=f\"v_{i}_{compat.machine_type}\"\n", + " )\n", + " for compat in vm_demand.compatible_machines\n", + " }\n", + " for i, vm_demand in enumerate(problem.vm_demands)\n", + " ]\n", + "\n", + " obj = 0\n", + " for j in range(num_machines):\n", + " obj += s[j] * problem.machine_cost[j]\n", + " model.minimize(obj)\n", + "\n", + " # Constraint 1: demand served is at least service_level fraction of total.\n", + " total_vm_demand = sum(vm_demand.vm_quantity for vm_demand in problem.vm_demands)\n", + " model.add(sum(m) >= int(math.ceil(problem.service_level * total_vm_demand)))\n", + "\n", + " # Constraint 2: demand served is linear in time we stop serving.\n", + " for i in range(num_vms):\n", + " model.add(\n", + " problem.time_horizon * m[i] <= o[i] * problem.vm_demands[i].vm_quantity\n", + " )\n", + "\n", + " # Constraint 3: use machine type j for demand i after all higher priority\n", + " # machine types r are used up.\n", + " for i in range(num_vms):\n", + " for k, meet_demand in 
enumerate(problem.vm_demands[i].compatible_machines):\n", + " j = meet_demand.machine_type\n", + " for l in range(k):\n", + " r = problem.vm_demands[i].compatible_machines[l].machine_type\n", + " model.add(v[i][j] >= w[r])\n", + "\n", + " # Constraint 4: outage time of machine j is after start time for using j to\n", + " # meet VM demand i.\n", + " for i in range(num_vms):\n", + " for meet_demand in problem.vm_demands[i].compatible_machines:\n", + " j = meet_demand.machine_type\n", + " model.add(v[i][j] <= w[j])\n", + "\n", + " # Constraint 5: For VM demand i, time service ends is the sum of the time\n", + " # spent serving with each eligible machine type.\n", + " for i in range(num_vms):\n", + " sum_serving = 0\n", + " for meet_demand in problem.vm_demands[i].compatible_machines:\n", + " j = meet_demand.machine_type\n", + " sum_serving += w[j] - v[i][j]\n", + " model.add(o[i] == sum_serving)\n", + "\n", + " # Constraint 6: Total use of machine type j is at most the supply.\n", + " #\n", + " # We build the constraints in bulk because our data is transposed.\n", + " total_machine_use = [0 for _ in range(num_machines)]\n", + " for i in range(num_vms):\n", + " for meet_demand in problem.vm_demands[i].compatible_machines:\n", + " j = meet_demand.machine_type\n", + " nij = meet_demand.vms_per_machine\n", + " vm_quantity = problem.vm_demands[i].vm_quantity\n", + " # Want vm_quantity/nij, over estimate with ceil(vm_quantity/nij) to use\n", + " # integer coefficients.\n", + " rate = (vm_quantity + nij - 1) // nij\n", + " total_machine_use[j] += rate * (w[j] - v[i][j])\n", + " for j in range(num_machines):\n", + " model.add(total_machine_use[j] <= horizon * s[j])\n", + "\n", + " solver = cp_model.CpSolver()\n", + " solver.parameters.num_workers = 16\n", + " solver.parameters.log_search_progress = True\n", + " solver.max_time_in_seconds = 30.0\n", + " status = solver.solve(model)\n", + " if status != cp_model.OPTIMAL:\n", + " raise RuntimeError(f\"expected optimal, found: 
{status}\")\n", + " print(f\"objective: {solver.objective_value}\")\n", + "\n", + "\n", + "def main(argv: Sequence[str]) -> None:\n", + " del argv # Unused.\n", + " random.seed(_SEED.value)\n", + " if _TEST_DATA.value:\n", + " problem = _test_problem()\n", + " else:\n", + " problem = _random_spillover_problem(\n", + " _MACHINE_TYPES.value,\n", + " _VM_TYPES.value,\n", + " _FUNGIBILITY.value,\n", + " _MAX_DEMAND.value,\n", + " _TIME_STEPS.value,\n", + " )\n", + " print(problem)\n", + "\n", + " _solve_spillover_problem(problem)\n", + "\n", + "\n", + "main()\n", + "\n" + ] + } + ], + "metadata": { + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/notebook/examples/spread_robots_sat.ipynb b/examples/notebook/examples/spread_robots_sat.ipynb index 09f31c81d40..fda17e41fbe 100644 --- a/examples/notebook/examples/spread_robots_sat.ipynb +++ b/examples/notebook/examples/spread_robots_sat.ipynb @@ -86,7 +86,6 @@ "import math\n", "from typing import Sequence\n", "from ortools.sat.colab import flags\n", - "from google.protobuf import text_format\n", "from ortools.sat.python import cp_model\n", "\n", "_NUM_ROBOTS = flags.define_integer(\"num_robots\", 8, \"Number of robots to place.\")\n", @@ -161,7 +160,7 @@ " # Creates a solver and solves the model.\n", " solver = cp_model.CpSolver()\n", " if params:\n", - " text_format.Parse(params, solver.parameters)\n", + " solver.parameters.parse_text_format(params)\n", " solver.parameters.log_search_progress = True\n", " status = solver.solve(model)\n", "\n", diff --git a/examples/notebook/examples/steel_mill_slab_sat.ipynb b/examples/notebook/examples/steel_mill_slab_sat.ipynb index c6124111f06..b20be405e79 100644 --- a/examples/notebook/examples/steel_mill_slab_sat.ipynb +++ b/examples/notebook/examples/steel_mill_slab_sat.ipynb @@ -88,7 +88,6 @@ "import time\n", "\n", "from ortools.sat.colab import flags\n", - "from google.protobuf import text_format\n", "from 
ortools.sat.python import cp_model\n", "\n", "\n", @@ -360,7 +359,7 @@ " ### Solve model.\n", " solver = cp_model.CpSolver()\n", " if _PARAMS.value:\n", - " text_format.Parse(_PARAMS.value, solver.parameters)\n", + " solver.parameters.parse_text_format(_PARAMS.value)\n", " objective_printer = cp_model.ObjectiveSolutionPrinter()\n", " status = solver.solve(model, objective_printer)\n", "\n", @@ -544,7 +543,7 @@ " ### Solve model.\n", " solver = cp_model.CpSolver()\n", " if _PARAMS.value:\n", - " text_format.Parse(_PARAMS.value, solver.parameters)\n", + " solver.parameters.parse_text_format(_PARAMS.value)\n", "\n", " solution_printer = SteelMillSlabSolutionPrinter(orders, assign, loads, losses)\n", " status = solver.solve(model, solution_printer)\n", @@ -614,7 +613,7 @@ " ### Solve model.\n", " solver = cp_model.CpSolver()\n", " if _PARAMS.value:\n", - " text_format.Parse(_PARAMS.value, solver.parameters)\n", + " solver.parameters.parse_text_format(_PARAMS.value)\n", " solution_printer = cp_model.ObjectiveSolutionPrinter()\n", " status = solver.solve(model, solution_printer)\n", "\n", diff --git a/examples/notebook/examples/test_scheduling_sat.ipynb b/examples/notebook/examples/test_scheduling_sat.ipynb index 6d8c8394af6..0235f85eac5 100644 --- a/examples/notebook/examples/test_scheduling_sat.ipynb +++ b/examples/notebook/examples/test_scheduling_sat.ipynb @@ -101,7 +101,6 @@ "from ortools.sat.colab import flags\n", "import pandas as pd\n", "\n", - "from google.protobuf import text_format\n", "from ortools.sat.python import cp_model\n", "\n", "\n", @@ -209,7 +208,7 @@ " # Solve model.\n", " solver = cp_model.CpSolver()\n", " if _PARAMS.value:\n", - " text_format.Parse(_PARAMS.value, solver.parameters)\n", + " solver.parameters.parse_text_format(_PARAMS.value)\n", " status = solver.solve(model)\n", "\n", " # Report solution.\n", diff --git a/examples/notebook/examples/transit_time.ipynb b/examples/notebook/examples/transit_time.ipynb index 8f75f9028ef..f8fa14e7939 
100644 --- a/examples/notebook/examples/transit_time.ipynb +++ b/examples/notebook/examples/transit_time.ipynb @@ -89,7 +89,6 @@ "outputs": [], "source": [ "from ortools.constraint_solver import pywrapcp\n", - "from ortools.constraint_solver import routing_enums_pb2\n", "\n", "\n", "###########################\n", diff --git a/examples/notebook/examples/weighted_latency_problem_sat.ipynb b/examples/notebook/examples/weighted_latency_problem_sat.ipynb index 7d0bb9980a1..503155545a5 100644 --- a/examples/notebook/examples/weighted_latency_problem_sat.ipynb +++ b/examples/notebook/examples/weighted_latency_problem_sat.ipynb @@ -85,8 +85,8 @@ "source": [ "import random\n", "from typing import Sequence\n", + "\n", "from ortools.sat.colab import flags\n", - "from google.protobuf import text_format\n", "from ortools.sat.python import cp_model\n", "\n", "_NUM_NODES = flags.define_integer(\"num_nodes\", 12, \"Number of nodes to visit.\")\n", @@ -94,7 +94,9 @@ "_PROFIT_RANGE = flags.define_integer(\"profit_range\", 50, \"Range of profit.\")\n", "_SEED = flags.define_integer(\"seed\", 0, \"Random seed.\")\n", "_PARAMS = flags.define_string(\n", - " \"params\", \"num_search_workers:16, max_time_in_seconds:5\", \"Sat solver parameters.\"\n", + " \"params\",\n", + " \"num_search_workers:16, max_time_in_seconds:5\",\n", + " \"Sat solver parameters.\",\n", ")\n", "_PROTO_FILE = flags.define_string(\n", " \"proto_file\", \"\", \"If not empty, output the proto to this file.\"\n", @@ -163,7 +165,7 @@ " # Solve model.\n", " solver = cp_model.CpSolver()\n", " if _PARAMS.value:\n", - " text_format.Parse(_PARAMS.value, solver.parameters)\n", + " solver.parameters.parse_text_format(_PARAMS.value)\n", " solver.parameters.log_search_progress = True\n", " solver.solve(model)\n", "\n", diff --git a/examples/notebook/graph/assignment_linear_sum_assignment.ipynb b/examples/notebook/graph/assignment_linear_sum_assignment.ipynb index 3692e1e9e68..7ff9ac1d4f3 100644 --- 
a/examples/notebook/graph/assignment_linear_sum_assignment.ipynb +++ b/examples/notebook/graph/assignment_linear_sum_assignment.ipynb @@ -88,6 +88,7 @@ "from ortools.graph.python import linear_sum_assignment\n", "\n", "\n", + "\n", "def main():\n", " \"\"\"Linear Sum Assignment example.\"\"\"\n", " assignment = linear_sum_assignment.SimpleLinearSumAssignment()\n", diff --git a/examples/notebook/graph/assignment_min_flow.ipynb b/examples/notebook/graph/assignment_min_flow.ipynb index 330903a1ec1..6edf050a5ba 100644 --- a/examples/notebook/graph/assignment_min_flow.ipynb +++ b/examples/notebook/graph/assignment_min_flow.ipynb @@ -86,6 +86,7 @@ "from ortools.graph.python import min_cost_flow\n", "\n", "\n", + "\n", "def main():\n", " \"\"\"Solving an Assignment Problem with MinCostFlow.\"\"\"\n", " # Instantiate a SimpleMinCostFlow solver.\n", diff --git a/examples/notebook/graph/balance_min_flow.ipynb b/examples/notebook/graph/balance_min_flow.ipynb index 6f25892b610..229e499f9a7 100644 --- a/examples/notebook/graph/balance_min_flow.ipynb +++ b/examples/notebook/graph/balance_min_flow.ipynb @@ -86,6 +86,7 @@ "from ortools.graph.python import min_cost_flow\n", "\n", "\n", + "\n", "def main():\n", " \"\"\"Solving an Assignment with teams of worker.\"\"\"\n", " smcf = min_cost_flow.SimpleMinCostFlow()\n", diff --git a/examples/notebook/graph/simple_max_flow_program.ipynb b/examples/notebook/graph/simple_max_flow_program.ipynb index 5e6cabde020..2e49a0289b9 100644 --- a/examples/notebook/graph/simple_max_flow_program.ipynb +++ b/examples/notebook/graph/simple_max_flow_program.ipynb @@ -88,6 +88,7 @@ "from ortools.graph.python import max_flow\n", "\n", "\n", + "\n", "def main():\n", " \"\"\"MaxFlow simple interface example.\"\"\"\n", " # Instantiate a SimpleMaxFlow solver.\n", diff --git a/examples/notebook/graph/simple_min_cost_flow_program.ipynb b/examples/notebook/graph/simple_min_cost_flow_program.ipynb index 8f78329717c..07efb1500af 100644 --- 
a/examples/notebook/graph/simple_min_cost_flow_program.ipynb +++ b/examples/notebook/graph/simple_min_cost_flow_program.ipynb @@ -88,6 +88,7 @@ "from ortools.graph.python import min_cost_flow\n", "\n", "\n", + "\n", "def main():\n", " \"\"\"MinCostFlow simple interface example.\"\"\"\n", " # Instantiate a SimpleMinCostFlow solver.\n", diff --git a/examples/notebook/linear_solver/assignment_groups_mip.ipynb b/examples/notebook/linear_solver/assignment_groups_mip.ipynb index 6a1a256796f..126e0a16be9 100644 --- a/examples/notebook/linear_solver/assignment_groups_mip.ipynb +++ b/examples/notebook/linear_solver/assignment_groups_mip.ipynb @@ -86,6 +86,7 @@ "from ortools.linear_solver import pywraplp\n", "\n", "\n", + "\n", "def main():\n", " # Data\n", " costs = [\n", diff --git a/examples/notebook/linear_solver/assignment_mb.ipynb b/examples/notebook/linear_solver/assignment_mb.ipynb index ad5d07ee420..59aece13a83 100644 --- a/examples/notebook/linear_solver/assignment_mb.ipynb +++ b/examples/notebook/linear_solver/assignment_mb.ipynb @@ -90,6 +90,7 @@ "from ortools.linear_solver.python import model_builder\n", "\n", "\n", + "\n", "def main():\n", " # Data\n", " data_str = \"\"\"\n", diff --git a/examples/notebook/linear_solver/assignment_mip.ipynb b/examples/notebook/linear_solver/assignment_mip.ipynb index 60119e7f4c5..bdb85d308cd 100644 --- a/examples/notebook/linear_solver/assignment_mip.ipynb +++ b/examples/notebook/linear_solver/assignment_mip.ipynb @@ -86,6 +86,7 @@ "from ortools.linear_solver import pywraplp\n", "\n", "\n", + "\n", "def main():\n", " # Data\n", " costs = [\n", diff --git a/examples/notebook/linear_solver/assignment_task_sizes_mip.ipynb b/examples/notebook/linear_solver/assignment_task_sizes_mip.ipynb index a4c4ca6dbfd..7db62fc22dd 100644 --- a/examples/notebook/linear_solver/assignment_task_sizes_mip.ipynb +++ b/examples/notebook/linear_solver/assignment_task_sizes_mip.ipynb @@ -86,6 +86,7 @@ "from ortools.linear_solver import pywraplp\n", 
"\n", "\n", + "\n", "def main():\n", " # Data\n", " costs = [\n", diff --git a/examples/notebook/linear_solver/assignment_teams_mip.ipynb b/examples/notebook/linear_solver/assignment_teams_mip.ipynb index 6414ace0dd0..90477f19556 100644 --- a/examples/notebook/linear_solver/assignment_teams_mip.ipynb +++ b/examples/notebook/linear_solver/assignment_teams_mip.ipynb @@ -86,6 +86,7 @@ "from ortools.linear_solver import pywraplp\n", "\n", "\n", + "\n", "def main():\n", " # Data\n", " costs = [\n", diff --git a/examples/notebook/linear_solver/basic_example.ipynb b/examples/notebook/linear_solver/basic_example.ipynb index 525689e1a4e..5176ae6070a 100644 --- a/examples/notebook/linear_solver/basic_example.ipynb +++ b/examples/notebook/linear_solver/basic_example.ipynb @@ -87,6 +87,7 @@ "from ortools.linear_solver import pywraplp\n", "\n", "\n", + "\n", "def main():\n", " print(\"Google OR-Tools version:\", init.OrToolsVersion.version_string())\n", "\n", diff --git a/examples/notebook/linear_solver/bin_packing_mb.ipynb b/examples/notebook/linear_solver/bin_packing_mb.ipynb index 205acb33e28..8e21b6d5452 100644 --- a/examples/notebook/linear_solver/bin_packing_mb.ipynb +++ b/examples/notebook/linear_solver/bin_packing_mb.ipynb @@ -90,6 +90,7 @@ "from ortools.linear_solver.python import model_builder\n", "\n", "\n", + "\n", "def create_data_model() -> tuple[pd.DataFrame, pd.DataFrame]:\n", " \"\"\"Create the data for the example.\"\"\"\n", "\n", diff --git a/examples/notebook/linear_solver/bin_packing_mip.ipynb b/examples/notebook/linear_solver/bin_packing_mip.ipynb index 53d68248b58..cefbfc700e9 100644 --- a/examples/notebook/linear_solver/bin_packing_mip.ipynb +++ b/examples/notebook/linear_solver/bin_packing_mip.ipynb @@ -86,6 +86,7 @@ "from ortools.linear_solver import pywraplp\n", "\n", "\n", + "\n", "def create_data_model():\n", " \"\"\"Create the data for the example.\"\"\"\n", " data = {}\n", @@ -98,6 +99,7 @@ "\n", "\n", "\n", + "\n", "def main():\n", " data = 
create_data_model()\n", "\n", diff --git a/examples/notebook/linear_solver/clone_model_mb.ipynb b/examples/notebook/linear_solver/clone_model_mb.ipynb index f64b365cf41..43f9cf98ab8 100644 --- a/examples/notebook/linear_solver/clone_model_mb.ipynb +++ b/examples/notebook/linear_solver/clone_model_mb.ipynb @@ -88,6 +88,7 @@ "from ortools.linear_solver.python import model_builder\n", "\n", "\n", + "\n", "def main():\n", " # Create the model.\n", " model = model_builder.Model()\n", diff --git a/examples/notebook/linear_solver/integer_programming_example.ipynb b/examples/notebook/linear_solver/integer_programming_example.ipynb index 49a9a840ea2..21270651a94 100644 --- a/examples/notebook/linear_solver/integer_programming_example.ipynb +++ b/examples/notebook/linear_solver/integer_programming_example.ipynb @@ -86,6 +86,7 @@ "from ortools.linear_solver import pywraplp\n", "\n", "\n", + "\n", "def IntegerProgrammingExample():\n", " \"\"\"Integer programming sample.\"\"\"\n", " # Create the mip solver with the SCIP backend.\n", diff --git a/examples/notebook/linear_solver/linear_programming_example.ipynb b/examples/notebook/linear_solver/linear_programming_example.ipynb index 3db71f1df0a..41e563d3d2a 100644 --- a/examples/notebook/linear_solver/linear_programming_example.ipynb +++ b/examples/notebook/linear_solver/linear_programming_example.ipynb @@ -86,6 +86,7 @@ "from ortools.linear_solver import pywraplp\n", "\n", "\n", + "\n", "def LinearProgrammingExample():\n", " \"\"\"Linear programming sample.\"\"\"\n", " # Instantiate a Glop solver, naming it LinearExample.\n", diff --git a/examples/notebook/linear_solver/mip_var_array.ipynb b/examples/notebook/linear_solver/mip_var_array.ipynb index 0a1575fc96e..23f271c1e49 100644 --- a/examples/notebook/linear_solver/mip_var_array.ipynb +++ b/examples/notebook/linear_solver/mip_var_array.ipynb @@ -86,6 +86,7 @@ "from ortools.linear_solver import pywraplp\n", "\n", "\n", + "\n", "def create_data_model():\n", " \"\"\"Stores the 
data for the problem.\"\"\"\n", " data = {}\n", @@ -103,6 +104,7 @@ "\n", "\n", "\n", + "\n", "def main():\n", " data = create_data_model()\n", " # Create the mip solver with the SCIP backend.\n", diff --git a/examples/notebook/linear_solver/multiple_knapsack_mip.ipynb b/examples/notebook/linear_solver/multiple_knapsack_mip.ipynb index ab752d2a997..4412da1565c 100644 --- a/examples/notebook/linear_solver/multiple_knapsack_mip.ipynb +++ b/examples/notebook/linear_solver/multiple_knapsack_mip.ipynb @@ -86,6 +86,7 @@ "from ortools.linear_solver import pywraplp\n", "\n", "\n", + "\n", "def main():\n", " data = {}\n", " data[\"weights\"] = [48, 30, 42, 36, 36, 48, 42, 42, 36, 24, 30, 30, 42, 36, 36]\n", diff --git a/examples/notebook/linear_solver/simple_lp_program.ipynb b/examples/notebook/linear_solver/simple_lp_program.ipynb index 8878896a77e..b86ba489a26 100644 --- a/examples/notebook/linear_solver/simple_lp_program.ipynb +++ b/examples/notebook/linear_solver/simple_lp_program.ipynb @@ -86,6 +86,7 @@ "from ortools.linear_solver import pywraplp\n", "\n", "\n", + "\n", "def main():\n", " # Create the linear solver with the GLOP backend.\n", " solver = pywraplp.Solver.CreateSolver(\"GLOP\")\n", diff --git a/examples/notebook/linear_solver/simple_lp_program_mb.ipynb b/examples/notebook/linear_solver/simple_lp_program_mb.ipynb index b9745252153..9c18840229d 100644 --- a/examples/notebook/linear_solver/simple_lp_program_mb.ipynb +++ b/examples/notebook/linear_solver/simple_lp_program_mb.ipynb @@ -88,6 +88,7 @@ "from ortools.linear_solver.python import model_builder\n", "\n", "\n", + "\n", "def main():\n", " # Create the model.\n", " model = model_builder.Model()\n", diff --git a/examples/notebook/linear_solver/simple_mip_program.ipynb b/examples/notebook/linear_solver/simple_mip_program.ipynb index 9ba26454b5f..2993a83ee35 100644 --- a/examples/notebook/linear_solver/simple_mip_program.ipynb +++ b/examples/notebook/linear_solver/simple_mip_program.ipynb @@ -86,6 +86,7 @@ 
"from ortools.linear_solver import pywraplp\n", "\n", "\n", + "\n", "def main():\n", " # Create the mip solver with the CP-SAT backend.\n", " solver = pywraplp.Solver.CreateSolver(\"SAT\")\n", diff --git a/examples/notebook/linear_solver/simple_mip_program_mb.ipynb b/examples/notebook/linear_solver/simple_mip_program_mb.ipynb index 9feaf39005f..577001dda52 100644 --- a/examples/notebook/linear_solver/simple_mip_program_mb.ipynb +++ b/examples/notebook/linear_solver/simple_mip_program_mb.ipynb @@ -88,6 +88,7 @@ "from ortools.linear_solver.python import model_builder\n", "\n", "\n", + "\n", "def main():\n", " # Create the model.\n", " model = model_builder.Model()\n", diff --git a/examples/notebook/linear_solver/stigler_diet.ipynb b/examples/notebook/linear_solver/stigler_diet.ipynb index 50bc390dbe9..ba0e6e6616f 100644 --- a/examples/notebook/linear_solver/stigler_diet.ipynb +++ b/examples/notebook/linear_solver/stigler_diet.ipynb @@ -89,6 +89,7 @@ "from ortools.linear_solver import pywraplp\n", "\n", "\n", + "\n", "def main():\n", " \"\"\"Entry point of the program.\"\"\"\n", " # Instantiate the data problem.\n", diff --git a/examples/notebook/sat/ranking_circuit_sample_sat.ipynb b/examples/notebook/sat/ranking_circuit_sample_sat.ipynb index 8f7be0d7943..ea5d2909d30 100644 --- a/examples/notebook/sat/ranking_circuit_sample_sat.ipynb +++ b/examples/notebook/sat/ranking_circuit_sample_sat.ipynb @@ -73,7 +73,8 @@ "metadata": {}, "source": [ "\n", - "Code sample to demonstrates how to rank intervals using a circuit.\n" + "Code sample to demonstrates how to rank intervals using a circuit.\n", + "\n" ] }, { @@ -83,8 +84,7 @@ "metadata": {}, "outputs": [], "source": [ - "from typing import List, Sequence\n", - "\n", + "from collections.abc import Sequence\n", "\n", "from ortools.sat.python import cp_model\n", "\n", @@ -125,7 +125,7 @@ " num_tasks = len(starts)\n", " all_tasks = range(num_tasks)\n", "\n", - " arcs: List[cp_model.ArcT] = []\n", + " arcs: list[cp_model.ArcT] 
= []\n", " for i in all_tasks:\n", " # if node i is first.\n", " start_lit = model.new_bool_var(f\"start_{i}\")\n", diff --git a/examples/notebook/sat/sequences_in_no_overlap_sample_sat.ipynb b/examples/notebook/sat/sequences_in_no_overlap_sample_sat.ipynb index 0cf40020dca..8bf5e64d4f7 100644 --- a/examples/notebook/sat/sequences_in_no_overlap_sample_sat.ipynb +++ b/examples/notebook/sat/sequences_in_no_overlap_sample_sat.ipynb @@ -83,7 +83,7 @@ "metadata": {}, "outputs": [], "source": [ - "from typing import Dict, List, Sequence, Tuple\n", + "from collections.abc import Sequence\n", "\n", "from ortools.sat.python import cp_model\n", "\n", @@ -95,9 +95,9 @@ " task_types: Sequence[str],\n", " lengths: Sequence[cp_model.IntVar],\n", " cumuls: Sequence[cp_model.IntVar],\n", - " sequence_length_constraints: Dict[str, Tuple[int, int]],\n", - " sequence_cumul_constraints: Dict[str, Tuple[int, int, int]],\n", - ") -> Sequence[Tuple[cp_model.IntVar, int]]:\n", + " sequence_length_constraints: dict[str, tuple[int, int]],\n", + " sequence_cumul_constraints: dict[str, tuple[int, int, int]],\n", + ") -> Sequence[tuple[cp_model.IntVar, int]]:\n", " \"\"\"This method enforces constraints on sequences of tasks of the same type.\n", "\n", " This method assumes that all durations are strictly positive.\n", @@ -133,7 +133,7 @@ " num_tasks = len(starts)\n", " all_tasks = range(num_tasks)\n", "\n", - " arcs: List[cp_model.ArcT] = []\n", + " arcs: list[cp_model.ArcT] = []\n", " for i in all_tasks:\n", " # if node i is first.\n", " start_lit = model.new_bool_var(f\"start_{i}\")\n", diff --git a/examples/notebook/sat/soft_constraints_sat.ipynb b/examples/notebook/sat/soft_constraints_sat.ipynb new file mode 100644 index 00000000000..b0bfadb9182 --- /dev/null +++ b/examples/notebook/sat/soft_constraints_sat.ipynb @@ -0,0 +1,249 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "google", + "metadata": {}, + "source": [ + "##### Copyright 2025 Google LLC." 
+ ] + }, + { + "cell_type": "markdown", + "id": "apache", + "metadata": {}, + "source": [ + "Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "you may not use this file except in compliance with the License.\n", + "You may obtain a copy of the License at\n", + "\n", + " http://www.apache.org/licenses/LICENSE-2.0\n", + "\n", + "Unless required by applicable law or agreed to in writing, software\n", + "distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "See the License for the specific language governing permissions and\n", + "limitations under the License.\n" + ] + }, + { + "cell_type": "markdown", + "id": "basename", + "metadata": {}, + "source": [ + "# soft_constraints_sat" + ] + }, + { + "cell_type": "markdown", + "id": "link", + "metadata": {}, + "source": [ + "\n", + "\n", + "\n", + "
\n", + "Run in Google Colab\n", + "\n", + "View source on GitHub\n", + "
" + ] + }, + { + "cell_type": "markdown", + "id": "doc", + "metadata": {}, + "source": [ + "First, you must install [ortools](https://pypi.org/project/ortools/) package in this colab." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "install", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install ortools" + ] + }, + { + "cell_type": "markdown", + "id": "description", + "metadata": {}, + "source": [ + "\n", + "The sample shows multiple ways to model soft constraints in CP-SAT.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "code", + "metadata": {}, + "outputs": [], + "source": [ + "from ortools.sat.python import cp_model\n", + "\n", + "\n", + "\n", + "def infeasible_model() -> None:\n", + " \"\"\"Base model that is infeasible.\"\"\"\n", + " # Creates the model.\n", + " model = cp_model.CpModel()\n", + "\n", + " # Creates the variables.\n", + " x = model.new_int_var(0, 10, \"x\")\n", + " y = model.new_int_var(0, 10, \"y\")\n", + " z = model.new_int_var(0, 10, \"z\")\n", + "\n", + " # Creates the constraints.\n", + " model.add(x > y)\n", + " model.add(y > z)\n", + " model.add(z > x)\n", + "\n", + " # Creates a solver and solves.\n", + " solver = cp_model.CpSolver()\n", + " status = solver.solve(model)\n", + "\n", + " # Print solution.\n", + " print(f\" Status = {solver.status_name(status)}\")\n", + "\n", + "\n", + "def model_with_enforcement_literals() -> None:\n", + " \"\"\"Adds fixed costs to violated constraints.\"\"\"\n", + " # Creates the model.\n", + " model = cp_model.CpModel()\n", + "\n", + " # Creates the variables.\n", + " x = model.new_int_var(0, 10, \"x\")\n", + " y = model.new_int_var(0, 10, \"y\")\n", + " z = model.new_int_var(0, 10, \"z\")\n", + " a = model.new_bool_var(\"a\")\n", + " b = model.new_bool_var(\"b\")\n", + "\n", + " # Creates the constraints. 
Adds enforcement literals to the first two\n", + " # constraints, we assume the third constraint is always enforced.\n", + " model.add(x > y).only_enforce_if(a)\n", + " model.add(y > z).only_enforce_if(b)\n", + " model.add(z > x)\n", + "\n", + " # Adds an objective to maximize the number of enforced constraints.\n", + " model.maximize(a + 2 * b)\n", + "\n", + " # Creates a solver and solves.\n", + " solver = cp_model.CpSolver()\n", + " status = solver.solve(model)\n", + "\n", + " # Print solution.\n", + " print(f\" Status = {solver.status_name(status)}\")\n", + " if status == cp_model.OPTIMAL:\n", + " print(f\" Objective value = {solver.objective_value}\")\n", + " print(f\" Value of x = {solver.value(x)}\")\n", + " print(f\" Value of y = {solver.value(y)}\")\n", + " print(f\" Value of z = {solver.value(z)}\")\n", + " print(f\" Value of a = {solver.boolean_value(a)}\")\n", + " print(f\" Value of b = {solver.boolean_value(b)}\")\n", + "\n", + "\n", + "def model_with_linear_violations() -> None:\n", + " \"\"\"Adds fixed costs to violated constraints.\"\"\"\n", + " # Creates the model.\n", + " model = cp_model.CpModel()\n", + "\n", + " # Creates the variables.\n", + " x = model.new_int_var(0, 10, \"x\")\n", + " y = model.new_int_var(0, 10, \"y\")\n", + " z = model.new_int_var(0, 10, \"z\")\n", + " a = model.new_int_var(0, 10, \"a\")\n", + " b = model.new_int_var(0, 10, \"b\")\n", + "\n", + " # Creates the constraints. 
Adds enforcement literals to the first two\n", + " # constraints, we assume the third constraint is always enforced.\n", + " model.add(x > y - a)\n", + " model.add(y > z - b)\n", + " model.add(z > x)\n", + "\n", + " # Adds an objective to minimize the added slacks.\n", + " model.minimize(a + 2 * b)\n", + "\n", + " # Creates a solver and solves.\n", + " solver = cp_model.CpSolver()\n", + " status = solver.solve(model)\n", + "\n", + " # Print solution.\n", + " print(f\" Status = {solver.status_name(status)}\")\n", + " if status == cp_model.OPTIMAL:\n", + " print(f\" Objective value = {solver.objective_value}\")\n", + " print(f\" Value of x = {solver.value(x)}\")\n", + " print(f\" Value of y = {solver.value(y)}\")\n", + " print(f\" Value of z = {solver.value(z)}\")\n", + " print(f\" Value of a = {solver.value(a)}\")\n", + " print(f\" Value of b = {solver.value(b)}\")\n", + "\n", + "\n", + "def model_with_quadratic_violations() -> None:\n", + " \"\"\"Adds fixed costs to violated constraints.\"\"\"\n", + " # Creates the model.\n", + " model = cp_model.CpModel()\n", + "\n", + " # Creates the variables.\n", + " x = model.new_int_var(0, 10, \"x\")\n", + " y = model.new_int_var(0, 10, \"y\")\n", + " z = model.new_int_var(0, 10, \"z\")\n", + " a = model.new_int_var(0, 10, \"a\")\n", + " b = model.new_int_var(0, 10, \"b\")\n", + " square_a = model.new_int_var(0, 100, \"square_a\")\n", + " square_b = model.new_int_var(0, 100, \"square_b\")\n", + "\n", + " # Creates the constraints. 
Adds enforcement literals to the first two\n", + " # constraints, we assume the third constraint is always enforced.\n", + " model.add(x > y - a)\n", + " model.add(y > z - b)\n", + " model.add(z > x)\n", + "\n", + " model.add_multiplication_equality(square_a, a, a)\n", + " model.add_multiplication_equality(square_b, b, b)\n", + "\n", + " # Adds an objective to minimize the added slacks.\n", + " model.minimize(square_a + 2 * square_b)\n", + "\n", + " # Creates a solver and solves.\n", + " solver = cp_model.CpSolver()\n", + " status = solver.solve(model)\n", + "\n", + " # Print solution.\n", + " print(f\" Status = {solver.status_name(status)}\")\n", + " if status == cp_model.OPTIMAL:\n", + " print(f\" Objective value = {solver.objective_value}\")\n", + " print(f\" Value of x = {solver.value(x)}\")\n", + " print(f\" Value of y = {solver.value(y)}\")\n", + " print(f\" Value of z = {solver.value(z)}\")\n", + " print(f\" Value of a = {solver.value(a)}\")\n", + " print(f\" Value of b = {solver.value(b)}\")\n", + "\n", + "\n", + "def main() -> None:\n", + " print(\"Infeasible model:\")\n", + " infeasible_model()\n", + " print(\"Model with enforcement literals:\")\n", + " model_with_enforcement_literals()\n", + " print(\"Model with linear violations:\")\n", + " model_with_linear_violations()\n", + " print(\"Model with quadratic violations:\")\n", + " model_with_quadratic_violations()\n", + "\n", + "\n", + "main()\n", + "\n" + ] + } + ], + "metadata": { + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/notebook/sat/transitions_in_no_overlap_sample_sat.ipynb b/examples/notebook/sat/transitions_in_no_overlap_sample_sat.ipynb index 05f0a4ea3e8..ba7b105464a 100644 --- a/examples/notebook/sat/transitions_in_no_overlap_sample_sat.ipynb +++ b/examples/notebook/sat/transitions_in_no_overlap_sample_sat.ipynb @@ -83,7 +83,8 @@ "metadata": {}, "outputs": [], "source": [ - "from typing import Dict, List, Sequence, Tuple, 
Union\n", + "from collections.abc import Sequence\n", + "from typing import Union\n", "\n", "from ortools.sat.python import cp_model\n", "\n", @@ -93,9 +94,9 @@ " starts: Sequence[cp_model.IntVar],\n", " durations: Sequence[int],\n", " presences: Sequence[Union[cp_model.IntVar, bool]],\n", - " penalties: Dict[Tuple[int, int], int],\n", - " delays: Dict[Tuple[int, int], int],\n", - ") -> Sequence[Tuple[cp_model.IntVar, int]]:\n", + " penalties: dict[tuple[int, int], int],\n", + " delays: dict[tuple[int, int], int],\n", + ") -> Sequence[tuple[cp_model.IntVar, int]]:\n", " \"\"\"This method uses a circuit constraint to rank tasks.\n", "\n", " This method assumes that all starts are disjoint, meaning that all tasks have\n", @@ -132,7 +133,7 @@ " num_tasks = len(starts)\n", " all_tasks = range(num_tasks)\n", "\n", - " arcs: List[cp_model.ArcT] = []\n", + " arcs: list[cp_model.ArcT] = []\n", " penalty_terms = []\n", " for i in all_tasks:\n", " # if node i is first.\n", diff --git a/examples/notebook/set_cover/set_cover.ipynb b/examples/notebook/set_cover/set_cover.ipynb index 3bc66a08eb1..d41780de5e1 100644 --- a/examples/notebook/set_cover/set_cover.ipynb +++ b/examples/notebook/set_cover/set_cover.ipynb @@ -86,6 +86,7 @@ "from ortools.set_cover.python import set_cover\n", "\n", "\n", + "\n", "def main():\n", " model = set_cover.SetCoverModel()\n", " model.add_empty_subset(2.0)\n", From cc8ee317d764a40cf8ab580d5bcf05f3791fa335 Mon Sep 17 00:00:00 2001 From: Yoong Hor Meng Date: Sun, 21 Dec 2025 06:52:47 +0100 Subject: [PATCH 081/111] Rename CPSolver to CpSolver --- ortools/sat/python/cp_model.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ortools/sat/python/cp_model.py b/ortools/sat/python/cp_model.py index 435be0f7baa..d446bebd6e5 100644 --- a/ortools/sat/python/cp_model.py +++ b/ortools/sat/python/cp_model.py @@ -18,7 +18,7 @@ * [`CpModel`](#cp_model.CpModel): Methods for creating models, including variables and constraints. 
-* [`CPSolver`](#cp_model.CpSolver): Methods for solving +* [`CpSolver`](#cp_model.CpSolver): Methods for solving a model and evaluating solutions. The following methods implement callbacks that the From 5ed34077c797ef72fdb8ee70bf7348183ec2594a Mon Sep 17 00:00:00 2001 From: Mizux Seiha Date: Mon, 22 Dec 2025 14:10:33 +0100 Subject: [PATCH 082/111] tools/docker: fix cmake install while debian-12 provide cmake 3.25.1 this one still not support SYSTEM option in FetchContent --- tools/docker/images/debian-12.Dockerfile | 9 ++++++++- tools/docker/test/debian-12/cpp.Dockerfile | 9 ++++++++- tools/docker/test/ubuntu-22.04/cpp.Dockerfile | 9 ++++++++- 3 files changed, 24 insertions(+), 3 deletions(-) diff --git a/tools/docker/images/debian-12.Dockerfile b/tools/docker/images/debian-12.Dockerfile index da883df0ff5..eed7fe1714e 100644 --- a/tools/docker/images/debian-12.Dockerfile +++ b/tools/docker/images/debian-12.Dockerfile @@ -6,13 +6,20 @@ FROM debian:12 AS env ############# RUN apt-get update -qq \ && apt-get install -qq \ - git pkg-config wget cmake make autoconf libtool zlib1g-dev gawk g++ curl subversion \ + git pkg-config wget make autoconf libtool zlib1g-dev gawk g++ curl subversion \ swig lsb-release libicu-dev \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* ENTRYPOINT ["/bin/bash", "-c"] CMD ["/bin/bash"] +# Install CMake 3.31.0 +RUN ARCH=$(uname -m) \ +&& wget -q "https://cmake.org/files/v3.31/cmake-3.31.0-linux-${ARCH}.sh" \ +&& chmod a+x cmake-3.31.0-linux-${ARCH}.sh \ +&& ./cmake-3.31.0-linux-${ARCH}.sh --prefix=/usr/local/ --skip-license \ +&& rm cmake-3.31.0-linux-${ARCH}.sh + # Install .Net # see: https://learn.microsoft.com/en-us/dotnet/core/install/linux-scripted-manual#scripted-install RUN wget -q "https://dot.net/v1/dotnet-install.sh" \ diff --git a/tools/docker/test/debian-12/cpp.Dockerfile b/tools/docker/test/debian-12/cpp.Dockerfile index 4627a050105..62125c5b699 100644 --- a/tools/docker/test/debian-12/cpp.Dockerfile +++ 
b/tools/docker/test/debian-12/cpp.Dockerfile @@ -2,12 +2,19 @@ FROM debian:12 RUN apt-get update \ -&& apt-get install -yq wget build-essential cmake zlib1g-dev \ +&& apt-get install -yq wget build-essential zlib1g-dev \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* ENTRYPOINT ["/bin/bash", "-c"] CMD ["/bin/bash"] +# Install CMake 3.31.0 +RUN ARCH=$(uname -m) \ +&& wget -q "https://cmake.org/files/v3.31/cmake-3.31.0-linux-${ARCH}.sh" \ +&& chmod a+x cmake-3.31.0-linux-${ARCH}.sh \ +&& ./cmake-3.31.0-linux-${ARCH}.sh --prefix=/usr/local/ --skip-license \ +&& rm cmake-3.31.0-linux-${ARCH}.sh + WORKDIR /root ADD or-tools_amd64_debian-12_cpp_v*.tar.gz . diff --git a/tools/docker/test/ubuntu-22.04/cpp.Dockerfile b/tools/docker/test/ubuntu-22.04/cpp.Dockerfile index 01ca4b35f2a..61f81f24135 100644 --- a/tools/docker/test/ubuntu-22.04/cpp.Dockerfile +++ b/tools/docker/test/ubuntu-22.04/cpp.Dockerfile @@ -3,10 +3,17 @@ FROM ubuntu:22.04 ENV DEBIAN_FRONTEND=noninteractive RUN apt-get update -qq \ -&& apt-get install -yq build-essential cmake zlib1g-dev \ +&& apt-get install -yq build-essential zlib1g-dev \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* +# Install CMake 3.31.0 +RUN ARCH=$(uname -m) \ +&& wget -q "https://cmake.org/files/v3.31/cmake-3.31.0-linux-${ARCH}.sh" \ +&& chmod a+x cmake-3.31.0-linux-${ARCH}.sh \ +&& ./cmake-3.31.0-linux-${ARCH}.sh --prefix=/usr/local/ --skip-license \ +&& rm cmake-3.31.0-linux-${ARCH}.sh + WORKDIR /root ADD or-tools_amd64_ubuntu-22.04_cpp_v*.tar.gz . 
From a790483ba14656b1740e7940d77ba0cda92990cb Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Mon, 5 Jan 2026 03:46:42 -0800 Subject: [PATCH 083/111] cmake: Fix windows LNK1189 error (#4627) --- cmake/cpp.cmake | 1 + 1 file changed, 1 insertion(+) diff --git a/cmake/cpp.cmake b/cmake/cpp.cmake index 16e82c1467b..3cf0921a7bc 100644 --- a/cmake/cpp.cmake +++ b/cmake/cpp.cmake @@ -110,6 +110,7 @@ if(MSVC) "/D_CRT_SECURE_NO_WARNINGS" "/D_CRT_SECURE_NO_DEPRECATE" "/MP" # Build with multiple processes + "/Zc:inline" # Remove unreferenced COMDAT "/Zc:preprocessor" # Enable preprocessor conformance mode "/fp:precise" ) From 4f5557884f490d0b0be11bd121fa69d886730992 Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Mon, 5 Jan 2026 17:43:50 +0100 Subject: [PATCH 084/111] cmake: fix soplex boost check in a super build --- cmake/dependencies/CMakeLists.txt | 1 + patches/soplex-v8.0.0.patch | 8 +++++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/cmake/dependencies/CMakeLists.txt b/cmake/dependencies/CMakeLists.txt index 432f7f53f8a..66211302898 100644 --- a/cmake/dependencies/CMakeLists.txt +++ b/cmake/dependencies/CMakeLists.txt @@ -345,6 +345,7 @@ if(BUILD_Boost) UPDATE_COMMAND git reset --hard --recurse-submodules PATCH_COMMAND git apply --ignore-whitespace "${CMAKE_CURRENT_LIST_DIR}/../../patches/boost-1.87.0.patch" + OVERRIDE_FIND_PACKAGE # Needed for Soplex SYSTEM ) set(BOOST_INCLUDE_LIBRARIES multiprecision serialization) diff --git a/patches/soplex-v8.0.0.patch b/patches/soplex-v8.0.0.patch index f7f4410a30c..14a2aeb71b8 100644 --- a/patches/soplex-v8.0.0.patch +++ b/patches/soplex-v8.0.0.patch @@ -1,7 +1,13 @@ diff --git a/CMakeLists.txt b/CMakeLists.txt -index 9511442..58a8a58 100644 +index 9511442..3993fc4 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt +@@ -1,4 +1,4 @@ +-cmake_minimum_required(VERSION 3.11) ++cmake_minimum_required(VERSION 3.25) + + # FindBoost is removed in version cmake 3.30 + if(POLICY CMP0167) @@ -31,6 +31,10 @@ 
set(CPACK_PACKAGE_VERSION_PATCH "${SOPLEX_VERSION_PATCH}") set(CPACK_PACKAGE_VENDOR "Zuse Institute Berlin") include(CPack) From 91afd9da7515c8ca274e0bcd285d53e272d756ef Mon Sep 17 00:00:00 2001 From: Mizux Seiha Date: Tue, 6 Jan 2026 17:48:37 +0100 Subject: [PATCH 085/111] tools/release: fix macos protoc-gen-mypy check --- tools/release/build_delivery_macos.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/release/build_delivery_macos.sh b/tools/release/build_delivery_macos.sh index 79244cae903..97d983fbc57 100755 --- a/tools/release/build_delivery_macos.sh +++ b/tools/release/build_delivery_macos.sh @@ -251,7 +251,7 @@ function build_python() { echo "check protoc-gen-mypy..." command -v protoc-gen-mypy | xargs echo "protoc-gen-mypy: " | tee -a build.log protoc-gen-mypy --version | xargs echo "protoc-gen-mypy version: " | tee -a build.log - protoc-gen-mypy --version | grep "3\.6\.0" + protoc-gen-mypy --version | grep "4\.0\.0" done declare -a MYPY_FILES=( From 642c19b38a211338d8f9405bdf5a90fc2944289a Mon Sep 17 00:00:00 2001 From: Mizux Seiha Date: Tue, 6 Jan 2026 17:49:12 +0100 Subject: [PATCH 086/111] tools/release: workaround stubgen timeout on macos M1 --- tools/release/build_delivery_macos.sh | 24 +++++++++--------------- 1 file changed, 9 insertions(+), 15 deletions(-) diff --git a/tools/release/build_delivery_macos.sh b/tools/release/build_delivery_macos.sh index 97d983fbc57..c6b4291404b 100755 --- a/tools/release/build_delivery_macos.sh +++ b/tools/release/build_delivery_macos.sh @@ -291,21 +291,15 @@ function build_python() { cmake --build "temp_python${PY_VERSION}" --target ortools -j8 -v echo "DONE" | tee -a build.log - if [[ ${PLATFORM} == "x86_64" ]]; then - echo -n " Build all few times..." 
| tee -a build.log - # on macos X86_64 stubgen will timeout -> need to build few times - cmake --build "temp_python${PY_VERSION}" -j4 -v || true - sleep 10 - cmake --build "temp_python${PY_VERSION}" -v || true - echo "DONE" | tee -a build.log - echo -n " ReBuild all..." | tee -a build.log - cmake --build "temp_python${PY_VERSION}" -j4 -v - echo "DONE" | tee -a build.log - else - echo -n " Build all..." | tee -a build.log - cmake --build "temp_python${PY_VERSION}" -j8 -v - echo "DONE" | tee -a build.log - fi + echo -n " Build all few times..." | tee -a build.log + # on macos stubgen will timeout -> need to build few times + cmake --build "temp_python${PY_VERSION}" -j4 -v || true + sleep 10 + cmake --build "temp_python${PY_VERSION}" -v || true + echo "DONE" | tee -a build.log + echo -n " ReBuild all..." | tee -a build.log + cmake --build "temp_python${PY_VERSION}" -j4 -v + echo "DONE" | tee -a build.log echo -n " Check libortools.dylib..." | tee -a build.log otool -L "temp_python${PY_VERSION}/lib/libortools.dylib" | grep -vqz "/Users" From 29a2cbf0a7f322d6d3e7ac7deb59e1d456a3d943 Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Wed, 7 Jan 2026 15:06:35 +0100 Subject: [PATCH 087/111] julia: add first version of ORToolsBinaries.jl --- ortools/julia/ORToolsBinaries.jl/LICENSE.md | 202 ++++++++++++++++++ ortools/julia/ORToolsBinaries.jl/Project.toml | 16 ++ ortools/julia/ORToolsBinaries.jl/README.md | 10 + .../julia/ORToolsBinaries.jl/deps/build.jl | 125 +++++++++++ .../ORToolsBinaries.jl/src/ORToolsBinaries.jl | 23 ++ 5 files changed, 376 insertions(+) create mode 100644 ortools/julia/ORToolsBinaries.jl/LICENSE.md create mode 100644 ortools/julia/ORToolsBinaries.jl/Project.toml create mode 100644 ortools/julia/ORToolsBinaries.jl/README.md create mode 100644 ortools/julia/ORToolsBinaries.jl/deps/build.jl create mode 100644 ortools/julia/ORToolsBinaries.jl/src/ORToolsBinaries.jl diff --git a/ortools/julia/ORToolsBinaries.jl/LICENSE.md 
b/ortools/julia/ORToolsBinaries.jl/LICENSE.md new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/ortools/julia/ORToolsBinaries.jl/LICENSE.md @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/ortools/julia/ORToolsBinaries.jl/Project.toml b/ortools/julia/ORToolsBinaries.jl/Project.toml new file mode 100644 index 00000000000..6fca905e432 --- /dev/null +++ b/ortools/julia/ORToolsBinaries.jl/Project.toml @@ -0,0 +1,16 @@ +name = "ORToolsBinaries" +uuid = "594ad865-6a17-49ee-8f22-76ed020c7c08" +version = "0.0.1" + +[deps] +Downloads = "f43a241f-c20a-4ad4-852c-f6b1247861c6" +ZipArchives = "49080126-0e18-4c2a-b176-c102e4b3760c" +Tar = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" +CodecZlib = "944b1d66-785c-5afd-91f1-9de20f533193" + +[compat] +julia = "1.9" +Downloads = "1" +ZipArchives = "2" +Tar = "1" +CodecZlib = "0.7" diff --git a/ortools/julia/ORToolsBinaries.jl/README.md b/ortools/julia/ORToolsBinaries.jl/README.md new file mode 100644 index 00000000000..df304a71605 --- /dev/null +++ b/ortools/julia/ORToolsBinaries.jl/README.md @@ -0,0 +1,10 @@ +# ORToolsBinaries + +This package, upon installation, downloads the latest OR-Tools binaries from +GitHub. This is especially useful on platforms like Windows or ARM64/Aarch64 +because no JLL is available for them. + +This package otherwise provides the same interface as +[`ORTools_jll.jl`](https://github.com/JuliaBinaryWrappers/ORTools_jll.jl), +i.e. a global variable `libortools` that contains the path to the local copy +of OR-Tools. 
diff --git a/ortools/julia/ORToolsBinaries.jl/deps/build.jl b/ortools/julia/ORToolsBinaries.jl/deps/build.jl new file mode 100644 index 00000000000..27fc8c71f3d --- /dev/null +++ b/ortools/julia/ORToolsBinaries.jl/deps/build.jl @@ -0,0 +1,125 @@ +import Downloads +import ZipArchives +import Tar +import CodecZlib + +const BASE_URL = "https://github.com/google/or-tools/releases/download" +const ORTOOLS_MINOR_VERSION = "9.14" +const ORTOOLS_PATCH_VERSION = "9.14.6206" + +const PACKAGE_FILE_NAME_WITHOUT_EXTENSION = Dict{String, String}( + "linux_arm64" => "or-tools_aarch64_AlmaLinux-8.10_cpp_v$ORTOOLS_PATCH_VERSION", + "linux_x64" => "or-tools_amd64_almalinux-9_cpp_v$ORTOOLS_PATCH_VERSION", + "macos_arm64" => "or-tools_arm64_macOS-15.5_cpp_v$ORTOOLS_PATCH_VERSION", + "macos_x64" => "or-tools_x86_64_macOS-15.5_cpp_v$ORTOOLS_PATCH_VERSION", + "windows_x64" => "or-tools_x64_VisualStudio2022_cpp_v$ORTOOLS_PATCH_VERSION", +) + +const TARGET_PACKAGES = Dict{String, String}( + platform => "$BASE_URL/v$ORTOOLS_MINOR_VERSION/$name.$(ifelse(startswith(platform, "windows"), "zip", "tar.gz"))" + for (platform, name) in PACKAGE_FILE_NAME_WITHOUT_EXTENSION +) + +const DEPS_DIR = @__DIR__ + +println("WARNING: if ORTools_jll provides binaries for your platform, prefer using them rather than this package.") +println() +println("Downloading and installing a precompiled version of OR-Tools...") +println("BASE_URL: $BASE_URL, ORTOOLS_MINOR_VERSION: $ORTOOLS_MINOR_VERSION, ORTOOLS_PATCH_VERSION: $ORTOOLS_PATCH_VERSION") + +key = "unknown" +if Sys.islinux() + if Sys.ARCH === :x86_64 + key = "linux_x64" + elseif Sys.ARCH === :aarch64 + key = "linux_arm64" + else + key = "linux_unknown" + end +elseif Sys.isapple() + if Sys.ARCH === :x86_64 + key = "macos_x64" + elseif Sys.ARCH === :aarch64 + key = "macos_arm64" + else + key = "macos_unknown" + end +elseif Sys.iswindows() + if Sys.ARCH === :x86_64 + key = "windows_x64" + elseif Sys.ARCH === :aarch64 + key = "windows_arm64" + else + key = 
"windows_unknown" + end +end + +println("Sys.islinux: $(Sys.islinux()), Sys.isapple: $(Sys.isapple()), Sys.iswindows: $(Sys.iswindows()), Sys.ARCH: $(Sys.ARCH)") +println("Detected platform: $key") +if !(key in keys(TARGET_PACKAGES)) + error("No package found for $key. Known packages: $(keys(TARGET_PACKAGES)). Maybe ORTools_jll contains a package for your platform.") +end + +println("Downloading the following binary package:") +println(TARGET_PACKAGES[key]) +package = Downloads.download(TARGET_PACKAGES[key]) +println("Package downloaded. Size: $(filesize(package)) bytes, i.e. roughly $(round(Int, filesize(package) / 1024 / 1024)) MiB") +println("Local path (temporary): $package") + +dest_dir = joinpath(DEPS_DIR, "lib") +if isdir(dest_dir) + rm(dest_dir, recursive=true, force=true) +end +mkpath(dest_dir) + +count_files = 0 +if endswith(TARGET_PACKAGES[key], ".zip") + # Only for Windows. The ZIP archive contains a folder with the same name + # as the archive itself. We only need the DLLs in the `bin` folder. + zr = ZipArchives.ZipReader(read(package)) + for name in ZipArchives.zip_names(zr) + if startswith(name, PACKAGE_FILE_NAME_WITHOUT_EXTENSION[key]) && endswith(name, ".dll") + filename = basename(name) + println("Extracting: $filename (path in the ZIP archive: $name)") + + ZipArchives.zip_openentry(zr, name) do io + open(joinpath(dest_dir, filename), "w") do f + write(f, io) + end + end + global count_files + count_files += 1 + end + end +elseif endswith(TARGET_PACKAGES[key], ".tar.gz") + # For all other platforms. The TAR.GZ archives contains a folder with the + # same name as the archive itself. We need all the dynamic libraries in + # the `lib` or `lib64` folder (`lib` is for MacOS, `lib64` is for Linux). 
+ open(package, "r") do tar_gz + tar_stream = CodecZlib.GzipDecompressorStream(tar_gz) + + Tar.extract(tar_stream, dest_dir) do header + name = header.path + should_extract = startswith(name, PACKAGE_FILE_NAME_WITHOUT_EXTENSION[key]) && + (endswith(name, ".so") || endswith(name, ".dylib")) + if should_extract + println("Extracting: $(basename(name)) (path in TAR.GZ archive: $(name))") + end + return should_extract + end + end + + # Flatten the output, because Tar.extract recreates whatever path it + # finds in the archive. + for (root, dirs, files) in walkdir(dest_dir) + for file in files + mv(joinpath(root, file), joinpath(dest_dir, file), force=true) + end + end + + # Clean up after flattening. + rm(joinpath(dest_dir, PACKAGE_FILE_NAME_WITHOUT_EXTENSION[key]), recursive=true) +else + error("Assertion failed: archive type not supported. Please report the problem to the maintainers of OR-Tools.") +end +println("Installed $count_files files in $dest_dir.") diff --git a/ortools/julia/ORToolsBinaries.jl/src/ORToolsBinaries.jl b/ortools/julia/ORToolsBinaries.jl/src/ORToolsBinaries.jl new file mode 100644 index 00000000000..b254befaaf3 --- /dev/null +++ b/ortools/julia/ORToolsBinaries.jl/src/ORToolsBinaries.jl @@ -0,0 +1,23 @@ +module ORToolsBinaries + +export libortools + +const _SRC_DIR = @__DIR__ +const _DEPS_DIR = joinpath(_SRC_DIR, "..", "deps") +const _LIB_DIR = joinpath(_DEPS_DIR, "lib") + +if Sys.islinux() + const libortools = joinpath(_LIB_DIR, "libortools.so") +elseif Sys.isapple() + const libortools = joinpath(_LIB_DIR, "libortools.dylib") +elseif Sys.iswindows() + const libortools = joinpath(_LIB_DIR, "ortools.dll") +else + @error "Platform not supported! If ORTools_jll provides binaries for your platform, prefer using them rather than this package." 
+end + +if !isfile(libortools) + @error "Installing ORToolsBinaries failed, the expected binary is not available at the expected path ($libortools)" +end + +end From b8b9a5170ab89b0e0c36556af641167bdf788341 Mon Sep 17 00:00:00 2001 From: Guillaume Chatelet Date: Wed, 17 Dec 2025 20:09:35 +0100 Subject: [PATCH 088/111] Small fixes (#4957) --- ortools/linear_solver/glop_utils.cc | 4 ++++ ortools/set_cover/set_cover_heuristics.cc | 17 ++++++++++------- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/ortools/linear_solver/glop_utils.cc b/ortools/linear_solver/glop_utils.cc index 7c9634e89a9..1620653a4ac 100644 --- a/ortools/linear_solver/glop_utils.cc +++ b/ortools/linear_solver/glop_utils.cc @@ -13,6 +13,10 @@ #include "ortools/linear_solver/glop_utils.h" +#include "absl/log/log.h" +#include "ortools/linear_solver/linear_solver.h" +#include "ortools/lp_data/lp_types.h" + namespace operations_research { MPSolver::ResultStatus GlopToMPSolverResultStatus(glop::ProblemStatus s) { diff --git a/ortools/set_cover/set_cover_heuristics.cc b/ortools/set_cover/set_cover_heuristics.cc index ef4e344dc57..d9ca035ba01 100644 --- a/ortools/set_cover/set_cover_heuristics.cc +++ b/ortools/set_cover/set_cover_heuristics.cc @@ -240,18 +240,21 @@ namespace { // - improve performance. // - use vectorized code. 
namespace internal { -uint32_t RawBits(uint32_t x) { return x; } // NOLINT -uint32_t RawBits(int x) { return absl::bit_cast(x); } // NOLINT -uint32_t RawBits(float x) { return absl::bit_cast(x); } // NOLINT -uint64_t RawBits(uint64_t x) { return x; } // NOLINT -uint64_t RawBits(int64_t x) { return absl::bit_cast(x); } // NOLINT -uint64_t RawBits(double x) { return absl::bit_cast(x); } // NOLINT +template +auto RawBits(T x) { + if constexpr (sizeof(T) == sizeof(uint32_t)) { + return absl::bit_cast(x); + } else { + static_assert(sizeof(T) == sizeof(uint64_t)); + return absl::bit_cast(x); + } +} inline uint32_t Bucket(uint32_t x, uint32_t shift, uint32_t radix) { DCHECK_EQ(0, radix & (radix - 1)); // Must be a power of two. // NOMUTANTS -- a way to compute the remainder of a division when radix is a // power of two. - return (RawBits(x) >> shift) & (radix - 1); + return (x >> shift) & (radix - 1); } template From f4fdf9466f3fd2b9d53c0390a2aee95cc96e6cb0 Mon Sep 17 00:00:00 2001 From: Guillaume Chatelet Date: Wed, 17 Dec 2025 21:23:58 +0100 Subject: [PATCH 089/111] Update Bazel linear solver build (#4956) --- ortools/linear_solver/BUILD.bazel | 387 ++++++++++++++++++++++-------- 1 file changed, 283 insertions(+), 104 deletions(-) diff --git a/ortools/linear_solver/BUILD.bazel b/ortools/linear_solver/BUILD.bazel index 871c25c4f0f..f26292ab8b5 100644 --- a/ortools/linear_solver/BUILD.bazel +++ b/ortools/linear_solver/BUILD.bazel @@ -170,140 +170,52 @@ py_proto_library( cc_library( name = "linear_solver", srcs = [ - "linear_expr.cc", - "linear_solver.cc", - "linear_solver_callback.cc", "sat_interface.cc", ] + select({ - ":use_bop": ["bop_interface.cc"], - "//conditions:default": [], - }) + select({ - ":use_cbc": ["cbc_interface.cc"], - "//conditions:default": [], - }) + select({ - ":use_clp": ["clp_interface.cc"], - "//conditions:default": [], - }) + select({ - ":use_glop": [ - "glop_interface.cc", - "glop_utils.cc", - ], - "//conditions:default": [], - }) + select({ - 
":use_glpk": ["glpk_interface.cc"], - "//conditions:default": [], - }) + select({ - ":use_gurobi": [ - "gurobi_interface.cc", - "gurobi_util.cc", - ], - "//conditions:default": [], - }) + select({ - ":use_highs": ["highs_interface.cc"], - "//conditions:default": [], - }) + select({ - ":use_pdlp": ["pdlp_interface.cc"], - "//conditions:default": [], - }) + select({ - ":use_scip": [ - "scip_callback.cc", - "scip_interface.cc", - ], - "//conditions:default": [], - }) + select({ ":use_cplex": ["cplex_interface.cc"], "//conditions:default": [], }) + select({ ":use_xpress": ["xpress_interface.cc"], "//conditions:default": [], }), - hdrs = [ - "linear_expr.h", - "linear_solver.h", - "linear_solver_callback.h", - ] + select({ - ":use_glop": ["glop_utils.h"], - "//conditions:default": [], - }) + select({ - ":use_gurobi": ["gurobi_util.h"], - "//conditions:default": [], - }) + select({ - ":use_scip": [ - "scip_callback.h", - "scip_helper_macros.h", - ], - "//conditions:default": [], - }), deps = [ + ":linear_solver_base", ":linear_solver_cc_proto", - ":model_exporter", - ":model_validator", "//ortools/base", - "//ortools/base:accurate_sum", - "//ortools/base:hash", - "//ortools/base:logging", - "//ortools/base:map_util", - "//ortools/base:status_macros", - "//ortools/base:stl_util", - "//ortools/base:timer", + "//ortools/linear_solver/proto_solver:proto_utils", "//ortools/linear_solver/proto_solver:sat_proto_solver", - "//ortools/port:file", "//ortools/port:proto_utils", "//ortools/sat:cp_model_cc_proto", "//ortools/sat:cp_model_solver", - "//ortools/sat:lp_utils", - "//ortools/util:fp_utils", "//ortools/util:lazy_mutable_copy", + "@abseil-cpp//absl/base:core_headers", "@abseil-cpp//absl/status", - "@abseil-cpp//absl/status:statusor", - "@abseil-cpp//absl/strings", - "@abseil-cpp//absl/synchronization", - "@abseil-cpp//absl/types:optional", ] + select({ - ":use_bop": [ - "//ortools/bop:bop_parameters_cc_proto", - "//ortools/bop:integral_solver", - ], + ":use_bop": 
[":linear_solver_bop"], "//conditions:default": [], }) + select({ - ":use_glop": [ - "//ortools/glop:lp_solver", - "//ortools/glop:parameters_cc_proto", - "//ortools/linear_solver/proto_solver:glop_proto_solver", - ], + ":use_cbc": [":linear_solver_cbc"], "//conditions:default": [], }) + select({ - ":use_glpk": [ - "//ortools/glpk:glpk_env_deleter", - "@glpk", - ], + ":use_clp": [":linear_solver_clp"], "//conditions:default": [], }) + select({ - ":use_gurobi": [ - "//ortools/linear_solver/proto_solver:gurobi_proto_solver", - "//ortools/third_party_solvers:gurobi_environment", - ], + ":use_glop": [":linear_solver_glop"], "//conditions:default": [], }) + select({ - ":use_pdlp": [ - "//ortools/linear_solver/proto_solver:pdlp_proto_solver", - "//ortools/pdlp:primal_dual_hybrid_gradient", - "//ortools/pdlp:solve_log_cc_proto", - "//ortools/pdlp:solvers_cc_proto", - ], + ":use_glpk": [":linear_solver_glpk"], "//conditions:default": [], }) + select({ - ":use_scip": [ - "//ortools/linear_solver/proto_solver:scip_params", - "//ortools/linear_solver/proto_solver:scip_proto_solver", - "@scip", - ], + ":use_gurobi": [":linear_solver_gurobi"], "//conditions:default": [], }) + select({ - ":use_highs": [ - "//ortools/linear_solver/proto_solver:highs_proto_solver", - "@highs", - ], + ":use_pdlp": [":linear_solver_pdlp"], + "//conditions:default": [], + }) + select({ + ":use_scip": [":linear_solver_scip"], + "//conditions:default": [], + }) + select({ + ":use_highs": [":linear_solver_highs"], "//conditions:default": [], }) + select({ ":use_xpress": ["//ortools/third_party_solvers:xpress_environment"], @@ -312,6 +224,160 @@ cc_library( alwayslink = 1, # Important! Library is used via dependency injection. ) +# Recommended target to do linear programming (LP). 
+cc_library( + name = "linear_solver_glop", + srcs = ["glop_interface.cc"], + deps = [ + ":glop_utils", + ":linear_solver_base", + "//ortools/base", + "//ortools/glop:lp_solver", + "//ortools/glop:parameters_cc_proto", + "//ortools/linear_solver/proto_solver:glop_proto_solver", + "//ortools/lp_data", + "//ortools/lp_data:base", + "//ortools/port:proto_utils", + "//ortools/util:lazy_mutable_copy", + "//ortools/util:time_limit", + "@abseil-cpp//absl/base:core_headers", + "@abseil-cpp//absl/log:check", + ], + alwayslink = 1, # Important! Library is used via dependency injection. +) + +# Most problems are solved faster by GLOP (and with better numerical accuracy +# and better behavior on ill-conditioned problems); but we still provide +# some free third-party solvers. CLP is usually the best among those. +# GUROBI is the best, even better than GLOP, but it has a restricted license. +cc_library( + name = "linear_solver_clp", + srcs = ["clp_interface.cc"], + target_compatible_with = select({ + ":use_clp": [], + "//conditions:default": ["@platforms//:incompatible"], + }), + deps = [ + ":linear_solver_base", + "//ortools/base", + "//ortools/base:timer", + "//third_party/cbc:clp", + "//third_party/cbc:coinutils", + "@abseil-cpp//absl/base:core_headers", + "@abseil-cpp//absl/strings:str_format", + ], + alwayslink = 1, # Important! Library is used via dependency injection. +) + +cc_library( + name = "scip_callback", + srcs = ["scip_callback.cc"], + hdrs = ["scip_callback.h"], + deps = [ + ":linear_solver_base", + ":scip_helper_macros", + "//ortools/base", + "@abseil-cpp//absl/types:span", + "@scip", + ], +) + +# Recommended target for mixed integer programming. +# +# NOTE: You can change the default underlying LP engine (Soplex) to GLOP or CLP +# by setting a define when you build your end target: +# blaze build -c opt --define scip_lp_solver=glop my/build:target. +# blaze build -c opt --define scip_lp_solver=clp my/build:target. 
+cc_library( + name = "linear_solver_scip", + srcs = ["scip_interface.cc"], + deps = [ + ":linear_solver_base", + ":linear_solver_cc_proto", + ":scip_callback", + ":scip_helper_macros", + "//ortools/base", + "//ortools/base:sysinfo", + "//ortools/base:timer", + "//ortools/linear_solver/proto_solver:proto_utils", + "//ortools/linear_solver/proto_solver:scip_params", + "//ortools/linear_solver/proto_solver:scip_proto_solver", + "//ortools/util:lazy_mutable_copy", + "@abseil-cpp//absl/base:core_headers", + "@abseil-cpp//absl/cleanup", + "@abseil-cpp//absl/flags:flag", + "@abseil-cpp//absl/status", + "@abseil-cpp//absl/strings:str_format", + "@abseil-cpp//absl/synchronization", + "@abseil-cpp//absl/time", + "@scip", + ], + alwayslink = 1, # Important! Library is used via dependency injection. +) + +# Use with caution. For example, it is not thread-safe. +cc_library( + name = "linear_solver_glpk", + srcs = ["glpk_interface.cc"], + deps = [ + ":linear_solver_base", + "//ortools/base", + "//ortools/base:timer", + "//ortools/third_party_solvers/glpk:glpk_env_deleter", + "@abseil-cpp//absl/base:core_headers", + "@abseil-cpp//absl/strings:str_format", + "@glpk", + ], + alwayslink = 1, # Important! Library is used via dependency injection. +) + +# Use with caution. For example, it has yielded erroneous solutions +# in the past (contact or-core-team@ for details). +cc_library( + name = "linear_solver_cbc", + srcs = ["cbc_interface.cc"], + target_compatible_with = select({ + ":use_cbc": [], + "//conditions:default": ["@platforms//:incompatible"], + }), + deps = [ + ":linear_solver_base", + "//ortools/base", + "//ortools/base:timer", + "@abseil-cpp//absl/base:core_headers", + "@abseil-cpp//absl/status", + "@abseil-cpp//absl/strings:str_format", + ], + alwayslink = 1, # Important! Library is used via dependency injection. 
+) + +cc_library( + name = "linear_solver_gurobi", + srcs = ["gurobi_interface.cc"], + deps = [ + ":gurobi_util", + ":linear_solver_base", + "//ortools/base", + "//ortools/base:status_macros", + "//ortools/base:timer", + "//ortools/linear_solver/proto_solver:gurobi_proto_solver", + "//ortools/linear_solver/proto_solver:proto_utils", + "//ortools/util:lazy_mutable_copy", + "//ortools/util:time_limit", + "@abseil-cpp//absl/base:core_headers", + "@abseil-cpp//absl/container:flat_hash_map", + "@abseil-cpp//absl/container:flat_hash_set", + "@abseil-cpp//absl/flags:flag", + "@abseil-cpp//absl/log:check", + "@abseil-cpp//absl/log:die_if_null", + "@abseil-cpp//absl/status", + "@abseil-cpp//absl/strings:str_format", + "@abseil-cpp//absl/synchronization", + "@abseil-cpp//absl/time", + ], + alwayslink = 1, # Important! Library is used via dependency injection. +) + cc_library( name = "gurobi_util", srcs = ["gurobi_util.cc"], @@ -328,6 +394,119 @@ cc_library( ], ) +# Experimental. Boolean optimization problem solver. +# This works best on MIP problem where all the variables are Boolean integers. +cc_library( + name = "linear_solver_bop", + srcs = ["bop_interface.cc"], + deps = [ + ":linear_solver_base", + "//ortools/base", + "//ortools/bop:bop_parameters_cc_proto", + "//ortools/bop:integral_solver", + "@abseil-cpp//absl/base:core_headers", + ], + alwayslink = 1, # Important! Library is used via dependency injection. 
+) + +cc_library( + name = "linear_solver_pdlp", + srcs = ["pdlp_interface.cc"], + deps = [ + ":linear_solver_base", + ":linear_solver_cc_proto", + "//ortools/base", + "//ortools/linear_solver/proto_solver:pdlp_proto_solver", + "//ortools/linear_solver/proto_solver:proto_utils", + "//ortools/pdlp:solve_log_cc_proto", + "//ortools/pdlp:solvers_cc_proto", + "//ortools/port:proto_utils", + "//ortools/util:lazy_mutable_copy", + "@abseil-cpp//absl/base:core_headers", + "@abseil-cpp//absl/status", + "@abseil-cpp//absl/status:statusor", + "@abseil-cpp//absl/strings", + ], + alwayslink = 1, # Important! Library is used via dependency injection. +) + +# Highs solver; +cc_library( + name = "linear_solver_highs", + srcs = ["highs_interface.cc"], + deps = [ + ":linear_solver_base", + ":linear_solver_cc_proto", + "//ortools/base", + "//ortools/linear_solver/proto_solver:highs_proto_solver", + "//ortools/linear_solver/proto_solver:proto_utils", + "//ortools/util:lazy_mutable_copy", + "@abseil-cpp//absl/base:core_headers", + "@abseil-cpp//absl/log:check", + "@abseil-cpp//absl/status", + "@abseil-cpp//absl/status:statusor", + "@abseil-cpp//absl/strings", + ], + alwayslink = 1, # Important! Library is used via dependency injection. +) + +cc_library( + name = "linear_solver_base", + srcs = [ + "linear_expr.cc", + "linear_solver.cc", + "linear_solver_callback.cc", + ], + hdrs = [ + "linear_expr.h", + "linear_solver.h", + "linear_solver_callback.h", + ], + deps = [ + ":linear_solver_cc_proto", + ":model_exporter", + ":model_validator", + "//ortools/base", + "//ortools/base:accurate_sum", + "//ortools/base:base_export", + "//ortools/base:map_util", + "//ortools/base:numbers", + "//ortools/base:stl_util", + "//ortools/base:threadpool", + "//ortools/glop:parameters_cc_proto", + "//ortools/port:file", # Needed by go/lp-specific-params. Don't remove! 
+ "//ortools/port:proto_utils", + "//ortools/util:fp_utils", + "//ortools/util:lazy_mutable_copy", + "//ortools/util:testing_utils", + "//ortools/util:time_limit", + "@abseil-cpp//absl/base:core_headers", + "@abseil-cpp//absl/container:flat_hash_map", + "@abseil-cpp//absl/container:flat_hash_set", + "@abseil-cpp//absl/flags:flag", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:check", + "@abseil-cpp//absl/status", + "@abseil-cpp//absl/status:statusor", + "@abseil-cpp//absl/strings", + "@abseil-cpp//absl/strings:str_format", + "@abseil-cpp//absl/synchronization", + "@abseil-cpp//absl/time", + "@protobuf", + ], +) + +cc_library( + name = "glop_utils", + srcs = ["glop_utils.cc"], + hdrs = ["glop_utils.h"], + deps = [ + ":linear_solver_base", + "//ortools/lp_data:base", + "@abseil-cpp//absl/log", + ], +) + # Model exporter that can write MPS and LP file formats from an MPModelProto. cc_library( name = "model_exporter", From 0fe7cb5dbd486c15f26fb19b331a0e541eadb259 Mon Sep 17 00:00:00 2001 From: Laurent Perron Date: Thu, 18 Dec 2025 13:05:33 +0100 Subject: [PATCH 090/111] fixes --- ortools/constraint_solver/routing.cc | 34 +- ortools/constraint_solver/routing.h | 2 +- .../routing_decision_builders.cc | 9 +- .../routing_decision_builders.h | 2 +- ortools/flatzinc/BUILD.bazel | 1 + ortools/flatzinc/model.cc | 78 ++-- ortools/graph/minimum_spanning_tree.h | 1 - ortools/sat/BUILD.bazel | 4 + ortools/sat/clause.cc | 48 +- ortools/sat/clause.h | 32 +- ortools/sat/cp_model_lns.cc | 2 - ortools/sat/cp_model_presolve.cc | 9 +- ortools/sat/cp_model_solver_helpers.cc | 12 +- ortools/sat/integer_search.cc | 2 - ortools/sat/lrat.proto | 18 +- ortools/sat/lrat_checker.cc | 42 +- ortools/sat/lrat_checker.h | 16 +- ortools/sat/lrat_proof_handler.cc | 107 ++++- ortools/sat/lrat_proof_handler.h | 35 +- ortools/sat/probing.cc | 68 ++- ortools/sat/probing.h | 4 + ortools/sat/sat_inprocessing.cc | 51 ++- ortools/sat/sat_inprocessing.h | 7 +- ortools/sat/sat_parameters.proto | 12 +- 
ortools/sat/sat_sweeping.cc | 152 ++++--- ortools/sat/sat_sweeping.h | 6 +- ortools/sat/scheduling_helpers.cc | 16 +- ortools/sat/synchronization.cc | 7 +- ortools/sat/work_assignment.cc | 429 ++++++++++++++---- ortools/sat/work_assignment.h | 46 +- ortools/sat/work_assignment_test.cc | 40 ++ 31 files changed, 930 insertions(+), 362 deletions(-) diff --git a/ortools/constraint_solver/routing.cc b/ortools/constraint_solver/routing.cc index 9ddf0f38a23..975246d0d10 100644 --- a/ortools/constraint_solver/routing.cc +++ b/ortools/constraint_solver/routing.cc @@ -181,7 +181,7 @@ const Assignment* RoutingModel::PackCumulsOfOptimizerDimensionsFromAssignment( } for (auto& [lp_optimizer, mp_optimizer] : global_dimension_optimizers_) { decision_builders.push_back(MakeSetCumulsFromGlobalDimensionCosts( - solver_.get(), lp_optimizer.get(), mp_optimizer.get(), cumulative_limit, + solver_.get(), lp_optimizer.get(), mp_optimizer.get(), /*optimize_and_pack=*/true)); } decision_builders.push_back(finalizer_variables_->CreateFinalizer()); @@ -5844,7 +5844,7 @@ bool RoutingModel::AreRoutesInterdependent( } DecisionBuilder* RoutingModel::CreateSolutionFinalizer( - const RoutingSearchParameters& parameters, SearchLimit* lns_limit) { + const RoutingSearchParameters& parameters) { std::vector decision_builders; decision_builders.push_back(solver_->MakePhase( nexts_, Solver::CHOOSE_FIRST_UNBOUND, Solver::ASSIGN_MIN_VALUE)); @@ -5875,7 +5875,7 @@ DecisionBuilder* RoutingModel::CreateSolutionFinalizer( can_use_dimension_cumul_optimizers); for (auto& [lp_optimizer, mp_optimizer] : global_dimension_optimizers_) { decision_builders.push_back(MakeSetCumulsFromGlobalDimensionCosts( - solver_.get(), lp_optimizer.get(), mp_optimizer.get(), lns_limit)); + solver_.get(), lp_optimizer.get(), mp_optimizer.get())); } decision_builders.push_back(finalizer_variables_->CreateFinalizer()); @@ -5888,8 +5888,8 @@ void RoutingModel::CreateFirstSolutionDecisionBuilders( 
FirstSolutionStrategy_Value_Value_ARRAYSIZE, nullptr); first_solution_filtered_decision_builders_.resize( FirstSolutionStrategy_Value_Value_ARRAYSIZE, nullptr); - DecisionBuilder* const finalize_solution = CreateSolutionFinalizer( - search_parameters, GetOrCreateLargeNeighborhoodSearchLimit()); + DecisionBuilder* const finalize_solution = + CreateSolutionFinalizer(search_parameters); // Default heuristic first_solution_decision_builders_ [FirstSolutionStrategy::FIRST_UNBOUND_MIN_VALUE] = finalize_solution; @@ -6265,8 +6265,8 @@ LocalSearchPhaseParameters* RoutingModel::CreateLocalSearchParameters( } return solver_->MakeLocalSearchPhaseParameters( CostVar(), ls_operator, - solver_->MakeSolveOnce( - CreateSolutionFinalizer(search_parameters, lns_limit), lns_limit), + solver_->MakeSolveOnce(CreateSolutionFinalizer(search_parameters), + lns_limit), GetOrCreateLocalSearchLimit(), GetOrCreateLocalSearchFilterManager( search_parameters, @@ -6283,9 +6283,8 @@ DecisionBuilder* RoutingModel::CreatePrimaryLocalSearchDecisionBuilder( SearchLimit* first_solution_lns_limit = GetOrCreateFirstSolutionLargeNeighborhoodSearchLimit(); DecisionBuilder* const first_solution_sub_decision_builder = - solver_->MakeSolveOnce( - CreateSolutionFinalizer(search_parameters, first_solution_lns_limit), - first_solution_lns_limit); + solver_->MakeSolveOnce(CreateSolutionFinalizer(search_parameters), + first_solution_lns_limit); if (CostsAreHomogeneousAcrossVehicles()) { return solver_->MakeLocalSearchPhase(nexts_, first_solution, first_solution_sub_decision_builder, @@ -6311,9 +6310,8 @@ void RoutingModel::SetupDecisionBuilders( GetOrCreateFirstSolutionLargeNeighborhoodSearchLimit(); solve_db_ = solver_->Compose( GetFirstSolutionDecisionBuilder(search_parameters), - solver_->MakeSolveOnce( - CreateSolutionFinalizer(search_parameters, first_lns_limit), - first_lns_limit)); + solver_->MakeSolveOnce(CreateSolutionFinalizer(search_parameters), + first_lns_limit)); } else { solve_db_ = 
CreatePrimaryLocalSearchDecisionBuilder(search_parameters); } @@ -6334,15 +6332,13 @@ void RoutingModel::SetupDecisionBuilders( CreateLocalSearchParameters(search_parameters, /*secondary_ls=*/true)); secondary_ls_db_ = solver_->Compose(restore_preassignment, secondary_ls_db_); - restore_assignment_ = solver_->Compose( - solver_->MakeRestoreAssignment(GetOrCreateAssignment()), - CreateSolutionFinalizer(search_parameters, - GetOrCreateLargeNeighborhoodSearchLimit())); + restore_assignment_ = + solver_->Compose(solver_->MakeRestoreAssignment(GetOrCreateAssignment()), + CreateSolutionFinalizer(search_parameters)); restore_tmp_assignment_ = solver_->Compose( restore_preassignment, solver_->MakeRestoreAssignment(GetOrCreateTmpAssignment()), - CreateSolutionFinalizer(search_parameters, - GetOrCreateLargeNeighborhoodSearchLimit())); + CreateSolutionFinalizer(search_parameters)); } void RoutingModel::SetupMetaheuristics( diff --git a/ortools/constraint_solver/routing.h b/ortools/constraint_solver/routing.h index c8847ef4f2f..2c8355daba0 100644 --- a/ortools/constraint_solver/routing.h +++ b/ortools/constraint_solver/routing.h @@ -2504,7 +2504,7 @@ class OR_DLL RoutingModel { LocalSearchFilterManager* GetOrCreateLocalSearchFilterManager( const RoutingSearchParameters& parameters, const FilterOptions& options); DecisionBuilder* CreateSolutionFinalizer( - const RoutingSearchParameters& parameters, SearchLimit* lns_limit); + const RoutingSearchParameters& parameters); void CreateFirstSolutionDecisionBuilders( const RoutingSearchParameters& search_parameters); DecisionBuilder* GetFirstSolutionDecisionBuilder( diff --git a/ortools/constraint_solver/routing_decision_builders.cc b/ortools/constraint_solver/routing_decision_builders.cc index 266c397aabe..fce48978460 100644 --- a/ortools/constraint_solver/routing_decision_builders.cc +++ b/ortools/constraint_solver/routing_decision_builders.cc @@ -558,12 +558,11 @@ class SetCumulsFromGlobalDimensionCosts : public DecisionBuilder { 
SetCumulsFromGlobalDimensionCosts( GlobalDimensionCumulOptimizer* global_optimizer, GlobalDimensionCumulOptimizer* global_mp_optimizer, - SearchMonitor* monitor, bool optimize_and_pack, + bool optimize_and_pack, std::vector dimension_travel_info_per_route) : global_optimizer_(global_optimizer), global_mp_optimizer_(global_mp_optimizer), - monitor_(monitor), optimize_and_pack_(optimize_and_pack), dimension_travel_info_per_route_( std::move(dimension_travel_info_per_route)), @@ -698,7 +697,6 @@ class SetCumulsFromGlobalDimensionCosts : public DecisionBuilder { GlobalDimensionCumulOptimizer* const global_optimizer_; GlobalDimensionCumulOptimizer* const global_mp_optimizer_; - SearchMonitor* const monitor_; const bool optimize_and_pack_; std::vector cp_variables_; std::vector cp_values_; @@ -720,12 +718,11 @@ class SetCumulsFromGlobalDimensionCosts : public DecisionBuilder { DecisionBuilder* MakeSetCumulsFromGlobalDimensionCosts( Solver* solver, GlobalDimensionCumulOptimizer* global_optimizer, - GlobalDimensionCumulOptimizer* global_mp_optimizer, SearchMonitor* monitor, - bool optimize_and_pack, + GlobalDimensionCumulOptimizer* global_mp_optimizer, bool optimize_and_pack, std::vector dimension_travel_info_per_route) { return solver->RevAlloc(new SetCumulsFromGlobalDimensionCosts( - global_optimizer, global_mp_optimizer, monitor, optimize_and_pack, + global_optimizer, global_mp_optimizer, optimize_and_pack, std::move(dimension_travel_info_per_route))); } diff --git a/ortools/constraint_solver/routing_decision_builders.h b/ortools/constraint_solver/routing_decision_builders.h index f7ba230a349..1a200262942 100644 --- a/ortools/constraint_solver/routing_decision_builders.h +++ b/ortools/constraint_solver/routing_decision_builders.h @@ -46,7 +46,7 @@ DecisionBuilder* MakeSetCumulsFromLocalDimensionCosts( /// Variant based on global optimizers, handling all routes together. 
DecisionBuilder* MakeSetCumulsFromGlobalDimensionCosts( Solver* solver, GlobalDimensionCumulOptimizer* global_optimizer, - GlobalDimensionCumulOptimizer* global_mp_optimizer, SearchMonitor* monitor, + GlobalDimensionCumulOptimizer* global_mp_optimizer, bool optimize_and_pack = false, std::vector dimension_travel_info_per_route = {}); diff --git a/ortools/flatzinc/BUILD.bazel b/ortools/flatzinc/BUILD.bazel index 77ea81e445f..7f432402370 100644 --- a/ortools/flatzinc/BUILD.bazel +++ b/ortools/flatzinc/BUILD.bazel @@ -32,6 +32,7 @@ cc_library( "//ortools/base:stl_util", "//ortools/util:logging", "//ortools/util:string_array", + "@abseil-cpp//absl/base:core_headers", "@abseil-cpp//absl/container:flat_hash_map", "@abseil-cpp//absl/container:flat_hash_set", "@abseil-cpp//absl/log", diff --git a/ortools/flatzinc/model.cc b/ortools/flatzinc/model.cc index a7ae81f56d5..bcc1ffd4781 100644 --- a/ortools/flatzinc/model.cc +++ b/ortools/flatzinc/model.cc @@ -20,6 +20,7 @@ #include #include +#include "absl/base/optimization.h" #include "absl/container/flat_hash_set.h" #include "absl/log/check.h" #include "absl/log/log.h" @@ -615,7 +616,7 @@ std::string Argument::DebugString() const { return absl::StrFormat("[%s]", absl::StrJoin(floats, ", ")); } LOG(FATAL) << "Unhandled case in DebugString " << static_cast(type); - return ""; + ABSL_UNREACHABLE(); } bool Argument::IsVariable() const { return type == VAR_REF; } @@ -634,14 +635,12 @@ int64_t Argument::Value() const { case INT_INTERVAL: case INT_LIST: return values[0]; - case VAR_REF: { + case VAR_REF: return variables[0]->domain.values[0]; - } - default: { - LOG(FATAL) << "Should not be here"; - return 0; - } + default: + break; } + ABSL_UNREACHABLE(); } bool Argument::IsArrayOfValues() const { @@ -679,24 +678,22 @@ bool Argument::IsArrayOfValues() const { case FLOAT_LIST: return false; } + ABSL_UNREACHABLE(); } bool Argument::Contains(int64_t value) const { switch (type) { - case Argument::INT_LIST: { + case Argument::INT_LIST: 
return std::find(values.begin(), values.end(), value) != values.end(); - } - case Argument::INT_INTERVAL: { + case Argument::INT_INTERVAL: return value >= values.front() && value <= values.back(); - } - case Argument::INT_VALUE: { + case Argument::INT_VALUE: return value == values.front(); - } - default: { - LOG(FATAL) << "Cannot call Contains() on " << DebugString(); - return false; - } + default: + break; } + + ABSL_UNREACHABLE(); } int64_t Argument::ValueAt(int pos) const { @@ -715,11 +712,10 @@ int64_t Argument::ValueAt(int pos) const { CHECK_LT(pos, variables.size()); return variables[pos]->domain.Value(); } - default: { - LOG(FATAL) << "Should not be here"; - return 0; - } + default: + break; } + ABSL_UNREACHABLE(); } bool Argument::HasOneValueAt(int pos) const { @@ -738,11 +734,10 @@ bool Argument::HasOneValueAt(int pos) const { CHECK_LT(pos, variables.size()); return variables[pos]->domain.HasOneValue(); } - default: { - LOG(FATAL) << "Should not be here"; - return false; - } + default: + break; } + ABSL_UNREACHABLE(); } Variable* Argument::Var() const { @@ -769,11 +764,10 @@ int Argument::Size() const { case FLOAT_LIST: { return floats.size(); } - default: { - LOG(FATAL) << "Should not be here"; - return 0; - } + default: + break; } + ABSL_UNREACHABLE(); } // ----- Variable ----- @@ -943,27 +937,20 @@ void Annotation::AppendAllVariables(std::vector* const vars) const { std::string Annotation::DebugString() const { switch (type) { - case ANNOTATION_LIST: { + case ANNOTATION_LIST: return absl::StrFormat("[%s]", JoinDebugString(annotations, ", ")); - } - case IDENTIFIER: { + case IDENTIFIER: return id; - } - case FUNCTION_CALL: { + case FUNCTION_CALL: return absl::StrFormat("%s(%s)", id, JoinDebugString(annotations, ", ")); - } - case INTERVAL: { + case INTERVAL: return absl::StrFormat("%d..%d", interval_min, interval_max); - } - case INT_VALUE: { + case INT_VALUE: return absl::StrCat(interval_min); - } - case INT_LIST: { + case INT_LIST: return 
absl::StrFormat("[%s]", absl::StrJoin(values, ", ")); - } - case VAR_REF: { + case VAR_REF: return variables.front()->name; - } case VAR_REF_ARRAY: { std::string result = "["; for (int i = 0; i < variables.size(); ++i) { @@ -972,12 +959,11 @@ std::string Annotation::DebugString() const { } return result; } - case STRING_VALUE: { + case STRING_VALUE: return absl::StrFormat("\"%s\"", string_value); - } } LOG(FATAL) << "Unhandled case in DebugString " << static_cast(type); - return ""; + ABSL_UNREACHABLE(); } // ----- SolutionOutputSpecs ----- diff --git a/ortools/graph/minimum_spanning_tree.h b/ortools/graph/minimum_spanning_tree.h index 36b15972b1d..6d1d4436d07 100644 --- a/ortools/graph/minimum_spanning_tree.h +++ b/ortools/graph/minimum_spanning_tree.h @@ -50,7 +50,6 @@ BuildKruskalMinimumSpanningTreeFromSortedArcs( const Graph& graph, absl::Span sorted_arcs) { using ArcIndex = typename Graph::ArcIndex; - using NodeIndex = typename Graph::NodeIndex; const int num_arcs = graph.num_arcs(); int arc_index = 0; std::vector tree_arcs; diff --git a/ortools/sat/BUILD.bazel b/ortools/sat/BUILD.bazel index c45d5c87da7..c1c447036c0 100644 --- a/ortools/sat/BUILD.bazel +++ b/ortools/sat/BUILD.bazel @@ -1793,6 +1793,7 @@ cc_library( "@abseil-cpp//absl/container:flat_hash_set", "@abseil-cpp//absl/container:inlined_vector", "@abseil-cpp//absl/functional:any_invocable", + "@abseil-cpp//absl/functional:function_ref", "@abseil-cpp//absl/log", "@abseil-cpp//absl/log:check", "@abseil-cpp//absl/log:vlog_is_on", @@ -4325,6 +4326,7 @@ cc_library( ":sat_base", ":synchronization", ":util", + "//ortools/base:strong_vector", "//ortools/util:bitset", "@abseil-cpp//absl/algorithm:container", "@abseil-cpp//absl/container:flat_hash_map", @@ -4366,6 +4368,7 @@ cc_library( "//ortools/base:intops", "//ortools/base:timer", "@abseil-cpp//absl/container:flat_hash_map", + "@abseil-cpp//absl/container:flat_hash_set", "@abseil-cpp//absl/flags:flag", "@abseil-cpp//absl/log", 
"@abseil-cpp//absl/log:check", @@ -4744,6 +4747,7 @@ cc_library( "@abseil-cpp//absl/container:flat_hash_map", "@abseil-cpp//absl/container:flat_hash_set", "@abseil-cpp//absl/container:node_hash_map", + "@abseil-cpp//absl/functional:function_ref", "@abseil-cpp//absl/log", "@abseil-cpp//absl/log:check", "@abseil-cpp//absl/strings", diff --git a/ortools/sat/clause.cc b/ortools/sat/clause.cc index b1fa8b454c1..f0ef8f42ce6 100644 --- a/ortools/sat/clause.cc +++ b/ortools/sat/clause.cc @@ -18,6 +18,7 @@ #include #include #include +#include #include #include #include @@ -28,6 +29,7 @@ #include "absl/container/flat_hash_map.h" #include "absl/container/flat_hash_set.h" #include "absl/container/inlined_vector.h" +#include "absl/functional/function_ref.h" #include "absl/log/check.h" #include "absl/log/log.h" #include "absl/log/vlog_is_on.h" @@ -319,7 +321,7 @@ bool ClauseManager::AddClause(ClauseId id, absl::Span literals, if (id != kNoClauseId) { clause_id_[clause] = id; } - if (add_clause_callback_ != nullptr) add_clause_callback_(lbd, literals); + if (add_clause_callback_ != nullptr) add_clause_callback_(lbd, id, literals); return AttachAndPropagate(clause, trail); } @@ -331,7 +333,7 @@ SatClause* ClauseManager::AddRemovableClause(ClauseId id, if (id != kNoClauseId) { clause_id_[clause] = id; } - if (add_clause_callback_ != nullptr) add_clause_callback_(lbd, literals); + if (add_clause_callback_ != nullptr) add_clause_callback_(lbd, id, literals); CHECK(AttachAndPropagate(clause, trail)); // Create an entry in clauses_info_ to mark that clause as removable. 
@@ -725,24 +727,20 @@ ClauseId ClauseManager::ReasonClauseId(Literal literal) const { void ClauseManager::AppendClauseIdsFixing( absl::Span literals, std::vector* clause_ids, LiteralIndex decision, - absl::flat_hash_map, ClauseId>* - additional_binary_clause_ids) { + std::optional> root_literals) { SCOPED_TIME_STAT(&stats_); const auto& assignment = trail_->Assignment(); - // Mark the literals whose reason must be expanded, and compute their min and - // max trail index. + // Mark the literals whose reason must be expanded, and put them in a heap. tmp_mark_.ClearAndResize(BooleanVariable(trail_->NumVariables())); - int trail_index = 0; - int min_trail_index = trail_->Index(); + marked_trail_indices_heap_.clear(); for (const Literal lit : literals) { CHECK(assignment.LiteralIsAssigned(lit)); - const int var_trail_index = trail_->Info(lit.Variable()).trail_index; - trail_index = std::max(trail_index, var_trail_index); - min_trail_index = std::min(min_trail_index, var_trail_index); tmp_mark_.Set(lit.Variable()); + marked_trail_indices_heap_.push_back( + trail_->Info(lit.Variable()).trail_index); } - + absl::c_make_heap(marked_trail_indices_heap_); const int current_level = trail_->CurrentDecisionLevel(); // The min level of the expanded literals. @@ -755,14 +753,11 @@ void ClauseManager::AppendClauseIdsFixing( non_unit_clause_ids.clear(); const auto& decisions = trail_->Decisions(); - while (true) { - // Find next marked literal to expand from the trail. 
- while (trail_index >= min_trail_index && - !tmp_mark_[(*trail_)[trail_index].Variable()]) { - --trail_index; - } - if (trail_index < min_trail_index) break; - const Literal marked_literal = (*trail_)[trail_index--]; + while (!marked_trail_indices_heap_.empty()) { + absl::c_pop_heap(marked_trail_indices_heap_); + const int trail_index = marked_trail_indices_heap_.back(); + marked_trail_indices_heap_.pop_back(); + const Literal marked_literal = (*trail_)[trail_index]; // Stop at decisions, at literals fixed at root, and at literals implied by // the decision at their level. @@ -779,12 +774,8 @@ void ClauseManager::AppendClauseIdsFixing( const Literal level_decision = decisions[level - 1].literal; ClauseId clause_id = implication_graph_->GetClauseId( level_decision.Negated(), marked_literal); - if (clause_id == kNoClauseId && additional_binary_clause_ids != nullptr) { - const auto it = additional_binary_clause_ids->find( - std::minmax(level_decision.Negated(), marked_literal)); - if (it != additional_binary_clause_ids->end()) { - clause_id = it->second; - } + if (clause_id == kNoClauseId && root_literals.has_value()) { + clause_id = (*root_literals)(level, trail_index); } if (clause_id != kNoClauseId) { non_unit_clause_ids.push_back(clause_id); @@ -795,10 +786,11 @@ void ClauseManager::AppendClauseIdsFixing( for (const Literal literal : trail_->Reason(marked_literal.Variable())) { const BooleanVariable var = literal.Variable(); if (!tmp_mark_[var]) { - tmp_mark_.Set(var); const AssignmentInfo& info = trail_->Info(var); + tmp_mark_.Set(var); if (info.level > 0) { - min_trail_index = std::min(min_trail_index, info.trail_index); + marked_trail_indices_heap_.push_back(info.trail_index); + absl::c_push_heap(marked_trail_indices_heap_); } else { clause_ids->push_back(trail_->GetUnitClauseId(var)); } diff --git a/ortools/sat/clause.h b/ortools/sat/clause.h index a69fc4bbb9d..d254e9ba28e 100644 --- a/ortools/sat/clause.h +++ b/ortools/sat/clause.h @@ -20,6 +20,7 @@ #include 
#include #include +#include #include #include #include @@ -28,6 +29,7 @@ #include "absl/container/flat_hash_map.h" #include "absl/container/flat_hash_set.h" #include "absl/functional/any_invocable.h" +#include "absl/functional/function_ref.h" #include "absl/log/check.h" #include "absl/types/span.h" #include "ortools/base/strong_vector.h" @@ -400,14 +402,14 @@ class ClauseManager : public SatPropagator { } void SetAddClauseCallback( - absl::AnyInvocable)> + absl::AnyInvocable)> add_clause_callback) { add_clause_callback_ = std::move(add_clause_callback); } // Removes the add clause callback and returns it. This can be used to // temporarily disable the callback. - absl::AnyInvocable)> + absl::AnyInvocable)> TakeAddClauseCallback() { return std::move(add_clause_callback_); } @@ -427,20 +429,23 @@ class ClauseManager : public SatPropagator { // previous one on the trail. // // This method expands the reasons of each literal recursively until a - // decision, or a literal implied by the decision at its decision level, is - // found. The latter criterion avoids a quadratic complexity when implications - // of the form "decision => literal" are added for each newly propagated - // literal after taking a decision (provided these implications are added to - // the binary implication graph right away, in trail index order). + // decision, or a literal implied by the decision at its decision level, or a + // literal for which `root_literals` returns a value other than kNoClauseId, + // is found. The latter criteria avoid a quadratic complexity when + // implications of the form "decision(s) => literal" are added for each newly + // propagated literal after taking a decision (provided these implications are + // added to the binary implication graph or to the `root_literals` function + // right away, in trail index order). 
// - // If `additional_binary_clause_ids` is not null, it is used to look for - // existing binary clauses if they are not found in the binary implication - // graph. + // `root_literals` must take a decision level and a trail index as parameter + // (the level is the assignment level of this trail index). It must return the + // ID of the clause stating that the literal at this index is fixed by + // previous decision(s), if the reason expansion should be stopped here + // (otherwise it should return kNoClauseId). void AppendClauseIdsFixing( absl::Span literals, std::vector* clause_ids, LiteralIndex decision = kNoLiteralIndex, - absl::flat_hash_map, ClauseId>* - additional_binary_clause_ids = nullptr); + std::optional> root_literals = {}); private: // Attaches the given clause. This eventually propagates a literal which is @@ -506,10 +511,11 @@ class ClauseManager : public SatPropagator { // Temporary member used when adding LRAT inferred clauses. std::vector clause_ids_scratchpad_; - absl::AnyInvocable)> + absl::AnyInvocable)> add_clause_callback_ = nullptr; SparseBitset tmp_mark_; + std::vector marked_trail_indices_heap_; std::vector tmp_clause_ids_for_append_clauses_fixing_; }; diff --git a/ortools/sat/cp_model_lns.cc b/ortools/sat/cp_model_lns.cc index cb8f06cf923..bed16c6dee6 100644 --- a/ortools/sat/cp_model_lns.cc +++ b/ortools/sat/cp_model_lns.cc @@ -2123,7 +2123,6 @@ Neighborhood GenerateSchedulingNeighborhoodFromIntervalPrecedences( } // Fix the presence/absence of unseen intervals. 
- bool enforcement_literals_fixed = false; for (const int i : helper.TypeToConstraints(ConstraintProto::kInterval)) { if (seen_intervals.contains(i)) continue; @@ -2146,7 +2145,6 @@ Neighborhood GenerateSchedulingNeighborhoodFromIntervalPrecedences( neighborhood.delta.mutable_variables(enforcement_var)->clear_domain(); neighborhood.delta.mutable_variables(enforcement_var)->add_domain(value); neighborhood.delta.mutable_variables(enforcement_var)->add_domain(value); - enforcement_literals_fixed = true; } for (const std::pair& prec : precedences) { diff --git a/ortools/sat/cp_model_presolve.cc b/ortools/sat/cp_model_presolve.cc index 852305a8724..e8ebda8e3a7 100644 --- a/ortools/sat/cp_model_presolve.cc +++ b/ortools/sat/cp_model_presolve.cc @@ -4003,11 +4003,8 @@ void CpModelPresolver::ProcessOneLinearWithAmo(int ct_index, if (non_boolean_domain == Domain(0) && rhs.NumIntervals() == 1 && min_magnitude < max_magnitude) { int64_t min_activity = 0; - int64_t max_activity = 0; for (const auto [ref, coeff] : tmp_terms_) { - if (coeff > 0) { - max_activity += coeff; - } else { + if (coeff <= 0) { min_activity += coeff; } } @@ -8092,7 +8089,6 @@ void CpModelPresolver::RunPropagatorsForConstraint(const ConstraintProto& ct) { auto* implication_graph = model.GetOrCreate(); auto* trail = model.GetOrCreate(); - int num_equiv = 0; int num_changed_bounds = 0; int num_fixed_bools = 0; for (int var = 0; var < variable_mapping.size(); ++var) { @@ -8111,7 +8107,6 @@ void CpModelPresolver::RunPropagatorsForConstraint(const ConstraintProto& ct) { // Add Boolean equivalence relations. 
const Literal r = implication_graph->RepresentativeOf(l); if (r != l) { - ++num_equiv; const int r_var = mapping->GetProtoVariableFromBooleanVariable(r.Variable()); if (r_var < 0) continue; @@ -8753,6 +8748,8 @@ bool CpModelPresolver::PresolvePureSatPart() { options.use_transitive_reduction = false; options.deterministic_time_limit = context_->params().presolve_probing_deterministic_time_limit(); + options.use_equivalence_sat_sweeping = + context_->params().inprocessing_use_sat_sweeping(); auto* inprocessing = local_model.GetOrCreate(); inprocessing->ProvideLogger(logger_); diff --git a/ortools/sat/cp_model_solver_helpers.cc b/ortools/sat/cp_model_solver_helpers.cc index 7f72d8e16bc..c3286e1a120 100644 --- a/ortools/sat/cp_model_solver_helpers.cc +++ b/ortools/sat/cp_model_solver_helpers.cc @@ -1081,10 +1081,12 @@ void RegisterClausesExport(int id, SharedClausesManager* shared_clauses_manager, model->GetOrCreate()->share_glue_clauses_dtime(); auto* clause_stream = model->GetOrCreate(); auto* time_limit = model->GetOrCreate(); - auto share_clause = [mapping, clause_stream, time_limit, id, - shared_clauses_manager, share_interval, + auto* lrat_proof_handler = model->Mutable(); + auto share_clause = [mapping, clause_stream, time_limit, lrat_proof_handler, + id, shared_clauses_manager, share_interval, next_batch_dtime = -1.0, clause = std::vector()]( - int lbd, absl::Span literals) mutable { + int lbd, ClauseId clause_id, + absl::Span literals) mutable { if (literals.size() >= UniqueClauseStream::kMinClauseSize && literals.size() <= UniqueClauseStream::kMaxClauseSize) { clause.clear(); @@ -1094,7 +1096,9 @@ void RegisterClausesExport(int id, SharedClausesManager* shared_clauses_manager, if (var == -1) return; clause.push_back(lit.IsPositive() ? 
var : NegatedRef(var)); } - clause_stream->Add(clause, lbd); + if (clause_stream->Add(clause, lbd) && lrat_proof_handler != nullptr) { + lrat_proof_handler->ExportClause(clause_id, literals); + } } const double elapsed_dtime = time_limit->GetElapsedDeterministicTime(); if (next_batch_dtime < 0) next_batch_dtime = elapsed_dtime + share_interval; diff --git a/ortools/sat/integer_search.cc b/ortools/sat/integer_search.cc index ac7550ffd36..737a08615f1 100644 --- a/ortools/sat/integer_search.cc +++ b/ortools/sat/integer_search.cc @@ -810,7 +810,6 @@ std::function CumulativePrecedenceSearchHeuristic( int next_end = 0; int next_start = 0; - int num_added = 0; bool found = false; while (!found && next_end < num_tasks) { IntegerValue time = by_emin[next_end].time; @@ -843,7 +842,6 @@ std::function CumulativePrecedenceSearchHeuristic( if (added_demand[t] == -1) continue; // Corner case. const IntegerValue demand_min = h.demand_helper->DemandMin(t); if (current_height + demand_min <= capacity_max) { - ++num_added; added_demand[t] = demand_min; current_height += demand_min; } else if (first_skipped_task == -1) { diff --git a/ortools/sat/lrat.proto b/ortools/sat/lrat.proto index 000a26736b6..cd0be01ca4f 100644 --- a/ortools/sat/lrat.proto +++ b/ortools/sat/lrat.proto @@ -52,6 +52,18 @@ message LratInferredClause { repeated int64 unit_ids = 2 [packed = true]; } repeated RatInfo rat_infos = 4; + + // Whether the clause must be exported, so that other workers can import it (a + // clause cannot be imported if it is not previously exported). This is not + // needed for unary and binary clauses, which are always exported. + optional bool exported = 5; +} + +// A clause to export, so that it can be imported from any worker. This is not +// needed for unary and binary clauses, which are always exported. +message LratExportedClause { + optional int64 clause_id = 1; + repeated int32 literals = 2 [packed = true]; } // A list of clauses to delete. 
@@ -65,11 +77,13 @@ message LratDeletedClauses { // and ending with the empty clause. At each step new clauses can be inferred // from previous ones (with an explicit proof), or imported from another proof // built by another thread. A proof step can also delete clauses which are no -// longer needed. Each clause is identified by a unique clause ID. +// longer needed, or export a clause for other workers to import. Each clause is +// identified by a unique clause ID. message LratProofStep { oneof step { LratImportedClause imported_clause = 1; LratInferredClause inferred_clause = 2; - LratDeletedClauses deleted_clauses = 3; + LratExportedClause exported_clause = 3; + LratDeletedClauses deleted_clauses = 4; } } diff --git a/ortools/sat/lrat_checker.cc b/ortools/sat/lrat_checker.cc index 91419224e61..fbdaee466b4 100644 --- a/ortools/sat/lrat_checker.cc +++ b/ortools/sat/lrat_checker.cc @@ -44,6 +44,7 @@ void LratChecker::AddStats() const { {"LratChecker/num_processed_rat_clauses", num_processed_rat_clauses_}, {"LratChecker/num_unneeded_rat_literals", num_unneeded_rat_literals_}, {"LratChecker/num_unneeded_rat_clauses", num_unneeded_rat_clauses_}, + {"LratChecker/num_deleted_clauses", num_deleted_clauses_}, {"LratChecker/num_deleted_clauses_not_found", num_deleted_clauses_not_found_}}); } @@ -65,14 +66,17 @@ bool LratChecker::AddInferredClause(ClauseId id, } void LratChecker::DeleteClauses(absl::Span clause_ids) { + ++num_deleted_clauses_; for (const ClauseId clause_id : clause_ids) { const auto it = clauses_.find(clause_id); if (it == clauses_.end()) { ++num_deleted_clauses_not_found_; continue; } - for (const Literal literal : it->second) { - occurrences_[literal]--; + if (occurrences_needed_) { + for (const Literal literal : it->second) { + occurrences_[literal.Index()]--; + } } clauses_.erase(it); } @@ -129,8 +133,19 @@ bool LratChecker::AddClauseInternal(ClauseId id, } } if (!sorted_clause.empty()) { - num_variables_ = - std::max(num_variables_, 
sorted_clause.back().Variable().value() + 1); + const int last_variable = sorted_clause.back().Variable().value(); + if (last_variable >= num_variables_) { + num_variables_ = last_variable + 1; + if (occurrences_needed_) { + occurrences_.resize(2 * num_variables_, 0); + } else if (clause.size() == 1 && unit_ids.empty() && rat.empty()) { + // Early return for unit clauses made of a new variable. The following + // code would validate this proof with the RAT property, but would also + // set `occurrences_needed_` to true, which is unnecessary. + clauses_[id] = std::move(sorted_clause); + return true; + } + } } if (!is_problem_clause) { @@ -164,6 +179,10 @@ bool LratChecker::AddClauseInternal(ClauseId id, // Check if `clause` has the RAT property. if (clause.empty()) return Error(id, "missing pivot for RAT proof"); const Literal pivot = clause.front(); + if (!occurrences_needed_) { + occurrences_needed_ = true; + InitializeOccurrences(); + } if (rat.size() != occurrences_[pivot.Negated()]) { return Error(id, "wrong number of resolvant IDs in RAT proof"); } @@ -235,8 +254,10 @@ bool LratChecker::AddClauseInternal(ClauseId id, } } - for (const Literal literal : sorted_clause) { - occurrences_[literal]++; + if (occurrences_needed_) { + for (const Literal literal : sorted_clause) { + occurrences_[literal.Index()]++; + } } clauses_[id] = std::move(sorted_clause); if (clause.empty()) { @@ -245,6 +266,15 @@ bool LratChecker::AddClauseInternal(ClauseId id, return true; } +void LratChecker::InitializeOccurrences() { + occurrences_.assign(2 * num_variables_, 0); + for (const auto& [id, clause] : clauses_) { + for (const Literal literal : clause) { + occurrences_[literal.Index()]++; + } + } +} + bool LratChecker::Error(ClauseId id, std::string_view error) { if (valid_) { error_message_ = absl::StrCat("In clause ", id, ": ", error); diff --git a/ortools/sat/lrat_checker.h b/ortools/sat/lrat_checker.h index f8d3b35f4ec..c17c0ede332 100644 --- a/ortools/sat/lrat_checker.h +++ 
b/ortools/sat/lrat_checker.h @@ -24,6 +24,7 @@ #include "absl/container/flat_hash_set.h" #include "absl/strings/str_join.h" #include "absl/types/span.h" +#include "ortools/base/strong_vector.h" #include "ortools/sat/model.h" #include "ortools/sat/sat_base.h" #include "ortools/sat/synchronization.h" @@ -114,8 +115,10 @@ class LratChecker { std::string_view error_message() const { return error_message_; } // This can help debugging wrong proof. - absl::Span GetClauseForDebug(ClauseId id) { - return clauses_[id]; + absl::Span GetClauseForDebug(ClauseId id) const { + auto it = clauses_.find(id); + if (it == clauses_.end()) return {}; + return it->second; } private: @@ -124,6 +127,8 @@ class LratChecker { absl::Span unit_ids, absl::Span rat); + void InitializeOccurrences(); + bool Error(ClauseId id, std::string_view error); int num_variables_ = 0; @@ -134,8 +139,10 @@ class LratChecker { // more efficient but their correctness could be harder to trust). absl::flat_hash_map> clauses_; - // The number of clauses in `clauses_` which contain each literal. - absl::flat_hash_map occurrences_; + // The number of clauses in `clauses_` which contain each literal. This is + // initialized only if needed, i.e., when the first RAT proof is needed. + util_intops::StrongVector occurrences_; + bool occurrences_needed_ = false; // Whether all the operations made so far were valid. 
bool valid_ = true; @@ -152,6 +159,7 @@ class LratChecker { int64_t num_processed_rat_clauses_ = 0; int64_t num_unneeded_rat_literals_ = 0; int64_t num_unneeded_rat_clauses_ = 0; + int64_t num_deleted_clauses_ = 0; int64_t num_deleted_clauses_not_found_ = 0; // Whether the proof is complete, i.e., whether the empty clause has been diff --git a/ortools/sat/lrat_proof_handler.cc b/ortools/sat/lrat_proof_handler.cc index 4478b3fe67e..f722c9d9921 100644 --- a/ortools/sat/lrat_proof_handler.cc +++ b/ortools/sat/lrat_proof_handler.cc @@ -70,10 +70,14 @@ LratWriter::LratWriter(std::string_view filename) } } -LratWriter::~LratWriter() { writer_.Close(); } +LratWriter::~LratWriter() { + WriteDeletedClauseIds(); + writer_.Close(); +} void LratWriter::AddImportedClause(ClauseId id, absl::Span clause) { + WriteDeletedClauseIds(); LratProofStep step; LratImportedClause* imported_clause = step.mutable_imported_clause(); imported_clause->set_clause_id(id.value()); @@ -86,7 +90,9 @@ void LratWriter::AddImportedClause(ClauseId id, void LratWriter::AddInferredClause(ClauseId id, absl::Span clause, absl::Span unit_ids, - absl::Span rat) { + absl::Span rat, + bool exported) { + WriteDeletedClauseIds(); LratProofStep step; LratInferredClause* inferred_clause = step.mutable_inferred_clause(); inferred_clause->set_clause_id(id.value()); @@ -103,13 +109,32 @@ void LratWriter::AddInferredClause(ClauseId id, rat_info->add_unit_ids(unit_id.value()); } } + inferred_clause->set_exported(exported); + CHECK(writer_.WriteRecord(step)); +} + +void LratWriter::ExportClause(ClauseId id, absl::Span clause) { + WriteDeletedClauseIds(); + LratProofStep step; + LratExportedClause* exported_clause = step.mutable_exported_clause(); + exported_clause->set_clause_id(id.value()); + for (const Literal literal : clause) { + exported_clause->add_literals(literal.Index().value()); + } CHECK(writer_.WriteRecord(step)); } void LratWriter::DeleteClause(ClauseId id) { + deleted_clause_ids_.push_back(id); +} + +void 
LratWriter::WriteDeletedClauseIds() { + if (deleted_clause_ids_.empty()) return; LratProofStep step; - step.mutable_deleted_clauses()->add_clause_ids(id.value()); + step.mutable_deleted_clauses()->mutable_clause_ids()->Add( + deleted_clause_ids_.begin(), deleted_clause_ids_.end()); CHECK(writer_.WriteRecord(step)); + deleted_clause_ids_.clear(); } namespace { @@ -164,6 +189,7 @@ bool LratMerger::Merge(absl::Span proof_filenames) { std::vector> readers(num_workers); last_read_steps_.resize(num_workers); local_to_global_ids_.resize(num_workers); + exported_local_ids_.resize(num_workers); for (int i = 0; i < num_workers; ++i) { const std::string& filename = proof_filenames[i + 1]; inputs[i].open(filename, std::ios::binary); @@ -205,17 +231,44 @@ bool LratMerger::Merge(absl::Span proof_filenames) { *step.mutable_inferred_clause())) { return false; } - clause.clear(); - IndicesToLiterals(step.inferred_clause().literals(), &clause); - std::sort(clause.begin(), clause.end()); - shared_clause_ids_.insert( - {clause, GlobalId(step.inferred_clause().clause_id())}); if (!WriteInferredClause(step.inferred_clause())) return false; // We found the empty clause, we don't need anymore steps. 
if (step.inferred_clause().literals().empty()) return true; + if (step.inferred_clause().exported() || + step.inferred_clause().literals_size() <= 2) { + clause.clear(); + IndicesToLiterals(step.inferred_clause().literals(), &clause); + SortAndAddSharedClause( + GlobalId(step.inferred_clause().clause_id()), clause); + exported_local_ids_[i].insert( + ClauseId(step.inferred_clause().clause_id())); + } break; + case LratProofStep::kExportedClause: { + const ClauseId local_id(step.exported_clause().clause_id()); + auto it = local_to_global_ids_[i].find(local_id); + if (it == local_to_global_ids_[i].end()) { + return Error(absl::StrCat("unknown exported clause ID ", local_id, + " in ", filename)); + } + const GlobalId global_id = it->second; + IndicesToLiterals(step.exported_clause().literals(), &clause); + SortAndAddSharedClause(global_id, clause); + exported_local_ids_[i].insert(local_id); + break; + } case LratProofStep::kDeletedClauses: - // TODO(user): implement this case. + for (const int64_t clause_id : + step.deleted_clauses().clause_ids()) { + const ClauseId local_id(clause_id); + if (exported_local_ids_[i].contains(local_id)) { + // TODO(user): implement this case. We should delete the + // clause from `shared_clause_ids_`, but only after we are sure + // that no other worker will ever import it. 
+ } else { + local_to_global_ids_[i].erase(local_id); + } + } break; default: return Error(absl::StrCat("unknown step type ", step.step_case(), @@ -257,6 +310,7 @@ bool LratMerger::ReadPresolveProof(const std::string& filename) { RecordReader reader(&input); LratProofStep step; std::vector clause; + absl::flat_hash_map> shared_clauses; GlobalId max_global_id(0); while (reader.ReadRecord(&step)) { switch (step.step_case()) { @@ -264,7 +318,8 @@ bool LratMerger::ReadPresolveProof(const std::string& filename) { GlobalId global_id(step.imported_clause().clause_id()); max_global_id = std::max(max_global_id, global_id); IndicesToLiterals(step.imported_clause().literals(), &clause); - SortAndAddSharedClause(global_id, clause); + std::sort(clause.begin(), clause.end()); + shared_clauses[global_id] = clause; if (lrat_checker_ != nullptr && !lrat_checker_->AddProblemClause(ClauseId(global_id.value()), clause)) { @@ -276,18 +331,32 @@ bool LratMerger::ReadPresolveProof(const std::string& filename) { GlobalId global_id(step.inferred_clause().clause_id()); max_global_id = std::max(max_global_id, global_id); IndicesToLiterals(step.inferred_clause().literals(), &clause); - SortAndAddSharedClause(global_id, clause); + std::sort(clause.begin(), clause.end()); + shared_clauses[global_id] = clause; if (!WriteInferredClause(step.inferred_clause())) return false; break; } + case LratProofStep::kExportedClause: { + // Nothing to do, since we export all clauses in the presolve proof. + break; + } case LratProofStep::kDeletedClauses: - // TODO(user): implement this. 
+ for (const int64_t clause_id : step.deleted_clauses().clause_ids()) { + const GlobalId global_id(clause_id); + auto it = shared_clauses.find(global_id); + if (it != shared_clauses.end()) { + shared_clauses.erase(it); + } + } break; default: return Error(absl::StrCat("unknown proof step type ", step.step_case(), " in ", filename)); } } + for (const auto& [global_id, clause] : shared_clauses) { + shared_clause_ids_.insert({clause, global_id}); + } next_global_id_ = ++max_global_id; return true; } @@ -494,7 +563,7 @@ void LratProofHandler::EndProblemClauses() { bool LratProofHandler::AddInferredClause( ClauseId id, absl::Span clause, absl::Span unit_ids, - absl::Span rat) { + absl::Span rat, bool exported) { VLOG(2) << "AddInferredClause: id=" << id << " literals=" << absl::StrJoin(clause, ",") << " unit_ids=" << absl::StrJoin(unit_ids, ",") << " rat={" @@ -515,7 +584,7 @@ bool LratProofHandler::AddInferredClause( } } if (lrat_writer_ != nullptr) { - lrat_writer_->AddInferredClause(id, clause, unit_ids, rat); + lrat_writer_->AddInferredClause(id, clause, unit_ids, rat, exported); } if (drat_writer_ != nullptr) { drat_writer_->AddClause(clause); @@ -562,6 +631,16 @@ bool LratProofHandler::AddAssumedClause(ClauseId id, return true; } +bool LratProofHandler::ExportClause(ClauseId id, + absl::Span clause) { + VLOG(2) << "ExportClause: id=" << id + << " literals=" << absl::StrJoin(clause, ","); + if (lrat_writer_ != nullptr) { + lrat_writer_->ExportClause(id, clause); + } + return true; +} + void LratProofHandler::PinClause(ClauseId id, absl::Span clause) { DCHECK_NE(id, kNoClauseId); diff --git a/ortools/sat/lrat_proof_handler.h b/ortools/sat/lrat_proof_handler.h index a80772378c0..66b57c7f4a9 100644 --- a/ortools/sat/lrat_proof_handler.h +++ b/ortools/sat/lrat_proof_handler.h @@ -23,6 +23,9 @@ #include #include "absl/container/flat_hash_map.h" +#include "absl/container/flat_hash_set.h" +#include "absl/log/check.h" +#include "absl/strings/string_view.h" #include 
"absl/types/span.h" #include "ortools/base/strong_int.h" #include "ortools/sat/drat_checker.h" @@ -50,14 +53,20 @@ class LratWriter { void AddInferredClause(ClauseId id, absl::Span clause, absl::Span unit_ids, - absl::Span rat = {}); + absl::Span rat, + bool exported = false); + + void ExportClause(ClauseId id, absl::Span clause); void DeleteClause(ClauseId id); private: + void WriteDeletedClauseIds(); + std::string filename_; std::ofstream ofstream_; RecordWriter writer_; + std::vector deleted_clause_ids_; }; // Merges separate LRAT proofs into a single LRAT file in ASCII format. @@ -118,6 +127,7 @@ class LratMerger { absl::flat_hash_map, GlobalId> shared_clause_ids_; std::vector> local_to_global_ids_; + std::vector> exported_local_ids_; std::vector last_read_steps_; std::vector tmp_clause_; @@ -151,7 +161,13 @@ class LratProofHandler { // previously inferred clauses. See LratChecker for more details. bool AddInferredClause(ClauseId id, absl::Span clause, absl::Span unit_ids, - absl::Span rat = {}); + bool exported = false) { + return AddInferredClause(id, clause, unit_ids, {}, exported); + } + bool AddInferredClause(ClauseId id, absl::Span clause, + absl::Span unit_ids, + absl::Span rat, + bool exported = false); // This assumes that the 'new_clause' to prove and all the ones needed for the // proof only touch a small number of variables (<= 6). It will then prove the @@ -170,9 +186,9 @@ class LratProofHandler { absl::Span ids_for_proof, const CompactVectorVector& clauses_for_proof); - // Adds a clause which was inferred by another worker. Returns true if - // successful (the operation can fail if LRAT checks are enabled, and the ID - // is already used by another clause). + // Adds a clause which was inferred and exported by another worker. Returns + // true if successful (the operation can fail if LRAT checks are enabled, and + // the ID is already used by another clause). 
bool AddImportedClause(ClauseId id, absl::Span clause); // Adds a clause which is assumed to be true, without proof. Returns true if @@ -180,6 +196,13 @@ class LratProofHandler { // checks are enabled and the ID is already used by another clause). bool AddAssumedClause(ClauseId id, absl::Span clause); + // Exports a clause so that it can be imported by other workers. If you know + // whether a clause must be exported when it is inferred, it is more efficient + // to use the `exported` parameter of AddInferredClause(). `id` and `clause` + // must be the ID and the literals of a previously added clause. This is not + // needed for unary and binary clauses, which are always exported. + bool ExportClause(ClauseId id, absl::Span clause); + // Prevents the given clause from being deleted, until UnpinClause() is called // with the same ID. At most one clause can be pinned at any time. void PinClause(ClauseId id, absl::Span clause); @@ -206,7 +229,7 @@ class LratProofHandler { void Close(bool model_is_unsat); // This can be helpful to debug wrong proof, but shouldn't be used otherwise. - absl::Span GetLratClauseForDebug(ClauseId id) { + absl::Span GetLratClauseForDebug(ClauseId id) const { CHECK(lrat_checker_ != nullptr); return lrat_checker_->GetClauseForDebug(id); } diff --git a/ortools/sat/probing.cc b/ortools/sat/probing.cc index 2b3d6d86f2e..79ffa761e81 100644 --- a/ortools/sat/probing.cc +++ b/ortools/sat/probing.cc @@ -110,15 +110,19 @@ bool Prober::ProbeOneVariableInternal(BooleanVariable b) { integer_trail_->AppendNewBounds(&new_integer_bounds_); to_fix_at_true_.clear(); new_literals_implied_by_decision_.clear(); + new_implied_or_fixed_literals_.clear(); for (int i = saved_index + 1; i < trail_.Index(); ++i) { const Literal l = trail_[i]; - // We mark on the first run (b.IsPositive()) and check on the second. + // We mark on the first pass (b.IsPositive()) and check on the second. 
if (decision.IsPositive()) { propagated_.Set(l.Index()); } else { if (propagated_[l]) { to_fix_at_true_.push_back(l); + if (lrat_proof_handler_ != nullptr) { + new_implied_or_fixed_literals_.push_back(l); + } } } @@ -132,21 +136,49 @@ bool Prober::ProbeOneVariableInternal(BooleanVariable b) { } else if (l != decision) { new_literals_implied_by_decision_.push_back(l); } + if (lrat_proof_handler_ != nullptr && l != decision) { + new_implied_or_fixed_literals_.push_back(l); + } } - // TODO(user): it might be possible to generate less temporary LRAT - // clauses by adding them in a third iteration instead of in the first - // one, once we know if there are any literal to fix. - // Otherwise, since we always add binary, the reason should be retrievable - // from the binary implication graph alone. So we might just need a - // MarkDescendant() + parent inspection for this. - if (lrat_proof_handler_ != nullptr) { + } + + if (lrat_proof_handler_ != nullptr) { + auto add_tmp_implication = [&](const Literal decision, const Literal l) { tmp_clause_ids_.clear(); clause_manager_->AppendClauseIdsFixing( - {l}, &tmp_clause_ids_, decision.Index(), &tmp_binary_clause_ids_); + {l}, &tmp_clause_ids_, decision.Index(), + [&](int level, int trail_index) { + const Literal decision = trail_.Decisions()[level - 1].literal; + const Literal lit = trail_[trail_index]; + const auto it = tmp_binary_clause_ids_.find( + std::minmax(decision.Negated(), lit)); + if (it != tmp_binary_clause_ids_.end()) return it->second; + return kNoClauseId; + }); const ClauseId clause_id = clause_id_generator_->GetNextId(); lrat_proof_handler_->AddInferredClause( clause_id, {decision.Negated(), l}, tmp_clause_ids_); tmp_binary_clause_ids_[std::minmax(decision.Negated(), l)] = clause_id; + num_lrat_clauses_++; + num_lrat_proof_clauses_ += tmp_clause_ids_.size(); + }; + for (const Literal l : new_implied_or_fixed_literals_) { + add_tmp_implication(decision, l); + } + if (decision.IsNegative() && 
!to_fix_at_true_.empty()) { + // Redo the first pass to add the LRAT clauses b => to_fix_at_true. + if (!sat_solver_->ResetToLevelZero()) return false; + if (assignment_.LiteralIsAssigned(decision)) continue; + CHECK_EQ(sat_solver_->CurrentDecisionLevel(), 0); + if (sat_solver_->EnqueueDecisionAndBackjumpOnConflict( + decision.Negated()) == kUnsatTrailIndex) { + return false; + } + if (sat_solver_->ModelIsUnsat()) return false; + if (sat_solver_->CurrentDecisionLevel() == 0) continue; + for (const Literal l : to_fix_at_true_) { + add_tmp_implication(decision.Negated(), l); + } } } @@ -164,6 +196,8 @@ bool Prober::ProbeOneVariableInternal(BooleanVariable b) { tmp_binary_clause_ids_.at(std::minmax(decision, l))); } lrat_proof_handler_->AddInferredClause(clause_id, {l}, clause_ids); + num_lrat_clauses_++; + num_lrat_proof_clauses_ += clause_ids.size(); } if (!clause_manager_->InprocessingAddUnitClause(clause_id, l)) { return false; @@ -204,6 +238,7 @@ bool Prober::ProbeOneVariableInternal(BooleanVariable b) { binary_clause.second) != clause_id) { lrat_proof_handler_->DeleteClause( clause_id, {binary_clause.first, binary_clause.second}); + num_unneeded_lrat_clauses_++; } } } @@ -310,6 +345,9 @@ bool Prober::ProbeBooleanVariables( num_new_holes_ = 0; num_new_integer_bounds_ = 0; num_new_literals_fixed_ = 0; + num_lrat_clauses_ = 0; + num_lrat_proof_clauses_ = 0; + num_unneeded_lrat_clauses_ = 0; // Resize the propagated sparse bitset. 
const int num_variables = sat_solver_->NumVariables(); @@ -378,6 +416,18 @@ bool Prober::ProbeBooleanVariables( SOLVER_LOG(logger_, "[Probing] - new binary clause: ", FormatCounter(num_new_binary_)); } + if (num_lrat_clauses_ > 0) { + SOLVER_LOG(logger_, "[Probing] - new LRAT clauses: ", + FormatCounter(num_lrat_clauses_)); + } + if (num_lrat_proof_clauses_ > 0) { + SOLVER_LOG(logger_, "[Probing] - new LRAT proof clauses: ", + FormatCounter(num_lrat_proof_clauses_)); + } + if (num_unneeded_lrat_clauses_ > 0) { + SOLVER_LOG(logger_, "[Probing] - unneeded LRAT clauses: ", + FormatCounter(num_unneeded_lrat_clauses_)); + } } return true; diff --git a/ortools/sat/probing.h b/ortools/sat/probing.h index 57572f4c1e9..5ef528c409d 100644 --- a/ortools/sat/probing.h +++ b/ortools/sat/probing.h @@ -169,6 +169,7 @@ class Prober { std::vector to_fix_at_true_; std::vector new_integer_bounds_; std::vector new_literals_implied_by_decision_; + std::vector new_implied_or_fixed_literals_; absl::btree_set new_propagated_literals_; absl::btree_set always_propagated_literals_; absl::btree_map new_propagated_bounds_; @@ -186,6 +187,9 @@ class Prober { int num_new_binary_ = 0; int num_new_integer_bounds_ = 0; int num_new_literals_fixed_ = 0; + int num_lrat_clauses_ = 0; + int num_lrat_proof_clauses_ = 0; + int num_unneeded_lrat_clauses_ = 0; std::function callback_ = nullptr; diff --git a/ortools/sat/sat_inprocessing.cc b/ortools/sat/sat_inprocessing.cc index 584ecea4cb3..d2c85d929ce 100644 --- a/ortools/sat/sat_inprocessing.cc +++ b/ortools/sat/sat_inprocessing.cc @@ -143,6 +143,22 @@ bool Inprocessing::PresolveLoop(SatPresolveOptions options) { continue; } + // SAT sweeping has a small dtime limit, so do it before other heuristics + // exhaust our budget. 
+ if (options.use_equivalence_sat_sweeping && + stop_dtime > time_limit_->GetElapsedDeterministicTime()) { + auto inner_model_inprocessing = [&](Model* inner_model) { + inner_model->GetOrCreate() + ->set_inprocessing_use_sat_sweeping(false); + inner_model->GetOrCreate()->InprocessingRound(); + }; + RETURN_IF_FALSE(LevelZeroPropagate()); + RETURN_IF_FALSE( + equivalence_sat_sweeping_->DoOneRound(inner_model_inprocessing)); + RETURN_IF_FALSE(LevelZeroPropagate()); + implication_graph_->RemoveAllRedundantVariables(&postsolve_->clauses); + } + // TODO(user): Think about the right order in this function. if (params_.inprocessing_use_congruence_closure()) { RETURN_IF_FALSE(RemoveFixedAndEquivalentVariables(log_round_info)); @@ -856,36 +872,41 @@ void StampingSimplifier::SampleTreeAndFillParent() { bool StampingSimplifier::ComputeStamps() { const int size = implication_graph_->literal_size(); + // Adjacency list representation of the parents_ tree. + util_intops::StrongVector sizes; + util_intops::StrongVector starts; + std::vector children; + // Compute sizes. - sizes_.assign(size, 0); + sizes.assign(size, 0); for (LiteralIndex i(0); i < size; ++i) { if (parents_[i] == i) continue; // leaf. - sizes_[parents_[i]]++; + sizes[parents_[i]]++; } // Compute starts in the children_ vector for each node. - starts_.resize(size + 1); // We use a sentinel. - starts_[LiteralIndex(0)] = 0; + starts.resize(size + 1); // We use a sentinel. + starts[LiteralIndex(0)] = 0; for (LiteralIndex i(1); i <= size; ++i) { - starts_[i] = starts_[i - 1] + sizes_[i - 1]; + starts[i] = starts[i - 1] + sizes[i - 1]; } // Fill children. This messes up starts_. - children_.resize(size); + children.resize(size); for (LiteralIndex i(0); i < size; ++i) { if (parents_[i] == i) continue; // leaf. - children_[starts_[parents_[i]]++] = i; + children[starts[parents_[i]]++] = i; } // Reset starts to correct value. 
for (LiteralIndex i(0); i < size; ++i) { - starts_[i] -= sizes_[i]; + starts[i] -= sizes[i]; } if (DEBUG_MODE) { - CHECK_EQ(starts_[LiteralIndex(0)], 0); + CHECK_EQ(starts[LiteralIndex(0)], 0); for (LiteralIndex i(1); i <= size; ++i) { - CHECK_EQ(starts_[i], starts_[i - 1] + sizes_[i - 1]); + CHECK_EQ(starts[i], starts[i - 1] + sizes[i - 1]); } } @@ -933,11 +954,11 @@ bool StampingSimplifier::ComputeStamps() { } } - const int end = starts_[top + 1]; // Ok with sentinel. - for (int j = starts_[top]; j < end; ++j) { - DCHECK_NE(top, children_[j]); // We removed leaf self-loop. - DCHECK(!marked_[children_[j]]); // This is a tree. - dfs_stack_.push_back(children_[j]); + const int end = starts[top + 1]; // Ok with sentinel. + for (int j = starts[top]; j < end; ++j) { + DCHECK_NE(top, children[j]); // We removed leaf self-loop. + DCHECK(!marked_[children[j]]); // This is a tree. + dfs_stack_.push_back(children[j]); } } } diff --git a/ortools/sat/sat_inprocessing.h b/ortools/sat/sat_inprocessing.h index 1090a2bf23d..a3c3d21848b 100644 --- a/ortools/sat/sat_inprocessing.h +++ b/ortools/sat/sat_inprocessing.h @@ -91,6 +91,8 @@ struct SatPresolveOptions { // possible reduction. This shouldn't matter if we use the binary implication // graph and its reachability instead of just binary clause though. bool use_transitive_reduction = false; + + bool use_equivalence_sat_sweeping = false; }; // We need to keep some information from one call to the next, so we use a @@ -276,11 +278,6 @@ class StampingSimplifier { // Encode a spanning tree of the implication graph. util_intops::StrongVector parents_; - // Adjacency list representation of the parents_ tree. - util_intops::StrongVector sizes_; - util_intops::StrongVector starts_; - std::vector children_; - // Temporary data for the DFS. 
util_intops::StrongVector marked_; std::vector dfs_stack_; diff --git a/ortools/sat/sat_parameters.proto b/ortools/sat/sat_parameters.proto index 55c2b4c7b4a..b7ebe2454cf 100644 --- a/ortools/sat/sat_parameters.proto +++ b/ortools/sat/sat_parameters.proto @@ -802,12 +802,11 @@ message SatParameters { // If true, inferred clauses are checked with an LRAT checker as they are // learned, in presolve (reduced to trivial simplifications if - // cp_model_presolve is false), and in each worker. As of November 2025, this + // cp_model_presolve is false), and in each worker. As of December 2025, this // only works with pure SAT problems, with // - cp_model_presolve = false, // - linearization_level <= 1, - // - symmetry_level <= 1, - // - shared_tree_num_workers = 0. + // - symmetry_level <= 1. optional bool check_lrat_proof = 344 [default = false]; // If true, and if output_lrat_proof is true and the problem is UNSAT, check @@ -820,12 +819,11 @@ message SatParameters { // If true, an LRAT proof that all the clauses inferred by the solver are // valid is output to several files (one for presolve -- reduced to trivial // simplifications if cp_model_presolve is false, one per worker, and one for - // the merged proof). As of November 2025, this only works for pure SAT + // the merged proof). As of December 2025, this only works for pure SAT // problems, with // - cp_model_presolve = false, // - linearization_level <= 1, - // - symmetry_level <= 1, - // - shared_tree_num_workers = 0. + // - symmetry_level <= 1. optional bool output_lrat_proof = 345 [default = false]; // If true, and if the problem is UNSAT, a DRAT proof of this UNSAT property @@ -838,7 +836,7 @@ message SatParameters { optional bool check_drat_proof = 346 [default = false]; // If true, a DRAT proof that all the clauses inferred by the solver are valid - // is output to a file. As of November 2025, this only works for pure SAT + // is output to a file. 
As of December 2025, this only works for pure SAT // problems, with // - num_workers = 1, // - cp_model_presolve = false, diff --git a/ortools/sat/sat_sweeping.cc b/ortools/sat/sat_sweeping.cc index ecc82f7bf07..6571731e286 100644 --- a/ortools/sat/sat_sweeping.cc +++ b/ortools/sat/sat_sweeping.cc @@ -170,7 +170,11 @@ bool EquivalenceSatSweeping::DoOneRound( // TODO(user): consider doing several neighborhoods to amortize the cost of // building the variable->clause graph. CHECK_EQ(sat_solver_->CurrentDecisionLevel(), 0); - if (sat_solver_->AssumptionLevel() != 0) return true; + if (sat_solver_->AssumptionLevel() != 0) { + VLOG(2) + << "Assumption level is not 0 (should not happen), skipping sweeping."; + return true; + } clauses_.clear(); struct ExtractedClausesHelper { @@ -198,7 +202,10 @@ bool EquivalenceSatSweeping::DoOneRound( ExtractedClausesHelper helper(clauses_, max_num_boolean_variables_); if (!sat_solver_->ExtractClauses(&helper)) return false; - if (clauses_.empty()) return true; + if (clauses_.empty()) { + VLOG(2) << "No clauses extracted, skipping sweeping."; + return true; + } const int num_vars = sat_solver_->NumVariables(); @@ -207,79 +214,96 @@ bool EquivalenceSatSweeping::DoOneRound( }; var_to_clauses_.ResetFromTransposeMap(clauses_, num_vars); - BooleanVariable boolean_for_neighborhood; - { - int tries = 0; - constexpr int kMaxTries = 10; - for (tries = 0; tries < kMaxTries; ++tries) { - boolean_for_neighborhood = absl::Uniform(*random_, 0, num_vars); - if (var_to_clauses_[boolean_for_neighborhood].size() < 2) continue; - const Literal positive_lit(boolean_for_neighborhood, true); - if (implication_graph_->RepresentativeOf(positive_lit) != positive_lit) { - continue; + global_time_limit_->AdvanceDeterministicTime(clause_manager_->num_clauses() * + 1.0e-7); + TimeLimit sweep_time_limit; + sweep_time_limit.ChangeDeterministicLimit(1.0); + sweep_time_limit.MergeWithGlobalTimeLimit(global_time_limit_); + std::vector> binary_clauses; + std::vector 
unary_clauses; + for (int i = 0; i < 50; ++i) { + BooleanVariable boolean_for_neighborhood; + { + int tries = 0; + constexpr int kMaxTries = 10; + for (tries = 0; tries < kMaxTries; ++tries) { + boolean_for_neighborhood = absl::Uniform(*random_, 0, num_vars); + if (var_to_clauses_[boolean_for_neighborhood].size() < 2) continue; + const Literal positive_lit(boolean_for_neighborhood, true); + if (implication_graph_->RepresentativeOf(positive_lit) != + positive_lit) { + continue; + } + break; } - break; + if (tries == kMaxTries) continue; } - if (tries == kMaxTries) return true; - } - const std::vector> neighborhood = - GetNeighborhood(boolean_for_neighborhood); - - if (neighborhood.empty()) return true; - - CompactVectorVector neighborhood_clauses; - big_model_to_small_model_.clear(); - small_model_to_big_model_.clear(); - for (const absl::Span clause : neighborhood) { - neighborhood_clauses.Add({}); - for (const Literal l : clause) { - const BooleanVariable new_var(big_model_to_small_model_.size()); - auto [it, inserted] = - big_model_to_small_model_.insert({l.Variable(), new_var}); - if (inserted) { - small_model_to_big_model_.push_back(l.Variable()); + const std::vector> neighborhood = + GetNeighborhood(boolean_for_neighborhood); + + if (neighborhood.empty()) { + VLOG(2) << "Neighborhood is empty for " << boolean_for_neighborhood; + continue; + } + + CompactVectorVector neighborhood_clauses; + big_model_to_small_model_.clear(); + small_model_to_big_model_.clear(); + for (const absl::Span clause : neighborhood) { + neighborhood_clauses.Add({}); + for (const Literal l : clause) { + const BooleanVariable new_var(big_model_to_small_model_.size()); + auto [it, inserted] = + big_model_to_small_model_.insert({l.Variable(), new_var}); + if (inserted) { + small_model_to_big_model_.push_back(l.Variable()); + } + neighborhood_clauses.AppendToLastVector( + Literal(it->second, l.IsPositive())); } - neighborhood_clauses.AppendToLastVector( - Literal(it->second, 
l.IsPositive())); } - } - TimeLimit sweep_time_limit; - sweep_time_limit.ChangeDeterministicLimit(1.0); - sweep_time_limit.MergeWithGlobalTimeLimit(global_time_limit_); - const SatSweepingResult result = DoFullSatSweeping( - neighborhood_clauses, &sweep_time_limit, run_inprocessing); - global_time_limit_->AdvanceDeterministicTime( - sweep_time_limit.GetElapsedDeterministicTime()); + const SatSweepingResult result = DoFullSatSweeping( + neighborhood_clauses, &sweep_time_limit, run_inprocessing); - if (result.status == SatSolver::INFEASIBLE) { - sat_solver_->NotifyThatModelIsUnsat(); - return false; + if (result.status == SatSolver::INFEASIBLE) { + sat_solver_->NotifyThatModelIsUnsat(); + return false; + } + for (const auto& [l1, l2] : result.binary_clauses) { + const Literal mapped_l1 = + Literal(small_model_to_big_model_[l1.Variable()], l1.IsPositive()); + const Literal mapped_l2 = + Literal(small_model_to_big_model_[l2.Variable()], l2.IsPositive()); + if (implication_graph_->IsRemoved(mapped_l1) || + implication_graph_->IsRemoved(mapped_l2)) { + continue; + } + binary_clauses.push_back({mapped_l1, mapped_l2}); + } + for (const Literal l : result.unary_clauses) { + const Literal mapped_l = + Literal(small_model_to_big_model_[l.Variable()], l.IsPositive()); + if (implication_graph_->IsRemoved(mapped_l)) continue; + unary_clauses.push_back(mapped_l); + } + if (result.status == SatSolver::LIMIT_REACHED) { + break; + } } - if (result.binary_clauses.empty() && result.unary_clauses.empty()) { + global_time_limit_->AdvanceDeterministicTime( + sweep_time_limit.GetElapsedDeterministicTime()); + if (binary_clauses.empty() && unary_clauses.empty()) { return true; } + // TODO(user): find out why this is necessary. 
clause_manager_->DetachAllClauses(); - for (const auto& [l1, l2] : result.binary_clauses) { - const Literal mapped_l1 = - Literal(small_model_to_big_model_[l1.Variable()], l1.IsPositive()); - const Literal mapped_l2 = - Literal(small_model_to_big_model_[l2.Variable()], l2.IsPositive()); - if (implication_graph_->IsRemoved(mapped_l1) || - implication_graph_->IsRemoved(mapped_l2)) { - continue; - } - clause_manager_->InprocessingAddClause({mapped_l1, mapped_l2}); + for (const auto& [l1, l2] : binary_clauses) { + if (!implication_graph_->AddBinaryClause(l1, l2)) return false; } - for (const Literal l : result.unary_clauses) { - const Literal mapped_l = - Literal(small_model_to_big_model_[l.Variable()], l.IsPositive()); - if (implication_graph_->IsRemoved(mapped_l)) continue; - const ClauseId new_clause_id = clause_id_generator_->GetNextId(); - if (!clause_manager_->InprocessingAddUnitClause(new_clause_id, mapped_l)) { - return false; - } + for (const Literal l : unary_clauses) { + if (!implication_graph_->FixLiteral(l, {})) return false; } return true; } @@ -512,7 +536,7 @@ SatSweepingResult DoFullSatSweeping( } result.binary_clauses.resize(new_binary_clauses_size); - VLOG(1) << "num_booleans: " << num_variables + VLOG(2) << "num_booleans: " << num_variables << " num_clauses: " << clauses.size() << " num_partitions: " << num_partitions << " num_unary_clauses: " << result.unary_clauses.size() diff --git a/ortools/sat/sat_sweeping.h b/ortools/sat/sat_sweeping.h index 21597e1d0bf..d7eb0a0cd7f 100644 --- a/ortools/sat/sat_sweeping.h +++ b/ortools/sat/sat_sweeping.h @@ -54,7 +54,6 @@ class EquivalenceSatSweeping { : sat_solver_(model->GetOrCreate()), implication_graph_(model->GetOrCreate()), clause_manager_(model->GetOrCreate()), - clause_id_generator_(model->GetOrCreate()), global_time_limit_(model->GetOrCreate()), random_(model->GetOrCreate()) {} @@ -71,12 +70,11 @@ class EquivalenceSatSweeping { SatSolver* sat_solver_; BinaryImplicationGraph* implication_graph_; 
ClauseManager* clause_manager_; - ClauseIdGenerator* clause_id_generator_; TimeLimit* global_time_limit_; ModelRandomGenerator* random_; - int max_num_clauses_ = 32000; - int max_num_boolean_variables_ = 1000; + int max_num_clauses_ = 52000; + int max_num_boolean_variables_ = 2000; // We compute the occurrence graph once at the beginning of each round. util_intops::StrongVector> clauses_; diff --git a/ortools/sat/scheduling_helpers.cc b/ortools/sat/scheduling_helpers.cc index ec8e2ff3c15..6ae244be5c3 100644 --- a/ortools/sat/scheduling_helpers.cc +++ b/ortools/sat/scheduling_helpers.cc @@ -567,7 +567,21 @@ void SchedulingConstraintHelper::AddReasonForBeingBeforeAssumingNoOverlap( } } - // We will explain StartMax(before) < EndMin(after); + // We prefer to explain StartMax(before) < EndMin(after), but this is false + // for zero-size intervals. For example, imagine two tasks: + // t1: start=0, end=0, size=0 + // t2: start=0, end=[0-1], size=[0-1] + // We can say that t1 is "before" t2, but StartMax(t1) == EndMin(t2) == 0. + if (SizeMin(before) <= 0) { + // Encode the straightforward expression End(before) - Start(after) <= 0. + const auto [expr, ub] = + EncodeDifferenceLowerThan(ends_[before], starts_[after], 0); + if (linear2_bounds_->UpperBound(expr) <= ub) { + linear2_bounds_->AddReasonForUpperBoundLowerThan( + expr, ub, &literal_reason_, &integer_reason_); + return; + } + } DCHECK_LT(StartMax(before), EndMin(after)); // The reason will be a linear expression greater than a value. 
Note that all diff --git a/ortools/sat/synchronization.cc b/ortools/sat/synchronization.cc index 57ba577a376..36bc3a5a627 100644 --- a/ortools/sat/synchronization.cc +++ b/ortools/sat/synchronization.cc @@ -31,6 +31,7 @@ #include #include +#include "absl/algorithm/container.h" #include "ortools/base/logging.h" #include "ortools/base/timer.h" #include "ortools/sat/drat_checker.h" @@ -38,7 +39,6 @@ #include "ortools/base/helpers.h" #include "ortools/base/options.h" #endif // __PORTABLE_PLATFORM__ -#include "absl/algorithm/container.h" #include "absl/base/thread_annotations.h" #include "absl/container/btree_map.h" #include "absl/container/flat_hash_map.h" @@ -1239,9 +1239,8 @@ bool UniqueClauseStream::Add(absl::Span clause, int lbd) { absl::Span replaced_clause = absl::MakeSpan(*buffer).subspan( replaced_clause_id * clause.size(), clause.size()); dropped_literals_since_last_batch_ += clause.size(); - if (HashClause(clause, 2) < HashClause(replaced_clause, 2)) { - std::copy(clause.begin(), clause.end(), replaced_clause.begin()); - } + if (HashClause(clause, 2) >= HashClause(replaced_clause, 2)) return false; + absl::c_copy(clause, replaced_clause.begin()); } return true; } diff --git a/ortools/sat/work_assignment.cc b/ortools/sat/work_assignment.cc index d783b7d55d1..30b1aa94a24 100644 --- a/ortools/sat/work_assignment.cc +++ b/ortools/sat/work_assignment.cc @@ -26,7 +26,7 @@ #include #include -#include "absl/container/flat_hash_set.h" +#include "absl/functional/function_ref.h" #include "absl/log/check.h" #include "absl/log/log.h" #include "absl/strings/str_cat.h" @@ -160,13 +160,20 @@ std::optional ProtoLiteral::EncodeLiteral( return result; } +namespace { +Literal DecodeWithIdentityMapping(const ProtoLiteral& literal) { + const int ref = literal.proto_var(); + return Literal(BooleanVariable(PositiveRef(ref)), RefIsPositive(ref)); +} +} // namespace + ProtoTrail::ProtoTrail() { target_phase_.reserve(kMaxPhaseSize); } void ProtoTrail::PushLevel(const ProtoLiteral& 
decision, IntegerValue objective_lb, int node_id) { CHECK_GT(node_id, 0); - assigned_at_level_[decision] = decision_indexes_.size(); decision_indexes_.push_back(literals_.size()); + assigned_at_level_[decision] = decision_indexes_.size(); literals_.push_back(decision); node_ids_.push_back(node_id); implications_.push_back({}); @@ -201,6 +208,23 @@ void ProtoTrail::SetLevelImplied(int level) { level_to_objective_lbs_.erase(level_to_objective_lbs_.begin() + level - 1); } +void ProtoTrail::NormalizeImplications() { + assigned_at_level_.clear(); + for (int level = 1; level <= MaxLevel(); ++level) { + assigned_at_level_[Decision(level)] = level; + int new_size = 0; + std::vector& implications = MutableImplications(level); + for (int i = 0; i < implications.size(); ++i) { + const ProtoLiteral& implication = implications[i]; + if (!assigned_at_level_.contains(implication)) { + implications[new_size++] = implication; + assigned_at_level_[implication] = level; + } + } + implications.resize(new_size); + } +} + void ProtoTrail::Clear() { decision_indexes_.clear(); literals_.clear(); @@ -217,6 +241,11 @@ void ProtoTrail::SetObjectiveLb(int level, IntegerValue objective_lb) { std::max(objective_lb, level_to_objective_lbs_[level - 1]); } +int ProtoTrail::DecisionNodeId(int level) const { + DCHECK_LE(level, decision_indexes_.size()); + return node_ids_[decision_indexes_[level - 1]]; +} + absl::Span ProtoTrail::NodeIds(int level) const { DCHECK_LE(level, decision_indexes_.size()); int start = level == 0 ? 0 : decision_indexes_[level - 1]; @@ -268,21 +297,38 @@ bool SharedTreeManager::SyncTree(ProtoTrail& path) { path.Clear(); return false; } + DCHECK(CheckLratInvariants()); // We don't rely on these being empty, but we expect them to be. 
DCHECK(to_close_.empty()); DCHECK(to_update_.empty()); + path.NormalizeImplications(); int prev_level = -1; for (const auto& [node, level] : nodes) { if (level == prev_level) { // `node` is implied by the previous decisions in `path`, hence its - // sibling can be closed (using this implication as proof). + // sibling can be closed (using this implication as proof; the implication + // proved by the worker providing `path` must be imported and a new one, + // adapted for the manager, must be inferred from it). Node* sibling = GetSibling(node); ClauseId closing_clause_id = kNoClauseId; if (lrat_proof_handler_ != nullptr) { - closing_clause_id = clause_id_generator_.GetNextId(); - // TODO(user): make sure this clause was really exported first. - lrat_proof_handler_->AddImportedClause(closing_clause_id, - ClosingClause(sibling)); + // For the worker, `node` is implied by all the previous decisions in + // `path`, but for the manager we need an implication clause using the + // non-implied ancestors of `node` in the tree (they can be different + // because the manager and the worker have different views of the tree). + const std::vector inferred_clause = ClosingClause(sibling); + std::vector imported_clause; + std::vector lrat_proof; + for (int l = 1; l <= level + 1; ++l) { + Node* n = l <= level ? GetNode(path.DecisionNodeId(l)) : node; + const Literal decision = DecodeWithIdentityMapping(n->decision); + imported_clause.push_back(l <= level ? 
decision.Negated() : decision); + if (n->implied_and_processed) { + lrat_proof.push_back(GetSibling(n)->closing_clause_id); + } + } + closing_clause_id = AddImportedAndInferredClauses( + imported_clause, inferred_clause, lrat_proof); } to_close_.emplace_back(sibling, closing_clause_id); } else if (level > 0 && node->objective_lb < path.ObjectiveLb(level)) { @@ -294,15 +340,33 @@ bool SharedTreeManager::SyncTree(ProtoTrail& path) { for (const ProtoLiteral& implication : path.Implications(level)) { // Trivial implication, can be ignored. if (IsDecisionOfNodeOrAncestor(implication, node)) continue; - ClauseId clause_id = kNoClauseId; + ClauseId implication_clause_id = kNoClauseId; if (lrat_proof_handler_ != nullptr) { - clause_id = clause_id_generator_.GetNextId(); - lrat_proof_handler_->AddImportedClause( - clause_id, ImplicationClause(node, implication)); + // For the worker, 'implication' is implied by all the previous + // decisions in `path`, but for the manager we need an implication + // clause using the non-implied ancestors of `node` in the tree (they + // can be different because the manager and the worker have different + // views of the tree). 
+ const std::vector inferred_clause = + ImplicationClause(node, implication); + std::vector imported_clause; + std::vector lrat_proof; + for (int l = 1; l <= level; ++l) { + Node* n = GetNode(path.DecisionNodeId(l)); + const Literal decision = DecodeWithIdentityMapping(n->decision); + imported_clause.push_back(decision.Negated()); + if (n->implied_and_processed) { + lrat_proof.push_back(GetSibling(n)->closing_clause_id); + } + } + imported_clause.push_back(DecodeWithIdentityMapping(implication)); + implication_clause_id = AddImportedAndInferredClauses( + imported_clause, inferred_clause, lrat_proof); } auto it = trail_info->implications .emplace(implication.proto_var(), - std::make_pair(implication.lb(), clause_id)) + std::make_pair(implication.lb(), + implication_clause_id)) .first; if (it->second.first < implication.lb()) { it->second.first = implication.lb(); @@ -327,6 +391,7 @@ bool SharedTreeManager::SyncTree(ProtoTrail& path) { } // Sync lower bounds and implications from the shared tree to `path`. AssignLeaf(path, nodes.back().first); + DCHECK(CheckLratInvariants()); return true; } @@ -454,7 +519,7 @@ void SharedTreeManager::ClearTrailInfo(Node* node, bool implications_only) { } } -SharedTreeManager::Node* SharedTreeManager::GetSibling(Node* node) { +SharedTreeManager::Node* SharedTreeManager::GetSibling(const Node* node) const { if (node == nullptr || node->parent == nullptr) return nullptr; if (node->parent->children[0] != node) { return node->parent->children[0]; @@ -492,7 +557,9 @@ SharedTreeManager::Node* SharedTreeManager::MakeSubtree(Node* parent, } void SharedTreeManager::ProcessNodeChanges() { + DCHECK(CheckLratInvariants()); int num_newly_closed = 0; + std::vector newly_implied; while (!to_close_.empty()) { auto [node, closing_clause_id] = to_close_.back(); CHECK_NE(node, nullptr); @@ -518,15 +585,18 @@ void SharedTreeManager::ProcessNodeChanges() { // be closed. We use a new clause only to avoid double deletes in // RestartLockHeld(). 
child_closing_clause_id = clause_id_generator_.GetNextId(); - lrat_proof_handler_->AddInferredClause(child_closing_clause_id, - ClosingClause(child), - {closing_clause_id}); + lrat_proof_handler_->AddInferredClause( + child_closing_clause_id, ClosingClause(child), + {closing_clause_id}, /*exported=*/true); } to_close_.emplace_back(child, child_closing_clause_id); } Node* sibling = GetSibling(node); if (sibling != nullptr) { sibling->implied = true; + if (lrat_proof_handler_ != nullptr) { + newly_implied.push_back(sibling); + } if (!sibling->closed) { break; } @@ -539,7 +609,8 @@ void SharedTreeManager::ProcessNodeChanges() { // closed to prove that the parent can be closed. lrat_proof_handler_->AddInferredClause( closing_clause_id, ClosingClause(parent), - {node->closing_clause_id, sibling->closing_clause_id}); + {node->closing_clause_id, sibling->closing_clause_id}, + /*exported=*/true); } node = parent; } @@ -557,6 +628,7 @@ void SharedTreeManager::ProcessNodeChanges() { " unassigned:", unassigned_leaves_.size(), " restarts:", num_restarts_)); } + DCHECK(CheckLratInvariants()); // TODO(user): We could do resolution here by moving implications that // are true in each child to the parent. 
bool root_updated = false; @@ -568,10 +640,13 @@ void SharedTreeManager::ProcessNodeChanges() { DCHECK(node->children[0] != nullptr); DCHECK(node->children[1] != nullptr); for (Node* child : node->children) { - if (child->implied && child->trail_info != nullptr) { - ProcessImpliedNode(child); + if (child->implied) { + if (child->trail_info != nullptr) { + DCHECK(!child->implied_and_processed); + ProcessImpliedNode(child); + ClearTrailInfo(child); + } child->implied_and_processed = true; - ClearTrailInfo(child); } } IntegerValue child_bound = std::min(node->children[0]->objective_lb, @@ -586,18 +661,28 @@ void SharedTreeManager::ProcessNodeChanges() { shared_response_manager_->UpdateInnerObjectiveBounds( ShortStatus(), nodes_[0].objective_lb, kMaxIntegerValue); } + for (Node* node : newly_implied) { + if (!node->implied_and_processed) { + DCHECK_EQ(node->trail_info, nullptr); + DCHECK_NE(lrat_proof_handler_, nullptr); + ProcessImpliedNode(node); + node->implied_and_processed = true; + } + } // These are shared via SharedBoundsManager, don't duplicate here. ClearTrailInfo(&nodes_[0], /*implications_only=*/true); + DCHECK(CheckLratInvariants()); } // Moves the trail_info implications of `node` to its first non-implied -// ancestor, and removes the newly implied literal from the closing clause of -// `node` and its descendants. +// ancestor, and removes the newly implied literal from the closing and +// implication clauses of `node` and its descendants. void SharedTreeManager::ProcessImpliedNode(Node* node) { - CHECK(node->parent != nullptr && !node->parent->closed); + CHECK(node->parent != nullptr); Node* first_non_implied_ancestor = node->parent; while (first_non_implied_ancestor->trail_info == nullptr) { first_non_implied_ancestor = first_non_implied_ancestor->parent; + DCHECK_NE(first_non_implied_ancestor, nullptr); } // Fast path for the common case where there is no need to add LRAT clauses. 
// The rest of the code is only executed when LRAT is enabled, and assumes a @@ -612,55 +697,99 @@ void SharedTreeManager::ProcessImpliedNode(Node* node) { std::vector clauses; Node* n = node; while (n->parent != nullptr) { - if (n->implied) { + // Newly implied nodes must be removed from the closing and implication + // clauses, which requires a proof (already implied nodes are no longer in + // these clauses, so we don't need a proof for them). + if (n->implied && !n->implied_and_processed) { clauses.push_back(GetSibling(n)->closing_clause_id); } n = n->parent; } std::reverse(clauses.begin(), clauses.end()); // Move the implications of `node` to the first non-implied ancestor. - for (const auto& [var, lb_and_clause] : node->trail_info->implications) { - // This is OK because we assume a pure SAT problem. - if (first_non_implied_ancestor->trail_info->implications.contains(var)) { - continue; + if (node->trail_info != nullptr) { + for (const auto& [var, lb_and_clause] : node->trail_info->implications) { + // This is OK because we assume a pure SAT problem. + if (first_non_implied_ancestor->trail_info->implications.contains(var)) { + continue; + } + const auto [lb, clause_id] = lb_and_clause; + ClauseId new_clause_id = clause_id_generator_.GetNextId(); + clauses.push_back(clause_id); + lrat_proof_handler_->AddInferredClause( + new_clause_id, + ImplicationClause(first_non_implied_ancestor, ProtoLiteral(var, lb), + /*skip_unprocessed_implied_nodes=*/true), + clauses, /*exported=*/true); + clauses.pop_back(); + first_non_implied_ancestor->trail_info->implications.insert( + {var, std::make_pair(lb, new_clause_id)}); } - const auto [lb, clause_id] = lb_and_clause; + } + UpdateLratClausesInSubtree(node, node, clauses); +} + +// Updates the closing clauses and the trail implication clauses of all the +// nodes in the subtree rooted at `node`, to maintain the LRAT invariants. 
+// Recursive method where `n` is a node of the subtree, and `clauses` are the +// clauses needed to infer its updated closing and implication clauses. +// TODO(user): change to a non-recursive implementation? +void SharedTreeManager::UpdateLratClausesInSubtree( + Node* node, Node* n, std::vector& clauses) { + const bool implied_and_not_processed = + n->implied && !n->implied_and_processed; + if (implied_and_not_processed) { + // Newly implied nodes must be removed from the closing and implication + // clauses of `n`, which requires a proof (already implied nodes are no + // longer in these clauses, so we don't need a proof for them). + clauses.push_back(GetSibling(n)->closing_clause_id); + } + if (n->closed) { + DCHECK_NE(n->closing_clause_id, kNoClauseId); ClauseId new_clause_id = clause_id_generator_.GetNextId(); - clauses.push_back(clause_id); + clauses.push_back(n->closing_clause_id); lrat_proof_handler_->AddInferredClause( new_clause_id, - ImplicationClause(first_non_implied_ancestor, ProtoLiteral(var, lb), - /*skip_unprocessed_implied_nodes=*/true), - clauses); + ClosingClause(n, /*skip_unprocessed_implied_nodes=*/true), clauses, + /*exported=*/true); clauses.pop_back(); - first_non_implied_ancestor->trail_info->implications.insert( - {var, std::make_pair(lb, new_clause_id)}); - } - // Update the closing clauses of `node` and its descendants. 
- std::vector to_update; - to_update.push_back(node); - while (!to_update.empty()) { - Node* n = to_update.back(); - to_update.pop_back(); - if (n->closed) { - CHECK_NE(n->closing_clause_id, kNoClauseId); + lrat_proof_handler_->DeleteClause(n->closing_clause_id, {}); + n->closing_clause_id = new_clause_id; + } + if (n != node && n->trail_info != nullptr) { + for (auto& [var, lb_and_clause] : n->trail_info->implications) { + auto& [lb, clause_id] = lb_and_clause; ClauseId new_clause_id = clause_id_generator_.GetNextId(); - clauses.push_back(n->closing_clause_id); + clauses.push_back(clause_id); lrat_proof_handler_->AddInferredClause( new_clause_id, - ClosingClause(n, /*skip_unprocessed_implied_nodes=*/true), clauses); + ImplicationClause(n, ProtoLiteral(var, lb), + /*skip_unprocessed_implied_nodes=*/true), + clauses, /*exported=*/true); + lrat_proof_handler_->DeleteClause(clause_id, {}); + clause_id = new_clause_id; clauses.pop_back(); - lrat_proof_handler_->DeleteClause(n->closing_clause_id, {}); - n->closing_clause_id = new_clause_id; - } else if (!n->implied) { - // We don't need to update the closing clauses of nodes whose parent is - // closed. We can also stop at implied nodes (they will be processed with - // other calls to this method). - for (Node* child : n->children) { - if (child != nullptr) to_update.push_back(child); + } + } + // We can stop at implied but not yet processed nodes (they will be processed + // with further calls to ProcessImpliedNode()). 
+ if (n == node || !(n->implied && n->trail_info != nullptr)) { + for (Node* child : n->children) { + if (child != nullptr && child->parent != nullptr) { + UpdateLratClausesInSubtree(node, child, clauses); } } } + if (implied_and_not_processed) { + clauses.pop_back(); + } +} + +SharedTreeManager::Node* SharedTreeManager::GetNode(int id) { + const int index = id - node_id_offset_; + CHECK_GE(index, 0); + CHECK_LT(index, nodes_.size()); + return &nodes_[index]; } std::vector> @@ -686,29 +815,43 @@ SharedTreeManager::GetAssignedNodes(const ProtoTrail& path) { void SharedTreeManager::CloseTree(ProtoTrail& path, int level) { absl::MutexLock mutex_lock(mu_); + DCHECK(CheckLratInvariants()); const int node_id_to_close = path.NodeIds(level).front(); - path.Clear(); - if (node_id_to_close < node_id_offset_) return; + if (node_id_to_close < node_id_offset_) { + path.Clear(); + return; + } Node* node = &nodes_[node_id_to_close - node_id_offset_]; VLOG(2) << "Closing subtree at level " << level; DCHECK(to_close_.empty()); - ClauseId clause_id = kNoClauseId; + ClauseId closing_clause_id = kNoClauseId; if (lrat_proof_handler_ != nullptr) { - clause_id = clause_id_generator_.GetNextId(); - lrat_proof_handler_->AddImportedClause(clause_id, ClosingClause(node)); + // For the worker providing `path`, `node` is implied by all the previous + // decisions in `path`, but for the manager we need a closing clause using + // `node` and its ancestors in the tree (with implied ones filtered out -- + // they can be different because the manager and the worker have different + // views of the tree). 
+ const std::vector inferred_clause = ClosingClause(node); + std::vector imported_clause; + std::vector lrat_proof; + for (int l = 1; l <= level; ++l) { + Node* n = GetNode(path.DecisionNodeId(l)); + const Literal decision = DecodeWithIdentityMapping(n->decision); + imported_clause.push_back(decision.Negated()); + if (n->implied_and_processed) { + lrat_proof.push_back(GetSibling(n)->closing_clause_id); + } + } + closing_clause_id = AddImportedAndInferredClauses( + imported_clause, inferred_clause, lrat_proof); } - to_close_.emplace_back(node, clause_id); + path.Clear(); + to_close_.emplace_back(node, closing_clause_id); ProcessNodeChanges(); + DCHECK(CheckLratInvariants()); } -namespace { -Literal DecodeWithIdentityMapping(const ProtoLiteral& literal) { - const int ref = literal.proto_var(); - return Literal(BooleanVariable(PositiveRef(ref)), RefIsPositive(ref)); -} -} // namespace - bool SharedTreeManager::IsDecisionOfNodeOrAncestor(ProtoLiteral literal, const Node* node) const { CHECK_NE(node, nullptr); @@ -756,6 +899,37 @@ std::vector SharedTreeManager::ClosingClause( return clause; } +namespace { +bool UnorderedSpansAreEqual(absl::Span a, + absl::Span b) { + if (a.size() != b.size()) return false; + std::vector sorted_a(a.begin(), a.end()); + std::vector sorted_b(b.begin(), b.end()); + std::sort(sorted_a.begin(), sorted_a.end()); + std::sort(sorted_b.begin(), sorted_b.end()); + return sorted_a == sorted_b; +} +} // namespace + +ClauseId SharedTreeManager::AddImportedAndInferredClauses( + absl::Span imported_clause, + absl::Span inferred_clause, + std::vector& lrat_proof) { + const ClauseId id = clause_id_generator_.GetNextId(); + lrat_proof_handler_->AddImportedClause(id, imported_clause); + if (!lrat_proof.empty() || + !UnorderedSpansAreEqual(inferred_clause, imported_clause)) { + lrat_proof.push_back(id); + const ClauseId new_id = clause_id_generator_.GetNextId(); + lrat_proof_handler_->AddInferredClause(new_id, inferred_clause, lrat_proof, + 
/*exported=*/true); + lrat_proof_handler_->DeleteClause(id, {}); + return new_id; + } else { + return id; + } +} + void SharedTreeManager::AssignLeaf(ProtoTrail& path, Node* leaf) { path.Clear(); std::vector reversed_path; @@ -768,9 +942,6 @@ void SharedTreeManager::AssignLeaf(ProtoTrail& path, Node* leaf) { reversed_path.pop_back(); path.PushLevel(leaf->decision, leaf->objective_lb, leaf->id); if (leaf->implied) { - // TODO(user): add LRAT proofs for the shortened implications - // computed in SetLevelImplied(), by using the closing clause of the - // leaf's sibling. path.SetLevelImplied(path.MaxLevel()); } if (params_.shared_tree_worker_enable_trail_sharing() && @@ -825,6 +996,38 @@ std::string SharedTreeManager::ShortStatus() const { " n=", nodes_.size(), ")"); } +namespace { +void CheckEqual(absl::Span a, absl::Span b) { + std::vector sorted_a(a.begin(), a.end()); + std::vector sorted_b(b.begin(), b.end()); + std::sort(sorted_a.begin(), sorted_a.end()); + std::sort(sorted_b.begin(), sorted_b.end()); + CHECK_EQ(sorted_a, sorted_b); +} +} // namespace + +bool SharedTreeManager::CheckLratInvariants() const { + if (lrat_proof_handler_ != nullptr && + lrat_proof_handler_->lrat_check_enabled()) { + for (const Node& node : nodes_) { + if (node.parent == nullptr) continue; + if (node.closed) { + CheckEqual( + lrat_proof_handler_->GetLratClauseForDebug(node.closing_clause_id), + ClosingClause(&node)); + } + if (node.trail_info != nullptr) { + for (const auto& [var, lb_and_clause] : node.trail_info->implications) { + const auto [lb, clause_id] = lb_and_clause; + CheckEqual(lrat_proof_handler_->GetLratClauseForDebug(clause_id), + ImplicationClause(&node, ProtoLiteral(var, lb))); + } + } + } + } + return true; +} + SharedTreeWorker::SharedTreeWorker(Model* model) : parameters_(model->GetOrCreate()), shared_response_(model->GetOrCreate()), @@ -867,6 +1070,18 @@ bool SharedTreeWorker::AddDecisionImplication(Literal lit, int level, if 
(sat_solver_->Assignment().LiteralIsFalse(lit)) { VLOG(2) << "Closing subtree via impl at " << level + 1 << " assigned=" << assigned_tree_.MaxLevel(); + if (lrat_proof_handler_ != nullptr) { + // Use the fact that `reason` implies both `lit` and not(`lit`) to prove + // that the tree can be closed. + const ClauseId closing_clause_id = clause_id_generator_->GetNextId(); + std::vector clause_ids; + clause_manager_->AppendClauseIdsFixing({lit}, &clause_ids); + clause_ids.push_back(clause_id); + lrat_proof_handler_->AddInferredClause(closing_clause_id, + DecisionReason(level), clause_ids, + /*exported=*/true); + lrat_proof_handler_->DeleteClause(closing_clause_id, {}); + } trail_->MutableConflict()->assign(reason.begin(), reason.end()); manager_->CloseTree(assigned_tree_, level); assigned_tree_decisions_.clear(); @@ -910,10 +1125,11 @@ bool SharedTreeWorker::AddImplications() { return true; } } + DCHECK(CheckLratInvariants()); return added_clause; } -void SharedTreeWorker::ClearAssignedTreeAndImplications() { +void SharedTreeWorker::ClearAssignedTreeDecisionsAndImplications() { // Delete all LRAT clauses corresponding to the assigned tree implications, // which are deleted too. Note that there is one LRAT proof per worker. Each // proof uses its local clause IDs, and there is no global clause ID space. @@ -933,7 +1149,12 @@ void SharedTreeWorker::ClearAssignedTreeAndImplications() { } bool SharedTreeWorker::SyncWithLocalTrail() { + DCHECK(CheckLratInvariants()); + std::vector new_implication_trail_indices; while (true) { + if (lrat_proof_handler_ != nullptr) { + trail_implication_clauses_.resize(reversible_trail_index_, kNoClauseId); + } if (!sat_solver_->FinishPropagation()) return false; // Ensure we are at fixed point w.r.t. implications in the tree up to the // current level. 
@@ -947,6 +1168,7 @@ bool SharedTreeWorker::SyncWithLocalTrail() { const int binary_propagator_id = binary_propagator_->PropagatorId(); // Add implications from the local trail to share with other workers. reversible_int_repository_->SaveState(&reversible_trail_index_); + new_implication_trail_indices.clear(); for (int i = trail_->Index() - 1; i >= reversible_trail_index_; --i) { const Literal lit = (*trail_)[i]; const int assignment_type = trail_->AssignmentType(lit.Variable()); @@ -956,13 +1178,26 @@ bool SharedTreeWorker::SyncWithLocalTrail() { if (assignment_type == binary_propagator_id) continue; std::optional encoded = EncodeDecision(lit); if (!encoded.has_value()) continue; - // Add an LRAT inferred clause for the implication, so that other - // workers can import it without proof. - // TODO(user): this can lead to quadratic complexity. Optimize this - // by using the implication proofs for previous literals on the trail to - // shorten the proof for next ones. - AddLratClauseAndProofForImplication(lit, level); - assigned_tree_.AddImplication(level, *encoded); + if (assigned_tree_.AddImplication(level, *encoded) && + lrat_proof_handler_ != nullptr) { + new_implication_trail_indices.push_back(i); + } + } + // Add LRAT inferred clauses for the new implications, so that other + // workers can import them without proof. Do this in increasing trail + // index order, and reuse the previously added clauses to prove the new + // ones (to avoid a quadratic complexity). 
+ if (lrat_proof_handler_ != nullptr) { + trail_implication_clauses_.resize(trail_->Index(), kNoClauseId); + for (int i = new_implication_trail_indices.size() - 1; i >= 0; --i) { + const int new_trail_index = new_implication_trail_indices[i]; + const Literal lit = (*trail_)[new_trail_index]; + trail_implication_clauses_[new_trail_index] = + AddLratClauseAndProofForImplication( + lit, level, [&](int /*level*/, int trail_index) { + return trail_implication_clauses_[trail_index]; + }); + } } reversible_trail_index_ = trail_->Index(); } @@ -983,7 +1218,7 @@ bool SharedTreeWorker::SyncWithLocalTrail() { if (lrat_proof_handler_ != nullptr) { lrat_proof_handler_->DeleteClause(clause_id, {}); } - ClearAssignedTreeAndImplications(); + ClearAssignedTreeDecisionsAndImplications(); sat_solver_->Backtrack(0); } else { // The next level is implied by the current one. @@ -1006,8 +1241,8 @@ bool SharedTreeWorker::SyncWithLocalTrail() { const ClauseId old_id = id; id = clause_id_generator_->GetNextId(); implication.push_back(lit); - lrat_proof_handler_->AddInferredClause(id, implication, - {clause_id, old_id}); + lrat_proof_handler_->AddInferredClause( + id, implication, {clause_id, old_id}, /*exported=*/true); lrat_proof_handler_->DeleteClause(old_id, {}); implication.pop_back(); } @@ -1026,11 +1261,13 @@ bool SharedTreeWorker::SyncWithLocalTrail() { assigned_tree_decisions_.erase(assigned_tree_decisions_.begin() + level); } } + DCHECK(CheckLratInvariants()); return true; } -ClauseId SharedTreeWorker::AddLratClauseAndProofForImplication(Literal literal, - int level) { +ClauseId SharedTreeWorker::AddLratClauseAndProofForImplication( + Literal literal, int level, + std::optional> root_literals) { if (lrat_proof_handler_ == nullptr) return kNoClauseId; CHECK_LE(level, assigned_tree_decisions_.size()); @@ -1038,8 +1275,10 @@ ClauseId SharedTreeWorker::AddLratClauseAndProofForImplication(Literal literal, std::vector& implication = DecisionReason(level); implication.push_back(literal); 
std::vector clause_ids; - clause_manager_->AppendClauseIdsFixing({literal}, &clause_ids); - lrat_proof_handler_->AddInferredClause(clause_id, implication, clause_ids); + clause_manager_->AppendClauseIdsFixing( + {literal}, &clause_ids, /*decision=*/kNoLiteralIndex, root_literals); + lrat_proof_handler_->AddInferredClause(clause_id, implication, clause_ids, + /*exported=*/true); return clause_id; } @@ -1111,7 +1350,9 @@ bool SharedTreeWorker::ShouldReplaceSubtree() { bool SharedTreeWorker::SyncWithSharedTree() { DCHECK_EQ(trail_->CurrentDecisionLevel(), 0); + DCHECK(CheckLratInvariants()); manager_->SyncTree(assigned_tree_); + assigned_tree_.NormalizeImplications(); if (ShouldReplaceSubtree()) { ++num_trees_; VLOG(2) << parameters_->name() << " acquiring tree #" << num_trees_ @@ -1139,6 +1380,7 @@ bool SharedTreeWorker::SyncWithSharedTree() { } } manager_->ReplaceTree(assigned_tree_); + assigned_tree_.NormalizeImplications(); assigned_tree_lbds_.Add(restart_policy_->LbdAverageSinceReset()); restart_policy_->Reset(); earliest_replacement_dtime_ = 0; @@ -1168,7 +1410,7 @@ bool SharedTreeWorker::SyncWithSharedTree() { } VLOG(2) << "Assigned level: " << assigned_tree_.MaxLevel() << " " << parameters_->name(); - ClearAssignedTreeAndImplications(); + ClearAssignedTreeDecisionsAndImplications(); for (int level = 1; level <= assigned_tree_.MaxLevel(); ++level) { assigned_tree_decisions_.push_back( DecodeDecision(assigned_tree_.Decision(level))); @@ -1180,6 +1422,7 @@ bool SharedTreeWorker::SyncWithSharedTree() { } assigned_tree_implications_.push_back(std::move(implications)); } + DCHECK(CheckLratInvariants()); return true; } @@ -1247,4 +1490,18 @@ std::optional SharedTreeWorker::EncodeDecision(Literal decision) { return ProtoLiteral::Encode(decision, mapping_, encoder_); } +bool SharedTreeWorker::CheckLratInvariants() { + if (lrat_proof_handler_ != nullptr && + lrat_proof_handler_->lrat_check_enabled()) { + for (int level = 0; level < assigned_tree_decisions_.size(); 
++level) { + for (auto& [lit, id] : assigned_tree_implications_[level]) { + std::vector& expected = DecisionReason(level + 1); + expected.push_back(lit); + CheckEqual(lrat_proof_handler_->GetLratClauseForDebug(id), expected); + } + } + } + return true; +} + } // namespace operations_research::sat diff --git a/ortools/sat/work_assignment.h b/ortools/sat/work_assignment.h index d5b0411d9d5..8c3b5bc15bf 100644 --- a/ortools/sat/work_assignment.h +++ b/ortools/sat/work_assignment.h @@ -31,6 +31,7 @@ #include "absl/container/flat_hash_map.h" #include "absl/container/flat_hash_set.h" #include "absl/container/node_hash_map.h" +#include "absl/functional/function_ref.h" #include "absl/log/check.h" #include "absl/synchronization/mutex.h" #include "absl/types/span.h" @@ -130,6 +131,9 @@ class ProtoTrail { return literals_[decision_indexes_[level - 1]]; } + // Returns the node ID for the decision at `level`. + int DecisionNodeId(int level) const; + // Returns the node ids for decisions and implications at `level`. absl::Span NodeIds(int level) const; @@ -138,16 +142,21 @@ class ProtoTrail { absl::Span Implications(int level) const; // Adds a literal which is implied by the decisions from level 1 to `level`. - // The caller must add a corresponding LRAT inferred clause first (if LRAT is + // The caller must add a corresponding LRAT inferred clause (if LRAT is // enabled). This implication can then be used by other workers as an LRAT // imported clause, without proof. 
- void AddImplication(int level, ProtoLiteral implication) { + bool AddImplication(int level, ProtoLiteral implication) { auto it = assigned_at_level_.find(implication); - if (it != assigned_at_level_.end() && it->second <= level) return; + if (it != assigned_at_level_.end() && it->second <= level) return false; MutableImplications(level).push_back(implication); assigned_at_level_[implication] = level; + return true; } + // Removes implications that are already assigned at an earlier level, as well + // as duplicate implications at the same level. + void NormalizeImplications(); + IntegerValue ObjectiveLb(int level) const { CHECK_GE(level, 1); return level_to_objective_lbs_[level - 1]; @@ -281,7 +290,7 @@ class SharedTreeManager { std::unique_ptr trail_info; }; bool IsValid(const ProtoTrail& path) const ABSL_EXCLUSIVE_LOCKS_REQUIRED(mu_); - Node* GetSibling(Node* node) ABSL_EXCLUSIVE_LOCKS_REQUIRED(mu_); + Node* GetSibling(const Node* node) const ABSL_EXCLUSIVE_LOCKS_REQUIRED(mu_); // Returns the NodeTrailInfo for `node` or it's closest non-closed, // non-implied ancestor. `node` must be valid, never returns nullptr. 
NodeTrailInfo* GetTrailInfo(Node* node); @@ -294,6 +303,10 @@ class SharedTreeManager { ABSL_EXCLUSIVE_LOCKS_REQUIRED(mu_); void ProcessNodeChanges() ABSL_EXCLUSIVE_LOCKS_REQUIRED(mu_); void ProcessImpliedNode(Node* node) ABSL_EXCLUSIVE_LOCKS_REQUIRED(mu_); + void UpdateLratClausesInSubtree(Node* node, Node* n, + std::vector& clauses) + ABSL_EXCLUSIVE_LOCKS_REQUIRED(mu_); + Node* GetNode(int node_id) ABSL_EXCLUSIVE_LOCKS_REQUIRED(mu_); std::vector> GetAssignedNodes(const ProtoTrail& path) ABSL_EXCLUSIVE_LOCKS_REQUIRED(mu_); void AssignLeaf(ProtoTrail& path, Node* leaf) @@ -322,6 +335,18 @@ class SharedTreeManager { const Node* node, bool skip_unprocessed_implied_nodes = false) const ABSL_EXCLUSIVE_LOCKS_REQUIRED(mu_); + // Adds `imported_clause` to the LRAT proof handler, as well as + // `inferred_clause`, inferred from `imported_clause` with `lrat_proof` (which + // should contain clauses proving that literals removed from `imported_clause` + // can actually be removed). Then deletes `imported_clause` and returns the ID + // of the inferred clause. + ClauseId AddImportedAndInferredClauses( + absl::Span imported_clause, + absl::Span inferred_clause, + std::vector& lrat_proof) ABSL_EXCLUSIVE_LOCKS_REQUIRED(mu_); + + bool CheckLratInvariants() const ABSL_EXCLUSIVE_LOCKS_REQUIRED(mu_); + mutable absl::Mutex mu_; const SatParameters& params_; const int num_workers_; @@ -389,11 +414,13 @@ class SharedTreeWorker { bool AddImplications(); bool AddDecisionImplication(Literal literal, int level, ClauseId clause_id); - void ClearAssignedTreeAndImplications(); + void ClearAssignedTreeDecisionsAndImplications(); // Adds the LRAT inferred clause "assigned tree decisions up to `level` => // `literal`" if `lrat_proof_handler_` is not null. 
- ClauseId AddLratClauseAndProofForImplication(Literal literal, int level); + ClauseId AddLratClauseAndProofForImplication( + Literal literal, int level, + std::optional> root_literals = {}); // Adds the LRAT imported clause "assigned tree decisions up to `level` => // `literal`" if `lrat_proof_handler_` is not null. @@ -401,6 +428,8 @@ class SharedTreeWorker { std::vector& DecisionReason(int level); + bool CheckLratInvariants(); + SatParameters* parameters_; SharedResponseManager* shared_response_; TimeLimit* time_limit_; @@ -434,6 +463,11 @@ class SharedTreeWorker { assigned_tree_implications_; double next_split_dtime_ = 0; + // For each literal on the trail, the ID of the LRAT clause stating that this + // literal is implied by the previous decisions on the trail, or kNoClauseId + // if there is no such clause. + std::vector trail_implication_clauses_; + std::vector tmp_splits_; std::vector reason_; // Stores the average LBD of learned clauses for each tree assigned since it diff --git a/ortools/sat/work_assignment_test.cc b/ortools/sat/work_assignment_test.cc index d1c36acfd6b..5bc4deeccb6 100644 --- a/ortools/sat/work_assignment_test.cc +++ b/ortools/sat/work_assignment_test.cc @@ -136,6 +136,46 @@ TEST(ProtoTrailTest, SetMultiLevelImplied) { EXPECT_EQ(p.ObjectiveLb(1), 2); } +TEST(ProtoTrailTest, + NormalizeImplicationsRemovesImplicationsAlreadyAssignedAtAnEarlierLevel) { + ProtoTrail p; + p.PushLevel({0, 0}, 0, 1); + p.AddImplication(1, {3, 0}); + p.PushLevel({1, 0}, 0, 2); + p.PushLevel({3, 0}, 0, 3); + p.SetLevelImplied(3); + const std::vector implications1_before_normalization = { + p.Implications(1).begin(), p.Implications(1).end()}; + const std::vector implications2_before_normalization = { + p.Implications(2).begin(), p.Implications(2).end()}; + + p.NormalizeImplications(); + + EXPECT_THAT(implications1_before_normalization, + testing::ElementsAre(ProtoLiteral(3, 0))); + EXPECT_THAT(implications2_before_normalization, + 
testing::ElementsAre(ProtoLiteral(3, 0))); + EXPECT_THAT(p.Implications(1), testing::ElementsAre(ProtoLiteral(3, 0))); + EXPECT_THAT(p.Implications(2), testing::IsEmpty()); +} + +TEST(ProtoTrailTest, NormalizeImplicationsRemovesDuplicateAtSameLevel) { + ProtoTrail p; + p.PushLevel({0, 0}, 0, 1); + p.PushLevel({1, 0}, 0, 2); + p.AddImplication(2, {3, 0}); + p.PushLevel({3, 0}, 0, 3); + p.SetLevelImplied(3); + const std::vector implications_before_normalization = { + p.Implications(2).begin(), p.Implications(2).end()}; + + p.NormalizeImplications(); + + EXPECT_THAT(implications_before_normalization, + testing::ElementsAre(ProtoLiteral(3, 0), ProtoLiteral(3, 0))); + EXPECT_THAT(p.Implications(2), testing::ElementsAre(ProtoLiteral(3, 0))); +} + TEST(ProtoTrailTest, Clear) { ProtoTrail p; p.PushLevel({0, 0}, 0, 1); From a05ea60fd51e2b2a868a3e9d3257baaf64797367 Mon Sep 17 00:00:00 2001 From: Guillaume Chatelet Date: Thu, 18 Dec 2025 13:40:16 +0000 Subject: [PATCH 091/111] Prevents presubmit runs on different branches from canceling each other. 
--- .github/workflows/presubmit.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/presubmit.yml b/.github/workflows/presubmit.yml index 9cd9d456aef..4fbd6cd7708 100644 --- a/.github/workflows/presubmit.yml +++ b/.github/workflows/presubmit.yml @@ -7,7 +7,7 @@ on: - main concurrency: - group: ortools-presubmit + group: ${{ github.workflow }}-${{ github.ref_name }} cancel-in-progress: true env: From 5a867fd4578bb80391f3bf76340bd7c6ec038992 Mon Sep 17 00:00:00 2001 From: Guillaume Chatelet Date: Thu, 18 Dec 2025 16:53:26 +0100 Subject: [PATCH 092/111] Add nullability to SolveInterrupter in linear solver (#4958) --- ortools/linear_solver/BUILD.bazel | 1 + ortools/linear_solver/linear_solver.cc | 1 - ortools/linear_solver/solve_mp_model.cc | 6 ++++-- ortools/linear_solver/solve_mp_model.h | 6 ++++-- ortools/util/python/py_solve_interrupter_testing.cc | 4 +++- ortools/util/python/py_solve_interrupter_testing.h | 3 ++- 6 files changed, 14 insertions(+), 7 deletions(-) diff --git a/ortools/linear_solver/BUILD.bazel b/ortools/linear_solver/BUILD.bazel index f26292ab8b5..468af61a92b 100644 --- a/ortools/linear_solver/BUILD.bazel +++ b/ortools/linear_solver/BUILD.bazel @@ -598,5 +598,6 @@ cc_library( ":linear_solver_cc_proto", "//ortools/util:lazy_mutable_copy", "//ortools/util:solve_interrupter", + "@abseil-cpp//absl/base:nullability", ], ) diff --git a/ortools/linear_solver/linear_solver.cc b/ortools/linear_solver/linear_solver.cc index f7e299fdf4b..74c14c54cfb 100644 --- a/ortools/linear_solver/linear_solver.cc +++ b/ortools/linear_solver/linear_solver.cc @@ -516,7 +516,6 @@ absl::string_view ToString( } LOG(FATAL) << "Unrecognized solver type: " << static_cast(optimization_problem_type); - return ""; } bool AbslParseFlag(const absl::string_view text, diff --git a/ortools/linear_solver/solve_mp_model.cc b/ortools/linear_solver/solve_mp_model.cc index 35b5783c72d..8c8e918e9d6 100644 --- a/ortools/linear_solver/solve_mp_model.cc +++ 
b/ortools/linear_solver/solve_mp_model.cc @@ -17,6 +17,7 @@ #include #include +#include "absl/base/nullability.h" #include "ortools/linear_solver/linear_solver.h" #include "ortools/linear_solver/linear_solver.pb.h" #include "ortools/util/lazy_mutable_copy.h" @@ -26,8 +27,9 @@ namespace operations_research { // TODO(b/311704821): this function should not delegate to MPSolver, also true // for the functions below. -MPSolutionResponse SolveMPModel(LazyMutableCopy request, - const SolveInterrupter* interrupter) { +MPSolutionResponse SolveMPModel( + LazyMutableCopy request, + const SolveInterrupter* absl_nullable interrupter) { MPSolutionResponse response; if (interrupter != nullptr) { std::atomic atomic_bool = false; diff --git a/ortools/linear_solver/solve_mp_model.h b/ortools/linear_solver/solve_mp_model.h index c830e001ec0..198c41a6e2c 100644 --- a/ortools/linear_solver/solve_mp_model.h +++ b/ortools/linear_solver/solve_mp_model.h @@ -20,6 +20,7 @@ #include +#include "absl/base/nullability.h" #include "ortools/linear_solver/linear_solver.pb.h" #include "ortools/util/lazy_mutable_copy.h" #include "ortools/util/solve_interrupter.h" @@ -41,8 +42,9 @@ namespace operations_research { * Passing a non-null pointer with any other solver type immediately returns an * MPSOLVER_INCOMPATIBLE_OPTIONS error. 
*/ -MPSolutionResponse SolveMPModel(LazyMutableCopy request, - const SolveInterrupter* interrupter = nullptr); +MPSolutionResponse SolveMPModel( + LazyMutableCopy request, + const SolveInterrupter* absl_nullable interrupter = nullptr); bool SolverTypeSupportsInterruption(MPModelRequest::SolverType solver); diff --git a/ortools/util/python/py_solve_interrupter_testing.cc b/ortools/util/python/py_solve_interrupter_testing.cc index 1c572489705..14e07a20f71 100644 --- a/ortools/util/python/py_solve_interrupter_testing.cc +++ b/ortools/util/python/py_solve_interrupter_testing.cc @@ -15,11 +15,13 @@ #include +#include "absl/base/nullability.h" #include "ortools/util/python/py_solve_interrupter.h" namespace operations_research { -std::optional IsInterrupted(const PySolveInterrupter* interrupter) { +std::optional IsInterrupted( + const PySolveInterrupter* absl_nullable interrupter) { if (interrupter == nullptr) { return std::nullopt; } diff --git a/ortools/util/python/py_solve_interrupter_testing.h b/ortools/util/python/py_solve_interrupter_testing.h index 29ff08c8729..b8602dce4e7 100644 --- a/ortools/util/python/py_solve_interrupter_testing.h +++ b/ortools/util/python/py_solve_interrupter_testing.h @@ -31,7 +31,8 @@ namespace operations_research { // // The Clif/pybind11 wrapper will return a `bool | None` value, with None for // nullopt. -std::optional IsInterrupted(const PySolveInterrupter* interrupter); +std::optional IsInterrupted( + const PySolveInterrupter* absl_nullable interrupter); // Class that keeps a reference on a std::shared_ptr to // test that the C++ object survive the cleanup of the Python reference. 
From 40ff9caa8883d184ad3ac14b826e7d11b006f249 Mon Sep 17 00:00:00 2001 From: Guillaume Chatelet Date: Thu, 18 Dec 2025 22:38:22 +0100 Subject: [PATCH 093/111] Propagate nullability (#4959) --- ortools/math_opt/core/BUILD.bazel | 2 + ortools/math_opt/core/base_solver.h | 5 +- ortools/math_opt/core/solver_interface.h | 8 +-- ortools/math_opt/core/solver_interface_mock.h | 11 ++-- ortools/math_opt/cpp/BUILD.bazel | 3 + .../compute_infeasible_subsystem_arguments.h | 3 +- ortools/math_opt/cpp/message_callback.cc | 1 + ortools/math_opt/cpp/solve_arguments.cc | 7 +-- ortools/math_opt/cpp/solve_arguments.h | 9 ++- ortools/math_opt/cpp/solve_arguments_test.cc | 27 ++------- ortools/math_opt/cpp/solve_impl.cc | 18 +++--- ortools/math_opt/cpp/solve_impl.h | 15 +++-- ortools/math_opt/cpp/solve_impl_test.cc | 60 ------------------- ortools/math_opt/cpp/solve_test.cc | 26 ++++---- ortools/math_opt/cpp/update_tracker.cc | 1 + ortools/math_opt/solvers/BUILD.bazel | 8 ++- ortools/math_opt/solvers/cp_sat_solver.cc | 4 +- ortools/math_opt/solvers/cp_sat_solver.h | 9 +-- ortools/math_opt/solvers/glop_solver.cc | 10 ++-- ortools/math_opt/solvers/glop_solver.h | 12 ++-- ortools/math_opt/solvers/glpk_solver.cc | 11 ++-- ortools/math_opt/solvers/glpk_solver.h | 9 +-- ortools/math_opt/solvers/gscip_solver.cc | 5 +- ortools/math_opt/solvers/gscip_solver.h | 9 +-- ortools/math_opt/solvers/gurobi_solver.cc | 22 +++---- ortools/math_opt/solvers/gurobi_solver.h | 26 ++++---- ortools/math_opt/solvers/highs_solver.cc | 5 +- ortools/math_opt/solvers/highs_solver.h | 9 +-- ortools/math_opt/solvers/pdlp_solver.cc | 5 +- ortools/math_opt/solvers/pdlp_solver.h | 9 +-- 30 files changed, 153 insertions(+), 196 deletions(-) diff --git a/ortools/math_opt/core/BUILD.bazel b/ortools/math_opt/core/BUILD.bazel index fb2968f4c93..cee25b1af6d 100644 --- a/ortools/math_opt/core/BUILD.bazel +++ b/ortools/math_opt/core/BUILD.bazel @@ -210,6 +210,7 @@ cc_library( "//ortools/math_opt:result_cc_proto", 
"//ortools/util:solve_interrupter", "@abseil-cpp//absl/base:core_headers", + "@abseil-cpp//absl/base:nullability", "@abseil-cpp//absl/log", "@abseil-cpp//absl/log:check", "@abseil-cpp//absl/log:die_if_null", @@ -455,6 +456,7 @@ cc_library( "//ortools/math_opt:parameters_cc_proto", "//ortools/math_opt:result_cc_proto", "//ortools/util:solve_interrupter", + "@abseil-cpp//absl/base:nullability", "@abseil-cpp//absl/status:statusor", "@abseil-cpp//absl/strings", "@abseil-cpp//absl/strings:string_view", diff --git a/ortools/math_opt/core/base_solver.h b/ortools/math_opt/core/base_solver.h index 2c26b270f39..d7c2e9c3d15 100644 --- a/ortools/math_opt/core/base_solver.h +++ b/ortools/math_opt/core/base_solver.h @@ -19,6 +19,7 @@ #include #include +#include "absl/base/nullability.h" #include "absl/status/statusor.h" #include "ortools/math_opt/callback.pb.h" #include "ortools/math_opt/infeasible_subsystem.pb.h" @@ -78,7 +79,7 @@ class BaseSolver { // An optional interrupter that the solver can use to interrupt the solve // early. - const SolveInterrupter* interrupter = nullptr; + const SolveInterrupter* absl_nullable interrupter = nullptr; friend std::ostream& operator<<(std::ostream& out, const SolveArgs& args); }; @@ -96,7 +97,7 @@ class BaseSolver { // An optional interrupter that the solver can use to interrupt the solve // early. 
- const SolveInterrupter* interrupter = nullptr; + const SolveInterrupter* absl_nullable interrupter = nullptr; friend std::ostream& operator<<(std::ostream& out, const ComputeInfeasibleSubsystemArgs& args); diff --git a/ortools/math_opt/core/solver_interface.h b/ortools/math_opt/core/solver_interface.h index 845a7f1f113..1127ad0cd86 100644 --- a/ortools/math_opt/core/solver_interface.h +++ b/ortools/math_opt/core/solver_interface.h @@ -146,7 +146,7 @@ class SolverInterface { const ModelSolveParametersProto& model_parameters, MessageCallback message_cb, const CallbackRegistrationProto& callback_registration, Callback cb, - const SolveInterrupter* interrupter) = 0; + const SolveInterrupter* absl_nullable interrupter) = 0; // Updates the model to solve and returns true, or returns false if this // update is not supported. @@ -173,9 +173,9 @@ class SolverInterface { // When parameter `message_cb` is not null and the underlying solver does not // supports message callbacks, it should ignore it. 
virtual absl::StatusOr - ComputeInfeasibleSubsystem(const SolveParametersProto& parameters, - MessageCallback message_cb, - const SolveInterrupter* interrupter) = 0; + ComputeInfeasibleSubsystem( + const SolveParametersProto& parameters, MessageCallback message_cb, + const SolveInterrupter* absl_nullable interrupter) = 0; }; class AllSolversRegistry { diff --git a/ortools/math_opt/core/solver_interface_mock.h b/ortools/math_opt/core/solver_interface_mock.h index 4f69ac903e4..6a9683d69c3 100644 --- a/ortools/math_opt/core/solver_interface_mock.h +++ b/ortools/math_opt/core/solver_interface_mock.h @@ -71,6 +71,8 @@ #include #include +#include "absl/base/attributes.h" +#include "absl/base/nullability.h" #include "absl/base/thread_annotations.h" #include "absl/log/die_if_null.h" #include "absl/status/statusor.h" @@ -98,7 +100,7 @@ class SolverInterfaceMock : public SolverInterface { const ModelSolveParametersProto& model_parameters, MessageCallback message_cb, const CallbackRegistrationProto& callback_registration, - Callback cb, const SolveInterrupter* interrupter), + Callback cb, const SolveInterrupter* absl_nullable interrupter), (override)); MOCK_METHOD(absl::StatusOr, Update, @@ -107,7 +109,8 @@ class SolverInterfaceMock : public SolverInterface { MOCK_METHOD(absl::StatusOr, ComputeInfeasibleSubsystem, (const SolveParametersProto& parameters, - MessageCallback message_cb, const SolveInterrupter* interrupter), + MessageCallback message_cb, + const SolveInterrupter* absl_nullable interrupter), (override)); }; @@ -136,7 +139,7 @@ class DelegatingSolver : public SolverInterface { const ModelSolveParametersProto& model_parameters, MessageCallback message_cb, const CallbackRegistrationProto& callback_registration, Callback cb, - const SolveInterrupter* const interrupter) override { + const SolveInterrupter* absl_nullable const interrupter) override { return solver_->Solve(parameters, model_parameters, std::move(message_cb), callback_registration, std::move(cb), 
interrupter); }; @@ -148,7 +151,7 @@ class DelegatingSolver : public SolverInterface { absl::StatusOr ComputeInfeasibleSubsystem( const SolveParametersProto& parameters, MessageCallback message_cb, - const SolveInterrupter* const interrupter) override { + const SolveInterrupter* absl_nullable const interrupter) override { return solver_->ComputeInfeasibleSubsystem( parameters, std::move(message_cb), interrupter); } diff --git a/ortools/math_opt/cpp/BUILD.bazel b/ortools/math_opt/cpp/BUILD.bazel index 762002f206e..3ea9efc9b76 100644 --- a/ortools/math_opt/cpp/BUILD.bazel +++ b/ortools/math_opt/cpp/BUILD.bazel @@ -517,6 +517,7 @@ cc_library( "//ortools/math_opt:model_cc_proto", "//ortools/math_opt:model_update_cc_proto", "//ortools/math_opt/storage:model_storage", + "@abseil-cpp//absl/log:die_if_null", "@abseil-cpp//absl/status", "@abseil-cpp//absl/status:statusor", "@abseil-cpp//absl/strings", @@ -531,6 +532,7 @@ cc_library( "//ortools/base:logging", "//ortools/base:source_location", "@abseil-cpp//absl/base:core_headers", + "@abseil-cpp//absl/log:die_if_null", "@abseil-cpp//absl/strings", "@abseil-cpp//absl/synchronization", "@abseil-cpp//absl/types:span", @@ -559,6 +561,7 @@ cc_library( "//ortools/base:status_macros", "//ortools/math_opt/storage:model_storage", "//ortools/util:solve_interrupter", + "@abseil-cpp//absl/base:nullability", "@abseil-cpp//absl/container:flat_hash_set", "@abseil-cpp//absl/status", ], diff --git a/ortools/math_opt/cpp/compute_infeasible_subsystem_arguments.h b/ortools/math_opt/cpp/compute_infeasible_subsystem_arguments.h index 405a11abfd1..f5a46e22cef 100644 --- a/ortools/math_opt/cpp/compute_infeasible_subsystem_arguments.h +++ b/ortools/math_opt/cpp/compute_infeasible_subsystem_arguments.h @@ -14,6 +14,7 @@ #ifndef ORTOOLS_MATH_OPT_CPP_COMPUTE_INFEASIBLE_SUBSYSTEM_ARGUMENTS_H_ #define ORTOOLS_MATH_OPT_CPP_COMPUTE_INFEASIBLE_SUBSYSTEM_ARGUMENTS_H_ +#include "absl/base/nullability.h" #include "ortools/math_opt/cpp/message_callback.h" // 
IWYU pragma: export #include "ortools/math_opt/cpp/parameters.h" // IWYU pragma: export #include "ortools/util/solve_interrupter.h" // IWYU pragma: export @@ -59,7 +60,7 @@ struct ComputeInfeasibleSubsystemArguments { // ComputeInfeasibleSubsystem(model, SolverType::kGurobi, // { .interrupter = interrupter.get() }); // - const SolveInterrupter* interrupter = nullptr; + const SolveInterrupter* absl_nullable interrupter = nullptr; }; } // namespace operations_research::math_opt diff --git a/ortools/math_opt/cpp/message_callback.cc b/ortools/math_opt/cpp/message_callback.cc index b9d69596914..566d190d05f 100644 --- a/ortools/math_opt/cpp/message_callback.cc +++ b/ortools/math_opt/cpp/message_callback.cc @@ -20,6 +20,7 @@ #include #include "absl/base/thread_annotations.h" +#include "absl/log/die_if_null.h" #include "absl/strings/string_view.h" #include "absl/synchronization/mutex.h" #include "absl/types/span.h" diff --git a/ortools/math_opt/cpp/solve_arguments.cc b/ortools/math_opt/cpp/solve_arguments.cc index c2df0d6b7a0..ab17e4f0625 100644 --- a/ortools/math_opt/cpp/solve_arguments.cc +++ b/ortools/math_opt/cpp/solve_arguments.cc @@ -24,17 +24,12 @@ namespace operations_research::math_opt { -absl::Status SolveArguments::CheckModelStorageAndCallback( +absl::Status SolveArguments::CheckModelStorage( const ModelStorageCPtr expected_storage) const { RETURN_IF_ERROR(model_parameters.CheckModelStorage(expected_storage)) << "invalid model_parameters"; RETURN_IF_ERROR(callback_registration.CheckModelStorage(expected_storage)) << "invalid callback_registration"; - - if (callback == nullptr && !callback_registration.events.empty()) { - return absl::InvalidArgumentError( - "no callback was provided to run, but callback events were registered"); - } return absl::OkStatus(); } diff --git a/ortools/math_opt/cpp/solve_arguments.h b/ortools/math_opt/cpp/solve_arguments.h index 115b5529e1f..eb3128b3b3d 100644 --- a/ortools/math_opt/cpp/solve_arguments.h +++ 
b/ortools/math_opt/cpp/solve_arguments.h @@ -17,6 +17,7 @@ #ifndef ORTOOLS_MATH_OPT_CPP_SOLVE_ARGUMENTS_H_ #define ORTOOLS_MATH_OPT_CPP_SOLVE_ARGUMENTS_H_ +#include "absl/base/nullability.h" #include "absl/status/status.h" #include "ortools/math_opt/cpp/callback.h" // IWYU pragma: export #include "ortools/math_opt/cpp/message_callback.h" // IWYU pragma: export @@ -82,13 +83,11 @@ struct SolveArguments { // Solve(model, SolverType::kGlop, // { .interrupter = interrupter.get() }); // - const SolveInterrupter* interrupter = nullptr; + const SolveInterrupter* absl_nullable interrupter = nullptr; // Returns a failure if the referenced variables and constraints don't belong - // to the input expected_storage (which must not be nullptr). Also returns a - // failure if callback events are registered but no callback is provided. - absl::Status CheckModelStorageAndCallback( - ModelStorageCPtr expected_storage) const; + // to the input expected_storage (which must not be nullptr). + absl::Status CheckModelStorage(ModelStorageCPtr expected_storage) const; }; } // namespace operations_research::math_opt diff --git a/ortools/math_opt/cpp/solve_arguments_test.cc b/ortools/math_opt/cpp/solve_arguments_test.cc index 0451cd38723..efdcdeb7e72 100644 --- a/ortools/math_opt/cpp/solve_arguments_test.cc +++ b/ortools/math_opt/cpp/solve_arguments_test.cc @@ -27,7 +27,7 @@ namespace { using ::testing::HasSubstr; using ::testing::status::StatusIs; -TEST(CheckModelStorageAndCallbackTest, CorrectModelAndCallback) { +TEST(CheckModelStorageTest, CorrectModelAndCallback) { Model model; const Variable x = model.AddVariable("x"); @@ -41,10 +41,10 @@ TEST(CheckModelStorageAndCallbackTest, CorrectModelAndCallback) { .callback = [](const CallbackData&) { return CallbackResult{}; }, }; - EXPECT_OK(args.CheckModelStorageAndCallback(model.storage())); + EXPECT_OK(args.CheckModelStorage(model.storage())); } -TEST(CheckModelStorageAndCallbackTest, WrongModelInModelParameters) { 
+TEST(CheckModelStorageTest, WrongModelInModelParameters) { Model model; const Variable x = model.AddVariable("x"); @@ -53,12 +53,12 @@ TEST(CheckModelStorageAndCallbackTest, WrongModelInModelParameters) { }; Model other_model; - EXPECT_THAT(args.CheckModelStorageAndCallback(other_model.storage()), + EXPECT_THAT(args.CheckModelStorage(other_model.storage()), StatusIs(absl::StatusCode::kInvalidArgument, HasSubstr("model_parameters"))); } -TEST(CheckModelStorageAndCallbackTest, WrongModelInCallbackRegistration) { +TEST(CheckModelStorageTest, WrongModelInCallbackRegistration) { Model model; const Variable x = model.AddVariable("x"); @@ -72,26 +72,11 @@ TEST(CheckModelStorageAndCallbackTest, WrongModelInCallbackRegistration) { }; Model other_model; - EXPECT_THAT(args.CheckModelStorageAndCallback(other_model.storage()), + EXPECT_THAT(args.CheckModelStorage(other_model.storage()), StatusIs(absl::StatusCode::kInvalidArgument, AllOf(HasSubstr("callback_registration"), HasSubstr("mip_solution_filter")))); } -TEST(CheckModelStorageAndCallbackTest, NoCallbackWithRegisteredEvents) { - Model model; - - const SolveArguments args = { - .callback_registration = - { - .events = {CallbackEvent::kMipSolution}, - }, - }; - - EXPECT_THAT( - args.CheckModelStorageAndCallback(model.storage()), - StatusIs(absl::StatusCode::kInvalidArgument, HasSubstr("no callback"))); -} - } // namespace } // namespace operations_research::math_opt diff --git a/ortools/math_opt/cpp/solve_impl.cc b/ortools/math_opt/cpp/solve_impl.cc index 0586b2e7f5b..78538213042 100644 --- a/ortools/math_opt/cpp/solve_impl.cc +++ b/ortools/math_opt/cpp/solve_impl.cc @@ -17,6 +17,7 @@ #include #include +#include "absl/base/nullability.h" #include "absl/functional/any_invocable.h" #include "absl/log/check.h" #include "absl/memory/memory.h" @@ -44,7 +45,7 @@ absl::StatusOr CallSolve(BaseSolver& solver, const ModelStorageCPtr expected_storage, const SolveArguments& arguments, SolveInterrupter& local_canceller) { - 
RETURN_IF_ERROR(arguments.CheckModelStorageAndCallback(expected_storage)); + RETURN_IF_ERROR(arguments.CheckModelStorage(expected_storage)); BaseSolver::Callback cb = nullptr; absl::Mutex mutex; @@ -124,7 +125,8 @@ absl::StatusOr CallComputeInfeasibleSubsystem( absl::StatusOr SolveImpl( const BaseSolverFactory solver_factory, const Model& model, const SolverType solver_type, const SolveArguments& solve_args, - const SolveInterrupter* const user_canceller, const bool remove_names) { + const SolveInterrupter* absl_nullable const user_canceller, + const bool remove_names) { SolveInterrupter local_canceller; const ScopedSolveInterrupterCallback user_canceller_cb( user_canceller, [&]() { local_canceller.Interrupt(); }); @@ -139,7 +141,8 @@ absl::StatusOr ComputeInfeasibleSubsystemImpl( const BaseSolverFactory solver_factory, const Model& model, const SolverType solver_type, const ComputeInfeasibleSubsystemArguments& compute_args, - const SolveInterrupter* const user_canceller, const bool remove_names) { + const SolveInterrupter* absl_nullable const user_canceller, + const bool remove_names) { SolveInterrupter local_canceller; const ScopedSolveInterrupterCallback user_canceller_cb( user_canceller, [&]() { local_canceller.Interrupt(); }); @@ -152,10 +155,11 @@ absl::StatusOr ComputeInfeasibleSubsystemImpl( } absl::StatusOr> -IncrementalSolverImpl::New(BaseSolverFactory solver_factory, Model* const model, - const SolverType solver_type, - const SolveInterrupter* const user_canceller, - const bool remove_names) { +IncrementalSolverImpl::New( + BaseSolverFactory solver_factory, Model* const model, + const SolverType solver_type, + const SolveInterrupter* absl_nullable const user_canceller, + const bool remove_names) { if (model == nullptr) { return absl::InvalidArgumentError("input model can't be null"); } diff --git a/ortools/math_opt/cpp/solve_impl.h b/ortools/math_opt/cpp/solve_impl.h index 9c410b4ee28..221fccd6ba0 100644 --- a/ortools/math_opt/cpp/solve_impl.h +++ 
b/ortools/math_opt/cpp/solve_impl.h @@ -16,6 +16,7 @@ #include +#include "absl/base/nullability.h" #include "absl/functional/any_invocable.h" #include "absl/status/statusor.h" #include "ortools/math_opt/core/base_solver.h" @@ -55,12 +56,10 @@ using BaseSolverFactory = // Solves the input model. // // The `user_canceller` parameter is optional. -absl::StatusOr SolveImpl(BaseSolverFactory solver_factory, - const Model& model, - SolverType solver_type, - const SolveArguments& solve_args, - const SolveInterrupter* user_canceller, - bool remove_names); +absl::StatusOr SolveImpl( + BaseSolverFactory solver_factory, const Model& model, + SolverType solver_type, const SolveArguments& solve_args, + const SolveInterrupter* absl_nullable user_canceller, bool remove_names); // ComputeInfeasibleSubsystems the input model in a subprocess. // @@ -69,7 +68,7 @@ absl::StatusOr ComputeInfeasibleSubsystemImpl( BaseSolverFactory solver_factory, const Model& model, SolverType solver_type, const ComputeInfeasibleSubsystemArguments& compute_args, - const SolveInterrupter* user_canceller, bool remove_names); + const SolveInterrupter* absl_nullable user_canceller, bool remove_names); // Incremental solve of a model. class IncrementalSolverImpl : public IncrementalSolver { @@ -79,7 +78,7 @@ class IncrementalSolverImpl : public IncrementalSolver { // The `user_canceller` parameter is optional. 
static absl::StatusOr> New( BaseSolverFactory solver_factory, Model* model, SolverType solver_type, - const SolveInterrupter* user_canceller, bool remove_names); + const SolveInterrupter* absl_nullable user_canceller, bool remove_names); absl::StatusOr Update() override; diff --git a/ortools/math_opt/cpp/solve_impl_test.cc b/ortools/math_opt/cpp/solve_impl_test.cc index 12f264ddc7b..903e8b4ed09 100644 --- a/ortools/math_opt/cpp/solve_impl_test.cc +++ b/ortools/math_opt/cpp/solve_impl_test.cc @@ -484,32 +484,6 @@ TEST(SolveImplTest, FailingSolve) { StatusIs(absl::StatusCode::kInternal, "solve failed")); } -TEST(SolveImplTest, NullCallback) { - BasicLp basic_lp; - - SolveArguments args; - args.parameters.enable_output = true; - - args.model_parameters = - ModelSolveParameters::OnlySomePrimalVariables({basic_lp.a}); - - args.callback_registration.add_lazy_constraints = true; - args.callback_registration.events.insert(CallbackEvent::kMipSolution); - - BaseSolverFactoryMock factory_mock; - BaseSolverMock solver; - EXPECT_CALL(factory_mock, Call(SOLVER_TYPE_GLOP, - EquivToProto(basic_lp.model.ExportModel()), _)) - .WillOnce( - Return(ByMove(std::make_unique(&solver)))); - - EXPECT_THAT( - SolveImpl(factory_mock.AsStdFunction(), basic_lp.model, SolverType::kGlop, - args, /*user_canceller=*/nullptr, /*remove_names=*/false), - StatusIs(absl::StatusCode::kInvalidArgument, - HasSubstr("no callback was provided"))); -} - TEST(SolveImplTest, WrongModelInModelParameters) { BasicLp basic_lp; BasicLp other_basic_lp; @@ -1593,40 +1567,6 @@ TEST(IncrementalSolverImplTest, UpdateAndSolveWithFailingSolverUpdate) { HasSubstr("update failed")))); } -TEST(IncrementalSolverImplTest, NullCallback) { - BasicLp basic_lp; - - SolveArguments args; - args.parameters.enable_output = true; - - args.model_parameters = - ModelSolveParameters::OnlySomePrimalVariables({basic_lp.a}); - - args.callback_registration.add_lazy_constraints = true; - 
args.callback_registration.events.insert(CallbackEvent::kMipSolution); - - BaseSolverFactoryMock factory_mock; - BaseSolverMock solver_interface; - - EXPECT_CALL(factory_mock, Call(SOLVER_TYPE_GLOP, - EquivToProto(basic_lp.model.ExportModel()), _)) - .WillOnce(Return( - ByMove(std::make_unique(&solver_interface)))); - - ASSERT_OK_AND_ASSIGN( - std::unique_ptr solver, - IncrementalSolverImpl::New( - factory_mock.AsStdFunction(), &basic_lp.model, SolverType::kGlop, - /*user_canceller=*/nullptr, /*remove_names=*/false)); - - Mock::VerifyAndClearExpectations(&factory_mock); - Mock::VerifyAndClearExpectations(&solver_interface); - - EXPECT_THAT(solver->SolveWithoutUpdate(args), - StatusIs(absl::StatusCode::kInvalidArgument, - HasSubstr("no callback was provided"))); -} - TEST(IncrementalSolverImplTest, WrongModelInModelParameters) { BasicLp basic_lp; BasicLp other_basic_lp; diff --git a/ortools/math_opt/cpp/solve_test.cc b/ortools/math_opt/cpp/solve_test.cc index fc591aade62..ae5948290ac 100644 --- a/ortools/math_opt/cpp/solve_test.cc +++ b/ortools/math_opt/cpp/solve_test.cc @@ -19,6 +19,7 @@ #include #include +#include "absl/base/nullability.h" #include "absl/container/flat_hash_set.h" #include "absl/status/status.h" #include "absl/status/statusor.h" @@ -53,7 +54,6 @@ using ::testing::HasSubstr; using ::testing::InSequence; using ::testing::Mock; using ::testing::Ne; -using ::testing::Optional; using ::testing::Pair; using ::testing::Return; using ::testing::UnorderedElementsAre; @@ -184,7 +184,7 @@ TEST(SolveTest, SuccessfulSolveWithCallback) { [&](const SolveParametersProto&, const ModelSolveParametersProto&, const MessageCallback message_cb, const CallbackRegistrationProto&, SolverInterface::Callback cb, - const SolveInterrupter* const interrupter) + const SolveInterrupter* absl_nullable const interrupter) -> absl::StatusOr { CallbackDataProto cb_data; cb_data.set_event(CALLBACK_EVENT_MIP_SOLUTION); @@ -345,10 +345,10 @@ TEST(SolveTest, NullCallback) { 
EXPECT_CALL(factory_mock, Call(EquivToProto(basic_lp.model.ExportModel()), _)) .WillOnce(Return(ByMove(std::make_unique(&solver)))); - EXPECT_THAT(Solve(basic_lp.model, - EnumFromProto(registration.solver_type()).value(), args), - StatusIs(absl::StatusCode::kInvalidArgument, - HasSubstr("no callback was provided"))); + EXPECT_THAT( + Solve(basic_lp.model, EnumFromProto(registration.solver_type()).value(), + args), + StatusIs(absl::StatusCode::kInvalidArgument, HasSubstr("no callback"))); } TEST(SolveTest, WrongModelInModelParameters) { @@ -416,7 +416,7 @@ TEST(SolveTest, WrongModelInCallbackResult) { [&](const SolveParametersProto&, const ModelSolveParametersProto&, const MessageCallback message_cb, const CallbackRegistrationProto&, SolverInterface::Callback cb, - const SolveInterrupter* const interrupter) + const SolveInterrupter* absl_nullable const interrupter) -> absl::StatusOr { CallbackDataProto cb_data; cb_data.set_event(CALLBACK_EVENT_MIP_SOLUTION); @@ -961,7 +961,7 @@ TEST(IncrementalSolverTest, SuccessfulSolveWithCallback) { [&](const SolveParametersProto&, const ModelSolveParametersProto&, const MessageCallback message_cb, const CallbackRegistrationProto&, SolverInterface::Callback cb, - const SolveInterrupter* const interrupter) + const SolveInterrupter* absl_nullable const interrupter) -> absl::StatusOr { CallbackDataProto cb_data; cb_data.set_event(CALLBACK_EVENT_MIP_SOLUTION); @@ -1137,7 +1137,7 @@ TEST(IncrementalSolverTest, UpdateAndSolve) { [&](const SolveParametersProto&, const ModelSolveParametersProto&, const MessageCallback message_cb, const CallbackRegistrationProto&, SolverInterface::Callback cb, - const SolveInterrupter* const interrupter) + const SolveInterrupter* absl_nullable const interrupter) -> absl::StatusOr { CallbackDataProto cb_data; cb_data.set_event(CALLBACK_EVENT_MIP_SOLUTION); @@ -1306,9 +1306,9 @@ TEST(IncrementalSolverTest, NullCallback) { Mock::VerifyAndClearExpectations(&factory_mock); 
Mock::VerifyAndClearExpectations(&solver_interface); - EXPECT_THAT(solver->SolveWithoutUpdate(args), - StatusIs(absl::StatusCode::kInvalidArgument, - HasSubstr("no callback was provided"))); + EXPECT_THAT( + solver->SolveWithoutUpdate(args), + StatusIs(absl::StatusCode::kInvalidArgument, HasSubstr("no callback"))); } TEST(IncrementalSolverTest, WrongModelInModelParameters) { @@ -1395,7 +1395,7 @@ TEST(IncrementalSolverTest, WrongModelInCallbackResult) { [&](const SolveParametersProto&, const ModelSolveParametersProto&, const MessageCallback message_cb, const CallbackRegistrationProto&, SolverInterface::Callback cb, - const SolveInterrupter* const interrupter) + const SolveInterrupter* absl_nullable const interrupter) -> absl::StatusOr { CallbackDataProto cb_data; cb_data.set_event(CALLBACK_EVENT_MIP_SOLUTION); diff --git a/ortools/math_opt/cpp/update_tracker.cc b/ortools/math_opt/cpp/update_tracker.cc index c96a6f9fb1a..1a9339c02ec 100644 --- a/ortools/math_opt/cpp/update_tracker.cc +++ b/ortools/math_opt/cpp/update_tracker.cc @@ -16,6 +16,7 @@ #include #include +#include "absl/log/die_if_null.h" #include "absl/status/status.h" #include "absl/status/statusor.h" #include "ortools/base/logging.h" diff --git a/ortools/math_opt/solvers/BUILD.bazel b/ortools/math_opt/solvers/BUILD.bazel index 277a971804d..bd18d2953c9 100644 --- a/ortools/math_opt/solvers/BUILD.bazel +++ b/ortools/math_opt/solvers/BUILD.bazel @@ -54,6 +54,7 @@ cc_library( "//ortools/math_opt/validators:callback_validator", "//ortools/port:proto_utils", "//ortools/util:solve_interrupter", + "@abseil-cpp//absl/base:nullability", "@abseil-cpp//absl/cleanup", "@abseil-cpp//absl/container:flat_hash_map", "@abseil-cpp//absl/container:flat_hash_set", @@ -141,12 +142,12 @@ cc_library( "//ortools/util:solve_interrupter", "//ortools/util:testing_utils", "@abseil-cpp//absl/algorithm:container", + "@abseil-cpp//absl/base:nullability", "@abseil-cpp//absl/container:flat_hash_map", 
"@abseil-cpp//absl/container:flat_hash_set", "@abseil-cpp//absl/log", "@abseil-cpp//absl/log:check", "@abseil-cpp//absl/memory", - "@abseil-cpp//absl/meta:type_traits", "@abseil-cpp//absl/status", "@abseil-cpp//absl/status:statusor", "@abseil-cpp//absl/strings", @@ -193,6 +194,7 @@ cc_library( "//ortools/util:solve_interrupter", "//ortools/util:strong_integers", "//ortools/util:time_limit", + "@abseil-cpp//absl/base:nullability", "@abseil-cpp//absl/cleanup", "@abseil-cpp//absl/container:flat_hash_map", "@abseil-cpp//absl/log", @@ -387,6 +389,7 @@ cc_library( "//ortools/pdlp:solvers_cc_proto", "//ortools/port:proto_utils", "//ortools/util:solve_interrupter", + "@abseil-cpp//absl/base:nullability", "@abseil-cpp//absl/log", "@abseil-cpp//absl/memory", "@abseil-cpp//absl/status", @@ -469,6 +472,7 @@ cc_library( "//ortools/third_party_solvers/glpk:glpk_env_deleter", "//ortools/third_party_solvers/glpk:glpk_formatters", "//ortools/util:solve_interrupter", + "@abseil-cpp//absl/base:nullability", "@abseil-cpp//absl/cleanup", "@abseil-cpp//absl/container:flat_hash_map", "@abseil-cpp//absl/log", @@ -568,6 +572,7 @@ cc_library( "//ortools/util:solve_interrupter", "//ortools/util:status_macros", "@abseil-cpp//absl/algorithm:container", + "@abseil-cpp//absl/base:nullability", "@abseil-cpp//absl/cleanup", "@abseil-cpp//absl/container:flat_hash_map", "@abseil-cpp//absl/log:check", @@ -706,6 +711,7 @@ cc_library( "//ortools/port:proto_utils", "//ortools/third_party_solvers:xpress_environment", "//ortools/util:solve_interrupter", + "@abseil-cpp//absl/base:nullability", "@abseil-cpp//absl/log:check", "@abseil-cpp//absl/memory", "@abseil-cpp//absl/status", diff --git a/ortools/math_opt/solvers/cp_sat_solver.cc b/ortools/math_opt/solvers/cp_sat_solver.cc index c4da11cb149..67f03456502 100644 --- a/ortools/math_opt/solvers/cp_sat_solver.cc +++ b/ortools/math_opt/solvers/cp_sat_solver.cc @@ -503,7 +503,7 @@ absl::StatusOr CpSatSolver::Solve( const ModelSolveParametersProto& 
model_parameters, const MessageCallback message_cb, const CallbackRegistrationProto& callback_registration, const Callback cb, - const SolveInterrupter* const interrupter) { + const SolveInterrupter* absl_nullable interrupter) { RETURN_IF_ERROR(ModelSolveParametersAreSupported( model_parameters, kCpSatSupportedStructures, "CP-SAT")); const absl::Time start = absl::Now(); @@ -675,7 +675,7 @@ InvertedBounds CpSatSolver::ListInvertedBounds() const { absl::StatusOr CpSatSolver::ComputeInfeasibleSubsystem(const SolveParametersProto&, MessageCallback, - const SolveInterrupter*) { + const SolveInterrupter* absl_nullable) { return absl::UnimplementedError( "CPSAT does not provide a method to compute an infeasible subsystem"); } diff --git a/ortools/math_opt/solvers/cp_sat_solver.h b/ortools/math_opt/solvers/cp_sat_solver.h index a7c0dbe9cf9..d2bbbae9685 100644 --- a/ortools/math_opt/solvers/cp_sat_solver.h +++ b/ortools/math_opt/solvers/cp_sat_solver.h @@ -18,6 +18,7 @@ #include #include +#include "absl/base/nullability.h" #include "absl/status/statusor.h" #include "absl/types/span.h" #include "ortools/linear_solver/linear_solver.pb.h" @@ -46,12 +47,12 @@ class CpSatSolver : public SolverInterface { const ModelSolveParametersProto& model_parameters, MessageCallback message_cb, const CallbackRegistrationProto& callback_registration, Callback cb, - const SolveInterrupter* interrupter) override; + const SolveInterrupter* absl_nullable interrupter) override; absl::StatusOr Update(const ModelUpdateProto& model_update) override; absl::StatusOr - ComputeInfeasibleSubsystem(const SolveParametersProto& parameters, - MessageCallback message_cb, - const SolveInterrupter* interrupter) override; + ComputeInfeasibleSubsystem( + const SolveParametersProto& parameters, MessageCallback message_cb, + const SolveInterrupter* absl_nullable interrupter) override; private: CpSatSolver(MPModelProto cp_sat_model, std::vector variable_ids, diff --git a/ortools/math_opt/solvers/glop_solver.cc 
b/ortools/math_opt/solvers/glop_solver.cc index 8b5379ba979..cdb666989d4 100644 --- a/ortools/math_opt/solvers/glop_solver.cc +++ b/ortools/math_opt/solvers/glop_solver.cc @@ -21,6 +21,7 @@ #include #include +#include "absl/base/nullability.h" #include "absl/cleanup/cleanup.h" #include "absl/container/flat_hash_map.h" #include "absl/log/check.h" @@ -87,7 +88,8 @@ absl::string_view SafeName(const LinearConstraintsProto& linear_constraints, } absl::StatusOr BuildTermination( - const glop::ProblemStatus status, const SolveInterrupter* const interrupter, + const glop::ProblemStatus status, + const SolveInterrupter* absl_nullable const interrupter, const bool is_maximize, const double objective_value) { switch (status) { case glop::ProblemStatus::OPTIMAL: @@ -730,7 +732,7 @@ absl::Status GlopSolver::FillSolveStats(const absl::Duration solve_time, absl::StatusOr GlopSolver::MakeSolveResult( const glop::ProblemStatus status, const ModelSolveParametersProto& model_parameters, - const SolveInterrupter* const interrupter, + const SolveInterrupter* absl_nullable const interrupter, const absl::Duration solve_time) { SolveResultProto solve_result; ASSIGN_OR_RETURN(*solve_result.mutable_termination(), @@ -765,7 +767,7 @@ absl::StatusOr GlopSolver::Solve( const ModelSolveParametersProto& model_parameters, const MessageCallback message_cb, const CallbackRegistrationProto& callback_registration, const Callback, - const SolveInterrupter* const interrupter) { + const SolveInterrupter* absl_nullable const interrupter) { RETURN_IF_ERROR(ModelSolveParametersAreSupported( model_parameters, kGlopSupportedStructures, "Glop")); RETURN_IF_ERROR(CheckRegisteredCallbackEvents(callback_registration, @@ -886,7 +888,7 @@ absl::StatusOr GlopSolver::Update(const ModelUpdateProto& model_update) { absl::StatusOr GlopSolver::ComputeInfeasibleSubsystem(const SolveParametersProto&, MessageCallback, - const SolveInterrupter*) { + const SolveInterrupter* absl_nullable) { return absl::UnimplementedError( 
"GLOP does not implement a method to compute an infeasible subsystem"); } diff --git a/ortools/math_opt/solvers/glop_solver.h b/ortools/math_opt/solvers/glop_solver.h index b80893ab27a..1778974ac24 100644 --- a/ortools/math_opt/solvers/glop_solver.h +++ b/ortools/math_opt/solvers/glop_solver.h @@ -18,6 +18,7 @@ #include +#include "absl/base/nullability.h" #include "absl/container/flat_hash_map.h" #include "absl/status/status.h" #include "absl/status/statusor.h" @@ -53,12 +54,12 @@ class GlopSolver : public SolverInterface { const ModelSolveParametersProto& model_parameters, MessageCallback message_cb, const CallbackRegistrationProto& callback_registration, Callback cb, - const SolveInterrupter* interrupter) override; + const SolveInterrupter* absl_nullable interrupter) override; absl::StatusOr Update(const ModelUpdateProto& model_update) override; absl::StatusOr - ComputeInfeasibleSubsystem(const SolveParametersProto& parameters, - MessageCallback message_cb, - const SolveInterrupter* interrupter) override; + ComputeInfeasibleSubsystem( + const SolveParametersProto& parameters, MessageCallback message_cb, + const SolveInterrupter* absl_nullable interrupter) override; // Returns the merged parameters and a list of warnings from any parameter // settings that are invalid for this solver. 
@@ -93,7 +94,8 @@ class GlopSolver : public SolverInterface { absl::StatusOr MakeSolveResult( glop::ProblemStatus status, const ModelSolveParametersProto& model_parameters, - const SolveInterrupter* interrupter, absl::Duration solve_time); + const SolveInterrupter* absl_nullable interrupter, + absl::Duration solve_time); absl::Status FillSolveStats(absl::Duration solve_time, SolveStatsProto& solve_stats); diff --git a/ortools/math_opt/solvers/glpk_solver.cc b/ortools/math_opt/solvers/glpk_solver.cc index 1195b7a0c22..a41bdc4b28c 100644 --- a/ortools/math_opt/solvers/glpk_solver.cc +++ b/ortools/math_opt/solvers/glpk_solver.cc @@ -27,6 +27,7 @@ #include #include +#include "absl/base/nullability.h" #include "absl/cleanup/cleanup.h" #include "absl/container/flat_hash_map.h" #include "absl/log/check.h" @@ -490,7 +491,8 @@ absl::Status SetLPParameters(const SolveParametersProto& parameters, class MipCallbackData { public: - explicit MipCallbackData(const SolveInterrupter* const interrupter) + explicit MipCallbackData( + const SolveInterrupter* absl_nullable const interrupter) : interrupter_(interrupter) {} void Callback(glp_tree* const tree) { @@ -540,7 +542,7 @@ class MipCallbackData { private: // Optional interrupter. - const SolveInterrupter* const interrupter_; + const SolveInterrupter* absl_nullable const interrupter_; // Set to true if glp_ios_terminate() has been called due to the interrupter. 
std::atomic interrupted_by_interrupter_ = false; @@ -1059,7 +1061,8 @@ absl::StatusOr GlpkSolver::Solve( const ModelSolveParametersProto& model_parameters, MessageCallback message_cb, const CallbackRegistrationProto& callback_registration, - const Callback /*cb*/, const SolveInterrupter* const interrupter) { + const Callback /*cb*/, + const SolveInterrupter* absl_nullable const interrupter) { RETURN_IF_ERROR(ModelSolveParametersAreSupported( model_parameters, kGlpkSupportedStructures, "GLPK")); RETURN_IF_ERROR(CheckCurrentThread()); @@ -1808,7 +1811,7 @@ std::optional GlpkSolver::EmptyIntegerBoundsResult() { absl::StatusOr GlpkSolver::ComputeInfeasibleSubsystem( const SolveParametersProto& parameters, MessageCallback message_cb, - const SolveInterrupter* const interrupter) { + const SolveInterrupter* absl_nullable const interrupter) { return absl::UnimplementedError( "GLPK does not provide a method to compute an infeasible subsystem"); } diff --git a/ortools/math_opt/solvers/glpk_solver.h b/ortools/math_opt/solvers/glpk_solver.h index 790ae48e68b..57512f0e378 100644 --- a/ortools/math_opt/solvers/glpk_solver.h +++ b/ortools/math_opt/solvers/glpk_solver.h @@ -20,6 +20,7 @@ #include #include +#include "absl/base/nullability.h" #include "absl/container/flat_hash_map.h" #include "absl/log/log.h" #include "absl/status/status.h" @@ -55,12 +56,12 @@ class GlpkSolver : public SolverInterface { const ModelSolveParametersProto& model_parameters, MessageCallback message_cb, const CallbackRegistrationProto& callback_registration, Callback cb, - const SolveInterrupter* interrupter) override; + const SolveInterrupter* absl_nullable interrupter) override; absl::StatusOr Update(const ModelUpdateProto& model_update) override; absl::StatusOr - ComputeInfeasibleSubsystem(const SolveParametersProto& parameters, - MessageCallback message_cb, - const SolveInterrupter* interrupter) override; + ComputeInfeasibleSubsystem( + const SolveParametersProto& parameters, MessageCallback 
message_cb, + const SolveInterrupter* absl_nullable interrupter) override; private: // The columns of the GPLK problem. diff --git a/ortools/math_opt/solvers/gscip_solver.cc b/ortools/math_opt/solvers/gscip_solver.cc index 85193b55b76..800e2a88f95 100644 --- a/ortools/math_opt/solvers/gscip_solver.cc +++ b/ortools/math_opt/solvers/gscip_solver.cc @@ -23,6 +23,7 @@ #include #include +#include "absl/base/nullability.h" #include "absl/cleanup/cleanup.h" #include "absl/container/flat_hash_map.h" #include "absl/container/flat_hash_set.h" @@ -1013,7 +1014,7 @@ absl::StatusOr GScipSolver::Solve( const ModelSolveParametersProto& model_parameters, const MessageCallback message_cb, const CallbackRegistrationProto& callback_registration, Callback cb, - const SolveInterrupter* const interrupter) { + const SolveInterrupter* absl_nullable const interrupter) { RETURN_IF_ERROR(ModelSolveParametersAreSupported( model_parameters, kGscipSupportedStructures, "SCIP")); const absl::Time start = absl::Now(); @@ -1353,7 +1354,7 @@ absl::StatusOr GScipSolver::Update(const ModelUpdateProto& model_update) { absl::StatusOr GScipSolver::ComputeInfeasibleSubsystem(const SolveParametersProto&, MessageCallback, - const SolveInterrupter*) { + const SolveInterrupter* absl_nullable) { return absl::UnimplementedError( "SCIP does not provide a method to compute an infeasible subsystem"); } diff --git a/ortools/math_opt/solvers/gscip_solver.h b/ortools/math_opt/solvers/gscip_solver.h index d31b05e18b9..21d3586b455 100644 --- a/ortools/math_opt/solvers/gscip_solver.h +++ b/ortools/math_opt/solvers/gscip_solver.h @@ -20,6 +20,7 @@ #include #include +#include "absl/base/nullability.h" #include "absl/container/flat_hash_map.h" #include "absl/container/flat_hash_set.h" #include "absl/status/status.h" @@ -58,12 +59,12 @@ class GScipSolver : public SolverInterface { const ModelSolveParametersProto& model_parameters, MessageCallback message_cb, const CallbackRegistrationProto& callback_registration, Callback 
cb, - const SolveInterrupter* interrupter) override; + const SolveInterrupter* absl_nullable interrupter) override; absl::StatusOr Update(const ModelUpdateProto& model_update) override; absl::StatusOr - ComputeInfeasibleSubsystem(const SolveParametersProto& parameters, - MessageCallback message_cb, - const SolveInterrupter* interrupter) override; + ComputeInfeasibleSubsystem( + const SolveParametersProto& parameters, MessageCallback message_cb, + const SolveInterrupter* absl_nullable interrupter) override; // Returns the merged parameters and a list of warnings for unsupported // parameters. diff --git a/ortools/math_opt/solvers/gurobi_solver.cc b/ortools/math_opt/solvers/gurobi_solver.cc index 136919e7f23..e13c4d01c8f 100644 --- a/ortools/math_opt/solvers/gurobi_solver.cc +++ b/ortools/math_opt/solvers/gurobi_solver.cc @@ -26,6 +26,7 @@ #include #include "absl/algorithm/container.h" +#include "absl/base/nullability.h" #include "absl/container/flat_hash_map.h" #include "absl/container/flat_hash_set.h" #include "absl/log/check.h" @@ -2145,8 +2146,8 @@ absl::Status GurobiSolver::ChangeCoefficients( } absl::Status GurobiSolver::UpdateDoubleListAttribute( - const SparseDoubleVectorProto& update, const char* attribute_name, - const IdHashMap& id_hash_map) { + const SparseDoubleVectorProto& update, + const char* absl_nonnull attribute_name, const IdHashMap& id_hash_map) { if (update.ids_size() == 0) { return absl::OkStatus(); } @@ -2159,8 +2160,8 @@ absl::Status GurobiSolver::UpdateDoubleListAttribute( } absl::Status GurobiSolver::UpdateInt32ListAttribute( - const SparseInt32VectorProto& update, const char* attribute_name, - const IdHashMap& id_hash_map) { + const SparseInt32VectorProto& update, + const char* absl_nonnull attribute_name, const IdHashMap& id_hash_map) { if (update.ids_size() == 0) { return absl::OkStatus(); } @@ -2782,11 +2783,10 @@ absl::StatusOr> GurobiSolver::New( } absl::StatusOr> -GurobiSolver::RegisterCallback(const CallbackRegistrationProto& 
registration, - const Callback cb, - const MessageCallback message_cb, - const absl::Time start, - SolveInterrupter* const local_interrupter) { +GurobiSolver::RegisterCallback( + const CallbackRegistrationProto& registration, const Callback cb, + const MessageCallback message_cb, const absl::Time start, + SolveInterrupter* absl_nullable const local_interrupter) { const absl::flat_hash_set events = EventSet(registration); // Note that IS_MIP does not necessarily mean the problem has integer @@ -2971,7 +2971,7 @@ absl::StatusOr GurobiSolver::Solve( const ModelSolveParametersProto& model_parameters, const MessageCallback message_cb, const CallbackRegistrationProto& callback_registration, const Callback cb, - const SolveInterrupter* const interrupter) { + const SolveInterrupter* absl_nullable const interrupter) { RETURN_IF_ERROR(ModelSolveParametersAreSupported( model_parameters, kGurobiSupportedStructures, "Gurobi")); const absl::Time start = absl::Now(); @@ -3102,7 +3102,7 @@ absl::StatusOr GurobiSolver::Solve( absl::StatusOr GurobiSolver::ComputeInfeasibleSubsystem( const SolveParametersProto& parameters, MessageCallback message_cb, - const SolveInterrupter* const interrupter) { + const SolveInterrupter* absl_nullable const interrupter) { const absl::Time start = absl::Now(); // Need to run GRBupdatemodel before: diff --git a/ortools/math_opt/solvers/gurobi_solver.h b/ortools/math_opt/solvers/gurobi_solver.h index 9cacb995df6..0fd2c4b4ae5 100644 --- a/ortools/math_opt/solvers/gurobi_solver.h +++ b/ortools/math_opt/solvers/gurobi_solver.h @@ -21,6 +21,7 @@ #include #include +#include "absl/base/nullability.h" #include "absl/container/flat_hash_map.h" #include "absl/container/flat_hash_set.h" #include "absl/status/status.h" @@ -60,17 +61,18 @@ class GurobiSolver : public SolverInterface { const ModelSolveParametersProto& model_parameters, MessageCallback message_cb, const CallbackRegistrationProto& callback_registration, Callback cb, - const SolveInterrupter* 
interrupter) override; + const SolveInterrupter* absl_nullable interrupter) override; absl::StatusOr Update(const ModelUpdateProto& model_update) override; absl::StatusOr - ComputeInfeasibleSubsystem(const SolveParametersProto& parameters, - MessageCallback message_cb, - const SolveInterrupter* interrupter) override; + ComputeInfeasibleSubsystem( + const SolveParametersProto& parameters, MessageCallback message_cb, + const SolveInterrupter* absl_nullable interrupter) override; private: struct GurobiCallbackData { - explicit GurobiCallbackData(GurobiCallbackInput callback_input, - SolveInterrupter* const local_interrupter) + explicit GurobiCallbackData( + GurobiCallbackInput callback_input, + SolveInterrupter* absl_nullable const local_interrupter) : callback_input(std::move(callback_input)), local_interrupter(local_interrupter) {} const GurobiCallbackInput callback_input; @@ -83,7 +85,7 @@ class GurobiSolver : public SolverInterface { // // It is optional; it is not null when either we have a LP/MIP callback or a // user interrupter. But it can be null if we only have a message callback. 
- SolveInterrupter* const local_interrupter; + SolveInterrupter* absl_nullable const local_interrupter; MessageCallbackData message_callback_data; @@ -305,11 +307,11 @@ class GurobiSolver : public SolverInterface { const SparseDoubleMatrixProto& terms); absl::Status LoadModel(const ModelProto& input_model); - absl::Status UpdateDoubleListAttribute(const SparseDoubleVectorProto& update, - const char* attribute_name, - const IdHashMap& id_hash_map); + absl::Status UpdateDoubleListAttribute( + const SparseDoubleVectorProto& update, + const char* absl_nonnull attribute_name, const IdHashMap& id_hash_map); absl::Status UpdateInt32ListAttribute(const SparseInt32VectorProto& update, - const char* attribute_name, + const char* absl_nonnull attribute_name, const IdHashMap& id_hash_map); struct DeletedIndices { @@ -354,7 +356,7 @@ class GurobiSolver : public SolverInterface { absl::StatusOr> RegisterCallback( const CallbackRegistrationProto& registration, Callback cb, MessageCallback message_cb, absl::Time start, - SolveInterrupter* local_interrupter); + SolveInterrupter* absl_nullable local_interrupter); // Returns the ids of variables and linear constraints with inverted bounds. 
absl::StatusOr ListInvertedBounds() const; diff --git a/ortools/math_opt/solvers/highs_solver.cc b/ortools/math_opt/solvers/highs_solver.cc index 0bf7506b274..88fd2cd7575 100644 --- a/ortools/math_opt/solvers/highs_solver.cc +++ b/ortools/math_opt/solvers/highs_solver.cc @@ -31,6 +31,7 @@ #include "Highs.h" #include "absl/algorithm/container.h" +#include "absl/base/nullability.h" #include "absl/cleanup/cleanup.h" #include "absl/container/flat_hash_map.h" #include "absl/log/check.h" @@ -912,7 +913,7 @@ absl::StatusOr HighsSolver::Solve( const SolveParametersProto& parameters, const ModelSolveParametersProto& model_parameters, MessageCallback message_cb, const CallbackRegistrationProto&, Callback, - const SolveInterrupter* const) { + const SolveInterrupter* absl_nullable const) { RETURN_IF_ERROR(ModelSolveParametersAreSupported( model_parameters, kHighsSupportedStructures, "Highs")); const absl::Time start = absl::Now(); @@ -1031,7 +1032,7 @@ absl::StatusOr HighsSolver::Update(const ModelUpdateProto&) { absl::StatusOr HighsSolver::ComputeInfeasibleSubsystem(const SolveParametersProto&, MessageCallback, - const SolveInterrupter*) { + const SolveInterrupter* absl_nullable) { return absl::UnimplementedError( "HiGHS does not provide a method to compute an infeasible subsystem"); } diff --git a/ortools/math_opt/solvers/highs_solver.h b/ortools/math_opt/solvers/highs_solver.h index eaba4360a69..5b0e5034670 100644 --- a/ortools/math_opt/solvers/highs_solver.h +++ b/ortools/math_opt/solvers/highs_solver.h @@ -22,6 +22,7 @@ #include #include "Highs.h" +#include "absl/base/nullability.h" #include "absl/container/flat_hash_map.h" #include "absl/status/status.h" #include "absl/status/statusor.h" @@ -51,12 +52,12 @@ class HighsSolver : public SolverInterface { const ModelSolveParametersProto& model_parameters, MessageCallback message_cb, const CallbackRegistrationProto& callback_registration, Callback cb, - const SolveInterrupter* interrupter) override; + const SolveInterrupter* 
absl_nullable interrupter) override; absl::StatusOr Update(const ModelUpdateProto& model_update) override; absl::StatusOr - ComputeInfeasibleSubsystem(const SolveParametersProto& parameters, - MessageCallback message_cb, - const SolveInterrupter* interrupter) override; + ComputeInfeasibleSubsystem( + const SolveParametersProto& parameters, MessageCallback message_cb, + const SolveInterrupter* absl_nullable interrupter) override; private: struct SolutionClaims { diff --git a/ortools/math_opt/solvers/pdlp_solver.cc b/ortools/math_opt/solvers/pdlp_solver.cc index 537162add04..13d0f15e9ea 100644 --- a/ortools/math_opt/solvers/pdlp_solver.cc +++ b/ortools/math_opt/solvers/pdlp_solver.cc @@ -24,6 +24,7 @@ #include #include +#include "absl/base/nullability.h" #include "absl/log/log.h" #include "absl/memory/memory.h" #include "absl/status/status.h" @@ -336,7 +337,7 @@ absl::StatusOr PdlpSolver::Solve( const ModelSolveParametersProto& model_parameters, const MessageCallback message_cb, const CallbackRegistrationProto& callback_registration, const Callback, - const SolveInterrupter* const interrupter) { + const SolveInterrupter* absl_nullable const interrupter) { RETURN_IF_ERROR(ModelSolveParametersAreSupported( model_parameters, kPdlpSupportedStructures, "PDLP")); RETURN_IF_ERROR(CheckRegisteredCallbackEvents(callback_registration, @@ -381,7 +382,7 @@ absl::StatusOr PdlpSolver::Update(const ModelUpdateProto&) { absl::StatusOr PdlpSolver::ComputeInfeasibleSubsystem(const SolveParametersProto&, MessageCallback, - const SolveInterrupter*) { + const SolveInterrupter* absl_nullable) { return absl::UnimplementedError( "PDLP does not provide a method to compute an infeasible subsystem"); } diff --git a/ortools/math_opt/solvers/pdlp_solver.h b/ortools/math_opt/solvers/pdlp_solver.h index edca6bd50d4..183bc186c0d 100644 --- a/ortools/math_opt/solvers/pdlp_solver.h +++ b/ortools/math_opt/solvers/pdlp_solver.h @@ -16,6 +16,7 @@ #include +#include "absl/base/nullability.h" #include 
"absl/status/statusor.h" #include "ortools/math_opt/callback.pb.h" #include "ortools/math_opt/core/solver_interface.h" @@ -43,12 +44,12 @@ class PdlpSolver : public SolverInterface { const ModelSolveParametersProto& model_parameters, MessageCallback message_cb, const CallbackRegistrationProto& callback_registration, Callback cb, - const SolveInterrupter* interrupter) override; + const SolveInterrupter* absl_nullable interrupter) override; absl::StatusOr Update(const ModelUpdateProto& model_update) override; absl::StatusOr - ComputeInfeasibleSubsystem(const SolveParametersProto& parameters, - MessageCallback message_cb, - const SolveInterrupter* interrupter) override; + ComputeInfeasibleSubsystem( + const SolveParametersProto& parameters, MessageCallback message_cb, + const SolveInterrupter* absl_nullable interrupter) override; // Returns the merged parameters and a list of warnings. static absl::StatusOr MergeParameters( From 9190a5d819c4f62f0c0fc963b8116d1cf5dafe70 Mon Sep 17 00:00:00 2001 From: Guillaume Chatelet Date: Tue, 23 Dec 2025 15:01:05 +0000 Subject: [PATCH 094/111] [bazel] Fix remaining licence directive --- tools/build/BUILD.bazel | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/tools/build/BUILD.bazel b/tools/build/BUILD.bazel index 6b8d65e59e0..ea3007c9423 100644 --- a/tools/build/BUILD.bazel +++ b/tools/build/BUILD.bazel @@ -14,10 +14,7 @@ load("@pip_deps//:requirements.bzl", "requirement") load("@rules_python//python:py_binary.bzl", "py_binary") -package( - default_applicable_licenses = ["//third_party/ortools:license"], - default_visibility = ["//visibility:public"], -) +package(default_visibility = ["//visibility:public"]) py_binary( name = "bazel2cmake", From 0782d130654f6b166d0f489c09f269d59c458210 Mon Sep 17 00:00:00 2001 From: Laurent Perron Date: Tue, 23 Dec 2025 18:11:58 +0100 Subject: [PATCH 095/111] [CP-SAT] work on lrat, clause congruence, inprocess ; bug fixes --- ortools/sat/BUILD.bazel | 1 + ortools/sat/clause.cc 
| 22 +- ortools/sat/cp_model_checker.cc | 13 +- ortools/sat/cp_model_solver.cc | 6 + ortools/sat/csharp/CpSolver.cs | 5 +- ortools/sat/gate_utils.h | 22 ++ ortools/sat/gate_utils_test.cc | 41 +++ ortools/sat/lrat_proof_handler.cc | 14 +- ortools/sat/probing.cc | 381 +++++++++++++++++++-- ortools/sat/probing.h | 35 +- ortools/sat/python/cp_model.py | 8 +- ortools/sat/sat_inprocessing.cc | 530 ++++++++++++++++++++---------- ortools/sat/sat_inprocessing.h | 25 +- ortools/sat/scheduling_helpers.cc | 11 + 14 files changed, 883 insertions(+), 231 deletions(-) diff --git a/ortools/sat/BUILD.bazel b/ortools/sat/BUILD.bazel index c1c447036c0..0820d997949 100644 --- a/ortools/sat/BUILD.bazel +++ b/ortools/sat/BUILD.bazel @@ -1638,6 +1638,7 @@ cc_library( "//ortools/util:sorted_interval_list", "//ortools/util:strong_integers", "//ortools/util:time_limit", + "@abseil-cpp//absl/algorithm:container", "@abseil-cpp//absl/cleanup", "@abseil-cpp//absl/container:btree", "@abseil-cpp//absl/container:flat_hash_map", diff --git a/ortools/sat/clause.cc b/ortools/sat/clause.cc index f0ef8f42ce6..7b1b47078eb 100644 --- a/ortools/sat/clause.cc +++ b/ortools/sat/clause.cc @@ -125,7 +125,6 @@ ClauseManager::~ClauseManager() { } void ClauseManager::Resize(int num_variables) { - DCHECK(is_clean_); watchers_on_false_.resize(num_variables << 1); reasons_.resize(num_variables); needs_cleaning_.Resize(LiteralIndex(num_variables << 1)); @@ -980,8 +979,29 @@ bool BinaryImplicationGraph::AddBinaryClauseInternal( trail_->ChangeReason(trail_index, propagator_id_); } + // Deal with literal fixing and do not even add a binary clause in that case. if (rep_a == rep_b) { return FixLiteral(rep_a, {rep_id}); + } else if (trail_->CurrentDecisionLevel() == 0) { + const auto& assignment = trail_->Assignment(); + + // TODO(user): just make GetUnitClauseId() work all the time? for that + // we need to make sure all level zero are pushed with kUnitReason. 
+ if (lrat_proof_handler_ != nullptr) { + if (assignment.LiteralIsFalse(rep_a)) { + return FixLiteral(rep_b, + {rep_id, trail_->GetUnitClauseId(rep_a.Variable())}); + } else if (assignment.LiteralIsFalse(rep_b)) { + return FixLiteral(rep_a, + {rep_id, trail_->GetUnitClauseId(rep_b.Variable())}); + } + } else { + if (assignment.LiteralIsFalse(rep_a)) { + return FixLiteral(rep_b, {}); + } else if (assignment.LiteralIsFalse(rep_b)) { + return FixLiteral(rep_a, {}); + } + } } a = rep_a; diff --git a/ortools/sat/cp_model_checker.cc b/ortools/sat/cp_model_checker.cc index 0de28767579..74be2c2ba6b 100644 --- a/ortools/sat/cp_model_checker.cc +++ b/ortools/sat/cp_model_checker.cc @@ -284,15 +284,6 @@ std::string ValidateLinearExpression(const CpModelProto& model, return ""; } -std::string ValidateConstantExpression(const CpModelProto& model, - const LinearExpressionProto& expr) { - if (!expr.vars().empty()) { - return absl::StrCat("expression must be constant: ", - ProtobufShortDebugString(expr)); - } - return ValidateLinearExpression(model, expr); -} - std::string ValidateLinearConstraint(const CpModelProto& model, const ConstraintProto& ct) { if (!DomainInProtoIsValid(ct.linear())) { @@ -1376,8 +1367,8 @@ class ConstraintChecker { bool LinearConstraintIsFeasible(const ConstraintProto& ct) { int64_t sum = 0; const int num_variables = ct.linear().coeffs_size(); - const int* const vars = ct.linear().vars().data(); - const int64_t* const coeffs = ct.linear().coeffs().data(); + absl::Span vars = absl::MakeSpan(ct.linear().vars()); + absl::Span coeffs = absl::MakeSpan(ct.linear().coeffs()); for (int i = 0; i < num_variables; ++i) { // We know we only have positive reference now. 
DCHECK(RefIsPositive(vars[i])); diff --git a/ortools/sat/cp_model_solver.cc b/ortools/sat/cp_model_solver.cc index a116cb39b65..0ab16590142 100644 --- a/ortools/sat/cp_model_solver.cc +++ b/ortools/sat/cp_model_solver.cc @@ -2529,6 +2529,12 @@ CpSolverResponse SolveCpModel(const CpModelProto& model_proto, Model* model) { } #endif // ORTOOLS_TARGET_OS_SUPPORTS_THREADS + if (DEBUG_MODE) { + LOG(WARNING) + << "WARNING: CP-SAT is running in debug mode. The solver will " + "be slow because we will do a lot of extra checks. Compile in " + "optimization mode to gain an order of magnitude speedup."; + } SOLVER_LOG(logger, ""); SOLVER_LOG(logger, "Starting ", CpSatSolverVersion()); SOLVER_LOG(logger, "Parameters: ", ProtobufShortDebugString(params)); diff --git a/ortools/sat/csharp/CpSolver.cs b/ortools/sat/csharp/CpSolver.cs index 9a5a89dc422..1e8fc491c58 100644 --- a/ortools/sat/csharp/CpSolver.cs +++ b/ortools/sat/csharp/CpSolver.cs @@ -210,7 +210,8 @@ public bool BooleanValue(ILiteral literal) /// /// Releases unmanaged resources and optionally releases managed resources. /// - /// true to release both managed and unmanaged resources; false to release only unmanaged resources. + /// true to release both managed and unmanaged resources; false to release only unmanaged + /// resources. 
protected virtual void Dispose(bool disposing) { if (_disposed) @@ -270,4 +271,4 @@ class BestBoundCallbackDelegate : BestBoundCallback public override void NewBestBound(double bound) => _delegate(bound); } -} // namespace Google.OrTools.Sat \ No newline at end of file +} // namespace Google.OrTools.Sat diff --git a/ortools/sat/gate_utils.h b/ortools/sat/gate_utils.h index d902c57977c..0674556f171 100644 --- a/ortools/sat/gate_utils.h +++ b/ortools/sat/gate_utils.h @@ -98,6 +98,28 @@ inline int AddHoleAtPosition(int i, int bitset) { return (bitset & ((1 << i) - 1)) + ((bitset >> i) << (i + 1)); } +inline int RemoveFixedInput(int i, bool at_true, + absl::Span inputs, + int& int_function_values) { + DCHECK_LT(i, inputs.size()); + const int value = at_true ? 1 : 0; + + // Re-compute the bitset. + SmallBitset values = int_function_values; + SmallBitset new_truth_table = 0; + const int new_size = inputs.size() - 1; + for (int p = 0; p < (1 << new_size); ++p) { + const int extended_p = AddHoleAtPosition(i, p) | (value << i); + new_truth_table |= ((values >> extended_p) & 1) << p; + } + int_function_values = new_truth_table; + + for (int j = i + 1; j < inputs.size(); ++j) { + inputs[j - 1] = inputs[j]; + } + return new_size; +} + // The function is target = function_values[inputs as bit position]. // // TODO(user): This can be optimized with more bit twiddling if needed. 
diff --git a/ortools/sat/gate_utils_test.cc b/ortools/sat/gate_utils_test.cc index 1294122b2bf..07428e21901 100644 --- a/ortools/sat/gate_utils_test.cc +++ b/ortools/sat/gate_utils_test.cc @@ -75,6 +75,47 @@ TEST(AddHoleAtPositionTest, BasicTest) { EXPECT_EQ(AddHoleAtPosition(8, 0xFF), 0b011111111); } +TEST(CanonicalizeFunctionTruthTableTest, AndGateWithXAndNotX) { + LiteralIndex output = Literal(+1).Index(); + std::vector inputs{Literal(+2).Index(), Literal(-2).Index()}; + int table = 0b1000; + const int new_size = + CanonicalizeFunctionTruthTable(output, absl::MakeSpan(inputs), table); + CHECK_EQ(new_size, 0); // Fixed to zero. +} + +TEST(RemoveFixedInputTest, BasicTest1) { + std::vector inputs{Literal(+1).Index(), Literal(+2).Index(), + Literal(+3).Index()}; + int table = 0b01011010; + const int new_size = RemoveFixedInput(1, true, absl::MakeSpan(inputs), table); + EXPECT_EQ(new_size, 2); + EXPECT_EQ(inputs[0], Literal(+1).Index()); + EXPECT_EQ(inputs[1], Literal(+3).Index()); + EXPECT_EQ(table, 0b0110) << std::bitset<4>(table); +} + +TEST(RemoveFixedInputTest, BasicTest2) { + std::vector inputs{Literal(+1).Index(), Literal(+2).Index(), + Literal(+3).Index()}; + int table = 0b01011010; + const int new_size = + RemoveFixedInput(1, false, absl::MakeSpan(inputs), table); + EXPECT_EQ(new_size, 2); + EXPECT_EQ(inputs[0], Literal(+1).Index()); + EXPECT_EQ(inputs[1], Literal(+3).Index()); + EXPECT_EQ(table, 0b0110) << std::bitset<4>(table); +} + +TEST(CanonicalizeFunctionTruthTableTest, AndGateWithXAndNotX2) { + LiteralIndex output = Literal(+1).Index(); + std::vector inputs{Literal(-2).Index(), Literal(+2).Index()}; + int table = 0b1000; + const int new_size = + CanonicalizeFunctionTruthTable(output, absl::MakeSpan(inputs), table); + CHECK_EQ(new_size, 0); // Fixed to zero. 
+} + TEST(CanonicalizeFunctionTruthTableTest, RandomTest) { absl::BitGen random; const int num_vars = 8; diff --git a/ortools/sat/lrat_proof_handler.cc b/ortools/sat/lrat_proof_handler.cc index f722c9d9921..d2a226b1033 100644 --- a/ortools/sat/lrat_proof_handler.cc +++ b/ortools/sat/lrat_proof_handler.cc @@ -802,7 +802,6 @@ ClauseId LratProofHandler::AddAndProveInferredClauseByEnumeration( // That give us 2^(n + 1) intermediate clauses. // Their ids will be stored in (1 << k) + binary_encoding_of_the_li. const int n = to_dense_index.size() - new_clause.size(); - CHECK_GT(n, 0); // We dealt with this above. CHECK_EQ(n, relevant_literals.size()); const int num_intermediates = 1 << (n + 1); std::vector ids(num_intermediates, kNoClauseId); @@ -838,11 +837,16 @@ ClauseId LratProofHandler::AddAndProveInferredClauseByEnumeration( // The clause is the same as the one we try to prove! or smaller. if (clauses_for_proof[i].size() == new_clause.size()) { return ids_for_proof[i]; + } else { + // TODO(user): Likely we could have simplified what we are trying to + // prove. Like I saw this happen when we prove an equivalence but we + // can actually prove that the variables are fixed. + const ClauseId new_id = id_generator_->GetNextId(); + if (!AddInferredClause(new_id, new_clause, {ids_for_proof[i]})) { + return error("failed trivial inclusion proof"); + } + return new_id; } - - // TODO(user): if this ever happen we can create a new id and prove it - // with clauses_for_proof[i], but for now I never saw that. 
- return error("Case not yet supported"); } mask >>= new_clause.size(); diff --git a/ortools/sat/probing.cc b/ortools/sat/probing.cc index 79ffa761e81..a7c84315849 100644 --- a/ortools/sat/probing.cc +++ b/ortools/sat/probing.cc @@ -14,11 +14,14 @@ #include "ortools/sat/probing.h" #include +#include #include #include #include +#include #include +#include "absl/algorithm/container.h" #include "absl/cleanup/cleanup.h" #include "absl/container/btree_map.h" #include "absl/container/btree_set.h" @@ -51,6 +54,212 @@ namespace operations_research { namespace sat { +// Holds a copy of the trail and of propagator reasons in order to be able to +// build LRAT proofs after the trail has been modified. +class TrailCopy { + public: + TrailCopy(const Trail& trail, const BinaryImplicationGraph& implication_graph, + const ClauseManager& clause_manager) + : trail_(trail), + implication_graph_(implication_graph), + clause_manager_(clause_manager) {} + + // Updates this trail copy with the current trail state. + void CopyTrail() { + const int trail_size = trail_.Index(); + trail_index_.resize(trail_.NumVariables(), -1); + trail_literals_.clear(); + trail_info_.clear(); + stored_reasons_.clear(); + trail_literals_.reserve(trail_size); + trail_info_.reserve(trail_size); + for (int i = 0; i < trail_size; ++i) { + const Literal literal = trail_[i]; + const BooleanVariable var = literal.Variable(); + const AssignmentInfo& info = trail_.Info(var); + const int assignment_type = trail_.AssignmentType(var); + absl::Span reason; + // Get the clause ID only if it is very cheap to compute. Otherwise, delay + // its computation until it is really needed, in AppendClauseIdsFixing(). 
+ std::variant reason_clause = kNoClauseId; + if (assignment_type == AssignmentType::kCachedReason) { + const absl::Span cached_reason = trail_.Reason(var); + stored_reasons_.push_back({cached_reason.begin(), cached_reason.end()}); + reason = stored_reasons_.back(); + } else if (assignment_type == AssignmentType::kUnitReason) { + reason_clause = trail_.GetUnitClauseId(var); + } else if (assignment_type == implication_graph_.PropagatorId()) { + absl::Span original_reason = trail_.Reason(var); + DCHECK_EQ(original_reason.size(), 1); + reason = absl::MakeConstSpan(trail_literals_) + .subspan(trail_index_[original_reason[0].Variable()], 1); + DCHECK_EQ(reason[0], original_reason[0].Negated()); + } else if (assignment_type == clause_manager_.PropagatorId()) { + const SatClause* sat_clause = clause_manager_.ReasonClauseOrNull(var); + if (sat_clause != nullptr) { + reason = sat_clause->AsSpan(); + } + reason_clause = clause_manager_.ReasonClauseOrNull(var); + } + trail_index_[var] = i; + trail_literals_.push_back(literal); + trail_info_.emplace_back(info.level, assignment_type, reason, + reason_clause); + } + + const int num_decisions = trail_.CurrentDecisionLevel(); + decisions_.clear(); + decisions_.reserve(num_decisions); + for (int i = 0; i < num_decisions; ++i) { + decisions_.push_back(trail_.Decisions()[i].literal); + } + } + + // Same as ClauseManager::AppendClauseIdsFixing(), but adapted to work on this + // trail copy instead of on the real trail. + void AppendClauseIdsFixing( + absl::Span literals, std::vector* clause_ids, + LiteralIndex decision, + absl::flat_hash_map, ClauseId> + tmp_binary_clause_ids) { + // Mark the literals whose reason must be expanded, and put them in a heap. 
+ tmp_mark_.ClearAndResize(BooleanVariable(trail_index_.size())); + marked_trail_indices_heap_.clear(); + for (const Literal lit : literals) { + tmp_mark_.Set(lit.Variable()); + marked_trail_indices_heap_.push_back(trail_index_[lit.Variable()]); + } + absl::c_make_heap(marked_trail_indices_heap_); + const int current_level = decisions_.size(); + + // The min level of the expanded literals. + int min_level = current_level; + + // Unit clauses must come first. We put them in clause_ids directly. We put + // the others in non_unit_clause_ids and append them to clause_ids at the + // end. + std::vector& non_unit_clause_ids = + tmp_clause_ids_for_append_clauses_fixing_; + non_unit_clause_ids.clear(); + + while (!marked_trail_indices_heap_.empty()) { + absl::c_pop_heap(marked_trail_indices_heap_); + const int trail_index = marked_trail_indices_heap_.back(); + marked_trail_indices_heap_.pop_back(); + const Literal marked_literal = trail_literals_[trail_index]; + const TrailInfo& trail_info = trail_info_[trail_index]; + + // Stop at decisions, at literals fixed at root, and at literals implied + // by the decision at their level. + const int level = trail_info.level; + if (level > 0) min_level = std::min(min_level, level); + if (trail_info.assignment_type == AssignmentType::kSearchDecision) { + continue; + } + if (level == 0) { + clause_ids->push_back(std::get(trail_info.reason_clause)); + continue; + } + const Literal level_decision = decisions_[level - 1]; + ClauseId clause_id = implication_graph_.GetClauseId( + level_decision.Negated(), marked_literal); + if (clause_id == kNoClauseId) { + const auto it = tmp_binary_clause_ids.find( + std::minmax(level_decision.Negated(), marked_literal)); + if (it != tmp_binary_clause_ids.end()) { + clause_id = it->second; + } + } + if (clause_id != kNoClauseId) { + non_unit_clause_ids.push_back(clause_id); + continue; + } + + // Mark all the literals of its reason. 
+ for (const Literal literal : trail_info.reason) { + const BooleanVariable var = literal.Variable(); + if (!tmp_mark_[var]) { + const int trail_index = trail_index_[var]; + const TrailInfo& info = trail_info_[trail_index]; + tmp_mark_.Set(var); + if (info.level > 0) { + marked_trail_indices_heap_.push_back(trail_index); + absl::c_push_heap(marked_trail_indices_heap_); + } else { + clause_ids->push_back(std::get(info.reason_clause)); + } + } + } + non_unit_clause_ids.push_back(ReasonClauseId(trail_index)); + } + + // Add the implication chain from `decision` to all the decisions found + // during the expansion. + if (Literal(decision) != decisions_[current_level - 1]) { + // If `decision` is not the last decision, it must directly imply it. + clause_ids->push_back(implication_graph_.GetClauseId( + Literal(decision).Negated(), decisions_[current_level - 1])); + } + for (int level = current_level - 1; level >= min_level; --level) { + clause_ids->push_back(implication_graph_.GetClauseId( + decisions_[level].Negated(), decisions_[level - 1])); + } + + clause_ids->insert(clause_ids->end(), non_unit_clause_ids.rbegin(), + non_unit_clause_ids.rend()); + } + + private: + // Same as ClauseManager::ReasonClauseId(), but adapted to work on this trail + // copy instead of on the real trail. 
+ ClauseId ReasonClauseId(int trail_index) const { + const TrailInfo& trail_info = trail_info_[trail_index]; + const int assignment_type = trail_info.assignment_type; + if (assignment_type == AssignmentType::kCachedReason || + assignment_type == AssignmentType::kUnitReason) { + return std::get(trail_info.reason_clause); + } else if (assignment_type == implication_graph_.PropagatorId()) { + return implication_graph_.GetClauseId(trail_literals_[trail_index], + trail_info.reason[0].Negated()); + } else if (assignment_type == clause_manager_.PropagatorId()) { + const SatClause* reason = + std::get(trail_info.reason_clause); + if (reason != nullptr) { + return clause_manager_.GetClauseId(reason); + } + } + return kNoClauseId; + } + + struct TrailInfo { + int level; + int assignment_type; + // For literals propagated by the BinaryImplicationGraph, the negation of + // the original reason. For literals propagated by the ClauseManager, *all* + // the literals of the SatClause (which includes the propagated variable). + // For kCachedReason, this is the stored reason. Empty for kUnitReason. + absl::Span reason; + // Holds a clause ID if `assignment_type` is kCachedReason or kUnitReason, + // or a SatClause* if `assignment_type` corresponds to the ClauseManager. + std::variant reason_clause; + }; + + const Trail& trail_; + const BinaryImplicationGraph& implication_graph_; + const ClauseManager& clause_manager_; + + util_intops::StrongVector trail_index_; + std::vector trail_literals_; + std::vector trail_info_; + // We use a deque for pointer stability (they are kept in TrailInfo::reason). 
+ std::deque> stored_reasons_; + std::vector decisions_; + + SparseBitset tmp_mark_; + std::vector marked_trail_indices_heap_; + std::vector tmp_clause_ids_for_append_clauses_fixing_; +}; + Prober::Prober(Model* model) : trail_(*model->GetOrCreate()), assignment_(model->GetOrCreate()->Assignment()), @@ -63,11 +272,14 @@ Prober::Prober(Model* model) clause_manager_(model->GetOrCreate()), clause_id_generator_(model->GetOrCreate()), lrat_proof_handler_(model->Mutable()), + trail_copy_(new TrailCopy(trail_, *implication_graph_, *clause_manager_)), drat_enabled_(lrat_proof_handler_ != nullptr && (lrat_proof_handler_->drat_check_enabled() || lrat_proof_handler_->drat_output_enabled())), logger_(model->GetOrCreate()) {} +Prober::~Prober() { delete trail_copy_; } + bool Prober::ProbeBooleanVariables(const double deterministic_time_limit) { const int num_variables = sat_solver_->NumVariables(); const VariablesAssignment& assignment = sat_solver_->Assignment(); @@ -87,6 +299,12 @@ bool Prober::ProbeOneVariableInternal(BooleanVariable b) { new_integer_bounds_.clear(); propagated_.ResetAllToFalse(); tmp_binary_clause_ids_.clear(); + // We block clause deletion since we compute some LRAT proofs in a delayed + // way, and we need to make sure the clauses that were used are still around. 
+ sat_solver_->BlockClauseDeletion(true); + absl::Cleanup unblock_clause_deletion = [&] { + sat_solver_->BlockClauseDeletion(false); + }; for (const Literal decision : {Literal(b, true), Literal(b, false)}) { if (assignment_.LiteralIsAssigned(decision)) continue; @@ -143,7 +361,7 @@ bool Prober::ProbeOneVariableInternal(BooleanVariable b) { } if (lrat_proof_handler_ != nullptr) { - auto add_tmp_implication = [&](const Literal decision, const Literal l) { + for (const Literal l : new_implied_or_fixed_literals_) { tmp_clause_ids_.clear(); clause_manager_->AppendClauseIdsFixing( {l}, &tmp_clause_ids_, decision.Index(), @@ -161,23 +379,21 @@ bool Prober::ProbeOneVariableInternal(BooleanVariable b) { tmp_binary_clause_ids_[std::minmax(decision.Negated(), l)] = clause_id; num_lrat_clauses_++; num_lrat_proof_clauses_ += tmp_clause_ids_.size(); - }; - for (const Literal l : new_implied_or_fixed_literals_) { - add_tmp_implication(decision, l); } - if (decision.IsNegative() && !to_fix_at_true_.empty()) { - // Redo the first pass to add the LRAT clauses b => to_fix_at_true. 
- if (!sat_solver_->ResetToLevelZero()) return false; - if (assignment_.LiteralIsAssigned(decision)) continue; - CHECK_EQ(sat_solver_->CurrentDecisionLevel(), 0); - if (sat_solver_->EnqueueDecisionAndBackjumpOnConflict( - decision.Negated()) == kUnsatTrailIndex) { - return false; - } - if (sat_solver_->ModelIsUnsat()) return false; - if (sat_solver_->CurrentDecisionLevel() == 0) continue; + if (decision.IsPositive()) { + trail_copy_->CopyTrail(); + } else if (!to_fix_at_true_.empty()) { for (const Literal l : to_fix_at_true_) { - add_tmp_implication(decision.Negated(), l); + tmp_clause_ids_.clear(); + trail_copy_->AppendClauseIdsFixing({l}, &tmp_clause_ids_, + decision.NegatedIndex(), + tmp_binary_clause_ids_); + const ClauseId clause_id = clause_id_generator_->GetNextId(); + lrat_proof_handler_->AddInferredClause(clause_id, {decision, l}, + tmp_clause_ids_); + tmp_binary_clause_ids_[std::minmax(decision, l)] = clause_id; + num_lrat_clauses_++; + num_lrat_proof_clauses_ += tmp_clause_ids_.size(); } } } @@ -980,6 +1196,8 @@ bool FailedLiteralProbing::DoOneRound(ProbingOptions options) { num_conflicts_ = 0; num_new_binary_ = 0; num_subsumed_ = 0; + num_lrat_clauses_ = 0; + num_lrat_proof_clauses_ = 0; // Reset the solver in case it was already used. if (!sat_solver_->ResetToLevelZero()) return false; @@ -1030,12 +1248,14 @@ bool FailedLiteralProbing::DoOneRound(ProbingOptions options) { } binary_clause_extracted_.assign(trail_.Index(), false); + trail_implication_clauses_.assign(trail_.Index(), {kNoClauseId, false}); while (!time_limit_->LimitReached() && time_limit_->GetElapsedDeterministicTime() <= limit) { // We only enqueue literal at level zero if we don't use "tree look". 
if (!options.use_tree_look) { if (!sat_solver_->BacktrackAndPropagateReimplications(0)) return false; + DeleteTemporaryLratImplicationsAfterBacktrack(); } // Probing works by taking a series of decisions, and by analyzing what @@ -1073,6 +1293,7 @@ bool FailedLiteralProbing::DoOneRound(ProbingOptions options) { const int new_level = sat_solver_->CurrentDecisionLevel(); if (new_level == 0) continue; const Literal last_decision = trail_.Decisions()[new_level - 1].literal; + binary_clauses_to_extract_.clear(); for (int i = first_new_trail_index; i < trail_.Index(); ++i) { const Literal l = trail_[i]; if (l == last_decision) continue; @@ -1096,7 +1317,9 @@ bool FailedLiteralProbing::DoOneRound(ProbingOptions options) { // propagation for one literal we do finish it before calling again // the binary propagation. if (trail_.AssignmentType(l.Variable()) != binary_propagator_id_) { - MaybeExtractImplication(last_decision, l); + if (ShouldExtractImplication(l)) { + binary_clauses_to_extract_.push_back(l); + } } } else { // If we don't extract binary, we don't need to explore any of @@ -1104,6 +1327,9 @@ bool FailedLiteralProbing::DoOneRound(ProbingOptions options) { processed_.Set(l.Index()); } } + if (!binary_clauses_to_extract_.empty()) { + ExtractImplications(last_decision, binary_clauses_to_extract_); + } if (options.subsume_with_binary_clause) { SubsumeWithBinaryClauseUsingBlockingLiteral(last_decision); @@ -1112,6 +1338,7 @@ bool FailedLiteralProbing::DoOneRound(ProbingOptions options) { if (!sat_solver_->ResetToLevelZero()) return false; if (!ProcessLiteralsToFix()) return false; + DeleteTemporaryLratImplicationsAfterBacktrack(); clause_manager_->CleanUpWatchers(); // Display stats. 
@@ -1129,6 +1356,8 @@ bool FailedLiteralProbing::DoOneRound(ProbingOptions options) { << " explicit_fix:" << num_explicit_fix_ << " num_conflicts:" << num_conflicts_ << " new_binary_clauses: " << num_new_binary_ + << " num_lrat_clauses: " << num_lrat_clauses_ + << " num_lrat_proof_clauses: " << num_lrat_proof_clauses_ << " subsumed: " << num_subsumed_ << " dtime: " << time_diff << " wtime: " << wall_timer.Get() << (limit_reached ? " (Aborted)" : ""); return sat_solver_->FinishPropagation(); @@ -1190,8 +1419,10 @@ bool FailedLiteralProbing::ComputeNextDecisionInOrder( // This is a backtrack marker, go back one level. CHECK_GT(sat_solver_->CurrentDecisionLevel(), 0); if (!sat_solver_->BacktrackAndPropagateReimplications( - sat_solver_->CurrentDecisionLevel() - 1)) + sat_solver_->CurrentDecisionLevel() - 1)) { return false; + } + DeleteTemporaryLratImplicationsAfterBacktrack(); continue; } const Literal candidate(index); @@ -1227,6 +1458,7 @@ bool FailedLiteralProbing::GetNextDecisionInNoParticularOrder( if (!sat_solver_->BacktrackAndPropagateReimplications(level - 1)) { return false; } + DeleteTemporaryLratImplicationsAfterBacktrack(); } return true; } @@ -1263,6 +1495,7 @@ bool FailedLiteralProbing::EnqueueDecisionAndBackjumpOnConflict( ClauseId fixed_decision_unit_id = kNoClauseId; auto conflict_callback = [&](ClauseId conflict_id, absl::Span conflict_clause) { + DeleteTemporaryLratImplicationsAfterBacktrack(); if (fixed_decision_unit_id != kNoClauseId) return; tmp_clause_ids_.clear(); clause_manager_->AppendClauseIdsFixing(conflict_clause, &tmp_clause_ids_, @@ -1272,6 +1505,8 @@ bool FailedLiteralProbing::EnqueueDecisionAndBackjumpOnConflict( lrat_proof_handler_->AddInferredClause(fixed_decision_unit_id, {Literal(next_decision).Negated()}, tmp_clause_ids_); + num_lrat_clauses_++; + num_lrat_proof_clauses_ += tmp_clause_ids_.size(); }; first_new_trail_index = sat_solver_->EnqueueDecisionAndBackjumpOnConflict( Literal(next_decision), @@ -1282,6 +1517,8 @@ bool 
FailedLiteralProbing::EnqueueDecisionAndBackjumpOnConflict( if (first_new_trail_index == kUnsatTrailIndex) return false; binary_clause_extracted_.resize(first_new_trail_index); binary_clause_extracted_.resize(trail_.Index(), false); + trail_implication_clauses_.resize(first_new_trail_index); + trail_implication_clauses_.resize(trail_.Index(), {kNoClauseId, false}); // This is tricky, depending on the parameters, and for integer problem, // EnqueueDecisionAndBackjumpOnConflict() might create new Booleans. @@ -1341,15 +1578,19 @@ bool FailedLiteralProbing::EnqueueDecisionAndBackjumpOnConflict( return true; } -void FailedLiteralProbing::MaybeExtractImplication(const Literal last_decision, - const Literal l) { +bool FailedLiteralProbing::ShouldExtractImplication(const Literal l) { const auto& info = trail_.Info(l.Variable()); - if (binary_clause_extracted_[info.trail_index]) return; + if (binary_clause_extracted_[info.trail_index]) return false; binary_clause_extracted_[info.trail_index] = true; // If the variable was true at level zero, this is not necessary. - if (info.level == 0) return; + return info.level > 0; +} + +void FailedLiteralProbing::ExtractImplication(const Literal last_decision, + const Literal l, bool lrat_only) { + const auto& info = trail_.Info(l.Variable()); // TODO(user): Think about trying to extract clause that will not // get removed by transitive reduction later. If we can both extract @@ -1360,7 +1601,6 @@ void FailedLiteralProbing::MaybeExtractImplication(const Literal last_decision, // of all literals in the reason for this propagation. And use this // as a reason for later hyber binary resolution. Like we do when // this clause subsumes the reason. 
- ++num_new_binary_; DCHECK(assignment_.LiteralIsTrue(l)); CHECK_NE(l.Variable(), last_decision.Variable()); @@ -1375,19 +1615,97 @@ void FailedLiteralProbing::MaybeExtractImplication(const Literal last_decision, if (lrat_proof_handler_ != nullptr) { clause_id = clause_id_generator_->GetNextId(); tmp_clause_ids_.clear(); - clause_manager_->AppendClauseIdsFixing({l}, &tmp_clause_ids_, - last_decision); + clause_manager_->AppendClauseIdsFixing( + {l}, &tmp_clause_ids_, last_decision, + [&](int /*level*/, int trail_index) { + return trail_implication_clauses_[trail_index].first; + }); + // Cache this LRAT clause so that it can be reused for later proofs. Do this + // only if `l` is propagated by the last decision, so that this cache entry + // remains valid when we backtrack later decisions. + if (info.level == sat_solver_->CurrentDecisionLevel()) { + trail_implication_clauses_[info.trail_index] = {clause_id, lrat_only}; + } lrat_proof_handler_->AddInferredClause( clause_id, {last_decision.Negated(), l}, tmp_clause_ids_); + num_lrat_clauses_++; + num_lrat_proof_clauses_ += tmp_clause_ids_.size(); } + if (lrat_only) return; // Each time we extract a binary clause, we change the reason in the trail. // This is important as it will allow us to remove clauses that are now // subsumed by this binary, even if it was a reason. + ++num_new_binary_; CHECK(implication_graph_->AddBinaryClauseAndChangeReason( clause_id, l, last_decision.Negated())); } +void FailedLiteralProbing::MaybeExtractImplication(const Literal last_decision, + const Literal l) { + if (ShouldExtractImplication(l)) { + ExtractImplication(last_decision, l); + } +} + +void FailedLiteralProbing::ExtractImplications( + Literal last_decision, absl::Span literals) { + // 1. Find all the literals which appear in the expansion of the reasons of + // all the `literals` and collect them in reverse trail order in + // `tmp_marked_literals_`. + // 1.a Put the `literals` in a heap. 
+ tmp_mark_.ClearAndResize(BooleanVariable(trail_.NumVariables())); + tmp_heap_.clear(); + const VariablesAssignment& assignment = trail_.Assignment(); + for (const Literal lit : literals) { + CHECK(assignment.LiteralIsAssigned(lit)); + tmp_mark_.Set(lit.Variable()); + tmp_heap_.push_back(trail_.Info(lit.Variable()).trail_index); + } + absl::c_make_heap(tmp_heap_); + + // 1.b Expand the reasons of all the literals in the heap and add the reason + // literals back in the heap. Collect the literals in the order they are + // popped from the heap in `tmp_marked_literals_`. + tmp_marked_literals_.clear(); + while (!tmp_heap_.empty()) { + absl::c_pop_heap(tmp_heap_); + const int trail_index = tmp_heap_.back(); + tmp_heap_.pop_back(); + const Literal marked_literal = trail_[trail_index]; + tmp_marked_literals_.push_back(marked_literal); + + DCHECK_GT(trail_.Info(marked_literal.Variable()).level, 0); + DCHECK_NE(trail_.AssignmentType(marked_literal.Variable()), + AssignmentType::kSearchDecision); + + for (const Literal literal : trail_.Reason(marked_literal.Variable())) { + const BooleanVariable var = literal.Variable(); + const AssignmentInfo& info = trail_.Info(var); + if (info.level > 0 && !tmp_mark_[var] && + trail_.AssignmentType(var) != AssignmentType::kSearchDecision) { + tmp_mark_.Set(var); + tmp_heap_.push_back(info.trail_index); + absl::c_push_heap(tmp_heap_); + } + } + } + + // 2. Add an LRAT implication "last_decision => l" for each literal l in + // `tmp_marked_literals_`, in increasing trail index order. Thanks to the + // cache in ExtractImplication(), the proof for each new implication has + // the same size as its reason. Also add a "real" implication in the binary + // implication graph if `l` is in `literals`. 
+ tmp_mark_.ClearAndResize(BooleanVariable(trail_.NumVariables())); + for (const Literal lit : literals) { + tmp_mark_.Set(lit.Variable()); + } + for (int i = tmp_marked_literals_.size() - 1; i >= 0; --i) { + const bool lrat_only = !tmp_mark_[tmp_marked_literals_[i].Variable()]; + ExtractImplication(last_decision, tmp_marked_literals_[i], lrat_only); + } +} + // If we can extract a binary clause that subsume the reason clause, we do add // the binary and remove the subsumed clause. // @@ -1479,6 +1797,8 @@ void FailedLiteralProbing::AddFailedLiteralToFix(const Literal literal) { lrat_proof_handler_->AddInferredClause(unit_id, {literal.Negated()}, tmp_clause_ids_); to_fix_unit_id_.push_back({unit_id}); + num_lrat_clauses_++; + num_lrat_proof_clauses_ += tmp_clause_ids_.size(); } // Fixes all the literals in to_fix_, and finish propagation. @@ -1498,5 +1818,16 @@ bool FailedLiteralProbing::ProcessLiteralsToFix() { return sat_solver_->FinishPropagation(); } +void FailedLiteralProbing::DeleteTemporaryLratImplicationsAfterBacktrack() { + if (lrat_proof_handler_ == nullptr) return; + for (int i = trail_.Index(); i < trail_implication_clauses_.size(); ++i) { + auto [clause_id, is_temporary] = trail_implication_clauses_[i]; + if (is_temporary) { + lrat_proof_handler_->DeleteClause(clause_id, {}); + } + } + trail_implication_clauses_.resize(trail_.Index(), {kNoClauseId, false}); +} + } // namespace sat } // namespace operations_research diff --git a/ortools/sat/probing.h b/ortools/sat/probing.h index 5ef528c409d..d42750653db 100644 --- a/ortools/sat/probing.h +++ b/ortools/sat/probing.h @@ -43,9 +43,12 @@ namespace operations_research { namespace sat { +class TrailCopy; + class Prober { public: explicit Prober(Model* model); + ~Prober(); // Fixes Booleans variables to true/false and see what is propagated. 
This // can: @@ -158,6 +161,7 @@ class Prober { ClauseManager* clause_manager_; ClauseIdGenerator* clause_id_generator_; LratProofHandler* lrat_proof_handler_; + TrailCopy* trail_copy_; const bool drat_enabled_; // To detect literal x that must be true because b => x and not(b) => x. @@ -353,9 +357,19 @@ class FailedLiteralProbing { // do not contain last_decision.Negated(). void MaybeSubsumeWithBinaryClause(Literal last_decision, Literal l); - // If not already done, add last_decision => l to the repository. + // Functions to add "last_decision => l" to the repository if not already + // done. The Maybe() version just calls Extract() if ShouldExtract() is true. + bool ShouldExtractImplication(Literal l); + void ExtractImplication(Literal last_decision, Literal l, + bool lrat_only = false); void MaybeExtractImplication(Literal last_decision, Literal l); + // Extracts an implication "`last_decision` => l" for each literal l in + // `literals`. This is more efficient than calling ExtractImplication() for + // each literal when LRAT is enabled. + void ExtractImplications(Literal last_decision, + absl::Span literals); + // Inspect the watcher list for last_decision, If we have a blocking // literal at true (implied by last decision), then we have subsumptions. // @@ -375,6 +389,10 @@ class FailedLiteralProbing { // Fixes all the literals in to_fix_, and finish propagation. bool ProcessLiteralsToFix(); + // Deletes the temporary LRAT clauses in trail_implication_clauses_ for all + // trail indices greater than the current trail index. + void DeleteTemporaryLratImplicationsAfterBacktrack(); + SatSolver* sat_solver_; BinaryImplicationGraph* implication_graph_; TimeLimit* time_limit_; @@ -402,13 +420,24 @@ class FailedLiteralProbing { std::vector to_fix_; // For each literal in to_fix_, the ID of the corresponding LRAT unit clause. std::vector to_fix_unit_id_; + // The literals for which we want to extract "last_decision => l" clauses. 
+ std::vector binary_clauses_to_extract_; // For each literal 'l' in the trail, whether a binary clause "d => l" has // been extracted, with 'd' the decision at the same level as 'l'. std::vector binary_clause_extracted_; - // Temporary vector used for LRAT proofs. + // For each literal on the trail, the ID of the LRAT clause stating that this + // literal is implied by the previous decisions on the trail (or kNoClauseId + // if there is no such clause), plus a Boolean indicating whether this clause + // is temporary (i.e., is not an extracted binary clause). + std::vector> trail_implication_clauses_; + + // Temporary data structures used for LRAT proofs. std::vector tmp_clause_ids_; + SparseBitset tmp_mark_; + std::vector tmp_heap_; + std::vector tmp_marked_literals_; // Stats. int64_t num_probed_ = 0; @@ -416,6 +445,8 @@ class FailedLiteralProbing { int64_t num_conflicts_ = 0; int64_t num_new_binary_ = 0; int64_t num_subsumed_ = 0; + int64_t num_lrat_clauses_ = 0; + int64_t num_lrat_proof_clauses_ = 0; }; } // namespace sat diff --git a/ortools/sat/python/cp_model.py b/ortools/sat/python/cp_model.py index 44eab008b8d..822dcef2200 100644 --- a/ortools/sat/python/cp_model.py +++ b/ortools/sat/python/cp_model.py @@ -17,10 +17,10 @@ The following two sections describe the main methods for building and solving CP-SAT models. -* [`CpModel`](#cp_model.CpModel): Methods for creating -models, including variables and constraints. -* [`CPSolver`](#cp_model.CpSolver): Methods for solving -a model and evaluating solutions. +* [`CpModel`](#cp_model.CpModel): Methods for creating models, including + variables and constraints. +* [`CpSolver`](#cp_model.CpSolver): Methods for solving a model and evaluating + solutions. The following methods implement callbacks that the solver calls each time it finds a new solution. 
diff --git a/ortools/sat/sat_inprocessing.cc b/ortools/sat/sat_inprocessing.cc index d2c85d929ce..69769ce84b9 100644 --- a/ortools/sat/sat_inprocessing.cc +++ b/ortools/sat/sat_inprocessing.cc @@ -15,6 +15,7 @@ #include #include +#include #include #include #include @@ -91,9 +92,21 @@ bool Inprocessing::PresolveLoop(SatPresolveOptions options) { // Mainly useful for development. double probing_time = 0.0; - const bool log_info = VLOG_IS_ON(2); const bool log_round_info = VLOG_IS_ON(2); + // In the presolve, we run this first as some preprocessing technique might + // change the problem clauses in such a way that make our heuristic gate + // detection miss some gates. Also, when this applies the reduction in problem + // size is huge, so it is just faster to run this early. + // + // TODO(user): If we remove fixed variables, on some problem like: + // ~/SAT24/f0426369f61595aee97055965ee7e6a3-hwmcc12miters-xits-iso-6s111.sanitized.cnf.xz + // we don't detect as much equivalences... Understand why. I suspect it is due + // to the heuristic for ITE gate that combine two clauses of size 3 to get a + // truth table on 4 variables. If one of them become of size 2, we might miss + // it. Still we should be more robust to stuff like this. + RETURN_IF_FALSE(congruence_closure_->DoOneRound(log_round_info)); + // We currently do the transformations in a given order and restart each time // we did something to make sure that the earlier step cannot strengthen more. // This might not be the best, but it is really good during development phase @@ -159,13 +172,6 @@ bool Inprocessing::PresolveLoop(SatPresolveOptions options) { implication_graph_->RemoveAllRedundantVariables(&postsolve_->clauses); } - // TODO(user): Think about the right order in this function. 
- if (params_.inprocessing_use_congruence_closure()) { - RETURN_IF_FALSE(RemoveFixedAndEquivalentVariables(log_round_info)); - RETURN_IF_FALSE(implication_graph_->RemoveDuplicatesAndFixedVariables()); - RETURN_IF_FALSE(congruence_closure_->DoOneRound(log_info)); - } - // TODO(user): Combine the two? this way we don't create a full literal <-> // clause graph twice. It might make sense to reach the BCE fix point which // is unique before each variable elimination. @@ -1974,6 +1980,24 @@ void GateCongruenceClosure::AddToTruthTable( } } +namespace { + +// Given a set of feasible assignment of two variables, recover the +// corresponding binary clauses. +void AppendBinaryClausesFromTruthTable( + absl::Span vars, SmallBitset table, + std::vector>* binary_used) { + DCHECK_EQ(vars.size(), 2); + for (int b = 0; b < 4; ++b) { + if (((table >> b) & 1) == 0) { + binary_used->emplace_back(Literal(vars[0], (b & 1) == 0), + Literal(vars[1], (b & 2) == 0)); + } + } +} + +} // namespace + // Note that this is the "hot" part of the algo, once we have the and gates, // the congruence closure should be quite fast. void GateCongruenceClosure::ExtractAndGatesAndFillShortTruthTables( @@ -1987,6 +2011,51 @@ void GateCongruenceClosure::ExtractAndGatesAndFillShortTruthTables( tmp_ids_.clear(); tmp_clauses_.clear(); + // We deal with binary clause a bit differently. + // + // Tricky: We still include binary clause between fixed literal that haven't + // been cleaned up yet, as these are needed to really recover all gates. + // + // TODO(user): Ideally the detection code should be robust to that. 
+ int num_fn1 = 0; + std::vector> binary_used; + for (LiteralIndex a(0); a < implication_graph_->literal_size(); ++a) { + if (implication_graph_->IsRedundant(Literal(a))) continue; + for (const Literal b : implication_graph_->Implications(Literal(a))) { + if (implication_graph_->IsRedundant(b)) continue; + + std::array key2; + SmallBitset bitmask; + FillKeyAndBitmask({Literal(a).Negated(), b}, absl::MakeSpan(key2), + bitmask); + auto [it, inserted] = ids2_.insert({key2, bitmask}); + if (!inserted) { + const SmallBitset old = it->second; + it->second &= bitmask; + if (it->second != old) { + // This is either fixing or equivalence! + // + // Doing a run of DetectEquivalences() should fix that but then + // new clause of size 3 might become binary, and the fix point might + // require a lot of step. So it is important to do it here. + const SmallBitset bitset2 = it->second; + if (lrat_proof_handler_ != nullptr) { + binary_used.clear(); + AppendBinaryClausesFromTruthTable(key2, bitset2, &binary_used); + } + // If we are equivalent, we always have 2 functions. + // But if we fix a variable (like bitset2 = 0011) we just have one. + const int num_added = + ProcessTruthTable(key2, bitset2, {}, binary_used); + CHECK_GE(num_added, 1) << std::bitset<4>(bitset2); + num_fn1 += num_added; + } + } + } + } + timer.AddCounter("t2", ids2_.size()); + timer.AddCounter("fn1", num_fn1); + std::vector candidates; for (SatClause* clause : clause_manager_->AllClausesInCreationOrder()) { if (timer.WorkLimitIsReached()) break; @@ -1994,6 +2063,12 @@ void GateCongruenceClosure::ExtractAndGatesAndFillShortTruthTables( if (clause->size() == 3) { AddToTruthTable<3>(clause, ids3_); + + // The AND gate of size 3 should be detected by the short table code, no + // need to do the algo here which should be slower. + // + // TODO(user): This seems to be less strong. I think we have some bug + // in our fixed point loop when we fix variables. 
} else if (clause->size() == 4) { AddToTruthTable<4>(clause, ids4_); } else if (clause->size() == 5) { @@ -2065,7 +2140,6 @@ void GateCongruenceClosure::ExtractAndGatesAndFillShortTruthTables( } } - // This relies on having no duplicates. for (const Literal target : candidates) { if (!(*is_potential_target)[target]) continue; @@ -2089,10 +2163,11 @@ void GateCongruenceClosure::ExtractAndGatesAndFillShortTruthTables( std::swap(is_potential_target, next_is_potential_target); // Target should imply all other literal in the base clause to false. - if (count != clause_size - 1) continue; + if (count < clause_size - 1) continue; - // We have an and_gate ! - // Double-check no duplicate. + // Using only the "count" require that there is no duplicates. But + // depending when this is run in the inprocessing loop, we might have + // some. Redo a pass to double check. int second_count = 0; for (const Literal implied : implications) { if (implied.Variable() == target.Variable()) continue; @@ -2101,8 +2176,14 @@ void GateCongruenceClosure::ExtractAndGatesAndFillShortTruthTables( marked_.Clear(implied.Negated()); } } - CHECK_EQ(count, second_count); + // Restore is_clause_literal. + for (const Literal l : clause->AsSpan()) { + marked_.Set(l); + } + if (second_count != clause_size - 1) continue; + + // We have an and_gate ! // Add the detected gate (its inputs are the negation of each clause // literal other than the target). gates_target_.push_back(target.Index()); @@ -2115,6 +2196,14 @@ void GateCongruenceClosure::ExtractAndGatesAndFillShortTruthTables( } if (lrat_proof_handler_ != nullptr) { gates_clauses_.Add({clause}); + + // Create temporary size 2 clauses for the needed binary. + for (const Literal l : clause->AsSpan()) { + if (l == target) continue; + tmp_binary_clauses_.emplace_back( + SatClause::Create({target.Negated(), l.Negated()})); + gates_clauses_.AppendToLastVector(tmp_binary_clauses_.back().get()); + } } // Canonicalize. 
@@ -2130,15 +2219,44 @@ void GateCongruenceClosure::ExtractAndGatesAndFillShortTruthTables( timer.AddCounter("and_gates", gates_inputs_.size()); } +int GateCongruenceClosure::CanonicalizeShortGate(GateId id) { + // Deals with fixed input variable. + absl::Span inputs = gates_inputs_[id]; + int new_size = inputs.size(); + + for (int i = 0; i < new_size;) { + if (assignment_.LiteralIsAssigned(Literal(inputs[i]))) { + new_size = + RemoveFixedInput(i, assignment_.LiteralIsTrue(Literal(inputs[i])), + inputs.subspan(0, new_size), gates_type_[id]); + } else { + ++i; + } + } + + // Now canonicalize. + new_size = CanonicalizeFunctionTruthTable( + gates_target_[id], inputs.subspan(0, new_size), gates_type_[id]); + + // Resize and return. + if (new_size < gates_inputs_[id].size()) { + gates_inputs_.Shrink(id, new_size); + } + DCHECK_EQ(gates_type_[id] >> (1 << (gates_inputs_[id].size())), 0); + return new_size; +} + int GateCongruenceClosure::ProcessTruthTable( absl::Span inputs, SmallBitset truth_table, - absl::Span ids_for_proof) { + absl::Span ids_for_proof, + absl::Span> binary_used) { int num_detected = 0; for (int i = 0; i < inputs.size(); ++i) { if (!IsFunction(i, inputs.size(), truth_table)) continue; const int num_bits = inputs.size() - 1; ++num_detected; + const GateId new_id(gates_target_.size()); gates_target_.push_back(Literal(inputs[i], true)); gates_inputs_.Add({}); for (int j = 0; j < inputs.size(); ++j) { @@ -2178,7 +2296,14 @@ int GateCongruenceClosure::ProcessTruthTable( for (const TruthTableId id : ids_for_proof) { gates_clauses_.AppendToLastVector(truth_tables_clauses_[id]); } + for (const auto [a, b] : binary_used) { + tmp_binary_clauses_.emplace_back(SatClause::Create({a, b})); + gates_clauses_.AppendToLastVector(tmp_binary_clauses_.back().get()); + } } + + // Canonicalize right away to deal with corner case. 
+ CanonicalizeShortGate(new_id); } return num_detected; } @@ -2266,6 +2391,44 @@ void GateCongruenceClosure::ExtractShortGates(PresolveTimer& timer) { } }; + int num_merges2 = 0; + const auto merge2_into_n = + [this, &num_merges2]( + absl::Span inputs, SmallBitset& truth_table, + std::vector>& binary_used) { + for (int i = 0; i < inputs.size(); ++i) { + for (int j = i + 1; j < inputs.size(); ++j) { + std::array key2 = {inputs[i], inputs[j]}; + const auto it = ids2_.find(key2); + if (it == ids2_.end()) continue; + + const SmallBitset bitset2 = it->second; + SmallBitset bitset = bitset2; + int new_size = 0; + std::vector key(inputs.size()); + key[new_size++] = inputs[i]; + key[new_size++] = inputs[j]; + for (int t = 0; t < inputs.size(); ++t) { + if (t != i && t != j) { + key[new_size] = inputs[t]; + bitset |= bitset << (1 << new_size); // EXTEND + ++new_size; + } + } + CanonicalizeTruthTable(absl::MakeSpan(key), + bitset); + CHECK_EQ(inputs, absl::MakeSpan(key)); + + const SmallBitset old = truth_table; + truth_table &= bitset; + if (old != truth_table) { + AppendBinaryClausesFromTruthTable(key2, bitset2, &binary_used); + ++num_merges2; + } + } + } + }; + // Starts by processing all existing tables. // // TODO(user): Since we deal with and_gates differently, do we need to look at @@ -2273,12 +2436,19 @@ void GateCongruenceClosure::ExtractShortGates(PresolveTimer& timer) { // kind of Boolean function on two inputs (and_gates, with any possible // negation) and xor_gate. std::vector ids_for_proof; + std::vector> binary_used; for (TruthTableId t_id(0); t_id < truth_tables_inputs_.size(); ++t_id) { ids_for_proof.clear(); ids_for_proof.push_back(t_id); const absl::Span inputs = truth_tables_inputs_[t_id]; SmallBitset truth_table = truth_tables_bitset_[t_id]; + // TODO(user): it is unlcear why this is useful. Understand a bit more the + // set of possible Boolean functions of 2 and 3 variables and their clause + // encoding. 
+ binary_used.clear(); + merge2_into_n(inputs, truth_table, binary_used); + // Merge any size-3 table included inside a size-4 gate. // TODO(user): do it for larger gate too ? if (inputs.size() == 4) { @@ -2287,7 +2457,7 @@ void GateCongruenceClosure::ExtractShortGates(PresolveTimer& timer) { ++num_tables[inputs.size()]; const int num_detected = - ProcessTruthTable(inputs, truth_table, ids_for_proof); + ProcessTruthTable(inputs, truth_table, ids_for_proof, binary_used); num_functions[inputs.size() - 1] += num_detected; // If this is not a function and of size 3, lets try to combine it with @@ -2344,8 +2514,11 @@ void GateCongruenceClosure::ExtractShortGates(PresolveTimer& timer) { ++num_combinations; ++num_tables[4]; ids_for_proof.clear(); + binary_used.clear(); + merge2_into_n(key, bitmask, binary_used); merge3_into_4(key, bitmask, ids_for_proof); - num_functions[3] += ProcessTruthTable(key, bitmask, ids_for_proof); + num_functions[3] += + ProcessTruthTable(key, bitmask, ids_for_proof, binary_used); } } } @@ -2353,12 +2526,13 @@ void GateCongruenceClosure::ExtractShortGates(PresolveTimer& timer) { timer.AddCounter("combine3", num_combinations); timer.AddCounter("merges", num_merges); + timer.AddCounter("merges2", num_merges2); // Note that we only display non-zero counters. 
- for (int i = 2; i < num_tables.size(); ++i) { + for (int i = 0; i < num_tables.size(); ++i) { timer.AddCounter(absl::StrCat("t", i), num_tables[i]); } - for (int i = 2; i < num_functions.size(); ++i) { + for (int i = 0; i < num_functions.size(); ++i) { timer.AddCounter(absl::StrCat("fn", i), num_functions[i]); } } @@ -2368,13 +2542,14 @@ namespace { class LratGateCongruenceHelper { public: LratGateCongruenceHelper( - const BinaryImplicationGraph* implication_graph, + const Trail* trail, const BinaryImplicationGraph* implication_graph, ClauseManager* clause_manager, ClauseIdGenerator* clause_id_generator, LratProofHandler* lrat_proof_handler, const util_intops::StrongVector& gates_target, const CompactVectorVector& gates_clauses, DenseConnectedComponentsFinder& union_find) - : implication_graph_(implication_graph), + : trail_(trail), + implication_graph_(implication_graph), clause_manager_(clause_manager), clause_id_generator_(clause_id_generator), lrat_proof_handler_(lrat_proof_handler), @@ -2519,6 +2694,8 @@ class LratGateCongruenceHelper { void AddGateClausesToTemporaryProof(GateId id) { CHECK(lrat_proof_handler_ != nullptr); + const auto& assignment = trail_->Assignment(); + std::vector fixed; for (const SatClause* clause : gates_clauses_[id]) { // We rewrite each clause using new equivalences found. marked_.ResetAllToFalse(); @@ -2528,6 +2705,9 @@ class LratGateCongruenceHelper { bool some_change = false; for (const Literal lit : clause->AsSpan()) { const Literal rep = GetRepresentativeWithProofSupport(lit); + if (assignment.LiteralIsAssigned(rep)) { + fixed.push_back(rep); + } if (rep != lit) { some_change = true; // We need not(rep) => not(lit). This should be equivalent to @@ -2547,7 +2727,12 @@ class LratGateCongruenceHelper { // If this is the case, we shouldn't need it for the proof. if (clause_is_trivial) continue; - ClauseId new_id = clause_manager_->GetClauseId(clause); + ClauseId new_id = + clause->size() == 2 + ? 
implication_graph_->GetClauseId(clause->FirstLiteral(), + clause->SecondLiteral()) + : clause_manager_->GetClauseId(clause); + CHECK_NE(new_id, kNoClauseId); if (some_change) { // If there is some change, we add a temporary clause id with the // proof to go from the original clause to this one. @@ -2563,81 +2748,32 @@ class LratGateCongruenceHelper { tmp_proof_clauses_.Add(tmp_literals_); } - // Hacky: If we have a single clause, then there is a chance this was - // an and_gate. We must add all the implications target => inputs[i]. - // Note that the inputs are the negation of the literals in the unique - // clause, so we really have target => not(lit) for lit in clause. - // which gives (not(target), not(lit)) for the needed clause. - if (gates_clauses_[id].size() == 1) { - // Tricky: The target might have been negated ! so we recover it from - // the unique clause. - const Literal current = Literal(gates_target_[id]); - LiteralIndex real_target = kNoLiteralIndex; - for (const Literal lit : gates_clauses_[id][0]->AsSpan()) { - if (current.Variable() == lit.Variable()) { - real_target = lit.Index(); - } - } - if (real_target == kNoLiteralIndex) return; - - const Literal neg_target = Literal(real_target).Negated(); - const Literal neg_target_rep = - GetRepresentativeWithProofSupport(neg_target); - for (const Literal lit : gates_clauses_[id][0]->AsSpan()) { - const Literal neg_lit = lit.Negated(); - if (neg_lit == neg_target) continue; - - // Skip trivial clause after rewrite. - const Literal neg_lit_rep = GetRepresentativeWithProofSupport(neg_lit); - if (neg_lit_rep == neg_target_rep.Negated()) continue; - - ClauseId new_id = implication_graph_->GetClauseId(neg_target, neg_lit); - if (new_id == kNoClauseId) { - // We where likely not a bool_and to start with, so we shouldn't need - // these clauses. 
- break; - } - - if (neg_lit != neg_lit_rep || neg_target != neg_target_rep) { - tmp_clause_ids_.clear(); - tmp_index_to_delete_.push_back(tmp_proof_clauses_.size()); - if (neg_lit != neg_lit_rep) { - tmp_clause_ids_.push_back( - GetLiteralImpliesRepresentativeClauseId(neg_lit)); - } - if (neg_target != neg_target_rep) { - tmp_clause_ids_.push_back( - GetLiteralImpliesRepresentativeClauseId(neg_target)); - } - tmp_clause_ids_.push_back(new_id); - - new_id = clause_id_generator_->GetNextId(); - lrat_proof_handler_->AddInferredClause( - new_id, {neg_target_rep, neg_lit_rep}, tmp_clause_ids_); - } - - tmp_proof_clauses_id_.push_back(new_id); - CHECK_NE(neg_target_rep, neg_lit_rep); - tmp_proof_clauses_.Add({neg_target_rep, neg_lit_rep}); + // Add size1 clauses. + gtl::STLSortAndRemoveDuplicates(&fixed); + for (const Literal lit : fixed) { + if (assignment.LiteralIsAssigned(lit)) { + tmp_proof_clauses_id_.push_back( + trail_->GetUnitClauseId(lit.Variable())); + tmp_proof_clauses_.Add( + {assignment.LiteralIsTrue(lit) ? lit : lit.Negated()}); } } } // Same as AddAndGateTargetImplication() but with truth table based gates. - std::pair AddShortGateTargetEquivalence( - GateId gate_a_id, GateId gate_b_id) { + std::pair AddShortGateEquivalence( + Literal rep_a, Literal rep_b, absl::Span gate_ids) { + if (lrat_proof_handler_ == nullptr) return {kNoClauseId, kNoClauseId}; + // Just add all clauses from both gates. // But note that we need to remap them. ClearTemporaryProof(); - AddGateClausesToTemporaryProof(gate_a_id); - AddGateClausesToTemporaryProof(gate_b_id); + for (const GateId id : gate_ids) { + AddGateClausesToTemporaryProof(id); + } // All clauses are now in tmp_proof_clauses_/tmp_proof_clauses_id_. // We can add both implications with proof. 
- const Literal a = Literal(gates_target_[gate_a_id]); - const Literal b = Literal(gates_target_[gate_b_id]); - const Literal rep_a = GetRepresentativeWithProofSupport(a); - const Literal rep_b = GetRepresentativeWithProofSupport(b); DCHECK(IsRepresentative(rep_a)); DCHECK(IsRepresentative(rep_b)); CHECK_NE(rep_a, rep_b); @@ -2677,6 +2813,7 @@ class LratGateCongruenceHelper { } ClauseId ProofForFixingLiteral(Literal to_fix, GateId id) { + if (lrat_proof_handler_ == nullptr) return kNoClauseId; CHECK(IsRepresentative(to_fix)); ClearTemporaryProof(); AddGateClausesToTemporaryProof(id); @@ -2766,6 +2903,7 @@ class LratGateCongruenceHelper { } } + const Trail* trail_; const BinaryImplicationGraph* implication_graph_; ClauseManager* clause_manager_; ClauseIdGenerator* clause_id_generator_; @@ -2815,6 +2953,10 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { gates_type_.clear(); gates_clauses_.clear(); + // Lets release the memory on exit. + CHECK(tmp_binary_clauses_.empty()); + absl::Cleanup cleanup = [this] { tmp_binary_clauses_.clear(); }; + ExtractAndGatesAndFillShortTruthTables(timer); ExtractShortGates(timer); @@ -2847,7 +2989,7 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { input_literals_to_gate.ResetFromTranspose(gates_inputs_, num_literals); LratGateCongruenceHelper lrat_helper( - implication_graph_, clause_manager_, clause_id_generator_, + trail_, implication_graph_, clause_manager_, clause_id_generator_, lrat_proof_handler_, gates_target_, gates_clauses_, union_find); // Starts with all gates in the queue. @@ -2856,10 +2998,108 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { std::vector queue(num_gates); for (GateId id(0); id < num_gates; ++id) queue[id.value()] = id; - // Main loop. 
int num_units = 0; - int num_processed = 0; + const auto fix_literal = [&, this](Literal to_fix, + absl::Span clause_ids) { + if (assignment_.LiteralIsTrue(to_fix)) return true; + if (!clause_manager_->InprocessingFixLiteral(to_fix, clause_ids)) { + return false; + } + + ++num_units; + for (const GateId gate_id : input_literals_to_gate[to_fix]) { + if (in_queue[gate_id.value()]) continue; + queue.push_back(gate_id); + in_queue[gate_id.value()] = true; + } + for (const GateId gate_id : input_literals_to_gate[to_fix.Negated()]) { + if (in_queue[gate_id.value()]) continue; + queue.push_back(gate_id); + in_queue[gate_id.value()] = true; + } + return true; + }; + + const auto get_unit_clause = [this](Literal a) { + if (lrat_proof_handler_ == nullptr) return kNoClauseId; + return trail_->GetUnitClauseId(a.Variable()); + }; + int num_equivalences = 0; + const auto new_equivalence = [&, this](Literal a, Literal b, + ClauseId a_implies_b, + ClauseId b_implies_a) { + // Lets propagate fixed variable as we find new equivalences. + if (assignment_.LiteralIsAssigned(a)) { + if (assignment_.LiteralIsTrue(a)) { + return fix_literal(b, {a_implies_b, get_unit_clause(a)}); + } else { + return fix_literal(b.Negated(), {b_implies_a, get_unit_clause(a)}); + } + } else if (assignment_.LiteralIsAssigned(b)) { + if (assignment_.LiteralIsTrue(b)) { + return fix_literal(a, {b_implies_a, get_unit_clause(b)}); + } else { + return fix_literal(a.Negated(), {a_implies_b, get_unit_clause(b)}); + } + } + + ++num_equivalences; + DCHECK(!implication_graph_->IsRedundant(a)); + DCHECK(!implication_graph_->IsRedundant(b)); + if (!implication_graph_->AddBinaryClause(a_implies_b, a.Negated(), b) || + !implication_graph_->AddBinaryClause(b_implies_a, b.Negated(), a)) { + return false; + } + + for (const bool negate : {false, true}) { + const LiteralIndex x = negate ? a.NegatedIndex() : a.Index(); + const LiteralIndex y = negate ? b.NegatedIndex() : b.Index(); + const ClauseId x_implies_y = negate ? 
b_implies_a : a_implies_b; + const ClauseId y_implies_x = negate ? a_implies_b : b_implies_a; + + // Because x always refer to a and y to b, this should maintain + // the invariant root(lit) = root(lit.Negated()).Negated(). + // This is checked below. + union_find.AddEdge(x.value(), y.value()); + const LiteralIndex rep(union_find.FindRoot(y.value())); + const LiteralIndex to_merge = rep == x ? y : x; + input_literals_to_gate.MergeInto(to_merge, rep); + + if (lrat_proof_handler_ != nullptr) { + if (rep == x) { + lrat_helper.AddGateEquivalenceClauses(Literal(y), y_implies_x, + x_implies_y); + } else { + lrat_helper.AddGateEquivalenceClauses(Literal(x), x_implies_y, + y_implies_x); + } + } + + // Re-add to the queue all gates with touched inputs. + // + // TODO(user): I think we could only add the gates of "to_merge" + // before we merge. This part of the code is quite quick in any + // case. + for (const GateId gate_id : input_literals_to_gate[rep]) { + if (in_queue[gate_id.value()]) continue; + queue.push_back(gate_id); + in_queue[gate_id.value()] = true; + } + } + + // Invariant. + CHECK_EQ( + lrat_helper.GetRepresentativeWithProofSupport(a), + lrat_helper.GetRepresentativeWithProofSupport(a.Negated()).Negated()); + CHECK_EQ( + lrat_helper.GetRepresentativeWithProofSupport(b), + lrat_helper.GetRepresentativeWithProofSupport(b.Negated()).Negated()); + return true; + }; + + // Main loop. 
+ int num_processed = 0; int arity1_equivalences = 0; while (!queue.empty()) { ++num_processed; @@ -2912,7 +3152,6 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { const Literal rep_b = lrat_helper.GetRepresentativeWithProofSupport(b); if (rep_a != rep_b) { - ++num_equivalences; ClauseId rep_a_implies_rep_b = kNoClauseId; ClauseId rep_b_implies_rep_a = kNoClauseId; if (lrat_proof_handler_ != nullptr) { @@ -2922,71 +3161,16 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { rep_b_implies_rep_a = lrat_helper.AddAndGateTargetImplication(other_id, id); } else { - const auto [x, y] = - lrat_helper.AddShortGateTargetEquivalence(id, other_id); + const auto [x, y] = lrat_helper.AddShortGateEquivalence( + rep_a, rep_b, {id, other_id}); rep_a_implies_rep_b = x; rep_b_implies_rep_a = y; } } - - DCHECK(!implication_graph_->IsRedundant(rep_a)); - DCHECK(!implication_graph_->IsRedundant(rep_b)); - if (!implication_graph_->AddBinaryClause(rep_a_implies_rep_b, - rep_a.Negated(), rep_b) || - !implication_graph_->AddBinaryClause(rep_b_implies_rep_a, - rep_b.Negated(), rep_a)) { + if (!new_equivalence(rep_a, rep_b, rep_a_implies_rep_b, + rep_b_implies_rep_a)) { return false; } - - for (const bool negate : {false, true}) { - const LiteralIndex x = - negate ? rep_a.NegatedIndex() : rep_a.Index(); - const LiteralIndex y = - negate ? rep_b.NegatedIndex() : rep_b.Index(); - const ClauseId x_implies_y = - negate ? rep_b_implies_rep_a : rep_a_implies_rep_b; - const ClauseId y_implies_x = - negate ? rep_a_implies_rep_b : rep_b_implies_rep_a; - - // Because x always refer to a and y to b, this should maintain - // the invariant root(lit) = root(lit.Negated()).Negated(). - // This is checked below. - union_find.AddEdge(x.value(), y.value()); - const LiteralIndex rep(union_find.FindRoot(y.value())); - const LiteralIndex to_merge = rep == x ? 
y : x; - input_literals_to_gate.MergeInto(to_merge, rep); - - if (lrat_proof_handler_ != nullptr) { - if (rep == x) { - lrat_helper.AddGateEquivalenceClauses(Literal(y), y_implies_x, - x_implies_y); - } else { - lrat_helper.AddGateEquivalenceClauses(Literal(x), x_implies_y, - y_implies_x); - } - } - - // Re-add to the queue all gates with touched inputs. - // - // TODO(user): I think we could only add the gates of "to_merge" - // before we merge. This part of the code is quite quick in any - // case. - for (const GateId gate_id : input_literals_to_gate[rep]) { - if (in_queue[gate_id.value()]) continue; - queue.push_back(gate_id); - in_queue[gate_id.value()] = true; - } - } - - // Invariant. - CHECK_EQ( - lrat_helper.GetRepresentativeWithProofSupport(rep_a), - lrat_helper.GetRepresentativeWithProofSupport(rep_a.Negated()) - .Negated()); - CHECK_EQ( - lrat_helper.GetRepresentativeWithProofSupport(rep_b), - lrat_helper.GetRepresentativeWithProofSupport(rep_b.Negated()) - .Negated()); } break; } @@ -3016,6 +3200,8 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { // then target must be false. if (marked_[Literal(rep).Negated()]) { is_unit = true; + input_literals_to_gate.RemoveFromFutureOutput(id); + const Literal to_fix = Literal(gates_target_[id]).Negated(); if (!assignment_.LiteralIsTrue(to_fix)) { absl::InlinedVector clause_ids; @@ -3023,10 +3209,7 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { lrat_helper.AppendFixAndGateTargetClauses(id, Literal(rep), clause_ids); } - if (!clause_manager_->InprocessingFixLiteral(to_fix, - clause_ids)) { - return false; - } + if (!fix_literal(to_fix, clause_ids)) return false; } break; } @@ -3036,8 +3219,6 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { } if (is_unit) { - ++num_units; - input_literals_to_gate.RemoveFromFutureOutput(id); break; // Abort the passes loop. 
} @@ -3077,43 +3258,36 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { .Index(); } - const int new_size = CanonicalizeFunctionTruthTable( - gates_target_[id], inputs, gates_type_[id]); - if (new_size < inputs.size()) { - gates_inputs_.Shrink(id, new_size); - } - DCHECK_EQ(gates_type_[id] >> (1 << (inputs.size())), 0); - + const int new_size = CanonicalizeShortGate(id); if (new_size == 1) { // We have a function of size 1! This is an equivalence. - // - // TODO(user): deal with it. - ++arity1_equivalences; input_literals_to_gate.RemoveFromFutureOutput(id); + const Literal a = Literal(gates_target_[id]); + const Literal b = Literal(gates_inputs_[id][0]); + const Literal rep_a = lrat_helper.GetRepresentativeWithProofSupport(a); + const Literal rep_b = lrat_helper.GetRepresentativeWithProofSupport(b); + if (rep_a != rep_b) { + ++arity1_equivalences; + const auto [a_to_b, b_to_a] = + lrat_helper.AddShortGateEquivalence(rep_a, rep_b, {id}); + if (!new_equivalence(rep_a, rep_b, a_to_b, b_to_a)) { + return false; + } + } break; } else if (new_size == 0) { // We have a fixed function! Just fix the literal. + input_literals_to_gate.RemoveFromFutureOutput(id); const Literal initial_to_fix = (gates_type_[id] & 1) == 1 ? 
Literal(gates_target_[id]) : Literal(gates_target_[id]).Negated(); const Literal to_fix = lrat_helper.GetRepresentativeWithProofSupport(initial_to_fix); if (!assignment_.LiteralIsTrue(to_fix)) { - if (lrat_proof_handler_ == nullptr) { - if (!clause_manager_->InprocessingFixLiteral(to_fix, {})) { - return false; - } - } else { - const ClauseId clause_id = - lrat_helper.ProofForFixingLiteral(to_fix, id); - if (!clause_manager_->InprocessingAddUnitClause(clause_id, - to_fix)) { - return false; - } - } - ++num_units; + const ClauseId clause_id = + lrat_helper.ProofForFixingLiteral(to_fix, id); + if (!fix_literal(to_fix, {clause_id})) return false; } - input_literals_to_gate.RemoveFromFutureOutput(id); break; } } diff --git a/ortools/sat/sat_inprocessing.h b/ortools/sat/sat_inprocessing.h index a3c3d21848b..488457e44f3 100644 --- a/ortools/sat/sat_inprocessing.h +++ b/ortools/sat/sat_inprocessing.h @@ -449,6 +449,7 @@ class GateCongruenceClosure { explicit GateCongruenceClosure(Model* model) : assignment_(model->GetOrCreate()->Assignment()), sat_solver_(model->GetOrCreate()), + trail_(model->GetOrCreate()), implication_graph_(model->GetOrCreate()), clause_manager_(model->GetOrCreate()), clause_id_generator_(model->GetOrCreate()), @@ -508,9 +509,10 @@ class GateCongruenceClosure { // Detects gates encoded in the given truth table, and add them to the set // of gates. Returns the number of gate detected. - int ProcessTruthTable(absl::Span inputs, - SmallBitset truth_table, - absl::Span ids_for_proof = {}); + int ProcessTruthTable( + absl::Span inputs, SmallBitset truth_table, + absl::Span ids_for_proof, + absl::Span> binary_used); // Add a small clause to the corresponding truth table. template @@ -518,8 +520,13 @@ class GateCongruenceClosure { absl::flat_hash_map, TruthTableId>& ids); + // Make sure the small gate at given id is canonicalized. + // Returns its number of inputs. 
+ int CanonicalizeShortGate(GateId id); + const VariablesAssignment& assignment_; SatSolver* sat_solver_; + Trail* trail_; BinaryImplicationGraph* implication_graph_; ClauseManager* clause_manager_; ClauseIdGenerator* clause_id_generator_; @@ -554,6 +561,14 @@ class GateCongruenceClosure { CompactVectorVector gates_inputs_; CompactVectorVector gates_clauses_; + // Truth tables on 2 variables are handled differently, and we don't use + // a TruthTableId indirection. + // + // TODO(user): it feels like we could benefit from just storing this all + // the time in the binary_implication graph. This allow to never add duplicate + // and detect easy case of fixing/equivalences right away. To investigate. + absl::flat_hash_map, SmallBitset> ids2_; + // Map (Xi) (sorted) to a bitmask corresponding to the allowed values. // We loop over all short clauses to fill this. We actually store an "id" // pointing in the vectors below. @@ -571,6 +586,10 @@ class GateCongruenceClosure { std::vector tmp_ids_; std::vector tmp_clauses_; + // Temporary SatClause* for binary, so we don't need to specialize too much + // code for them. + std::vector> tmp_binary_clauses_; + // For stats. double total_dtime_ = 0.0; double total_wtime_ = 0.0; diff --git a/ortools/sat/scheduling_helpers.cc b/ortools/sat/scheduling_helpers.cc index 6ae244be5c3..c5ff5494929 100644 --- a/ortools/sat/scheduling_helpers.cc +++ b/ortools/sat/scheduling_helpers.cc @@ -531,6 +531,17 @@ void SchedulingConstraintHelper::AddReasonForBeingBeforeAssumingNoOverlap( starts_[before].var == ends_[before].var && starts_[after].var == ends_[after].var; + if (fixed_size && sizes_[after].constant == 0 && + sizes_[before].constant == 0) { + // If both sizes are fixed to zero use the trivial explanation. 
+ const auto [expr, ub] = + EncodeDifferenceLowerThan(ends_[before], starts_[after], 0); + DCHECK_LE(linear2_bounds_->UpperBound(expr), ub); + linear2_bounds_->AddReasonForUpperBoundLowerThan(expr, ub, &literal_reason_, + &integer_reason_); + return; + } + // Prefer the straightforward linear2 explanation as it is more likely this // comes from level zero or a single enforcement literal. Also handle the // fixed size case. This explains with at most two integer bounds. From b28edf1d0442d9615b22824abeae40bffaa5ca2e Mon Sep 17 00:00:00 2001 From: Guillaume Chatelet Date: Wed, 24 Dec 2025 14:11:17 +0100 Subject: [PATCH 096/111] Fix warning 'control reaches end of non-void function' (#4964) --- ortools/constraint_solver/BUILD.bazel | 1 + .../routing_ils_parameters_utils.cc | 1 + ortools/glop/BUILD.bazel | 4 ++++ ortools/glop/initial_basis.cc | 9 +++++++++ ortools/glop/primal_edge_norms.cc | 2 ++ ortools/math_opt/core/math_opt_proto_utils.cc | 2 ++ ortools/math_opt/elemental/BUILD.bazel | 1 + ortools/math_opt/elemental/tagged_id_test.cc | 2 ++ ortools/math_opt/solver_tests/BUILD.bazel | 1 + .../solver_tests/multi_objective_tests.cc | 3 +++ ortools/math_opt/solvers/gscip/BUILD.bazel | 7 ++++++- ortools/math_opt/solvers/gscip/gscip.cc | 19 +++++++++++-------- .../solvers/gscip/gscip_callback_result.cc | 2 ++ ortools/math_opt/tools/BUILD.bazel | 2 ++ ortools/math_opt/tools/file_format_flags.cc | 4 ++++ ortools/pdlp/BUILD.bazel | 1 + ortools/pdlp/primal_dual_hybrid_gradient.cc | 2 ++ ortools/routing/parsers/BUILD.bazel | 4 +++- .../routing/parsers/solution_serializer.cc | 11 ++++++++++- ortools/routing/parsers/solution_serializer.h | 11 ++++++++--- ortools/sat/2d_rectangle_presolve.cc | 3 +++ ortools/sat/2d_rectangle_presolve.h | 2 ++ ortools/sat/BUILD.bazel | 5 ++--- ortools/sat/cp_model_utils.cc | 2 ++ ortools/sat/diffn_util.cc | 5 ++++- ortools/sat/diffn_util.h | 4 ++-- ortools/sat/no_overlap_2d_helper.cc | 2 ++ 27 files changed, 92 insertions(+), 20 deletions(-) diff 
--git a/ortools/constraint_solver/BUILD.bazel b/ortools/constraint_solver/BUILD.bazel index 11ae9c26adc..0c1991f3397 100644 --- a/ortools/constraint_solver/BUILD.bazel +++ b/ortools/constraint_solver/BUILD.bazel @@ -379,6 +379,7 @@ cc_library( deps = [ ":routing_enums_cc_proto", ":routing_ils_cc_proto", + "@abseil-cpp//absl/base:core_headers", ], ) diff --git a/ortools/constraint_solver/routing_ils_parameters_utils.cc b/ortools/constraint_solver/routing_ils_parameters_utils.cc index 453492799e2..fde53e44567 100644 --- a/ortools/constraint_solver/routing_ils_parameters_utils.cc +++ b/ortools/constraint_solver/routing_ils_parameters_utils.cc @@ -60,6 +60,7 @@ std::string GetRecreateParametersName( case RecreateParameters::PARAMETERS_NOT_SET: return "PARAMETERS_NOT_SET"; } + ABSL_UNREACHABLE(); } } // namespace operations_research diff --git a/ortools/glop/BUILD.bazel b/ortools/glop/BUILD.bazel index 053a27d79a6..f97d27785fd 100644 --- a/ortools/glop/BUILD.bazel +++ b/ortools/glop/BUILD.bazel @@ -225,6 +225,10 @@ cc_library( "//ortools/lp_data:base", "//ortools/lp_data:lp_utils", "//ortools/lp_data:sparse", + "//ortools/lp_data:sparse_column", + "@abseil-cpp//absl/base:core_headers", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:check", ], ) diff --git a/ortools/glop/initial_basis.cc b/ortools/glop/initial_basis.cc index 97734b10e63..c2d528e3178 100644 --- a/ortools/glop/initial_basis.cc +++ b/ortools/glop/initial_basis.cc @@ -20,8 +20,14 @@ #include #include +#include "absl/base/optimization.h" +#include "absl/log/check.h" +#include "absl/log/log.h" #include "ortools/glop/markowitz.h" +#include "ortools/lp_data/lp_types.h" #include "ortools/lp_data/lp_utils.h" +#include "ortools/lp_data/sparse.h" +#include "ortools/lp_data/sparse_column.h" namespace operations_research { namespace glop { @@ -219,6 +225,8 @@ int InitialBasis::GetMarosPriority(ColIndex col) const { case VariableType::FIXED_VARIABLE: return 0; } + LOG(FATAL) << "Invalid variable type: " + << 
static_cast(variable_type_[col]); } int InitialBasis::GetMarosPriority(RowIndex row) const { @@ -388,6 +396,7 @@ int InitialBasis::GetColumnCategory(ColIndex col) const { case VariableType::FIXED_VARIABLE: return 5; } + ABSL_UNREACHABLE(); } Fractional InitialBasis::GetColumnPenalty(ColIndex col) const { diff --git a/ortools/glop/primal_edge_norms.cc b/ortools/glop/primal_edge_norms.cc index 5a39d73899e..49603eb133c 100644 --- a/ortools/glop/primal_edge_norms.cc +++ b/ortools/glop/primal_edge_norms.cc @@ -14,6 +14,7 @@ #include "ortools/glop/primal_edge_norms.h" #include +#include #include #include "absl/log/check.h" @@ -68,6 +69,7 @@ DenseRow::ConstView PrimalEdgeNorms::GetSquaredNorms() { case GlopParameters::DEVEX: return GetDevexWeights().const_view(); } + LOG(FATAL) << "Invalid pricing rule: " << pricing_rule_; } const DenseRow& PrimalEdgeNorms::GetEdgeSquaredNorms() { diff --git a/ortools/math_opt/core/math_opt_proto_utils.cc b/ortools/math_opt/core/math_opt_proto_utils.cc index 88392dfc34f..463aa026b2a 100644 --- a/ortools/math_opt/core/math_opt_proto_utils.cc +++ b/ortools/math_opt/core/math_opt_proto_utils.cc @@ -21,6 +21,7 @@ #include #include +#include "absl/base/optimization.h" #include "absl/container/flat_hash_set.h" #include "absl/log/check.h" #include "absl/log/log.h" @@ -403,6 +404,7 @@ absl::Status ModelIsSupported(const ModelProto& model, case SupportType::kSupported: LOG(FATAL) << "Unexpected call with `kSupported`"; } + ABSL_UNREACHABLE(); }; if (const SupportType support = support_menu.integer_variables; support != SupportType::kSupported) { diff --git a/ortools/math_opt/elemental/BUILD.bazel b/ortools/math_opt/elemental/BUILD.bazel index d0e9163fb67..778e5566e4b 100644 --- a/ortools/math_opt/elemental/BUILD.bazel +++ b/ortools/math_opt/elemental/BUILD.bazel @@ -412,6 +412,7 @@ cc_test( ":tagged_id", "//ortools/base:gmock_main", "//ortools/math_opt/testing:stream", + "@abseil-cpp//absl/base:core_headers", "@abseil-cpp//absl/hash:hash_testing", 
"@abseil-cpp//absl/strings", ], diff --git a/ortools/math_opt/elemental/tagged_id_test.cc b/ortools/math_opt/elemental/tagged_id_test.cc index 49fc1f3435e..faefea5c6db 100644 --- a/ortools/math_opt/elemental/tagged_id_test.cc +++ b/ortools/math_opt/elemental/tagged_id_test.cc @@ -18,6 +18,7 @@ #include #include +#include "absl/base/optimization.h" #include "absl/hash/hash_testing.h" #include "absl/strings/str_cat.h" #include "gtest/gtest.h" @@ -39,6 +40,7 @@ std::ostream& operator<<(std::ostream& ostr, const TestEnum& e) { ostr << "kValue0"; return ostr; } + ABSL_UNREACHABLE(); } using TestEnumId = TaggedId; diff --git a/ortools/math_opt/solver_tests/BUILD.bazel b/ortools/math_opt/solver_tests/BUILD.bazel index 9f31d474e6d..f769aff01d6 100644 --- a/ortools/math_opt/solver_tests/BUILD.bazel +++ b/ortools/math_opt/solver_tests/BUILD.bazel @@ -322,6 +322,7 @@ cc_library( "//ortools/math_opt/cpp:math_opt", "//ortools/math_opt/io:mps_converter", "//ortools/port:proto_utils", + "@abseil-cpp//absl/base:core_headers", "@abseil-cpp//absl/status", "@abseil-cpp//absl/status:statusor", "@abseil-cpp//absl/strings:string_view", diff --git a/ortools/math_opt/solver_tests/multi_objective_tests.cc b/ortools/math_opt/solver_tests/multi_objective_tests.cc index 098d3717b03..2ebefb11c4a 100644 --- a/ortools/math_opt/solver_tests/multi_objective_tests.cc +++ b/ortools/math_opt/solver_tests/multi_objective_tests.cc @@ -17,6 +17,7 @@ #include #include +#include "absl/base/optimization.h" #include "absl/status/status.h" #include "absl/status/statusor.h" #include "absl/strings/string_view.h" @@ -210,6 +211,7 @@ absl::StatusOr SolveWithObjectiveDegradation( case ObjectiveType::kAuxiliary: return model.AddMaximizationObjective(x, /*priority=*/0); } + ABSL_UNREACHABLE(); }(); const Objective priority_1 = [&]() { switch (priority_1_type) { @@ -220,6 +222,7 @@ absl::StatusOr SolveWithObjectiveDegradation( case ObjectiveType::kAuxiliary: return model.AddMinimizationObjective(x, /*priority=*/1); 
} + ABSL_UNREACHABLE(); }(); ModelSolveParameters model_parameters; switch (tolerance_type) { diff --git a/ortools/math_opt/solvers/gscip/BUILD.bazel b/ortools/math_opt/solvers/gscip/BUILD.bazel index 01474cd3895..088812049af 100644 --- a/ortools/math_opt/solvers/gscip/BUILD.bazel +++ b/ortools/math_opt/solvers/gscip/BUILD.bazel @@ -97,6 +97,7 @@ cc_library( "@abseil-cpp//absl/cleanup", "@abseil-cpp//absl/container:flat_hash_map", "@abseil-cpp//absl/container:flat_hash_set", + "@abseil-cpp//absl/log", "@abseil-cpp//absl/log:check", "@abseil-cpp//absl/memory", "@abseil-cpp//absl/status", @@ -215,6 +216,7 @@ cc_library( deps = [ "//ortools/base:logging", "//ortools/linear_solver:scip_helper_macros", + "@abseil-cpp//absl/log:die_if_null", "@abseil-cpp//absl/status:statusor", "@abseil-cpp//absl/strings", "@scip", @@ -225,7 +227,10 @@ cc_library( name = "gscip_callback_result", srcs = ["gscip_callback_result.cc"], hdrs = ["gscip_callback_result.h"], - deps = ["@scip"], + deps = [ + "@abseil-cpp//absl/base:core_headers", + "@scip", + ], ) cc_library( diff --git a/ortools/math_opt/solvers/gscip/gscip.cc b/ortools/math_opt/solvers/gscip/gscip.cc index 7bcac209d5f..1b52f6cc68c 100644 --- a/ortools/math_opt/solvers/gscip/gscip.cc +++ b/ortools/math_opt/solvers/gscip/gscip.cc @@ -22,10 +22,12 @@ #include #include +#include "absl/base/optimization.h" #include "absl/cleanup/cleanup.h" #include "absl/container/flat_hash_map.h" #include "absl/container/flat_hash_set.h" #include "absl/log/check.h" +#include "absl/log/log.h" #include "absl/memory/memory.h" #include "absl/status/status.h" #include "absl/status/statusor.h" @@ -35,14 +37,6 @@ #include "absl/synchronization/mutex.h" #include "absl/types/span.h" #include "lpi/lpi.h" -#include "ortools/base/logging.h" -#include "ortools/base/status_macros.h" -#include "ortools/linear_solver/scip_helper_macros.h" -#include "ortools/math_opt/solvers/gscip/gscip.pb.h" -#include "ortools/math_opt/solvers/gscip/gscip_event_handler.h" 
-#include "ortools/math_opt/solvers/gscip/gscip_parameters.h" -#include "ortools/port/proto_utils.h" -#include "ortools/util/status_macros.h" #include "scip/cons_and.h" #include "scip/cons_indicator.h" #include "scip/cons_linear.h" @@ -53,6 +47,13 @@ #else #include "scip/cons_quadratic.h" #endif // SCIP_VERSION_MAJOR >= 10 +#include "ortools/base/status_macros.h" +#include "ortools/linear_solver/scip_helper_macros.h" +#include "ortools/math_opt/solvers/gscip/gscip.pb.h" +#include "ortools/math_opt/solvers/gscip/gscip_event_handler.h" +#include "ortools/math_opt/solvers/gscip/gscip_parameters.h" +#include "ortools/port/proto_utils.h" +#include "ortools/util/status_macros.h" #include "scip/cons_sos1.h" #include "scip/cons_sos2.h" #include "scip/def.h" @@ -102,6 +103,7 @@ SCIP_VARTYPE ConvertVarType(const GScipVarType var_type) { case GScipVarType::kInteger: return SCIP_VARTYPE_INTEGER; } + ABSL_UNREACHABLE(); } GScipVarType ConvertVarType(const SCIP_VARTYPE var_type) { @@ -115,6 +117,7 @@ GScipVarType ConvertVarType(const SCIP_VARTYPE var_type) { case SCIP_VARTYPE_BINARY: return GScipVarType::kBinary; } + LOG(FATAL) << "Unrecognized SCIP_VARTYPE: " << var_type; } GScipOutput::Status ConvertStatus(const SCIP_STATUS scip_status) { diff --git a/ortools/math_opt/solvers/gscip/gscip_callback_result.cc b/ortools/math_opt/solvers/gscip/gscip_callback_result.cc index 5d6b4ba71c2..c4d1da265a0 100644 --- a/ortools/math_opt/solvers/gscip/gscip_callback_result.cc +++ b/ortools/math_opt/solvers/gscip/gscip_callback_result.cc @@ -13,6 +13,7 @@ #include "ortools/math_opt/solvers/gscip/gscip_callback_result.h" +#include "absl/base/optimization.h" #include "scip/type_result.h" namespace operations_research { @@ -56,6 +57,7 @@ SCIP_RESULT ConvertGScipCallbackResult(const GScipCallbackResult result) { case GScipCallbackResult::kDelayNode: return SCIP_DELAYNODE; } + ABSL_UNREACHABLE(); } } // namespace operations_research diff --git a/ortools/math_opt/tools/BUILD.bazel 
b/ortools/math_opt/tools/BUILD.bazel index d9e61c8b67f..58d9f1b1e07 100644 --- a/ortools/math_opt/tools/BUILD.bazel +++ b/ortools/math_opt/tools/BUILD.bazel @@ -41,6 +41,7 @@ cc_binary( "//ortools/util:sigint", "//ortools/util:status_macros", "@abseil-cpp//absl/base:no_destructor", + "@abseil-cpp//absl/base:nullability", "@abseil-cpp//absl/flags:flag", "@abseil-cpp//absl/log:check", "@abseil-cpp//absl/status", @@ -81,6 +82,7 @@ cc_library( "//ortools/math_opt/io:mps_converter", "//ortools/math_opt/io:proto_converter", "@abseil-cpp//absl/algorithm:container", + "@abseil-cpp//absl/base:core_headers", "@abseil-cpp//absl/container:flat_hash_map", "@abseil-cpp//absl/log", "@abseil-cpp//absl/status", diff --git a/ortools/math_opt/tools/file_format_flags.cc b/ortools/math_opt/tools/file_format_flags.cc index 01e11c17221..bf57e67fb7d 100644 --- a/ortools/math_opt/tools/file_format_flags.cc +++ b/ortools/math_opt/tools/file_format_flags.cc @@ -20,6 +20,7 @@ #include #include "absl/algorithm/container.h" +#include "absl/base/optimization.h" #include "absl/container/flat_hash_map.h" #include "absl/log/log.h" #include "absl/status/status.h" @@ -69,6 +70,7 @@ std::string AbslUnparseFlag(const FileFormat f) { case FileFormat::kLP: return "lp"; } + ABSL_UNREACHABLE(); } std::ostream& operator<<(std::ostream& out, const FileFormat f) { @@ -232,6 +234,7 @@ ReadModel(const absl::string_view file_path, const FileFormat format) { return std::make_pair(std::move(model), std::nullopt); } } + ABSL_UNREACHABLE(); } absl::Status WriteModel(const absl::string_view file_path, @@ -267,6 +270,7 @@ absl::Status WriteModel(const absl::string_view file_path, return file::SetContents(file_path, lp_data, file::Defaults()); } } + ABSL_UNREACHABLE(); } } // namespace operations_research::math_opt diff --git a/ortools/pdlp/BUILD.bazel b/ortools/pdlp/BUILD.bazel index 0e76eccfbb4..64d668b803b 100644 --- a/ortools/pdlp/BUILD.bazel +++ b/ortools/pdlp/BUILD.bazel @@ -135,6 +135,7 @@ cc_library( 
"//ortools/lp_data:proto_utils", "//ortools/util:logging", "@abseil-cpp//absl/algorithm:container", + "@abseil-cpp//absl/base:core_headers", "@abseil-cpp//absl/base:nullability", "@abseil-cpp//absl/log", "@abseil-cpp//absl/log:check", diff --git a/ortools/pdlp/primal_dual_hybrid_gradient.cc b/ortools/pdlp/primal_dual_hybrid_gradient.cc index b86f3e9f1c1..415a1455989 100644 --- a/ortools/pdlp/primal_dual_hybrid_gradient.cc +++ b/ortools/pdlp/primal_dual_hybrid_gradient.cc @@ -54,6 +54,7 @@ #include "Eigen/SparseCore" #include "absl/algorithm/container.h" #include "absl/base/nullability.h" +#include "absl/base/optimization.h" #include "absl/log/check.h" #include "absl/log/log.h" #include "absl/status/status.h" @@ -250,6 +251,7 @@ void LogIterationStats(int verbosity_level, bool use_feasibility_polishing, case IterationType::kPresolveTermination: return "t "; } + ABSL_UNREACHABLE(); } else { return ""; } diff --git a/ortools/routing/parsers/BUILD.bazel b/ortools/routing/parsers/BUILD.bazel index 8b69141dfda..d58d165240b 100644 --- a/ortools/routing/parsers/BUILD.bazel +++ b/ortools/routing/parsers/BUILD.bazel @@ -310,7 +310,9 @@ cc_library( deps = [ ":simple_graph", "//ortools/base:file", - "//ortools/base:logging", + "@abseil-cpp//absl/base:core_headers", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:check", "@abseil-cpp//absl/strings", "@abseil-cpp//absl/strings:str_format", "@abseil-cpp//absl/time", diff --git a/ortools/routing/parsers/solution_serializer.cc b/ortools/routing/parsers/solution_serializer.cc index a92efccbff5..a28c5674016 100644 --- a/ortools/routing/parsers/solution_serializer.cc +++ b/ortools/routing/parsers/solution_serializer.cc @@ -14,16 +14,25 @@ #include "ortools/routing/parsers/solution_serializer.h" #include +#include #include #include #include #include +#include "absl/base/attributes.h" +#include "absl/log/check.h" +#include "absl/log/log.h" #include "absl/strings/ascii.h" +#include "absl/strings/str_cat.h" +#include 
"absl/strings/str_format.h" #include "absl/time/clock.h" #include "absl/time/time.h" #include "absl/types/span.h" -#include "ortools/base/logging.h" +#include "ortools/base/file.h" +#include "ortools/base/helpers.h" +#include "ortools/base/options.h" +#include "ortools/routing/parsers/simple_graph.h" namespace operations_research::routing { diff --git a/ortools/routing/parsers/solution_serializer.h b/ortools/routing/parsers/solution_serializer.h index d1f545cafac..4e3996579af 100644 --- a/ortools/routing/parsers/solution_serializer.h +++ b/ortools/routing/parsers/solution_serializer.h @@ -17,19 +17,20 @@ #ifndef ORTOOLS_ROUTING_PARSERS_SOLUTION_SERIALIZER_H_ #define ORTOOLS_ROUTING_PARSERS_SOLUTION_SERIALIZER_H_ +#include #include #include #include #include #include +#include "absl/base/attributes.h" +#include "absl/base/optimization.h" +#include "absl/log/check.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_format.h" #include "absl/strings/string_view.h" #include "absl/types/span.h" -#include "ortools/base/file.h" -#include "ortools/base/helpers.h" -#include "ortools/base/logging.h" #include "ortools/routing/parsers/simple_graph.h" namespace operations_research::routing { @@ -162,6 +163,7 @@ class RoutingSolution { case RoutingOutputFormat::kNEARPLIB: return SerializeToNEARPLIBString(); } + ABSL_UNREACHABLE(); } // Serializes the full solution to the given file, including metadata like @@ -180,6 +182,7 @@ class RoutingSolution { case RoutingOutputFormat::kNEARPLIB: return SerializeToNEARPLIBSolutionFile(); } + ABSL_UNREACHABLE(); } // Serializes the full solution to the given file, including metadata like @@ -263,6 +266,7 @@ std::string FormatStatistic(absl::string_view name, T value, case RoutingOutputFormat::kNEARPLIB: return absl::StrCat(name, " : ", value); } + ABSL_UNREACHABLE(); } // Specialization for doubles to show a higher precision: without this @@ -282,6 +286,7 @@ inline std::string FormatStatistic(absl::string_view name, double value, 
case RoutingOutputFormat::kNEARPLIB: return absl::StrFormat("%s : %f", name, value); } + ABSL_UNREACHABLE(); } // Prints a formatted solution or solver statistic according to the given diff --git a/ortools/sat/2d_rectangle_presolve.cc b/ortools/sat/2d_rectangle_presolve.cc index a3edee86276..152e48191f1 100644 --- a/ortools/sat/2d_rectangle_presolve.cc +++ b/ortools/sat/2d_rectangle_presolve.cc @@ -318,6 +318,7 @@ struct Edge { .y_start = rectangle.y_min, .size = rectangle.SizeY()}; } + LOG(FATAL) << "Invalid edge position: " << static_cast(pos); } template @@ -598,6 +599,7 @@ IntegerValue GetClockwiseStart(EdgePosition edge, const Rectangle& rectangle) { case EdgePosition::TOP: return rectangle.x_min; } + LOG(FATAL) << "Invalid edge position: " << static_cast(edge); } IntegerValue GetClockwiseEnd(EdgePosition edge, const Rectangle& rectangle) { @@ -611,6 +613,7 @@ IntegerValue GetClockwiseEnd(EdgePosition edge, const Rectangle& rectangle) { case EdgePosition::TOP: return rectangle.x_max; } + LOG(FATAL) << "Invalid edge position: " << static_cast(edge); } // Given a list of rectangles and their neighbours graph, find the list of diff --git a/ortools/sat/2d_rectangle_presolve.h b/ortools/sat/2d_rectangle_presolve.h index 1ed1295fa25..19a4cbe6661 100644 --- a/ortools/sat/2d_rectangle_presolve.h +++ b/ortools/sat/2d_rectangle_presolve.h @@ -21,6 +21,7 @@ #include "absl/algorithm/container.h" #include "absl/container/flat_hash_map.h" #include "absl/container/inlined_vector.h" +#include "absl/log/log.h" #include "absl/types/span.h" #include "ortools/sat/diffn_util.h" #include "ortools/sat/integer_base.h" @@ -180,6 +181,7 @@ class Neighbours { case EdgePosition::RIGHT: return std::tie(a.y_min, a.y_max) > std::tie(b.y_min, b.y_max); } + LOG(FATAL) << "Invalid edge position: " << static_cast(edge_); } EdgePosition edge_; }; diff --git a/ortools/sat/BUILD.bazel b/ortools/sat/BUILD.bazel index 0820d997949..46c06284b61 100644 --- a/ortools/sat/BUILD.bazel +++ 
b/ortools/sat/BUILD.bazel @@ -426,6 +426,7 @@ cc_library( "//ortools/util:bitset", "//ortools/util:saturated_arithmetic", "//ortools/util:sorted_interval_list", + "@abseil-cpp//absl/base:core_headers", "@abseil-cpp//absl/container:flat_hash_map", "@abseil-cpp//absl/container:flat_hash_set", "@abseil-cpp//absl/flags:flag", @@ -2308,6 +2309,7 @@ cc_library( ":sat_base", ":scheduling_helpers", ":util", + "@abseil-cpp//absl/base:core_headers", "@abseil-cpp//absl/base:log_severity", "@abseil-cpp//absl/log", "@abseil-cpp//absl/log:check", @@ -3684,14 +3686,12 @@ cc_library( ":integer_base", ":scheduling_helpers", ":util", - "//ortools/base:logging", "//ortools/base:stl_util", "//ortools/base:strong_vector", "//ortools/graph:connected_components", "//ortools/graph:strongly_connected_components", "//ortools/util:fixed_shape_binary_tree", "//ortools/util:integer_pq", - "//ortools/util:saturated_arithmetic", "//ortools/util:strong_integers", "@abseil-cpp//absl/algorithm:container", "@abseil-cpp//absl/container:btree", @@ -3703,7 +3703,6 @@ cc_library( "@abseil-cpp//absl/log:vlog_is_on", "@abseil-cpp//absl/random:bit_gen_ref", "@abseil-cpp//absl/strings:str_format", - "@abseil-cpp//absl/types:optional", "@abseil-cpp//absl/types:span", ], ) diff --git a/ortools/sat/cp_model_utils.cc b/ortools/sat/cp_model_utils.cc index 678cc677819..1d925ea1c23 100644 --- a/ortools/sat/cp_model_utils.cc +++ b/ortools/sat/cp_model_utils.cc @@ -22,6 +22,7 @@ #include #include +#include "absl/base/optimization.h" #include "absl/container/flat_hash_map.h" #include "absl/container/flat_hash_set.h" #include "absl/flags/flag.h" @@ -547,6 +548,7 @@ absl::string_view ConstraintCaseName( case ConstraintProto::ConstraintCase::CONSTRAINT_NOT_SET: return "kEmpty"; } + ABSL_UNREACHABLE(); } std::vector UsedVariables(const ConstraintProto& ct) { diff --git a/ortools/sat/diffn_util.cc b/ortools/sat/diffn_util.cc index 7bb8e668d96..31c99b487f0 100644 --- a/ortools/sat/diffn_util.cc +++ 
b/ortools/sat/diffn_util.cc @@ -41,7 +41,6 @@ #include "absl/log/vlog_is_on.h" #include "absl/random/bit_gen_ref.h" #include "absl/types/span.h" -#include "ortools/base/logging.h" #include "ortools/base/stl_util.h" #include "ortools/base/strong_vector.h" #include "ortools/graph/connected_components.h" @@ -1114,6 +1113,7 @@ constexpr const EdgeInfo& GetEdgeInfo(ProbingRectangle::Edge edge) { case Edge::TOP: return EdgeInfoHolder::kTop; } + LOG(FATAL) << "Invalid edge: " << static_cast(edge); } IntegerValue GetSmallest1DIntersection(ProbingRectangle::Direction direction, @@ -1129,6 +1129,7 @@ IntegerValue GetSmallest1DIntersection(ProbingRectangle::Direction direction, range.bounding_area.y_max, range.y_size, rectangle.y_min, rectangle.y_max); } + LOG(FATAL) << "Invalid direction: " << static_cast(direction); } } // namespace @@ -1391,6 +1392,7 @@ IntegerValue ProbingRectangle::GetShrinkDeltaArea(Edge edge) const { case Edge::TOP: return (current_rectangle.y_max - coordinate) * current_rectangle.SizeX(); } + LOG(FATAL) << "Invalid edge: " << static_cast(edge); } void ProbingRectangle::CacheShrinkDeltaEnergy(int dimension) { @@ -1493,6 +1495,7 @@ bool ProbingRectangle::CanShrink(Edge edge) const { case Edge::TOP: return (indexes_[Edge::TOP] > next_indexes_[Edge::BOTTOM]); } + LOG(FATAL) << "Invalid edge: " << static_cast(edge); } namespace { diff --git a/ortools/sat/diffn_util.h b/ortools/sat/diffn_util.h index 5291a71b2a8..110f8f59fd5 100644 --- a/ortools/sat/diffn_util.h +++ b/ortools/sat/diffn_util.h @@ -28,14 +28,13 @@ #include "absl/container/flat_hash_set.h" #include "absl/container/inlined_vector.h" #include "absl/log/check.h" +#include "absl/log/log.h" #include "absl/random/bit_gen_ref.h" #include "absl/strings/str_format.h" -#include "absl/types/optional.h" #include "absl/types/span.h" #include "ortools/sat/integer_base.h" #include "ortools/sat/scheduling_helpers.h" #include "ortools/sat/util.h" -#include "ortools/util/saturated_arithmetic.h" #include 
"ortools/util/strong_integers.h" namespace operations_research { @@ -471,6 +470,7 @@ struct RectangleInRange { .y_min = bounding_area.y_max - y_size, .y_max = bounding_area.y_max}; } + LOG(FATAL) << "Invalid corner: " << static_cast(p); } Rectangle GetBoudingBox() const { return bounding_area; } diff --git a/ortools/sat/no_overlap_2d_helper.cc b/ortools/sat/no_overlap_2d_helper.cc index 35f7c08e155..d25712be18f 100644 --- a/ortools/sat/no_overlap_2d_helper.cc +++ b/ortools/sat/no_overlap_2d_helper.cc @@ -19,6 +19,7 @@ #include #include "absl/base/log_severity.h" +#include "absl/base/optimization.h" #include "absl/log/check.h" #include "absl/log/log.h" #include "absl/types/span.h" @@ -188,6 +189,7 @@ bool NoOverlap2DConstraintHelper::PropagateRelativePosition( return LeftBoxBeforeRightBoxOnFirstDimension( second, first, y_helper_.get(), x_helper_.get()); } + ABSL_UNREACHABLE(); } void NoOverlap2DConstraintHelper::Reset( From c8d7710fd77f3a48a5c2ed1a637756ff2267f80d Mon Sep 17 00:00:00 2001 From: Laurent Perron Date: Sat, 27 Dec 2025 11:58:47 +0100 Subject: [PATCH 097/111] [CP-SAT] more work on lrat, regroup linear1 presolve methods --- ortools/sat/BUILD.bazel | 13 ++ ortools/sat/cp_model_presolve.cc | 294 ++++++++---------------- ortools/sat/cp_model_presolve.h | 1 - ortools/sat/cp_model_utils.cc | 24 ++ ortools/sat/cp_model_utils.h | 3 + ortools/sat/presolve_context.cc | 16 ++ ortools/sat/presolve_context.h | 4 + ortools/sat/presolve_encoding.cc | 136 +++++++++++ ortools/sat/presolve_encoding.h | 65 ++++++ ortools/sat/sat_base.h | 16 ++ ortools/sat/sat_inprocessing.cc | 179 ++++++++++----- ortools/sat/util.h | 21 +- ortools/sat/util_test.cc | 5 +- ortools/third_party_solvers/BUILD.bazel | 1 - ortools/util/logging.h | 1 + 15 files changed, 521 insertions(+), 258 deletions(-) create mode 100644 ortools/sat/presolve_encoding.cc create mode 100644 ortools/sat/presolve_encoding.h diff --git a/ortools/sat/BUILD.bazel b/ortools/sat/BUILD.bazel index 
46c06284b61..d2e6fd6fff1 100644 --- a/ortools/sat/BUILD.bazel +++ b/ortools/sat/BUILD.bazel @@ -386,6 +386,18 @@ cc_test( ], ) +cc_library( + name = "presolve_encoding", + srcs = ["presolve_encoding.cc"], + hdrs = ["presolve_encoding.h"], + deps = [ + ":cp_model_utils", + ":presolve_context", + "//ortools/util:sorted_interval_list", + "@abseil-cpp//absl/log", + ], +) + cc_proto_library( name = "cp_model_cc_proto", visibility = ["//visibility:public"], @@ -1322,6 +1334,7 @@ cc_library( ":model", ":precedences", ":presolve_context", + ":presolve_encoding", ":presolve_util", ":probing", ":sat_base", diff --git a/ortools/sat/cp_model_presolve.cc b/ortools/sat/cp_model_presolve.cc index e8ebda8e3a7..ae01a7177c0 100644 --- a/ortools/sat/cp_model_presolve.cc +++ b/ortools/sat/cp_model_presolve.cc @@ -76,6 +76,7 @@ #include "ortools/sat/model.h" #include "ortools/sat/precedences.h" #include "ortools/sat/presolve_context.h" +#include "ortools/sat/presolve_encoding.h" #include "ortools/sat/presolve_util.h" #include "ortools/sat/probing.h" #include "ortools/sat/sat_base.h" @@ -430,20 +431,9 @@ bool CpModelPresolver::PresolveBoolOr(ConstraintProto* ct) { // done elsewhere. ABSL_MUST_USE_RESULT bool CpModelPresolver::MarkConstraintAsFalse( ConstraintProto* ct, std::string_view reason) { - DCHECK(!reason.empty()); - if (HasEnforcementLiteral(*ct)) { - // Change the constraint to a bool_or. 
- ct->mutable_bool_or()->clear_literals(); - for (const int lit : ct->enforcement_literal()) { - ct->mutable_bool_or()->add_literals(NegatedRef(lit)); - } - ct->clear_enforcement_literal(); - PresolveBoolOr(ct); - context_->UpdateRuleStats(reason); - return true; - } else { - return context_->NotifyThatModelIsUnsat(reason); - } + if (!context_->MarkConstraintAsFalse(ct, reason)) return false; + if (ct->constraint_case() == ConstraintProto::kBoolOr) PresolveBoolOr(ct); + return true; } ABSL_MUST_USE_RESULT bool CpModelPresolver::MarkOptionalIntervalAsFalse( @@ -870,30 +860,6 @@ int GetFirstVar(ExpressionList exprs) { return -1; } -bool IsAffineIntAbs(const ConstraintProto& ct) { - if (ct.constraint_case() != ConstraintProto::kLinMax || - ct.lin_max().exprs_size() != 2 || ct.lin_max().target().vars_size() > 1 || - ct.lin_max().exprs(0).vars_size() != 1 || - ct.lin_max().exprs(1).vars_size() != 1) { - return false; - } - - const LinearArgumentProto& lin_max = ct.lin_max(); - if (lin_max.exprs(0).offset() != -lin_max.exprs(1).offset()) return false; - if (PositiveRef(lin_max.exprs(0).vars(0)) != - PositiveRef(lin_max.exprs(1).vars(0))) { - return false; - } - - const int64_t left_coeff = RefIsPositive(lin_max.exprs(0).vars(0)) - ? lin_max.exprs(0).coeffs(0) - : -lin_max.exprs(0).coeffs(0); - const int64_t right_coeff = RefIsPositive(lin_max.exprs(1).vars(0)) - ? lin_max.exprs(1).coeffs(0) - : -lin_max.exprs(1).coeffs(0); - return left_coeff == -right_coeff; -} - } // namespace bool CpModelPresolver::PropagateAndReduceAffineMax(ConstraintProto* ct) { @@ -12293,9 +12259,9 @@ void CpModelPresolver::FindBigAtMostOneAndLinearOverlap( for (int x = 0; x < context_->working_model->variables().size(); ++x) { // We pick a variable x that appear in some AMO. 
+ if (helper->NumAmoForVariable(x) == 0) continue; if (time_limit_->LimitReached()) break; if (timer.WorkLimitIsReached()) break; - if (helper->NumAmoForVariable(x) == 0) continue; amo_cts.clear(); timer.TrackSimpleLoop(context_->VarToConstraints(x).size()); @@ -13363,121 +13329,6 @@ void CpModelPresolver::ProcessVariableInTwoAtMostOrExactlyOne(int var) { } } -// If we have a bunch of constraint of the form literal => Y \in domain and -// another constraint Y = f(X), we can remove Y, that constraint, and transform -// all linear1 from constraining Y to constraining X. -// -// We can for instance do it for Y = abs(X) or Y = X^2 easily. More complex -// function might be trickier. -// -// Note that we can't always do it in the reverse direction though! -// If we have l => X = -1, we can't transfer that to abs(X) for instance, since -// X=1 will also map to abs(-1). We can only do it if for all implied domain D -// we have f^-1(f(D)) = D, which is not easy to check. -void CpModelPresolver::MaybeTransferLinear1ToAnotherVariable(int var) { - // Find the extra constraint and do basic CHECKs. - int other_c; - int num_others = 0; - std::vector to_rewrite; - for (const int c : context_->VarToConstraints(var)) { - if (c >= 0) { - const ConstraintProto& ct = context_->working_model->constraints(c); - if (ct.constraint_case() == ConstraintProto::kLinear && - ct.linear().vars().size() == 1) { - to_rewrite.push_back(c); - continue; - } - } - ++num_others; - other_c = c; - } - if (num_others != 1) return; - if (other_c < 0) return; - - // In general constraint with more than two variable can't be removed. - // Similarly for linear2 with non-fixed rhs as we would need to check the form - // of all implied domain. 
- const auto& other_ct = context_->working_model->constraints(other_c); - if (context_->ConstraintToVars(other_c).size() != 2 || - !other_ct.enforcement_literal().empty() || - other_ct.constraint_case() == ConstraintProto::kLinear) { - return; - } - - // This will be the rewriting function. It takes the implied domain of var - // from linear1, and return a pair {new_var, new_var_implied_domain}. - std::function(const Domain& implied)> transfer_f = - nullptr; - - // We only support a few cases. - // - // TODO(user): implement more! Note that the linear2 case was tempting, but if - // we don't have an equality, we can't transfer, and if we do, we actually - // have affine equivalence already. - if (other_ct.constraint_case() == ConstraintProto::kLinMax && - other_ct.lin_max().target().vars().size() == 1 && - other_ct.lin_max().target().vars(0) == var && - std::abs(other_ct.lin_max().target().coeffs(0)) == 1 && - IsAffineIntAbs(other_ct)) { - context_->UpdateRuleStats("linear1: transferred from abs(X) to X"); - const LinearExpressionProto& target = other_ct.lin_max().target(); - const LinearExpressionProto& expr = other_ct.lin_max().exprs(0); - transfer_f = [target = target, expr = expr](const Domain& implied) { - Domain target_domain = - implied.ContinuousMultiplicationBy(target.coeffs(0)) - .AdditionWith(Domain(target.offset())); - target_domain = target_domain.IntersectionWith( - Domain(0, std::numeric_limits::max())); - - // We have target = abs(expr). - const Domain expr_domain = - target_domain.UnionWith(target_domain.Negation()); - const Domain new_domain = expr_domain.AdditionWith(Domain(-expr.offset())) - .InverseMultiplicationBy(expr.coeffs(0)); - return std::make_pair(expr.vars(0), new_domain); - }; - } - - if (transfer_f == nullptr) { - context_->UpdateRuleStats( - "TODO linear1: appear in only one extra 2-var constraint"); - return; - } - - // Applies transfer_f to all linear1. 
- std::sort(to_rewrite.begin(), to_rewrite.end()); - const Domain var_domain = context_->DomainOf(var); - for (const int c : to_rewrite) { - ConstraintProto* ct = context_->working_model->mutable_constraints(c); - if (ct->linear().vars(0) != var || ct->linear().coeffs(0) != 1) { - // This shouldn't happen. - LOG(INFO) << "Aborted in MaybeTransferLinear1ToAnotherVariable()"; - return; - } - - const Domain implied = - var_domain.IntersectionWith(ReadDomainFromProto(ct->linear())); - auto [new_var, new_domain] = transfer_f(implied); - const Domain current = context_->DomainOf(new_var); - new_domain = new_domain.IntersectionWith(current); - if (new_domain.IsEmpty()) { - if (!MarkConstraintAsFalse(ct, "linear1: unsat transfer")) return; - } else if (new_domain == current) { - ct->Clear(); - } else { - ct->mutable_linear()->set_vars(0, new_var); - FillDomainInProto(new_domain, ct->mutable_linear()); - } - context_->UpdateConstraintVariableUsage(c); - } - - // Copy other_ct to the mapping model and delete var! - context_->NewMappingConstraint(other_ct, __FILE__, __LINE__); - context_->working_model->mutable_constraints(other_c)->Clear(); - context_->UpdateConstraintVariableUsage(other_c); - context_->MarkVariableAsRemoved(var); -} - // TODO(user): We can still remove the variable even if we want to keep // all feasible solutions for the cases when we have a full encoding. 
// Similarly this shouldn't break symmetry, but we do need to do it for all @@ -13499,13 +13350,46 @@ void CpModelPresolver::ProcessVariableOnlyUsedInEncoding(int var) { return; } - if (!context_->VariableIsOnlyUsedInEncodingAndMaybeInObjective(var)) { - if (context_->VariableIsOnlyUsedInLinear1AndOneExtraConstraint(var)) { - MaybeTransferLinear1ToAnotherVariable(var); - return; + const bool is_only_used_in_linear1 = + context_->VariableIsOnlyUsedInLinear1AndOneExtraConstraint(var); + const bool is_only_used_in_encoding = + context_->VariableIsOnlyUsedInEncodingAndMaybeInObjective(var); + if (!is_only_used_in_encoding && is_only_used_in_linear1) { + VariableEncodingLocalModel local_model; + local_model.var = var; + local_model.single_constraint_using_the_var_outside_the_local_model = -1; + local_model.var_in_more_than_one_constraint_outside_the_local_model = false; + for (const int c : context_->VarToConstraints(var)) { + if (c >= 0) { + const ConstraintProto& ct = context_->working_model->constraints(c); + if (ct.constraint_case() == ConstraintProto::kLinear && + ct.linear().vars().size() == 1 && ct.linear().vars(0) == var) { + local_model.linear1_constraints.push_back(c); + continue; + } + } + if (c == kObjectiveConstraint) { + local_model.variable_coeff_in_objective = + context_->ObjectiveMap().at(var); + } else if ( + local_model.single_constraint_using_the_var_outside_the_local_model == + -1 && + c >= 0) { + // First "other" constraint. + local_model.single_constraint_using_the_var_outside_the_local_model = c; + } else { + // We have a second "other" constraint. + local_model.single_constraint_using_the_var_outside_the_local_model = + -1; + local_model.var_in_more_than_one_constraint_outside_the_local_model = + true; + } } + + MaybeTransferLinear1ToAnotherVariable(local_model, context_); return; } + if (!is_only_used_in_encoding) return; // Presolve newly created constraints. 
const int old_size = context_->working_model->constraints_size(); @@ -13643,18 +13527,19 @@ bool CpModelPresolver::ProcessChangedVariables(std::vector* in_queue, if (!context_->CanonicalizeOneObjectiveVariable(v)) return false; in_queue->resize(context_->working_model->constraints_size(), false); + const int size_before = queue->size(); for (const int c : context_->VarToConstraints(v)) { if (c >= 0 && !(*in_queue)[c]) { (*in_queue)[c] = true; queue->push_back(c); } } + + // Make sure the order is deterministic! because var_to_constraints[] + // order changes from one run to the next. + std::sort(queue->begin() + size_before, queue->end()); } context_->modified_domains.ResetAllToFalse(); - - // Make sure the order is deterministic! because var_to_constraints[] - // order changes from one run to the next. - std::sort(queue->begin(), queue->end()); return !queue->empty(); } @@ -13871,47 +13756,58 @@ void CpModelPresolver::PresolveToFixPoint() { // TODO(user): ideally we should "wake-up" any constraint that contains an // absent interval in the main propagation loop above. But we currently don't // maintain such list. - const int num_constraints = context_->working_model->constraints_size(); - for (int c = 0; c < num_constraints; ++c) { - if (time_limit_->LimitReached()) break; - ConstraintProto* ct = context_->working_model->mutable_constraints(c); - switch (ct->constraint_case()) { - case ConstraintProto::kNoOverlap: - // Filter out absent intervals. - if (PresolveNoOverlap(ct)) { - context_->UpdateConstraintVariableUsage(c); - } - break; - case ConstraintProto::kNoOverlap2D: - // Filter out absent intervals. - if (PresolveNoOverlap2D(c, ct)) { - context_->UpdateConstraintVariableUsage(c); - } - break; - case ConstraintProto::kCumulative: - // Filter out absent intervals. 
- if (PresolveCumulative(ct)) { - context_->UpdateConstraintVariableUsage(c); - } - break; - case ConstraintProto::kBoolOr: { - // Try to infer domain reductions from clauses and the saved "implies in - // domain" relations. - for (const auto& pair : - context_->deductions.ProcessClause(ct->bool_or().literals())) { - bool modified = false; - if (!context_->IntersectDomainWith(pair.first, pair.second, - &modified)) { - return; + if (!time_limit_->LimitReached()) { + const int num_constraints = context_->working_model->constraints_size(); + TimeLimitCheckEveryNCalls bool_or_check_time_limit(100, time_limit_); + for (int c = 0; c < num_constraints; ++c) { + ConstraintProto* ct = context_->working_model->mutable_constraints(c); + // We don't want to check the time limit at each "small" constraint as + // there could be many. + bool check_time_limit = false; + + switch (ct->constraint_case()) { + case ConstraintProto::kNoOverlap: + // Filter out absent intervals. + if (PresolveNoOverlap(ct)) { + context_->UpdateConstraintVariableUsage(c); + } + check_time_limit = true; + break; + case ConstraintProto::kNoOverlap2D: + // Filter out absent intervals. + if (PresolveNoOverlap2D(c, ct)) { + context_->UpdateConstraintVariableUsage(c); + } + check_time_limit = true; + break; + case ConstraintProto::kCumulative: + // Filter out absent intervals. + if (PresolveCumulative(ct)) { + context_->UpdateConstraintVariableUsage(c); } - if (modified) { - context_->UpdateRuleStats("deductions: reduced variable domain"); + check_time_limit = true; + break; + case ConstraintProto::kBoolOr: { + // Try to infer domain reductions from clauses and the saved "implies + // in domain" relations. 
+ for (const auto& pair : + context_->deductions.ProcessClause(ct->bool_or().literals())) { + bool modified = false; + if (!context_->IntersectDomainWith(pair.first, pair.second, + &modified)) { + return; + } + if (modified) { + context_->UpdateRuleStats("deductions: reduced variable domain"); + } } + if (bool_or_check_time_limit.LimitReached()) check_time_limit = true; + break; } - break; + default: + break; } - default: - break; + if (check_time_limit && time_limit_->LimitReached()) break; } } diff --git a/ortools/sat/cp_model_presolve.h b/ortools/sat/cp_model_presolve.h index f17aea0bc42..b6068867a2b 100644 --- a/ortools/sat/cp_model_presolve.h +++ b/ortools/sat/cp_model_presolve.h @@ -335,7 +335,6 @@ class CpModelPresolver { // merge this with what ExpandObjective() is doing. void ShiftObjectiveWithExactlyOnes(); - void MaybeTransferLinear1ToAnotherVariable(int var); void ProcessVariableOnlyUsedInEncoding(int var); void TryToSimplifyDomain(int var); diff --git a/ortools/sat/cp_model_utils.cc b/ortools/sat/cp_model_utils.cc index 1d925ea1c23..8725d300d6a 100644 --- a/ortools/sat/cp_model_utils.cc +++ b/ortools/sat/cp_model_utils.cc @@ -1152,5 +1152,29 @@ int CombineSeed(int base_seed, int64_t delta) { return static_cast(FingerprintSingleField(base_seed, fp) & (0x7FFFFFFF)); } +bool IsAffineIntAbs(const ConstraintProto& ct) { + if (ct.constraint_case() != ConstraintProto::kLinMax || + ct.lin_max().exprs_size() != 2 || ct.lin_max().target().vars_size() > 1 || + ct.lin_max().exprs(0).vars_size() != 1 || + ct.lin_max().exprs(1).vars_size() != 1) { + return false; + } + + const LinearArgumentProto& lin_max = ct.lin_max(); + if (lin_max.exprs(0).offset() != -lin_max.exprs(1).offset()) return false; + if (PositiveRef(lin_max.exprs(0).vars(0)) != + PositiveRef(lin_max.exprs(1).vars(0))) { + return false; + } + + const int64_t left_coeff = RefIsPositive(lin_max.exprs(0).vars(0)) + ? 
lin_max.exprs(0).coeffs(0) + : -lin_max.exprs(0).coeffs(0); + const int64_t right_coeff = RefIsPositive(lin_max.exprs(1).vars(0)) + ? lin_max.exprs(1).coeffs(0) + : -lin_max.exprs(1).coeffs(0); + return left_coeff == -right_coeff; +} + } // namespace sat } // namespace operations_research diff --git a/ortools/sat/cp_model_utils.h b/ortools/sat/cp_model_utils.h index 8d8979677f6..87eba2eaf03 100644 --- a/ortools/sat/cp_model_utils.h +++ b/ortools/sat/cp_model_utils.h @@ -289,6 +289,9 @@ bool SafeAddLinearExpressionToLinearConstraint( const LinearExpressionProto& expr, int64_t coefficient, LinearConstraintProto* linear); +// Returns if a constraint is of the form y = lin_max(x, -x). +bool IsAffineIntAbs(const ConstraintProto& ct); + // Returns true iff a == b * b_scaling. bool LinearExpressionProtosAreEqual(const LinearExpressionProto& a, const LinearExpressionProto& b, diff --git a/ortools/sat/presolve_context.cc b/ortools/sat/presolve_context.cc index 19bf5ffcac3..3787156be44 100644 --- a/ortools/sat/presolve_context.cc +++ b/ortools/sat/presolve_context.cc @@ -633,6 +633,22 @@ bool PresolveContext::ConstraintIsInactive(int index) const { return false; } +bool PresolveContext::MarkConstraintAsFalse(ConstraintProto* ct, + std::string_view reason) { + DCHECK(!reason.empty()); + if (!HasEnforcementLiteral(*ct)) { + return NotifyThatModelIsUnsat(reason); + } + // Change the constraint to a bool_or. 
+ ct->mutable_bool_or()->clear_literals(); + for (const int lit : ct->enforcement_literal()) { + ct->mutable_bool_or()->add_literals(NegatedRef(lit)); + } + ct->clear_enforcement_literal(); + UpdateRuleStats(reason); + return true; +} + bool PresolveContext::ConstraintIsOptional(int ct_ref) const { const ConstraintProto& ct = working_model->constraints(ct_ref); bool contains_one_free_literal = false; diff --git a/ortools/sat/presolve_context.h b/ortools/sat/presolve_context.h index f1348b2029a..13fd7063bb9 100644 --- a/ortools/sat/presolve_context.h +++ b/ortools/sat/presolve_context.h @@ -616,6 +616,10 @@ class PresolveContext { return interval_usage_[c]; } + // Note this function does not update the constraint graph. It assumes this is + // done elsewhere. + bool MarkConstraintAsFalse(ConstraintProto* ct, std::string_view reason); + // Checks if a constraint contains an enforcement literal set to false, // or if it has been cleared. bool ConstraintIsInactive(int ct_index) const; diff --git a/ortools/sat/presolve_encoding.cc b/ortools/sat/presolve_encoding.cc new file mode 100644 index 00000000000..33398ff76b9 --- /dev/null +++ b/ortools/sat/presolve_encoding.cc @@ -0,0 +1,136 @@ +// Copyright 2010-2025 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "ortools/sat/presolve_encoding.h" + +#include +#include +#include +#include +#include +#include + +#include "absl/log/log.h" +#include "ortools/sat/cp_model_utils.h" +#include "ortools/sat/presolve_context.h" +#include "ortools/util/sorted_interval_list.h" + +namespace operations_research { +namespace sat { + +bool MaybeTransferLinear1ToAnotherVariable( + VariableEncodingLocalModel& local_model, PresolveContext* context) { + if (local_model.var == -1) return true; + if (local_model.variable_coeff_in_objective != 0) return true; + if (local_model.single_constraint_using_the_var_outside_the_local_model == + -1) { + return true; + } + const int other_c = + local_model.single_constraint_using_the_var_outside_the_local_model; + + const std::vector& to_rewrite = local_model.linear1_constraints; + + // In general constraint with more than two variable can't be removed. + // Similarly for linear2 with non-fixed rhs as we would need to check the form + // of all implied domain. + const auto& other_ct = context->working_model->constraints(other_c); + if (context->ConstraintToVars(other_c).size() != 2 || + !other_ct.enforcement_literal().empty() || + other_ct.constraint_case() == ConstraintProto::kLinear) { + return true; + } + + // This will be the rewriting function. It takes the implied domain of var + // from linear1, and return a pair {new_var, new_var_implied_domain}. + std::function(const Domain& implied)> transfer_f = + nullptr; + + const int var = local_model.var; + // We only support a few cases. + // + // TODO(user): implement more! Note that the linear2 case was tempting, but if + // we don't have an equality, we can't transfer, and if we do, we actually + // have affine equivalence already. 
+ if (other_ct.constraint_case() == ConstraintProto::kLinMax && + other_ct.lin_max().target().vars().size() == 1 && + other_ct.lin_max().target().vars(0) == var && + std::abs(other_ct.lin_max().target().coeffs(0)) == 1 && + IsAffineIntAbs(other_ct)) { + context->UpdateRuleStats("linear1: transferred from abs(X) to X"); + const LinearExpressionProto& target = other_ct.lin_max().target(); + const LinearExpressionProto& expr = other_ct.lin_max().exprs(0); + transfer_f = [target = target, expr = expr](const Domain& implied) { + Domain target_domain = + implied.ContinuousMultiplicationBy(target.coeffs(0)) + .AdditionWith(Domain(target.offset())); + target_domain = target_domain.IntersectionWith( + Domain(0, std::numeric_limits::max())); + + // We have target = abs(expr). + const Domain expr_domain = + target_domain.UnionWith(target_domain.Negation()); + const Domain new_domain = expr_domain.AdditionWith(Domain(-expr.offset())) + .InverseMultiplicationBy(expr.coeffs(0)); + return std::make_pair(expr.vars(0), new_domain); + }; + } + + if (transfer_f == nullptr) { + context->UpdateRuleStats( + "TODO linear1: appear in only one extra 2-var constraint"); + return true; + } + + // Applies transfer_f to all linear1. + const Domain var_domain = context->DomainOf(var); + for (const int c : to_rewrite) { + ConstraintProto* ct = context->working_model->mutable_constraints(c); + if (ct->linear().vars(0) != var || ct->linear().coeffs(0) != 1) { + // This shouldn't happen. 
+ LOG(INFO) << "Aborted in MaybeTransferLinear1ToAnotherVariable()"; + return true; + } + + const Domain implied = + var_domain.IntersectionWith(ReadDomainFromProto(ct->linear())); + auto [new_var, new_domain] = transfer_f(implied); + const Domain current = context->DomainOf(new_var); + new_domain = new_domain.IntersectionWith(current); + if (new_domain.IsEmpty()) { + if (!context->MarkConstraintAsFalse(ct, "linear1: unsat transfer")) { + return false; + } + } else if (new_domain == current) { + // Note that we don't need to remove this constraint from + // local_model.linear1_constraints since we will set + // local_model.var = -1 below. + ct->Clear(); + } else { + ct->mutable_linear()->set_vars(0, new_var); + FillDomainInProto(new_domain, ct->mutable_linear()); + } + context->UpdateConstraintVariableUsage(c); + } + + // Copy other_ct to the mapping model and delete var! + context->NewMappingConstraint(other_ct, __FILE__, __LINE__); + context->working_model->mutable_constraints(other_c)->Clear(); + context->UpdateConstraintVariableUsage(other_c); + context->MarkVariableAsRemoved(var); + local_model.var = -1; + return true; +} + +} // namespace sat +} // namespace operations_research diff --git a/ortools/sat/presolve_encoding.h b/ortools/sat/presolve_encoding.h new file mode 100644 index 00000000000..6dad2318bdb --- /dev/null +++ b/ortools/sat/presolve_encoding.h @@ -0,0 +1,65 @@ +// Copyright 2010-2025 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#ifndef ORTOOLS_SAT_PRESOLVE_ENCODING_H_ +#define ORTOOLS_SAT_PRESOLVE_ENCODING_H_ + +#include +#include + +#include "ortools/sat/presolve_context.h" + +namespace operations_research { +namespace sat { + +struct VariableEncodingLocalModel { + // The integer variable that is encoded. Internally it can be replaced by + // -1 if some presolve rule removed the variable. + int var; + + // The linear1 constraint indexes that define conditional bounds on the + // variable. Those linear1 should have exactly one enforcement literal and + // satisfy `PositiveRef(enf) != var`. All linear1 restraining `var` and + // fulfilling the conditions above will appear here. + std::vector linear1_constraints; + + // Zero if `var` doesn't appear in the objective. + int64_t variable_coeff_in_objective = 0; + + // Note: the objective doesn't count as a constraint outside the local model. + bool var_in_more_than_one_constraint_outside_the_local_model; + + // Set to -1 if there is none or if the variable appears in more than one + // constraint outside the local model. + int single_constraint_using_the_var_outside_the_local_model = -1; +}; + +// If we have a bunch of constraint of the form literal => Y \in domain and +// another constraint Y = f(X), we can remove Y, that constraint, and transform +// all linear1 from constraining Y to constraining X. +// +// We can for instance do it for Y = abs(X) or Y = X^2 easily. More complex +// function might be trickier. +// +// Note that we can't always do it in the reverse direction though! +// If we have l => X = -1, we can't transfer that to abs(X) for instance, since +// X=1 will also map to abs(-1). We can only do it if for all implied domain D +// we have f^-1(f(D)) = D, which is not easy to check. +// Returns false if we prove unsat. 
+bool MaybeTransferLinear1ToAnotherVariable( + VariableEncodingLocalModel& local_model, PresolveContext* context); + +} // namespace sat +} // namespace operations_research + +#endif // ORTOOLS_SAT_PRESOLVE_ENCODING_H_ diff --git a/ortools/sat/sat_base.h b/ortools/sat/sat_base.h index 051be3dd50b..bc791468b85 100644 --- a/ortools/sat/sat_base.h +++ b/ortools/sat/sat_base.h @@ -142,6 +142,22 @@ inline std::ostream& operator<<(std::ostream& os, return os; } +inline std::ostream& operator<<(std::ostream& os, + absl::Span literals) { + os << "["; + bool first = true; + for (const LiteralIndex index : literals) { + if (first) { + first = false; + } else { + os << ","; + } + os << Literal(index).DebugString(); + } + os << "]"; + return os; +} + // Only used for testing to use the classical SAT notation for a literal. This // allows to write Literals({+1, -4, +3}) for the clause with BooleanVariable 0 // and 2 appearing positively and 3 negatively. diff --git a/ortools/sat/sat_inprocessing.cc b/ortools/sat/sat_inprocessing.cc index 69769ce84b9..1baddcf50cf 100644 --- a/ortools/sat/sat_inprocessing.cc +++ b/ortools/sat/sat_inprocessing.cc @@ -2017,11 +2017,18 @@ void GateCongruenceClosure::ExtractAndGatesAndFillShortTruthTables( // been cleaned up yet, as these are needed to really recover all gates. // // TODO(user): Ideally the detection code should be robust to that. + // TODO(user): Maybe we should always have an hash-map of binary up to date? int num_fn1 = 0; std::vector> binary_used; for (LiteralIndex a(0); a < implication_graph_->literal_size(); ++a) { + // TODO(user): If we know we have too many implications for the time limit + // We should just be better of not doing that loop at all. 
+ if (timer.WorkLimitIsReached()) break; if (implication_graph_->IsRedundant(Literal(a))) continue; - for (const Literal b : implication_graph_->Implications(Literal(a))) { + const absl::Span implied = + implication_graph_->Implications(Literal(a)); + timer.TrackHashLookups(implied.size()); + for (const Literal b : implied) { if (implication_graph_->IsRedundant(b)) continue; std::array key2; @@ -2066,9 +2073,7 @@ void GateCongruenceClosure::ExtractAndGatesAndFillShortTruthTables( // The AND gate of size 3 should be detected by the short table code, no // need to do the algo here which should be slower. - // - // TODO(user): This seems to be less strong. I think we have some bug - // in our fixed point loop when we fix variables. + continue; } else if (clause->size() == 4) { AddToTruthTable<4>(clause, ids4_); } else if (clause->size() == 5) { @@ -2867,6 +2872,7 @@ class LratGateCongruenceHelper { implication_graph_->GetClauseId(target.Negated(), Literal(m_index))); Append(clause_ids, GetLiteralImpliesRepresentativeClauseId(Literal(m_index))); + Append(clause_ids, GetLiteralImpliesRepresentativeClauseId(target)); } private: @@ -2943,7 +2949,8 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { PresolveTimer timer("GateCongruenceClosure", logger_, time_limit_); timer.OverrideLogging(log_info); - const int num_literals(sat_solver_->NumVariables() * 2); + const int num_variables(sat_solver_->NumVariables()); + const int num_literals(num_variables * 2); marked_.ClearAndResize(Literal(num_literals)); seen_.ClearAndResize(Literal(num_literals)); next_seen_.ClearAndResize(Literal(num_literals)); @@ -2955,7 +2962,7 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { // Lets release the memory on exit. 
CHECK(tmp_binary_clauses_.empty()); - absl::Cleanup cleanup = [this] { tmp_binary_clauses_.clear(); }; + absl::Cleanup binary_cleanup = [this] { tmp_binary_clauses_.clear(); }; ExtractAndGatesAndFillShortTruthTables(timer); ExtractShortGates(timer); @@ -2985,37 +2992,67 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { // Tricky: we need to resize this to num_literals because the union_find that // merges target can choose for a representative a literal that is not in the // set of gate inputs. - MergeableOccurrenceList input_literals_to_gate; - input_literals_to_gate.ResetFromTranspose(gates_inputs_, num_literals); + MergeableOccurrenceList input_var_to_gate; + struct GetVarMapper { + BooleanVariable operator()(LiteralIndex l) const { + return Literal(l).Variable(); + } + }; + input_var_to_gate.ResetFromTransposeMap(gates_inputs_, + num_variables); LratGateCongruenceHelper lrat_helper( trail_, implication_graph_, clause_manager_, clause_id_generator_, lrat_proof_handler_, gates_target_, gates_clauses_, union_find); + // Stats + make sure we run it at exit. + int num_units = 0; + int num_equivalences = 0; + int num_processed = 0; + int arity1_equivalences = 0; + absl::Cleanup stat_cleanup = [&] { + total_wtime_ += timer.wtime(); + total_dtime_ += timer.deterministic_time(); + total_equivalences_ += num_equivalences; + total_num_units_ += num_units; + timer.AddCounter("processed", num_processed); + timer.AddCounter("units", num_units); + timer.AddCounter("f1_equiv", arity1_equivalences); + timer.AddCounter("equiv", num_equivalences); + }; + // Starts with all gates in the queue. 
const int num_gates = gates_inputs_.size(); + total_gates_ += num_gates; std::vector in_queue(num_gates, true); std::vector queue(num_gates); for (GateId id(0); id < num_gates; ++id) queue[id.value()] = id; - int num_units = 0; + int num_processed_fixed_variables = trail_->Index(); + const auto fix_literal = [&, this](Literal to_fix, absl::Span clause_ids) { + DCHECK_EQ(to_fix, lrat_helper.GetRepresentativeWithProofSupport(to_fix)); if (assignment_.LiteralIsTrue(to_fix)) return true; if (!clause_manager_->InprocessingFixLiteral(to_fix, clause_ids)) { return false; } + // This is quite tricky: as we fix a literal, we propagate right away + // everything implied by it in the binary implication graph. So we need to + // loop over all newly_fixed variable in order to properly reach the fix + // point! ++num_units; - for (const GateId gate_id : input_literals_to_gate[to_fix]) { - if (in_queue[gate_id.value()]) continue; - queue.push_back(gate_id); - in_queue[gate_id.value()] = true; - } - for (const GateId gate_id : input_literals_to_gate[to_fix.Negated()]) { - if (in_queue[gate_id.value()]) continue; - queue.push_back(gate_id); - in_queue[gate_id.value()] = true; + for (; num_processed_fixed_variables < trail_->Index(); + ++num_processed_fixed_variables) { + const Literal to_update = lrat_helper.GetRepresentativeWithProofSupport( + (*trail_)[num_processed_fixed_variables]); + for (const GateId gate_id : input_var_to_gate[to_update.Variable()]) { + if (in_queue[gate_id.value()]) continue; + queue.push_back(gate_id); + in_queue[gate_id.value()] = true; + } + input_var_to_gate.ClearList(to_update.Variable()); } return true; }; @@ -3025,7 +3062,6 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { return trail_->GetUnitClauseId(a.Variable()); }; - int num_equivalences = 0; const auto new_equivalence = [&, this](Literal a, Literal b, ClauseId a_implies_b, ClauseId b_implies_a) { @@ -3052,6 +3088,8 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { return 
false; } + BooleanVariable to_merge_var = kNoBooleanVariable; + BooleanVariable rep_var = kNoBooleanVariable; for (const bool negate : {false, true}) { const LiteralIndex x = negate ? a.NegatedIndex() : a.Index(); const LiteralIndex y = negate ? b.NegatedIndex() : b.Index(); @@ -3064,7 +3102,14 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { union_find.AddEdge(x.value(), y.value()); const LiteralIndex rep(union_find.FindRoot(y.value())); const LiteralIndex to_merge = rep == x ? y : x; - input_literals_to_gate.MergeInto(to_merge, rep); + if (to_merge_var == kNoBooleanVariable) { + to_merge_var = Literal(to_merge).Variable(); + rep_var = Literal(rep).Variable(); + } else { + // We should have the same var. + CHECK_EQ(to_merge_var, Literal(to_merge).Variable()); + CHECK_EQ(rep_var, Literal(rep).Variable()); + } if (lrat_proof_handler_ != nullptr) { if (rep == x) { @@ -3075,17 +3120,6 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { y_implies_x); } } - - // Re-add to the queue all gates with touched inputs. - // - // TODO(user): I think we could only add the gates of "to_merge" - // before we merge. This part of the code is quite quick in any - // case. - for (const GateId gate_id : input_literals_to_gate[rep]) { - if (in_queue[gate_id.value()]) continue; - queue.push_back(gate_id); - in_queue[gate_id.value()] = true; - } } // Invariant. @@ -3095,16 +3129,28 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { CHECK_EQ( lrat_helper.GetRepresentativeWithProofSupport(b), lrat_helper.GetRepresentativeWithProofSupport(b.Negated()).Negated()); + + // Re-add to the queue all gates with touched inputs. + // + // TODO(user): I think we could only add the gates of "to_merge" + // before we merge. This part of the code is quite quick in any + // case. 
+ input_var_to_gate.MergeInto(to_merge_var, rep_var); + for (const GateId gate_id : input_var_to_gate[rep_var]) { + if (in_queue[gate_id.value()]) continue; + queue.push_back(gate_id); + in_queue[gate_id.value()] = true; + } + return true; }; // Main loop. - int num_processed = 0; - int arity1_equivalences = 0; while (!queue.empty()) { ++num_processed; const GateId id = queue.back(); queue.pop_back(); + CHECK(in_queue[id.value()]); in_queue[id.value()] = false; // Tricky: the hash-map might contain id not yet canonicalized. And in @@ -3140,17 +3186,15 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { CHECK_NE(id, other_id); CHECK_GE(other_id, 0); CHECK_EQ(gates_type_[id], gates_type_[other_id]); - CHECK_EQ(absl::Span(gates_inputs_[id]), - absl::Span(gates_inputs_[other_id])); + CHECK_EQ(gates_inputs_[id], gates_inputs_[other_id]); - input_literals_to_gate.RemoveFromFutureOutput(id); + input_var_to_gate.RemoveFromFutureOutput(id); // We detected a <=> b (or, equivalently, rep(a) <=> rep(b)). const Literal a(gates_target_[id]); const Literal b(gates_target_[other_id]); const Literal rep_a = lrat_helper.GetRepresentativeWithProofSupport(a); const Literal rep_b = lrat_helper.GetRepresentativeWithProofSupport(b); - if (rep_a != rep_b) { ClauseId rep_a_implies_rep_b = kNoClauseId; ClauseId rep_b_implies_rep_a = kNoClauseId; @@ -3200,9 +3244,11 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { // then target must be false. 
if (marked_[Literal(rep).Negated()]) { is_unit = true; - input_literals_to_gate.RemoveFromFutureOutput(id); + input_var_to_gate.RemoveFromFutureOutput(id); - const Literal to_fix = Literal(gates_target_[id]).Negated(); + const Literal initial_to_fix = Literal(gates_target_[id]).Negated(); + const Literal to_fix = + lrat_helper.GetRepresentativeWithProofSupport(initial_to_fix); if (!assignment_.LiteralIsTrue(to_fix)) { absl::InlinedVector clause_ids; if (lrat_proof_handler_ != nullptr) { @@ -3249,10 +3295,9 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { // Generic "short" gates. // We just take the representative and re-canonicalize. - absl::Span inputs = gates_inputs_[id]; DCHECK_GE(gates_type_[id], 0); - DCHECK_EQ(gates_type_[id] >> (1 << (inputs.size())), 0); - for (LiteralIndex& lit_ref : inputs) { + DCHECK_EQ(gates_type_[id] >> (1 << (gates_inputs_[id].size())), 0); + for (LiteralIndex& lit_ref : gates_inputs_[id]) { lit_ref = lrat_helper.GetRepresentativeWithProofSupport(Literal(lit_ref)) .Index(); @@ -3261,7 +3306,7 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { const int new_size = CanonicalizeShortGate(id); if (new_size == 1) { // We have a function of size 1! This is an equivalence. - input_literals_to_gate.RemoveFromFutureOutput(id); + input_var_to_gate.RemoveFromFutureOutput(id); const Literal a = Literal(gates_target_[id]); const Literal b = Literal(gates_inputs_[id][0]); const Literal rep_a = lrat_helper.GetRepresentativeWithProofSupport(a); @@ -3277,7 +3322,7 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { break; } else if (new_size == 0) { // We have a fixed function! Just fix the literal. - input_literals_to_gate.RemoveFromFutureOutput(id); + input_var_to_gate.RemoveFromFutureOutput(id); const Literal initial_to_fix = (gates_type_[id] & 1) == 1 ? 
Literal(gates_target_[id]) : Literal(gates_target_[id]).Negated(); @@ -3293,16 +3338,44 @@ bool GateCongruenceClosure::DoOneRound(bool log_info) { } } - total_wtime_ += timer.wtime(); - total_dtime_ += timer.deterministic_time(); - total_gates_ += num_gates; - total_equivalences_ += num_equivalences; - total_num_units_ += num_units; + // DEBUG check that we reached the fix point correctly. + if (DEBUG_MODE) { + CHECK(queue.empty()); + gate_set.clear(); + for (GateId id(0); id < num_gates; ++id) { + if (gates_type_[id] == kAndGateType) continue; + if (assignment_.LiteralIsAssigned(Literal(gates_target_[id]))) continue; + + const int new_size = CanonicalizeShortGate(id); + if (new_size == 0) { + CHECK_EQ(gates_type_[id] & 1, 0); + const Literal initial_to_fix = Literal(gates_target_[id]).Negated(); + const Literal to_fix = + lrat_helper.GetRepresentativeWithProofSupport(initial_to_fix); + CHECK(assignment_.LiteralIsTrue(to_fix)); + } else if (new_size == 1) { + CHECK(!assignment_.LiteralIsAssigned(Literal(gates_target_[id]))); + CHECK(!assignment_.LiteralIsAssigned(Literal(gates_inputs_[id][0]))); + CHECK_EQ(lrat_helper.GetRepresentativeWithProofSupport( + Literal(gates_target_[id])), + lrat_helper.GetRepresentativeWithProofSupport( + Literal(gates_inputs_[id][0]))) + << id << " "; + } else { + const auto [it, inserted] = gate_set.insert(id); + if (!inserted) { + const GateId other_id = *it; + CHECK_EQ(lrat_helper.GetRepresentativeWithProofSupport( + Literal(gates_target_[id])), + lrat_helper.GetRepresentativeWithProofSupport( + Literal(gates_target_[other_id]))) + << id << " " << gates_inputs_[id] << " " << other_id << " " + << gates_inputs_[other_id]; + } + } + } + } - timer.AddCounter("arity1_equivalences", arity1_equivalences); - timer.AddCounter("units", num_units); - timer.AddCounter("processed", num_processed); - timer.AddCounter("equivalences", num_equivalences); return true; } diff --git a/ortools/sat/util.h b/ortools/sat/util.h index 
98786296d04..4077df4ddbd 100644 --- a/ortools/sat/util.h +++ b/ortools/sat/util.h @@ -193,11 +193,14 @@ class MergeableOccurrenceList { public: MergeableOccurrenceList() = default; - void ResetFromTranspose(const CompactVectorVector& input, - int min_transpose_size = 0) { - rows_.ResetFromTranspose(input, min_transpose_size); + template + void ResetFromTransposeMap(const Container& input, + int min_transpose_size = 0) { + rows_.template ResetFromTransposeMap(input, + min_transpose_size); next_.assign(rows_.size(), K(-1)); marked_.ClearAndResize(V(input.size())); + merged_.ClearAndResize(K(rows_.size())); } int size() const { return rows_.size(); } @@ -212,6 +215,7 @@ class MergeableOccurrenceList { // This is not const because it lazily merges lists. absl::Span operator[](K key) { if (key >= rows_.size()) return {}; + CHECK(!merged_[key]); tmp_result_.clear(); K previous(-1); @@ -247,9 +251,13 @@ class MergeableOccurrenceList { // // And otherwise key should never be accessed anymore. void MergeInto(K to_merge, K representative) { + CHECK(!merged_[to_merge]); + DCHECK_GE(to_merge, 0); + DCHECK_GE(representative, 0); DCHECK_LT(to_merge, rows_.size()); DCHECK_LT(representative, rows_.size()); if (to_merge == representative) return; + merged_.Set(to_merge); // Find the end of the representative list to happen to_merge there. // @@ -259,10 +267,16 @@ class MergeableOccurrenceList { K last_list = representative; while (next_[InternalKey(last_list)] >= 0) { last_list = next_[InternalKey(last_list)]; + DCHECK_NE(last_list, to_merge); } next_[InternalKey(last_list)] = to_merge; } + void ClearList(K key) { + next_[InternalKey(key)] = -1; + rows_.Shrink(key, 0); + } + private: // Convert int and StrongInt to normal int. int InternalKey(K key) const; @@ -271,6 +285,7 @@ class MergeableOccurrenceList { // The bitset is used to remove duplicates when merging lists. 
std::vector tmp_result_; Bitset64 marked_; + Bitset64 merged_; // Each "row" contains a set of values (we lazily remove duplicate). CompactVectorVector rows_; diff --git a/ortools/sat/util_test.cc b/ortools/sat/util_test.cc index 19ac63aa370..a0c076012d4 100644 --- a/ortools/sat/util_test.cc +++ b/ortools/sat/util_test.cc @@ -172,7 +172,10 @@ TEST(MergeableOccurrenceList, BasicTest) { storage.ResetFromFlatMapping(keys, vals); MergeableOccurrenceList occ; - occ.ResetFromTranspose(storage); + struct GetVarMapper { + int operator()(int i) const { return i; } + }; + occ.ResetFromTransposeMap(storage); // The first access should be ordered. EXPECT_THAT(occ.size(), 6); diff --git a/ortools/third_party_solvers/BUILD.bazel b/ortools/third_party_solvers/BUILD.bazel index 188f5b4deb2..2fbd4966233 100644 --- a/ortools/third_party_solvers/BUILD.bazel +++ b/ortools/third_party_solvers/BUILD.bazel @@ -48,7 +48,6 @@ cc_library( hdrs = ["xpress_environment.h"], deps = [ ":dynamic_library", - "//ortools/base", "//ortools/base:base_export", "//ortools/base:status_builder", "@abseil-cpp//absl/base", diff --git a/ortools/util/logging.h b/ortools/util/logging.h index 4907d1fdb72..4516a3628e0 100644 --- a/ortools/util/logging.h +++ b/ortools/util/logging.h @@ -135,6 +135,7 @@ class PresolveTimer { // By default we want a limit of around 1 deterministic seconds. 
void AddToWork(double dtime) { work_ += dtime; } void TrackSimpleLoop(int size) { work_ += 5e-9 * size; } + void TrackHashLookups(int size) { work_ += 5e-8 * size; } void TrackFastLoop(int size) { work_ += 1e-9 * size; } bool WorkLimitIsReached() const { return work_ >= 1.0; } From 30a7baee6d435bb1bf704ea97d8b736d8ad3152f Mon Sep 17 00:00:00 2001 From: Guillaume Chatelet Date: Mon, 5 Jan 2026 10:51:26 +0000 Subject: [PATCH 098/111] Add vector reserve to examples/cpp --- examples/cpp/costas_array_sat.cc | 1 + examples/cpp/frequency_assignment_problem.cc | 1 + examples/cpp/jobshop_sat.cc | 1 + examples/cpp/network_routing_sat.cc | 2 ++ examples/cpp/qap_sat.cc | 1 + examples/cpp/slitherlink_sat.cc | 2 ++ examples/cpp/sports_scheduling_sat.cc | 1 + 7 files changed, 9 insertions(+) diff --git a/examples/cpp/costas_array_sat.cc b/examples/cpp/costas_array_sat.cc index 79c6bc75fdf..c6a68671af2 100644 --- a/examples/cpp/costas_array_sat.cc +++ b/examples/cpp/costas_array_sat.cc @@ -98,6 +98,7 @@ void CostasHard(const int dim) { // create the variables std::vector vars; Domain domain(1, dim); + vars.reserve(dim); for (int i = 0; i < dim; ++i) { vars.push_back( cp_model.NewIntVar(domain).WithName(absl::StrCat("var_", i))); diff --git a/examples/cpp/frequency_assignment_problem.cc b/examples/cpp/frequency_assignment_problem.cc index a78e20a4c32..17d25d3c94a 100644 --- a/examples/cpp/frequency_assignment_problem.cc +++ b/examples/cpp/frequency_assignment_problem.cc @@ -567,6 +567,7 @@ void HardFapSolver(const absl::btree_map& data_variables, static_cast(variables.size()), &cardinality); solver.AddConstraint(solver.MakeDistribute(variables, values, cardinality)); std::vector value_not_assigned; + value_not_assigned.reserve(values.size()); for (int val = 0; val < values.size(); ++val) { value_not_assigned.push_back( solver.MakeIsEqualCstVar(cardinality[val], 0)); diff --git a/examples/cpp/jobshop_sat.cc b/examples/cpp/jobshop_sat.cc index c8cbc782952..2415bb4767d 100644 --- 
a/examples/cpp/jobshop_sat.cc +++ b/examples/cpp/jobshop_sat.cc @@ -279,6 +279,7 @@ void CreateAlternativeTasks( // Exactly one alternative interval is present. std::vector interval_presences; + interval_presences.reserve(alternatives.size()); for (const AlternativeTaskData& alternative : alternatives) { interval_presences.push_back(alternative.presence); } diff --git a/examples/cpp/network_routing_sat.cc b/examples/cpp/network_routing_sat.cc index 0072f8d4b5f..5740835abd6 100644 --- a/examples/cpp/network_routing_sat.cc +++ b/examples/cpp/network_routing_sat.cc @@ -377,9 +377,11 @@ class NetworkRoutingSolver { CpModelBuilder cp_model; std::vector arc_vars; std::vector node_vars; + node_vars.reserve(max_length); for (int i = 0; i < max_length; ++i) { node_vars.push_back(cp_model.NewIntVar(Domain(0, num_nodes() - 1))); } + arc_vars.reserve(max_length - 1); for (int i = 0; i < max_length - 1; ++i) { arc_vars.push_back(cp_model.NewIntVar(Domain(-1, count_arcs() - 1))); } diff --git a/examples/cpp/qap_sat.cc b/examples/cpp/qap_sat.cc index d98ce396a06..af457c5446a 100644 --- a/examples/cpp/qap_sat.cc +++ b/examples/cpp/qap_sat.cc @@ -60,6 +60,7 @@ void SolveQap() { // Occupy each location exactly once. 
for (int l = 0; l < n; ++l) { std::vector tmp; + tmp.reserve(n); for (int f = 0; f < n; ++f) { tmp.push_back(place_vars[f][l]); } diff --git a/examples/cpp/slitherlink_sat.cc b/examples/cpp/slitherlink_sat.cc index a0a9e7358a3..84255e86ea0 100644 --- a/examples/cpp/slitherlink_sat.cc +++ b/examples/cpp/slitherlink_sat.cc @@ -95,10 +95,12 @@ void SlitherLink(absl::Span> data) { CpModelBuilder builder; std::vector horizontal_arcs; + horizontal_arcs.reserve(2 * num_horizontal_arcs); for (int arc = 0; arc < 2 * num_horizontal_arcs; ++arc) { horizontal_arcs.push_back(builder.NewBoolVar()); } std::vector vertical_arcs; + vertical_arcs.reserve(2 * num_vertical_arcs); for (int arc = 0; arc < 2 * num_vertical_arcs; ++arc) { vertical_arcs.push_back(builder.NewBoolVar()); } diff --git a/examples/cpp/sports_scheduling_sat.cc b/examples/cpp/sports_scheduling_sat.cc index ffc7c09251a..0b7bdbf68c6 100644 --- a/examples/cpp/sports_scheduling_sat.cc +++ b/examples/cpp/sports_scheduling_sat.cc @@ -246,6 +246,7 @@ void FixtureModel(int num_teams) { for (int other = 0; other < num_teams; ++other) { if (team == other) continue; std::vector possible_days; + possible_days.reserve(num_days); for (int d = 0; d < num_days; ++d) { possible_days.push_back(fixtures[d][team][other]); } From caab346d4dd5fd182c18702d44628777c8bfd56f Mon Sep 17 00:00:00 2001 From: Guillaume Chatelet Date: Mon, 5 Jan 2026 13:26:43 +0100 Subject: [PATCH 099/111] Remove examples/tests folder (#4971) This folder was mostly used to store reproducers for issues but were never proper tests. 
--- .gitignore | 2 - CMakeLists.txt | 3 - README.md | 1 - cmake/vagrant/freebsd/cpp/Vagrantfile | 1 - cmake/vagrant/freebsd/java/Vagrantfile | 1 - cmake/vagrant/freebsd/python/Vagrantfile | 1 - cmake/vagrant/netbsd/cpp/Vagrantfile | 1 - cmake/vagrant/netbsd/dotnet/Vagrantfile | 1 - cmake/vagrant/netbsd/java/Vagrantfile | 1 - cmake/vagrant/netbsd/python/Vagrantfile | 1 - cmake/vagrant/openbsd/cpp/Vagrantfile | 1 - cmake/vagrant/openbsd/dotnet/Vagrantfile | 1 - cmake/vagrant/openbsd/java/Vagrantfile | 1 - cmake/vagrant/openbsd/python/Vagrantfile | 1 - examples/tests/CMakeLists.txt | 51 -- examples/tests/bug_fz1.cc | 74 -- examples/tests/cpp11_test.cc | 88 --- examples/tests/dual_loading.py | 12 - examples/tests/forbidden_intervals_test.cc | 160 ---- examples/tests/init_test.cc | 53 -- examples/tests/issue115.fzn | 560 -------------- examples/tests/issue115b.fzn | 777 -------------------- examples/tests/issue117.py | 65 -- examples/tests/issue1231.py | 79 -- examples/tests/issue128.py | 157 ---- examples/tests/issue1303.cc | 29 - examples/tests/issue139.fzn | 243 ------ examples/tests/issue173.cc | 36 - examples/tests/issue18.cs | 60 -- examples/tests/issue2.py | 42 -- examples/tests/issue22.cs | 84 --- examples/tests/issue3.py | 74 -- examples/tests/issue33.cs | 676 ----------------- examples/tests/issue4.py | 42 -- examples/tests/issue46.py | 94 --- examples/tests/issue5.py | 180 ----- examples/tests/issue57.cc | 39 - examples/tests/issue62.py | 29 - examples/tests/lp_test.cc | 232 ------ examples/tests/min_max_test.cc | 407 ---------- examples/tests/remote/Makefile | 18 - examples/tests/remote/linear_programming.cc | 115 --- examples/tests/remote/tsp.cc | 164 ----- 43 files changed, 4657 deletions(-) delete mode 100644 examples/tests/CMakeLists.txt delete mode 100644 examples/tests/bug_fz1.cc delete mode 100644 examples/tests/cpp11_test.cc delete mode 100755 examples/tests/dual_loading.py delete mode 100644 examples/tests/forbidden_intervals_test.cc delete mode 
100644 examples/tests/init_test.cc delete mode 100644 examples/tests/issue115.fzn delete mode 100644 examples/tests/issue115b.fzn delete mode 100755 examples/tests/issue117.py delete mode 100755 examples/tests/issue1231.py delete mode 100755 examples/tests/issue128.py delete mode 100644 examples/tests/issue1303.cc delete mode 100644 examples/tests/issue139.fzn delete mode 100644 examples/tests/issue173.cc delete mode 100644 examples/tests/issue18.cs delete mode 100755 examples/tests/issue2.py delete mode 100644 examples/tests/issue22.cs delete mode 100755 examples/tests/issue3.py delete mode 100644 examples/tests/issue33.cs delete mode 100755 examples/tests/issue4.py delete mode 100755 examples/tests/issue46.py delete mode 100755 examples/tests/issue5.py delete mode 100644 examples/tests/issue57.cc delete mode 100755 examples/tests/issue62.py delete mode 100644 examples/tests/lp_test.cc delete mode 100644 examples/tests/min_max_test.cc delete mode 100644 examples/tests/remote/Makefile delete mode 100644 examples/tests/remote/linear_programming.cc delete mode 100644 examples/tests/remote/tsp.cc diff --git a/.gitignore b/.gitignore index fad35df17c8..692491ab1a8 100644 --- a/.gitignore +++ b/.gitignore @@ -95,8 +95,6 @@ ortools/dotnet/*/bin ortools/dotnet/*/obj ortools/**/samples/bin ortools/**/samples/obj -examples/tests/bin -examples/tests/obj examples/contrib/bin examples/contrib/obj examples/dotnet/bin diff --git a/CMakeLists.txt b/CMakeLists.txt index 40faaae8ebc..69bf10b07bb 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -470,6 +470,3 @@ endforeach() foreach(EXAMPLES IN ITEMS contrib cpp dotnet java python) add_subdirectory(examples/${EXAMPLES}) endforeach() - -# Add tests in examples/tests -add_subdirectory(examples/tests) diff --git a/README.md b/README.md index eb614890de9..344dfd9abc4 100644 --- a/README.md +++ b/README.md @@ -84,7 +84,6 @@ This software suite is composed of the following components: * [python](examples/python) Python examples. 
* [notebook](examples/notebook) Jupyter/IPython notebooks. * [flatzinc](examples/flatzinc) FlatZinc examples. - * [tests](examples/tests) Unit tests and bug reports. * [tools](tools) Delivery Tools (e.g. Windows GNU binaries, scripts, release dockers) ## Installation diff --git a/cmake/vagrant/freebsd/cpp/Vagrantfile b/cmake/vagrant/freebsd/cpp/Vagrantfile index 5440fd949ed..7b268a72970 100644 --- a/cmake/vagrant/freebsd/cpp/Vagrantfile +++ b/cmake/vagrant/freebsd/cpp/Vagrantfile @@ -91,7 +91,6 @@ Vagrant.configure("2") do |config| config.vm.provision "file", source: "../../../../examples/dotnet", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/java", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/python", destination: "$HOME/project/examples/" - config.vm.provision "file", source: "../../../../examples/tests", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../patches", destination: "$HOME/project/" config.vm.provision "file", source: "../../../../Version.txt", destination: "$HOME/project/" diff --git a/cmake/vagrant/freebsd/java/Vagrantfile b/cmake/vagrant/freebsd/java/Vagrantfile index fff36643427..308e783fe4e 100644 --- a/cmake/vagrant/freebsd/java/Vagrantfile +++ b/cmake/vagrant/freebsd/java/Vagrantfile @@ -94,7 +94,6 @@ Vagrant.configure("2") do |config| config.vm.provision "file", source: "../../../../examples/dotnet", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/java", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/python", destination: "$HOME/project/examples/" - config.vm.provision "file", source: "../../../../examples/tests", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../patches", destination: "$HOME/project/" config.vm.provision "file", source: "../../../../Version.txt", 
destination: "$HOME/project/" diff --git a/cmake/vagrant/freebsd/python/Vagrantfile b/cmake/vagrant/freebsd/python/Vagrantfile index 7cd93bd1d5a..6808ec14271 100644 --- a/cmake/vagrant/freebsd/python/Vagrantfile +++ b/cmake/vagrant/freebsd/python/Vagrantfile @@ -93,7 +93,6 @@ Vagrant.configure("2") do |config| config.vm.provision "file", source: "../../../../examples/dotnet", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/java", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/python", destination: "$HOME/project/examples/" - config.vm.provision "file", source: "../../../../examples/tests", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../patches", destination: "$HOME/project/" config.vm.provision "file", source: "../../../../LICENSE", destination: "$HOME/project/" config.vm.provision "file", source: "../../../../Version.txt", destination: "$HOME/project/" diff --git a/cmake/vagrant/netbsd/cpp/Vagrantfile b/cmake/vagrant/netbsd/cpp/Vagrantfile index db188d48cce..785262af9b4 100644 --- a/cmake/vagrant/netbsd/cpp/Vagrantfile +++ b/cmake/vagrant/netbsd/cpp/Vagrantfile @@ -85,7 +85,6 @@ Vagrant.configure("2") do |config| config.vm.provision "file", source: "../../../../examples/dotnet", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/java", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/python", destination: "$HOME/project/examples/" - config.vm.provision "file", source: "../../../../examples/tests", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../patches", destination: "$HOME/project/" config.vm.provision "file", source: "../../../../Version.txt", destination: "$HOME/project/" diff --git a/cmake/vagrant/netbsd/dotnet/Vagrantfile b/cmake/vagrant/netbsd/dotnet/Vagrantfile index 
09ac57e5949..b88bd5aa715 100644 --- a/cmake/vagrant/netbsd/dotnet/Vagrantfile +++ b/cmake/vagrant/netbsd/dotnet/Vagrantfile @@ -87,7 +87,6 @@ Vagrant.configure("2") do |config| config.vm.provision "file", source: "../../../../examples/dotnet", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/java", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/python", destination: "$HOME/project/examples/" - config.vm.provision "file", source: "../../../../examples/tests", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../patches", destination: "$HOME/project/" config.vm.provision "file", source: "../../../../Version.txt", destination: "$HOME/project/" config.vm.provision "file", source: "../../../../tools/doc/orLogo.png", destination: "$HOME/project/tools/doc/" diff --git a/cmake/vagrant/netbsd/java/Vagrantfile b/cmake/vagrant/netbsd/java/Vagrantfile index 7c44664b8ef..a646decdca4 100644 --- a/cmake/vagrant/netbsd/java/Vagrantfile +++ b/cmake/vagrant/netbsd/java/Vagrantfile @@ -88,7 +88,6 @@ Vagrant.configure("2") do |config| config.vm.provision "file", source: "../../../../examples/dotnet", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/java", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/python", destination: "$HOME/project/examples/" - config.vm.provision "file", source: "../../../../examples/tests", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../patches", destination: "$HOME/project/" config.vm.provision "file", source: "../../../../Version.txt", destination: "$HOME/project/" diff --git a/cmake/vagrant/netbsd/python/Vagrantfile b/cmake/vagrant/netbsd/python/Vagrantfile index 86340ab3d01..af297cb0761 100644 --- a/cmake/vagrant/netbsd/python/Vagrantfile +++ b/cmake/vagrant/netbsd/python/Vagrantfile @@ 
-87,7 +87,6 @@ Vagrant.configure("2") do |config| config.vm.provision "file", source: "../../../../examples/dotnet", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/java", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/python", destination: "$HOME/project/examples/" - config.vm.provision "file", source: "../../../../examples/tests", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../patches", destination: "$HOME/project/" config.vm.provision "file", source: "../../../../LICENSE", destination: "$HOME/project/" config.vm.provision "file", source: "../../../../Version.txt", destination: "$HOME/project/" diff --git a/cmake/vagrant/openbsd/cpp/Vagrantfile b/cmake/vagrant/openbsd/cpp/Vagrantfile index 85a99d367b4..cc586c9d726 100644 --- a/cmake/vagrant/openbsd/cpp/Vagrantfile +++ b/cmake/vagrant/openbsd/cpp/Vagrantfile @@ -85,7 +85,6 @@ Vagrant.configure("2") do |config| config.vm.provision "file", source: "../../../../examples/dotnet", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/java", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/python", destination: "$HOME/project/examples/" - config.vm.provision "file", source: "../../../../examples/tests", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../patches", destination: "$HOME/project/" config.vm.provision "file", source: "../../../../Version.txt", destination: "$HOME/project/" diff --git a/cmake/vagrant/openbsd/dotnet/Vagrantfile b/cmake/vagrant/openbsd/dotnet/Vagrantfile index b129bca007f..a30d368c3a1 100644 --- a/cmake/vagrant/openbsd/dotnet/Vagrantfile +++ b/cmake/vagrant/openbsd/dotnet/Vagrantfile @@ -87,7 +87,6 @@ Vagrant.configure("2") do |config| config.vm.provision "file", source: "../../../../examples/dotnet", destination: 
"$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/java", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/python", destination: "$HOME/project/examples/" - config.vm.provision "file", source: "../../../../examples/tests", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../patches", destination: "$HOME/project/" config.vm.provision "file", source: "../../../../Version.txt", destination: "$HOME/project/" config.vm.provision "file", source: "../../../../tools/doc/orLogo.png", destination: "$HOME/project/tools/doc/" diff --git a/cmake/vagrant/openbsd/java/Vagrantfile b/cmake/vagrant/openbsd/java/Vagrantfile index c0298674717..3a932bcc8be 100644 --- a/cmake/vagrant/openbsd/java/Vagrantfile +++ b/cmake/vagrant/openbsd/java/Vagrantfile @@ -88,7 +88,6 @@ Vagrant.configure("2") do |config| config.vm.provision "file", source: "../../../../examples/dotnet", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/java", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/python", destination: "$HOME/project/examples/" - config.vm.provision "file", source: "../../../../examples/tests", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../patches", destination: "$HOME/project/" config.vm.provision "file", source: "../../../../Version.txt", destination: "$HOME/project/" diff --git a/cmake/vagrant/openbsd/python/Vagrantfile b/cmake/vagrant/openbsd/python/Vagrantfile index d2083458d33..05d4cf967bf 100644 --- a/cmake/vagrant/openbsd/python/Vagrantfile +++ b/cmake/vagrant/openbsd/python/Vagrantfile @@ -87,7 +87,6 @@ Vagrant.configure("2") do |config| config.vm.provision "file", source: "../../../../examples/dotnet", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/java", destination: 
"$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/python", destination: "$HOME/project/examples/" - config.vm.provision "file", source: "../../../../examples/tests", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../patches", destination: "$HOME/project/" config.vm.provision "file", source: "../../../../LICENSE", destination: "$HOME/project/" config.vm.provision "file", source: "../../../../Version.txt", destination: "$HOME/project/" diff --git a/examples/tests/CMakeLists.txt b/examples/tests/CMakeLists.txt deleted file mode 100644 index df9c94d12a4..00000000000 --- a/examples/tests/CMakeLists.txt +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright 2010-2025 Google LLC -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -if(NOT BUILD_EXAMPLES) - return() -endif() - -if(BUILD_CXX_EXAMPLES) - file(GLOB CXX_SRCS "*.cc") - foreach(_FULL_FILE_NAME IN LISTS CXX_SRCS) - get_filename_component(_NAME ${_FULL_FILE_NAME} NAME_WE) - get_filename_component(_FILE_NAME ${_FULL_FILE_NAME} NAME) - ortools_cxx_test( - NAME - tests_${_NAME} - SOURCES - ${_FULL_FILE_NAME} - ) - endforeach() -endif() - -if(BUILD_PYTHON_EXAMPLES) - file(GLOB PYTHON_SRCS "*.py") - foreach(FILE_NAME IN LISTS PYTHON_SRCS) - add_python_example(FILE_NAME ${FILE_NAME}) - endforeach() -endif() - -if(BUILD_JAVA_EXAMPLES) - file(GLOB JAVA_SRCS "*.java") - foreach(FILE_NAME IN LISTS JAVA_SRCS) - add_java_example(FILE_NAME ${FILE_NAME}) - endforeach() -endif() - -if(BUILD_DOTNET_EXAMPLES) - file(GLOB DOTNET_SRCS "*.cs") - foreach(FILE_NAME IN LISTS DOTNET_SRCS) - add_dotnet_example(FILE_NAME ${FILE_NAME}) - endforeach() -endif() diff --git a/examples/tests/bug_fz1.cc b/examples/tests/bug_fz1.cc deleted file mode 100644 index 0654c3065a5..00000000000 --- a/examples/tests/bug_fz1.cc +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright 2011-2012 Google -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "absl/flags/parse.h" -#include "ortools/base/hash.h" -#include "ortools/base/map_util.h" -#include "ortools/base/stl_util.h" -#include "ortools/constraint_solver/constraint_solver.h" -#include "ortools/constraint_solver/constraint_solveri.h" -#include "ortools/util/string_array.h" - -namespace operations_research { -void ShoppingBasketBug() { - Solver s("ShoppingBasketBug"); - IntVar* const x15 = s.MakeIntVar(0, 2, "x15"); - IntVar* const x18 = s.MakeIntVar(0, 2, "x18"); - IntVar* const is1 = s.MakeIsEqualCstVar(x15, 2); - IntVar* const is2 = s.MakeIsEqualCstVar(x18, 2); - IntVar* const is_less = s.MakeIsLessOrEqualCstVar( - s.MakeSum(s.MakeProd(is1, 2), s.MakeProd(is2, 2)), 1); - std::vector values1; - values1.push_back(10); - values1.push_back(2); - values1.push_back(12); - IntVar* const elem1 = s.MakeElement(values1, x15)->Var(); - std::vector values2; - values2.push_back(2); - values2.push_back(10); - values2.push_back(5); - IntVar* const elem2 = s.MakeElement(values2, x18)->Var(); - std::vector vars; - vars.push_back(elem1); - vars.push_back(is_less); - vars.push_back(elem2); - std::vector coefs; - coefs.push_back(1); - coefs.push_back(90); - coefs.push_back(1); - IntVar* const obj = s.MakeScalProd(vars, coefs)->Var(); - OptimizeVar* const optimize = s.MakeMinimize(obj, 1); - SearchMonitor* const log = s.MakeSearchLog(10, optimize); - SolutionCollector* const collector = s.MakeLastSolutionCollector(); - collector->Add(x15); - collector->Add(x18); - collector->Add(is_less); - collector->Add(elem1); - collector->Add(elem2); - collector->Add(is1); - collector->Add(is2); - DecisionBuilder* const db1 = - s.MakePhase(x15, x18, Solver::CHOOSE_MAX_SIZE, Solver::ASSIGN_MIN_VALUE); - DecisionBuilder* const db2 = - s.MakePhase(obj, Solver::CHOOSE_FIRST_UNBOUND, Solver::ASSIGN_MIN_VALUE); - DecisionBuilder* const db = s.Compose(db1, db2); - s.Solve(db, optimize, log, collector); - LOG(INFO) << collector->solution(0)->DebugString(); -} -} // namespace 
operations_research - -int main(int argc, char** argv) { - absl::ParseCommandLine(argc, argv); - operations_research::ShoppingBasketBug(); - return 0; -} diff --git a/examples/tests/cpp11_test.cc b/examples/tests/cpp11_test.cc deleted file mode 100644 index b91d3c0bbb4..00000000000 --- a/examples/tests/cpp11_test.cc +++ /dev/null @@ -1,88 +0,0 @@ -#include -#include -#include -#include - -#include "ortools/base/hash.h" - -namespace operations_research { -struct Foo { - Foo() { std::cout << "Foo::Foo\n"; } - ~Foo() { std::cout << "Foo::~Foo\n"; } - void bar() { std::cout << "Foo::bar\n"; } -}; - -void f(const Foo& foo) { std::cout << "f(const Foo&)\n"; } - -void test_unique() { - std::cout << "test_unique" << std::endl; - std::unique_ptr p1(new Foo); // p1 owns Foo - if (p1) p1->bar(); - - { - std::unique_ptr p2(std::move(p1)); // now p2 owns Foo - f(*p2); - - p1 = std::move(p2); // ownership returns to p1 - std::cout << "destroying p2...\n"; - } - - if (p1) p1->bar(); - // Foo instance is destroyed when p1 goes out of scope -} - -void test_auto() { - std::cout << "test_auto" << std::endl; - std::vector numbers; - numbers.push_back(1); - numbers.push_back(2); - numbers.push_back(3); - numbers.push_back(4); - numbers.push_back(5); - numbers.push_back(6); - numbers.push_back(7); - for (int vec : numbers) { - std::cout << vec << std::endl; - } - - std::unordered_map my_map; - my_map["toto"] = 2; - for (auto mm : my_map) { - std::cout << mm.first << " -> " << mm.second << std::endl; - } -} - -void test_chevron() { - std::cout << "test_chevron" << std::endl; - std::vector> toto; - toto.push_back(std::make_pair(2, 4)); -} - -class A { - public: - virtual ~A() {} - virtual int V() const { return 1; } -}; - -class B : public A { - public: - ~B() override {} - int V() const override { return 2; } -}; - -void test_override() { - std::cout << "test_override" << std::endl; - B* b = new B(); - if (b->V() != 2) { - std::cout << "Problem with override" << std::endl; - } -} -} // 
namespace operations_research - -int main() { - operations_research::test_unique(); - operations_research::test_auto(); - operations_research::test_chevron(); - operations_research::test_override(); - return 0; -} diff --git a/examples/tests/dual_loading.py b/examples/tests/dual_loading.py deleted file mode 100755 index 8ab1c1c88ea..00000000000 --- a/examples/tests/dual_loading.py +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env python3 -from ortools.constraint_solver import pywrapcp -from ortools.linear_solver import pywraplp - - -def main(): - cp = pywrapcp.Solver("test") - lp = pywraplp.Solver.CreateSolver('GLOP') - - -if __name__ == "__main__": - main() diff --git a/examples/tests/forbidden_intervals_test.cc b/examples/tests/forbidden_intervals_test.cc deleted file mode 100644 index 9cfbe02cdb8..00000000000 --- a/examples/tests/forbidden_intervals_test.cc +++ /dev/null @@ -1,160 +0,0 @@ -// Copyright 2011-2012 Google -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "absl/flags/parse.h" -#include "ortools/constraint_solver/constraint_solver.h" - -namespace operations_research { - -class ForbiddenIntervalTestSimpleReductionOnBothSide : public DecisionBuilder { - public: - ForbiddenIntervalTestSimpleReductionOnBothSide(IntVar* const var) - : var_(var) {} - ~ForbiddenIntervalTestSimpleReductionOnBothSide() override {} - - Decision* Next(Solver* const s) override { - CHECK_EQ(101, var_->Min()); - CHECK_EQ(899, var_->Max()); - return NULL; - } - - private: - IntVar* const var_; -}; - -class ForbiddenIntervalTestMultipleReductionsOnMin : public DecisionBuilder { - public: - ForbiddenIntervalTestMultipleReductionsOnMin(IntVar* const var) : var_(var) {} - ~ForbiddenIntervalTestMultipleReductionsOnMin() override {} - - Decision* Next(Solver* const s) override { - CHECK_EQ(0, var_->Min()); - CHECK_EQ(1000, var_->Max()); - var_->SetMin(5); - CHECK_EQ(5, var_->Min()); - CHECK_EQ(1000, var_->Max()); - var_->SetMax(995); - CHECK_EQ(5, var_->Min()); - CHECK_EQ(995, var_->Max()); - var_->SetMin(10); - CHECK_EQ(21, var_->Min()); - CHECK_EQ(995, var_->Max()); - var_->SetMin(30); - CHECK_EQ(30, var_->Min()); - CHECK_EQ(995, var_->Max()); - var_->SetMin(505); - CHECK_EQ(511, var_->Min()); - CHECK_EQ(995, var_->Max()); - var_->SetMin(600); - CHECK_EQ(600, var_->Min()); - CHECK_EQ(995, var_->Max()); - var_->SetMin(900); - CHECK_EQ(901, var_->Min()); - CHECK_EQ(995, var_->Max()); - return NULL; - } - - private: - IntVar* const var_; -}; - -class ForbiddenIntervalTestMultipleReductionsOnMax : public DecisionBuilder { - public: - ForbiddenIntervalTestMultipleReductionsOnMax(IntVar* const var) : var_(var) {} - ~ForbiddenIntervalTestMultipleReductionsOnMax() override {} - - Decision* Next(Solver* const s) override { - CHECK_EQ(0, var_->Min()); - CHECK_EQ(1000, var_->Max()); - var_->SetMin(5); - CHECK_EQ(5, var_->Min()); - CHECK_EQ(1000, var_->Max()); - var_->SetMax(995); - CHECK_EQ(5, var_->Min()); - CHECK_EQ(995, var_->Max()); - 
var_->SetMax(900); - CHECK_EQ(5, var_->Min()); - CHECK_EQ(799, var_->Max()); - var_->SetMax(750); - CHECK_EQ(5, var_->Min()); - CHECK_EQ(750, var_->Max()); - var_->SetMax(505); - CHECK_EQ(5, var_->Min()); - CHECK_EQ(499, var_->Max()); - var_->SetMax(300); - CHECK_EQ(5, var_->Min()); - CHECK_EQ(300, var_->Max()); - var_->SetMax(20); - CHECK_EQ(5, var_->Min()); - CHECK_EQ(9, var_->Max()); - return NULL; - } - - private: - IntVar* const var_; -}; - -class ForbiddenIntervalTest { - public: - void SetUp(std::vector& starts, std::vector& ends) { - solver_.reset(new Solver("ForbiddenIntervalTest")); - var_ = solver_->MakeIntVar(0, 1000, "var"); - CHECK_EQ(starts.size(), ends.size()); - for (std::size_t i = 0; i < starts.size(); ++i) { - var_->RemoveInterval(starts[i], ends[i]); - } - } - - std::unique_ptr solver_; - IntVar* var_; - - void TestSimpleReductionOnBothSide() { - std::cout << "TestSimpleReductionOnBothSide" << std::endl; - std::vector starts = {0, 900}; - std::vector ends = {100, 1000}; - SetUp(starts, ends); - CHECK(solver_->Solve(solver_->RevAlloc( - new ForbiddenIntervalTestSimpleReductionOnBothSide(var_)))); - std::cout << " .. done" << std::endl; - } - - void TestMultipleReductionsOnMin() { - std::cout << "TestMultipleReductionsOnMin" << std::endl; - std::vector starts = {10, 500, 800}; - std::vector ends = {20, 510, 900}; - SetUp(starts, ends); - CHECK(solver_->Solve(solver_->RevAlloc( - new ForbiddenIntervalTestMultipleReductionsOnMin(var_)))); - std::cout << " .. done" << std::endl; - } - - void TestMultipleReductionsOnMax() { - std::cout << "TestMultipleReductionsOnMax" << std::endl; - std::vector starts = {10, 500, 800}; - std::vector ends = {20, 510, 900}; - SetUp(starts, ends); - CHECK(solver_->Solve(solver_->RevAlloc( - new ForbiddenIntervalTestMultipleReductionsOnMax(var_)))); - std::cout << " .. 
done" << std::endl; - } -}; -} // namespace operations_research - -int main(int argc, char** argv) { - absl::ParseCommandLine(argc, argv); - operations_research::ForbiddenIntervalTest forbidden_intervals_test; - forbidden_intervals_test.TestSimpleReductionOnBothSide(); - forbidden_intervals_test.TestMultipleReductionsOnMin(); - forbidden_intervals_test.TestMultipleReductionsOnMax(); - return 0; -} diff --git a/examples/tests/init_test.cc b/examples/tests/init_test.cc deleted file mode 100644 index ba85469a357..00000000000 --- a/examples/tests/init_test.cc +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2010-2025 Google LLC -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "ortools/init/init.h" - -#include "absl/strings/str_cat.h" - -namespace operations_research { -void TestLogging() { - LOG(INFO) << "Test Logging"; - CppBridge::InitLogging("init"); - CppBridge::ShutdownLogging(); -} - -void TestFlags() { - LOG(INFO) << "Test Flags"; - auto cpp_flags = CppFlags(); - cpp_flags.log_prefix = true; - cpp_flags.cp_model_dump_prefix = "init"; - cpp_flags.cp_model_dump_models = true; - cpp_flags.cp_model_dump_submodels = true; - cpp_flags.cp_model_dump_response = true; - CppBridge::SetFlags(cpp_flags); -} - -void TestVersion() { - LOG(INFO) << "Test Version"; - using version = OrToolsVersion; - int major = version::MajorNumber(); - int minor = version::MinorNumber(); - int patch = version::PatchNumber(); - std::string vers = absl::StrCat(major, ".", minor, ".", patch); - assert(vers == version::VersionString()); -} - -} // namespace operations_research - -int main(int argc, char** argv) { - operations_research::TestLogging(); - operations_research::TestFlags(); - operations_research::TestVersion(); - return 0; -} diff --git a/examples/tests/issue115.fzn b/examples/tests/issue115.fzn deleted file mode 100644 index d2df535a145..00000000000 --- a/examples/tests/issue115.fzn +++ /dev/null @@ -1,560 +0,0 @@ -array [1..8] of int: X_INTRODUCED_255 = [1,1,1,1,1,1,1,1]; -array [1..4] of int: X_INTRODUCED_419 = [-1,-1,-1,-1]; -array [1..97] of int: X_INTRODUCED_796 = [1,-3,-3,-3,-3,-3,-3,-3,-3,-3,-3,-1,-1,-1,-1,-1,-1,-2,-2,-2,-2,-1,-1,-3,-3,-3,-3,-3,-3,-3,-3,-3,-3,-3,-3,-1,-1,-1,-1,-1,-1,-2,-2,-2,-2,-1,-1,-3,-3,-3,-3,-3,-3,-3,-3,-3,-3,-3,-3,-1,-1,-1,-1,-1,-1,-2,-2,-2,-2,-1,-1,-3,-3,-3,-3,-3,-3,-3,-3,-3,-3,-3,-3,-1,-1,-1,-1,-1,-1,-2,-2,-2,-2,-1,-1,-3,-3]; -var bool: X_INTRODUCED_2; -var bool: X_INTRODUCED_3; -var bool: X_INTRODUCED_6; -var bool: X_INTRODUCED_7; -var bool: X_INTRODUCED_8; -var bool: X_INTRODUCED_9; -var bool: X_INTRODUCED_10; -var bool: X_INTRODUCED_11; -var bool: X_INTRODUCED_12; -var bool: X_INTRODUCED_13; -var bool: 
X_INTRODUCED_20; -var bool: X_INTRODUCED_21; -var bool: X_INTRODUCED_22; -var bool: X_INTRODUCED_23; -var bool: X_INTRODUCED_24; -var bool: X_INTRODUCED_25; -var bool: X_INTRODUCED_26; -var bool: X_INTRODUCED_27; -var bool: X_INTRODUCED_28; -var bool: X_INTRODUCED_29; -var bool: X_INTRODUCED_32; -var bool: X_INTRODUCED_33; -var bool: X_INTRODUCED_34; -var bool: X_INTRODUCED_35; -var bool: X_INTRODUCED_40; -var bool: X_INTRODUCED_41; -var bool: X_INTRODUCED_46; -var bool: X_INTRODUCED_47; -var bool: X_INTRODUCED_50; -var bool: X_INTRODUCED_51; -var bool: X_INTRODUCED_52; -var bool: X_INTRODUCED_53; -var bool: X_INTRODUCED_58; -var bool: X_INTRODUCED_59; -var bool: X_INTRODUCED_60; -var bool: X_INTRODUCED_61; -var bool: X_INTRODUCED_62; -var bool: X_INTRODUCED_63; -var bool: X_INTRODUCED_66; -var bool: X_INTRODUCED_67; -var bool: X_INTRODUCED_76; -var bool: X_INTRODUCED_77; -var bool: X_INTRODUCED_78; -var bool: X_INTRODUCED_79; -var bool: X_INTRODUCED_82; -var bool: X_INTRODUCED_83; -var bool: X_INTRODUCED_88; -var bool: X_INTRODUCED_89; -var bool: X_INTRODUCED_92; -var bool: X_INTRODUCED_93; -var bool: X_INTRODUCED_102; -var bool: X_INTRODUCED_103; -var bool: X_INTRODUCED_106; -var bool: X_INTRODUCED_107; -var bool: X_INTRODUCED_108; -var bool: X_INTRODUCED_109; -var bool: X_INTRODUCED_116; -var bool: X_INTRODUCED_117; -var bool: X_INTRODUCED_118; -var bool: X_INTRODUCED_119; -var bool: X_INTRODUCED_122; -var bool: X_INTRODUCED_123; -var bool: X_INTRODUCED_126; -var bool: X_INTRODUCED_127; -var bool: X_INTRODUCED_128; -var bool: X_INTRODUCED_129; -var bool: X_INTRODUCED_130; -var bool: X_INTRODUCED_131; -var bool: X_INTRODUCED_132; -var bool: X_INTRODUCED_133; -var 0..144: objective:: output_var:: is_defined_var; -var 0..1: X_INTRODUCED_248 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_249 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_252 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_253 ::var_is_introduced :: 
is_defined_var; -var 0..1: X_INTRODUCED_257 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_258 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_259 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_260 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_261 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_262 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_271 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_272 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_273 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_274 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_277 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_278 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_279 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_280 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_281 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_282 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_287 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_288 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_289 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_290 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_297 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_298 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_303 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_304 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_309 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_310 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_311 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_312 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_319 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_320 
::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_321 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_322 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_323 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_324 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_329 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_330 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_341 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_342 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_343 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_344 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_349 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_350 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_357 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_358 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_361 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_362 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_373 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_374 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_379 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_380 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_381 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_382 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_391 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_392 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_393 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_394 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_399 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_400 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_403 ::var_is_introduced :: is_defined_var; -var 0..1: 
X_INTRODUCED_404 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_407 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_408 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_409 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_410 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_411 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_412 ::var_is_introduced :: is_defined_var; -var -10..1: X_INTRODUCED_492 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_493 ::var_is_introduced ; -var 0..1: X_INTRODUCED_497 ::var_is_introduced ; -var -7..1: X_INTRODUCED_500 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_501 ::var_is_introduced ; -var -4..1: X_INTRODUCED_504 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_505 ::var_is_introduced ; -var -2..1: X_INTRODUCED_508 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_509 ::var_is_introduced ; -var 0..1: X_INTRODUCED_512 ::var_is_introduced ; -var 0..1: X_INTRODUCED_515 ::var_is_introduced ; -var 0..1: X_INTRODUCED_518 ::var_is_introduced ; -var 0..1: X_INTRODUCED_521 ::var_is_introduced ; -var 0..1: X_INTRODUCED_524 ::var_is_introduced ; -var -3..1: X_INTRODUCED_527 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_528 ::var_is_introduced ; -var -3..1: X_INTRODUCED_531 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_532 ::var_is_introduced ; -var -1..1: X_INTRODUCED_535 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_536 ::var_is_introduced ; -var -1..1: X_INTRODUCED_539 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_540 ::var_is_introduced ; -var -15..2: X_INTRODUCED_543 ::var_is_introduced :: is_defined_var; -var 0..2: X_INTRODUCED_544 ::var_is_introduced ; -var -15..2: X_INTRODUCED_547 ::var_is_introduced :: is_defined_var; -var 0..2: X_INTRODUCED_548 ::var_is_introduced ; -var -10..1: X_INTRODUCED_551 ::var_is_introduced :: 
is_defined_var; -var 0..1: X_INTRODUCED_552 ::var_is_introduced ; -var -4..1: X_INTRODUCED_555 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_556 ::var_is_introduced ; -var -7..1: X_INTRODUCED_559 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_560 ::var_is_introduced ; -var -4..1: X_INTRODUCED_563 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_564 ::var_is_introduced ; -var -2..1: X_INTRODUCED_567 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_568 ::var_is_introduced ; -var 0..1: X_INTRODUCED_570 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_571 ::var_is_introduced ; -var 0..1: X_INTRODUCED_574 ::var_is_introduced ; -var 0..1: X_INTRODUCED_577 ::var_is_introduced ; -var 0..1: X_INTRODUCED_580 ::var_is_introduced ; -var 0..1: X_INTRODUCED_583 ::var_is_introduced ; -var -3..1: X_INTRODUCED_586 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_587 ::var_is_introduced ; -var -3..1: X_INTRODUCED_590 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_591 ::var_is_introduced ; -var -1..1: X_INTRODUCED_594 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_595 ::var_is_introduced ; -var -1..1: X_INTRODUCED_598 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_599 ::var_is_introduced ; -var -15..2: X_INTRODUCED_602 ::var_is_introduced :: is_defined_var; -var 0..2: X_INTRODUCED_603 ::var_is_introduced ; -var -15..2: X_INTRODUCED_606 ::var_is_introduced :: is_defined_var; -var 0..2: X_INTRODUCED_607 ::var_is_introduced ; -var -10..1: X_INTRODUCED_610 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_611 ::var_is_introduced ; -var -4..1: X_INTRODUCED_614 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_615 ::var_is_introduced ; -var -7..1: X_INTRODUCED_618 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_619 ::var_is_introduced ; -var -4..1: X_INTRODUCED_622 ::var_is_introduced :: is_defined_var; -var 0..1: 
X_INTRODUCED_623 ::var_is_introduced ; -var -2..1: X_INTRODUCED_626 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_627 ::var_is_introduced ; -var 0..1: X_INTRODUCED_629 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_630 ::var_is_introduced ; -var 0..1: X_INTRODUCED_633 ::var_is_introduced ; -var 0..1: X_INTRODUCED_636 ::var_is_introduced ; -var 0..1: X_INTRODUCED_639 ::var_is_introduced ; -var 0..1: X_INTRODUCED_642 ::var_is_introduced ; -var -3..1: X_INTRODUCED_645 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_646 ::var_is_introduced ; -var -3..1: X_INTRODUCED_649 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_650 ::var_is_introduced ; -var -1..1: X_INTRODUCED_653 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_654 ::var_is_introduced ; -var -1..1: X_INTRODUCED_657 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_658 ::var_is_introduced ; -var -15..2: X_INTRODUCED_661 ::var_is_introduced :: is_defined_var; -var 0..2: X_INTRODUCED_662 ::var_is_introduced ; -var -15..2: X_INTRODUCED_665 ::var_is_introduced :: is_defined_var; -var 0..2: X_INTRODUCED_666 ::var_is_introduced ; -var -10..1: X_INTRODUCED_669 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_670 ::var_is_introduced ; -var -4..1: X_INTRODUCED_673 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_674 ::var_is_introduced ; -var -7..1: X_INTRODUCED_677 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_678 ::var_is_introduced ; -var -4..1: X_INTRODUCED_681 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_682 ::var_is_introduced ; -var -2..1: X_INTRODUCED_685 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_686 ::var_is_introduced ; -var 0..1: X_INTRODUCED_689 ::var_is_introduced ; -var 0..1: X_INTRODUCED_691 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_692 ::var_is_introduced ; -var 0..1: X_INTRODUCED_694 ::var_is_introduced :: 
is_defined_var; -var 0..1: X_INTRODUCED_695 ::var_is_introduced ; -var 0..1: X_INTRODUCED_697 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_698 ::var_is_introduced ; -var 0..1: X_INTRODUCED_700 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_701 ::var_is_introduced ; -var -3..1: X_INTRODUCED_704 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_705 ::var_is_introduced ; -var -3..1: X_INTRODUCED_708 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_709 ::var_is_introduced ; -var 0..1: X_INTRODUCED_713 ::var_is_introduced ; -var 0..1: X_INTRODUCED_717 ::var_is_introduced ; -var -15..2: X_INTRODUCED_720 ::var_is_introduced :: is_defined_var; -var 0..2: X_INTRODUCED_721 ::var_is_introduced ; -var -15..2: X_INTRODUCED_724 ::var_is_introduced :: is_defined_var; -var 0..2: X_INTRODUCED_725 ::var_is_introduced ; -var 0..1: X_INTRODUCED_246 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_247 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_250 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_251 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_263 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_264 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_267 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_268 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_269 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_270 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_283 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_284 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_291 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_292 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_293 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_294 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_299 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_300 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_301 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_302 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_307 ::var_is_introduced = 0; -var 0..1: 
X_INTRODUCED_308 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_313 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_314 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_317 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_318 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_327 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_328 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_331 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_332 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_333 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_334 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_337 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_338 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_339 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_340 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_347 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_348 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_351 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_352 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_353 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_354 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_359 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_360 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_363 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_364 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_367 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_368 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_369 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_370 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_371 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_372 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_377 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_378 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_383 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_384 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_387 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_388 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_389 ::var_is_introduced = 
0; -var 0..1: X_INTRODUCED_390 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_397 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_398 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_401 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_402 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_413 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_414 ::var_is_introduced = 0; -array [1..136] of var bool: Assignment:: output_array([1..17,1..4,1..2]) = [false,false,X_INTRODUCED_2,X_INTRODUCED_3,false,false,X_INTRODUCED_6,X_INTRODUCED_7,X_INTRODUCED_8,X_INTRODUCED_9,X_INTRODUCED_10,X_INTRODUCED_11,X_INTRODUCED_12,X_INTRODUCED_13,false,false,false,false,false,false,X_INTRODUCED_20,X_INTRODUCED_21,X_INTRODUCED_22,X_INTRODUCED_23,X_INTRODUCED_24,X_INTRODUCED_25,X_INTRODUCED_26,X_INTRODUCED_27,X_INTRODUCED_28,X_INTRODUCED_29,false,false,X_INTRODUCED_32,X_INTRODUCED_33,X_INTRODUCED_34,X_INTRODUCED_35,false,false,false,false,X_INTRODUCED_40,X_INTRODUCED_41,false,false,false,false,X_INTRODUCED_46,X_INTRODUCED_47,false,false,X_INTRODUCED_50,X_INTRODUCED_51,X_INTRODUCED_52,X_INTRODUCED_53,false,false,false,false,X_INTRODUCED_58,X_INTRODUCED_59,X_INTRODUCED_60,X_INTRODUCED_61,X_INTRODUCED_62,X_INTRODUCED_63,false,false,X_INTRODUCED_66,X_INTRODUCED_67,false,false,false,false,false,false,false,false,X_INTRODUCED_76,X_INTRODUCED_77,X_INTRODUCED_78,X_INTRODUCED_79,false,false,X_INTRODUCED_82,X_INTRODUCED_83,false,false,false,false,X_INTRODUCED_88,X_INTRODUCED_89,false,false,X_INTRODUCED_92,X_INTRODUCED_93,false,false,false,false,false,false,false,false,X_INTRODUCED_102,X_INTRODUCED_103,false,false,X_INTRODUCED_106,X_INTRODUCED_107,X_INTRODUCED_108,X_INTRODUCED_109,false,false,false,false,false,false,X_INTRODUCED_116,X_INTRODUCED_117,X_INTRODUCED_118,X_INTRODUCED_119,false,false,X_INTRODUCED_122,X_INTRODUCED_123,false,false,X_INTRODUCED_126,X_INTRODUCED_127,X_INTRODUCED_128,X_INTRODUCED_129,X_INTRODUCED_130,X_INTRODUCED_131,X_INTRODUCED_132,X_INTRODUCED_133,false,false]; -array [1..96] 
of var int: SubjSlack:: output_array([1..4,1..12,1..2]) = [X_INTRODUCED_493,0,0,X_INTRODUCED_497,X_INTRODUCED_501,0,0,X_INTRODUCED_505,0,X_INTRODUCED_509,0,X_INTRODUCED_512,X_INTRODUCED_515,X_INTRODUCED_518,X_INTRODUCED_521,X_INTRODUCED_524,X_INTRODUCED_528,X_INTRODUCED_532,X_INTRODUCED_536,X_INTRODUCED_540,0,X_INTRODUCED_544,X_INTRODUCED_548,0,X_INTRODUCED_552,0,0,X_INTRODUCED_556,X_INTRODUCED_560,0,0,X_INTRODUCED_564,0,X_INTRODUCED_568,0,X_INTRODUCED_571,X_INTRODUCED_574,X_INTRODUCED_577,X_INTRODUCED_580,X_INTRODUCED_583,X_INTRODUCED_587,X_INTRODUCED_591,X_INTRODUCED_595,X_INTRODUCED_599,0,X_INTRODUCED_603,X_INTRODUCED_607,0,X_INTRODUCED_611,0,0,X_INTRODUCED_615,X_INTRODUCED_619,0,0,X_INTRODUCED_623,0,X_INTRODUCED_627,0,X_INTRODUCED_630,X_INTRODUCED_633,X_INTRODUCED_636,X_INTRODUCED_639,X_INTRODUCED_642,X_INTRODUCED_646,X_INTRODUCED_650,X_INTRODUCED_654,X_INTRODUCED_658,0,X_INTRODUCED_662,X_INTRODUCED_666,0,X_INTRODUCED_670,0,0,X_INTRODUCED_674,X_INTRODUCED_678,0,0,X_INTRODUCED_682,0,X_INTRODUCED_686,0,X_INTRODUCED_689,X_INTRODUCED_692,X_INTRODUCED_695,X_INTRODUCED_698,X_INTRODUCED_701,X_INTRODUCED_705,X_INTRODUCED_709,X_INTRODUCED_713,X_INTRODUCED_717,0,X_INTRODUCED_721,X_INTRODUCED_725,0]; -array [1..12] of var int: Target:: output_array([1..12]) = [1,1,1,1,1,1,1,1,1,1,2,2]; -array [1..97] of var int: X_INTRODUCED_795 ::var_is_introduced = 
[objective,X_INTRODUCED_493,0,0,X_INTRODUCED_497,X_INTRODUCED_501,0,0,X_INTRODUCED_505,0,X_INTRODUCED_509,0,X_INTRODUCED_512,X_INTRODUCED_515,X_INTRODUCED_518,X_INTRODUCED_521,X_INTRODUCED_524,X_INTRODUCED_528,X_INTRODUCED_532,X_INTRODUCED_536,X_INTRODUCED_540,0,X_INTRODUCED_544,X_INTRODUCED_548,0,X_INTRODUCED_552,0,0,X_INTRODUCED_556,X_INTRODUCED_560,0,0,X_INTRODUCED_564,0,X_INTRODUCED_568,0,X_INTRODUCED_571,X_INTRODUCED_574,X_INTRODUCED_577,X_INTRODUCED_580,X_INTRODUCED_583,X_INTRODUCED_587,X_INTRODUCED_591,X_INTRODUCED_595,X_INTRODUCED_599,0,X_INTRODUCED_603,X_INTRODUCED_607,0,X_INTRODUCED_611,0,0,X_INTRODUCED_615,X_INTRODUCED_619,0,0,X_INTRODUCED_623,0,X_INTRODUCED_627,0,X_INTRODUCED_630,X_INTRODUCED_633,X_INTRODUCED_636,X_INTRODUCED_639,X_INTRODUCED_642,X_INTRODUCED_646,X_INTRODUCED_650,X_INTRODUCED_654,X_INTRODUCED_658,0,X_INTRODUCED_662,X_INTRODUCED_666,0,X_INTRODUCED_670,0,0,X_INTRODUCED_674,X_INTRODUCED_678,0,0,X_INTRODUCED_682,0,X_INTRODUCED_686,0,X_INTRODUCED_689,X_INTRODUCED_692,X_INTRODUCED_695,X_INTRODUCED_698,X_INTRODUCED_701,X_INTRODUCED_705,X_INTRODUCED_709,X_INTRODUCED_713,X_INTRODUCED_717,0,X_INTRODUCED_721,X_INTRODUCED_725,0]; -array [1..136] of var bool: X_INTRODUCED_798 ::var_is_introduced = 
[false,false,X_INTRODUCED_2,X_INTRODUCED_3,false,false,X_INTRODUCED_6,X_INTRODUCED_7,X_INTRODUCED_8,X_INTRODUCED_9,X_INTRODUCED_10,X_INTRODUCED_11,X_INTRODUCED_12,X_INTRODUCED_13,false,false,false,false,false,false,X_INTRODUCED_20,X_INTRODUCED_21,X_INTRODUCED_22,X_INTRODUCED_23,X_INTRODUCED_24,X_INTRODUCED_25,X_INTRODUCED_26,X_INTRODUCED_27,X_INTRODUCED_28,X_INTRODUCED_29,false,false,X_INTRODUCED_32,X_INTRODUCED_33,X_INTRODUCED_34,X_INTRODUCED_35,false,false,false,false,X_INTRODUCED_40,X_INTRODUCED_41,false,false,false,false,X_INTRODUCED_46,X_INTRODUCED_47,false,false,X_INTRODUCED_50,X_INTRODUCED_51,X_INTRODUCED_52,X_INTRODUCED_53,false,false,false,false,X_INTRODUCED_58,X_INTRODUCED_59,X_INTRODUCED_60,X_INTRODUCED_61,X_INTRODUCED_62,X_INTRODUCED_63,false,false,X_INTRODUCED_66,X_INTRODUCED_67,false,false,false,false,false,false,false,false,X_INTRODUCED_76,X_INTRODUCED_77,X_INTRODUCED_78,X_INTRODUCED_79,false,false,X_INTRODUCED_82,X_INTRODUCED_83,false,false,false,false,X_INTRODUCED_88,X_INTRODUCED_89,false,false,X_INTRODUCED_92,X_INTRODUCED_93,false,false,false,false,false,false,false,false,X_INTRODUCED_102,X_INTRODUCED_103,false,false,X_INTRODUCED_106,X_INTRODUCED_107,X_INTRODUCED_108,X_INTRODUCED_109,false,false,false,false,false,false,X_INTRODUCED_116,X_INTRODUCED_117,X_INTRODUCED_118,X_INTRODUCED_119,false,false,X_INTRODUCED_122,X_INTRODUCED_123,false,false,X_INTRODUCED_126,X_INTRODUCED_127,X_INTRODUCED_128,X_INTRODUCED_129,X_INTRODUCED_130,X_INTRODUCED_131,X_INTRODUCED_132,X_INTRODUCED_133,false,false]; -constraint int_lin_eq(X_INTRODUCED_255,[X_INTRODUCED_246,X_INTRODUCED_247,X_INTRODUCED_248,X_INTRODUCED_249,X_INTRODUCED_250,X_INTRODUCED_251,X_INTRODUCED_252,X_INTRODUCED_253],1); -constraint int_lin_eq(X_INTRODUCED_255,[X_INTRODUCED_257,X_INTRODUCED_258,X_INTRODUCED_259,X_INTRODUCED_260,X_INTRODUCED_261,X_INTRODUCED_262,X_INTRODUCED_263,X_INTRODUCED_264],1); -constraint 
int_lin_eq(X_INTRODUCED_255,[X_INTRODUCED_267,X_INTRODUCED_268,X_INTRODUCED_269,X_INTRODUCED_270,X_INTRODUCED_271,X_INTRODUCED_272,X_INTRODUCED_273,X_INTRODUCED_274],1); -constraint int_lin_eq(X_INTRODUCED_255,[X_INTRODUCED_277,X_INTRODUCED_278,X_INTRODUCED_279,X_INTRODUCED_280,X_INTRODUCED_281,X_INTRODUCED_282,X_INTRODUCED_283,X_INTRODUCED_284],1); -constraint int_lin_eq(X_INTRODUCED_255,[X_INTRODUCED_287,X_INTRODUCED_288,X_INTRODUCED_289,X_INTRODUCED_290,X_INTRODUCED_291,X_INTRODUCED_292,X_INTRODUCED_293,X_INTRODUCED_294],1); -constraint int_lin_eq(X_INTRODUCED_255,[X_INTRODUCED_297,X_INTRODUCED_298,X_INTRODUCED_299,X_INTRODUCED_300,X_INTRODUCED_301,X_INTRODUCED_302,X_INTRODUCED_303,X_INTRODUCED_304],1); -constraint int_lin_eq(X_INTRODUCED_255,[X_INTRODUCED_307,X_INTRODUCED_308,X_INTRODUCED_309,X_INTRODUCED_310,X_INTRODUCED_311,X_INTRODUCED_312,X_INTRODUCED_313,X_INTRODUCED_314],1); -constraint int_lin_eq(X_INTRODUCED_255,[X_INTRODUCED_317,X_INTRODUCED_318,X_INTRODUCED_319,X_INTRODUCED_320,X_INTRODUCED_321,X_INTRODUCED_322,X_INTRODUCED_323,X_INTRODUCED_324],1); -constraint int_lin_eq(X_INTRODUCED_255,[X_INTRODUCED_327,X_INTRODUCED_328,X_INTRODUCED_329,X_INTRODUCED_330,X_INTRODUCED_331,X_INTRODUCED_332,X_INTRODUCED_333,X_INTRODUCED_334],1); -constraint int_lin_eq(X_INTRODUCED_255,[X_INTRODUCED_337,X_INTRODUCED_338,X_INTRODUCED_339,X_INTRODUCED_340,X_INTRODUCED_341,X_INTRODUCED_342,X_INTRODUCED_343,X_INTRODUCED_344],1); -constraint int_lin_eq(X_INTRODUCED_255,[X_INTRODUCED_347,X_INTRODUCED_348,X_INTRODUCED_349,X_INTRODUCED_350,X_INTRODUCED_351,X_INTRODUCED_352,X_INTRODUCED_353,X_INTRODUCED_354],1); -constraint int_lin_eq(X_INTRODUCED_255,[X_INTRODUCED_357,X_INTRODUCED_358,X_INTRODUCED_359,X_INTRODUCED_360,X_INTRODUCED_361,X_INTRODUCED_362,X_INTRODUCED_363,X_INTRODUCED_364],1); -constraint int_lin_eq(X_INTRODUCED_255,[X_INTRODUCED_367,X_INTRODUCED_368,X_INTRODUCED_369,X_INTRODUCED_370,X_INTRODUCED_371,X_INTRODUCED_372,X_INTRODUCED_373,X_INTRODUCED_374],1); 
-constraint int_lin_eq(X_INTRODUCED_255,[X_INTRODUCED_377,X_INTRODUCED_378,X_INTRODUCED_379,X_INTRODUCED_380,X_INTRODUCED_381,X_INTRODUCED_382,X_INTRODUCED_383,X_INTRODUCED_384],1); -constraint int_lin_eq(X_INTRODUCED_255,[X_INTRODUCED_387,X_INTRODUCED_388,X_INTRODUCED_389,X_INTRODUCED_390,X_INTRODUCED_391,X_INTRODUCED_392,X_INTRODUCED_393,X_INTRODUCED_394],1); -constraint int_lin_eq(X_INTRODUCED_255,[X_INTRODUCED_397,X_INTRODUCED_398,X_INTRODUCED_399,X_INTRODUCED_400,X_INTRODUCED_401,X_INTRODUCED_402,X_INTRODUCED_403,X_INTRODUCED_404],1); -constraint int_lin_eq(X_INTRODUCED_255,[X_INTRODUCED_407,X_INTRODUCED_408,X_INTRODUCED_409,X_INTRODUCED_410,X_INTRODUCED_411,X_INTRODUCED_412,X_INTRODUCED_413,X_INTRODUCED_414],1); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_246,X_INTRODUCED_248,X_INTRODUCED_250,X_INTRODUCED_252],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_247,X_INTRODUCED_249,X_INTRODUCED_251,X_INTRODUCED_253],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_257,X_INTRODUCED_259,X_INTRODUCED_261,X_INTRODUCED_263],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_258,X_INTRODUCED_260,X_INTRODUCED_262,X_INTRODUCED_264],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_267,X_INTRODUCED_269,X_INTRODUCED_271,X_INTRODUCED_273],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_268,X_INTRODUCED_270,X_INTRODUCED_272,X_INTRODUCED_274],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_277,X_INTRODUCED_279,X_INTRODUCED_281,X_INTRODUCED_283],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_278,X_INTRODUCED_280,X_INTRODUCED_282,X_INTRODUCED_284],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_287,X_INTRODUCED_289,X_INTRODUCED_291,X_INTRODUCED_293],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_288,X_INTRODUCED_290,X_INTRODUCED_292,X_INTRODUCED_294],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_297,X_INTRODUCED_299,X_INTRODUCED_301,X_INTRODUCED_303],0); -constraint 
int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_298,X_INTRODUCED_300,X_INTRODUCED_302,X_INTRODUCED_304],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_307,X_INTRODUCED_309,X_INTRODUCED_311,X_INTRODUCED_313],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_308,X_INTRODUCED_310,X_INTRODUCED_312,X_INTRODUCED_314],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_317,X_INTRODUCED_319,X_INTRODUCED_321,X_INTRODUCED_323],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_318,X_INTRODUCED_320,X_INTRODUCED_322,X_INTRODUCED_324],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_327,X_INTRODUCED_329,X_INTRODUCED_331,X_INTRODUCED_333],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_328,X_INTRODUCED_330,X_INTRODUCED_332,X_INTRODUCED_334],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_337,X_INTRODUCED_339,X_INTRODUCED_341,X_INTRODUCED_343],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_338,X_INTRODUCED_340,X_INTRODUCED_342,X_INTRODUCED_344],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_347,X_INTRODUCED_349,X_INTRODUCED_351,X_INTRODUCED_353],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_348,X_INTRODUCED_350,X_INTRODUCED_352,X_INTRODUCED_354],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_357,X_INTRODUCED_359,X_INTRODUCED_361,X_INTRODUCED_363],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_358,X_INTRODUCED_360,X_INTRODUCED_362,X_INTRODUCED_364],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_367,X_INTRODUCED_369,X_INTRODUCED_371,X_INTRODUCED_373],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_368,X_INTRODUCED_370,X_INTRODUCED_372,X_INTRODUCED_374],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_377,X_INTRODUCED_379,X_INTRODUCED_381,X_INTRODUCED_383],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_378,X_INTRODUCED_380,X_INTRODUCED_382,X_INTRODUCED_384],0); -constraint 
int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_387,X_INTRODUCED_389,X_INTRODUCED_391,X_INTRODUCED_393],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_388,X_INTRODUCED_390,X_INTRODUCED_392,X_INTRODUCED_394],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_397,X_INTRODUCED_399,X_INTRODUCED_401,X_INTRODUCED_403],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_398,X_INTRODUCED_400,X_INTRODUCED_402,X_INTRODUCED_404],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_407,X_INTRODUCED_409,X_INTRODUCED_411,X_INTRODUCED_413],0); -constraint int_lin_le(X_INTRODUCED_419,[X_INTRODUCED_408,X_INTRODUCED_410,X_INTRODUCED_412,X_INTRODUCED_414],0); -constraint int_max(0,X_INTRODUCED_492,X_INTRODUCED_493); -constraint int_max(0,1,X_INTRODUCED_497); -constraint int_max(0,X_INTRODUCED_500,X_INTRODUCED_501); -constraint int_max(0,X_INTRODUCED_504,X_INTRODUCED_505); -constraint int_max(0,X_INTRODUCED_508,X_INTRODUCED_509); -constraint int_max(0,1,X_INTRODUCED_512); -constraint int_max(0,1,X_INTRODUCED_515); -constraint int_max(0,1,X_INTRODUCED_518); -constraint int_max(0,1,X_INTRODUCED_521); -constraint int_max(0,1,X_INTRODUCED_524); -constraint int_max(0,X_INTRODUCED_527,X_INTRODUCED_528); -constraint int_max(0,X_INTRODUCED_531,X_INTRODUCED_532); -constraint int_max(0,X_INTRODUCED_535,X_INTRODUCED_536); -constraint int_max(0,X_INTRODUCED_539,X_INTRODUCED_540); -constraint int_max(0,X_INTRODUCED_543,X_INTRODUCED_544); -constraint int_max(0,X_INTRODUCED_547,X_INTRODUCED_548); -constraint int_max(0,X_INTRODUCED_551,X_INTRODUCED_552); -constraint int_max(0,X_INTRODUCED_555,X_INTRODUCED_556); -constraint int_max(0,X_INTRODUCED_559,X_INTRODUCED_560); -constraint int_max(0,X_INTRODUCED_563,X_INTRODUCED_564); -constraint int_max(0,X_INTRODUCED_567,X_INTRODUCED_568); -constraint int_max(0,X_INTRODUCED_570,X_INTRODUCED_571); -constraint int_max(0,1,X_INTRODUCED_574); -constraint int_max(0,1,X_INTRODUCED_577); -constraint int_max(0,1,X_INTRODUCED_580); -constraint 
int_max(0,1,X_INTRODUCED_583); -constraint int_max(0,X_INTRODUCED_586,X_INTRODUCED_587); -constraint int_max(0,X_INTRODUCED_590,X_INTRODUCED_591); -constraint int_max(0,X_INTRODUCED_594,X_INTRODUCED_595); -constraint int_max(0,X_INTRODUCED_598,X_INTRODUCED_599); -constraint int_max(0,X_INTRODUCED_602,X_INTRODUCED_603); -constraint int_max(0,X_INTRODUCED_606,X_INTRODUCED_607); -constraint int_max(0,X_INTRODUCED_610,X_INTRODUCED_611); -constraint int_max(0,X_INTRODUCED_614,X_INTRODUCED_615); -constraint int_max(0,X_INTRODUCED_618,X_INTRODUCED_619); -constraint int_max(0,X_INTRODUCED_622,X_INTRODUCED_623); -constraint int_max(0,X_INTRODUCED_626,X_INTRODUCED_627); -constraint int_max(0,X_INTRODUCED_629,X_INTRODUCED_630); -constraint int_max(0,1,X_INTRODUCED_633); -constraint int_max(0,1,X_INTRODUCED_636); -constraint int_max(0,1,X_INTRODUCED_639); -constraint int_max(0,1,X_INTRODUCED_642); -constraint int_max(0,X_INTRODUCED_645,X_INTRODUCED_646); -constraint int_max(0,X_INTRODUCED_649,X_INTRODUCED_650); -constraint int_max(0,X_INTRODUCED_653,X_INTRODUCED_654); -constraint int_max(0,X_INTRODUCED_657,X_INTRODUCED_658); -constraint int_max(0,X_INTRODUCED_661,X_INTRODUCED_662); -constraint int_max(0,X_INTRODUCED_665,X_INTRODUCED_666); -constraint int_max(0,X_INTRODUCED_669,X_INTRODUCED_670); -constraint int_max(0,X_INTRODUCED_673,X_INTRODUCED_674); -constraint int_max(0,X_INTRODUCED_677,X_INTRODUCED_678); -constraint int_max(0,X_INTRODUCED_681,X_INTRODUCED_682); -constraint int_max(0,X_INTRODUCED_685,X_INTRODUCED_686); -constraint int_max(0,1,X_INTRODUCED_689); -constraint int_max(0,X_INTRODUCED_691,X_INTRODUCED_692); -constraint int_max(0,X_INTRODUCED_694,X_INTRODUCED_695); -constraint int_max(0,X_INTRODUCED_697,X_INTRODUCED_698); -constraint int_max(0,X_INTRODUCED_700,X_INTRODUCED_701); -constraint int_max(0,X_INTRODUCED_704,X_INTRODUCED_705); -constraint int_max(0,X_INTRODUCED_708,X_INTRODUCED_709); -constraint int_max(0,1,X_INTRODUCED_713); -constraint 
int_max(0,1,X_INTRODUCED_717); -constraint int_max(0,X_INTRODUCED_720,X_INTRODUCED_721); -constraint int_max(0,X_INTRODUCED_724,X_INTRODUCED_725); -constraint int_lin_eq(X_INTRODUCED_796,X_INTRODUCED_795,0):: defines_var(objective); -constraint bool2int(X_INTRODUCED_2,X_INTRODUCED_248):: defines_var(X_INTRODUCED_248); -constraint bool2int(X_INTRODUCED_3,X_INTRODUCED_249):: defines_var(X_INTRODUCED_249); -constraint bool2int(X_INTRODUCED_6,X_INTRODUCED_252):: defines_var(X_INTRODUCED_252); -constraint bool2int(X_INTRODUCED_7,X_INTRODUCED_253):: defines_var(X_INTRODUCED_253); -constraint bool2int(X_INTRODUCED_8,X_INTRODUCED_257):: defines_var(X_INTRODUCED_257); -constraint bool2int(X_INTRODUCED_9,X_INTRODUCED_258):: defines_var(X_INTRODUCED_258); -constraint bool2int(X_INTRODUCED_10,X_INTRODUCED_259):: defines_var(X_INTRODUCED_259); -constraint bool2int(X_INTRODUCED_11,X_INTRODUCED_260):: defines_var(X_INTRODUCED_260); -constraint bool2int(X_INTRODUCED_12,X_INTRODUCED_261):: defines_var(X_INTRODUCED_261); -constraint bool2int(X_INTRODUCED_13,X_INTRODUCED_262):: defines_var(X_INTRODUCED_262); -constraint bool2int(X_INTRODUCED_20,X_INTRODUCED_271):: defines_var(X_INTRODUCED_271); -constraint bool2int(X_INTRODUCED_21,X_INTRODUCED_272):: defines_var(X_INTRODUCED_272); -constraint bool2int(X_INTRODUCED_22,X_INTRODUCED_273):: defines_var(X_INTRODUCED_273); -constraint bool2int(X_INTRODUCED_23,X_INTRODUCED_274):: defines_var(X_INTRODUCED_274); -constraint bool2int(X_INTRODUCED_24,X_INTRODUCED_277):: defines_var(X_INTRODUCED_277); -constraint bool2int(X_INTRODUCED_25,X_INTRODUCED_278):: defines_var(X_INTRODUCED_278); -constraint bool2int(X_INTRODUCED_26,X_INTRODUCED_279):: defines_var(X_INTRODUCED_279); -constraint bool2int(X_INTRODUCED_27,X_INTRODUCED_280):: defines_var(X_INTRODUCED_280); -constraint bool2int(X_INTRODUCED_28,X_INTRODUCED_281):: defines_var(X_INTRODUCED_281); -constraint bool2int(X_INTRODUCED_29,X_INTRODUCED_282):: defines_var(X_INTRODUCED_282); -constraint 
bool2int(X_INTRODUCED_32,X_INTRODUCED_287):: defines_var(X_INTRODUCED_287); -constraint bool2int(X_INTRODUCED_33,X_INTRODUCED_288):: defines_var(X_INTRODUCED_288); -constraint bool2int(X_INTRODUCED_34,X_INTRODUCED_289):: defines_var(X_INTRODUCED_289); -constraint bool2int(X_INTRODUCED_35,X_INTRODUCED_290):: defines_var(X_INTRODUCED_290); -constraint bool2int(X_INTRODUCED_40,X_INTRODUCED_297):: defines_var(X_INTRODUCED_297); -constraint bool2int(X_INTRODUCED_41,X_INTRODUCED_298):: defines_var(X_INTRODUCED_298); -constraint bool2int(X_INTRODUCED_46,X_INTRODUCED_303):: defines_var(X_INTRODUCED_303); -constraint bool2int(X_INTRODUCED_47,X_INTRODUCED_304):: defines_var(X_INTRODUCED_304); -constraint bool2int(X_INTRODUCED_50,X_INTRODUCED_309):: defines_var(X_INTRODUCED_309); -constraint bool2int(X_INTRODUCED_51,X_INTRODUCED_310):: defines_var(X_INTRODUCED_310); -constraint bool2int(X_INTRODUCED_52,X_INTRODUCED_311):: defines_var(X_INTRODUCED_311); -constraint bool2int(X_INTRODUCED_53,X_INTRODUCED_312):: defines_var(X_INTRODUCED_312); -constraint bool2int(X_INTRODUCED_58,X_INTRODUCED_319):: defines_var(X_INTRODUCED_319); -constraint bool2int(X_INTRODUCED_59,X_INTRODUCED_320):: defines_var(X_INTRODUCED_320); -constraint bool2int(X_INTRODUCED_60,X_INTRODUCED_321):: defines_var(X_INTRODUCED_321); -constraint bool2int(X_INTRODUCED_61,X_INTRODUCED_322):: defines_var(X_INTRODUCED_322); -constraint bool2int(X_INTRODUCED_62,X_INTRODUCED_323):: defines_var(X_INTRODUCED_323); -constraint bool2int(X_INTRODUCED_63,X_INTRODUCED_324):: defines_var(X_INTRODUCED_324); -constraint bool2int(X_INTRODUCED_66,X_INTRODUCED_329):: defines_var(X_INTRODUCED_329); -constraint bool2int(X_INTRODUCED_67,X_INTRODUCED_330):: defines_var(X_INTRODUCED_330); -constraint bool2int(X_INTRODUCED_76,X_INTRODUCED_341):: defines_var(X_INTRODUCED_341); -constraint bool2int(X_INTRODUCED_77,X_INTRODUCED_342):: defines_var(X_INTRODUCED_342); -constraint bool2int(X_INTRODUCED_78,X_INTRODUCED_343):: 
defines_var(X_INTRODUCED_343); -constraint bool2int(X_INTRODUCED_79,X_INTRODUCED_344):: defines_var(X_INTRODUCED_344); -constraint bool2int(X_INTRODUCED_82,X_INTRODUCED_349):: defines_var(X_INTRODUCED_349); -constraint bool2int(X_INTRODUCED_83,X_INTRODUCED_350):: defines_var(X_INTRODUCED_350); -constraint bool2int(X_INTRODUCED_88,X_INTRODUCED_357):: defines_var(X_INTRODUCED_357); -constraint bool2int(X_INTRODUCED_89,X_INTRODUCED_358):: defines_var(X_INTRODUCED_358); -constraint bool2int(X_INTRODUCED_92,X_INTRODUCED_361):: defines_var(X_INTRODUCED_361); -constraint bool2int(X_INTRODUCED_93,X_INTRODUCED_362):: defines_var(X_INTRODUCED_362); -constraint bool2int(X_INTRODUCED_102,X_INTRODUCED_373):: defines_var(X_INTRODUCED_373); -constraint bool2int(X_INTRODUCED_103,X_INTRODUCED_374):: defines_var(X_INTRODUCED_374); -constraint bool2int(X_INTRODUCED_106,X_INTRODUCED_379):: defines_var(X_INTRODUCED_379); -constraint bool2int(X_INTRODUCED_107,X_INTRODUCED_380):: defines_var(X_INTRODUCED_380); -constraint bool2int(X_INTRODUCED_108,X_INTRODUCED_381):: defines_var(X_INTRODUCED_381); -constraint bool2int(X_INTRODUCED_109,X_INTRODUCED_382):: defines_var(X_INTRODUCED_382); -constraint bool2int(X_INTRODUCED_116,X_INTRODUCED_391):: defines_var(X_INTRODUCED_391); -constraint bool2int(X_INTRODUCED_117,X_INTRODUCED_392):: defines_var(X_INTRODUCED_392); -constraint bool2int(X_INTRODUCED_118,X_INTRODUCED_393):: defines_var(X_INTRODUCED_393); -constraint bool2int(X_INTRODUCED_119,X_INTRODUCED_394):: defines_var(X_INTRODUCED_394); -constraint bool2int(X_INTRODUCED_122,X_INTRODUCED_399):: defines_var(X_INTRODUCED_399); -constraint bool2int(X_INTRODUCED_123,X_INTRODUCED_400):: defines_var(X_INTRODUCED_400); -constraint bool2int(X_INTRODUCED_126,X_INTRODUCED_403):: defines_var(X_INTRODUCED_403); -constraint bool2int(X_INTRODUCED_127,X_INTRODUCED_404):: defines_var(X_INTRODUCED_404); -constraint bool2int(X_INTRODUCED_128,X_INTRODUCED_407):: defines_var(X_INTRODUCED_407); -constraint 
bool2int(X_INTRODUCED_129,X_INTRODUCED_408):: defines_var(X_INTRODUCED_408); -constraint bool2int(X_INTRODUCED_130,X_INTRODUCED_409):: defines_var(X_INTRODUCED_409); -constraint bool2int(X_INTRODUCED_131,X_INTRODUCED_410):: defines_var(X_INTRODUCED_410); -constraint bool2int(X_INTRODUCED_132,X_INTRODUCED_411):: defines_var(X_INTRODUCED_411); -constraint bool2int(X_INTRODUCED_133,X_INTRODUCED_412):: defines_var(X_INTRODUCED_412); -constraint int_lin_eq([-1,-1,-1,-1,-1],[X_INTRODUCED_277,X_INTRODUCED_287,X_INTRODUCED_297,X_INTRODUCED_407,X_INTRODUCED_492],-1):: defines_var(X_INTRODUCED_492); -constraint int_lin_eq([-1,-1,-1,-1,-1,-1],[X_INTRODUCED_257,X_INTRODUCED_277,X_INTRODUCED_297,X_INTRODUCED_357,X_INTRODUCED_407,X_INTRODUCED_500],-1):: defines_var(X_INTRODUCED_500); -constraint int_lin_eq([-1,-1,-1],[X_INTRODUCED_258,X_INTRODUCED_278,X_INTRODUCED_504],-1):: defines_var(X_INTRODUCED_504); -constraint int_lin_eq([-1,-1],[X_INTRODUCED_258,X_INTRODUCED_508],-1):: defines_var(X_INTRODUCED_508); -constraint int_lin_eq([-1,-1,-1],[X_INTRODUCED_277,X_INTRODUCED_297,X_INTRODUCED_527],-1):: defines_var(X_INTRODUCED_527); -constraint int_lin_eq([-1,-1,-1],[X_INTRODUCED_278,X_INTRODUCED_298,X_INTRODUCED_531],-1):: defines_var(X_INTRODUCED_531); -constraint int_lin_eq([-1,-1],[X_INTRODUCED_277,X_INTRODUCED_535],-1):: defines_var(X_INTRODUCED_535); -constraint int_lin_eq([-1,-1],[X_INTRODUCED_278,X_INTRODUCED_539],-1):: defines_var(X_INTRODUCED_539); -constraint int_lin_eq([-1,-1,-1,-1,-1,-1,-1],[X_INTRODUCED_258,X_INTRODUCED_278,X_INTRODUCED_288,X_INTRODUCED_298,X_INTRODUCED_358,X_INTRODUCED_408,X_INTRODUCED_543],-2):: defines_var(X_INTRODUCED_543); -constraint int_lin_eq([-1,-1,-1,-1,-1,-1,-1],[X_INTRODUCED_257,X_INTRODUCED_277,X_INTRODUCED_287,X_INTRODUCED_297,X_INTRODUCED_357,X_INTRODUCED_407,X_INTRODUCED_547],-2):: defines_var(X_INTRODUCED_547); -constraint 
int_lin_eq([-1,-1,-1,-1,-1,-1,-1,-1,-1],[X_INTRODUCED_248,X_INTRODUCED_279,X_INTRODUCED_289,X_INTRODUCED_309,X_INTRODUCED_349,X_INTRODUCED_379,X_INTRODUCED_399,X_INTRODUCED_409,X_INTRODUCED_551],-1):: defines_var(X_INTRODUCED_551); -constraint int_lin_eq([-1,-1,-1,-1,-1],[X_INTRODUCED_310,X_INTRODUCED_330,X_INTRODUCED_350,X_INTRODUCED_380,X_INTRODUCED_555],-1):: defines_var(X_INTRODUCED_555); -constraint int_lin_eq([-1,-1,-1,-1,-1,-1],[X_INTRODUCED_259,X_INTRODUCED_279,X_INTRODUCED_319,X_INTRODUCED_399,X_INTRODUCED_409,X_INTRODUCED_559],-1):: defines_var(X_INTRODUCED_559); -constraint int_lin_eq([-1,-1,-1,-1,-1],[X_INTRODUCED_260,X_INTRODUCED_280,X_INTRODUCED_310,X_INTRODUCED_320,X_INTRODUCED_563],-1):: defines_var(X_INTRODUCED_563); -constraint int_lin_eq([-1,-1,-1],[X_INTRODUCED_260,X_INTRODUCED_320,X_INTRODUCED_567],-1):: defines_var(X_INTRODUCED_567); -constraint int_lin_eq([-1,-1],[X_INTRODUCED_380,X_INTRODUCED_570],-1):: defines_var(X_INTRODUCED_570); -constraint int_lin_eq([-1,-1,-1],[X_INTRODUCED_279,X_INTRODUCED_379,X_INTRODUCED_586],-1):: defines_var(X_INTRODUCED_586); -constraint int_lin_eq([-1,-1,-1],[X_INTRODUCED_280,X_INTRODUCED_380,X_INTRODUCED_590],-1):: defines_var(X_INTRODUCED_590); -constraint int_lin_eq([-1,-1,-1],[X_INTRODUCED_279,X_INTRODUCED_379,X_INTRODUCED_594],-1):: defines_var(X_INTRODUCED_594); -constraint int_lin_eq([-1,-1,-1],[X_INTRODUCED_280,X_INTRODUCED_380,X_INTRODUCED_598],-1):: defines_var(X_INTRODUCED_598); -constraint int_lin_eq([-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],[X_INTRODUCED_249,X_INTRODUCED_260,X_INTRODUCED_280,X_INTRODUCED_290,X_INTRODUCED_310,X_INTRODUCED_320,X_INTRODUCED_330,X_INTRODUCED_350,X_INTRODUCED_380,X_INTRODUCED_400,X_INTRODUCED_410,X_INTRODUCED_602],-2):: defines_var(X_INTRODUCED_602); -constraint 
int_lin_eq([-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],[X_INTRODUCED_248,X_INTRODUCED_259,X_INTRODUCED_279,X_INTRODUCED_289,X_INTRODUCED_309,X_INTRODUCED_319,X_INTRODUCED_329,X_INTRODUCED_349,X_INTRODUCED_379,X_INTRODUCED_399,X_INTRODUCED_409,X_INTRODUCED_606],-2):: defines_var(X_INTRODUCED_606); -constraint int_lin_eq([-1,-1,-1,-1,-1,-1,-1],[X_INTRODUCED_271,X_INTRODUCED_281,X_INTRODUCED_311,X_INTRODUCED_381,X_INTRODUCED_391,X_INTRODUCED_411,X_INTRODUCED_610],-1):: defines_var(X_INTRODUCED_610); -constraint int_lin_eq([-1,-1,-1,-1],[X_INTRODUCED_312,X_INTRODUCED_342,X_INTRODUCED_382,X_INTRODUCED_614],-1):: defines_var(X_INTRODUCED_614); -constraint int_lin_eq([-1,-1,-1,-1,-1,-1,-1],[X_INTRODUCED_261,X_INTRODUCED_281,X_INTRODUCED_321,X_INTRODUCED_341,X_INTRODUCED_361,X_INTRODUCED_411,X_INTRODUCED_618],-1):: defines_var(X_INTRODUCED_618); -constraint int_lin_eq([-1,-1,-1,-1,-1,-1],[X_INTRODUCED_262,X_INTRODUCED_282,X_INTRODUCED_312,X_INTRODUCED_322,X_INTRODUCED_392,X_INTRODUCED_622],-1):: defines_var(X_INTRODUCED_622); -constraint int_lin_eq([-1,-1,-1,-1],[X_INTRODUCED_262,X_INTRODUCED_322,X_INTRODUCED_342,X_INTRODUCED_626],-1):: defines_var(X_INTRODUCED_626); -constraint int_lin_eq([-1,-1],[X_INTRODUCED_382,X_INTRODUCED_629],-1):: defines_var(X_INTRODUCED_629); -constraint int_lin_eq([-1,-1,-1],[X_INTRODUCED_281,X_INTRODUCED_381,X_INTRODUCED_645],-1):: defines_var(X_INTRODUCED_645); -constraint int_lin_eq([-1,-1,-1],[X_INTRODUCED_282,X_INTRODUCED_382,X_INTRODUCED_649],-1):: defines_var(X_INTRODUCED_649); -constraint int_lin_eq([-1,-1,-1],[X_INTRODUCED_281,X_INTRODUCED_381,X_INTRODUCED_653],-1):: defines_var(X_INTRODUCED_653); -constraint int_lin_eq([-1,-1,-1],[X_INTRODUCED_282,X_INTRODUCED_382,X_INTRODUCED_657],-1):: defines_var(X_INTRODUCED_657); -constraint 
int_lin_eq([-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],[X_INTRODUCED_262,X_INTRODUCED_272,X_INTRODUCED_282,X_INTRODUCED_312,X_INTRODUCED_322,X_INTRODUCED_342,X_INTRODUCED_362,X_INTRODUCED_382,X_INTRODUCED_392,X_INTRODUCED_412,X_INTRODUCED_661],-2):: defines_var(X_INTRODUCED_661); -constraint int_lin_eq([-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],[X_INTRODUCED_261,X_INTRODUCED_271,X_INTRODUCED_281,X_INTRODUCED_311,X_INTRODUCED_321,X_INTRODUCED_341,X_INTRODUCED_361,X_INTRODUCED_381,X_INTRODUCED_391,X_INTRODUCED_411,X_INTRODUCED_665],-2):: defines_var(X_INTRODUCED_665); -constraint int_lin_eq([-1,-1,-1,-1,-1,-1],[X_INTRODUCED_252,X_INTRODUCED_273,X_INTRODUCED_303,X_INTRODUCED_393,X_INTRODUCED_403,X_INTRODUCED_669],-1):: defines_var(X_INTRODUCED_669); -constraint int_lin_eq([-1,-1],[X_INTRODUCED_344,X_INTRODUCED_673],-1):: defines_var(X_INTRODUCED_673); -constraint int_lin_eq([-1,-1,-1,-1,-1],[X_INTRODUCED_303,X_INTRODUCED_323,X_INTRODUCED_343,X_INTRODUCED_403,X_INTRODUCED_677],-1):: defines_var(X_INTRODUCED_677); -constraint int_lin_eq([-1,-1,-1],[X_INTRODUCED_324,X_INTRODUCED_394,X_INTRODUCED_681],-1):: defines_var(X_INTRODUCED_681); -constraint int_lin_eq([-1,-1,-1],[X_INTRODUCED_324,X_INTRODUCED_344,X_INTRODUCED_685],-1):: defines_var(X_INTRODUCED_685); -constraint int_lin_eq([-1,-1],[X_INTRODUCED_373,X_INTRODUCED_691],-1):: defines_var(X_INTRODUCED_691); -constraint int_lin_eq([-1,-1],[X_INTRODUCED_374,X_INTRODUCED_694],-1):: defines_var(X_INTRODUCED_694); -constraint int_lin_eq([-1,-1],[X_INTRODUCED_373,X_INTRODUCED_697],-1):: defines_var(X_INTRODUCED_697); -constraint int_lin_eq([-1,-1],[X_INTRODUCED_374,X_INTRODUCED_700],-1):: defines_var(X_INTRODUCED_700); -constraint int_lin_eq([-1,-1,-1],[X_INTRODUCED_303,X_INTRODUCED_373,X_INTRODUCED_704],-1):: defines_var(X_INTRODUCED_704); -constraint int_lin_eq([-1,-1,-1],[X_INTRODUCED_304,X_INTRODUCED_374,X_INTRODUCED_708],-1):: defines_var(X_INTRODUCED_708); -constraint 
int_lin_eq([-1,-1,-1,-1,-1,-1,-1,-1,-1],[X_INTRODUCED_253,X_INTRODUCED_274,X_INTRODUCED_304,X_INTRODUCED_324,X_INTRODUCED_344,X_INTRODUCED_374,X_INTRODUCED_394,X_INTRODUCED_404,X_INTRODUCED_720],-2):: defines_var(X_INTRODUCED_720); -constraint int_lin_eq([-1,-1,-1,-1,-1,-1,-1,-1,-1],[X_INTRODUCED_252,X_INTRODUCED_273,X_INTRODUCED_303,X_INTRODUCED_323,X_INTRODUCED_343,X_INTRODUCED_373,X_INTRODUCED_393,X_INTRODUCED_403,X_INTRODUCED_724],-2):: defines_var(X_INTRODUCED_724); -solve :: bool_search(X_INTRODUCED_798,occurrence,indomain_random,complete) minimize objective; \ No newline at end of file diff --git a/examples/tests/issue115b.fzn b/examples/tests/issue115b.fzn deleted file mode 100644 index 3532428e4a0..00000000000 --- a/examples/tests/issue115b.fzn +++ /dev/null @@ -1,777 +0,0 @@ -predicate maximum_int(var int: m,array [int] of var int: x); -predicate minimum_int(var int: m,array [int] of var int: x); -array [1..8] of int: X_INTRODUCED_313 = [1,1,1,1,1,1,1,1]; -array [1..4] of int: X_INTRODUCED_538 = [-1,-1,-1,-1]; -array [1..14] of int: X_INTRODUCED_633 = [1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1]; -array [1..7] of int: X_INTRODUCED_657 = [1,-1,-1,-1,-1,-1,-1]; -array [1..11] of int: X_INTRODUCED_675 = [1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1]; -array [1..8] of int: X_INTRODUCED_699 = [1,-1,-1,-1,-1,-1,-1,-1]; -array [1..6] of int: X_INTRODUCED_716 = [1,-1,-1,-1,-1,-1]; -array [1..5] of int: X_INTRODUCED_741 = [1,-1,-1,-1,-1]; -array [1..3] of int: X_INTRODUCED_782 = [1,-1,-1]; -array [1..9] of int: X_INTRODUCED_815 = [1,-1,-1,-1,-1,-1,-1,-1,-1]; -array [1..29] of int: X_INTRODUCED_909 = [1,-3,-3,-3,-3,-3,-3,-3,-3,-3,-3,-1,-1,-1,-1,-1,-1,-2,-2,-2,-2,1,1,1,1,1,1,1,1]; -var bool: X_INTRODUCED_2; -var bool: X_INTRODUCED_3; -var bool: X_INTRODUCED_6; -var bool: X_INTRODUCED_7; -var bool: X_INTRODUCED_8; -var bool: X_INTRODUCED_9; -var bool: X_INTRODUCED_10; -var bool: X_INTRODUCED_11; -var bool: X_INTRODUCED_12; -var bool: X_INTRODUCED_13; -var bool: X_INTRODUCED_20; 
-var bool: X_INTRODUCED_21; -var bool: X_INTRODUCED_22; -var bool: X_INTRODUCED_23; -var bool: X_INTRODUCED_24; -var bool: X_INTRODUCED_25; -var bool: X_INTRODUCED_26; -var bool: X_INTRODUCED_27; -var bool: X_INTRODUCED_28; -var bool: X_INTRODUCED_29; -var bool: X_INTRODUCED_32; -var bool: X_INTRODUCED_33; -var bool: X_INTRODUCED_34; -var bool: X_INTRODUCED_35; -var bool: X_INTRODUCED_40; -var bool: X_INTRODUCED_41; -var bool: X_INTRODUCED_46; -var bool: X_INTRODUCED_47; -var bool: X_INTRODUCED_50; -var bool: X_INTRODUCED_51; -var bool: X_INTRODUCED_52; -var bool: X_INTRODUCED_53; -var bool: X_INTRODUCED_58; -var bool: X_INTRODUCED_59; -var bool: X_INTRODUCED_60; -var bool: X_INTRODUCED_61; -var bool: X_INTRODUCED_62; -var bool: X_INTRODUCED_63; -var bool: X_INTRODUCED_66; -var bool: X_INTRODUCED_67; -var bool: X_INTRODUCED_76; -var bool: X_INTRODUCED_77; -var bool: X_INTRODUCED_78; -var bool: X_INTRODUCED_79; -var bool: X_INTRODUCED_82; -var bool: X_INTRODUCED_83; -var bool: X_INTRODUCED_88; -var bool: X_INTRODUCED_89; -var bool: X_INTRODUCED_92; -var bool: X_INTRODUCED_93; -var bool: X_INTRODUCED_102; -var bool: X_INTRODUCED_103; -var bool: X_INTRODUCED_106; -var bool: X_INTRODUCED_107; -var bool: X_INTRODUCED_108; -var bool: X_INTRODUCED_109; -var bool: X_INTRODUCED_116; -var bool: X_INTRODUCED_117; -var bool: X_INTRODUCED_118; -var bool: X_INTRODUCED_119; -var bool: X_INTRODUCED_122; -var bool: X_INTRODUCED_123; -var bool: X_INTRODUCED_126; -var bool: X_INTRODUCED_127; -var bool: X_INTRODUCED_128; -var bool: X_INTRODUCED_129; -var bool: X_INTRODUCED_130; -var bool: X_INTRODUCED_131; -var bool: X_INTRODUCED_132; -var bool: X_INTRODUCED_133; -var bool: X_INTRODUCED_138; -var bool: X_INTRODUCED_139; -var bool: X_INTRODUCED_142; -var bool: X_INTRODUCED_143; -var bool: X_INTRODUCED_148; -var bool: X_INTRODUCED_149; -var bool: X_INTRODUCED_152; -var bool: X_INTRODUCED_153; -var bool: X_INTRODUCED_156; -var bool: X_INTRODUCED_157; -var bool: X_INTRODUCED_162; -var 
bool: X_INTRODUCED_163; -var bool: X_INTRODUCED_166; -var bool: X_INTRODUCED_167; -var bool: X_INTRODUCED_172; -var bool: X_INTRODUCED_173; -var bool: X_INTRODUCED_178; -var bool: X_INTRODUCED_179; -var bool: X_INTRODUCED_182; -var bool: X_INTRODUCED_183; -var 0..13: X_INTRODUCED_184:: is_defined_var; -var 0..13: X_INTRODUCED_185:: is_defined_var; -var 0..13: X_INTRODUCED_186:: is_defined_var; -var 0..13: X_INTRODUCED_187:: is_defined_var; -var 0..13: X_INTRODUCED_188:: is_defined_var; -var 0..13: X_INTRODUCED_189:: is_defined_var; -var 0..13: X_INTRODUCED_190:: is_defined_var; -var 0..13: X_INTRODUCED_191:: is_defined_var; -var 0..6: X_INTRODUCED_192:: is_defined_var; -var 0..6: X_INTRODUCED_193:: is_defined_var; -var 0..6: X_INTRODUCED_194:: is_defined_var; -var 0..6: X_INTRODUCED_195:: is_defined_var; -var 0..6: X_INTRODUCED_196:: is_defined_var; -var 0..6: X_INTRODUCED_197:: is_defined_var; -var 0..6: X_INTRODUCED_198:: is_defined_var; -var 0..6: X_INTRODUCED_199:: is_defined_var; -var 0..10: X_INTRODUCED_200:: is_defined_var; -var 0..10: X_INTRODUCED_201:: is_defined_var; -var 0..10: X_INTRODUCED_202:: is_defined_var; -var 0..10: X_INTRODUCED_203:: is_defined_var; -var 0..10: X_INTRODUCED_204:: is_defined_var; -var 0..10: X_INTRODUCED_205:: is_defined_var; -var 0..10: X_INTRODUCED_206:: is_defined_var; -var 0..10: X_INTRODUCED_207:: is_defined_var; -var 0..7: X_INTRODUCED_208:: is_defined_var; -var 0..7: X_INTRODUCED_209:: is_defined_var; -var 0..7: X_INTRODUCED_210:: is_defined_var; -var 0..7: X_INTRODUCED_211:: is_defined_var; -var 0..7: X_INTRODUCED_212:: is_defined_var; -var 0..7: X_INTRODUCED_213:: is_defined_var; -var 0..7: X_INTRODUCED_214:: is_defined_var; -var 0..7: X_INTRODUCED_215:: is_defined_var; -var 0..5: X_INTRODUCED_216:: is_defined_var; -var 0..5: X_INTRODUCED_217:: is_defined_var; -var 0..5: X_INTRODUCED_218:: is_defined_var; -var 0..5: X_INTRODUCED_219:: is_defined_var; -var 0..5: X_INTRODUCED_220:: is_defined_var; -var 0..5: 
X_INTRODUCED_221:: is_defined_var; -var 0..5: X_INTRODUCED_222:: is_defined_var; -var 0..5: X_INTRODUCED_223:: is_defined_var; -var 0..4: X_INTRODUCED_232:: is_defined_var; -var 0..4: X_INTRODUCED_233:: is_defined_var; -var 0..4: X_INTRODUCED_234:: is_defined_var; -var 0..4: X_INTRODUCED_235:: is_defined_var; -var 0..4: X_INTRODUCED_236:: is_defined_var; -var 0..4: X_INTRODUCED_237:: is_defined_var; -var 0..4: X_INTRODUCED_238:: is_defined_var; -var 0..4: X_INTRODUCED_239:: is_defined_var; -var 0..4: X_INTRODUCED_240:: is_defined_var; -var 0..4: X_INTRODUCED_241:: is_defined_var; -var 0..4: X_INTRODUCED_242:: is_defined_var; -var 0..4: X_INTRODUCED_243:: is_defined_var; -var 0..4: X_INTRODUCED_244:: is_defined_var; -var 0..4: X_INTRODUCED_245:: is_defined_var; -var 0..4: X_INTRODUCED_246:: is_defined_var; -var 0..4: X_INTRODUCED_247:: is_defined_var; -var 0..4: X_INTRODUCED_248:: is_defined_var; -var 0..4: X_INTRODUCED_249:: is_defined_var; -var 0..4: X_INTRODUCED_250:: is_defined_var; -var 0..4: X_INTRODUCED_251:: is_defined_var; -var 0..4: X_INTRODUCED_252:: is_defined_var; -var 0..4: X_INTRODUCED_253:: is_defined_var; -var 0..4: X_INTRODUCED_254:: is_defined_var; -var 0..4: X_INTRODUCED_255:: is_defined_var; -var 0..2: X_INTRODUCED_256:: is_defined_var; -var 0..2: X_INTRODUCED_257:: is_defined_var; -var 0..2: X_INTRODUCED_258:: is_defined_var; -var 0..2: X_INTRODUCED_259:: is_defined_var; -var 0..2: X_INTRODUCED_260:: is_defined_var; -var 0..2: X_INTRODUCED_261:: is_defined_var; -var 0..2: X_INTRODUCED_262:: is_defined_var; -var 0..2: X_INTRODUCED_263:: is_defined_var; -var int: X_INTRODUCED_264; -var int: X_INTRODUCED_265; -var int: X_INTRODUCED_266; -var int: X_INTRODUCED_267; -var int: X_INTRODUCED_268; -var int: X_INTRODUCED_269; -var int: X_INTRODUCED_270; -var int: X_INTRODUCED_271; -var int: X_INTRODUCED_272; -var int: X_INTRODUCED_273; -var int: X_INTRODUCED_274; -var int: X_INTRODUCED_275; -var int: X_INTRODUCED_276; -var int: X_INTRODUCED_277; -var 
int: X_INTRODUCED_278; -var int: X_INTRODUCED_279; -var int: X_INTRODUCED_280; -var int: X_INTRODUCED_281; -var int: X_INTRODUCED_282; -var int: X_INTRODUCED_283; -var 0..52: X_INTRODUCED_284:: is_defined_var; -var 0..24: X_INTRODUCED_285:: is_defined_var; -var 0..40: X_INTRODUCED_286:: is_defined_var; -var 0..28: X_INTRODUCED_287:: is_defined_var; -var 0..20: X_INTRODUCED_288:: is_defined_var; -var int: X_INTRODUCED_289:: is_defined_var; -var 0..32: X_INTRODUCED_290:: is_defined_var; -var 0..32: X_INTRODUCED_291:: is_defined_var; -var 0..32: X_INTRODUCED_292:: is_defined_var; -var 0..16: X_INTRODUCED_293:: is_defined_var; -var int: objective:: output_var:: is_defined_var; -var 3..3: target:: output_var; -var 0..1: X_INTRODUCED_306 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_307 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_310 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_311 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_315 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_316 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_317 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_318 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_319 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_320 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_329 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_330 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_331 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_332 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_335 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_336 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_337 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_338 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_339 ::var_is_introduced :: is_defined_var; -var 0..1: 
X_INTRODUCED_340 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_345 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_346 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_347 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_348 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_355 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_356 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_361 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_362 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_367 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_368 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_369 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_370 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_377 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_378 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_379 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_380 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_381 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_382 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_387 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_388 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_399 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_400 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_401 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_402 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_407 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_408 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_415 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_416 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_419 ::var_is_introduced :: is_defined_var; 
-var 0..1: X_INTRODUCED_420 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_431 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_432 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_437 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_438 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_439 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_440 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_449 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_450 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_451 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_452 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_457 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_458 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_461 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_462 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_465 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_466 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_467 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_468 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_469 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_470 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_477 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_478 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_481 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_482 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_489 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_490 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_495 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_496 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_499 ::var_is_introduced :: 
is_defined_var; -var 0..1: X_INTRODUCED_500 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_507 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_508 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_511 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_512 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_519 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_520 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_527 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_528 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_531 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_532 ::var_is_introduced :: is_defined_var; -var -20..3: X_INTRODUCED_823 ::var_is_introduced :: is_defined_var; -var 0..3: X_INTRODUCED_824 ::var_is_introduced ; -var -20..3: X_INTRODUCED_828 ::var_is_introduced :: is_defined_var; -var 0..3: X_INTRODUCED_829 ::var_is_introduced ; -var -20..3: X_INTRODUCED_833 ::var_is_introduced :: is_defined_var; -var 0..3: X_INTRODUCED_834 ::var_is_introduced ; -var -20..3: X_INTRODUCED_838 ::var_is_introduced :: is_defined_var; -var 0..3: X_INTRODUCED_839 ::var_is_introduced ; -var -20..3: X_INTRODUCED_843 ::var_is_introduced :: is_defined_var; -var 0..3: X_INTRODUCED_844 ::var_is_introduced ; -var -20..3: X_INTRODUCED_848 ::var_is_introduced :: is_defined_var; -var 0..3: X_INTRODUCED_849 ::var_is_introduced ; -var -20..3: X_INTRODUCED_853 ::var_is_introduced :: is_defined_var; -var 0..3: X_INTRODUCED_854 ::var_is_introduced ; -var -20..3: X_INTRODUCED_858 ::var_is_introduced :: is_defined_var; -var 0..3: X_INTRODUCED_859 ::var_is_introduced ; -var 0..300: X_INTRODUCED_861 ::var_is_introduced :: is_defined_var; -var 0..100: X_INTRODUCED_863 ::var_is_introduced :: is_defined_var; -var 0..300: X_INTRODUCED_864 ::var_is_introduced :: is_defined_var; -var 0..100: X_INTRODUCED_866 ::var_is_introduced :: is_defined_var; -var 0..300: 
X_INTRODUCED_867 ::var_is_introduced :: is_defined_var; -var 0..100: X_INTRODUCED_869 ::var_is_introduced :: is_defined_var; -var 0..300: X_INTRODUCED_870 ::var_is_introduced :: is_defined_var; -var 0..100: X_INTRODUCED_872 ::var_is_introduced :: is_defined_var; -var 0..300: X_INTRODUCED_873 ::var_is_introduced :: is_defined_var; -var 0..100: X_INTRODUCED_875 ::var_is_introduced :: is_defined_var; -var 0..300: X_INTRODUCED_876 ::var_is_introduced :: is_defined_var; -var 0..100: X_INTRODUCED_878 ::var_is_introduced :: is_defined_var; -var 0..300: X_INTRODUCED_879 ::var_is_introduced :: is_defined_var; -var 0..100: X_INTRODUCED_881 ::var_is_introduced :: is_defined_var; -var 0..300: X_INTRODUCED_882 ::var_is_introduced :: is_defined_var; -var 0..100: X_INTRODUCED_884 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_304 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_305 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_308 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_309 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_321 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_322 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_325 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_326 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_327 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_328 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_341 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_342 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_349 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_350 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_351 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_352 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_357 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_358 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_359 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_360 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_365 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_366 ::var_is_introduced = 0; -var 0..1: 
X_INTRODUCED_371 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_372 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_375 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_376 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_385 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_386 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_389 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_390 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_391 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_392 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_395 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_396 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_397 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_398 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_405 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_406 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_409 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_410 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_411 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_412 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_417 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_418 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_421 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_422 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_425 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_426 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_427 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_428 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_429 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_430 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_435 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_436 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_441 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_442 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_445 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_446 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_447 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_448 ::var_is_introduced = 
0; -var 0..1: X_INTRODUCED_455 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_456 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_459 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_460 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_471 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_472 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_475 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_476 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_479 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_480 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_485 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_486 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_487 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_488 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_491 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_492 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_497 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_498 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_501 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_502 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_505 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_506 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_509 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_510 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_515 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_516 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_517 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_518 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_521 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_522 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_525 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_526 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_529 ::var_is_introduced = 0; -var 0..1: X_INTRODUCED_530 ::var_is_introduced = 0; -array [1..184] of var bool: Assignment:: output_array([1..23,1..4,1..2]) = 
[false,false,X_INTRODUCED_2,X_INTRODUCED_3,false,false,X_INTRODUCED_6,X_INTRODUCED_7,X_INTRODUCED_8,X_INTRODUCED_9,X_INTRODUCED_10,X_INTRODUCED_11,X_INTRODUCED_12,X_INTRODUCED_13,false,false,false,false,false,false,X_INTRODUCED_20,X_INTRODUCED_21,X_INTRODUCED_22,X_INTRODUCED_23,X_INTRODUCED_24,X_INTRODUCED_25,X_INTRODUCED_26,X_INTRODUCED_27,X_INTRODUCED_28,X_INTRODUCED_29,false,false,X_INTRODUCED_32,X_INTRODUCED_33,X_INTRODUCED_34,X_INTRODUCED_35,false,false,false,false,X_INTRODUCED_40,X_INTRODUCED_41,false,false,false,false,X_INTRODUCED_46,X_INTRODUCED_47,false,false,X_INTRODUCED_50,X_INTRODUCED_51,X_INTRODUCED_52,X_INTRODUCED_53,false,false,false,false,X_INTRODUCED_58,X_INTRODUCED_59,X_INTRODUCED_60,X_INTRODUCED_61,X_INTRODUCED_62,X_INTRODUCED_63,false,false,X_INTRODUCED_66,X_INTRODUCED_67,false,false,false,false,false,false,false,false,X_INTRODUCED_76,X_INTRODUCED_77,X_INTRODUCED_78,X_INTRODUCED_79,false,false,X_INTRODUCED_82,X_INTRODUCED_83,false,false,false,false,X_INTRODUCED_88,X_INTRODUCED_89,false,false,X_INTRODUCED_92,X_INTRODUCED_93,false,false,false,false,false,false,false,false,X_INTRODUCED_102,X_INTRODUCED_103,false,false,X_INTRODUCED_106,X_INTRODUCED_107,X_INTRODUCED_108,X_INTRODUCED_109,false,false,false,false,false,false,X_INTRODUCED_116,X_INTRODUCED_117,X_INTRODUCED_118,X_INTRODUCED_119,false,false,X_INTRODUCED_122,X_INTRODUCED_123,false,false,X_INTRODUCED_126,X_INTRODUCED_127,X_INTRODUCED_128,X_INTRODUCED_129,X_INTRODUCED_130,X_INTRODUCED_131,X_INTRODUCED_132,X_INTRODUCED_133,false,false,false,false,X_INTRODUCED_138,X_INTRODUCED_139,false,false,X_INTRODUCED_142,X_INTRODUCED_143,false,false,false,false,X_INTRODUCED_148,X_INTRODUCED_149,false,false,X_INTRODUCED_152,X_INTRODUCED_153,false,false,X_INTRODUCED_156,X_INTRODUCED_157,false,false,false,false,X_INTRODUCED_162,X_INTRODUCED_163,false,false,X_INTRODUCED_166,X_INTRODUCED_167,false,false,false,false,X_INTRODUCED_172,X_INTRODUCED_173,false,false,false,false,X_INTRODUCED_178,X_INTRODUCED_179,false,f
alse,X_INTRODUCED_182,X_INTRODUCED_183]; -array [1..80] of var int: Coverage:: output_array([1..10,1..4,1..2]) = [X_INTRODUCED_184,X_INTRODUCED_185,X_INTRODUCED_186,X_INTRODUCED_187,X_INTRODUCED_188,X_INTRODUCED_189,X_INTRODUCED_190,X_INTRODUCED_191,X_INTRODUCED_192,X_INTRODUCED_193,X_INTRODUCED_194,X_INTRODUCED_195,X_INTRODUCED_196,X_INTRODUCED_197,X_INTRODUCED_198,X_INTRODUCED_199,X_INTRODUCED_200,X_INTRODUCED_201,X_INTRODUCED_202,X_INTRODUCED_203,X_INTRODUCED_204,X_INTRODUCED_205,X_INTRODUCED_206,X_INTRODUCED_207,X_INTRODUCED_208,X_INTRODUCED_209,X_INTRODUCED_210,X_INTRODUCED_211,X_INTRODUCED_212,X_INTRODUCED_213,X_INTRODUCED_214,X_INTRODUCED_215,X_INTRODUCED_216,X_INTRODUCED_217,X_INTRODUCED_218,X_INTRODUCED_219,X_INTRODUCED_220,X_INTRODUCED_221,X_INTRODUCED_222,X_INTRODUCED_223,0,0,X_INTRODUCED_437,X_INTRODUCED_438,X_INTRODUCED_439,X_INTRODUCED_440,0,0,X_INTRODUCED_232,X_INTRODUCED_233,X_INTRODUCED_234,X_INTRODUCED_235,X_INTRODUCED_236,X_INTRODUCED_237,X_INTRODUCED_238,X_INTRODUCED_239,X_INTRODUCED_240,X_INTRODUCED_241,X_INTRODUCED_242,X_INTRODUCED_243,X_INTRODUCED_244,X_INTRODUCED_245,X_INTRODUCED_246,X_INTRODUCED_247,X_INTRODUCED_248,X_INTRODUCED_249,X_INTRODUCED_250,X_INTRODUCED_251,X_INTRODUCED_252,X_INTRODUCED_253,X_INTRODUCED_254,X_INTRODUCED_255,X_INTRODUCED_256,X_INTRODUCED_257,X_INTRODUCED_258,X_INTRODUCED_259,X_INTRODUCED_260,X_INTRODUCED_261,X_INTRODUCED_262,X_INTRODUCED_263]; -array [1..10] of var int: MaxTable:: output_array([1..10]) = [X_INTRODUCED_264,X_INTRODUCED_265,X_INTRODUCED_266,X_INTRODUCED_267,X_INTRODUCED_268,X_INTRODUCED_269,X_INTRODUCED_270,X_INTRODUCED_271,X_INTRODUCED_272,X_INTRODUCED_273]; -array [1..10] of var int: MinTable:: output_array([1..10]) = [X_INTRODUCED_274,X_INTRODUCED_275,X_INTRODUCED_276,X_INTRODUCED_277,X_INTRODUCED_278,X_INTRODUCED_279,X_INTRODUCED_280,X_INTRODUCED_281,X_INTRODUCED_282,X_INTRODUCED_283]; -array [1..10] of var int: totalTable:: output_array([1..10]) = 
[X_INTRODUCED_284,X_INTRODUCED_285,X_INTRODUCED_286,X_INTRODUCED_287,X_INTRODUCED_288,X_INTRODUCED_289,X_INTRODUCED_290,X_INTRODUCED_291,X_INTRODUCED_292,X_INTRODUCED_293]; -array [1..8] of var int: seshSlack:: output_array([1..4,1..2]) = [X_INTRODUCED_824,X_INTRODUCED_829,X_INTRODUCED_834,X_INTRODUCED_839,X_INTRODUCED_844,X_INTRODUCED_849,X_INTRODUCED_854,X_INTRODUCED_859]; -array [1..14] of var int: X_INTRODUCED_632 ::var_is_introduced = [X_INTRODUCED_184,X_INTRODUCED_304,X_INTRODUCED_325,X_INTRODUCED_335,X_INTRODUCED_345,X_INTRODUCED_355,X_INTRODUCED_365,X_INTRODUCED_405,X_INTRODUCED_435,X_INTRODUCED_445,X_INTRODUCED_455,X_INTRODUCED_465,X_INTRODUCED_515,X_INTRODUCED_525]; -array [1..14] of var int: X_INTRODUCED_636 ::var_is_introduced = [X_INTRODUCED_185,X_INTRODUCED_305,X_INTRODUCED_326,X_INTRODUCED_336,X_INTRODUCED_346,X_INTRODUCED_356,X_INTRODUCED_366,X_INTRODUCED_406,X_INTRODUCED_436,X_INTRODUCED_446,X_INTRODUCED_456,X_INTRODUCED_466,X_INTRODUCED_516,X_INTRODUCED_526]; -array [1..14] of var int: X_INTRODUCED_639 ::var_is_introduced = [X_INTRODUCED_186,X_INTRODUCED_306,X_INTRODUCED_327,X_INTRODUCED_337,X_INTRODUCED_347,X_INTRODUCED_357,X_INTRODUCED_367,X_INTRODUCED_407,X_INTRODUCED_437,X_INTRODUCED_447,X_INTRODUCED_457,X_INTRODUCED_467,X_INTRODUCED_517,X_INTRODUCED_527]; -array [1..14] of var int: X_INTRODUCED_642 ::var_is_introduced = [X_INTRODUCED_187,X_INTRODUCED_307,X_INTRODUCED_328,X_INTRODUCED_338,X_INTRODUCED_348,X_INTRODUCED_358,X_INTRODUCED_368,X_INTRODUCED_408,X_INTRODUCED_438,X_INTRODUCED_448,X_INTRODUCED_458,X_INTRODUCED_468,X_INTRODUCED_518,X_INTRODUCED_528]; -array [1..14] of var int: X_INTRODUCED_645 ::var_is_introduced = [X_INTRODUCED_188,X_INTRODUCED_308,X_INTRODUCED_329,X_INTRODUCED_339,X_INTRODUCED_349,X_INTRODUCED_359,X_INTRODUCED_369,X_INTRODUCED_409,X_INTRODUCED_439,X_INTRODUCED_449,X_INTRODUCED_459,X_INTRODUCED_469,X_INTRODUCED_519,X_INTRODUCED_529]; -array [1..14] of var int: X_INTRODUCED_648 ::var_is_introduced = 
[X_INTRODUCED_189,X_INTRODUCED_309,X_INTRODUCED_330,X_INTRODUCED_340,X_INTRODUCED_350,X_INTRODUCED_360,X_INTRODUCED_370,X_INTRODUCED_410,X_INTRODUCED_440,X_INTRODUCED_450,X_INTRODUCED_460,X_INTRODUCED_470,X_INTRODUCED_520,X_INTRODUCED_530]; -array [1..14] of var int: X_INTRODUCED_651 ::var_is_introduced = [X_INTRODUCED_190,X_INTRODUCED_310,X_INTRODUCED_331,X_INTRODUCED_341,X_INTRODUCED_351,X_INTRODUCED_361,X_INTRODUCED_371,X_INTRODUCED_411,X_INTRODUCED_441,X_INTRODUCED_451,X_INTRODUCED_461,X_INTRODUCED_471,X_INTRODUCED_521,X_INTRODUCED_531]; -array [1..14] of var int: X_INTRODUCED_654 ::var_is_introduced = [X_INTRODUCED_191,X_INTRODUCED_311,X_INTRODUCED_332,X_INTRODUCED_342,X_INTRODUCED_352,X_INTRODUCED_362,X_INTRODUCED_372,X_INTRODUCED_412,X_INTRODUCED_442,X_INTRODUCED_452,X_INTRODUCED_462,X_INTRODUCED_472,X_INTRODUCED_522,X_INTRODUCED_532]; -array [1..11] of var int: X_INTRODUCED_674 ::var_is_introduced = [X_INTRODUCED_200,X_INTRODUCED_315,X_INTRODUCED_335,X_INTRODUCED_355,X_INTRODUCED_375,X_INTRODUCED_395,X_INTRODUCED_415,X_INTRODUCED_455,X_INTRODUCED_465,X_INTRODUCED_475,X_INTRODUCED_525]; -array [1..11] of var int: X_INTRODUCED_678 ::var_is_introduced = [X_INTRODUCED_201,X_INTRODUCED_316,X_INTRODUCED_336,X_INTRODUCED_356,X_INTRODUCED_376,X_INTRODUCED_396,X_INTRODUCED_416,X_INTRODUCED_456,X_INTRODUCED_466,X_INTRODUCED_476,X_INTRODUCED_526]; -array [1..11] of var int: X_INTRODUCED_681 ::var_is_introduced = [X_INTRODUCED_202,X_INTRODUCED_317,X_INTRODUCED_337,X_INTRODUCED_357,X_INTRODUCED_377,X_INTRODUCED_397,X_INTRODUCED_417,X_INTRODUCED_457,X_INTRODUCED_467,X_INTRODUCED_477,X_INTRODUCED_527]; -array [1..11] of var int: X_INTRODUCED_684 ::var_is_introduced = [X_INTRODUCED_203,X_INTRODUCED_318,X_INTRODUCED_338,X_INTRODUCED_358,X_INTRODUCED_378,X_INTRODUCED_398,X_INTRODUCED_418,X_INTRODUCED_458,X_INTRODUCED_468,X_INTRODUCED_478,X_INTRODUCED_528]; -array [1..11] of var int: X_INTRODUCED_687 ::var_is_introduced = 
[X_INTRODUCED_204,X_INTRODUCED_319,X_INTRODUCED_339,X_INTRODUCED_359,X_INTRODUCED_379,X_INTRODUCED_399,X_INTRODUCED_419,X_INTRODUCED_459,X_INTRODUCED_469,X_INTRODUCED_479,X_INTRODUCED_529]; -array [1..11] of var int: X_INTRODUCED_690 ::var_is_introduced = [X_INTRODUCED_205,X_INTRODUCED_320,X_INTRODUCED_340,X_INTRODUCED_360,X_INTRODUCED_380,X_INTRODUCED_400,X_INTRODUCED_420,X_INTRODUCED_460,X_INTRODUCED_470,X_INTRODUCED_480,X_INTRODUCED_530]; -array [1..11] of var int: X_INTRODUCED_693 ::var_is_introduced = [X_INTRODUCED_206,X_INTRODUCED_321,X_INTRODUCED_341,X_INTRODUCED_361,X_INTRODUCED_381,X_INTRODUCED_401,X_INTRODUCED_421,X_INTRODUCED_461,X_INTRODUCED_471,X_INTRODUCED_481,X_INTRODUCED_531]; -array [1..11] of var int: X_INTRODUCED_696 ::var_is_introduced = [X_INTRODUCED_207,X_INTRODUCED_322,X_INTRODUCED_342,X_INTRODUCED_362,X_INTRODUCED_382,X_INTRODUCED_402,X_INTRODUCED_422,X_INTRODUCED_462,X_INTRODUCED_472,X_INTRODUCED_482,X_INTRODUCED_532]; -array [1..4] of var int: X_INTRODUCED_798 ::var_is_introduced = [X_INTRODUCED_184,X_INTRODUCED_186,X_INTRODUCED_188,X_INTRODUCED_190]; -array [1..4] of var int: X_INTRODUCED_799 ::var_is_introduced = [X_INTRODUCED_193,X_INTRODUCED_195,X_INTRODUCED_197,X_INTRODUCED_199]; -array [1..4] of var int: X_INTRODUCED_800 ::var_is_introduced = [X_INTRODUCED_200,X_INTRODUCED_202,X_INTRODUCED_204,X_INTRODUCED_206]; -array [1..4] of var int: X_INTRODUCED_801 ::var_is_introduced = [X_INTRODUCED_209,X_INTRODUCED_211,X_INTRODUCED_213,X_INTRODUCED_215]; -array [1..4] of var int: X_INTRODUCED_802 ::var_is_introduced = [X_INTRODUCED_217,X_INTRODUCED_219,X_INTRODUCED_221,X_INTRODUCED_223]; -array [1..4] of var int: X_INTRODUCED_803 ::var_is_introduced = [0,X_INTRODUCED_438,X_INTRODUCED_440,0]; -array [1..8] of var int: X_INTRODUCED_804 ::var_is_introduced = [X_INTRODUCED_232,X_INTRODUCED_233,X_INTRODUCED_234,X_INTRODUCED_235,X_INTRODUCED_236,X_INTRODUCED_237,X_INTRODUCED_238,X_INTRODUCED_239]; -array [1..8] of var int: X_INTRODUCED_805 
::var_is_introduced = [X_INTRODUCED_240,X_INTRODUCED_241,X_INTRODUCED_242,X_INTRODUCED_243,X_INTRODUCED_244,X_INTRODUCED_245,X_INTRODUCED_246,X_INTRODUCED_247]; -array [1..8] of var int: X_INTRODUCED_806 ::var_is_introduced = [X_INTRODUCED_248,X_INTRODUCED_249,X_INTRODUCED_250,X_INTRODUCED_251,X_INTRODUCED_252,X_INTRODUCED_253,X_INTRODUCED_254,X_INTRODUCED_255]; -array [1..8] of var int: X_INTRODUCED_807 ::var_is_introduced = [X_INTRODUCED_256,X_INTRODUCED_257,X_INTRODUCED_258,X_INTRODUCED_259,X_INTRODUCED_260,X_INTRODUCED_261,X_INTRODUCED_262,X_INTRODUCED_263]; -array [1..29] of var int: X_INTRODUCED_908 ::var_is_introduced = [objective,X_INTRODUCED_284,X_INTRODUCED_274,X_INTRODUCED_285,X_INTRODUCED_275,X_INTRODUCED_286,X_INTRODUCED_276,X_INTRODUCED_287,X_INTRODUCED_277,X_INTRODUCED_288,X_INTRODUCED_278,X_INTRODUCED_289,X_INTRODUCED_279,X_INTRODUCED_290,X_INTRODUCED_280,X_INTRODUCED_291,X_INTRODUCED_281,X_INTRODUCED_292,X_INTRODUCED_282,X_INTRODUCED_293,X_INTRODUCED_283,X_INTRODUCED_863,X_INTRODUCED_866,X_INTRODUCED_869,X_INTRODUCED_872,X_INTRODUCED_875,X_INTRODUCED_878,X_INTRODUCED_881,X_INTRODUCED_884]; -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_304,X_INTRODUCED_305,X_INTRODUCED_306,X_INTRODUCED_307,X_INTRODUCED_308,X_INTRODUCED_309,X_INTRODUCED_310,X_INTRODUCED_311],1); -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_315,X_INTRODUCED_316,X_INTRODUCED_317,X_INTRODUCED_318,X_INTRODUCED_319,X_INTRODUCED_320,X_INTRODUCED_321,X_INTRODUCED_322],1); -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_325,X_INTRODUCED_326,X_INTRODUCED_327,X_INTRODUCED_328,X_INTRODUCED_329,X_INTRODUCED_330,X_INTRODUCED_331,X_INTRODUCED_332],1); -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_335,X_INTRODUCED_336,X_INTRODUCED_337,X_INTRODUCED_338,X_INTRODUCED_339,X_INTRODUCED_340,X_INTRODUCED_341,X_INTRODUCED_342],1); -constraint 
int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_345,X_INTRODUCED_346,X_INTRODUCED_347,X_INTRODUCED_348,X_INTRODUCED_349,X_INTRODUCED_350,X_INTRODUCED_351,X_INTRODUCED_352],1); -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_355,X_INTRODUCED_356,X_INTRODUCED_357,X_INTRODUCED_358,X_INTRODUCED_359,X_INTRODUCED_360,X_INTRODUCED_361,X_INTRODUCED_362],1); -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_365,X_INTRODUCED_366,X_INTRODUCED_367,X_INTRODUCED_368,X_INTRODUCED_369,X_INTRODUCED_370,X_INTRODUCED_371,X_INTRODUCED_372],1); -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_375,X_INTRODUCED_376,X_INTRODUCED_377,X_INTRODUCED_378,X_INTRODUCED_379,X_INTRODUCED_380,X_INTRODUCED_381,X_INTRODUCED_382],1); -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_385,X_INTRODUCED_386,X_INTRODUCED_387,X_INTRODUCED_388,X_INTRODUCED_389,X_INTRODUCED_390,X_INTRODUCED_391,X_INTRODUCED_392],1); -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_395,X_INTRODUCED_396,X_INTRODUCED_397,X_INTRODUCED_398,X_INTRODUCED_399,X_INTRODUCED_400,X_INTRODUCED_401,X_INTRODUCED_402],1); -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_405,X_INTRODUCED_406,X_INTRODUCED_407,X_INTRODUCED_408,X_INTRODUCED_409,X_INTRODUCED_410,X_INTRODUCED_411,X_INTRODUCED_412],1); -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_415,X_INTRODUCED_416,X_INTRODUCED_417,X_INTRODUCED_418,X_INTRODUCED_419,X_INTRODUCED_420,X_INTRODUCED_421,X_INTRODUCED_422],1); -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_425,X_INTRODUCED_426,X_INTRODUCED_427,X_INTRODUCED_428,X_INTRODUCED_429,X_INTRODUCED_430,X_INTRODUCED_431,X_INTRODUCED_432],1); -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_435,X_INTRODUCED_436,X_INTRODUCED_437,X_INTRODUCED_438,X_INTRODUCED_439,X_INTRODUCED_440,X_INTRODUCED_441,X_INTRODUCED_442],1); -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_445,X_INTRODUCED_446,X_INTRODUCED_447,X_INTRODUCED_448,X_INTRODUCED_449,X_INTRODUCED_450,X_INTRODUCED_451,X_INTRODUCED_452],1); 
-constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_455,X_INTRODUCED_456,X_INTRODUCED_457,X_INTRODUCED_458,X_INTRODUCED_459,X_INTRODUCED_460,X_INTRODUCED_461,X_INTRODUCED_462],1); -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_465,X_INTRODUCED_466,X_INTRODUCED_467,X_INTRODUCED_468,X_INTRODUCED_469,X_INTRODUCED_470,X_INTRODUCED_471,X_INTRODUCED_472],1); -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_475,X_INTRODUCED_476,X_INTRODUCED_477,X_INTRODUCED_478,X_INTRODUCED_479,X_INTRODUCED_480,X_INTRODUCED_481,X_INTRODUCED_482],1); -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_485,X_INTRODUCED_486,X_INTRODUCED_487,X_INTRODUCED_488,X_INTRODUCED_489,X_INTRODUCED_490,X_INTRODUCED_491,X_INTRODUCED_492],1); -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_495,X_INTRODUCED_496,X_INTRODUCED_497,X_INTRODUCED_498,X_INTRODUCED_499,X_INTRODUCED_500,X_INTRODUCED_501,X_INTRODUCED_502],1); -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_505,X_INTRODUCED_506,X_INTRODUCED_507,X_INTRODUCED_508,X_INTRODUCED_509,X_INTRODUCED_510,X_INTRODUCED_511,X_INTRODUCED_512],1); -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_515,X_INTRODUCED_516,X_INTRODUCED_517,X_INTRODUCED_518,X_INTRODUCED_519,X_INTRODUCED_520,X_INTRODUCED_521,X_INTRODUCED_522],1); -constraint int_lin_eq(X_INTRODUCED_313,[X_INTRODUCED_525,X_INTRODUCED_526,X_INTRODUCED_527,X_INTRODUCED_528,X_INTRODUCED_529,X_INTRODUCED_530,X_INTRODUCED_531,X_INTRODUCED_532],1); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_304,X_INTRODUCED_306,X_INTRODUCED_308,X_INTRODUCED_310],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_305,X_INTRODUCED_307,X_INTRODUCED_309,X_INTRODUCED_311],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_315,X_INTRODUCED_317,X_INTRODUCED_319,X_INTRODUCED_321],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_316,X_INTRODUCED_318,X_INTRODUCED_320,X_INTRODUCED_322],-1); -constraint 
int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_325,X_INTRODUCED_327,X_INTRODUCED_329,X_INTRODUCED_331],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_326,X_INTRODUCED_328,X_INTRODUCED_330,X_INTRODUCED_332],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_335,X_INTRODUCED_337,X_INTRODUCED_339,X_INTRODUCED_341],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_336,X_INTRODUCED_338,X_INTRODUCED_340,X_INTRODUCED_342],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_345,X_INTRODUCED_347,X_INTRODUCED_349,X_INTRODUCED_351],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_346,X_INTRODUCED_348,X_INTRODUCED_350,X_INTRODUCED_352],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_355,X_INTRODUCED_357,X_INTRODUCED_359,X_INTRODUCED_361],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_356,X_INTRODUCED_358,X_INTRODUCED_360,X_INTRODUCED_362],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_365,X_INTRODUCED_367,X_INTRODUCED_369,X_INTRODUCED_371],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_366,X_INTRODUCED_368,X_INTRODUCED_370,X_INTRODUCED_372],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_375,X_INTRODUCED_377,X_INTRODUCED_379,X_INTRODUCED_381],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_376,X_INTRODUCED_378,X_INTRODUCED_380,X_INTRODUCED_382],-1); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_385,X_INTRODUCED_387,X_INTRODUCED_389,X_INTRODUCED_391],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_386,X_INTRODUCED_388,X_INTRODUCED_390,X_INTRODUCED_392],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_395,X_INTRODUCED_397,X_INTRODUCED_399,X_INTRODUCED_401],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_396,X_INTRODUCED_398,X_INTRODUCED_400,X_INTRODUCED_402],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_405,X_INTRODUCED_407,X_INTRODUCED_409,X_INTRODUCED_411],0); -constraint 
int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_406,X_INTRODUCED_408,X_INTRODUCED_410,X_INTRODUCED_412],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_415,X_INTRODUCED_417,X_INTRODUCED_419,X_INTRODUCED_421],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_416,X_INTRODUCED_418,X_INTRODUCED_420,X_INTRODUCED_422],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_425,X_INTRODUCED_427,X_INTRODUCED_429,X_INTRODUCED_431],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_426,X_INTRODUCED_428,X_INTRODUCED_430,X_INTRODUCED_432],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_435,X_INTRODUCED_437,X_INTRODUCED_439,X_INTRODUCED_441],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_436,X_INTRODUCED_438,X_INTRODUCED_440,X_INTRODUCED_442],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_445,X_INTRODUCED_447,X_INTRODUCED_449,X_INTRODUCED_451],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_446,X_INTRODUCED_448,X_INTRODUCED_450,X_INTRODUCED_452],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_455,X_INTRODUCED_457,X_INTRODUCED_459,X_INTRODUCED_461],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_456,X_INTRODUCED_458,X_INTRODUCED_460,X_INTRODUCED_462],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_465,X_INTRODUCED_467,X_INTRODUCED_469,X_INTRODUCED_471],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_466,X_INTRODUCED_468,X_INTRODUCED_470,X_INTRODUCED_472],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_475,X_INTRODUCED_477,X_INTRODUCED_479,X_INTRODUCED_481],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_476,X_INTRODUCED_478,X_INTRODUCED_480,X_INTRODUCED_482],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_485,X_INTRODUCED_487,X_INTRODUCED_489,X_INTRODUCED_491],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_486,X_INTRODUCED_488,X_INTRODUCED_490,X_INTRODUCED_492],0); -constraint 
int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_495,X_INTRODUCED_497,X_INTRODUCED_499,X_INTRODUCED_501],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_496,X_INTRODUCED_498,X_INTRODUCED_500,X_INTRODUCED_502],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_505,X_INTRODUCED_507,X_INTRODUCED_509,X_INTRODUCED_511],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_506,X_INTRODUCED_508,X_INTRODUCED_510,X_INTRODUCED_512],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_515,X_INTRODUCED_517,X_INTRODUCED_519,X_INTRODUCED_521],-1); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_516,X_INTRODUCED_518,X_INTRODUCED_520,X_INTRODUCED_522],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_525,X_INTRODUCED_527,X_INTRODUCED_529,X_INTRODUCED_531],0); -constraint int_lin_le(X_INTRODUCED_538,[X_INTRODUCED_526,X_INTRODUCED_528,X_INTRODUCED_530,X_INTRODUCED_532],0); -constraint int_lin_eq(X_INTRODUCED_633,X_INTRODUCED_632,0):: defines_var(X_INTRODUCED_184); -constraint int_lin_eq(X_INTRODUCED_633,X_INTRODUCED_636,0):: defines_var(X_INTRODUCED_185); -constraint int_lin_eq(X_INTRODUCED_633,X_INTRODUCED_639,0):: defines_var(X_INTRODUCED_186); -constraint int_lin_eq(X_INTRODUCED_633,X_INTRODUCED_642,0):: defines_var(X_INTRODUCED_187); -constraint int_lin_eq(X_INTRODUCED_633,X_INTRODUCED_645,0):: defines_var(X_INTRODUCED_188); -constraint int_lin_eq(X_INTRODUCED_633,X_INTRODUCED_648,0):: defines_var(X_INTRODUCED_189); -constraint int_lin_eq(X_INTRODUCED_633,X_INTRODUCED_651,0):: defines_var(X_INTRODUCED_190); -constraint int_lin_eq(X_INTRODUCED_633,X_INTRODUCED_654,0):: defines_var(X_INTRODUCED_191); -constraint int_lin_eq(X_INTRODUCED_657,[X_INTRODUCED_192,X_INTRODUCED_365,X_INTRODUCED_385,X_INTRODUCED_395,X_INTRODUCED_405,X_INTRODUCED_435,X_INTRODUCED_525],0):: defines_var(X_INTRODUCED_192); -constraint 
int_lin_eq(X_INTRODUCED_657,[X_INTRODUCED_193,X_INTRODUCED_366,X_INTRODUCED_386,X_INTRODUCED_396,X_INTRODUCED_406,X_INTRODUCED_436,X_INTRODUCED_526],0):: defines_var(X_INTRODUCED_193); -constraint int_lin_eq(X_INTRODUCED_657,[X_INTRODUCED_194,X_INTRODUCED_367,X_INTRODUCED_387,X_INTRODUCED_397,X_INTRODUCED_407,X_INTRODUCED_437,X_INTRODUCED_527],0):: defines_var(X_INTRODUCED_194); -constraint int_lin_eq(X_INTRODUCED_657,[X_INTRODUCED_195,X_INTRODUCED_368,X_INTRODUCED_388,X_INTRODUCED_398,X_INTRODUCED_408,X_INTRODUCED_438,X_INTRODUCED_528],0):: defines_var(X_INTRODUCED_195); -constraint int_lin_eq(X_INTRODUCED_657,[X_INTRODUCED_196,X_INTRODUCED_369,X_INTRODUCED_389,X_INTRODUCED_399,X_INTRODUCED_409,X_INTRODUCED_439,X_INTRODUCED_529],0):: defines_var(X_INTRODUCED_196); -constraint int_lin_eq(X_INTRODUCED_657,[X_INTRODUCED_197,X_INTRODUCED_370,X_INTRODUCED_390,X_INTRODUCED_400,X_INTRODUCED_410,X_INTRODUCED_440,X_INTRODUCED_530],0):: defines_var(X_INTRODUCED_197); -constraint int_lin_eq(X_INTRODUCED_657,[X_INTRODUCED_198,X_INTRODUCED_371,X_INTRODUCED_391,X_INTRODUCED_401,X_INTRODUCED_411,X_INTRODUCED_441,X_INTRODUCED_531],0):: defines_var(X_INTRODUCED_198); -constraint int_lin_eq(X_INTRODUCED_657,[X_INTRODUCED_199,X_INTRODUCED_372,X_INTRODUCED_392,X_INTRODUCED_402,X_INTRODUCED_412,X_INTRODUCED_442,X_INTRODUCED_532],0):: defines_var(X_INTRODUCED_199); -constraint int_lin_eq(X_INTRODUCED_675,X_INTRODUCED_674,0):: defines_var(X_INTRODUCED_200); -constraint int_lin_eq(X_INTRODUCED_675,X_INTRODUCED_678,0):: defines_var(X_INTRODUCED_201); -constraint int_lin_eq(X_INTRODUCED_675,X_INTRODUCED_681,0):: defines_var(X_INTRODUCED_202); -constraint int_lin_eq(X_INTRODUCED_675,X_INTRODUCED_684,0):: defines_var(X_INTRODUCED_203); -constraint int_lin_eq(X_INTRODUCED_675,X_INTRODUCED_687,0):: defines_var(X_INTRODUCED_204); -constraint int_lin_eq(X_INTRODUCED_675,X_INTRODUCED_690,0):: defines_var(X_INTRODUCED_205); -constraint int_lin_eq(X_INTRODUCED_675,X_INTRODUCED_693,0):: 
defines_var(X_INTRODUCED_206); -constraint int_lin_eq(X_INTRODUCED_675,X_INTRODUCED_696,0):: defines_var(X_INTRODUCED_207); -constraint int_lin_eq(X_INTRODUCED_699,[X_INTRODUCED_208,X_INTRODUCED_315,X_INTRODUCED_335,X_INTRODUCED_365,X_INTRODUCED_375,X_INTRODUCED_445,X_INTRODUCED_475,X_INTRODUCED_485],0):: defines_var(X_INTRODUCED_208); -constraint int_lin_eq(X_INTRODUCED_699,[X_INTRODUCED_209,X_INTRODUCED_316,X_INTRODUCED_336,X_INTRODUCED_366,X_INTRODUCED_376,X_INTRODUCED_446,X_INTRODUCED_476,X_INTRODUCED_486],0):: defines_var(X_INTRODUCED_209); -constraint int_lin_eq(X_INTRODUCED_699,[X_INTRODUCED_210,X_INTRODUCED_317,X_INTRODUCED_337,X_INTRODUCED_367,X_INTRODUCED_377,X_INTRODUCED_447,X_INTRODUCED_477,X_INTRODUCED_487],0):: defines_var(X_INTRODUCED_210); -constraint int_lin_eq(X_INTRODUCED_699,[X_INTRODUCED_211,X_INTRODUCED_318,X_INTRODUCED_338,X_INTRODUCED_368,X_INTRODUCED_378,X_INTRODUCED_448,X_INTRODUCED_478,X_INTRODUCED_488],0):: defines_var(X_INTRODUCED_211); -constraint int_lin_eq(X_INTRODUCED_699,[X_INTRODUCED_212,X_INTRODUCED_319,X_INTRODUCED_339,X_INTRODUCED_369,X_INTRODUCED_379,X_INTRODUCED_449,X_INTRODUCED_479,X_INTRODUCED_489],0):: defines_var(X_INTRODUCED_212); -constraint int_lin_eq(X_INTRODUCED_699,[X_INTRODUCED_213,X_INTRODUCED_320,X_INTRODUCED_340,X_INTRODUCED_370,X_INTRODUCED_380,X_INTRODUCED_450,X_INTRODUCED_480,X_INTRODUCED_490],0):: defines_var(X_INTRODUCED_213); -constraint int_lin_eq(X_INTRODUCED_699,[X_INTRODUCED_214,X_INTRODUCED_321,X_INTRODUCED_341,X_INTRODUCED_371,X_INTRODUCED_381,X_INTRODUCED_451,X_INTRODUCED_481,X_INTRODUCED_491],0):: defines_var(X_INTRODUCED_214); -constraint int_lin_eq(X_INTRODUCED_699,[X_INTRODUCED_215,X_INTRODUCED_322,X_INTRODUCED_342,X_INTRODUCED_372,X_INTRODUCED_382,X_INTRODUCED_452,X_INTRODUCED_482,X_INTRODUCED_492],0):: defines_var(X_INTRODUCED_215); -constraint int_lin_eq(X_INTRODUCED_716,[X_INTRODUCED_216,X_INTRODUCED_315,X_INTRODUCED_375,X_INTRODUCED_395,X_INTRODUCED_475,X_INTRODUCED_485],0):: 
defines_var(X_INTRODUCED_216); -constraint int_lin_eq(X_INTRODUCED_716,[X_INTRODUCED_217,X_INTRODUCED_316,X_INTRODUCED_376,X_INTRODUCED_396,X_INTRODUCED_476,X_INTRODUCED_486],0):: defines_var(X_INTRODUCED_217); -constraint int_lin_eq(X_INTRODUCED_716,[X_INTRODUCED_218,X_INTRODUCED_317,X_INTRODUCED_377,X_INTRODUCED_397,X_INTRODUCED_477,X_INTRODUCED_487],0):: defines_var(X_INTRODUCED_218); -constraint int_lin_eq(X_INTRODUCED_716,[X_INTRODUCED_219,X_INTRODUCED_318,X_INTRODUCED_378,X_INTRODUCED_398,X_INTRODUCED_478,X_INTRODUCED_488],0):: defines_var(X_INTRODUCED_219); -constraint int_lin_eq(X_INTRODUCED_716,[X_INTRODUCED_220,X_INTRODUCED_319,X_INTRODUCED_379,X_INTRODUCED_399,X_INTRODUCED_479,X_INTRODUCED_489],0):: defines_var(X_INTRODUCED_220); -constraint int_lin_eq(X_INTRODUCED_716,[X_INTRODUCED_221,X_INTRODUCED_320,X_INTRODUCED_380,X_INTRODUCED_400,X_INTRODUCED_480,X_INTRODUCED_490],0):: defines_var(X_INTRODUCED_221); -constraint int_lin_eq(X_INTRODUCED_716,[X_INTRODUCED_222,X_INTRODUCED_321,X_INTRODUCED_381,X_INTRODUCED_401,X_INTRODUCED_481,X_INTRODUCED_491],0):: defines_var(X_INTRODUCED_222); -constraint int_lin_eq(X_INTRODUCED_716,[X_INTRODUCED_223,X_INTRODUCED_322,X_INTRODUCED_382,X_INTRODUCED_402,X_INTRODUCED_482,X_INTRODUCED_492],0):: defines_var(X_INTRODUCED_223); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_232,X_INTRODUCED_425,X_INTRODUCED_495,X_INTRODUCED_505,X_INTRODUCED_515],0):: defines_var(X_INTRODUCED_232); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_233,X_INTRODUCED_426,X_INTRODUCED_496,X_INTRODUCED_506,X_INTRODUCED_516],0):: defines_var(X_INTRODUCED_233); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_234,X_INTRODUCED_427,X_INTRODUCED_497,X_INTRODUCED_507,X_INTRODUCED_517],0):: defines_var(X_INTRODUCED_234); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_235,X_INTRODUCED_428,X_INTRODUCED_498,X_INTRODUCED_508,X_INTRODUCED_518],0):: defines_var(X_INTRODUCED_235); -constraint 
int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_236,X_INTRODUCED_429,X_INTRODUCED_499,X_INTRODUCED_509,X_INTRODUCED_519],0):: defines_var(X_INTRODUCED_236); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_237,X_INTRODUCED_430,X_INTRODUCED_500,X_INTRODUCED_510,X_INTRODUCED_520],0):: defines_var(X_INTRODUCED_237); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_238,X_INTRODUCED_431,X_INTRODUCED_501,X_INTRODUCED_511,X_INTRODUCED_521],0):: defines_var(X_INTRODUCED_238); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_239,X_INTRODUCED_432,X_INTRODUCED_502,X_INTRODUCED_512,X_INTRODUCED_522],0):: defines_var(X_INTRODUCED_239); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_240,X_INTRODUCED_425,X_INTRODUCED_495,X_INTRODUCED_505,X_INTRODUCED_515],0):: defines_var(X_INTRODUCED_240); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_241,X_INTRODUCED_426,X_INTRODUCED_496,X_INTRODUCED_506,X_INTRODUCED_516],0):: defines_var(X_INTRODUCED_241); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_242,X_INTRODUCED_427,X_INTRODUCED_497,X_INTRODUCED_507,X_INTRODUCED_517],0):: defines_var(X_INTRODUCED_242); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_243,X_INTRODUCED_428,X_INTRODUCED_498,X_INTRODUCED_508,X_INTRODUCED_518],0):: defines_var(X_INTRODUCED_243); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_244,X_INTRODUCED_429,X_INTRODUCED_499,X_INTRODUCED_509,X_INTRODUCED_519],0):: defines_var(X_INTRODUCED_244); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_245,X_INTRODUCED_430,X_INTRODUCED_500,X_INTRODUCED_510,X_INTRODUCED_520],0):: defines_var(X_INTRODUCED_245); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_246,X_INTRODUCED_431,X_INTRODUCED_501,X_INTRODUCED_511,X_INTRODUCED_521],0):: defines_var(X_INTRODUCED_246); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_247,X_INTRODUCED_432,X_INTRODUCED_502,X_INTRODUCED_512,X_INTRODUCED_522],0):: defines_var(X_INTRODUCED_247); -constraint 
int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_248,X_INTRODUCED_335,X_INTRODUCED_355,X_INTRODUCED_425,X_INTRODUCED_435],0):: defines_var(X_INTRODUCED_248); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_249,X_INTRODUCED_336,X_INTRODUCED_356,X_INTRODUCED_426,X_INTRODUCED_436],0):: defines_var(X_INTRODUCED_249); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_250,X_INTRODUCED_337,X_INTRODUCED_357,X_INTRODUCED_427,X_INTRODUCED_437],0):: defines_var(X_INTRODUCED_250); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_251,X_INTRODUCED_338,X_INTRODUCED_358,X_INTRODUCED_428,X_INTRODUCED_438],0):: defines_var(X_INTRODUCED_251); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_252,X_INTRODUCED_339,X_INTRODUCED_359,X_INTRODUCED_429,X_INTRODUCED_439],0):: defines_var(X_INTRODUCED_252); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_253,X_INTRODUCED_340,X_INTRODUCED_360,X_INTRODUCED_430,X_INTRODUCED_440],0):: defines_var(X_INTRODUCED_253); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_254,X_INTRODUCED_341,X_INTRODUCED_361,X_INTRODUCED_431,X_INTRODUCED_441],0):: defines_var(X_INTRODUCED_254); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_255,X_INTRODUCED_342,X_INTRODUCED_362,X_INTRODUCED_432,X_INTRODUCED_442],0):: defines_var(X_INTRODUCED_255); -constraint int_lin_eq(X_INTRODUCED_782,[X_INTRODUCED_256,X_INTRODUCED_335,X_INTRODUCED_435],0):: defines_var(X_INTRODUCED_256); -constraint int_lin_eq(X_INTRODUCED_782,[X_INTRODUCED_257,X_INTRODUCED_336,X_INTRODUCED_436],0):: defines_var(X_INTRODUCED_257); -constraint int_lin_eq(X_INTRODUCED_782,[X_INTRODUCED_258,X_INTRODUCED_337,X_INTRODUCED_437],0):: defines_var(X_INTRODUCED_258); -constraint int_lin_eq(X_INTRODUCED_782,[X_INTRODUCED_259,X_INTRODUCED_338,X_INTRODUCED_438],0):: defines_var(X_INTRODUCED_259); -constraint int_lin_eq(X_INTRODUCED_782,[X_INTRODUCED_260,X_INTRODUCED_339,X_INTRODUCED_439],0):: defines_var(X_INTRODUCED_260); -constraint 
int_lin_eq(X_INTRODUCED_782,[X_INTRODUCED_261,X_INTRODUCED_340,X_INTRODUCED_440],0):: defines_var(X_INTRODUCED_261); -constraint int_lin_eq(X_INTRODUCED_782,[X_INTRODUCED_262,X_INTRODUCED_341,X_INTRODUCED_441],0):: defines_var(X_INTRODUCED_262); -constraint int_lin_eq(X_INTRODUCED_782,[X_INTRODUCED_263,X_INTRODUCED_342,X_INTRODUCED_442],0):: defines_var(X_INTRODUCED_263); -constraint maximum_int(X_INTRODUCED_264,X_INTRODUCED_798); -constraint maximum_int(X_INTRODUCED_265,X_INTRODUCED_799); -constraint maximum_int(X_INTRODUCED_266,X_INTRODUCED_800); -constraint maximum_int(X_INTRODUCED_267,X_INTRODUCED_801); -constraint maximum_int(X_INTRODUCED_268,X_INTRODUCED_802); -constraint maximum_int(X_INTRODUCED_269,X_INTRODUCED_803); -constraint maximum_int(X_INTRODUCED_270,X_INTRODUCED_804); -constraint maximum_int(X_INTRODUCED_271,X_INTRODUCED_805); -constraint maximum_int(X_INTRODUCED_272,X_INTRODUCED_806); -constraint maximum_int(X_INTRODUCED_273,X_INTRODUCED_807); -constraint minimum_int(X_INTRODUCED_274,X_INTRODUCED_798); -constraint minimum_int(X_INTRODUCED_275,X_INTRODUCED_799); -constraint minimum_int(X_INTRODUCED_276,X_INTRODUCED_800); -constraint minimum_int(X_INTRODUCED_277,X_INTRODUCED_801); -constraint minimum_int(X_INTRODUCED_278,X_INTRODUCED_802); -constraint minimum_int(X_INTRODUCED_279,X_INTRODUCED_803); -constraint minimum_int(X_INTRODUCED_280,X_INTRODUCED_804); -constraint minimum_int(X_INTRODUCED_281,X_INTRODUCED_805); -constraint minimum_int(X_INTRODUCED_282,X_INTRODUCED_806); -constraint minimum_int(X_INTRODUCED_283,X_INTRODUCED_807); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_284,X_INTRODUCED_184,X_INTRODUCED_186,X_INTRODUCED_188,X_INTRODUCED_190],0):: defines_var(X_INTRODUCED_284); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_285,X_INTRODUCED_193,X_INTRODUCED_195,X_INTRODUCED_197,X_INTRODUCED_199],0):: defines_var(X_INTRODUCED_285); -constraint 
int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_286,X_INTRODUCED_200,X_INTRODUCED_202,X_INTRODUCED_204,X_INTRODUCED_206],0):: defines_var(X_INTRODUCED_286); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_287,X_INTRODUCED_209,X_INTRODUCED_211,X_INTRODUCED_213,X_INTRODUCED_215],0):: defines_var(X_INTRODUCED_287); -constraint int_lin_eq(X_INTRODUCED_741,[X_INTRODUCED_288,X_INTRODUCED_217,X_INTRODUCED_219,X_INTRODUCED_221,X_INTRODUCED_223],0):: defines_var(X_INTRODUCED_288); -constraint int_lin_eq([1,-1,-1],[X_INTRODUCED_289,X_INTRODUCED_438,X_INTRODUCED_440],0):: defines_var(X_INTRODUCED_289); -constraint int_lin_eq(X_INTRODUCED_815,[X_INTRODUCED_290,X_INTRODUCED_232,X_INTRODUCED_233,X_INTRODUCED_234,X_INTRODUCED_235,X_INTRODUCED_236,X_INTRODUCED_237,X_INTRODUCED_238,X_INTRODUCED_239],0):: defines_var(X_INTRODUCED_290); -constraint int_lin_eq(X_INTRODUCED_815,[X_INTRODUCED_291,X_INTRODUCED_240,X_INTRODUCED_241,X_INTRODUCED_242,X_INTRODUCED_243,X_INTRODUCED_244,X_INTRODUCED_245,X_INTRODUCED_246,X_INTRODUCED_247],0):: defines_var(X_INTRODUCED_291); -constraint int_lin_eq(X_INTRODUCED_815,[X_INTRODUCED_292,X_INTRODUCED_248,X_INTRODUCED_249,X_INTRODUCED_250,X_INTRODUCED_251,X_INTRODUCED_252,X_INTRODUCED_253,X_INTRODUCED_254,X_INTRODUCED_255],0):: defines_var(X_INTRODUCED_292); -constraint int_lin_eq(X_INTRODUCED_815,[X_INTRODUCED_293,X_INTRODUCED_256,X_INTRODUCED_257,X_INTRODUCED_258,X_INTRODUCED_259,X_INTRODUCED_260,X_INTRODUCED_261,X_INTRODUCED_262,X_INTRODUCED_263],0):: defines_var(X_INTRODUCED_293); -constraint int_max(0,X_INTRODUCED_823,X_INTRODUCED_824); -constraint int_max(0,X_INTRODUCED_828,X_INTRODUCED_829); -constraint int_max(0,X_INTRODUCED_833,X_INTRODUCED_834); -constraint int_max(0,X_INTRODUCED_838,X_INTRODUCED_839); -constraint int_max(0,X_INTRODUCED_843,X_INTRODUCED_844); -constraint int_max(0,X_INTRODUCED_848,X_INTRODUCED_849); -constraint int_max(0,X_INTRODUCED_853,X_INTRODUCED_854); -constraint int_max(0,X_INTRODUCED_858,X_INTRODUCED_859); -constraint 
int_div(X_INTRODUCED_861,3,X_INTRODUCED_863):: defines_var(X_INTRODUCED_863); -constraint int_div(X_INTRODUCED_864,3,X_INTRODUCED_866):: defines_var(X_INTRODUCED_866); -constraint int_div(X_INTRODUCED_867,3,X_INTRODUCED_869):: defines_var(X_INTRODUCED_869); -constraint int_div(X_INTRODUCED_870,3,X_INTRODUCED_872):: defines_var(X_INTRODUCED_872); -constraint int_div(X_INTRODUCED_873,3,X_INTRODUCED_875):: defines_var(X_INTRODUCED_875); -constraint int_div(X_INTRODUCED_876,3,X_INTRODUCED_878):: defines_var(X_INTRODUCED_878); -constraint int_div(X_INTRODUCED_879,3,X_INTRODUCED_881):: defines_var(X_INTRODUCED_881); -constraint int_div(X_INTRODUCED_882,3,X_INTRODUCED_884):: defines_var(X_INTRODUCED_884); -constraint int_lin_eq(X_INTRODUCED_909,X_INTRODUCED_908,0):: defines_var(objective); -constraint int_le(0,0); -constraint int_le(0,0); -constraint int_le(0,0); -constraint int_le(0,0); -constraint int_le(0,X_INTRODUCED_264); -constraint int_le(0,X_INTRODUCED_265); -constraint int_le(0,X_INTRODUCED_266); -constraint int_le(0,X_INTRODUCED_267); -constraint int_le(0,X_INTRODUCED_268); -constraint int_le(0,X_INTRODUCED_269); -constraint int_le(0,X_INTRODUCED_270); -constraint int_le(0,X_INTRODUCED_271); -constraint int_le(0,X_INTRODUCED_272); -constraint int_le(0,X_INTRODUCED_273); -constraint int_le(0,X_INTRODUCED_274); -constraint int_le(0,X_INTRODUCED_275); -constraint int_le(0,X_INTRODUCED_276); -constraint int_le(0,X_INTRODUCED_277); -constraint int_le(0,X_INTRODUCED_278); -constraint int_le(0,X_INTRODUCED_279); -constraint int_le(0,X_INTRODUCED_280); -constraint int_le(0,X_INTRODUCED_281); -constraint int_le(0,X_INTRODUCED_282); -constraint int_le(0,X_INTRODUCED_283); -constraint int_le(0,X_INTRODUCED_289); -constraint int_le(0,objective); -constraint bool2int(X_INTRODUCED_2,X_INTRODUCED_306):: defines_var(X_INTRODUCED_306); -constraint bool2int(X_INTRODUCED_3,X_INTRODUCED_307):: defines_var(X_INTRODUCED_307); -constraint bool2int(X_INTRODUCED_6,X_INTRODUCED_310):: 
defines_var(X_INTRODUCED_310); -constraint bool2int(X_INTRODUCED_7,X_INTRODUCED_311):: defines_var(X_INTRODUCED_311); -constraint bool2int(X_INTRODUCED_8,X_INTRODUCED_315):: defines_var(X_INTRODUCED_315); -constraint bool2int(X_INTRODUCED_9,X_INTRODUCED_316):: defines_var(X_INTRODUCED_316); -constraint bool2int(X_INTRODUCED_10,X_INTRODUCED_317):: defines_var(X_INTRODUCED_317); -constraint bool2int(X_INTRODUCED_11,X_INTRODUCED_318):: defines_var(X_INTRODUCED_318); -constraint bool2int(X_INTRODUCED_12,X_INTRODUCED_319):: defines_var(X_INTRODUCED_319); -constraint bool2int(X_INTRODUCED_13,X_INTRODUCED_320):: defines_var(X_INTRODUCED_320); -constraint bool2int(X_INTRODUCED_20,X_INTRODUCED_329):: defines_var(X_INTRODUCED_329); -constraint bool2int(X_INTRODUCED_21,X_INTRODUCED_330):: defines_var(X_INTRODUCED_330); -constraint bool2int(X_INTRODUCED_22,X_INTRODUCED_331):: defines_var(X_INTRODUCED_331); -constraint bool2int(X_INTRODUCED_23,X_INTRODUCED_332):: defines_var(X_INTRODUCED_332); -constraint bool2int(X_INTRODUCED_24,X_INTRODUCED_335):: defines_var(X_INTRODUCED_335); -constraint bool2int(X_INTRODUCED_25,X_INTRODUCED_336):: defines_var(X_INTRODUCED_336); -constraint bool2int(X_INTRODUCED_26,X_INTRODUCED_337):: defines_var(X_INTRODUCED_337); -constraint bool2int(X_INTRODUCED_27,X_INTRODUCED_338):: defines_var(X_INTRODUCED_338); -constraint bool2int(X_INTRODUCED_28,X_INTRODUCED_339):: defines_var(X_INTRODUCED_339); -constraint bool2int(X_INTRODUCED_29,X_INTRODUCED_340):: defines_var(X_INTRODUCED_340); -constraint bool2int(X_INTRODUCED_32,X_INTRODUCED_345):: defines_var(X_INTRODUCED_345); -constraint bool2int(X_INTRODUCED_33,X_INTRODUCED_346):: defines_var(X_INTRODUCED_346); -constraint bool2int(X_INTRODUCED_34,X_INTRODUCED_347):: defines_var(X_INTRODUCED_347); -constraint bool2int(X_INTRODUCED_35,X_INTRODUCED_348):: defines_var(X_INTRODUCED_348); -constraint bool2int(X_INTRODUCED_40,X_INTRODUCED_355):: defines_var(X_INTRODUCED_355); -constraint 
bool2int(X_INTRODUCED_41,X_INTRODUCED_356):: defines_var(X_INTRODUCED_356); -constraint bool2int(X_INTRODUCED_46,X_INTRODUCED_361):: defines_var(X_INTRODUCED_361); -constraint bool2int(X_INTRODUCED_47,X_INTRODUCED_362):: defines_var(X_INTRODUCED_362); -constraint bool2int(X_INTRODUCED_50,X_INTRODUCED_367):: defines_var(X_INTRODUCED_367); -constraint bool2int(X_INTRODUCED_51,X_INTRODUCED_368):: defines_var(X_INTRODUCED_368); -constraint bool2int(X_INTRODUCED_52,X_INTRODUCED_369):: defines_var(X_INTRODUCED_369); -constraint bool2int(X_INTRODUCED_53,X_INTRODUCED_370):: defines_var(X_INTRODUCED_370); -constraint bool2int(X_INTRODUCED_58,X_INTRODUCED_377):: defines_var(X_INTRODUCED_377); -constraint bool2int(X_INTRODUCED_59,X_INTRODUCED_378):: defines_var(X_INTRODUCED_378); -constraint bool2int(X_INTRODUCED_60,X_INTRODUCED_379):: defines_var(X_INTRODUCED_379); -constraint bool2int(X_INTRODUCED_61,X_INTRODUCED_380):: defines_var(X_INTRODUCED_380); -constraint bool2int(X_INTRODUCED_62,X_INTRODUCED_381):: defines_var(X_INTRODUCED_381); -constraint bool2int(X_INTRODUCED_63,X_INTRODUCED_382):: defines_var(X_INTRODUCED_382); -constraint bool2int(X_INTRODUCED_66,X_INTRODUCED_387):: defines_var(X_INTRODUCED_387); -constraint bool2int(X_INTRODUCED_67,X_INTRODUCED_388):: defines_var(X_INTRODUCED_388); -constraint bool2int(X_INTRODUCED_76,X_INTRODUCED_399):: defines_var(X_INTRODUCED_399); -constraint bool2int(X_INTRODUCED_77,X_INTRODUCED_400):: defines_var(X_INTRODUCED_400); -constraint bool2int(X_INTRODUCED_78,X_INTRODUCED_401):: defines_var(X_INTRODUCED_401); -constraint bool2int(X_INTRODUCED_79,X_INTRODUCED_402):: defines_var(X_INTRODUCED_402); -constraint bool2int(X_INTRODUCED_82,X_INTRODUCED_407):: defines_var(X_INTRODUCED_407); -constraint bool2int(X_INTRODUCED_83,X_INTRODUCED_408):: defines_var(X_INTRODUCED_408); -constraint bool2int(X_INTRODUCED_88,X_INTRODUCED_415):: defines_var(X_INTRODUCED_415); -constraint bool2int(X_INTRODUCED_89,X_INTRODUCED_416):: 
defines_var(X_INTRODUCED_416); -constraint bool2int(X_INTRODUCED_92,X_INTRODUCED_419):: defines_var(X_INTRODUCED_419); -constraint bool2int(X_INTRODUCED_93,X_INTRODUCED_420):: defines_var(X_INTRODUCED_420); -constraint bool2int(X_INTRODUCED_102,X_INTRODUCED_431):: defines_var(X_INTRODUCED_431); -constraint bool2int(X_INTRODUCED_103,X_INTRODUCED_432):: defines_var(X_INTRODUCED_432); -constraint bool2int(X_INTRODUCED_106,X_INTRODUCED_437):: defines_var(X_INTRODUCED_437); -constraint bool2int(X_INTRODUCED_107,X_INTRODUCED_438):: defines_var(X_INTRODUCED_438); -constraint bool2int(X_INTRODUCED_108,X_INTRODUCED_439):: defines_var(X_INTRODUCED_439); -constraint bool2int(X_INTRODUCED_109,X_INTRODUCED_440):: defines_var(X_INTRODUCED_440); -constraint bool2int(X_INTRODUCED_116,X_INTRODUCED_449):: defines_var(X_INTRODUCED_449); -constraint bool2int(X_INTRODUCED_117,X_INTRODUCED_450):: defines_var(X_INTRODUCED_450); -constraint bool2int(X_INTRODUCED_118,X_INTRODUCED_451):: defines_var(X_INTRODUCED_451); -constraint bool2int(X_INTRODUCED_119,X_INTRODUCED_452):: defines_var(X_INTRODUCED_452); -constraint bool2int(X_INTRODUCED_122,X_INTRODUCED_457):: defines_var(X_INTRODUCED_457); -constraint bool2int(X_INTRODUCED_123,X_INTRODUCED_458):: defines_var(X_INTRODUCED_458); -constraint bool2int(X_INTRODUCED_126,X_INTRODUCED_461):: defines_var(X_INTRODUCED_461); -constraint bool2int(X_INTRODUCED_127,X_INTRODUCED_462):: defines_var(X_INTRODUCED_462); -constraint bool2int(X_INTRODUCED_128,X_INTRODUCED_465):: defines_var(X_INTRODUCED_465); -constraint bool2int(X_INTRODUCED_129,X_INTRODUCED_466):: defines_var(X_INTRODUCED_466); -constraint bool2int(X_INTRODUCED_130,X_INTRODUCED_467):: defines_var(X_INTRODUCED_467); -constraint bool2int(X_INTRODUCED_131,X_INTRODUCED_468):: defines_var(X_INTRODUCED_468); -constraint bool2int(X_INTRODUCED_132,X_INTRODUCED_469):: defines_var(X_INTRODUCED_469); -constraint bool2int(X_INTRODUCED_133,X_INTRODUCED_470):: defines_var(X_INTRODUCED_470); -constraint 
bool2int(X_INTRODUCED_138,X_INTRODUCED_477):: defines_var(X_INTRODUCED_477); -constraint bool2int(X_INTRODUCED_139,X_INTRODUCED_478):: defines_var(X_INTRODUCED_478); -constraint bool2int(X_INTRODUCED_142,X_INTRODUCED_481):: defines_var(X_INTRODUCED_481); -constraint bool2int(X_INTRODUCED_143,X_INTRODUCED_482):: defines_var(X_INTRODUCED_482); -constraint bool2int(X_INTRODUCED_148,X_INTRODUCED_489):: defines_var(X_INTRODUCED_489); -constraint bool2int(X_INTRODUCED_149,X_INTRODUCED_490):: defines_var(X_INTRODUCED_490); -constraint bool2int(X_INTRODUCED_152,X_INTRODUCED_495):: defines_var(X_INTRODUCED_495); -constraint bool2int(X_INTRODUCED_153,X_INTRODUCED_496):: defines_var(X_INTRODUCED_496); -constraint bool2int(X_INTRODUCED_156,X_INTRODUCED_499):: defines_var(X_INTRODUCED_499); -constraint bool2int(X_INTRODUCED_157,X_INTRODUCED_500):: defines_var(X_INTRODUCED_500); -constraint bool2int(X_INTRODUCED_162,X_INTRODUCED_507):: defines_var(X_INTRODUCED_507); -constraint bool2int(X_INTRODUCED_163,X_INTRODUCED_508):: defines_var(X_INTRODUCED_508); -constraint bool2int(X_INTRODUCED_166,X_INTRODUCED_511):: defines_var(X_INTRODUCED_511); -constraint bool2int(X_INTRODUCED_167,X_INTRODUCED_512):: defines_var(X_INTRODUCED_512); -constraint bool2int(X_INTRODUCED_172,X_INTRODUCED_519):: defines_var(X_INTRODUCED_519); -constraint bool2int(X_INTRODUCED_173,X_INTRODUCED_520):: defines_var(X_INTRODUCED_520); -constraint bool2int(X_INTRODUCED_178,X_INTRODUCED_527):: defines_var(X_INTRODUCED_527); -constraint bool2int(X_INTRODUCED_179,X_INTRODUCED_528):: defines_var(X_INTRODUCED_528); -constraint bool2int(X_INTRODUCED_182,X_INTRODUCED_531):: defines_var(X_INTRODUCED_531); -constraint bool2int(X_INTRODUCED_183,X_INTRODUCED_532):: defines_var(X_INTRODUCED_532); -constraint int_lin_eq([-1,-1,-1,-1,-1,-1,-1,-1],[X_INTRODUCED_315,X_INTRODUCED_335,X_INTRODUCED_345,X_INTRODUCED_355,X_INTRODUCED_415,X_INTRODUCED_465,X_INTRODUCED_495,X_INTRODUCED_823],-3):: defines_var(X_INTRODUCED_823); 
-constraint int_lin_eq([-1,-1,-1,-1,-1,-1,-1,-1],[X_INTRODUCED_316,X_INTRODUCED_336,X_INTRODUCED_346,X_INTRODUCED_356,X_INTRODUCED_416,X_INTRODUCED_466,X_INTRODUCED_496,X_INTRODUCED_828],-3):: defines_var(X_INTRODUCED_828); -constraint int_lin_eq([-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],[X_INTRODUCED_306,X_INTRODUCED_317,X_INTRODUCED_337,X_INTRODUCED_347,X_INTRODUCED_367,X_INTRODUCED_377,X_INTRODUCED_387,X_INTRODUCED_407,X_INTRODUCED_437,X_INTRODUCED_457,X_INTRODUCED_467,X_INTRODUCED_477,X_INTRODUCED_507,X_INTRODUCED_527,X_INTRODUCED_833],-3):: defines_var(X_INTRODUCED_833); -constraint int_lin_eq([-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],[X_INTRODUCED_307,X_INTRODUCED_318,X_INTRODUCED_338,X_INTRODUCED_348,X_INTRODUCED_368,X_INTRODUCED_378,X_INTRODUCED_388,X_INTRODUCED_408,X_INTRODUCED_438,X_INTRODUCED_458,X_INTRODUCED_468,X_INTRODUCED_478,X_INTRODUCED_508,X_INTRODUCED_528,X_INTRODUCED_838],-3):: defines_var(X_INTRODUCED_838); -constraint int_lin_eq([-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],[X_INTRODUCED_319,X_INTRODUCED_329,X_INTRODUCED_339,X_INTRODUCED_369,X_INTRODUCED_379,X_INTRODUCED_399,X_INTRODUCED_419,X_INTRODUCED_439,X_INTRODUCED_449,X_INTRODUCED_469,X_INTRODUCED_489,X_INTRODUCED_499,X_INTRODUCED_519,X_INTRODUCED_843],-3):: defines_var(X_INTRODUCED_843); -constraint int_lin_eq([-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],[X_INTRODUCED_320,X_INTRODUCED_330,X_INTRODUCED_340,X_INTRODUCED_370,X_INTRODUCED_380,X_INTRODUCED_400,X_INTRODUCED_420,X_INTRODUCED_440,X_INTRODUCED_450,X_INTRODUCED_470,X_INTRODUCED_490,X_INTRODUCED_500,X_INTRODUCED_520,X_INTRODUCED_848],-3):: defines_var(X_INTRODUCED_848); -constraint int_lin_eq([-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],[X_INTRODUCED_310,X_INTRODUCED_331,X_INTRODUCED_361,X_INTRODUCED_381,X_INTRODUCED_401,X_INTRODUCED_431,X_INTRODUCED_451,X_INTRODUCED_461,X_INTRODUCED_481,X_INTRODUCED_511,X_INTRODUCED_531,X_INTRODUCED_853],-3):: defines_var(X_INTRODUCED_853); -constraint 
int_lin_eq([-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],[X_INTRODUCED_311,X_INTRODUCED_332,X_INTRODUCED_362,X_INTRODUCED_382,X_INTRODUCED_402,X_INTRODUCED_432,X_INTRODUCED_452,X_INTRODUCED_462,X_INTRODUCED_482,X_INTRODUCED_512,X_INTRODUCED_532,X_INTRODUCED_858],-3):: defines_var(X_INTRODUCED_858); -constraint int_lin_eq([100,-1],[X_INTRODUCED_824,X_INTRODUCED_861],0):: defines_var(X_INTRODUCED_861); -constraint int_lin_eq([100,-1],[X_INTRODUCED_829,X_INTRODUCED_864],0):: defines_var(X_INTRODUCED_864); -constraint int_lin_eq([100,-1],[X_INTRODUCED_834,X_INTRODUCED_867],0):: defines_var(X_INTRODUCED_867); -constraint int_lin_eq([100,-1],[X_INTRODUCED_839,X_INTRODUCED_870],0):: defines_var(X_INTRODUCED_870); -constraint int_lin_eq([100,-1],[X_INTRODUCED_844,X_INTRODUCED_873],0):: defines_var(X_INTRODUCED_873); -constraint int_lin_eq([100,-1],[X_INTRODUCED_849,X_INTRODUCED_876],0):: defines_var(X_INTRODUCED_876); -constraint int_lin_eq([100,-1],[X_INTRODUCED_854,X_INTRODUCED_879],0):: defines_var(X_INTRODUCED_879); -constraint int_lin_eq([100,-1],[X_INTRODUCED_859,X_INTRODUCED_882],0):: defines_var(X_INTRODUCED_882); -solve maximize objective; \ No newline at end of file diff --git a/examples/tests/issue117.py b/examples/tests/issue117.py deleted file mode 100755 index 68ec9d59a48..00000000000 --- a/examples/tests/issue117.py +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env python3 -from collections import namedtuple -from ortools.constraint_solver import pywrapcp - -VEHICLE_COUNT = 30 -VEHICLE_CAPACITY = 200 -Customer = namedtuple("Customer", ['index', 'demand', 'x', 'y']) - -print('Init') - -customers = list() -customers.append(Customer(0, 0, 0, 0)) -customers.append(Customer(1, 1, 1.0, 1.0)) -customers.append(Customer(1, 1, 2.0, 2.0)) -customer_count = len(customers) - -manager = pywrapcp.RoutingIndexManager(3, VEHICLE_COUNT, 0) -routing = pywrapcp.RoutingModel(manager) - -print('Demand Constraint') -demands = [] -for i in range(0, customer_count): - 
demands.append(customers[i][1]) -routing.AddVectorDimension(demands, VEHICLE_CAPACITY, True, "Demand") - -print('Adding Costs') - - -def distance_callback(from_index, to_index): - #static just for the sake of the example - return 1 - -transit_callback_index = routing.RegisterTransitCallback(distance_callback) -routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index) - -routing.CloseModel() - -assignment = routing.Solve(None) - -# Inspect solution and extract routes -routes = [] -for i in range(0, routing.vehicles()): - - route_number = i - routes.append([]) - node = routing.Start(route_number) - route = [] - route.append(0) - if routing.IsVehicleUsed(assignment, i): - while True: - node = assignment.Value(routing.NextVar(node)) - - if not routing.IsEnd(node): - route.append(int(node)) - else: - break - - route.append(0) - routes[route_number].append(route) - -#This are the routes as list of lists -routes = [el[0] for el in routes] - -#Now try to read the routes into a new assigment object fails -assignment2 = routing.ReadAssignmentFromRoutes(routes, True) diff --git a/examples/tests/issue1231.py b/examples/tests/issue1231.py deleted file mode 100755 index e78a2dc933a..00000000000 --- a/examples/tests/issue1231.py +++ /dev/null @@ -1,79 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2010-2025 Google -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -""" -Cryptarithmetic puzzle - -First attempt to solve equation CP + IS + FUN = TRUE -where each letter represents a unique digit. - -This problem has 72 different solutions in base 10. -""" - -from ortools.constraint_solver import pywrapcp -from os import abort - -def CPIsFun(): - # Constraint programming engine - solver = pywrapcp.Solver('CP is fun!'); - - kBase = 10 - - # Decision variables. - digits = list(range(0, kBase)) - digits_without_zero = list(range(1, kBase)) - - c = solver.IntVar(digits_without_zero, 'C'); - p = solver.IntVar(digits, 'P'); - i = solver.IntVar(digits_without_zero, 'I'); - s = solver.IntVar(digits, 'S'); - f = solver.IntVar(digits_without_zero, 'F'); - u = solver.IntVar(digits, 'U'); - n = solver.IntVar(digits, 'N'); - t = solver.IntVar(digits_without_zero, 'T'); - r = solver.IntVar(digits, 'R'); - e = solver.IntVar(digits, 'E'); - - # We need to group variables in a list to use the constraint AllDifferent. - letters = [c, p, i, s, f, u, n, t, r, e] - - # Verify that we have enough digits. - assert kBase >= len(letters) - - # Define constraints. - solver.Add(solver.AllDifferent(letters)) - - # CP + IS + FUN = TRUE - solver.Add (p + s + n + kBase * (c + i + u) + kBase * kBase * f == - e + kBase * u + kBase * kBase * r + kBase * kBase * kBase * t) - - db = solver.Phase(letters, solver.INT_VAR_DEFAULT, - solver.INT_VALUE_DEFAULT) - solver.NewSearch(db) - - while solver.NextSolution(): - print(letters) - # Is CP + IS + FUN = TRUE? 
- assert (kBase*c.Value() + p.Value() + kBase*i.Value() + s.Value() + - kBase*kBase*f.Value() + kBase*u.Value() + n.Value() == - kBase*kBase*kBase*t.Value() + kBase*kBase*r.Value() + - kBase*u.Value() + e.Value()) - - solver.EndSearch() - - return - - -if __name__ == '__main__': - CPIsFun() diff --git a/examples/tests/issue128.py b/examples/tests/issue128.py deleted file mode 100755 index 71286d5cf46..00000000000 --- a/examples/tests/issue128.py +++ /dev/null @@ -1,157 +0,0 @@ -#!/usr/bin/env python3 -from ortools.constraint_solver import pywrapcp - -def test_v0(): - print('test_v0') - solver = pywrapcp.Solver('') - - # we have two tasks of durations 4 and 7 - task1 = solver.FixedDurationIntervalVar(0, 5, 4, False, "task1") - task2 = solver.FixedDurationIntervalVar(0, 5, 7, False, "task2") - tasks = [task1, task2] - - # to each task, a post task of duration 64 is attached - postTask1 = solver.FixedDurationIntervalVar(4, 74 + 64, 64, False, "postTask1") - postTask2 = solver.FixedDurationIntervalVar(4, 77 + 64, 64, False, "postTask2") - postTasks = [postTask1, postTask2] - - solver.Add(postTask1.StartsAtEnd(task1)) - solver.Add(postTask2.StartsAtEnd(task2)) - - # two resources are available for the post tasks. 
There are binary indicator - # variables to determine which task uses which resource - postTask1UsesRes1 = solver.IntVar(0, 1, "post task 1 using resource 1") - postTask1UsesRes2 = solver.IntVar(0, 1, "post task 1 using resource 2") - postTask2UsesRes1 = solver.IntVar(0, 1, "post task 2 using resource 1") - postTask2UsesRes2 = solver.IntVar(0, 1, "post task 2 using resource 2") - - indicators = [postTask1UsesRes1, postTask1UsesRes2, postTask2UsesRes1, postTask2UsesRes2] - - # each post task needs exactly one resource - solver.Add(postTask1UsesRes1 + postTask1UsesRes2 == 1) - solver.Add(postTask2UsesRes1 + postTask2UsesRes2 == 1) - - # each resource cannot be used simultaneously by more than one post task - solver.Add(solver.Cumulative(postTasks, [postTask1UsesRes1, postTask2UsesRes1], 1, "cumul1")) - solver.Add(solver.Cumulative(postTasks, [postTask1UsesRes2, postTask2UsesRes2], 1, "cumul2")) - - # using constant demands instead, the correct solution is found - # solver.Add(solver.Cumulative(postTasks, [0, 1], 1, "")) - # solver.Add(solver.Cumulative(postTasks, [1, 0], 1, "")) - - - # search setup and solving - dbInterval = solver.Phase(tasks + postTasks, solver.INTERVAL_DEFAULT) - dbInt = solver.Phase(indicators, solver.INT_VAR_DEFAULT, solver.INT_VALUE_DEFAULT) - - makespan = solver.Max([task1.EndExpr().Var(), task2.EndExpr().Var()]) - optimize = solver.Optimize(False, makespan, 1) - - solution = solver.Assignment() - solution.Add([t for t in (tasks + postTasks)]) - solution.Add(indicators) - collector = solver.LastSolutionCollector(solution) - phase = solver.Compose([dbInt, dbInterval]) - solver.Solve(phase, [collector, optimize]) - - if collector.SolutionCount() > 0: - for i, task in enumerate(tasks): - print("task {} runs from {} to {}".format( - i, - collector.StartValue(0, task), - collector.EndValue(0, task))) - for i, task in enumerate(postTasks): - print("postTask {} starts at {}".format(i, collector.StartValue(0, task))) - for indicator in indicators: - 
print('{} -> {}'.format(indicator.Name(), collector.Value(0, indicator))) - else: - print('No solution') - -def test_v1(): - print('test_v1') - solver = pywrapcp.Solver('') - - # we have two tasks of durations 4 and 7 - task1 = solver.FixedDurationIntervalVar(0, 5, 4, False, "task1") - task2 = solver.FixedDurationIntervalVar(0, 5, 7, False, "task2") - tasks = [task1, task2] - # Create copies for each resource - task1_r1 = solver.FixedDurationIntervalVar(0, 5, 4, True, "task1_1") - task2_r1 = solver.FixedDurationIntervalVar(0, 5, 7, True, "task2_1") - tasks_r1 = [task1_r1, task2_r1] - task1_r2 = solver.FixedDurationIntervalVar(0, 5, 4, True, "task1_2") - task2_r2 = solver.FixedDurationIntervalVar(0, 5, 7, True, "task2_2") - tasks_r2 = [task1_r2, task2_r2] - - - # to each task, a post task of duration 64 is attached - postTask1 = solver.FixedDurationStartSyncedOnEndIntervalVar(task1, 64, 0) - postTask2 = solver.FixedDurationStartSyncedOnEndIntervalVar(task2, 64, 0) - postTasks = [postTask1, postTask2] - - # Create copies for each resource - postTask1_r1 = solver.FixedDurationIntervalVar(4, 9, 64, True, "pTask1_1") - postTask2_r1 = solver.FixedDurationIntervalVar(4, 11, 64, True, "pTask2_1") - postTask1_r2 = solver.FixedDurationIntervalVar(4, 9, 64, True, "pTask1_2") - postTask2_r2 = solver.FixedDurationIntervalVar(4, 11, 64, True, "pTask2_2") - - copies = [ task1_r1, task2_r1, task1_r2, task2_r2, - postTask1_r1, postTask1_r2, postTask2_r1, postTask2_r2 ] - - # each resource cannot be used simultaneously by more than one post task - solver.Add(solver.DisjunctiveConstraint( - [task1_r1, task2_r1, postTask1_r1, postTask2_r1], "disj1")) - solver.Add(solver.DisjunctiveConstraint( - [task1_r2, task2_r2, postTask1_r2, postTask2_r2], "disj1")) - - # Only one resource available - solver.Add(task1_r1.PerformedExpr() + task1_r2.PerformedExpr() == 1) - solver.Add(task2_r1.PerformedExpr() + task2_r2.PerformedExpr() == 1) - solver.Add(postTask1_r1.PerformedExpr() + 
postTask1_r2.PerformedExpr() == 1) - solver.Add(postTask2_r1.PerformedExpr() + postTask2_r2.PerformedExpr() == 1) - - # Sync main task with copies - solver.Add(solver.Cover([task1_r1, task1_r2], task1)) - solver.Add(solver.Cover([task2_r1, task2_r2], task2)) - solver.Add(solver.Cover([postTask1_r1, postTask1_r2], postTask1)) - solver.Add(solver.Cover([postTask2_r1, postTask2_r2], postTask2)) - - # Indicators (no need to add both as they are constrained together) - indicators = [ - task1_r1.PerformedExpr(), task2_r1.PerformedExpr(), - postTask1_r1.PerformedExpr(), postTask2_r1.PerformedExpr()] - - # search setup and solving - dbInterval = solver.Phase(tasks + postTasks, solver.INTERVAL_DEFAULT) - dbInt = solver.Phase( - indicators, solver.INT_VAR_DEFAULT, solver.INT_VALUE_DEFAULT) - - makespan = solver.Max([task1.EndExpr(), task2.EndExpr()]) - optimize = solver.Minimize(makespan, 1) - - solution = solver.Assignment() - solution.Add(tasks) - solution.Add(postTasks) - solution.Add(copies) - solution.AddObjective(makespan) - collector = solver.LastSolutionCollector(solution) - phase = solver.Compose([dbInt, dbInterval]) - solver.Solve(phase, [collector, optimize]) - - if collector.SolutionCount() > 0: - print('solution with makespan', collector.ObjectiveValue(0)) - for task in tasks: - print("task {} runs from {} to {}".format( - task.Name(), - collector.StartValue(0, task), - collector.EndValue(0, task))) - for task in postTasks: - print("postTask {} starts at {}".format( - task.Name(), collector.StartValue(0, task))) - for task in copies: - print(task.Name(), collector.PerformedValue(0, task)) - else: - print('No solution') - -test_v0() -test_v1() diff --git a/examples/tests/issue1303.cc b/examples/tests/issue1303.cc deleted file mode 100644 index 197e36fffa5..00000000000 --- a/examples/tests/issue1303.cc +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2011-2014 Google -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file 
except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "absl/flags/parse.h" -#include "ortools/base/logging.h" -#include "ortools/base/version.h" - -namespace operations_research { -void PrintVersion() { - LOG(INFO) << "Version = " << OrToolsMajorVersion() << "." - << OrToolsMinorVersion(); -} -} // namespace operations_research - -int main(int argc, char** argv) { - absl::ParseCommandLine(argc, argv); - operations_research::PrintVersion(); - return 0; -} diff --git a/examples/tests/issue139.fzn b/examples/tests/issue139.fzn deleted file mode 100644 index eb098a4607c..00000000000 --- a/examples/tests/issue139.fzn +++ /dev/null @@ -1,243 +0,0 @@ -predicate inverse(array [int] of var int: f,array [int] of var int: invf); -predicate array_int_maximum(var int: m,array [int] of var int: x); -array [1..2] of int: X_INTRODUCED_54 = [1,-1]; -array [1..2] of int: X_INTRODUCED_81 = [-1,1]; -array [1..3] of int: X_INTRODUCED_96 = [1,-2,-2]; -array [1..2] of int: X_INTRODUCED_127 = [1,-2]; -array [1..4] of int: X_INTRODUCED_158 = [1,-2,-2,-2]; -var 1..18: X_INTRODUCED_0; -var 1..18: X_INTRODUCED_1; -var 1..18: X_INTRODUCED_2; -var 1..18: X_INTRODUCED_3; -var 1..18: X_INTRODUCED_4; -var 1..18: X_INTRODUCED_5; -var 1..18: X_INTRODUCED_6; -var 1..18: X_INTRODUCED_7; -var 1..18: X_INTRODUCED_8; -var 1..18: X_INTRODUCED_9; -var 1..18: X_INTRODUCED_10; -var 1..18: X_INTRODUCED_11; -var 1..18: X_INTRODUCED_12; -var 1..18: X_INTRODUCED_13; -var 1..18: X_INTRODUCED_14; -var 1..18: X_INTRODUCED_15; -var 1..18: X_INTRODUCED_16; -var 1..18: X_INTRODUCED_17; -var bool: 
X_INTRODUCED_55 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_56 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_57 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_58 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_59 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_60 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_61 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_62 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_63 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_64 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_65 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_66 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_67 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_68 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_69 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_70 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_71 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_72 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_73 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_74 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_75 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_76 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_77 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_78 ::var_is_introduced :: is_defined_var; -var int: X_INTRODUCED_80 ::var_is_introduced ; -var bool: X_INTRODUCED_82 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_83 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_84 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_85 ::var_is_introduced :: is_defined_var; -var 1..18: X_INTRODUCED_90 ::var_is_introduced ; -var bool: X_INTRODUCED_91 ::var_is_introduced :: 
is_defined_var; -var 1..18: X_INTRODUCED_93 ::var_is_introduced ; -var bool: X_INTRODUCED_94 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_95 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_97 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_98 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_99 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_100 ::var_is_introduced :: is_defined_var; -var int: X_INTRODUCED_101 ::var_is_introduced ; -var bool: X_INTRODUCED_102 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_103 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_104 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_105 ::var_is_introduced :: is_defined_var; -var 1..18: X_INTRODUCED_109 ::var_is_introduced ; -var bool: X_INTRODUCED_110 ::var_is_introduced :: is_defined_var; -var 1..18: X_INTRODUCED_112 ::var_is_introduced ; -var bool: X_INTRODUCED_113 ::var_is_introduced :: is_defined_var; -var 1..18: X_INTRODUCED_115 ::var_is_introduced ; -var bool: X_INTRODUCED_116 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_117 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_118 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_119 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_120 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_121 ::var_is_introduced :: is_defined_var; -var int: X_INTRODUCED_122 ::var_is_introduced ; -var bool: X_INTRODUCED_123 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_124 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_126 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_128 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_129 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_130 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_131 ::var_is_introduced :: is_defined_var; -var int: 
X_INTRODUCED_132 ::var_is_introduced ; -var bool: X_INTRODUCED_133 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_134 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_135 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_136 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_139 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_140 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_141 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_142 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_143 ::var_is_introduced :: is_defined_var; -var int: X_INTRODUCED_144 ::var_is_introduced ; -var bool: X_INTRODUCED_145 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_146 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_147 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_148 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_149 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_150 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_153 ::var_is_introduced :: is_defined_var; -var 1..18: X_INTRODUCED_155 ::var_is_introduced ; -var bool: X_INTRODUCED_156 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_157 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_159 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_160 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_161 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_162 ::var_is_introduced :: is_defined_var; -var int: X_INTRODUCED_163 ::var_is_introduced ; -var bool: X_INTRODUCED_164 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_165 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_166 ::var_is_introduced :: is_defined_var; -var 0..1: X_INTRODUCED_167 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_170 ::var_is_introduced :: is_defined_var; 
-var bool: X_INTRODUCED_171 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_172 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_173 ::var_is_introduced :: is_defined_var; -var bool: X_INTRODUCED_174 ::var_is_introduced :: is_defined_var; -var int: X_INTRODUCED_178 ::var_is_introduced :: is_defined_var; -array [1..18] of var int: interview = [X_INTRODUCED_0,X_INTRODUCED_1,X_INTRODUCED_2,X_INTRODUCED_3,X_INTRODUCED_4,X_INTRODUCED_5,X_INTRODUCED_6,X_INTRODUCED_7,X_INTRODUCED_8,X_INTRODUCED_9,X_INTRODUCED_10,X_INTRODUCED_11,X_INTRODUCED_12,X_INTRODUCED_13,X_INTRODUCED_14,X_INTRODUCED_15,X_INTRODUCED_16,X_INTRODUCED_17]; -array [1..18] of var int: order:: output_array([1..18]) = [18,8,17,3,16,7,12,4,15,6,14,5,13,9,1,11,2,10]; -array [1..18] of var int: points:: output_array([1..18]) = [X_INTRODUCED_80,X_INTRODUCED_56,X_INTRODUCED_58,X_INTRODUCED_101,X_INTRODUCED_60,X_INTRODUCED_62,X_INTRODUCED_122,X_INTRODUCED_64,X_INTRODUCED_132,X_INTRODUCED_66,X_INTRODUCED_68,X_INTRODUCED_144,X_INTRODUCED_70,X_INTRODUCED_72,X_INTRODUCED_74,X_INTRODUCED_163,X_INTRODUCED_76,X_INTRODUCED_78]; -array [1..3] of var int: X_INTRODUCED_89 ::var_is_introduced = [X_INTRODUCED_3,X_INTRODUCED_4,X_INTRODUCED_5]; -array [1..4] of var int: X_INTRODUCED_92 ::var_is_introduced = [X_INTRODUCED_11,X_INTRODUCED_12,X_INTRODUCED_13,X_INTRODUCED_14]; -array [1..3] of var int: X_INTRODUCED_108 ::var_is_introduced = [X_INTRODUCED_0,X_INTRODUCED_1,X_INTRODUCED_2]; -array [1..3] of var int: X_INTRODUCED_114 ::var_is_introduced = [X_INTRODUCED_8,X_INTRODUCED_9,X_INTRODUCED_10]; -array [1..3] of var int: X_INTRODUCED_154 ::var_is_introduced = [X_INTRODUCED_15,X_INTRODUCED_16,X_INTRODUCED_17]; -constraint inverse(interview,order); -constraint array_int_maximum(X_INTRODUCED_90,X_INTRODUCED_89); -constraint array_int_maximum(X_INTRODUCED_93,X_INTRODUCED_92); -constraint bool_clause([X_INTRODUCED_97],[X_INTRODUCED_95]); -constraint 
bool_clause([X_INTRODUCED_95,X_INTRODUCED_99],[X_INTRODUCED_98]); -constraint array_bool_or([X_INTRODUCED_95,X_INTRODUCED_98,X_INTRODUCED_100],true); -constraint array_int_maximum(X_INTRODUCED_109,X_INTRODUCED_108); -constraint int_max(X_INTRODUCED_6,X_INTRODUCED_7,X_INTRODUCED_112); -constraint array_int_maximum(X_INTRODUCED_115,X_INTRODUCED_114); -constraint bool_clause([X_INTRODUCED_118],[X_INTRODUCED_117]); -constraint bool_clause([X_INTRODUCED_117,X_INTRODUCED_120],[X_INTRODUCED_119]); -constraint array_bool_or([X_INTRODUCED_117,X_INTRODUCED_119,X_INTRODUCED_121],true); -constraint bool_clause([X_INTRODUCED_128],[X_INTRODUCED_126]); -constraint bool_clause([X_INTRODUCED_126,X_INTRODUCED_130],[X_INTRODUCED_129]); -constraint array_bool_or([X_INTRODUCED_126,X_INTRODUCED_129,X_INTRODUCED_131],true); -constraint bool_clause([X_INTRODUCED_140],[X_INTRODUCED_139]); -constraint bool_clause([X_INTRODUCED_139,X_INTRODUCED_142],[X_INTRODUCED_141]); -constraint array_bool_or([X_INTRODUCED_139,X_INTRODUCED_141,X_INTRODUCED_143],true); -constraint array_int_maximum(X_INTRODUCED_155,X_INTRODUCED_154); -constraint bool_clause([X_INTRODUCED_159],[X_INTRODUCED_157]); -constraint bool_clause([X_INTRODUCED_157,X_INTRODUCED_161],[X_INTRODUCED_160]); -constraint array_bool_or([X_INTRODUCED_157,X_INTRODUCED_160,X_INTRODUCED_162],true); -constraint bool_clause([X_INTRODUCED_171],[X_INTRODUCED_170]); -constraint bool_clause([X_INTRODUCED_170,X_INTRODUCED_173],[X_INTRODUCED_172]); -constraint array_bool_or([X_INTRODUCED_170,X_INTRODUCED_172,X_INTRODUCED_174],true); -constraint int_lin_le_reif(X_INTRODUCED_54,[X_INTRODUCED_1,X_INTRODUCED_0],-1,X_INTRODUCED_55):: defines_var(X_INTRODUCED_55); -constraint bool2int(X_INTRODUCED_55,X_INTRODUCED_56):: defines_var(X_INTRODUCED_56); -constraint int_lin_le_reif(X_INTRODUCED_54,[X_INTRODUCED_2,X_INTRODUCED_0],-1,X_INTRODUCED_57):: defines_var(X_INTRODUCED_57); -constraint bool2int(X_INTRODUCED_57,X_INTRODUCED_58):: defines_var(X_INTRODUCED_58); 
-constraint int_lin_le_reif(X_INTRODUCED_54,[X_INTRODUCED_4,X_INTRODUCED_3],-1,X_INTRODUCED_59):: defines_var(X_INTRODUCED_59); -constraint bool2int(X_INTRODUCED_59,X_INTRODUCED_60):: defines_var(X_INTRODUCED_60); -constraint int_lin_le_reif(X_INTRODUCED_54,[X_INTRODUCED_5,X_INTRODUCED_3],-1,X_INTRODUCED_61):: defines_var(X_INTRODUCED_61); -constraint bool2int(X_INTRODUCED_61,X_INTRODUCED_62):: defines_var(X_INTRODUCED_62); -constraint int_lin_le_reif(X_INTRODUCED_54,[X_INTRODUCED_7,X_INTRODUCED_6],-1,X_INTRODUCED_63):: defines_var(X_INTRODUCED_63); -constraint bool2int(X_INTRODUCED_63,X_INTRODUCED_64):: defines_var(X_INTRODUCED_64); -constraint int_lin_le_reif(X_INTRODUCED_54,[X_INTRODUCED_9,X_INTRODUCED_8],-1,X_INTRODUCED_65):: defines_var(X_INTRODUCED_65); -constraint bool2int(X_INTRODUCED_65,X_INTRODUCED_66):: defines_var(X_INTRODUCED_66); -constraint int_lin_le_reif(X_INTRODUCED_54,[X_INTRODUCED_10,X_INTRODUCED_8],-1,X_INTRODUCED_67):: defines_var(X_INTRODUCED_67); -constraint bool2int(X_INTRODUCED_67,X_INTRODUCED_68):: defines_var(X_INTRODUCED_68); -constraint int_lin_le_reif(X_INTRODUCED_54,[X_INTRODUCED_12,X_INTRODUCED_11],-1,X_INTRODUCED_69):: defines_var(X_INTRODUCED_69); -constraint bool2int(X_INTRODUCED_69,X_INTRODUCED_70):: defines_var(X_INTRODUCED_70); -constraint int_lin_le_reif(X_INTRODUCED_54,[X_INTRODUCED_13,X_INTRODUCED_11],-1,X_INTRODUCED_71):: defines_var(X_INTRODUCED_71); -constraint bool2int(X_INTRODUCED_71,X_INTRODUCED_72):: defines_var(X_INTRODUCED_72); -constraint int_lin_le_reif(X_INTRODUCED_54,[X_INTRODUCED_14,X_INTRODUCED_11],-1,X_INTRODUCED_73):: defines_var(X_INTRODUCED_73); -constraint bool2int(X_INTRODUCED_73,X_INTRODUCED_74):: defines_var(X_INTRODUCED_74); -constraint int_lin_le_reif(X_INTRODUCED_54,[X_INTRODUCED_16,X_INTRODUCED_15],-1,X_INTRODUCED_75):: defines_var(X_INTRODUCED_75); -constraint bool2int(X_INTRODUCED_75,X_INTRODUCED_76):: defines_var(X_INTRODUCED_76); -constraint 
int_lin_le_reif(X_INTRODUCED_54,[X_INTRODUCED_17,X_INTRODUCED_15],-1,X_INTRODUCED_77):: defines_var(X_INTRODUCED_77); -constraint bool2int(X_INTRODUCED_77,X_INTRODUCED_78):: defines_var(X_INTRODUCED_78); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_1,X_INTRODUCED_0],-1,X_INTRODUCED_82):: defines_var(X_INTRODUCED_82); -constraint bool2int(X_INTRODUCED_82,X_INTRODUCED_83):: defines_var(X_INTRODUCED_83); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_2,X_INTRODUCED_0],-1,X_INTRODUCED_84):: defines_var(X_INTRODUCED_84); -constraint bool2int(X_INTRODUCED_84,X_INTRODUCED_85):: defines_var(X_INTRODUCED_85); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_0,X_INTRODUCED_90],-1,X_INTRODUCED_91):: defines_var(X_INTRODUCED_91); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_0,X_INTRODUCED_93],-1,X_INTRODUCED_94):: defines_var(X_INTRODUCED_94); -constraint array_bool_or([X_INTRODUCED_91,X_INTRODUCED_94],X_INTRODUCED_95):: defines_var(X_INTRODUCED_95); -constraint int_lin_eq_reif(X_INTRODUCED_96,[X_INTRODUCED_80,X_INTRODUCED_83,X_INTRODUCED_85],3,X_INTRODUCED_97):: defines_var(X_INTRODUCED_97); -constraint int_le_reif(X_INTRODUCED_0,8,X_INTRODUCED_98):: defines_var(X_INTRODUCED_98); -constraint int_eq_reif(X_INTRODUCED_80,3,X_INTRODUCED_99):: defines_var(X_INTRODUCED_99); -constraint int_eq_reif(X_INTRODUCED_80,0,X_INTRODUCED_100):: defines_var(X_INTRODUCED_100); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_4,X_INTRODUCED_3],-1,X_INTRODUCED_102):: defines_var(X_INTRODUCED_102); -constraint bool2int(X_INTRODUCED_102,X_INTRODUCED_103):: defines_var(X_INTRODUCED_103); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_5,X_INTRODUCED_3],-1,X_INTRODUCED_104):: defines_var(X_INTRODUCED_104); -constraint bool2int(X_INTRODUCED_104,X_INTRODUCED_105):: defines_var(X_INTRODUCED_105); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_3,X_INTRODUCED_109],-1,X_INTRODUCED_110):: defines_var(X_INTRODUCED_110); -constraint 
int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_3,X_INTRODUCED_112],-1,X_INTRODUCED_113):: defines_var(X_INTRODUCED_113); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_3,X_INTRODUCED_115],-1,X_INTRODUCED_116):: defines_var(X_INTRODUCED_116); -constraint array_bool_or([X_INTRODUCED_110,X_INTRODUCED_113,X_INTRODUCED_116],X_INTRODUCED_117):: defines_var(X_INTRODUCED_117); -constraint int_lin_eq_reif(X_INTRODUCED_96,[X_INTRODUCED_101,X_INTRODUCED_103,X_INTRODUCED_105],3,X_INTRODUCED_118):: defines_var(X_INTRODUCED_118); -constraint int_le_reif(X_INTRODUCED_3,8,X_INTRODUCED_119):: defines_var(X_INTRODUCED_119); -constraint int_eq_reif(X_INTRODUCED_101,3,X_INTRODUCED_120):: defines_var(X_INTRODUCED_120); -constraint int_eq_reif(X_INTRODUCED_101,0,X_INTRODUCED_121):: defines_var(X_INTRODUCED_121); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_7,X_INTRODUCED_6],-1,X_INTRODUCED_123):: defines_var(X_INTRODUCED_123); -constraint bool2int(X_INTRODUCED_123,X_INTRODUCED_124):: defines_var(X_INTRODUCED_124); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_6,X_INTRODUCED_90],-1,X_INTRODUCED_126):: defines_var(X_INTRODUCED_126); -constraint int_lin_eq_reif(X_INTRODUCED_127,[X_INTRODUCED_122,X_INTRODUCED_124],3,X_INTRODUCED_128):: defines_var(X_INTRODUCED_128); -constraint int_le_reif(X_INTRODUCED_6,8,X_INTRODUCED_129):: defines_var(X_INTRODUCED_129); -constraint int_eq_reif(X_INTRODUCED_122,3,X_INTRODUCED_130):: defines_var(X_INTRODUCED_130); -constraint int_eq_reif(X_INTRODUCED_122,0,X_INTRODUCED_131):: defines_var(X_INTRODUCED_131); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_9,X_INTRODUCED_8],-1,X_INTRODUCED_133):: defines_var(X_INTRODUCED_133); -constraint bool2int(X_INTRODUCED_133,X_INTRODUCED_134):: defines_var(X_INTRODUCED_134); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_10,X_INTRODUCED_8],-1,X_INTRODUCED_135):: defines_var(X_INTRODUCED_135); -constraint bool2int(X_INTRODUCED_135,X_INTRODUCED_136):: 
defines_var(X_INTRODUCED_136); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_8,X_INTRODUCED_90],-1,X_INTRODUCED_139):: defines_var(X_INTRODUCED_139); -constraint int_lin_eq_reif(X_INTRODUCED_96,[X_INTRODUCED_132,X_INTRODUCED_134,X_INTRODUCED_136],3,X_INTRODUCED_140):: defines_var(X_INTRODUCED_140); -constraint int_le_reif(X_INTRODUCED_8,8,X_INTRODUCED_141):: defines_var(X_INTRODUCED_141); -constraint int_eq_reif(X_INTRODUCED_132,3,X_INTRODUCED_142):: defines_var(X_INTRODUCED_142); -constraint int_eq_reif(X_INTRODUCED_132,0,X_INTRODUCED_143):: defines_var(X_INTRODUCED_143); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_12,X_INTRODUCED_11],-1,X_INTRODUCED_145):: defines_var(X_INTRODUCED_145); -constraint bool2int(X_INTRODUCED_145,X_INTRODUCED_146):: defines_var(X_INTRODUCED_146); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_13,X_INTRODUCED_11],-1,X_INTRODUCED_147):: defines_var(X_INTRODUCED_147); -constraint bool2int(X_INTRODUCED_147,X_INTRODUCED_148):: defines_var(X_INTRODUCED_148); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_14,X_INTRODUCED_11],-1,X_INTRODUCED_149):: defines_var(X_INTRODUCED_149); -constraint bool2int(X_INTRODUCED_149,X_INTRODUCED_150):: defines_var(X_INTRODUCED_150); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_11,X_INTRODUCED_109],-1,X_INTRODUCED_153):: defines_var(X_INTRODUCED_153); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_11,X_INTRODUCED_155],-1,X_INTRODUCED_156):: defines_var(X_INTRODUCED_156); -constraint array_bool_or([X_INTRODUCED_153,X_INTRODUCED_156],X_INTRODUCED_157):: defines_var(X_INTRODUCED_157); -constraint int_lin_eq_reif(X_INTRODUCED_158,[X_INTRODUCED_144,X_INTRODUCED_146,X_INTRODUCED_148,X_INTRODUCED_150],3,X_INTRODUCED_159):: defines_var(X_INTRODUCED_159); -constraint int_le_reif(X_INTRODUCED_11,8,X_INTRODUCED_160):: defines_var(X_INTRODUCED_160); -constraint int_eq_reif(X_INTRODUCED_144,3,X_INTRODUCED_161):: defines_var(X_INTRODUCED_161); 
-constraint int_eq_reif(X_INTRODUCED_144,0,X_INTRODUCED_162):: defines_var(X_INTRODUCED_162); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_16,X_INTRODUCED_15],-1,X_INTRODUCED_164):: defines_var(X_INTRODUCED_164); -constraint bool2int(X_INTRODUCED_164,X_INTRODUCED_165):: defines_var(X_INTRODUCED_165); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_17,X_INTRODUCED_15],-1,X_INTRODUCED_166):: defines_var(X_INTRODUCED_166); -constraint bool2int(X_INTRODUCED_166,X_INTRODUCED_167):: defines_var(X_INTRODUCED_167); -constraint int_lin_le_reif(X_INTRODUCED_81,[X_INTRODUCED_15,X_INTRODUCED_93],-1,X_INTRODUCED_170):: defines_var(X_INTRODUCED_170); -constraint int_lin_eq_reif(X_INTRODUCED_96,[X_INTRODUCED_163,X_INTRODUCED_165,X_INTRODUCED_167],3,X_INTRODUCED_171):: defines_var(X_INTRODUCED_171); -constraint int_le_reif(X_INTRODUCED_15,8,X_INTRODUCED_172):: defines_var(X_INTRODUCED_172); -constraint int_eq_reif(X_INTRODUCED_163,3,X_INTRODUCED_173):: defines_var(X_INTRODUCED_173); -constraint int_eq_reif(X_INTRODUCED_163,0,X_INTRODUCED_174):: defines_var(X_INTRODUCED_174); -constraint int_lin_eq([1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,-1],[X_INTRODUCED_80,X_INTRODUCED_56,X_INTRODUCED_58,X_INTRODUCED_101,X_INTRODUCED_60,X_INTRODUCED_62,X_INTRODUCED_122,X_INTRODUCED_64,X_INTRODUCED_132,X_INTRODUCED_66,X_INTRODUCED_68,X_INTRODUCED_144,X_INTRODUCED_70,X_INTRODUCED_72,X_INTRODUCED_74,X_INTRODUCED_163,X_INTRODUCED_76,X_INTRODUCED_78,X_INTRODUCED_178],0):: defines_var(X_INTRODUCED_178); -solve maximize X_INTRODUCED_178; diff --git a/examples/tests/issue173.cc b/examples/tests/issue173.cc deleted file mode 100644 index 01bb4cf3fdc..00000000000 --- a/examples/tests/issue173.cc +++ /dev/null @@ -1,36 +0,0 @@ -#include "ortools/base/commandlineflags.h" -#include "ortools/base/logging.h" -#include "ortools/linear_solver/linear_solver.h" - -namespace operations_research { -void SolveLP() { - // Create the linear solver with the GLOP backend. 
- std::unique_ptr solver(MPSolver::CreateSolver("CBC")); - if (!solver) { - return; - } - const double kInfinity = solver->infinity(); - MPVariable* const x = solver->MakeNumVar(-kInfinity, kInfinity, "x"); - - MPObjective* const objective = solver->MutableObjective(); - objective->SetMaximization(); - objective->SetCoefficient(x, 1); - - MPConstraint* const constraint = solver->MakeRowConstraint(0, 5); - constraint->SetCoefficient(x, 1); - - solver->Solve(); -} - -void BreakLoop() { - for (int i = 0; i < 500; i++) { - SolveLP(); - } -} -} // namespace operations_research - -int main(int argc, char** argv) { - absl::ParseCommandLine(argc, argv); - operations_research::BreakLoop(); - return 0; -} diff --git a/examples/tests/issue18.cs b/examples/tests/issue18.cs deleted file mode 100644 index a0f895a70d7..00000000000 --- a/examples/tests/issue18.cs +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright 2010-2025 Google LLC -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -using System; -using System.Collections.Generic; -using Google.OrTools.ConstraintSolver; - -public class Issue18 -{ - public static void NewSearchTest() - { - Solver solver = new Google.OrTools.ConstraintSolver.Solver("p"); - - // creating dummy variables - List vars = new List(); - for (int i = 0; i < 100000; i++) - { - vars.Add(solver.MakeIntVar(0, 1)); - } - - IntExpr globalSum = solver.MakeSum(vars.ToArray()); - - DecisionBuilder db = solver.MakePhase(vars.ToArray(), Google.OrTools.ConstraintSolver.Solver.INT_VAR_SIMPLE, - Google.OrTools.ConstraintSolver.Solver.INT_VALUE_SIMPLE); - - solver.NewSearch(db, new OptimizeVar(solver, true, globalSum.Var(), 100)); - - // force Garbage Collector - GC.Collect(); - GC.WaitForPendingFinalizers(); - - // Try to read all solutions - int count = 0; - while (solver.NextSolution()) - { - count++; - Console.WriteLine("solution " + count + " found"); - // Console.WriteLine("solution " + globalSum.Var().Value()); - if (count > 10) - { - break; - } - } - Console.WriteLine("Solutions: " + count); - } - static void Main() - { - NewSearchTest(); - } -} diff --git a/examples/tests/issue2.py b/examples/tests/issue2.py deleted file mode 100755 index c6d36b1da22..00000000000 --- a/examples/tests/issue2.py +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env python3 -from ortools.constraint_solver import pywrapcp - -# Control-C test. Hit Control-C during execution of this program. 
- -def main(): - solver = pywrapcp.Solver("time limit test") - n = 10 - x = [solver.IntVar(1, n, "x[%i]" % i) for i in range(n)] - solver.Add(solver.AllDifferent(x, True)) - - solution = solver.Assignment() - solution.Add(x) - - db = solver.Phase(x, - solver.CHOOSE_FIRST_UNBOUND, - solver.ASSIGN_MIN_VALUE) - - time_limit = 2000 - branch_limit = 100000000 - failures_limit = 100000000 - solutions_limit = 10000000 - limits = ( - solver.Limit( - time_limit, branch_limit, failures_limit, solutions_limit, True)) - - search_log = solver.SearchLog(1000) - assignment = solver.Assignment() - assignment.Add(x) - collector = solver.LastSolutionCollector(assignment) - try: - solver.Solve(db, [limits, search_log, collector]) - except KeyboardInterrupt: - print("Control-C caught") - - print("failures:", solver.Failures()) - print("branches:", solver.Branches()) - print("wall_time:", solver.WallTime()) - - -if __name__ == "__main__": - main() diff --git a/examples/tests/issue22.cs b/examples/tests/issue22.cs deleted file mode 100644 index a1cffd7fb30..00000000000 --- a/examples/tests/issue22.cs +++ /dev/null @@ -1,84 +0,0 @@ -// Copyright 2010-2025 Google LLC -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -using System; -using System.Collections; -using System.Collections.Generic; -using System.IO; -using System.Text.RegularExpressions; -using Google.OrTools.ConstraintSolver; - -public class Issue22Test -{ - private static long Solve(long num_buses_check = 0) - { - ConstraintSolverParameters sPrm = Solver.DefaultSolverParameters(); - sPrm.CompressTrail = 0; - Solver solver = new Solver("OrTools", sPrm); - - // this works - // IntVar[,] x = solver.MakeIntVarMatrix(2,2, new int[] {-2,0,1,2}, "x"); - - // this doesn't work - IntVar[,] x = solver.MakeIntVarMatrix(2, 2, new int[] { 0, 1, 2 }, "x"); - - for (int w = 0; w < 2; w++) - { - IntVar[] b = new IntVar[2]; - for (int i = 0; i < 2; i++) - { - b[i] = solver.MakeIsEqualCstVar(x[w, i], 0); - } - solver.Add(solver.MakeSumGreaterOrEqual(b, 2)); - } - - IntVar[] x_flat = x.Flatten(); - DecisionBuilder db = solver.MakePhase(x_flat, Solver.CHOOSE_FIRST_UNBOUND, Solver.ASSIGN_MIN_VALUE); - solver.NewSearch(db); - while (solver.NextSolution()) - { - Console.WriteLine("x: "); - for (int j = 0; j < 2; j++) - { - Console.Write("worker" + (j + 1).ToString() + ":"); - for (int i = 0; i < 2; i++) - { - Console.Write(" {0,2} ", x[j, i].Value()); - } - Console.Write("\n"); - } - Console.WriteLine("End at---->" + DateTime.Now); - } - - Console.WriteLine("\nSolutions: {0}", solver.Solutions()); - Console.WriteLine("WallTime: {0}ms", solver.WallTime()); - Console.WriteLine("Failures: {0}", solver.Failures()); - Console.WriteLine("Branches: {0} ", solver.Branches()); - - solver.EndSearch(); - return 1; - } - - public static void InitialPropagateTest() - { - Console.WriteLine("Check for minimum number of buses: "); - long num_buses = Solve(); - Console.WriteLine("\n... 
got {0} as minimal value.", num_buses); - Console.WriteLine("\nAll solutions: ", num_buses); - } - - static void Main() - { - InitialPropagateTest(); - } -} diff --git a/examples/tests/issue3.py b/examples/tests/issue3.py deleted file mode 100755 index c74b1853dbc..00000000000 --- a/examples/tests/issue3.py +++ /dev/null @@ -1,74 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2010 Pierre Schaus pschaus@gmail.com -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from ortools.constraint_solver import pywrapcp -from time import time -from random import randint - -#----------------helper for binpacking posting---------------- - - -def binpacking(cp, binvars, weights, loadvars): - """post the connstraints forall j: loadvars[j] == sum_i (binvars[i] == j) * weights[i])""" - - nbins = len(loadvars) - nitems = len(binvars) - for j in range(nbins): - b = [cp.BoolVar(str(i)) for i in range(nitems)] - for i in range(nitems): - cp.Add(cp.IsEqualCstCt(binvars[i], j, b[i])) - cp.Add(solver.Sum([b[i] * weights[i] for i in range(nitems)]) == l[j]) - cp.Add(solver.Sum(loadvars) == sum(weights)) - -#------------------------------data reading------------------- - -maxcapa = 44 -weights = [4, 22, 9, 5, 8, 3, 3, 4, 7, 7, 3] -loss = [ - 0, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 1, 0, 2, 1, 0, 0, 0, 0, 2, 1, 0, - 0, 0, 0, 0, 0, 0, 0, 1, 0, 2, 1, 0, 3, 2, 1, 0, 2, 1, 0, 0, 0] -nbslab = 11 - -#------------------solver and variable declaration------------- - -solver = pywrapcp.Solver('Steel Mill 
Slab') -x = [solver.IntVar(0, nbslab-1, 'x' + str(i)) for i in range(nbslab)] -l = [solver.IntVar(0, maxcapa, 'l' + str(i)) for i in range(nbslab)] -obj = solver.IntVar(0, nbslab * maxcapa, 'obj') - -#-------------------post of the constraints-------------- - - -binpacking(solver, x, weights[:nbslab], l) -solver.Add(solver.Sum([solver.Element(loss, l[s]) - for s in range(nbslab)]) == obj) - -sol = [2, 0, 0, 0, 0, 1, 2, 2, 1, 1, 2] - -#------------start the search and optimization----------- - -objective = solver.Minimize(obj, 1) -db = solver.Phase(x, solver.INT_VAR_DEFAULT, - solver.INT_VALUE_DEFAULT) -# solver.NewSearch(db,[objective]) #segfault if I comment this - -while solver.NextSolution(): - print(obj, 'check:', sum([loss[l[s].Min()] for s in range(nbslab)])) - print(l) -solver.EndSearch() - -print('#fails: ', solver.Failures()) -print('time: ', solver.WallTime()) diff --git a/examples/tests/issue33.cs b/examples/tests/issue33.cs deleted file mode 100644 index 2e888ca2072..00000000000 --- a/examples/tests/issue33.cs +++ /dev/null @@ -1,676 +0,0 @@ -// Authors: Johan Wessén -// Copyright 2010-2025 Google LLC -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -using Google.OrTools.ConstraintSolver; -using System.Collections.Generic; -using System.Diagnostics; -using System.IO; -using System.Linq; -using System.Text; -using System.Threading.Tasks; -using System; - -public class Task -{ - public int Id { get; private set; } - public int TaskType { get; private set; } - public int LocationId { get; private set; } - public Dictionary Durations { get; private set; } - public int TaskPosition { get; private set; } - - public Task(int id, int taskType, int locationIndex, int taskPosition, Dictionary durations) - { - Id = id; - TaskType = taskType; - LocationId = locationIndex; - Durations = durations; - TaskPosition = taskPosition; - } - - public Task(int id, int taskType, int locationIndex, int taskPosition) - { - Id = id; - TaskType = taskType; - LocationId = locationIndex; - TaskPosition = taskPosition; - Durations = new Dictionary(); - } -} - -public class WorkLocation -{ - public int Id { get; private set; } - public int NbTasks - { - get { - Debug.Assert(Tasks != null); - return Tasks.Length; - } - set { - Debug.Assert(Tasks == null); - Tasks = new Task[value]; - } - } - public Task[] Tasks { get; private set; } - - public WorkLocation(int index) - { - Id = index; - } -} - -public class Tool -{ - public int Id { get; private set; } - public HashSet TaskTypes { get; set; } - public int[,] TravellingTime { get; set; } - public int InitialLocationId { get; set; } - - public Tool(int index, int initialLocation = 0) - { - Id = index; - InitialLocationId = initialLocation; - TaskTypes = new HashSet(); - } - - public void AddTaskType(int t) - { - TaskTypes.Add(t); - } - - public bool CanPerformTaskType(int taskType) - { - return TaskTypes.Contains(taskType); - } -} - -public class FactoryDescription -{ - public Tool[] Tools { get; private set; } - public WorkLocation[] Locations { get; private set; } - - public int NbWorkLocations - { - get { - return Locations.Length; - } - } - public int NbTools - { - get { - return 
Tools.Length; - } - } - - public int NbTaskPerCycle { get; private set; } - // TaskType go typically from 0 to 6. InspectionType indicates which - // is the TaskType that correspond to Inspection. - public int Inspection { get; private set; } - // All the time within the schedule horizon in which the blast can start. - public long[] InspectionStarts { get; private set; } - - public int Horizon { get; private set; } - - // horizon equal to 2 weeks (in minutes). - public FactoryDescription(int nbTools, int nbLocations, int nbTaskPerCycle, int horizon = 14 * 24 * 60) - { - Debug.Assert(nbTools > 0); - Debug.Assert(nbLocations > 0); - Debug.Assert(nbTaskPerCycle > 0); - Debug.Assert(horizon > 0); - NbTaskPerCycle = nbTaskPerCycle; - Inspection = NbTaskPerCycle - 1; - Tools = new Tool[nbTools]; - Horizon = horizon; - for (int i = 0; i < nbTools; i++) - Tools[i] = new Tool(i); - Locations = new WorkLocation[nbLocations]; - for (int i = 0; i < nbLocations; i++) - Locations[i] = new WorkLocation(i); - - InspectionStarts = new long[] { -1, 600, 1200, 1800, 2400, 2800 }; - } - - public Tool[] getToolPerTaskType(int taskType) - { - var elements = from tool in Tools - where tool.CanPerformTaskType(taskType) select tool; - return elements.ToArray(); - } - - public Task[] getFlatTaskList() - { - return (from location in Locations from task in location.Tasks orderby task.Id select task).ToArray(); - } - - public int[] getTaskTypes() - { - return (from location in Locations from task in location.Tasks select task.TaskType).Distinct().ToArray(); - } - - // TODO: This should be enhanced - public void SanityCheck() - { - foreach (Tool tool in Tools) - { - Debug.Assert(tool.TravellingTime.GetLength(0) == NbWorkLocations); - Debug.Assert(tool.TravellingTime.GetLength(1) == NbWorkLocations); - for (int i = 0; i < NbWorkLocations; i++) - Debug.Assert(tool.TravellingTime[i, i] == 0); - } - } -} - -interface DataReader -{ - FactoryDescription FetchData(); -} - -public class 
SmallSyntheticData : DataReader -{ - public SmallSyntheticData() - { - } - - public FactoryDescription FetchData() - { - // deterministic seed for result reproducibility - Random randomDuration = new Random(2); - - // FactoryDescription(nbTools, nblocations, nbTasks per cycle) - FactoryDescription factoryDescription = new FactoryDescription(5, 4, 3); - - // Travelling time and distance are temporarily identical and they - // are no different for different tools - int[,] travellingTime = new int[factoryDescription.NbWorkLocations, factoryDescription.NbWorkLocations]; - for (int i = 0; i < travellingTime.GetLength(0); i++) - { - for (int j = 0; j < travellingTime.GetLength(1); j++) - { - if (i == j) - travellingTime[i, j] = 0; - else - travellingTime[i, j] = (5 * Math.Abs(i - j)) * 10; - } - } - - factoryDescription.Tools[0].AddTaskType(0); - factoryDescription.Tools[1].AddTaskType(0); - factoryDescription.Tools[2].AddTaskType(1); - factoryDescription.Tools[3].AddTaskType(1); - factoryDescription.Tools[4].AddTaskType(2); - factoryDescription.Tools[1].AddTaskType(1); - - foreach (Tool tool in factoryDescription.Tools) - tool.TravellingTime = travellingTime; - - int c = 0; - int nbCyclePerWorkLocation = 2; - int[] boll = new int[100]; - for (int i = 0; i < factoryDescription.NbWorkLocations; i++) - { - factoryDescription.Locations[i].NbTasks = nbCyclePerWorkLocation * factoryDescription.NbTaskPerCycle; - for (int j = 0; j < nbCyclePerWorkLocation; j++) - { - for (int k = 0; k < factoryDescription.NbTaskPerCycle; k++) - { - Task t = new Task(c, k, i, k + j * factoryDescription.NbTaskPerCycle); - - // Filling in tool-dependent durations - Tool[] compatibleTools = factoryDescription.getToolPerTaskType(k); - foreach (Tool tool in compatibleTools) - { - boll[c] = randomDuration.Next(13, 17) * 10; - ; - t.Durations[tool.Id] = boll[c]; - } - factoryDescription.Locations[i].Tasks[t.TaskPosition] = t; - c++; - } - } - } - - factoryDescription.SanityCheck(); - return 
factoryDescription; - } -} - -public class RandomSelectToolHeuristic : NetDecisionBuilder -{ - private FactoryScheduling factoryScheduling; - private Random rnd; - - public RandomSelectToolHeuristic(FactoryScheduling factoryScheduling, int seed) - { - this.factoryScheduling = factoryScheduling; - // deterministic seed for result reproducibility - this.rnd = new Random(seed); - } - - public override Decision Next(Solver solver) - { - foreach (IntVar var in factoryScheduling.SelectedTool) - { - if (!var.Bound()) - { - int min = (int)var.Min(); - int max = (int)var.Max(); - int rndVal = rnd.Next(min, max + 1); - while (!var.Contains(rndVal)) - rndVal = rnd.Next(min, max + 1); - return solver.MakeAssignVariableValue(var, rndVal); - } - } - return null; - } -} - -class TaskAlternative -{ - public Task Task { get; private set; } - public IntVar ToolVar { get; set; } - public List Intervals { get; private set; } - - public TaskAlternative(Task t) - { - Task = t; - Intervals = new List(); - } -} - -public class FactoryScheduling -{ - private FactoryDescription factoryData; - private Solver solver; - - private Task[] tasks; - private int[] taskTypes; - - /* Flat list of all the tasks */ - private TaskAlternative[] taskStructures; - - /* Task per WorkLocation: location2Task[d][i]: the i-th task of the - * d-th location */ - private TaskAlternative[][] location2Task; - - /* Task per Tool: tool2Task[t][i]: the i-th task of the t-th tool. - Note that it does NOT imply that the it will be the i-th - executed. In other words, it should be considered as an unordered - set. Furthermore, tool2Task[t][i] can also be *unperformed* */ - private List[] tool2Task; - - /* All the transition times for the tools. - tool2TransitionTimes[t][i]: the transition time of the t-th tool - from the i-th task to the next */ - private List[] tool2TransitionTimes; - - /* Map between the interval var of a tool to its related task id. 
- toolIntervalVar2TaskId[t][k] = i: in the t-th tool, the k-th - interval var correspond to tasks[i] */ - private List[] toolIntervalVar2TaskId; - - /* Tools per task type: taskType2Tool[tt][t]: the t-th tool capable - * of doing the tt-th task type */ - private List[] taskType2Tool; - - /* For each task which tools is performed upon */ - private List selectedTool; - public List SelectedTool - { - get { - return selectedTool; - } - } - - /* Sequence of task for each tool */ - private SequenceVar[] allToolSequences; - public SequenceVar[] AllToolSequences - { - get { - return allToolSequences; - } - } - - /* Makespan var */ - private IntVar makespan; - - /* Objective */ - private OptimizeVar objective; - - /* maximum horizon */ - private int horizon; - - /* Start & End times of IntervalVars*/ - IntVar[][] startingTimes; - IntVar[][] endTimes; - - public FactoryScheduling(FactoryDescription data) - { - factoryData = data; - } - - private void Init() - { - horizon = factoryData.Horizon; - solver = new Solver("Factory Scheduling"); - tasks = factoryData.getFlatTaskList(); - taskTypes = factoryData.getTaskTypes(); - taskStructures = new TaskAlternative[tasks.Length]; - location2Task = new TaskAlternative[factoryData.NbWorkLocations][]; - tool2Task = new List[factoryData.NbTools]; - toolIntervalVar2TaskId = new List[factoryData.NbTools]; - tool2TransitionTimes = new List[factoryData.NbTools]; - - taskType2Tool = new List[taskTypes.Length]; - selectedTool = new List(); - for (int tt = 0; tt < taskTypes.Length; tt++) - taskType2Tool[tt] = new List(); - - foreach (Tool tool in factoryData.Tools) - foreach (int taskType in tool.TaskTypes) - taskType2Tool[taskType].Add(tool); - for (int d = 0; d < factoryData.NbWorkLocations; d++) - location2Task[d] = new TaskAlternative[factoryData.Locations[d].NbTasks]; - for (int t = 0; t < factoryData.NbTools; t++) - { - tool2Task[t] = new List(); - toolIntervalVar2TaskId[t] = new List(); - tool2TransitionTimes[t] = new List(); - } - - 
allToolSequences = new SequenceVar[factoryData.NbTools - 1]; - - startingTimes = new IntVar[factoryData.NbTools - 1][]; - endTimes = new IntVar[factoryData.NbTools - 1][]; - } - - private void PostTransitionTimeConstraints(int t, bool postTransitionsConstraint = true) - { - Tool tool = factoryData.Tools[t]; - // if it is a inspection, we make sure there are no transitiontimes - if (tool.CanPerformTaskType(factoryData.Inspection)) - tool2TransitionTimes[t].Add(null); - else - { - int[,] tt = tool.TravellingTime; - - SequenceVar seq = allToolSequences[t]; - long s = seq.Size(); - IntVar[] nextLocation = new IntVar[s + 1]; - - // The seq.Next(i) represents the task performed after the i-th - // task in the sequence seq.Next(0) represents the first task - // performed for extracting travelling times we need to get the - // related location In case a task is not performed (seq.Next(i) - // == i), i.e. it's pointing to itself The last performed task - // (or pre-start task, if no tasks are performed) will have - // seq.Next(i) == s + 1 therefore we add a virtual location - // whose travelling time is equal to 0 - // - // NOTE: The index of a SequenceVar are 0..n, but the domain - // range is 1..(n+1), this is due to that the start node = 0 is - // a dummy node, and the node where seq.Next(i) == n+1 is the - // end node - - // Extra elements for the unreachable start node (0), and the - // end node whose next task takes place in a virtual location - int[] taskIndex2locationId = new int[s + 2]; - taskIndex2locationId[0] = -10; - for (int i = 0; i < s; i++) - taskIndex2locationId[i + 1] = tasks[toolIntervalVar2TaskId[t][i]].LocationId; - - // this is the virtual location for unperformed tasks - taskIndex2locationId[s + 1] = factoryData.NbWorkLocations; - - // Build the travelling time matrix with the additional virtual location - int[][] ttWithVirtualLocation = new int [factoryData.NbWorkLocations + 1][]; - for (int d1 = 0; d1 < ttWithVirtualLocation.Length; d1++) - { - 
ttWithVirtualLocation[d1] = new int[factoryData.NbWorkLocations + 1]; - for (int d2 = 0; d2 < ttWithVirtualLocation.Length; d2++) - if (d1 == factoryData.NbWorkLocations) - { - ttWithVirtualLocation[d1][d2] = 0; - } - else - { - ttWithVirtualLocation[d1][d2] = (d2 == factoryData.NbWorkLocations) ? 0 : tt[d1, d2]; - } - } - - for (int i = 0; i < nextLocation.Length; i++) - { - // this is the next-location associated with the i-th task - nextLocation[i] = solver.MakeElement(taskIndex2locationId, seq.Next(i)).Var(); - - int d = (i == 0) ? tool.InitialLocationId : tasks[toolIntervalVar2TaskId[t][i - 1]].LocationId; - if (i == 0) - { - // To be changed - right now we don't have meaningful indata - // of previous location Ugly way of setting initial travel - // time to = 0, as this is how we find common grounds - // between benchmark algorithm and this - tool2TransitionTimes[t].Add( - solver.MakeElement(new int[ttWithVirtualLocation[d].Length], nextLocation[i]).Var()); - } - else - { - tool2TransitionTimes[t].Add(solver.MakeElement(ttWithVirtualLocation[d], nextLocation[i]).Var()); - } - } - - // Extra elements for the unreachable start node (0), and the - // end node whose next task takes place in a virtual location - startingTimes[t] = new IntVar[s + 2]; - endTimes[t] = new IntVar[s + 2]; - - startingTimes[t][0] = solver.MakeIntConst(0); - // Tbd: Set this endtime to the estimated time of finishing - // previous task for the current tool - endTimes[t][0] = solver.MakeIntConst(0); - - for (int i = 0; i < s; i++) - { - startingTimes[t][i + 1] = tool2Task[t][i].SafeStartExpr(-1).Var(); - endTimes[t][i + 1] = tool2Task[t][i].SafeEndExpr(-1).Var(); - } - startingTimes[t][s + 1] = solver.MakeIntConst(factoryData.Horizon); - endTimes[t][s + 1] = solver.MakeIntConst(factoryData.Horizon); - - // Enforce (or not) that each task is separated by the - // transition time to the next task - for (int i = 0; i < nextLocation.Length; i++) - { - IntVar nextStart = 
solver.MakeElement(startingTimes[t], seq.Next(i).Var()).Var(); - if (postTransitionsConstraint) - solver.Add(endTimes[t][i] + tool2TransitionTimes[t][i] <= nextStart); - } - } - } - - private void Model() - { - /* Building basic task data structures */ - for (int i = 0; i < tasks.Length; i++) - { - /* Create a new set of possible IntervalVars & IntVar to decide - * which one (and only 1) is performed */ - taskStructures[i] = new TaskAlternative(tasks[i]); - - /* Container to use when posting constraints */ - location2Task[tasks[i].LocationId][tasks[i].TaskPosition] = taskStructures[i]; - - /* Get task type */ - int taskType = tasks[i].TaskType; - - /* Possible tool for this task */ - List tools = taskType2Tool[taskType]; - bool optional = tools.Count > 1; - - /* List of boolean variables. If performedOnTool[t] == true then - * the task is performed on tool t */ - List performedOnTool = new List(); - for (int t = 0; t < tools.Count; t++) - { - /* Creating an IntervalVar. If tools.Count > 1 the intervalVar - * is *OPTIONAL* */ - int toolId = tools[t].Id; - Debug.Assert(tasks[i].Durations.ContainsKey(toolId)); - int duration = tasks[i].Durations[toolId]; - string name = "J " + tasks[i].Id + " [" + toolId + "]"; - - IntervalVar intervalVar; - if (taskType == factoryData.Inspection) - { - /* We set a 0 time if the task is an inspection */ - duration = 0; - intervalVar = solver.MakeFixedDurationIntervalVar(0, horizon, duration, optional, name); - IntVar start = intervalVar.SafeStartExpr(-1).Var(); - - intervalVar.SafeStartExpr(-1).Var().SetValues(factoryData.InspectionStarts); - } - else - { - intervalVar = solver.MakeFixedDurationIntervalVar(0, horizon, duration, optional, name); - } - - taskStructures[i].Intervals.Add(intervalVar); - tool2Task[toolId].Add(intervalVar); - toolIntervalVar2TaskId[toolId].Add(i); - - /* Collecting all the bool vars, even if they are optional */ - performedOnTool.Add(intervalVar.PerformedExpr().Var()); - } - - /* Linking the bool var to a 
single integer variable: */ - /* if alternativeToolVar == t <=> performedOnTool[t] == true */ - string alternativeName = "J " + tasks[i].Id; - IntVar alternativeToolVar = solver.MakeIntVar(0, tools.Count - 1, alternativeName); - taskStructures[i].ToolVar = alternativeToolVar; - - solver.Add(solver.MakeMapDomain(alternativeToolVar, performedOnTool.ToArray())); - Debug.Assert(performedOnTool.ToArray().Length == alternativeToolVar.Max() + 1); - - selectedTool.Add(alternativeToolVar); - } - - /* Creates precedences on a work Location in order to enforce a - * fully ordered set within the same location - */ - for (int d = 0; d < location2Task.Length; d++) - { - for (int i = 0; i < location2Task[d].Length - 1; i++) - { - TaskAlternative task1 = location2Task[d][i]; - TaskAlternative task2 = location2Task[d][i + 1]; - /* task1 must end before task2 starts */ - /* Adding precedence for each possible alternative pair */ - for (int t1 = 0; t1 < task1.Intervals.Count(); t1++) - { - IntervalVar task1Alternative = task1.Intervals[t1]; - for (int t2 = 0; t2 < task2.Intervals.Count(); t2++) - { - IntervalVar task2Alternative = task2.Intervals[t2]; - Constraint precedence = - solver.MakeIntervalVarRelation(task2Alternative, Solver.STARTS_AFTER_END, task1Alternative); - solver.Add(precedence); - } - } - } - } - - /* Adds disjunctive constraints on unary resources, and creates - * sequence variables. 
*/ - for (int t = 0; t < factoryData.NbTools; t++) - { - string name = "Tool " + t; - - if (!factoryData.Tools[t].CanPerformTaskType(factoryData.Inspection)) - { - DisjunctiveConstraint ct = solver.MakeDisjunctiveConstraint(tool2Task[t].ToArray(), name); - solver.Add(ct); - allToolSequences[t] = ct.SequenceVar(); - } - PostTransitionTimeConstraints(t, true); - } - - /* Collecting all tasks end for makespan objective function */ - List intervalEnds = new List(); - for (int i = 0; i < tasks.Length; i++) - foreach (IntervalVar var in taskStructures[i].Intervals) - intervalEnds.Add(var.SafeEndExpr(-1).Var()); - - /* Objective: minimize the makespan (maximum end times of all tasks) */ - makespan = solver.MakeMax(intervalEnds.ToArray()).Var(); - objective = solver.MakeMinimize(makespan, 1); - } - - private void Search() - { - int seed = 2; // This is a good seed to show the crash - - /* Assigning first tools */ - DecisionBuilder myToolAssignmentPhase = new RandomSelectToolHeuristic(this, seed); - - /* Ranking of the tools */ - DecisionBuilder sequencingPhase = solver.MakePhase(allToolSequences, Solver.SEQUENCE_DEFAULT); - - /* Then fixing time of tasks as early as possible */ - DecisionBuilder timingPhase = solver.MakePhase(makespan, Solver.CHOOSE_FIRST_UNBOUND, Solver.ASSIGN_MIN_VALUE); - - /* Overall phase */ - DecisionBuilder mainPhase = solver.Compose(myToolAssignmentPhase, sequencingPhase, timingPhase); - - /* Logging */ - const int logFrequency = 1000000; - SearchMonitor searchLog = solver.MakeSearchLog(logFrequency, objective); - - /* Restarts */ - SearchMonitor searchRestart = solver.MakeLubyRestart(100); - - /* Search Limit in ms */ - SearchLimit limit = solver.MakeTimeLimit(180 * 1000); - - /* Collecting best solution */ - SolutionCollector collector = solver.MakeLastSolutionCollector(); - collector.AddObjective(makespan); - - // collector.Add( pile.ToArray() ); - solver.NewSearch(mainPhase, searchLog, searchRestart, objective, limit); - while 
(solver.NextSolution()) - { - Console.WriteLine("MAKESPAN: " + makespan.Value()); - } - } - - public void Solve() - { - Init(); - Model(); - Search(); - } -} - -public class Issue33Test -{ - public static void FactorySchedulingTest() - { - FactoryScheduling scheduling = new FactoryScheduling(new SmallSyntheticData().FetchData()); - scheduling.Solve(); - } - static void Main() - { - FactorySchedulingTest(); - } -} diff --git a/examples/tests/issue4.py b/examples/tests/issue4.py deleted file mode 100755 index 870328ad272..00000000000 --- a/examples/tests/issue4.py +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env python3 -from ortools.constraint_solver import pywrapcp - - -def main(): - solver = pywrapcp.Solver("time limit test") - n = 10 - x = [solver.IntVar(1, n, "x[%i]" % i) for i in range(n)] - solver.Add(solver.AllDifferent(x, True)) - - solution = solver.Assignment() - solution.Add(x) - - db = solver.Phase(x, - solver.CHOOSE_FIRST_UNBOUND, - solver.ASSIGN_MIN_VALUE) - - time_limit = 2000 - branch_limit = 100000000 - failures_limit = 100000000 - solutions_limit = 10000000 - limits = ( - solver.Limit( - time_limit, branch_limit, failures_limit, solutions_limit, True)) - - search_log = solver.SearchLog(1000) - - solver.NewSearch(db, [limits, search_log]) - num_solutions = 0 - while solver.NextSolution(): - print("x:", [x[i].Value() for i in range(n)]) - num_solutions += 1 - solver.EndSearch() - - print("num_solutions:", num_solutions) - print("failures:", solver.Failures()) - print("branches:", solver.Branches()) - print("wall_time:", solver.WallTime()) - - -if __name__ == "__main__": - main() diff --git a/examples/tests/issue46.py b/examples/tests/issue46.py deleted file mode 100755 index 338cb72f373..00000000000 --- a/examples/tests/issue46.py +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2010-2025 Google LLC -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Code for issue 46 in or-tools.""" - -from ortools.constraint_solver import pywrapcp - - -class AssignToStartMin(pywrapcp.PyDecisionBuilder): - def __init__(self, intervals): - pywrapcp.PyDecisionBuilder.__init__(self) - self.__intervals = intervals - - def Next(self, solver): - for interval in self.__intervals: - interval.SetStartMax(interval.StartMin()) - return None - - def DebugString(self): - return 'CustomDecisionBuilder' - - -def NoSequence(): - print('NoSequence') - solver = pywrapcp.Solver('Ordo') - tasks = [] - [ - tasks.append( - solver.FixedDurationIntervalVar(0, 25, 5, False, 'Tasks%i' % i)) - for i in range(3) - ] - print(tasks) - disj = solver.DisjunctiveConstraint(tasks, 'Disjunctive') - solver.Add(disj) - collector = solver.AllSolutionCollector() - collector.Add(tasks) - intervalPhase = solver.Phase(tasks, solver.INTERVAL_DEFAULT) - solver.Solve(intervalPhase, [collector]) - print(collector.SolutionCount()) - for i in range(collector.SolutionCount()): - print("Solution ", i) - print(collector.ObjectiveValue(i)) - print([collector.StartValue(i, tasks[j]) for j in range(3)]) - print([collector.EndValue(i, tasks[j]) for j in range(3)]) - - -def Sequence(): - print('Sequence') - solver = pywrapcp.Solver('Ordo') - tasks = [] - [ - tasks.append( - solver.FixedDurationIntervalVar(0, 25, 5, False, 'Tasks%i' % i)) - for i in range(3) - ] - print(tasks) - disj = solver.DisjunctiveConstraint(tasks, 'Disjunctive') - solver.Add(disj) - sequence = [] - sequence.append(disj.SequenceVar()) - sequence[0].RankFirst(0) - collector = 
solver.AllSolutionCollector() - collector.Add(sequence) - collector.Add(tasks) - sequencePhase = solver.Phase(sequence, solver.SEQUENCE_DEFAULT) - intervalPhase = AssignToStartMin(tasks) - # intervalPhase = solver.Phase(tasks, solver.INTERVAL_DEFAULT) - mainPhase = solver.Compose([sequencePhase, intervalPhase]) - solver.Solve(mainPhase, [collector]) - print(collector.SolutionCount()) - for i in range(collector.SolutionCount()): - print("Solution ", i) - print(collector.ObjectiveValue(i)) - print([collector.StartValue(i, tasks[j]) for j in range(3)]) - print([collector.EndValue(i, tasks[j]) for j in range(3)]) - - -def main(): - NoSequence() - Sequence() - - -if __name__ == '__main__': - main() diff --git a/examples/tests/issue5.py b/examples/tests/issue5.py deleted file mode 100755 index d12a3e97c3c..00000000000 --- a/examples/tests/issue5.py +++ /dev/null @@ -1,180 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2010 Hakan Kjellerstrand hakank@bonetmail.com -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -''' - A programming puzzle from Einav in Google CP Solver. - - From - 'A programming puzzle from Einav' - http://gcanyon.wordpress.com/2009/10/28/a-programming-puzzle-from-einav/ - - My friend Einav gave me this programming puzzle to work on. Given - this array of positive and negative numbers: - 33 30 -10 -6 18 7 -11 -23 6 - ... - -25 4 16 30 33 -23 -4 4 -23 - - You can flip the sign of entire rows and columns, as many of them - as you like. 
The goal is to make all the rows and columns sum to positive - numbers (or zero), and then to find the solution (there are more than one) - that has the smallest overall sum. So for example, for this array: - 33 30 -10 - -16 19 9 - -17 -12 -14 - You could flip the sign for the bottom row to get this array: - 33 30 -10 - -16 19 9 - 17 12 14 - Now all the rows and columns have positive sums, and the overall total is - 108. - But you could instead flip the second and third columns, and the second - row, to get this array: - 33 -30 10 - 16 19 9 - -17 12 14 - All the rows and columns still total positive, and the overall sum is just - 66. So this solution is better (I don't know if it's the best) - A pure brute force solution would have to try over 30 billion solutions. - I wrote code to solve this in J. I'll post that separately. - - Compare with the following models: - * MiniZinc http://www.hakank.org/minizinc/einav_puzzle.mzn - * SICStus: http://hakank.org/sicstus/einav_puzzle.pl - - - This model was created by Hakan Kjellerstrand (hakank@bonetmail.com) - Also see my other Google CP Solver models: - http://www.hakank.org/google_or_tools/ -''' - -from ortools.constraint_solver import pywrapcp - - -def main(): - # Create the solver. 
- solver = pywrapcp.Solver('Einav puzzle') - - # - # data - # - - # small problem -# data = [ -# [ 33, 30, -10], -# [-16, 19, 9], -# [-17, -12, -14] -# ] - - data = [[33, 30, 10, -6, 18, -7, -11, 23, -6], - [16, -19, 9, -26, -8, -19, -8, -21, -14], - [17, 12, -14, 31, -30, 13, -13, 19, 16], - [-6, -11, 1, 17, -12, -4, -7, 14, -21], - [18, -31, 34, -22, 17, -19, 20, 24, 6], - [33, -18, 17, -15, 31, -5, 3, 27, -3], - [-18, -20, -18, 31, 6, 4, -2, -12, 24], - [27, 14, 4, -29, -3, 5, -29, 8, -12], - [-15, -7, -23, 23, -9, -8, 6, 8, -12], - [33, -23, -19, -4, -8, -7, 11, -12, 31], - [-20, 19, -15, -30, 11, 32, 7, 14, -5], - [-23, 18, -32, -2, -31, -7, 8, 24, 16], - [32, -4, -10, -14, -6, -1, 0, 23, 23], - [25, 0, -23, 22, 12, 28, -27, 15, 4], - [-30, -13, -16, -3, -3, -32, -3, 27, -31], - [22, 1, 26, 4, -2, -13, 26, 17, 14], - [-9, -18, 3, -20, -27, -32, -11, 27, 13], - [-17, 33, -7, 19, -32, 13, -31, -2, -24], - [-31, 27, -31, -29, 15, 2, 29, -15, 33], - [-18, -23, 15, 28, 0, 30, -4, 12, -32], - [-3, 34, 27, -25, -18, 26, 1, 34, 26], - [-21, -31, -10, -13, -30, -17, -12, -26, 31], - [23, -31, -19, 21, -17, -10, 2, -23, 23], - [-3, 6, 0, -3, -32, 0, -10, -25, 14], - [-19, 9, 14, -27, 20, 15, -5, -27, 18], - [11, -6, 24, 7, -17, 26, 20, -31, -25], - [-25, 4, -16, 30, 33, 23, -4, -4, 23]] - - rows = len(data) - cols = len(data[0]) - - # - # variables - # - x = {} - for i in range(rows): - for j in range(cols): - x[i, j] = solver.IntVar(-100, 100, 'x[%i,%i]' % (i, j)) - - row_signs = [solver.IntVar([-1, 1], 'row_signs(%i)' % i) - for i in range(rows)] - col_signs = [solver.IntVar([-1, 1], 'col_signs(%i)' % j) - for j in range(cols)] - - # - # constraints - # - for i in range(rows): - for j in range(cols): - solver.Add(x[i, j] == data[i][j] * row_signs[i] * col_signs[j]) - - total_sum = solver.Sum([x[i, j] for i in range(rows) for j in range(cols)]) - - # row sums - row_sums = [solver.Sum([x[i, j] for j in range(cols)]).Var() - for i in range(rows)] - # >= 0 - for i in 
range(rows): - row_sums[i].SetMin(0) - - # column sums - col_sums = [solver.Sum([x[i, j] for i in range(rows)]).Var() - for j in range(cols)] - for j in range(cols): - col_sums[j].SetMin(0) - - # objective - objective = solver.Minimize(total_sum, 1) - - # - # search and result - # - db = solver.Phase(col_signs + row_signs, - solver.CHOOSE_FIRST_UNBOUND, - solver.ASSIGN_MIN_VALUE) - - search_log = solver.SearchLog(100000, total_sum) - solver.NewSearch(db, [objective, search_log]) - - num_solutions = 0 - while solver.NextSolution(): - num_solutions += 1 - print('Sum =', objective.Best()) - print('row_sums:', [row_sums[i].Value() for i in range(rows)]) - print('col_sums:', [col_sums[j].Value() for j in range(cols)]) - for i in range(rows): - for j in range(cols): - print(x[i, j].Value(),', ') - print('\n') - print('\n') - - solver.EndSearch() - - print('num_solutions:', num_solutions) - print('failures:', solver.Failures()) - print('branches:', solver.Branches()) - print('wall_time:', solver.WallTime(), 'ms') - - -if __name__ == '__main__': - main() diff --git a/examples/tests/issue57.cc b/examples/tests/issue57.cc deleted file mode 100644 index ffd0c0ec55e..00000000000 --- a/examples/tests/issue57.cc +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2011-2014 Google -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "absl/flags/parse.h" -#include "ortools/base/hash.h" -#include "ortools/base/map_util.h" -#include "ortools/base/stl_util.h" -#include "ortools/constraint_solver/constraint_solver.h" -#include "ortools/constraint_solver/constraint_solveri.h" -#include "ortools/util/string_array.h" - -namespace operations_research { -void OverflowTest() { - // It works on mac-clang, but fails with gcc. - Solver solver("OverflowTest"); - IntVar* const x = solver.MakeIntVar(kint64min, kint64max, "x"); - IntVar* const y = solver.MakeIntVar(kint64min, kint64max, "y"); - IntExpr* const z = solver.MakeDifference(x, y); - LOG(INFO) << z->DebugString(); - Constraint* const ct = solver.MakeGreaterOrEqual(z, 10); - LOG(INFO) << ct->DebugString(); -} -} // namespace operations_research - -int main(int argc, char** argv) { - absl::ParseCommandLine(argc, argv); - operations_research::OverflowTest(); - return 0; -} diff --git a/examples/tests/issue62.py b/examples/tests/issue62.py deleted file mode 100755 index afb6e0b2caa..00000000000 --- a/examples/tests/issue62.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python3 -from ortools.constraint_solver import pywrapcp - - -def main(): - solver = pywrapcp.Solver('Ordo') - tasks = [solver.FixedDurationIntervalVar(0, 25, 5, False, 'Tasks%i' %i) - for i in range(3)] - print(tasks) - disj = solver.DisjunctiveConstraint(tasks, 'Disjunctive') - sequence = [] - sequence.append(disj.SequenceVar()) - solver.Add(disj) - collector = solver.AllSolutionCollector() - collector.Add(sequence) - collector.Add(tasks) - sequencePhase = solver.Phase(sequence, solver.SEQUENCE_DEFAULT) - intervalPhase = solver.Phase(tasks, solver.INTERVAL_DEFAULT) - mainPhase = solver.Compose([sequencePhase, intervalPhase]) - solver.Solve(mainPhase, [ collector]) - print(collector.SolutionCount()) - for i in range(collector.SolutionCount()): - print("Solution " , i) - print([collector.StartValue(i, tasks[j]) for j in range(3)]) - print([collector.EndValue(i, tasks[j]) for j 
in range(3)]) - - -if __name__ == '__main__': - main() diff --git a/examples/tests/lp_test.cc b/examples/tests/lp_test.cc deleted file mode 100644 index c15d3a0db3d..00000000000 --- a/examples/tests/lp_test.cc +++ /dev/null @@ -1,232 +0,0 @@ -// Copyright 2010-2025 Google LLC -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Linear programming example that shows how to use the API. - -#include "ortools/base/init_google.h" -#include "ortools/base/logging.h" -#include "ortools/linear_solver/linear_solver.h" -#include "ortools/linear_solver/linear_solver.pb.h" - -namespace operations_research { -void SolveAndPrint(MPSolver& solver, std::vector variables, - std::vector constraints, bool is_continuous) { - LOG(INFO) << "Number of variables = " << solver.NumVariables(); - LOG(INFO) << "Number of constraints = " << solver.NumConstraints(); - - const MPSolver::ResultStatus result_status = solver.Solve(); - // Check that the problem has an optimal solution. 
- if (result_status != MPSolver::OPTIMAL) { - LOG(FATAL) << "The problem does not have an optimal solution!"; - } - LOG(INFO) << "Solution:"; - for (const auto& i : variables) { - LOG(INFO) << i->name() << " = " << i->solution_value(); - } - LOG(INFO) << "Optimal objective value = " << solver.Objective().Value(); - LOG(INFO) << ""; - LOG(INFO) << "Advanced usage:"; - LOG(INFO) << "Problem solved in " << solver.wall_time() << " milliseconds"; - if (solver.ProblemType() != MPSolver::BOP_INTEGER_PROGRAMMING) - LOG(INFO) << "Problem solved in " << solver.iterations() << " iterations"; - if (is_continuous) { - for (const auto& i : variables) { - LOG(INFO) << i->name() << ": reduced cost " << i->reduced_cost(); - } - - const std::vector activities = solver.ComputeConstraintActivities(); - for (const auto& i : constraints) { - LOG(INFO) << i->name() << ": dual value = " << i->dual_value() - << " activity = " << activities[i->index()]; - } - } -} - -void RunLinearProgrammingExample( - MPSolver::OptimizationProblemType optimization_problem_type) { - MPSolver solver("LinearProgrammingExample", optimization_problem_type); - const double infinity = solver.infinity(); - // x and y are continuous non-negative variables. - MPVariable* const x = solver.MakeNumVar(0.0, infinity, "x"); - MPVariable* const y = solver.MakeNumVar(0.0, infinity, "y"); - - // Objectif function: Maximize 3x + 4y. - MPObjective* const objective = solver.MutableObjective(); - objective->SetCoefficient(x, 3); - objective->SetCoefficient(y, 4); - objective->SetMaximization(); - - // x + 2y <= 14. - MPConstraint* const c0 = solver.MakeRowConstraint(-infinity, 14.0, "c0"); - c0->SetCoefficient(x, 1); - c0->SetCoefficient(y, 2); - - // 3x - y >= 0. - MPConstraint* const c1 = solver.MakeRowConstraint(0.0, infinity, "c1"); - c1->SetCoefficient(x, 3); - c1->SetCoefficient(y, -1); - - // x - y <= 2. 
- MPConstraint* const c2 = solver.MakeRowConstraint(-infinity, 2.0, "c2"); - c2->SetCoefficient(x, 1); - c2->SetCoefficient(y, -1); - - SolveAndPrint(solver, {x, y}, {c0, c1, c2}, true); -} - -void RunMixedIntegerProgrammingExample( - MPSolver::OptimizationProblemType optimization_problem_type) { - MPSolver solver("MixedIntegerProgrammingExample", optimization_problem_type); - const double infinity = solver.infinity(); - // x and y are integers non-negative variables. - MPVariable* const x = solver.MakeIntVar(0.0, infinity, "x"); - MPVariable* const y = solver.MakeIntVar(0.0, infinity, "y"); - - // Objective function: Maximize x + 10 * y. - MPObjective* const objective = solver.MutableObjective(); - objective->SetCoefficient(x, 1); - objective->SetCoefficient(y, 10); - objective->SetMaximization(); - - // x + 7 * y <= 17.5 - MPConstraint* const c0 = solver.MakeRowConstraint(-infinity, 17.5, "c0"); - c0->SetCoefficient(x, 1); - c0->SetCoefficient(y, 7); - - // x <= 3.5 - MPConstraint* const c1 = solver.MakeRowConstraint(-infinity, 3.5, "c1"); - c1->SetCoefficient(x, 1); - c1->SetCoefficient(y, 0); - - SolveAndPrint(solver, {x, y}, {c0, c1}, false); -} - -void RunBooleanProgrammingExample( - MPSolver::OptimizationProblemType optimization_problem_type) { - MPSolver solver("BooleanProgrammingExample", optimization_problem_type); - const double infinity = solver.infinity(); - // x and y are boolean variables. - MPVariable* const x = solver.MakeBoolVar("x"); - MPVariable* const y = solver.MakeBoolVar("y"); - - // Objective function: Minimize 2 * x + y. - MPObjective* const objective = solver.MutableObjective(); - objective->SetCoefficient(x, 2); - objective->SetCoefficient(y, 1); - objective->SetMinimization(); - - // 1 <= x + 2 * y <= 3. 
- MPConstraint* const c0 = solver.MakeRowConstraint(1, 3, "c0"); - c0->SetCoefficient(x, 1); - c0->SetCoefficient(y, 2); - - SolveAndPrint(solver, {x, y}, {c0}, false); -} - -void MutableObjectiveCrash() { - LOG(INFO) << "MutableObjectiveCrash"; - // Create the linear solver with the GLOP backend. - std::unique_ptr solver(MPSolver::CreateSolver("GLOP")); - - // Create the variables x and y. - MPVariable* const x = solver->MakeNumVar(0.0, 1, "x"); - MPVariable* const y = solver->MakeNumVar(0.0, 2, "y"); - - LOG(INFO) << "Number of variables = " << solver->NumVariables(); - - // Create a linear constraint, 0 <= x + y <= 2. - MPConstraint* const ct = solver->MakeRowConstraint(0.0, 2.0, "ct"); - ct->SetCoefficient(x, 1); - ct->SetCoefficient(y, 1); - - LOG(INFO) << "Number of constraints = " << solver->NumConstraints(); - - // Create the objective function, 3 * x + y. - MPObjective* const objective = solver->MutableObjective(); - objective->SetCoefficient(x, 3); - objective->SetCoefficient(y, 1); - objective->SetMaximization(); - - solver->Solve(); - - LOG(INFO) << "Solution:" << std::endl; - LOG(INFO) << "Objective value = " << objective->Value(); - LOG(INFO) << "x = " << x->solution_value(); - LOG(INFO) << "y = " << y->solution_value(); -} - -void RunAllExamples() { - // Linear programming problems -#if defined(USE_CLP) - LOG(INFO) << "---- Linear programming example with CLP ----"; - RunLinearProgrammingExample(MPSolver::CLP_LINEAR_PROGRAMMING); -#endif // USE_CLP -#if defined(USE_GLPK) - LOG(INFO) << "---- Linear programming example with GLPK ----"; - RunLinearProgrammingExample(MPSolver::GLPK_LINEAR_PROGRAMMING); -#endif // USE_GLPK -#if defined(USE_GLOP) - LOG(INFO) << "---- Linear programming example with GLOP ----"; - RunLinearProgrammingExample(MPSolver::GLOP_LINEAR_PROGRAMMING); -#endif // USE_GLOP -#if defined(USE_PDLP) - LOG(INFO) << "---- Linear programming example with PDLP ----"; - RunLinearProgrammingExample(MPSolver::PDLP_LINEAR_PROGRAMMING); -#endif 
// USE_PDLP -#if defined(USE_GUROBI) - LOG(INFO) << "---- Linear programming example with Gurobi ----"; - RunLinearProgrammingExample(MPSolver::GUROBI_LINEAR_PROGRAMMING); -#endif // USE_GUROBI -#if defined(USE_CPLEX) - LOG(INFO) << "---- Linear programming example with CPLEX ----"; - RunLinearProgrammingExample(MPSolver::CPLEX_LINEAR_PROGRAMMING); -#endif // USE_CPLEX - - // Integer programming problems -#if defined(USE_SCIP) - LOG(INFO) << "---- Mixed Integer programming example with SCIP ----"; - RunMixedIntegerProgrammingExample(MPSolver::SCIP_MIXED_INTEGER_PROGRAMMING); -#endif // USE_SCIP -#if defined(USE_GLPK) - LOG(INFO) << "---- Mixed Integer programming example with GLPK ----"; - RunMixedIntegerProgrammingExample(MPSolver::GLPK_MIXED_INTEGER_PROGRAMMING); -#endif // USE_GLPK -#if defined(USE_CBC) - LOG(INFO) << "---- Mixed Integer programming example with CBC ----"; - RunMixedIntegerProgrammingExample(MPSolver::CBC_MIXED_INTEGER_PROGRAMMING); -#endif // USE_CBC -#if defined(USE_GUROBI) - LOG(INFO) << "---- Mixed Integer programming example with GUROBI ----"; - RunMixedIntegerProgrammingExample(MPSolver::GUROBI_MIXED_INTEGER_PROGRAMMING); -#endif // USE_GUROBI -#if defined(USE_CPLEX) - LOG(INFO) << "---- Mixed Integer programming example with CPLEX ----"; - RunMixedIntegerProgrammingExample(MPSolver::CPLEX_MIXED_INTEGER_PROGRAMMING); -#endif // USE_CPLEX - - // Boolean integer programming problems -#if defined(USE_BOP) - LOG(INFO) << "---- Boolean Integer programming example with BOP ----"; - RunBooleanProgrammingExample(MPSolver::BOP_INTEGER_PROGRAMMING); -#endif // USE_BOP - - MutableObjectiveCrash(); -} -} // namespace operations_research - -int main(int argc, char** argv) { - google::InitGoogleLogging(argv[0]); - absl::SetStderrThreshold(absl::LogSeverityAtLeast::kInfo); - operations_research::RunAllExamples(); - return 0; -} diff --git a/examples/tests/min_max_test.cc b/examples/tests/min_max_test.cc deleted file mode 100644 index 
2c905adecf9..00000000000 --- a/examples/tests/min_max_test.cc +++ /dev/null @@ -1,407 +0,0 @@ -// Copyright 2011-2012 Google -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "absl/flags/parse.h" -#include "ortools/base/hash.h" -#include "ortools/base/logging.h" -#include "ortools/base/map_util.h" -#include "ortools/base/stl_util.h" -#include "ortools/constraint_solver/constraint_solver.h" -#include "ortools/constraint_solver/constraint_solveri.h" -#include "ortools/util/string_array.h" - -namespace operations_research { - -class NullDemon : public Demon { - virtual void Run(Solver* const s) {} -}; - -// ----- Min Array Test ----- - -class MinArrayCtTestSetToMin : public DecisionBuilder { - public: - MinArrayCtTestSetToMin(IntExpr* const min, const std::vector& vars) - : min_(min), vars_(vars) {} - virtual ~MinArrayCtTestSetToMin() {} - - virtual Decision* Next(Solver* const s) { - min_->SetMax(0); - CHECK(vars_[0]->Bound()) << "var not bound"; - CHECK_EQ(0, vars_[0]->Min()) << "var not bound to the correct value"; - return NULL; - } - - private: - IntExpr* const min_; - const std::vector& vars_; -}; - -class MinArrayCtTestSetToMax : public DecisionBuilder { - public: - MinArrayCtTestSetToMax(IntExpr* const min, const std::vector& vars) - : min_(min), vars_(vars) {} - virtual ~MinArrayCtTestSetToMax() {} - - virtual Decision* Next(Solver* const s) { - min_->SetMin(5); - CHECK(vars_[0]->Bound()) << "var not bound"; - CHECK_EQ(5, vars_[0]->Min()) << 
"var not bound to the correct value"; - return NULL; - } - - private: - IntExpr* const min_; - const std::vector& vars_; -}; - -class MinArrayCtTestSetOneVar : public DecisionBuilder { - public: - MinArrayCtTestSetOneVar(IntExpr* const min, const std::vector& vars) - : min_(min), vars_(vars) {} - virtual ~MinArrayCtTestSetOneVar() {} - - virtual Decision* Next(Solver* const s) { - vars_[0]->SetValue(5); - CHECK_EQ(1, min_->Min()) << "bad computed min in min_array"; - CHECK_EQ(5, min_->Max()) << "bad computed max in min_array"; - return NULL; - } - - private: - IntExpr* const min_; - const std::vector& vars_; -}; - -class MinArrayCtTest { - public: - void SetUp() { - solver_.reset(new Solver("MinConstraintTest")); - vars_.clear(); - vars_.resize(10); - for (int i = 0; i < 10; ++i) { - vars_[i] = solver_->MakeIntVar(i, 2 * i + 5); - } - min_ = solver_->MakeMin(vars_)->Var(); - } - - std::unique_ptr solver_; - std::vector vars_; - IntExpr* min_; - - void TestAlternateCtor() { - SetUp(); - std::vector vars; - for (int i = 0; i < 4; ++i) { - vars.push_back(solver_->MakeIntVar(i, 2 * i)); - } - IntExpr* emin = solver_->MakeMin(vars); - CHECK(!emin->DebugString().empty()); - } - - void TestBounds() { - SetUp(); - CHECK_EQ(0LL, min_->Min()) << "bad computed min in min_array"; - CHECK_EQ(5, min_->Max()) << "bad computed min in min_array"; - } - - void TestSetToMin() { - SetUp(); - solver_->Solve(solver_->RevAlloc(new MinArrayCtTestSetToMin(min_, vars_))); - } - - void TestSetToMax() { - SetUp(); - solver_->Solve(solver_->RevAlloc(new MinArrayCtTestSetToMax(min_, vars_))); - } - - void TestSetOneVar() { - SetUp(); - solver_->Solve(solver_->RevAlloc(new MinArrayCtTestSetOneVar(min_, vars_))); - } - - void TestWhen() { - SetUp(); - Demon* const d = solver_->RevAlloc(new NullDemon()); - min_->WhenRange(d); - } - - void TestBigMinVector() { - SetUp(); - std::vector vars; - for (int i = 0; i < 1001; ++i) { - vars.push_back( - solver_->MakeIntVar(i, 3000 - i, 
absl::StrFormat("x%d", i))); - } - IntExpr* expr = solver_->MakeMin(vars); - CHECK_EQ(2000, expr->Max()); - CHECK_EQ(0, expr->Min()); - } - - void TestBigMinArray() { - SetUp(); - std::vector vars; - vars.reserve(1001); - for (int i = 0; i < 1001; ++i) { - vars.push_back( - solver_->MakeIntVar(i, 3000 - i, absl::StrFormat("x%d", i))); - } - IntExpr* expr = solver_->MakeMin(vars); - CHECK_EQ(2000, expr->Max()); - CHECK_EQ(0, expr->Min()); - } - - void TestSmallMinVector() { - SetUp(); - std::vector vars; - IntExpr* expr = solver_->MakeMin(vars); - CHECK_EQ(kint64max, expr->Min()); - CHECK_EQ(kint64max, expr->Max()); - vars.push_back(solver_->MakeIntVar(1, 10, "x0")); - expr = solver_->MakeMin(vars); - CHECK_EQ(1, expr->Min()); - CHECK_EQ(10, expr->Max()); - vars.push_back(solver_->MakeIntVar(2, 9, "x1")); - expr = solver_->MakeMin(vars); - CHECK_EQ(1, expr->Min()); - CHECK_EQ(9, expr->Max()); - vars.push_back(solver_->MakeIntVar(3, 8, "x2")); - expr = solver_->MakeMin(vars); - CHECK_EQ(1, expr->Min()); - CHECK_EQ(8, expr->Max()); - } - - void TestSmallMinArray() { - SetUp(); - std::vector vars; - vars.reserve(3); - IntExpr* expr = solver_->MakeMin(vars); - CHECK_EQ(kint64max, expr->Min()); - CHECK_EQ(kint64max, expr->Max()); - vars.push_back(solver_->MakeIntVar(1, 10, absl::StrFormat("x%d", 0))); - expr = solver_->MakeMin(vars); - CHECK_EQ(1, expr->Min()); - CHECK_EQ(10, expr->Max()); - vars.push_back(solver_->MakeIntVar(1, 9, absl::StrFormat("x%d", 1))); - expr = solver_->MakeMin(vars); - CHECK_EQ(1, expr->Min()); - CHECK_EQ(9, expr->Max()); - vars.push_back(solver_->MakeIntVar(1, 8, absl::StrFormat("x%d", 2))); - expr = solver_->MakeMin(vars); - CHECK_EQ(1, expr->Min()); - CHECK_EQ(8, expr->Max()); - } -}; - -// ----- Max Array Test ----- - -class MaxArrayCtTestSetToMin : public DecisionBuilder { - public: - MaxArrayCtTestSetToMin(IntExpr* const max, const std::vector& vars) - : max_(max), vars_(vars) {} - virtual ~MaxArrayCtTestSetToMin() {} - - virtual Decision* 
Next(Solver* const s) { - max_->SetMin(23); - CHECK(vars_[9]->Bound()) << "var not bound"; - CHECK_EQ(23, vars_[9]->Min()) << "var not bound to the correct value"; - return NULL; - } - - private: - IntExpr* const max_; - const std::vector& vars_; -}; - -class MaxArrayCtTestSetToMax : public DecisionBuilder { - public: - MaxArrayCtTestSetToMax(IntExpr* const max, const std::vector& vars) - : max_(max), vars_(vars) {} - virtual ~MaxArrayCtTestSetToMax() {} - - virtual Decision* Next(Solver* const s) { - max_->SetMax(9); - CHECK(vars_[9]->Bound()) << "var not bound"; - CHECK_EQ(9, vars_[9]->Min()) << "var not bound to the correct value"; - return NULL; - } - - private: - IntExpr* const max_; - const std::vector& vars_; -}; - -class MaxArrayCtTestSetOneVar : public DecisionBuilder { - public: - MaxArrayCtTestSetOneVar(IntExpr* const max, const std::vector& vars) - : max_(max), vars_(vars) {} - virtual ~MaxArrayCtTestSetOneVar() {} - - virtual Decision* Next(Solver* const s) { - vars_[9]->SetValue(18); - CHECK_EQ(18, max_->Min()) << "bad computed min in max_array"; - CHECK_EQ(21, max_->Max()) << "bad computed max in max_array"; - return NULL; - } - - private: - IntExpr* const max_; - const std::vector& vars_; -}; - -class MaxArrayCtTest { - public: - void SetUp() { - solver_.reset(new Solver("MaxArrayCtTest")); - vars_.resize(10); - for (int i = 0; i < 10; ++i) { - vars_[i] = solver_->MakeIntVar(i, 2 * i + 5); - } - max_ = solver_->MakeMax(vars_)->Var(); - } - - std::unique_ptr solver_; - std::vector vars_; - IntExpr* max_; - - void TestAlternateCtor() { - SetUp(); - std::vector vars; - for (int i = 0; i < 4; ++i) { - vars.push_back(solver_->MakeIntVar(i, 2 * i)); - } - IntExpr* emax = solver_->MakeMax(vars); - CHECK(!emax->DebugString().empty()); - } - - void TestBounds() { - SetUp(); - CHECK_EQ(9, max_->Min()) << "bad computed min in max_array"; - CHECK_EQ(23, max_->Max()) << "bad computed min in max_array"; - } - - void TestSetToMin() { - SetUp(); - 
solver_->Solve(solver_->RevAlloc(new MaxArrayCtTestSetToMin(max_, vars_))); - } - - void TestSetToMax() { - SetUp(); - solver_->Solve(solver_->RevAlloc(new MaxArrayCtTestSetToMax(max_, vars_))); - } - - void TestSetOneVar() { - SetUp(); - solver_->Solve(solver_->RevAlloc(new MaxArrayCtTestSetOneVar(max_, vars_))); - } - - void TestWhen() { - SetUp(); - Demon* d = solver_->RevAlloc(new NullDemon()); - max_->WhenRange(d); - } - - void TestBigMaxVector() { - SetUp(); - std::vector vars; - vars.reserve(1001); - for (int i = 0; i < 1001; ++i) { - vars.push_back( - solver_->MakeIntVar(i, 3000 - i, absl::StrFormat("x%d", i))); - } - IntExpr* expr = solver_->MakeMax(vars); - CHECK_EQ(3000, expr->Max()); - CHECK_EQ(1000, expr->Min()); - } - - void TestBigMaxArray() { - SetUp(); - std::vector vars; - for (int i = 0; i < 1001; ++i) { - vars.push_back( - solver_->MakeIntVar(i, 3000 - i, absl::StrFormat("x%d", i))); - } - IntExpr* expr = solver_->MakeMax(vars); - CHECK_EQ(3000, expr->Max()); - CHECK_EQ(1000, expr->Min()); - } - - void TestSmallMaxVector() { - SetUp(); - std::vector vars; - IntExpr* expr = solver_->MakeMax(vars); - CHECK_EQ(kint64min, expr->Min()); - CHECK_EQ(kint64min, expr->Max()); - vars.push_back(solver_->MakeIntVar(1, 10, "x0")); - expr = solver_->MakeMax(vars); - CHECK_EQ(1, expr->Min()); - CHECK_EQ(10, expr->Max()); - vars.push_back(solver_->MakeIntVar(2, 9, "x1")); - expr = solver_->MakeMax(vars); - CHECK_EQ(2, expr->Min()); - CHECK_EQ(10, expr->Max()); - vars.push_back(solver_->MakeIntVar(3, 8, "x2")); - expr = solver_->MakeMax(vars); - CHECK_EQ(3, expr->Min()); - CHECK_EQ(10, expr->Max()); - } - - void TestSmallMaxArray() { - SetUp(); - std::vector vars; - IntExpr* expr = solver_->MakeMax(vars); - CHECK_EQ(kint64min, expr->Min()); - CHECK_EQ(kint64min, expr->Max()); - vars.push_back(solver_->MakeIntVar(1, 10, absl::StrFormat("x%d", 0))); - expr = solver_->MakeMax(vars); - CHECK_EQ(1, expr->Min()); - CHECK_EQ(10, expr->Max()); - 
vars.push_back(solver_->MakeIntVar(2, 10, absl::StrFormat("x%d", 1))); - expr = solver_->MakeMax(vars); - CHECK_EQ(2, expr->Min()); - CHECK_EQ(10, expr->Max()); - vars.push_back(solver_->MakeIntVar(3, 10, absl::StrFormat("x%d", 2))); - expr = solver_->MakeMax(vars); - CHECK_EQ(3, expr->Min()); - CHECK_EQ(10, expr->Max()); - } -}; -} // namespace operations_research - -int main(int argc, char** argv) { - absl::ParseCommandLine(argc, argv); - operations_research::MinArrayCtTest min_test; - min_test.TestAlternateCtor(); - min_test.TestBounds(); - min_test.TestSetToMin(); - min_test.TestSetToMax(); - min_test.TestSetOneVar(); - min_test.TestWhen(); - min_test.TestBigMinVector(); - min_test.TestBigMinArray(); - min_test.TestSmallMinVector(); - min_test.TestSmallMinArray(); - - operations_research::MaxArrayCtTest max_test; - max_test.TestAlternateCtor(); - max_test.TestBounds(); - max_test.TestSetToMin(); - max_test.TestSetToMax(); - max_test.TestSetOneVar(); - max_test.TestWhen(); - max_test.TestBigMaxVector(); - max_test.TestBigMaxArray(); - max_test.TestSmallMaxVector(); - max_test.TestSmallMaxArray(); - - return 0; -} diff --git a/examples/tests/remote/Makefile b/examples/tests/remote/Makefile deleted file mode 100644 index 770bb2bec95..00000000000 --- a/examples/tests/remote/Makefile +++ /dev/null @@ -1,18 +0,0 @@ -OR_TOOLS_TOP=$(CURDIR)/../../.. 
-include $(OR_TOOLS_TOP)/Makefile - -tsp.$O: tsp.cc $(OR_TOOLS_TOP)/src/constraint_solver/routing.h - $(CCC) $(CFLAGS) -c tsp.cc $(OBJ_OUT)tsp.$O - -tsp$E: $(DYNAMIC_ROUTING_DEPS) tsp.$O - $(CCC) $(CFLAGS) tsp.$O $(DYNAMIC_ROUTING_LNK) $(DYNAMIC_LD_FLAGS) $(EXE_OUT)tsp$E - -linear_programming.$O: linear_programming.cc $(OR_TOOLS_TOP)/src/constraint_solver/routing.h - $(CCC) $(CFLAGS) -c linear_programming.cc $(OBJ_OUT)linear_programming.$O - -linear_programming$E: $(DYNAMIC_LP_DEPS) linear_programming.$O - $(CCC) $(CFLAGS) linear_programming.$O $(DYNAMIC_LP_LNK) $(DYNAMIC_LD_FLAGS) $(EXE_OUT)linear_programming$E - -local_clean: - rm tsp.$O tsp$E linear_programming.$O linear_programming$E - diff --git a/examples/tests/remote/linear_programming.cc b/examples/tests/remote/linear_programming.cc deleted file mode 100644 index 21752310751..00000000000 --- a/examples/tests/remote/linear_programming.cc +++ /dev/null @@ -1,115 +0,0 @@ -// Copyright 2010-2025 Google -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// Linear programming example that shows how to use the API. 
- -#include "base/commandlineflags.h" -#include "base/logging.h" -#include "linear_solver/linear_solver.h" -#include "linear_solver/linear_solver.pb.h" - -namespace operations_research { -void RunLinearProgrammingExample( - MPSolver::OptimizationProblemType optimization_problem_type) { - MPSolver solver("LinearProgrammingExample", optimization_problem_type); - const double infinity = solver.infinity(); - // x1, x2 and x3 are continuous non-negative variables. - MPVariable* const x1 = solver.MakeNumVar(0.0, infinity, "x1"); - MPVariable* const x2 = solver.MakeNumVar(0.0, infinity, "x2"); - MPVariable* const x3 = solver.MakeNumVar(0.0, infinity, "x3"); - - // Maximize 10 * x1 + 6 * x2 + 4 * x3. - MPObjective* const objective = solver.MutableObjective(); - objective->SetCoefficient(x1, 10); - objective->SetCoefficient(x2, 6); - objective->SetCoefficient(x3, 4); - objective->SetMaximization(); - - // x1 + x2 + x3 <= 100. - MPConstraint* const c0 = solver.MakeRowConstraint(-infinity, 100.0); - c0->SetCoefficient(x1, 1); - c0->SetCoefficient(x2, 1); - c0->SetCoefficient(x3, 1); - - // 10 * x1 + 4 * x2 + 5 * x3 <= 600. - MPConstraint* const c1 = solver.MakeRowConstraint(-infinity, 600.0); - c1->SetCoefficient(x1, 10); - c1->SetCoefficient(x2, 4); - c1->SetCoefficient(x3, 5); - - // 2 * x1 + 2 * x2 + 6 * x3 <= 300. - MPConstraint* const c2 = solver.MakeRowConstraint(-infinity, 300.0); - c2->SetCoefficient(x1, 2); - c2->SetCoefficient(x2, 2); - c2->SetCoefficient(x3, 6); - - // TODO(user): Change example to show = and >= constraints. - - LOG(INFO) << "Number of variables = " << solver.NumVariables(); - LOG(INFO) << "Number of constraints = " << solver.NumConstraints(); - - const MPSolver::ResultStatus result_status = solver.Solve(); - - // Check that the problem has an optimal solution. 
- if (result_status != MPSolver::OPTIMAL) { - LOG(FATAL) << "The problem does not have an optimal solution!"; - } - - LOG(INFO) << "Problem solved in " << solver.wall_time() << " milliseconds"; - - // The objective value of the solution. - LOG(INFO) << "Optimal objective value = " << objective->Value(); - - // The value of each variable in the solution. - LOG(INFO) << "x1 = " << x1->solution_value(); - LOG(INFO) << "x2 = " << x2->solution_value(); - LOG(INFO) << "x3 = " << x3->solution_value(); - - LOG(INFO) << "Advanced usage:"; - LOG(INFO) << "Problem solved in " << solver.iterations() << " iterations"; - LOG(INFO) << "x1: reduced cost = " << x1->reduced_cost(); - LOG(INFO) << "x2: reduced cost = " << x2->reduced_cost(); - LOG(INFO) << "x3: reduced cost = " << x3->reduced_cost(); - LOG(INFO) << "c0: dual value = " << c0->dual_value() - << " activity = " << c0->activity(); - LOG(INFO) << "c1: dual value = " << c1->dual_value() - << " activity = " << c1->activity(); - LOG(INFO) << "c2: dual value = " << c2->dual_value() - << " activity = " << c2->activity(); -} - -void RunAllExamples() { -#if defined(USE_GLPK) - LOG(INFO) << "---- Linear programming example with GLPK ----"; - RunLinearProgrammingExample(MPSolver::GLPK_LINEAR_PROGRAMMING); -#endif // USE_GLPK -#if defined(USE_CLP) - LOG(INFO) << "---- Linear programming example with CLP ----"; - RunLinearProgrammingExample(MPSolver::CLP_LINEAR_PROGRAMMING); -#endif // USE_CLP -#if defined(USE_SLM) - LOG(INFO) << "---- Linear programming example with Sulum ----"; - RunLinearProgrammingExample(MPSolver::SULUM_LINEAR_PROGRAMMING); -#endif // USE_SLM -#if defined(USE_GUROBI) - LOG(INFO) << "---- Linear programming example with Gurobi ----"; - RunLinearProgrammingExample(MPSolver::GUROBI_LINEAR_PROGRAMMING); -#endif // USE_GUROBI -} -} // namespace operations_research - -int main(int argc, char** argv) { - google::ParseCommandLineFlags(&argc, &argv, true); - operations_research::RunAllExamples(); - return 0; -} diff 
--git a/examples/tests/remote/tsp.cc b/examples/tests/remote/tsp.cc deleted file mode 100644 index 7493725256a..00000000000 --- a/examples/tests/remote/tsp.cc +++ /dev/null @@ -1,164 +0,0 @@ -// Copyright 2010-2025 Google -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// Traveling Salesman Sample. -// -// This is a sample using the routing library to solve a Traveling Salesman -// Problem. -// The description of the problem can be found here: -// http://en.wikipedia.org/wiki/Travelling_salesman_problem. -// For small problems one can use the hamiltonian path library directly (cf -// graph/hamiltonian_path.h). -// The optimization engine uses local search to improve solutions, first -// solutions being generated using a cheapest addition heuristic. -// Optionally one can randomly forbid a set of random connections between nodes -// (forbidden arcs). 
- -#include "base/callback.h" -#include "base/commandlineflags.h" -#include "base/join.h" -#include "base/random.h" -#include "base/types.h" -#include "base/unique_ptr.h" -#include "constraint_solver/routing.h" - -using operations_research::ACMRandom; -using operations_research::Assignment; -using operations_research::RoutingModel; -using operations_research::scoped_ptr; -using operations_research::StrCat; - -DEFINE_int32(tsp_size, 10, "Size of Traveling Salesman Problem instance."); -DEFINE_bool(tsp_use_random_matrix, true, "Use random cost matrix."); -DEFINE_int32(tsp_random_forbidden_connections, 0, - "Number of random forbidden connections."); -DEFINE_bool(tsp_use_deterministic_random_seed, false, - "Use deterministic random seeds."); -DECLARE_string(routing_first_solution); -DECLARE_bool(routing_no_lns); - -// Random seed generator. -int32_t GetSeed() { - if (absl::GetFlag(FLAGS_tsp_use_deterministic_random_seed)) { - return ACMRandom::DeterministicSeed(); - } else { - return ACMRandom::HostnamePidTimeSeed(); - } -} - -// Cost/distance functions. - -// Sample function. -int64_t MyDistance(RoutingModel::NodeIndex from, RoutingModel::NodeIndex to) { - // Put your distance code here. - return (from + to).value(); // for instance -} - -// Random matrix. 
-class RandomMatrix { - public: - explicit RandomMatrix(int size) : size_(size) {} - void Initialize() { - matrix_.reset(new int64_t[size_ * size_]); - const int64_t kDistanceMax = 100; - ACMRandom randomizer(GetSeed()); - for (RoutingModel::NodeIndex from = RoutingModel::kFirstNode; from < size_; - ++from) { - for (RoutingModel::NodeIndex to = RoutingModel::kFirstNode; to < size_; - ++to) { - if (to != from) { - matrix_[MatrixIndex(from, to)] = randomizer.Uniform(kDistanceMax); - } else { - matrix_[MatrixIndex(from, to)] = 0LL; - } - } - } - } - int64_t Distance(RoutingModel::NodeIndex from, - RoutingModel::NodeIndex to) const { - return matrix_[MatrixIndex(from, to)]; - } - - private: - int64_t MatrixIndex(RoutingModel::NodeIndex from, - RoutingModel::NodeIndex to) const { - return (from * size_ + to).value(); - } - std::unique_ptr matrix_; - const int size_; -}; - -int main(int argc, char** argv) { - google::ParseCommandLineFlags(&argc, &argv, true); - if (absl::GetFlag(FLAGS_tsp_size) > 0) { - // TSP of size absl::GetFlag(FLAGS_tsp_size). - // Second argument = 1 to build a single tour (it's a TSP). - // Nodes are indexed from 0 to absl::GetFlag(FLAGS_tsp_size) - 1, by default - // the start of the route is node 0. - RoutingModel routing(absl::GetFlag(FLAGS_tsp_size), 1); - // Setting first solution heuristic (cheapest addition). - absl::GetFlag(FLAGS_routing_first_solution) = "PathCheapestArc"; - // Disabling Large Neighborhood Search, comment out to activate it. - absl::GetFlag(FLAGS_routing_no_lns) = true; - - // Setting the cost function. - // Put a permanent callback to the distance accessor here. The callback - // has the following signature: ResultCallback2. - // The two arguments are the from and to node inidices. 
- RandomMatrix matrix(absl::GetFlag(FLAGS_tsp_size)); - if (absl::GetFlag(FLAGS_tsp_use_random_matrix)) { - matrix.Initialize(); - routing.SetArcCostEvaluatorOfAllVehicles( - NewPermanentCallback(&matrix, &RandomMatrix::Distance)); - } else { - routing.SetArcCostEvaluatorOfAllVehicles( - NewPermanentCallback(MyDistance)); - } - // Forbid node connections (randomly). - ACMRandom randomizer(GetSeed()); - int64_t forbidden_connections = 0; - while (forbidden_connections < - absl::GetFlag(FLAGS_tsp_random_forbidden_connections)) { - const int64_t from = - randomizer.Uniform(absl::GetFlag(FLAGS_tsp_size) - 1); - const int64_t to = - randomizer.Uniform(absl::GetFlag(FLAGS_tsp_size) - 1) + 1; - if (routing.NextVar(from)->Contains(to)) { - LOG(INFO) << "Forbidding connection " << from << " -> " << to; - routing.NextVar(from)->RemoveValue(to); - ++forbidden_connections; - } - } - // Solve, returns a solution if any (owned by RoutingModel). - const Assignment* solution = routing.Solve(); - if (solution != NULL) { - // Solution cost. - LOG(INFO) << "Cost " << solution->ObjectiveValue(); - // Inspect solution. 
- // Only one route here; otherwise iterate from 0 to routing.vehicles() - 1 - const int route_number = 0; - std::string route; - for (int64_t node = routing.Start(route_number); !routing.IsEnd(node); - node = solution->Value(routing.NextVar(node))) { - route = StrCat(route, StrCat(node, " -> ")); - } - route = StrCat(route, "0"); - LOG(INFO) << route; - } else { - LOG(INFO) << "No solution found."; - } - } else { - LOG(INFO) << "Specify an instance size greater than 0."; - } - return 0; -} From a7ffad19503d6f4ea3a9bc7e7e24f10e6656fa75 Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Wed, 7 Jan 2026 16:14:45 +0100 Subject: [PATCH 100/111] fixup remove of examples/tests (#4971) --- cmake/vagrant/freebsd/dotnet/Vagrantfile | 1 - makefiles/Makefile.cpp.mk | 59 ------------------------ makefiles/Makefile.java.mk | 2 +- makefiles/Makefile.python.mk | 4 +- 4 files changed, 2 insertions(+), 64 deletions(-) diff --git a/cmake/vagrant/freebsd/dotnet/Vagrantfile b/cmake/vagrant/freebsd/dotnet/Vagrantfile index 7648c0ae111..5ae33d1b696 100644 --- a/cmake/vagrant/freebsd/dotnet/Vagrantfile +++ b/cmake/vagrant/freebsd/dotnet/Vagrantfile @@ -93,7 +93,6 @@ Vagrant.configure("2") do |config| config.vm.provision "file", source: "../../../../examples/dotnet", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/java", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../examples/python", destination: "$HOME/project/examples/" - config.vm.provision "file", source: "../../../../examples/tests", destination: "$HOME/project/examples/" config.vm.provision "file", source: "../../../../patches", destination: "$HOME/project/" config.vm.provision "file", source: "../../../../Version.txt", destination: "$HOME/project/" config.vm.provision "file", source: "../../../../tools/doc/orLogo.png", destination: "$HOME/project/tools/doc/" diff --git a/makefiles/Makefile.cpp.mk b/makefiles/Makefile.cpp.mk index 
b2444c72898..d23e4fc559e 100644 --- a/makefiles/Makefile.cpp.mk +++ b/makefiles/Makefile.cpp.mk @@ -88,7 +88,6 @@ check_cpp: check_cpp_pimpl test_cpp: \ cpp \ - test_cc_tests \ test_cc_contrib \ test_cc_cpp @@ -306,54 +305,6 @@ endef CPP_EXAMPLES := contrib cpp $(foreach example,$(CPP_EXAMPLES),$(eval $(call cpp-example-target,$(example)))) -# Tests -CPP_TESTS := tests - -$(TEMP_CPP_DIR)/tests: | $(TEMP_CPP_DIR) - -$(MKDIR) $(TEMP_CPP_DIR)$Stests - -$(TEMP_CPP_DIR)/tests/%: \ - $(SRC_DIR)/examples/tests/%.cc \ - | $(TEMP_CPP_DIR)/tests - -$(MKDIR) $(TEMP_CPP_DIR)$Stests$S$* - -$(TEMP_CPP_DIR)/tests/%/CMakeLists.txt: ${SRC_DIR}/ortools/cpp/CMakeLists.txt.in | $(TEMP_CPP_DIR)/tests/% - $(COPY) ortools$Scpp$SCMakeLists.txt.in $(TEMP_CPP_DIR)$Stests$S$*$SCMakeLists.txt - $(SED) -i -e 's/@CPP_NAME@/$*/' \ - $(TEMP_CPP_DIR)$Stests$S$*$SCMakeLists.txt - $(SED) -i -e 's/@CPP_FILE_NAME@/$*.cc/' \ - $(TEMP_CPP_DIR)$Stests$S$*$SCMakeLists.txt - $(SED) -i -e 's/@TEST_ARGS@//' \ - $(TEMP_CPP_DIR)$Stests$S$*$SCMakeLists.txt - -$(TEMP_CPP_DIR)/tests/%/%.cc: \ - $(SRC_DIR)/examples/tests/%.cc \ - | $(TEMP_CPP_DIR)/tests/% - $(MKDIR_P) $(TEMP_CPP_DIR)$Stests$S$* - $(COPY) $(SRC_DIR)$Sexamples$Stests$S$*.cc \ - $(TEMP_CPP_DIR)$Stests$S$* - -rcpp_%: \ - cpp \ - $(SRC_DIR)/examples/tests/%.cc \ - $(TEMP_CPP_DIR)/tests/%/CMakeLists.txt \ - $(TEMP_CPP_DIR)/tests/%/%.cc \ - FORCE - cd $(TEMP_CPP_DIR)$Stests$S$* && \ - cmake -S. 
-Bbuild \ - -DCMAKE_BUILD_TYPE=$(BUILD_TYPE) \ - -DCMAKE_PREFIX_PATH=$(OR_ROOT_FULL)/$(INSTALL_DIR) \ - -DCMAKE_INSTALL_PREFIX=install \ - $(CMAKE_ARGS) \ - -G $(GENERATOR) -ifneq ($(PLATFORM),WIN64) - cd $(TEMP_CPP_DIR)$Stests$S$* && cmake --build build --config $(BUILD_TYPE) --target all -v - cd $(TEMP_CPP_DIR)$Stests$S$* && cmake --build build --config $(BUILD_TYPE) --target test -v -else - cd $(TEMP_CPP_DIR)$Stests$S$* && cmake --build build --config $(BUILD_TYPE) --target ALL_BUILD -v - cd $(TEMP_CPP_DIR)$Stests$S$* && cmake --build build --config $(BUILD_TYPE) --target RUN_TESTS -v -endif - ################################## ## Course scheduling example ## ################################## @@ -497,16 +448,6 @@ check_cpp_pimpl: \ rcpp_max_flow \ rcpp_min_cost_flow -.PHONY: test_cc_tests # Build and Run all C++ Tests (located in examples/tests) -test_cc_tests: \ - rcpp_lp_test \ - rcpp_bug_fz1 \ - rcpp_cpp11_test \ - rcpp_forbidden_intervals_test \ - rcpp_issue57 \ - rcpp_min_max_test -# $(MAKE) rcpp_issue173 # error: too long - .PHONY: test_cc_contrib # Build and Run all C++ Contrib (located in examples/contrib) test_cc_contrib: diff --git a/makefiles/Makefile.java.mk b/makefiles/Makefile.java.mk index e8e59a5cd0b..42b94398b7f 100644 --- a/makefiles/Makefile.java.mk +++ b/makefiles/Makefile.java.mk @@ -379,7 +379,7 @@ check_java: \ rjava_LinearProgramming \ rjava_IntegerProgramming -.PHONY: test_java_tests # Build and Run all Java Tests (located in examples/tests) +.PHONY: test_java_tests # Build and Run all Java Tests (located in ortools/*/java) test_java_tests: \ rjava_InitTest \ rjava_KnapsackSolverTest \ diff --git a/makefiles/Makefile.python.mk b/makefiles/Makefile.python.mk index d8321ddefe9..857465ccdc0 100644 --- a/makefiles/Makefile.python.mk +++ b/makefiles/Makefile.python.mk @@ -234,15 +234,13 @@ check_python: \ # rpy_nurses_cp \ # rpy_job_shop_cp \ -.PHONY: test_python_tests # Run all Python Tests (located in examples/tests) +.PHONY: 
test_python_tests # Run all Python Tests (located in ortools/*/python) test_python_tests: \ rpy_init_test \ rpy_lp_api_test \ rpy_lp_test \ rpy_model_builder_test \ rpy_pywraplp_test \ - rpy_cp_api_test \ - rpy_routing_api_test \ rpy_pywrapcp_test \ rpy_pywraprouting_test \ rpy_cp_model_test \ From 74a9ed242d62e75e85bd938d90a9fd4e21bc2a91 Mon Sep 17 00:00:00 2001 From: Guillaume Chatelet Date: Wed, 7 Jan 2026 15:51:08 +0100 Subject: [PATCH 101/111] Move course_scheduling from examples to ortools/scheduling (#4972) Also export tests for Bazel --- ortools/scheduling/BUILD.bazel | 90 ++ ortools/scheduling/CMakeLists.txt | 14 +- .../scheduling}/course_scheduling.cc | 4 +- .../scheduling}/course_scheduling.h | 6 +- .../scheduling}/course_scheduling_run.cc | 2 +- ortools/scheduling/course_scheduling_test.cc | 1204 +++++++++++++++++ .../jobshop_scheduling_parser_test.cc | 102 ++ ortools/scheduling/rcpsp_parser_test.cc | 115 ++ 8 files changed, 1526 insertions(+), 11 deletions(-) rename {examples/cpp => ortools/scheduling}/course_scheduling.cc (99%) rename {examples/cpp => ortools/scheduling}/course_scheduling.h (95%) rename {examples/cpp => ortools/scheduling}/course_scheduling_run.cc (98%) create mode 100644 ortools/scheduling/course_scheduling_test.cc create mode 100644 ortools/scheduling/jobshop_scheduling_parser_test.cc create mode 100644 ortools/scheduling/rcpsp_parser_test.cc diff --git a/ortools/scheduling/BUILD.bazel b/ortools/scheduling/BUILD.bazel index 6e27790607d..3e2dccf0610 100644 --- a/ortools/scheduling/BUILD.bazel +++ b/ortools/scheduling/BUILD.bazel @@ -17,7 +17,9 @@ load("@protobuf//bazel:cc_proto_library.bzl", "cc_proto_library") load("@protobuf//bazel:proto_library.bzl", "proto_library") load("@protobuf//bazel:py_proto_library.bzl", "py_proto_library") +load("@rules_cc//cc:cc_binary.bzl", "cc_binary") load("@rules_cc//cc:cc_library.bzl", "cc_library") +load("@rules_cc//cc:cc_test.bzl", "cc_test") package(default_visibility = ["//visibility:public"]) 
@@ -51,6 +53,29 @@ cc_library( ], ) +cc_test( + name = "rcpsp_parser_test", + size = "small", + srcs = ["rcpsp_parser_test.cc"], + data = [ + "//ortools/scheduling/testdata:c1510_1.mm.txt", + "//ortools/scheduling/testdata:j301_1.sm", + "//ortools/scheduling/testdata:mmlib100_j100100_1.mm.txt", + "//ortools/scheduling/testdata:psp1.sch", + "//ortools/scheduling/testdata:psp10_1.sch", + "//ortools/scheduling/testdata:rg300_1.rcp", + "//ortools/scheduling/testdata:rg30_set1_pat1.rcp", + "//ortools/scheduling/testdata:rip1.sch", + "//ortools/scheduling/testdata:ubo_10_psp2.sch", + ], + deps = [ + ":rcpsp_parser", + "//ortools/base:gmock_main", + "//ortools/base:path", + "@abseil-cpp//absl/strings:string_view", + ], +) + ### Jobshop Scheduling ### proto_library( name = "jobshop_scheduling_proto", @@ -81,6 +106,27 @@ cc_library( ], ) +cc_test( + name = "jobshop_scheduling_parser_test", + size = "small", + srcs = ["jobshop_scheduling_parser_test.cc"], + data = [ + "//ortools/scheduling/testdata:02a.fjs", + "//ortools/scheduling/testdata:1010_1_3", + "//ortools/scheduling/testdata:50_10_01_ta041.txt", + "//ortools/scheduling/testdata:SDST10_ta001.txt", + "//ortools/scheduling/testdata:ft06", + "//ortools/scheduling/testdata:jb1.txt", + "//ortools/scheduling/testdata:taillard-jobshop-15_15-1_225_100_150-1", + ], + deps = [ + ":jobshop_scheduling_parser", + "//ortools/base:gmock_main", + "//ortools/base:path", + "@abseil-cpp//absl/strings:string_view", + ], +) + ### Course Scheduling ### proto_library( name = "course_scheduling_proto", @@ -91,3 +137,47 @@ cc_proto_library( name = "course_scheduling_cc_proto", deps = [":course_scheduling_proto"], ) + +cc_library( + name = "course_scheduling", + srcs = ["course_scheduling.cc"], + hdrs = ["course_scheduling.h"], + deps = [ + ":course_scheduling_cc_proto", + "//ortools/base:mathutil", + "//ortools/linear_solver:linear_solver_base", + "//ortools/linear_solver:linear_solver_scip", + "//ortools/sat:cp_model_cc_proto", + 
"@abseil-cpp//absl/container:flat_hash_set", + "@abseil-cpp//absl/log", + "@abseil-cpp//absl/status", + "@abseil-cpp//absl/strings:str_format", + "@abseil-cpp//absl/types:span", + ], +) + +cc_binary( + name = "course_scheduling_run", + srcs = ["course_scheduling_run.cc"], + deps = [ + ":course_scheduling", + ":course_scheduling_cc_proto", + "//ortools/base", + "//ortools/base:file", + "//ortools/base:timer", + "@abseil-cpp//absl/flags:flag", + "@abseil-cpp//absl/log", + ], +) + +cc_test( + name = "course_scheduling_test", + srcs = ["course_scheduling_test.cc"], + deps = [ + ":course_scheduling", + ":course_scheduling_cc_proto", + "//ortools/base:gmock_main", + "//ortools/base:mutable_memfile", + "//ortools/base:parse_test_proto", + ], +) diff --git a/ortools/scheduling/CMakeLists.txt b/ortools/scheduling/CMakeLists.txt index 48953bca474..4b8285a637e 100644 --- a/ortools/scheduling/CMakeLists.txt +++ b/ortools/scheduling/CMakeLists.txt @@ -11,15 +11,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-file(GLOB _SRCS "*.h" "*.cc") +list(APPEND _SRCS + course_scheduling.cc + course_scheduling.h + jobshop_scheduling_parser.cc + jobshop_scheduling_parser.h + rcpsp_parser.cc + rcpsp_parser.h +) set(NAME ${PROJECT_NAME}_scheduling) # Will be merge in libortools.so -#add_library(${NAME} STATIC ${_SRCS}) add_library(${NAME} OBJECT ${_SRCS}) -set_target_properties(${NAME} PROPERTIES - POSITION_INDEPENDENT_CODE ON - ) +set_target_properties(${NAME} PROPERTIES POSITION_INDEPENDENT_CODE ON) target_include_directories(${NAME} PRIVATE ${PROJECT_SOURCE_DIR} ${PROJECT_BINARY_DIR}) diff --git a/examples/cpp/course_scheduling.cc b/ortools/scheduling/course_scheduling.cc similarity index 99% rename from examples/cpp/course_scheduling.cc rename to ortools/scheduling/course_scheduling.cc index 264acc52682..e1f0a6d0e88 100644 --- a/examples/cpp/course_scheduling.cc +++ b/ortools/scheduling/course_scheduling.cc @@ -11,7 +11,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -#include "examples/cpp/course_scheduling.h" +#include "ortools/scheduling/course_scheduling.h" #include #include @@ -21,10 +21,10 @@ #include #include "absl/container/flat_hash_set.h" +#include "absl/log/log.h" #include "absl/status/status.h" #include "absl/strings/str_format.h" #include "absl/types/span.h" -#include "ortools/base/logging.h" #include "ortools/base/mathutil.h" #include "ortools/linear_solver/linear_solver.h" #include "ortools/scheduling/course_scheduling.pb.h" diff --git a/examples/cpp/course_scheduling.h b/ortools/scheduling/course_scheduling.h similarity index 95% rename from examples/cpp/course_scheduling.h rename to ortools/scheduling/course_scheduling.h index cc8247f0b63..0ab86401638 100644 --- a/examples/cpp/course_scheduling.h +++ b/ortools/scheduling/course_scheduling.h @@ -11,8 +11,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-#ifndef ORTOOLS_EXAMPLES_COURSE_SCHEDULING_H_ -#define ORTOOLS_EXAMPLES_COURSE_SCHEDULING_H_ +#ifndef ORTOOLS_SCHEDULING_COURSE_SCHEDULING_H_ +#define ORTOOLS_SCHEDULING_COURSE_SCHEDULING_H_ #include #include @@ -83,4 +83,4 @@ class CourseSchedulingSolver { } // namespace operations_research -#endif // ORTOOLS_EXAMPLES_COURSE_SCHEDULING_H_ +#endif // ORTOOLS_SCHEDULING_COURSE_SCHEDULING_H_ diff --git a/examples/cpp/course_scheduling_run.cc b/ortools/scheduling/course_scheduling_run.cc similarity index 98% rename from examples/cpp/course_scheduling_run.cc rename to ortools/scheduling/course_scheduling_run.cc index 2aef942952b..2b30cdf5f71 100644 --- a/examples/cpp/course_scheduling_run.cc +++ b/ortools/scheduling/course_scheduling_run.cc @@ -23,11 +23,11 @@ #include "absl/flags/flag.h" #include "absl/log/log.h" -#include "examples/cpp/course_scheduling.h" #include "ortools/base/helpers.h" #include "ortools/base/init_google.h" #include "ortools/base/options.h" #include "ortools/base/timer.h" +#include "ortools/scheduling/course_scheduling.h" #include "ortools/scheduling/course_scheduling.pb.h" ABSL_FLAG(std::string, input, "", diff --git a/ortools/scheduling/course_scheduling_test.cc b/ortools/scheduling/course_scheduling_test.cc new file mode 100644 index 00000000000..9525f909a52 --- /dev/null +++ b/ortools/scheduling/course_scheduling_test.cc @@ -0,0 +1,1204 @@ +// Copyright 2010-2025 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "ortools/scheduling/course_scheduling.h" + +#include "gtest/gtest.h" +#include "ortools/base/gmock.h" +#include "ortools/base/parse_test_proto.h" +#include "ortools/scheduling/course_scheduling.pb.h" + +namespace operations_research { +namespace { + +using ::google::protobuf::contrib::parse_proto::ParseTestProto; + +TEST(CourseSchedulingTest, CheckMultipleSectionsCorrectlyScheduled) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 2 + daily_time_slot_count: 2 + courses { + meetings_count: 2 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_indices: 1 + teacher_section_counts: 1 + teacher_section_counts: 2 + } + teachers {} + teachers { restricted_time_slots: 1 } + )pb"); + const CourseSchedulingResult infeasible_result_ = ParseTestProto(R"pb( + solver_status: SOLVER_INFEASIBLE + message: "The problem is infeasible with the given courses." + )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + EXPECT_THAT(result, testing::EqualsProto(infeasible_result_)); +} + +TEST(CourseSchedulingTest, CheckDailyMaximumForCoursesIsNotViolated) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 3 + daily_time_slot_count: 2 + courses { + meetings_count: 4 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_section_counts: 1 + } + teachers {} + )pb"); + const CourseSchedulingResult infeasible_result_ = ParseTestProto(R"pb( + solver_status: SOLVER_INFEASIBLE + message: "The problem is infeasible with the given courses." 
+ )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + EXPECT_THAT(result, testing::EqualsProto(infeasible_result_)); +} + +TEST(CourseSchedulingTest, CheckTeacherIsNotDoubleBooked) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 1 + daily_time_slot_count: 1 + courses { + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_section_counts: 1 + } + courses { + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_section_counts: 1 + } + teachers {} + )pb"); + const CourseSchedulingResult infeasible_result_ = ParseTestProto(R"pb( + solver_status: SOLVER_INFEASIBLE + message: "The problem is infeasible with the given courses." + )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + EXPECT_THAT(result, testing::EqualsProto(infeasible_result_)); +} + +TEST(CourseSchedulingTest, CheckRoomAssignmentsForTimeSlotNotViolated) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 1 + daily_time_slot_count: 1 + courses { + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_section_counts: 1 + room_indices: 0 + } + courses { + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 1 + teacher_section_counts: 1 + room_indices: 0 + } + teachers {} + teachers {} + rooms {} + )pb"); + const CourseSchedulingResult infeasible_result_ = ParseTestProto(R"pb( + solver_status: SOLVER_INFEASIBLE + message: "The problem is infeasible with the given courses." 
+ )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + EXPECT_THAT(result, testing::EqualsProto(infeasible_result_)); +} + +TEST(CourseSchedulingTest, + CheckConsecutiveTimeslotsValuesStartOfDayNotViolated) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 2 + daily_time_slot_count: 4 + courses { + meetings_count: 1 + consecutive_slots_count: 2 + teacher_indices: 0 + teacher_section_counts: 1 + } + teachers { + restricted_time_slots: 1 + restricted_time_slots: 2 + restricted_time_slots: 3 + restricted_time_slots: 5 + restricted_time_slots: 6 + restricted_time_slots: 7 + } + )pb"); + const CourseSchedulingResult infeasible_result_ = ParseTestProto(R"pb( + solver_status: SOLVER_INFEASIBLE + message: "The problem is infeasible with the given courses." + )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + EXPECT_THAT(result, testing::EqualsProto(infeasible_result_)); +} + +TEST(CourseSchedulingTest, CheckConsecutiveTimeslotsValuesEndOfDayNotViolated) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 2 + daily_time_slot_count: 4 + courses { + meetings_count: 1 + consecutive_slots_count: 2 + teacher_indices: 0 + teacher_section_counts: 1 + } + teachers { + restricted_time_slots: 0 + restricted_time_slots: 1 + restricted_time_slots: 2 + restricted_time_slots: 4 + restricted_time_slots: 5 + restricted_time_slots: 6 + } + )pb"); + const CourseSchedulingResult infeasible_result_ = ParseTestProto(R"pb( + solver_status: SOLVER_INFEASIBLE + message: "The problem is infeasible with the given courses." 
+ )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + EXPECT_THAT(result, testing::EqualsProto(infeasible_result_)); +} + +TEST(CourseSchedulingTest, CheckSingletonCoursesNotScheduledForSameTime) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 1 + daily_time_slot_count: 1 + courses { + meetings_count: 1 + consecutive_slots_count: 1 + min_capacity: 0 + max_capacity: 5 + teacher_indices: 0 + teacher_section_counts: 1 + } + courses { + meetings_count: 1 + consecutive_slots_count: 1 + min_capacity: 0 + max_capacity: 5 + teacher_indices: 1 + teacher_section_counts: 1 + } + teachers {} + teachers {} + students { course_indices: 0 course_indices: 1 } + )pb"); + const CourseSchedulingResult infeasible_result_ = ParseTestProto(R"pb( + solver_status: SOLVER_INFEASIBLE + message: "The problem is infeasible with the given courses." + )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + EXPECT_THAT(result, testing::EqualsProto(infeasible_result_)); +} + +TEST(CourseSchedulingTest, CheckMinimumCapacityForCourseNotViolated) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 1 + daily_time_slot_count: 1 + courses { + meetings_count: 1 + consecutive_slots_count: 1 + min_capacity: 6 + max_capacity: 10 + teacher_indices: 0 + teacher_section_counts: 1 + } + teachers {} + students { course_indices: 0 } + )pb"); + const CourseSchedulingResult infeasible_result_ = ParseTestProto(R"pb( + solver_status: SOLVER_INFEASIBLE + message: "Check the minimum or maximum capacity constraints for your " + "classes." 
+ )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + EXPECT_THAT(result, testing::EqualsProto(infeasible_result_)); +} + +TEST(CourseSchedulingTest, CheckMaximumCapacityForCourseNotViolated) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 1 + daily_time_slot_count: 1 + courses { + meetings_count: 1 + consecutive_slots_count: 1 + min_capacity: 0 + max_capacity: 2 + teacher_indices: 0 + teacher_section_counts: 1 + } + teachers {} + students { course_indices: 0 } + students { course_indices: 0 } + students { course_indices: 0 } + )pb"); + const CourseSchedulingResult infeasible_result_ = ParseTestProto(R"pb( + solver_status: SOLVER_INFEASIBLE + message: "Check the minimum or maximum capacity constraints for your " + "classes." + )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + EXPECT_THAT(result, testing::EqualsProto(infeasible_result_)); +} + +TEST(CourseSchedulingTest, CheckStudentsAreNotDoubleBooked) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 1 + daily_time_slot_count: 2 + courses { + meetings_count: 1 + consecutive_slots_count: 1 + min_capacity: 0 + max_capacity: 5 + teacher_indices: 0 + teacher_indices: 1 + teacher_section_counts: 1 + teacher_section_counts: 1 + } + courses { + meetings_count: 1 + consecutive_slots_count: 1 + min_capacity: 0 + max_capacity: 5 + teacher_indices: 2 + teacher_section_counts: 1 + } + teachers { restricted_time_slots: 1 } + teachers { restricted_time_slots: 1 } + teachers { restricted_time_slots: 1 } + students { course_indices: 0 course_indices: 1 } + )pb"); + const CourseSchedulingResult infeasible_result = ParseTestProto(R"pb( + solver_status: SOLVER_INFEASIBLE + message: "The problem is infeasible with the given courses." 
+ )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + EXPECT_THAT(result, testing::EqualsProto(infeasible_result)); +} + +TEST(CourseSchedulingTest, CheckStudentsAreNotDoubleBooked_TooManyCourses) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 1 + daily_time_slot_count: 2 + courses { + meetings_count: 1 + consecutive_slots_count: 1 + min_capacity: 0 + max_capacity: 5 + teacher_indices: 0 + teacher_section_counts: 2 + } + courses { + meetings_count: 1 + consecutive_slots_count: 1 + min_capacity: 0 + max_capacity: 5 + teacher_indices: 1 + teacher_indices: 2 + teacher_section_counts: 1 + teacher_section_counts: 1 + } + courses { + meetings_count: 1 + consecutive_slots_count: 1 + min_capacity: 0 + max_capacity: 5 + teacher_indices: 2 + teacher_section_counts: 1 + } + teachers {} + teachers {} + teachers {} + students { course_indices: 0 course_indices: 1 course_indices: 2 } + )pb"); + const CourseSchedulingResult infeasible_result = ParseTestProto(R"pb( + solver_status: SOLVER_INFEASIBLE + message: "The problem is infeasible with the given courses." 
+ )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + EXPECT_THAT(result, testing::EqualsProto(infeasible_result)); +} + +TEST(CourseSchedulingTest, CheckTeacherNotScheduledForRestrictedSlot) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 1 + daily_time_slot_count: 2 + courses { + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_indices: 1 + teacher_section_counts: 1 + teacher_section_counts: 1 + } + teachers { restricted_time_slots: 1 } + teachers { restricted_time_slots: 0 } + )pb"); + const CourseSchedulingResult expected_result = ParseTestProto(R"pb( + solver_status: SOLVER_OPTIMAL + class_assignments { course_index: 0 section_number: 0 time_slots: 0 } + class_assignments { course_index: 0 section_number: 1 time_slots: 1 } + )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + EXPECT_THAT(result, testing::EqualsProto(expected_result)); +} + +TEST(CourseSchedulingTest, CheckConsecutiveTimeSlotsValuesNotViolated) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 2 + daily_time_slot_count: 2 + courses { + meetings_count: 1 + consecutive_slots_count: 2 + teacher_indices: 0 + teacher_section_counts: 1 + } + teachers { restricted_time_slots: 1 } + )pb"); + const CourseSchedulingResult expected_result = ParseTestProto(R"pb( + solver_status: SOLVER_OPTIMAL + class_assignments { + course_index: 0 + section_number: 0 + time_slots: 2 + time_slots: 3 + } + )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + EXPECT_THAT(result, testing::EqualsProto(expected_result)); +} + +TEST(CourseSchedulingTest, CheckStudentsCorrectlyAssignedToSingletonCourses) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 1 + daily_time_slot_count: 2 + courses { + meetings_count: 1 + consecutive_slots_count: 1 + min_capacity: 0 + 
max_capacity: 5 + teacher_indices: 0 + teacher_section_counts: 1 + } + courses { + meetings_count: 1 + consecutive_slots_count: 1 + min_capacity: 0 + max_capacity: 5 + teacher_indices: 1 + teacher_section_counts: 1 + } + teachers { restricted_time_slots: 0 } + teachers {} + students { course_indices: 0 course_indices: 1 } + students { course_indices: 1 course_indices: 0 } + )pb"); + const CourseSchedulingResult expected_result = ParseTestProto(R"pb( + solver_status: SOLVER_OPTIMAL + class_assignments { course_index: 0 section_number: 0 time_slots: 1 } + class_assignments { course_index: 1 section_number: 0 time_slots: 0 } + student_assignments { + student_index: 0 + course_indices: 0 + course_indices: 1 + section_indices: 0 + section_indices: 0 + } + student_assignments { + student_index: 1 + course_indices: 1 + course_indices: 0 + section_indices: 0 + section_indices: 0 + } + )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + EXPECT_THAT(result, testing::EqualsProto(expected_result)); +} + +TEST(CourseSchedulingTest, CheckStudentsNotDoubleBookedForTimeSlot) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 1 + daily_time_slot_count: 2 + courses { + meetings_count: 1 + consecutive_slots_count: 1 + min_capacity: 0 + max_capacity: 5 + teacher_indices: 0 + teacher_indices: 1 + teacher_indices: 2 + teacher_section_counts: 1 + teacher_section_counts: 1 + teacher_section_counts: 1 + } + courses { + meetings_count: 1 + consecutive_slots_count: 1 + min_capacity: 0 + max_capacity: 5 + teacher_indices: 3 + teacher_section_counts: 1 + } + teachers { restricted_time_slots: 1 } + teachers { restricted_time_slots: 1 } + teachers { restricted_time_slots: 0 } + teachers { restricted_time_slots: 1 } + students { course_indices: 0 course_indices: 1 } + )pb"); + const CourseSchedulingResult expected_result = ParseTestProto(R"pb( + solver_status: SOLVER_OPTIMAL + class_assignments { course_index: 0 
section_number: 0 time_slots: 0 } + class_assignments { course_index: 0 section_number: 1 time_slots: 0 } + class_assignments { course_index: 0 section_number: 2 time_slots: 1 } + class_assignments { course_index: 1 section_number: 0 time_slots: 0 } + student_assignments { + student_index: 0 + course_indices: 0 + course_indices: 1 + section_indices: 2 + section_indices: 0 + } + )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + EXPECT_THAT(result, testing::EqualsProto(expected_result)); +} + +TEST(CourseSchedulingTest, + AssertErrorWhenTeacherIndexNumberDoesNotMatchNumSectionsNumber) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 3 + daily_time_slot_count: 2 + courses { + display_name: "English" + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_indices: 1 + teacher_section_counts: 2 + } + teachers {} + teachers {} + )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + ASSERT_EQ(result.solver_status(), SOLVER_MODEL_INVALID); + ASSERT_EQ(result.message(), + "The course titled English should have the same number of teacher " + "indices and section numbers."); +} + +TEST(CourseSchedulingTest, + AssertErrorWhenCourseIsAllottedRoomIndexThatDoesNotExist) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 3 + daily_time_slot_count: 2 + courses { + display_name: "English" + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_section_counts: 1 + room_indices: 1 + } + teachers {} + rooms {} + )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + ASSERT_EQ(result.solver_status(), SOLVER_MODEL_INVALID); + ASSERT_EQ(result.message(), + "The course titled English is slotted for room index 1 but there " + "are only 1 rooms."); +} + +TEST(CourseSchedulingTest, + 
AssertErrorWhenCourseIsGivenTeacherIndexThatDoesNotExist) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 3 + daily_time_slot_count: 2 + courses { + display_name: "English" + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 1 + teacher_section_counts: 1 + } + teachers {} + )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + ASSERT_EQ(result.solver_status(), SOLVER_MODEL_INVALID); + ASSERT_EQ(result.message(), + "The course titled English has teacher 1 assigned to it but there " + "are only 1 teachers."); +} + +TEST(CourseSchedulingTest, + AssertErrorWhenConsecutiveTimeSlotNumberIsMoreThanTwo) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 3 + daily_time_slot_count: 2 + courses { + display_name: "English" + meetings_count: 1 + consecutive_slots_count: 3 + teacher_indices: 0 + teacher_section_counts: 1 + } + teachers {} + )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + ASSERT_EQ(result.solver_status(), SOLVER_MODEL_INVALID); + ASSERT_EQ(result.message(), + "The course titled English has 3 consecutive time slots specified " + "when it can only have 1 or 2."); +} + +TEST(CourseSchedulingTest, + AssertErrorWhenTeacherHasRestrictedTimeSlotThatDoesNotExist) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 1 + daily_time_slot_count: 1 + courses { + display_name: "English" + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_section_counts: 1 + } + teachers { display_name: "A" restricted_time_slots: 1 } + )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + ASSERT_EQ(result.solver_status(), SOLVER_MODEL_INVALID); + ASSERT_EQ(result.message(), + "Teacher with name A has restricted time slot 1 but there are only " + "1 time slots."); +} + +TEST(CourseSchedulingTest, + 
AssertErrorWhenStudentHasCourseIndexThatDoesNotExist) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 1 + daily_time_slot_count: 1 + courses { + display_name: "English" + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_section_counts: 1 + } + teachers {} + students { display_name: "Marvin" course_indices: 1 } + )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + ASSERT_EQ(result.solver_status(), SOLVER_MODEL_INVALID); + ASSERT_EQ(result.message(), + "Student with name Marvin has course index 1 but there are only 1 " + "courses."); +} + +class CourseSchedulingVerifierTestSolver : public CourseSchedulingSolver { + public: + void SetResultToReturn(CourseSchedulingResult result_to_return) { + result_to_return_ = result_to_return; + } + + protected: + CourseSchedulingResult SolveModel( + const CourseSchedulingModel& model, + const ConflictPairs& class_conflicts) override { + return result_to_return_; + } + + private: + CourseSchedulingResult result_to_return_; +}; + +TEST(CourseSchedulingTest, CheckVerifierErrorWhenTwoClassesAssignedToSameRoom) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 3 + daily_time_slot_count: 2 + courses { + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_section_counts: 1 + room_indices: 0 + } + courses { + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 1 + teacher_section_counts: 1 + room_indices: 0 + } + teachers {} + teachers {} + rooms { display_name: "Zaphod" } + )pb"); + const CourseSchedulingResult error_result = ParseTestProto(R"pb( + solver_status: SOLVER_OPTIMAL + class_assignments { + course_index: 0 + section_number: 0 + time_slots: 0 + room_indices: 0 + } + class_assignments { + course_index: 1 + section_number: 0 + time_slots: 0 + room_indices: 0 + } + )pb"); + + CourseSchedulingVerifierTestSolver solver; + 
solver.SetResultToReturn(error_result); + const CourseSchedulingResult result = solver.Solve(model); + + ASSERT_EQ(result.solver_status(), ABNORMAL); + ASSERT_EQ(result.message(), + "Verification failed: Multiple classes have been assigned to room " + "Zaphod during time slot 0."); +} + +TEST(CourseSchedulingTest, + CheckVerifierErrorWhenClassDoesNotMeetCorrectNumberOfTimes) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 3 + daily_time_slot_count: 2 + courses { + display_name: "English" + meetings_count: 3 + consecutive_slots_count: 2 + teacher_indices: 0 + teacher_section_counts: 2 + } + teachers {} + )pb"); + const CourseSchedulingResult error_result = ParseTestProto(R"pb( + solver_status: SOLVER_OPTIMAL + class_assignments { + course_index: 0 + section_number: 0 + time_slots: 0 + time_slots: 1 + time_slots: 2 + time_slots: 3 + time_slots: 4 + time_slots: 5 + } + class_assignments { + course_index: 0 + section_number: 1 + time_slots: 0 + time_slots: 1 + time_slots: 2 + time_slots: 3 + time_slots: 5 + } + )pb"); + + CourseSchedulingVerifierTestSolver solver; + solver.SetResultToReturn(error_result); + const CourseSchedulingResult result = solver.Solve(model); + + ASSERT_EQ(result.solver_status(), ABNORMAL); + ASSERT_EQ(result.message(), + "Verification failed: The course titled English and section number " + "1 meets 5 times when it should meet 6 times."); +} + +TEST(CourseSchedulingTest, + CheckVerifierErrorWhenClassMeetsMoreThanConsecutiveSlotCountPerDay) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 2 + daily_time_slot_count: 3 + courses { + display_name: "English" + meetings_count: 2 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_section_counts: 1 + } + teachers {} + )pb"); + const CourseSchedulingResult error_result = ParseTestProto(R"pb( + solver_status: SOLVER_OPTIMAL + class_assignments { + course_index: 0 + section_number: 0 + time_slots: 3 + time_slots: 4 + } + )pb"); + + 
CourseSchedulingVerifierTestSolver solver; + solver.SetResultToReturn(error_result); + const CourseSchedulingResult result = solver.Solve(model); + + ASSERT_EQ(result.solver_status(), ABNORMAL); + ASSERT_EQ(result.message(), + "Verification failed: The course titled English does not meet the " + "correct number of " + "times in day 1."); +} + +TEST(CourseSchedulingTest, + CheckVerifierErrorWhenClassIsNotScheduledForConsecutiveSlots) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 2 + daily_time_slot_count: 3 + courses { + display_name: "English" + meetings_count: 2 + consecutive_slots_count: 2 + teacher_indices: 0 + teacher_section_counts: 1 + } + teachers {} + )pb"); + const CourseSchedulingResult error_result = ParseTestProto(R"pb( + solver_status: SOLVER_OPTIMAL + class_assignments { + course_index: 0 + section_number: 0 + time_slots: 1 + time_slots: 2 + time_slots: 3 + time_slots: 5 + } + )pb"); + + CourseSchedulingVerifierTestSolver solver; + solver.SetResultToReturn(error_result); + const CourseSchedulingResult result = solver.Solve(model); + + ASSERT_EQ(result.solver_status(), ABNORMAL); + ASSERT_EQ(result.message(), + "Verification failed: The course titled English is not scheduled " + "for consecutive time " + "slots in day 1."); +} + +TEST(CourseSchedulingTest, + CheckVerifierErrorWhenTeacherIsDoubleBookedForATimeSlot) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 1 + daily_time_slot_count: 2 + courses { + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_section_counts: 1 + } + courses { + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_section_counts: 1 + } + teachers { display_name: "Marvin" } + )pb"); + const CourseSchedulingResult error_result = ParseTestProto(R"pb( + solver_status: SOLVER_OPTIMAL + class_assignments { course_index: 0 section_number: 0 time_slots: 0 } + class_assignments { course_index: 1 section_number: 0 time_slots: 0 } 
+ )pb"); + + CourseSchedulingVerifierTestSolver solver; + solver.SetResultToReturn(error_result); + const CourseSchedulingResult result = solver.Solve(model); + + ASSERT_EQ(result.solver_status(), ABNORMAL); + ASSERT_EQ(result.message(), + "Verification failed: Teacher with name Marvin has been assigned " + "to multiple classes at time slot 0."); +} + +TEST(CourseSchedulingTest, + CheckVerifierErrorWhenTeacherIsAssignedToRestrictedTimeSlot) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 1 + daily_time_slot_count: 2 + courses { + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_section_counts: 1 + } + courses { + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_section_counts: 1 + } + teachers { display_name: "Marvin" restricted_time_slots: 1 } + )pb"); + const CourseSchedulingResult error_result = ParseTestProto(R"pb( + solver_status: SOLVER_OPTIMAL + class_assignments { course_index: 0 section_number: 0 time_slots: 0 } + class_assignments { course_index: 1 section_number: 0 time_slots: 1 } + )pb"); + + CourseSchedulingVerifierTestSolver solver; + solver.SetResultToReturn(error_result); + const CourseSchedulingResult result = solver.Solve(model); + + ASSERT_EQ(result.solver_status(), ABNORMAL); + ASSERT_EQ(result.message(), + "Verification failed: Teacher with name Marvin has been assigned " + "to restricted time slot 1."); +} + +TEST(CourseSchedulingTest, + CheckVerifierErrorWhenStudentNotAssignedToCorrectCourses) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 3 + daily_time_slot_count: 2 + courses { + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_section_counts: 1 + } + courses { + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 1 + teacher_section_counts: 1 + } + teachers {} + teachers {} + students { display_name: "Marvin" course_indices: 0 } + )pb"); + const CourseSchedulingResult error_result = 
ParseTestProto(R"pb( + solver_status: SOLVER_OPTIMAL + class_assignments { course_index: 0 section_number: 0 time_slots: 0 } + class_assignments { course_index: 1 section_number: 0 time_slots: 0 } + student_assignments { + student_index: 0 + course_indices: 1 + section_indices: 0 + } + )pb"); + + CourseSchedulingVerifierTestSolver solver; + solver.SetResultToReturn(error_result); + const CourseSchedulingResult result = solver.Solve(model); + + ASSERT_EQ(result.solver_status(), ABNORMAL); + ASSERT_EQ(result.message(), + "Verification failed: Student with name Marvin has not been " + "assigned the correct courses."); +} + +TEST(CourseSchedulingTest, + CheckVerifierErrorWhenStudentIsDoubleBookedForATimeSlot) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 1 + daily_time_slot_count: 2 + courses { + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_section_counts: 1 + } + courses { + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 1 + teacher_section_counts: 1 + } + teachers {} + teachers {} + students { display_name: "Marvin" course_indices: 0 course_indices: 1 } + )pb"); + const CourseSchedulingResult error_result = ParseTestProto(R"pb( + solver_status: SOLVER_OPTIMAL + class_assignments { course_index: 0 section_number: 0 time_slots: 0 } + class_assignments { course_index: 1 section_number: 0 time_slots: 0 } + student_assignments { + student_index: 0 + course_indices: 0 + course_indices: 1 + section_indices: 0 + section_indices: 0 + } + )pb"); + + CourseSchedulingVerifierTestSolver solver; + solver.SetResultToReturn(error_result); + const CourseSchedulingResult result = solver.Solve(model); + + ASSERT_EQ(result.solver_status(), ABNORMAL); + ASSERT_EQ(result.message(), + "Verification failed: Student with name Marvin has been assigned " + "to multiple classes at time slot 0."); +} + +TEST(CourseSchedulingTest, + CheckVerifierErrorWhenClassSizeDoesNotReachMinCapacity) { + const 
CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 1 + daily_time_slot_count: 2 + courses { + display_name: "English" + meetings_count: 1 + min_capacity: 3 + max_capacity: 10 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_section_counts: 1 + } + teachers {} + students { course_indices: 0 } + students { course_indices: 0 } + )pb"); + const CourseSchedulingResult error_result = ParseTestProto(R"pb( + solver_status: SOLVER_OPTIMAL + class_assignments { course_index: 0 section_number: 0 time_slots: 0 } + student_assignments { + student_index: 0 + course_indices: 0 + section_indices: 0 + } + student_assignments { + student_index: 1 + course_indices: 0 + section_indices: 0 + } + )pb"); + + CourseSchedulingVerifierTestSolver solver; + solver.SetResultToReturn(error_result); + const CourseSchedulingResult result = solver.Solve(model); + + ASSERT_EQ(result.solver_status(), ABNORMAL); + ASSERT_EQ(result.message(), + "Verification failed: The course titled English has 2 students " + "when it should have at least 3 students."); +} + +TEST(CourseSchedulingTest, CheckVerifierErrorWhenClassSizeExceedsMaxCapacity) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 1 + daily_time_slot_count: 2 + courses { + display_name: "English" + meetings_count: 1 + min_capacity: 0 + max_capacity: 2 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_section_counts: 1 + } + teachers {} + students { course_indices: 0 } + students { course_indices: 0 } + students { course_indices: 0 } + )pb"); + const CourseSchedulingResult error_result = ParseTestProto(R"pb( + solver_status: SOLVER_OPTIMAL + class_assignments { course_index: 0 section_number: 0 time_slots: 0 } + student_assignments { + student_index: 0 + course_indices: 0 + section_indices: 0 + } + student_assignments { + student_index: 1 + course_indices: 0 + section_indices: 0 + } + student_assignments { + student_index: 2 + course_indices: 0 + section_indices: 0 + } + )pb"); + + 
CourseSchedulingVerifierTestSolver solver; + solver.SetResultToReturn(error_result); + const CourseSchedulingResult result = solver.Solve(model); + + ASSERT_EQ(result.solver_status(), ABNORMAL); + ASSERT_EQ(result.message(), + "Verification failed: The course titled English has 3 students " + "when it should have no more than 2 students."); +} + +TEST(CourseSchedulingTest, CheckRoomAssignmentsForCourseNotViolated) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 1 + daily_time_slot_count: 1 + courses { + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_section_counts: 1 + room_indices: 1 + } + courses { + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 1 + teacher_section_counts: 1 + room_indices: 0 + } + teachers {} + teachers {} + rooms {} + rooms {} + )pb"); + const CourseSchedulingResult expected_result = ParseTestProto(R"pb( + solver_status: SOLVER_OPTIMAL + class_assignments { + course_index: 0 + section_number: 0 + time_slots: 0 + room_indices: 1 + } + class_assignments { + course_index: 1 + section_number: 0 + time_slots: 0 + room_indices: 0 + } + )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + EXPECT_THAT(result, testing::EqualsProto(expected_result)); +} + +TEST(CourseSchedulingTest, CheckConsecutiveTimeSlotsScheduledForSameRoom) { + const CourseSchedulingModel model = ParseTestProto(R"pb( + days_count: 1 + daily_time_slot_count: 2 + courses { + meetings_count: 1 + consecutive_slots_count: 1 + teacher_indices: 0 + teacher_section_counts: 1 + room_indices: 0 + } + courses { + meetings_count: 1 + consecutive_slots_count: 2 + teacher_indices: 1 + teacher_section_counts: 1 + room_indices: 0 + room_indices: 1 + } + teachers { restricted_time_slots: 0 } + teachers {} + rooms {} + rooms {} + )pb"); + const CourseSchedulingResult expected_result = ParseTestProto(R"pb( + solver_status: SOLVER_OPTIMAL + class_assignments { + 
course_index: 0 + section_number: 0 + time_slots: 1 + room_indices: 0 + } + class_assignments { + course_index: 1 + section_number: 0 + time_slots: 0 + room_indices: 1 + time_slots: 1 + room_indices: 1 + } + )pb"); + + CourseSchedulingSolver solver; + const CourseSchedulingResult result = solver.Solve(model); + + EXPECT_THAT(result, testing::EqualsProto(expected_result)); +} + +} // namespace +} // namespace operations_research diff --git a/ortools/scheduling/jobshop_scheduling_parser_test.cc b/ortools/scheduling/jobshop_scheduling_parser_test.cc new file mode 100644 index 00000000000..d5cbc7ca966 --- /dev/null +++ b/ortools/scheduling/jobshop_scheduling_parser_test.cc @@ -0,0 +1,102 @@ +// Copyright 2010-2025 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "ortools/scheduling/jobshop_scheduling_parser.h" + +#include + +#include "absl/strings/string_view.h" +#include "gtest/gtest.h" +#include "ortools/base/path.h" + +namespace operations_research { +namespace scheduling { +namespace jssp { +namespace { + +std::string GetPath(absl::string_view filename) { + constexpr absl::string_view kTestDataDir = + "_main/ortools/scheduling/testdata/"; + return file::JoinPath(::testing::SrcDir(), kTestDataDir, filename); +} + +TEST(RcpspParserTest, Jssp) { + JsspParser parser; + ASSERT_TRUE(parser.ParseFile(GetPath("ft06"))); + const JsspInputProblem problem = parser.problem(); + EXPECT_EQ(6, problem.jobs_size()); + EXPECT_EQ(6, problem.machines_size()); +} + +TEST(RcpspParserTest, Taillard) { + JsspParser parser; + ASSERT_TRUE(parser.ParseFile(GetPath("50_10_01_ta041.txt"))); + const JsspInputProblem problem = parser.problem(); + EXPECT_EQ(50, problem.jobs_size()); + EXPECT_EQ(10, problem.machines_size()); +} + +TEST(RcpspParserTest, Flexible) { + JsspParser parser; + ASSERT_TRUE(parser.ParseFile(GetPath("02a.fjs"))); + const JsspInputProblem problem = parser.problem(); + EXPECT_EQ(10, problem.jobs_size()); + EXPECT_EQ(5, problem.machines_size()); +} + +TEST(RcpspParserTest, Sdst) { + JsspParser parser; + ASSERT_TRUE(parser.ParseFile(GetPath("SDST10_ta001.txt"))); + const JsspInputProblem problem = parser.problem(); + EXPECT_EQ(20, problem.jobs_size()); + EXPECT_EQ(5, problem.machines_size()); + for (const Machine& m : problem.machines()) { + ASSERT_TRUE(m.has_transition_time_matrix()); + EXPECT_EQ(m.transition_time_matrix().transition_time_size(), + problem.jobs_size() * problem.jobs_size()); + } +} + +TEST(RcpspParserTest, Tardiness) { + JsspParser parser; + ASSERT_TRUE(parser.ParseFile(GetPath("jb1.txt"))); + const JsspInputProblem problem = parser.problem(); + EXPECT_EQ(10, problem.jobs_size()); + EXPECT_EQ(5, problem.machines_size()); +} + +TEST(RcpspParserTest, Pss) { + JsspParser parser; + ASSERT_TRUE( + 
parser.ParseFile(GetPath("taillard-jobshop-15_15-1_225_100_150-1"))); + const JsspInputProblem problem = parser.problem(); + EXPECT_EQ(15, problem.jobs_size()); + EXPECT_EQ(15, problem.machines_size()); +} + +TEST(RcpspParserTest, EarlyTardy) { + JsspParser parser; + ASSERT_TRUE(parser.ParseFile(GetPath("1010_1_3"))); + const JsspInputProblem problem = parser.problem(); + EXPECT_EQ(10, problem.jobs_size()); + EXPECT_EQ(10, problem.machines_size()); + EXPECT_EQ(1033, problem.jobs(0).early_due_date()); + EXPECT_EQ(1033, problem.jobs(0).late_due_date()); + EXPECT_EQ(3, problem.jobs(0).earliness_cost_per_time_unit()); + EXPECT_EQ(10, problem.jobs(0).lateness_cost_per_time_unit()); +} + +} // namespace +} // namespace jssp +} // namespace scheduling +} // namespace operations_research diff --git a/ortools/scheduling/rcpsp_parser_test.cc b/ortools/scheduling/rcpsp_parser_test.cc new file mode 100644 index 00000000000..36486e7ddbb --- /dev/null +++ b/ortools/scheduling/rcpsp_parser_test.cc @@ -0,0 +1,115 @@ +// Copyright 2010-2025 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "ortools/scheduling/rcpsp_parser.h" + +#include + +#include "absl/strings/string_view.h" +#include "gtest/gtest.h" +#include "ortools/base/path.h" + +namespace operations_research { +namespace scheduling { +namespace rcpsp { + +namespace { + +std::string GetPath(absl::string_view filename) { + constexpr absl::string_view kTestDataDir = + "_main/ortools/scheduling/testdata/"; + return file::JoinPath(::testing::SrcDir(), kTestDataDir, filename); +} + +TEST(RcpspParserTest, SingleMode) { + RcpspParser parser; + ASSERT_TRUE(parser.ParseFile(GetPath("j301_1.sm"))); + const RcpspProblem problem = parser.problem(); + EXPECT_EQ(32, problem.tasks_size()); + EXPECT_EQ(4, problem.resources_size()); +} + +TEST(RcpspParserTest, MultiMode) { + RcpspParser parser; + ASSERT_TRUE(parser.ParseFile(GetPath("c1510_1.mm.txt"))); + const RcpspProblem problem = parser.problem(); + EXPECT_EQ(18, problem.tasks_size()); + EXPECT_EQ(4, problem.resources_size()); +} + +TEST(RcpspParserTest, MultiModeMax) { + RcpspParser parser; + ASSERT_TRUE(parser.ParseFile(GetPath("psp1.sch"))); + const RcpspProblem problem = parser.problem(); + EXPECT_EQ(12, problem.tasks_size()); + EXPECT_EQ(7, problem.resources_size()); + EXPECT_TRUE(problem.is_rcpsp_max()); +} + +TEST(RcpspParserTest, SingleModeMax) { + RcpspParser parser; + ASSERT_TRUE(parser.ParseFile(GetPath("ubo_10_psp2.sch"))); + const RcpspProblem problem = parser.problem(); + EXPECT_EQ(12, problem.tasks_size()); + EXPECT_EQ(5, problem.resources_size()); + EXPECT_TRUE(problem.is_rcpsp_max()); + EXPECT_FALSE(problem.is_consumer_producer()); +} + +TEST(RcpspParserTest, SingleModeMaxReservoir) { + RcpspParser parser; + ASSERT_TRUE(parser.ParseFile(GetPath("psp10_1.sch"))); + const RcpspProblem problem = parser.problem(); + EXPECT_EQ(12, problem.tasks_size()); + EXPECT_EQ(5, problem.resources_size()); + EXPECT_TRUE(problem.is_rcpsp_max()); + EXPECT_TRUE(problem.is_consumer_producer()); +} + +TEST(RcpspParserTest, SingleModeInvestment) { + 
RcpspParser parser; + ASSERT_TRUE(parser.ParseFile(GetPath("rip1.sch"))); + const RcpspProblem problem = parser.problem(); + EXPECT_EQ(12, problem.tasks_size()); + EXPECT_EQ(1, problem.resources_size()); + EXPECT_TRUE(problem.is_resource_investment()); + EXPECT_EQ(19, problem.deadline()); +} + +TEST(RcpspParserTest, SingleModePatterson) { + RcpspParser parser; + ASSERT_TRUE(parser.ParseFile(GetPath("rg30_set1_pat1.rcp"))); + const RcpspProblem problem = parser.problem(); + EXPECT_EQ(32, problem.tasks_size()); + EXPECT_EQ(4, problem.resources_size()); +} + +TEST(RcpspParserTest, SingleModeLargePatterson) { + RcpspParser parser; + ASSERT_TRUE(parser.ParseFile(GetPath("rg300_1.rcp"))); + const RcpspProblem problem = parser.problem(); + EXPECT_EQ(302, problem.tasks_size()); + EXPECT_EQ(4, problem.resources_size()); +} + +TEST(RcpspParserTest, MultiModeMmLib) { + RcpspParser parser; + ASSERT_TRUE(parser.ParseFile(GetPath("mmlib100_j100100_1.mm.txt"))); + const RcpspProblem problem = parser.problem(); + EXPECT_EQ(102, problem.tasks_size()); + EXPECT_EQ(4, problem.resources_size()); +} +} // namespace +} // namespace rcpsp +} // namespace scheduling +} // namespace operations_research From 8d3645a6cdc9462aa29d67d7d96a8a973875ce54 Mon Sep 17 00:00:00 2001 From: Laurent Perron Date: Wed, 7 Jan 2026 13:01:43 +0100 Subject: [PATCH 102/111] [CP-SAT] fix vivification bug; more work on encodings --- ortools/sat/BUILD.bazel | 28 ++ ortools/sat/clause.cc | 15 +- ortools/sat/clause.h | 8 +- ortools/sat/cp_model_presolve.cc | 344 +--------------- ortools/sat/cp_model_presolve_test.cc | 6 +- ortools/sat/cp_model_solver.cc | 6 +- ortools/sat/integer_base.cc | 17 - ortools/sat/integer_base.h | 5 - ortools/sat/integer_base_test.cc | 22 - ortools/sat/presolve_encoding.cc | 570 +++++++++++++++++++++++++- ortools/sat/presolve_encoding.h | 48 +++ ortools/sat/presolve_encoding_test.cc | 462 +++++++++++++++++++++ ortools/sat/sat_parameters.proto | 6 +- ortools/sat/stat_tables.cc | 6 +- 
ortools/sat/vivification.cc | 86 ++-- ortools/sat/vivification.h | 4 +- ortools/sat/work_assignment.cc | 9 +- ortools/util/logging.cc | 4 +- ortools/util/logging.h | 6 +- 19 files changed, 1222 insertions(+), 430 deletions(-) create mode 100644 ortools/sat/presolve_encoding_test.cc diff --git a/ortools/sat/BUILD.bazel b/ortools/sat/BUILD.bazel index d2e6fd6fff1..5566b0410ef 100644 --- a/ortools/sat/BUILD.bazel +++ b/ortools/sat/BUILD.bazel @@ -335,7 +335,9 @@ cc_library( "//ortools/util:time_limit", "@abseil-cpp//absl/algorithm:container", "@abseil-cpp//absl/base:core_headers", + "@abseil-cpp//absl/cleanup", "@abseil-cpp//absl/container:btree", + "@abseil-cpp//absl/log", "@abseil-cpp//absl/log:check", "@abseil-cpp//absl/types:span", ], @@ -393,8 +395,32 @@ cc_library( deps = [ ":cp_model_utils", ":presolve_context", + "//ortools/base:stl_util", + "//ortools/util:bitset", "//ortools/util:sorted_interval_list", + "@abseil-cpp//absl/algorithm:container", + "@abseil-cpp//absl/container:flat_hash_map", + "@abseil-cpp//absl/container:flat_hash_set", + "@abseil-cpp//absl/container:inlined_vector", "@abseil-cpp//absl/log", + "@abseil-cpp//absl/log:check", + "@protobuf", + ], +) + +cc_test( + name = "presolve_encoding_test", + srcs = ["presolve_encoding_test.cc"], + deps = [ + ":cp_model_cc_proto", + ":model", + ":presolve_context", + ":presolve_encoding", + "//ortools/base:gmock_main", + "//ortools/base:parse_test_proto", + "//ortools/util:sorted_interval_list", + "@abseil-cpp//absl/container:flat_hash_map", + "@abseil-cpp//absl/log:check", ], ) @@ -975,8 +1001,10 @@ cc_library( "//ortools/util:sigint", "//ortools/util:sorted_interval_list", "//ortools/util:strong_integers", + "//ortools/util:testing_utils", "//ortools/util:time_limit", "@abseil-cpp//absl/base:core_headers", + "@abseil-cpp//absl/base:log_severity", "@abseil-cpp//absl/cleanup", "@abseil-cpp//absl/container:btree", "@abseil-cpp//absl/container:flat_hash_map", diff --git a/ortools/sat/clause.cc 
b/ortools/sat/clause.cc index 7b1b47078eb..e463b8e5df3 100644 --- a/ortools/sat/clause.cc +++ b/ortools/sat/clause.cc @@ -529,7 +529,8 @@ bool ClauseManager::InprocessingRewriteClause( } const bool is_reason = ClauseIsUsedAsReason(clause); - CHECK(!is_reason || new_clause[0] == clause->PropagatedLiteral()); + CHECK(!is_reason || new_clause[0] == clause->PropagatedLiteral()) + << new_clause << " old " << clause->AsSpan(); if (new_clause.empty()) return false; // UNSAT. @@ -682,12 +683,24 @@ SatClause* ClauseManager::NextNewClauseToMinimize() { } SatClause* ClauseManager::NextClauseToMinimize() { + const int old = to_first_minimize_index_; for (; to_minimize_index_ < clauses_.size(); ++to_minimize_index_) { if (clauses_[to_minimize_index_]->IsRemoved()) continue; if (!IsRemovable(clauses_[to_minimize_index_])) { return clauses_[to_minimize_index_++]; } } + + // Lets reset and try once more to find one. + to_minimize_index_ = 0; + ++num_to_minimize_index_resets_; + for (; to_minimize_index_ < old; ++to_minimize_index_) { + if (clauses_[to_minimize_index_]->IsRemoved()) continue; + if (!IsRemovable(clauses_[to_minimize_index_])) { + return clauses_[to_minimize_index_++]; + } + } + return nullptr; } diff --git a/ortools/sat/clause.h b/ortools/sat/clause.h index d254e9ba28e..5aab10b4a44 100644 --- a/ortools/sat/clause.h +++ b/ortools/sat/clause.h @@ -308,6 +308,7 @@ class ClauseManager : public SatPropagator { // Returns the next clause to minimize that has never been minimized before. // Note that we only minimize clauses kept forever. SatClause* NextNewClauseToMinimize(); + // Returns the next clause to minimize, this iterator will be reset to the // start so the clauses will be returned in round-robin order. // Note that we only minimize clauses kept forever. @@ -324,7 +325,10 @@ class ClauseManager : public SatPropagator { // Restart the scans. 
void ResetToProbeIndex() { to_probe_index_ = 0; } - void ResetToMinimizeIndex() { to_minimize_index_ = 0; } + int64_t NumToMinimizeIndexResets() const { + return num_to_minimize_index_resets_; + } + // Ensures that NextNewClauseToMinimize() returns only learned clauses. // This is a noop after the first call. void EnsureNewClauseIndexInitialized() { @@ -499,6 +503,8 @@ class ClauseManager : public SatPropagator { // TODO(user): If more indices are needed, switch to a generic API. int to_minimize_index_ = 0; + + int num_to_minimize_index_resets_ = 0; int to_first_minimize_index_ = 0; int to_probe_index_ = 0; diff --git a/ortools/sat/cp_model_presolve.cc b/ortools/sat/cp_model_presolve.cc index ae01a7177c0..71e75105340 100644 --- a/ortools/sat/cp_model_presolve.cc +++ b/ortools/sat/cp_model_presolve.cc @@ -6815,7 +6815,8 @@ bool CpModelPresolver::PresolveNoOverlap2D(int /*c*/, ConstraintProto* ct) { IntegerValue(context_->EndMax(y))}); } CompactVectorVector no_overlaps; - absl::c_sort(indexed_intervals, IndexedInterval::ComparatorByStart()); + absl::c_stable_sort(indexed_intervals, + IndexedInterval::ComparatorByStart()); ConstructOverlappingSets(absl::MakeSpan(indexed_intervals), &no_overlaps); for (int i = 0; i < no_overlaps.size(); ++i) { ConstraintProto* new_ct = context_->working_model->add_constraints(); @@ -9431,341 +9432,18 @@ bool CpModelPresolver::MergeNoOverlap2DConstraints() { return true; } -namespace { -bool ConstraintIsEncodingBound(const ConstraintProto& ct) { - if (ct.constraint_case() != ConstraintProto::kLinear) return false; - if (ct.linear().vars_size() != 1) return false; - if (ct.linear().coeffs(0) != 1) return false; - if (ct.enforcement_literal_size() != 1) return false; - return true; -} -} // namespace - -// Return true if something changed. 
-bool CpModelPresolver::DetectEncodedComplexDomain( - PresolveContext* context, ConstraintProto* ct, - const Bitset64& pertinent_bools) { - if (context->ModelIsUnsat()) return false; - if (ct->constraint_case() != ConstraintProto::kAtMostOne && - ct->constraint_case() != ConstraintProto::kExactlyOne && - ct->constraint_case() != ConstraintProto::kBoolOr) { - return false; - } - - // Handling exaclty_one, at_most_one and bool_or is pretty similar. If we have - // l1 <=> v \in D1 - // l2 <=> v \in D2 - // - // We built - // l <=> v \in (D1 U D2). - // - // Moreover, if we have exactly_one(l1, l2, ...) or at_most_one(l1, l2, ...), - // we know that v cannot be in the intersection of D1 and D2. Thus, we first - // unconditionally remove (D1 ∩ D2) from the domain of v, making - // (l1=true and l2=true) impossible and allowing us to write our clauses as - // exactly_one(l1 or l2, ...) or at_most_one(l1 or l2, ...). - // - // Thus, other than the domain reduction that should not be done for the - // bool_or, all we need is to create a variable - // (l1 or l2) == l <=> (v \in (D1 U D2)). - google::protobuf::RepeatedField& literals = - ct->constraint_case() == ConstraintProto::kAtMostOne - ? *ct->mutable_at_most_one()->mutable_literals() - : (ct->constraint_case() == ConstraintProto::kExactlyOne - ? *ct->mutable_exactly_one()->mutable_literals() - : *ct->mutable_bool_or()->mutable_literals()); - if (literals.size() <= 1) return false; - - if (!ct->enforcement_literal().empty()) { - // TODO(user): support this case if it any problem needs it. 
- return false; - } - - struct Linear1Info { - int lit = -1; - int positive_linear1_ct = -1; - int negative_linear1_ct = -1; - }; - absl::flat_hash_map> var_to_linear1; - for (const int lit : literals) { - if (PositiveRef(lit) < pertinent_bools.size() && - !pertinent_bools[PositiveRef(lit)]) { - continue; - } - bool or_and_single_var_linear1 = true; - Linear1Info info; - int var = -1; - for (const int c : context->VarToConstraints(PositiveRef(lit))) { - if (c < 0) { - or_and_single_var_linear1 = false; - break; - } - const ConstraintProto& other_ct = context->working_model->constraints(c); - if (&other_ct == ct) continue; - if (!ConstraintIsEncodingBound(other_ct)) { - or_and_single_var_linear1 = false; - break; - } - if (other_ct.enforcement_literal(0) != lit && - other_ct.enforcement_literal(0) != NegatedRef(lit)) { - or_and_single_var_linear1 = false; - break; - } - if (var == -1) { - var = other_ct.linear().vars(0); - } else if (var != other_ct.linear().vars(0)) { - or_and_single_var_linear1 = false; - break; - } - info.lit = lit; - if (other_ct.enforcement_literal(0) == lit) { - info.positive_linear1_ct = c; - } else { - DCHECK_EQ(other_ct.enforcement_literal(0), NegatedRef(lit)); - info.negative_linear1_ct = c; - } - } - // When we have - // lit => var in D1 - // ~lit => var in D2 - // we can represent this on a line: - // - // ----------------D1---------------- - // ----------------D2--------------- - // |+++++++++++|*********************|++++++++++| - // lit=false lit unconstrained lit=true - // - // Handling the case where the variable is unconstrained by the lit is a - // bit of a pain: we want to replace two literals in a exactly_one by a - // single one, and if they are both unconstrained we might be forced to pick - // one arbitrarily to set to true. In any case, this is not a proper - // encoding of a complex domain, so we just ignore it. - // TODO(user): This can be implemented if it turns out to be common. 
- if (or_and_single_var_linear1 && info.negative_linear1_ct != -1 && - info.positive_linear1_ct != -1) { - const Domain domain_enforced_lit = ReadDomainFromProto( - context->working_model->constraints(info.positive_linear1_ct) - .linear()); - - // ~lit1 => var in domain_enforced_not_lit1 - const Domain domain_enforced_not_lit = ReadDomainFromProto( - context->working_model->constraints(info.negative_linear1_ct) - .linear()); - if (domain_enforced_lit.IntersectionWith(domain_enforced_not_lit) - .IsEmpty()) { - var_to_linear1[var].push_back(info); - } - } - } - // Ignore all variables that only appear once. - std::vector>> var_to_linear1_infos; - for (const auto& [var, linear1_infos] : var_to_linear1) { - if (linear1_infos.size() > 1) { - var_to_linear1_infos.push_back( - {var, std::vector(linear1_infos.begin(), - linear1_infos.end())}); - } - } - if (var_to_linear1_infos.empty()) return false; - - // We have some variables to simplify! Start by sorting to make the code - // deterministic. - absl::c_sort(var_to_linear1_infos, - [](const std::pair>& a, - const std::pair>& b) { - return a.first < b.first; - }); - // Doing the general code is rather complex, so we will just simplify one - // variable and two literals at a time, and leave for the presolve fixpoint - // to do the rest. 
- for (const auto& [var, infos] : var_to_linear1_infos) { - const Linear1Info& info1 = infos[0]; - const Linear1Info& info2 = infos[1]; - const int lit1 = info1.lit; - const int lit2 = info2.lit; - const Domain original_var_domain = context->DomainOf(var); - - DCHECK_NE(info1.positive_linear1_ct, -1); - DCHECK_NE(info2.positive_linear1_ct, -1); - DCHECK_NE(info1.negative_linear1_ct, -1); - DCHECK_NE(info2.negative_linear1_ct, -1); - - // lit1 => var in domain_enforced_lit1 - const Domain domain_enforced_lit1 = ReadDomainFromProto( - context->working_model->constraints(info1.positive_linear1_ct) - .linear()); - - // ~lit1 => var in domain_enforced_not_lit1 - const Domain domain_enforced_not_lit1 = ReadDomainFromProto( - context->working_model->constraints(info1.negative_linear1_ct) - .linear()); - - // lit2 => var in domain_enforced_lit2 - const Domain domain_enforced_lit2 = ReadDomainFromProto( - context->working_model->constraints(info2.positive_linear1_ct) - .linear()); - - // ~lit2 => var in domain_enforced_not_lit2 - const Domain domain_enforced_not_lit2 = ReadDomainFromProto( - context->working_model->constraints(info2.negative_linear1_ct) - .linear()); - - DCHECK(domain_enforced_lit1.IntersectionWith(domain_enforced_not_lit1) - .IsEmpty()); - DCHECK(domain_enforced_lit2.IntersectionWith(domain_enforced_not_lit2) - .IsEmpty()); - - // First, the variable must be in the domain of either the lit or of its - // negation. - if (!context->IntersectDomainWith( - var, domain_enforced_lit1.UnionWith(domain_enforced_not_lit1))) { - return true; - } - if (!context->IntersectDomainWith( - var, domain_enforced_lit2.UnionWith(domain_enforced_not_lit2))) { - return true; - } - - if (ct->constraint_case() != ConstraintProto::kBoolOr) { - // In virtue of the AMO, var must not be in the intersection of the two - // domains where both literals are true. 
- if (!context->IntersectDomainWith( - var, domain_enforced_lit2.IntersectionWith(domain_enforced_lit1) - .Complement())) { - return true; - } - } - const Domain domain_new_var_false = context->DomainOf(var).IntersectionWith( - domain_enforced_not_lit1.IntersectionWith(domain_enforced_not_lit2)); - const Domain domain_new_var_true = context->DomainOf(var).IntersectionWith( - domain_new_var_false.Complement()); - - // Now we want to build a lit3 = (lit1 or lit2) to use in the AMO/bool_or. - const int new_var = context->NewBoolVarWithClause({lit1, lit2}); - - if (domain_new_var_true.IsEmpty()) { - if (!context->SetLiteralToFalse(new_var)) return true; - } else if (domain_new_var_false.IsEmpty()) { - if (!context->SetLiteralToTrue(new_var)) return true; - } else { - ConstraintProto* new_ct = context->working_model->add_constraints(); - new_ct->add_enforcement_literal(new_var); - new_ct->mutable_linear()->add_vars(var); - new_ct->mutable_linear()->add_coeffs(1); - FillDomainInProto(domain_new_var_true, new_ct->mutable_linear()); - new_ct = context->working_model->add_constraints(); - new_ct->add_enforcement_literal(NegatedRef(new_var)); - new_ct->mutable_linear()->add_vars(var); - new_ct->mutable_linear()->add_coeffs(1); - FillDomainInProto(domain_new_var_false, new_ct->mutable_linear()); - } - - // Remove the two literals from the AMO. 
- int new_size = 0; - for (int i = 0; i < literals.size(); ++i) { - if (literals.Get(i) != lit1 && literals.Get(i) != lit2) { - literals.Set(new_size++, literals.Get(i)); - } - } - literals.Truncate(new_size); - literals.Add(new_var); - context->UpdateNewConstraintsVariableUsage(); - context->UpdateRuleStats( - "variables: detected encoding of a complex domain with multiple " - "linear1"); - } - return true; -} - void CpModelPresolver::DetectEncodedComplexDomains(PresolveContext* context) { PresolveTimer timer(__FUNCTION__, logger_, time_limit_); - // Constraints taking a list of literals that can, under some conditions, - // accept the following substitution: - // constraint(a, b, ...) => constraint(a | b, ...) - // one obvious case is bool_or. But if we can know that a and b cannot be - // both true, we can also apply this to at_most_one and exactly_one. - std::vector constraint_encoding_or; // bool_or, exactly_one, at_most_one - - // To make sure this is not too slow, first do a pass to gather all linear1 - // constraints that shares the same variable with other three linear1. - absl::flat_hash_map> var_to_linear1; - for (int i = 0; i < context->working_model->constraints_size(); ++i) { - const ConstraintProto& ct = context->working_model->constraints(i); - if (ct.constraint_case() == ConstraintProto::kBoolOr || - ct.constraint_case() == ConstraintProto::kAtMostOne || - ct.constraint_case() == ConstraintProto::kExactlyOne) { - constraint_encoding_or.push_back(i); - continue; - } - if (!ConstraintIsEncodingBound(ct)) { - continue; - } - var_to_linear1[ct.linear().vars(0)].push_back(i); - } - absl::erase_if(var_to_linear1, - [](const auto& p) { return p.second.size() <= 3; }); - // Now that we reduced cheaply our set of "interesting" linear1, let's use the - // variable->constraint graph to restrict it further. 
- for (auto& [var, linear1_cts] : var_to_linear1) { - int new_size = 0; - for (const int ct : linear1_cts) { - const int ref = - context->working_model->constraints(ct).enforcement_literal(0); - // We want to focus on literals that become removable once we undo the - // encoding, otherwise this whole step might just make the problem harder. - // So we want it to appear in two linear1 and a bool_or/amo/exactly_one. - if (context->VarToConstraints(PositiveRef(ref)).size() <= 3) { - linear1_cts[new_size++] = ct; - } - } - linear1_cts.resize(new_size); - } - absl::erase_if(var_to_linear1, - [](const auto& p) { return p.second.size() <= 3; }); - - if (var_to_linear1.empty()) return; - - // Now we use the linear1 we found to see which bool_or/amo/exactly_one could - // be applied to the heuristic. - Bitset64 booleans_potentially_encoding_domain( - context_->working_model->variables_size()); - for (const auto& [unused, linear1_cts] : var_to_linear1) { - for (const int ct : linear1_cts) { - booleans_potentially_encoding_domain.Set(PositiveRef( - context->working_model->constraints(ct).enforcement_literal(0))); - } - } - int new_encoding_or_count = 0; - for (int i = 0; i < constraint_encoding_or.size(); ++i) { - const int c = constraint_encoding_or[i]; - const ConstraintProto& ct = context->working_model->constraints(c); - const BoolArgumentProto& bool_ct = - ct.constraint_case() == ConstraintProto::kAtMostOne - ? ct.at_most_one() - : (ct.constraint_case() == ConstraintProto::kExactlyOne - ? 
ct.exactly_one() - : ct.bool_or()); - if (absl::c_count_if( - bool_ct.literals(), - [booleans_potentially_encoding_domain](int ref) { - return booleans_potentially_encoding_domain[PositiveRef(ref)]; - }) < 2) { - continue; - } - constraint_encoding_or[new_encoding_or_count++] = c; - } - constraint_encoding_or.resize(new_encoding_or_count); + if (context->ModelIsUnsat()) return; + if (time_limit_->LimitReached()) return; - for (const int c : constraint_encoding_or) { - ConstraintProto* ct = context->working_model->mutable_constraints(c); - bool changed = false; - do { - changed = DetectEncodedComplexDomain( - context, ct, booleans_potentially_encoding_domain); - if (changed) { - context->UpdateConstraintVariableUsage(c); - } - } while (changed); + std::vector local_models = + CreateVariableEncodingLocalModels(context); + for (VariableEncodingLocalModel& local_model : local_models) { + if (time_limit_->LimitReached()) return; + if (!DetectAllEncodedComplexDomain(context, local_model)) { + return; + } } } diff --git a/ortools/sat/cp_model_presolve_test.cc b/ortools/sat/cp_model_presolve_test.cc index f086e60cc5c..b3a3e72194b 100644 --- a/ortools/sat/cp_model_presolve_test.cc +++ b/ortools/sat/cp_model_presolve_test.cc @@ -7859,9 +7859,11 @@ TEST(PresolveCpModelTest, DetectEncodingFromLinear) { params.set_keep_all_feasible_solutions_in_presolve(true); const CpModelProto presolved_model = PresolveForTest(initial_model, params); + IntegerVariableProto expected_proto; + FillDomainInProto(Domain::FromValues({3, 6, 9, 10, 12}), &expected_proto); // The values are 10, 10-1, 10-7, 10+2, and 10-4. 
- EXPECT_EQ(ReadDomainFromProto(presolved_model.variables(5)).ToString(), - "[3][6][9,10][12]"); + EXPECT_THAT(presolved_model.variables(), + testing::Contains(testing::EqualsProto(expected_proto))); } TEST(PresolveCpModelTest, ReplaceNonEqual) { diff --git a/ortools/sat/cp_model_solver.cc b/ortools/sat/cp_model_solver.cc index 0ab16590142..720ecfb1a30 100644 --- a/ortools/sat/cp_model_solver.cc +++ b/ortools/sat/cp_model_solver.cc @@ -31,6 +31,7 @@ #include #include +#include "absl/base/log_severity.h" #include "absl/base/thread_annotations.h" #include "absl/container/btree_map.h" #include "absl/container/btree_set.h" @@ -97,6 +98,7 @@ #include "ortools/util/random_engine.h" #include "ortools/util/sigint.h" #include "ortools/util/sorted_interval_list.h" +#include "ortools/util/testing_utils.h" #include "ortools/util/time_limit.h" ABSL_FLAG( @@ -2529,8 +2531,8 @@ CpSolverResponse SolveCpModel(const CpModelProto& model_proto, Model* model) { } #endif // ORTOOLS_TARGET_OS_SUPPORTS_THREADS - if (DEBUG_MODE) { - LOG(WARNING) + if (DEBUG_MODE && !ProbablyRunningInsideUnitTest()) { + LOG_EVERY_N_SEC(WARNING, 0.1) << "WARNING: CP-SAT is running in debug mode. The solver will " "be slow because we will do a lot of extra checks. 
Compile in " "optimization mode to gain an order of magnitude speedup."; diff --git a/ortools/sat/integer_base.cc b/ortools/sat/integer_base.cc index 46d9996f0da..40eb284a979 100644 --- a/ortools/sat/integer_base.cc +++ b/ortools/sat/integer_base.cc @@ -238,23 +238,6 @@ RelationStatus BestBinaryRelationBounds::GetStatus(LinearExpression2 expr, return RelationStatus::IS_UNKNOWN; } -IntegerValue BestBinaryRelationBounds::GetUpperBound( - LinearExpression2 expr) const { - expr.SimpleCanonicalization(); - const IntegerValue gcd = expr.DivideByGcd(); - const bool negated = expr.NegateForCanonicalization(); - const auto it = best_bounds_.find(expr); - if (it != best_bounds_.end()) { - const auto [known_lb, known_ub] = it->second; - if (negated) { - return CapProdI(gcd, -known_lb); - } else { - return CapProdI(gcd, known_ub); - } - } - return kMaxIntegerValue; -} - std::vector> BestBinaryRelationBounds::GetSortedNonTrivialUpperBounds() const { std::vector> root_relations_sorted; diff --git a/ortools/sat/integer_base.h b/ortools/sat/integer_base.h index 0301f443f5c..66a04864bb2 100644 --- a/ortools/sat/integer_base.h +++ b/ortools/sat/integer_base.h @@ -514,11 +514,6 @@ class BestBinaryRelationBounds { RelationStatus GetStatus(LinearExpression2 expr, IntegerValue lb, IntegerValue ub) const; - // Return a valid upper-bound on the given LinearExpression2. Note that we - // assume kMaxIntegerValue is always valid and returns it if we don't have an - // entry in the hash-map. - IntegerValue GetUpperBound(LinearExpression2 expr) const; - // Same as GetUpperBound() but assume the expression is already canonicalized. // This is slightly faster. 
IntegerValue UpperBoundWhenCanonicalized(LinearExpression2 expr) const; diff --git a/ortools/sat/integer_base_test.cc b/ortools/sat/integer_base_test.cc index 2b45a6e2069..5285b4e383c 100644 --- a/ortools/sat/integer_base_test.cc +++ b/ortools/sat/integer_base_test.cc @@ -94,28 +94,6 @@ TEST(BestBinaryRelationBoundsTest, Basic) { best_bounds.GetStatus(expr, IntegerValue(-5), IntegerValue(3))); } -TEST(BestBinaryRelationBoundsTest, UpperBound) { - LinearExpression2 expr; - expr.vars[0] = IntegerVariable(0); - expr.vars[1] = IntegerVariable(2); - expr.coeffs[0] = IntegerValue(1); - expr.coeffs[1] = IntegerValue(-1); - - using AddResult = BestBinaryRelationBounds::AddResult; - BestBinaryRelationBounds best_bounds; - EXPECT_EQ(best_bounds.Add(expr, IntegerValue(0), IntegerValue(5)), - std::make_pair(AddResult::ADDED, AddResult::ADDED)); - - EXPECT_EQ(best_bounds.GetUpperBound(expr), IntegerValue(5)); - - expr.coeffs[0] *= 3; - expr.coeffs[1] *= 3; - EXPECT_EQ(best_bounds.GetUpperBound(expr), IntegerValue(15)); - - expr.Negate(); - EXPECT_EQ(best_bounds.GetUpperBound(expr), IntegerValue(0)); -} - AffineExpression OtherAffineLowerBound(LinearExpression2 expr, int var_index, IntegerValue expr_lb, IntegerValue other_var_lb) { diff --git a/ortools/sat/presolve_encoding.cc b/ortools/sat/presolve_encoding.cc index 33398ff76b9..b11e4f4c876 100644 --- a/ortools/sat/presolve_encoding.cc +++ b/ortools/sat/presolve_encoding.cc @@ -17,17 +17,586 @@ #include #include #include +#include #include #include +#include "absl/algorithm/container.h" +#include "absl/container/flat_hash_map.h" +#include "absl/container/flat_hash_set.h" +#include "absl/container/inlined_vector.h" +#include "absl/log/check.h" #include "absl/log/log.h" +#include "google/protobuf/repeated_field.h" +#include "ortools/base/stl_util.h" #include "ortools/sat/cp_model_utils.h" #include "ortools/sat/presolve_context.h" +#include "ortools/util/bitset.h" #include "ortools/util/sorted_interval_list.h" namespace 
operations_research { namespace sat { +namespace { +bool ConstraintIsEncodingBound(const ConstraintProto& ct) { + if (ct.constraint_case() != ConstraintProto::kLinear) return false; + if (ct.linear().vars_size() != 1) return false; + if (ct.linear().coeffs(0) != 1) return false; + if (ct.enforcement_literal_size() != 1) return false; + if (PositiveRef(ct.enforcement_literal(0)) == ct.linear().vars(0)) { + return false; + } + return true; +} +} // namespace + +std::vector CreateVariableEncodingLocalModels( + PresolveContext* context) { + // In this function we want to make sure we don't waste too much time on + // problems that do not have many linear1. Thus, the first thing we do is to + // filter out as soon and cheaply as possible the bare minimum of constraints + // that could be relevant to the final output. + + // Constraints taking a list of literals that can, under some conditions, + // accept the following substitution: + // constraint(a, b, ...) => constraint(a | b, ...) + // one obvious case is bool_or. But if we can know that a and b cannot be + // both true, we can also apply this to at_most_one and exactly_one. + // + // Note that in the implementation we might for simplicity refer to the + // constraints we are interested in as "bool_or" but this is just to avoid + // mentioning all the three types over and over. + // TODO(user): this should also work for linear constraints with the two + // booleans having the same coefficient? + std::vector constraint_encoding_or; // bool_or, exactly_one, at_most_one + + // Do a pass to gather all linear1 constraints. 
+ absl::flat_hash_map> var_to_linear1; + for (int i = 0; i < context->working_model->constraints_size(); ++i) { + const ConstraintProto& ct = context->working_model->constraints(i); + if (ct.constraint_case() == ConstraintProto::kBoolOr || + ct.constraint_case() == ConstraintProto::kAtMostOne || + ct.constraint_case() == ConstraintProto::kExactlyOne) { + constraint_encoding_or.push_back(i); + continue; + } + if (!ConstraintIsEncodingBound(ct)) { + continue; + } + var_to_linear1[ct.linear().vars(0)].push_back(i); + } + + // Filter out the variables that do not have an interesting encoding. + absl::erase_if(var_to_linear1, [context](const auto& p) { + if (p.second.size() > 1) return false; + return context->VarToConstraints(p.first).size() > 2; + }); + + if (var_to_linear1.empty()) return {}; + + absl::flat_hash_map> bool_to_var_encodings; + + // Now we use the linear1 we found to see which bool_or/amo/exactly_one are + // linking two encodings of the same variable. But first, since some models + // have a lot of bool_or, we use a simple heuristic to filter out all that are + // not related to the encodings. We use a bitset to keep track of all boolean + // potentially encoding a domain for any variable and we filter out all + // bool_or that are not linked to at least two of these booleans. + Bitset64 booleans_potentially_encoding_domain( + context->working_model->variables_size()); + + for (const auto& [var, linear1_cts] : var_to_linear1) { + for (const int c : linear1_cts) { + const ConstraintProto& ct = context->working_model->constraints(c); + const int bool_var = PositiveRef(ct.enforcement_literal(0)); + booleans_potentially_encoding_domain.Set(bool_var); + bool_to_var_encodings[bool_var].push_back(var); + } + } + for (auto& [bool_var, var_encodings] : bool_to_var_encodings) { + // Remove the potential duplicate for the negation. 
+ gtl::STLSortAndRemoveDuplicates(&var_encodings); + } + int new_encoding_or_count = 0; + for (int i = 0; i < constraint_encoding_or.size(); ++i) { + const int c = constraint_encoding_or[i]; + const ConstraintProto& ct = context->working_model->constraints(c); + const BoolArgumentProto& bool_ct = + ct.constraint_case() == ConstraintProto::kAtMostOne + ? ct.at_most_one() + : (ct.constraint_case() == ConstraintProto::kExactlyOne + ? ct.exactly_one() + : ct.bool_or()); + if (absl::c_count_if( + bool_ct.literals(), + [booleans_potentially_encoding_domain](int ref) { + return booleans_potentially_encoding_domain[PositiveRef(ref)]; + }) < 2) { + continue; + } + constraint_encoding_or[new_encoding_or_count++] = c; + } + constraint_encoding_or.resize(new_encoding_or_count); + + // Track the number of times a given boolean appears in the local model for a + // given variable. + struct VariableAndBoolInfo { + // Can only be 1 or 2 (for negation) if properly presolved. + int linear1_count = 0; + // Number of times the boolean will appear in + // `constraints_linking_two_encoding_booleans`. + int bool_or_count = 0; + }; + absl::flat_hash_map, VariableAndBoolInfo> var_bool_counts; + + // Now that we have a potentially smaller set of bool_or, we actually check + // which of them are linking two encodings of the same variable. + absl::flat_hash_map> var_to_constraints_encoding_or; + + // Map from variable to the bools that appear in a given bool_or. + absl::flat_hash_map> var_to_bools; + + for (const int c : constraint_encoding_or) { + var_to_bools.clear(); + const ConstraintProto& ct = context->working_model->constraints(c); + const BoolArgumentProto& bool_ct = + ct.constraint_case() == ConstraintProto::kAtMostOne + ? ct.at_most_one() + : (ct.constraint_case() == ConstraintProto::kExactlyOne + ? 
ct.exactly_one() + : ct.bool_or()); + for (const int ref : bool_ct.literals()) { + const int bool_var = PositiveRef(ref); + if (!booleans_potentially_encoding_domain[bool_var]) continue; + for (const int var : bool_to_var_encodings[bool_var]) { + var_to_bools[var].push_back(bool_var); + } + } + for (const auto& [var, bools] : var_to_bools) { + if (bools.size() >= 2) { + // We have two encodings of `var` in the same constraint `c`. Thus `c` + // should be part of the local model for `var`. + var_to_constraints_encoding_or[var].push_back(c); + for (const int bool_var : bools) { + var_bool_counts[{var, bool_var}].bool_or_count++; + } + } + } + } + + std::vector local_models; + // Now that we have all the information, we can create the local models. + for (const auto& [var, linear1_cts] : var_to_linear1) { + VariableEncodingLocalModel& encoding_model = local_models.emplace_back(); + encoding_model.var = var; + encoding_model.linear1_constraints.assign(linear1_cts.begin(), + linear1_cts.end()); + encoding_model.constraints_linking_two_encoding_booleans = + var_to_constraints_encoding_or[var]; + absl::c_sort(encoding_model.constraints_linking_two_encoding_booleans); + encoding_model.var_in_more_than_one_constraint_outside_the_local_model = + (context->VarToConstraints(var).size() - linear1_cts.size() > 1); + for (const int ct : linear1_cts) { + const int bool_var = PositiveRef( + context->working_model->constraints(ct).enforcement_literal(0)); + encoding_model.bools_only_used_inside_the_local_model.insert(bool_var); + var_bool_counts[{var, bool_var}].linear1_count++; + } + absl::erase_if(encoding_model.bools_only_used_inside_the_local_model, + [context, v = var, &var_bool_counts](int bool_var) { + const auto& counts = var_bool_counts[{v, bool_var}]; + return context->VarToConstraints(bool_var).size() != + counts.linear1_count + counts.bool_or_count; + }); + auto it = context->ObjectiveMap().find(var); + if (it != context->ObjectiveMap().end()) { + 
encoding_model.variable_coeff_in_objective = it->second; + } + } + absl::c_sort(local_models, [](const VariableEncodingLocalModel& a, + const VariableEncodingLocalModel& b) { + return a.var < b.var; + }); + return local_models; +} + +bool BasicPresolveAndGetFullyEncodedDomains( + PresolveContext* context, VariableEncodingLocalModel& local_model, + absl::flat_hash_map* result, bool* changed) { + *changed = false; + absl::flat_hash_map ref_to_linear1; + + // Fill ref_to_linear1 and do some basic presolving. + const Domain var_domain = context->DomainOf(local_model.var); + for (const int ct : local_model.linear1_constraints) { + ConstraintProto* ct_proto = context->working_model->mutable_constraints(ct); + DCHECK(ConstraintIsEncodingBound(*ct_proto)); + const int ref = ct_proto->enforcement_literal(0); + Domain domain = ReadDomainFromProto(ct_proto->linear()); + if (!domain.IsIncludedIn(var_domain)) { + *changed = true; + domain = domain.IntersectionWith(context->DomainOf(local_model.var)); + if (domain.IsEmpty()) { + context->UpdateRuleStats( + "variables: linear1 with domain not included in variable domain"); + if (!context->SetLiteralToFalse(ref)) { + return false; + } + ct_proto->Clear(); + context->UpdateConstraintVariableUsage(ct); + continue; + } + FillDomainInProto(domain, ct_proto->mutable_linear()); + } + auto [it, inserted] = ref_to_linear1.insert({ref, ct}); + if (!inserted) { + *changed = true; + ConstraintProto* old_ct_proto = + context->working_model->mutable_constraints(it->second); + const Domain old_ct_domain = ReadDomainFromProto(old_ct_proto->linear()); + const Domain new_domain = domain.IntersectionWith(old_ct_domain); + ct_proto->Clear(); + context->UpdateConstraintVariableUsage(ct); + if (new_domain.IsEmpty()) { + context->UpdateRuleStats( + "variables: linear1 with same variable and enforcement and " + "non-overlapping domain, setting enforcement to false"); + if (!context->SetLiteralToFalse(ref)) { + return false; + } + old_ct_proto->Clear(); 
+ context->UpdateConstraintVariableUsage(it->second); + ref_to_linear1.erase(ref); + } else { + FillDomainInProto(new_domain, old_ct_proto->mutable_linear()); + context->UpdateRuleStats( + "variables: merged two linear1 with same variable and enforcement"); + } + } + } + + // Remove from the local model anything that was removed in the loop above. + int new_linear1_size = 0; + for (int i = 0; i < local_model.linear1_constraints.size(); ++i) { + const int ct = local_model.linear1_constraints[i]; + const ConstraintProto& ct_proto = context->working_model->constraints(ct); + if (ct_proto.constraint_case() != ConstraintProto::kLinear) continue; + if (context->IsFixed(ct_proto.enforcement_literal(0))) { + continue; + } + DCHECK(ConstraintIsEncodingBound(ct_proto)); + local_model.linear1_constraints[new_linear1_size++] = ct; + } + if (new_linear1_size != local_model.linear1_constraints.size()) { + *changed = true; + local_model.linear1_constraints.resize(new_linear1_size); + // Rerun the presolve loop to recompute ref_to_linear1. + return true; + } + + for (const auto& [ref, ct] : ref_to_linear1) { + auto it = ref_to_linear1.find(NegatedRef(ref)); + if (it == ref_to_linear1.end()) continue; + const ConstraintProto& positive_ct = + context->working_model->constraints(ct); + const ConstraintProto& negative_ct = + context->working_model->constraints(it->second); + const Domain positive_domain = ReadDomainFromProto(positive_ct.linear()); + const Domain negative_domain = ReadDomainFromProto(negative_ct.linear()); + if (!positive_domain.IntersectionWith(negative_domain).IsEmpty()) { + // This is not a fully encoded domain. For example, it could be + // l => x in {-inf,inf} + // ~l => x in {-inf,inf} + // which actually means that `l` doesn't really encode anything. 
+ continue; + } + bool domain_modified = false; + if (!context->IntersectDomainWith( + local_model.var, positive_domain.UnionWith(negative_domain), + &domain_modified)) { + return false; + } + *changed = *changed || domain_modified; + result->insert({ref, positive_domain}); + result->insert({NegatedRef(ref), negative_domain}); + } + + // Now detect a different way of fully encoding a domain: + // l1 => x in D1 + // l2 => x in D2 + // l3 => x in D3 + // ... + // l_n => x in D_n + // bool_or(l1, l2, l3, ..., l_n) + // + // where D1, D2, ..., D_n are non overlapping. This works too for exactly_one. + for (const int ct : local_model.constraints_linking_two_encoding_booleans) { + const ConstraintProto& ct_proto = context->working_model->constraints(ct); + if (ct_proto.constraint_case() != ConstraintProto::kBoolOr && + ct_proto.constraint_case() != ConstraintProto::kExactlyOne) { + continue; + } + if (!ct_proto.enforcement_literal().empty()) continue; + const BoolArgumentProto& bool_or = + ct_proto.constraint_case() == ConstraintProto::kExactlyOne + ? 
ct_proto.exactly_one() + : ct_proto.bool_or(); + if (bool_or.literals().size() < 2) continue; + bool encoding_detected = true; + Domain non_overlapping_domain; + std::vector> ref_and_domains; + for (const int ref : bool_or.literals()) { + auto it = ref_to_linear1.find(ref); + if (it == ref_to_linear1.end()) { + encoding_detected = false; + break; + } + const Domain domain = ReadDomainFromProto( + context->working_model->constraints(it->second).linear()); + ref_and_domains.push_back({ref, domain}); + if (!non_overlapping_domain.IntersectionWith(domain).IsEmpty()) { + encoding_detected = false; + break; + } + non_overlapping_domain = non_overlapping_domain.UnionWith(domain); + } + if (encoding_detected) { + context->UpdateRuleStats("variables: detected fully encoded domain"); + bool domain_modified = false; + if (!context->IntersectDomainWith(local_model.var, non_overlapping_domain, + &domain_modified)) { + return false; + } + if (domain_modified) { + context->UpdateRuleStats( + "variables: restricted domain to fully encoded domain"); + } + *changed = *changed || domain_modified; + for (const auto& [ref, domain] : ref_and_domains) { + result->insert({ref, domain}); + result->insert({NegatedRef(ref), + var_domain.IntersectionWith(domain.Complement())}); + } + // Promote a bool_or to an exactly_one. 
+      if (ct_proto.constraint_case() == ConstraintProto::kBoolOr) {
+        context->UpdateRuleStats(
+            "variables: promoted bool_or to exactly_one for fully encoded "
+            "domain");
+        std::vector new_enforcement_literals(bool_or.literals().begin(),
+                                             bool_or.literals().end());
+        context->working_model->mutable_constraints(ct)->clear_bool_or();
+        context->working_model->mutable_constraints(ct)
+            ->mutable_exactly_one()
+            ->mutable_literals()
+            ->Add(new_enforcement_literals.begin(),
+                  new_enforcement_literals.end());
+        *changed = true;
+      }
+    }
+  }
+  return true;
+}
+
+// Return false on unsat
+bool DetectEncodedComplexDomain(
+    PresolveContext* context, int ct_index,
+    VariableEncodingLocalModel& local_model,
+    absl::flat_hash_map* fully_encoded_domains, bool* changed) {
+  ConstraintProto* ct = context->working_model->mutable_constraints(ct_index);
+  *changed = false;
+
+  if (context->ModelIsUnsat()) return false;
+  DCHECK(ct->constraint_case() == ConstraintProto::kAtMostOne ||
+         ct->constraint_case() == ConstraintProto::kExactlyOne ||
+         ct->constraint_case() == ConstraintProto::kBoolOr);
+
+  // Handling exactly_one, at_most_one and bool_or is pretty similar. If we have
+  // l1 <=> v \in D1
+  // l2 <=> v \in D2
+  //
+  // We build
+  // l <=> v \in (D1 U D2).
+  //
+  // Moreover, if we have exactly_one(l1, l2, ...) or at_most_one(l1, l2, ...),
+  // we know that v cannot be in the intersection of D1 and D2. Thus, we first
+  // unconditionally remove (D1 ∩ D2) from the domain of v, making
+  // (l1=true and l2=true) impossible and allowing us to write our clauses as
+  // exactly_one(l1 or l2, ...) or at_most_one(l1 or l2, ...).
+  //
+  // Thus, other than the domain reduction that should not be done for the
+  // bool_or, all we need is to create a variable
+  // (l1 or l2) == l <=> (v \in (D1 U D2)).
+  google::protobuf::RepeatedField& literals =
+      ct->constraint_case() == ConstraintProto::kAtMostOne
+          ? 
*ct->mutable_at_most_one()->mutable_literals() + : (ct->constraint_case() == ConstraintProto::kExactlyOne + ? *ct->mutable_exactly_one()->mutable_literals() + : *ct->mutable_bool_or()->mutable_literals()); + if (literals.size() <= 1) return true; + + if (!ct->enforcement_literal().empty()) { + // TODO(user): support this case if it any problem needs it. + return true; + } + + // When we have + // lit => var in D1 + // ~lit => var in D2 + // we can represent this on a line: + // + // ----------------D1---------------- + // ----------------D2--------------- + // |+++++++++++|*********************|++++++++++| + // lit=false lit unconstrained lit=true + // + // Handling the case where the variable is unconstrained by the lit is a + // bit of a pain: we want to replace two literals in a exactly_one by a + // single one, and if they are both unconstrained we might be forced to pick + // one arbitrarily to set to true. In any case, this is not a proper + // encoding of a complex domain, so we just ignore it. + // TODO(user): This can be implemented if it turns out to be common. + + std::optional maybe_lit1; + Domain domain_lit1; + std::optional maybe_lit2; + Domain domain_lit2; + for (const int lit_var : literals) { + if (!local_model.bools_only_used_inside_the_local_model.contains( + PositiveRef(lit_var))) { + continue; + } + auto it = fully_encoded_domains->find(lit_var); + if (it == fully_encoded_domains->end()) { + continue; + } + + if (!maybe_lit1) { + maybe_lit1 = lit_var; + domain_lit1 = it->second; + } else { + maybe_lit2 = lit_var; + domain_lit2 = it->second; + break; + } + } + + if (!maybe_lit2.has_value()) return true; + DCHECK(maybe_lit1.has_value()); + const int lit1 = *maybe_lit1; + const int lit2 = *maybe_lit2; + + // We found two literals that each fully encodes an interval and are both only + // used in the encoding and in the bool_or/exactly_one/at_most_one. We can + // thus replace the two literals by their OR. 
Since this code is already + // rather complex, so we will just simplify a pair of literals at a time, and + // leave for the presolve fixpoint to do the rest. + *changed = true; + + context->UpdateRuleStats( + "variables: detected encoding of a complex domain with multiple " + "linear1"); + + if (ct->constraint_case() != ConstraintProto::kBoolOr) { + // In virtue of the AMO, var must not be in the intersection of the two + // domains where both literals are true. + if (!context->IntersectDomainWith( + local_model.var, + domain_lit2.IntersectionWith(domain_lit1).Complement())) { + return false; + } + } + const Domain var_domain = context->DomainOf(local_model.var); + const Domain domain_new_var_false = var_domain.IntersectionWith( + domain_lit1.Complement().IntersectionWith(domain_lit2.Complement())); + const Domain domain_new_var_true = + var_domain.IntersectionWith(domain_new_var_false.Complement()); + + // Now we want to build a lit3 = (lit1 or lit2) to use in the AMO/bool_or. + const int new_var = context->NewBoolVarWithClause({lit1, lit2}); + + if (domain_new_var_true.IsEmpty()) { + CHECK(context->SetLiteralToFalse(new_var)); + } else if (domain_new_var_false.IsEmpty()) { + CHECK(context->SetLiteralToTrue(new_var)); + } else { + local_model.linear1_constraints.push_back( + context->working_model->constraints_size()); + ConstraintProto* new_ct = context->working_model->add_constraints(); + new_ct->add_enforcement_literal(new_var); + new_ct->mutable_linear()->add_vars(local_model.var); + new_ct->mutable_linear()->add_coeffs(1); + FillDomainInProto(domain_new_var_true, new_ct->mutable_linear()); + local_model.linear1_constraints.push_back( + context->working_model->constraints_size()); + new_ct = context->working_model->add_constraints(); + new_ct->add_enforcement_literal(NegatedRef(new_var)); + new_ct->mutable_linear()->add_vars(local_model.var); + new_ct->mutable_linear()->add_coeffs(1); + FillDomainInProto(domain_new_var_false, new_ct->mutable_linear()); + 
context->UpdateNewConstraintsVariableUsage(); + } + + // Remove the two literals from the AMO. + int new_size = 0; + for (int i = 0; i < literals.size(); ++i) { + if (literals.Get(i) != lit1 && literals.Get(i) != lit2) { + literals.Set(new_size++, literals.Get(i)); + } + } + literals.Truncate(new_size); + literals.Add(new_var); + context->UpdateConstraintVariableUsage(ct_index); + + // Finally, move the four linear1 to the mapping model. + fully_encoded_domains->insert({new_var, domain_new_var_true}); + fully_encoded_domains->insert({NegatedRef(new_var), domain_new_var_false}); + fully_encoded_domains->erase(lit1); + fully_encoded_domains->erase(lit2); + fully_encoded_domains->erase(NegatedRef(lit1)); + fully_encoded_domains->erase(NegatedRef(lit2)); + context->MarkVariableAsRemoved(PositiveRef(lit1)); + context->MarkVariableAsRemoved(PositiveRef(lit2)); + int new_linear1_size = 0; + for (int i = 0; i < local_model.linear1_constraints.size(); ++i) { + const int ct = local_model.linear1_constraints[i]; + ConstraintProto* ct_proto = context->working_model->mutable_constraints(ct); + if (PositiveRef(ct_proto->enforcement_literal(0)) == PositiveRef(lit1) || + PositiveRef(ct_proto->enforcement_literal(0)) == PositiveRef(lit2)) { + context->NewMappingConstraint(*ct_proto, __FILE__, __LINE__); + ct_proto->Clear(); + context->UpdateConstraintVariableUsage(ct); + continue; + } + local_model.linear1_constraints[new_linear1_size++] = ct; + } + local_model.linear1_constraints.resize(new_linear1_size); + + return true; +} + +bool DetectAllEncodedComplexDomain(PresolveContext* context, + VariableEncodingLocalModel& local_model) { + absl::flat_hash_map fully_encoded_domains; + bool changed_on_basic_presolve = false; + if (!BasicPresolveAndGetFullyEncodedDomains(context, local_model, + &fully_encoded_domains, + &changed_on_basic_presolve)) { + return false; + } + if (local_model.constraints_linking_two_encoding_booleans.size() != 1) { + return true; + } + const int ct = 
local_model.constraints_linking_two_encoding_booleans[0]; + bool changed = true; + while (changed) { + if (!DetectEncodedComplexDomain(context, ct, local_model, + &fully_encoded_domains, &changed)) { + return false; + } + } + return true; +} + bool MaybeTransferLinear1ToAnotherVariable( VariableEncodingLocalModel& local_model, PresolveContext* context) { if (local_model.var == -1) return true; @@ -131,6 +700,5 @@ bool MaybeTransferLinear1ToAnotherVariable( local_model.var = -1; return true; } - } // namespace sat } // namespace operations_research diff --git a/ortools/sat/presolve_encoding.h b/ortools/sat/presolve_encoding.h index 6dad2318bdb..5c0f2d8170e 100644 --- a/ortools/sat/presolve_encoding.h +++ b/ortools/sat/presolve_encoding.h @@ -17,7 +17,10 @@ #include #include +#include "absl/container/flat_hash_map.h" +#include "absl/container/flat_hash_set.h" #include "ortools/sat/presolve_context.h" +#include "ortools/util/sorted_interval_list.h" namespace operations_research { namespace sat { @@ -33,6 +36,13 @@ struct VariableEncodingLocalModel { // fulfilling the conditions above will appear here. std::vector linear1_constraints; + // Constraints of the form bool_or/exactly_one/at_most_one that contains at + // least two of the encoding booleans. + std::vector constraints_linking_two_encoding_booleans; + + // Booleans that do not appear on any constraints outside the local model. + absl::flat_hash_set bools_only_used_inside_the_local_model; + // Zero if `var` doesn't appear in the objective. int64_t variable_coeff_in_objective = 0; @@ -44,6 +54,44 @@ struct VariableEncodingLocalModel { int single_constraint_using_the_var_outside_the_local_model = -1; }; +// For performance, this skips variables that appears in a single linear1 and is +// used in more than another constraint, since there is no interesting presolve +// we can do in this case. 
+std::vector CreateVariableEncodingLocalModels( + PresolveContext* context); + +// Do a few simple presolve rules on the local model: +// - restrict the domain of the linear1 to the domain of the variable. +// - merge linear1 over the same enforcement,var pairs. +// - if we have a linear1 for a literal and another for its negation, do +// not allow both to be true. +// +// Also returns a list of literals that fully encodes a domain for the variable. +// Returns false if we prove unsat. +bool BasicPresolveAndGetFullyEncodedDomains( + PresolveContext* context, VariableEncodingLocalModel& local_model, + absl::flat_hash_map* result, bool* changed); + +// If we have a model containing: +// l1 => var in [0, 10] +// ~l1 => var in [11, 20] +// l2 => var in [50, 60] +// ~l2 => var in [70, 80] +// bool_or(l1, l2, ...) +// +// if moreover `l1` and `l2` are only used in the constraints above, we can +// replace them by: +// l3 => var in [0, 10] U [50, 60] +// ~l3 => var in [11, 20] U [70, 80] +// bool_or(l3, ...) +// +// and remove the variables `l1` and `l2`. This also works if we replace the +// bool_or for an at_most_one or an exactly_one, but requires imposing +// (unconditionally) that the variable cannot be both in the domain encoded by +// `l1` and in the domain encoded by `l2`. +bool DetectAllEncodedComplexDomain(PresolveContext* context, + VariableEncodingLocalModel& local_model); + // If we have a bunch of constraint of the form literal => Y \in domain and // another constraint Y = f(X), we can remove Y, that constraint, and transform // all linear1 from constraining Y to constraining X. 
diff --git a/ortools/sat/presolve_encoding_test.cc b/ortools/sat/presolve_encoding_test.cc new file mode 100644 index 00000000000..5a9214e5bec --- /dev/null +++ b/ortools/sat/presolve_encoding_test.cc @@ -0,0 +1,462 @@ +// Copyright 2010-2025 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "ortools/sat/presolve_encoding.h" + +#include + +#include "absl/container/flat_hash_map.h" +#include "absl/log/check.h" +#include "gtest/gtest.h" +#include "ortools/base/gmock.h" +#include "ortools/base/parse_test_proto.h" +#include "ortools/sat/cp_model.pb.h" +#include "ortools/sat/model.h" +#include "ortools/sat/presolve_context.h" +#include "ortools/util/sorted_interval_list.h" + +namespace operations_research { +namespace sat { +namespace { + +using ::google::protobuf::contrib::parse_proto::ParseTestProto; +using ::testing::ElementsAre; +using ::testing::Pair; +using ::testing::UnorderedElementsAre; + +TEST(CreateVariableEncodingLocalModelsTest, TrivialTest) { + CpModelProto model_proto = ParseTestProto(R"pb( + variables { domain: [ 0, 1 ] } + variables { domain: [ 0, 1 ] } + constraints { + enforcement_literal: 0 + linear { + vars: [ 1 ] + coeffs: [ 1 ] + domain: [ 0, 1 ] + } + } + )pb"); + Model model; + CpModelProto mapping_model; + PresolveContext context(&model, &model_proto, &mapping_model); + context.InitializeNewDomains(); + context.ReadObjectiveFromProto(); + context.UpdateNewConstraintsVariableUsage(); + const std::vector local_models = + 
CreateVariableEncodingLocalModels(&context); + ASSERT_EQ(local_models.size(), 1); + ASSERT_EQ(local_models[0].var, 1); + EXPECT_THAT(local_models[0].linear1_constraints, ElementsAre(0)); +} + +TEST(CreateVariableEncodingLocalModelsTest, BasicTest) { + CpModelProto model_proto = ParseTestProto(R"pb( + variables { domain: [ 0, 1 ] } + variables { domain: [ 0, 1 ] } + variables { domain: [ 0, 2 ] } + constraints { + enforcement_literal: 0 + linear { + vars: [ 2 ] + coeffs: [ 1 ] + domain: [ 0, 0 ] + } + } + constraints { + enforcement_literal: 1 + linear { + vars: [ 2 ] + coeffs: [ 1 ] + domain: [ 0, 0 ] + } + } + constraints { bool_or { literals: [ 0, 1 ] } } + )pb"); + Model model; + CpModelProto mapping_model; + PresolveContext context(&model, &model_proto, &mapping_model); + context.InitializeNewDomains(); + context.ReadObjectiveFromProto(); + context.UpdateNewConstraintsVariableUsage(); + const std::vector local_models = + CreateVariableEncodingLocalModels(&context); + ASSERT_EQ(local_models.size(), 1); + ASSERT_EQ(local_models[0].var, 2); + EXPECT_THAT(local_models[0].linear1_constraints, ElementsAre(0, 1)); + EXPECT_THAT(local_models[0].constraints_linking_two_encoding_booleans, + ElementsAre(2)); + EXPECT_THAT(local_models[0].bools_only_used_inside_the_local_model, + UnorderedElementsAre(0, 1)); +} + +TEST(CreateVariableEncodingLocalModelsTest, OneBooleanUsedElsewhere) { + CpModelProto model_proto = ParseTestProto(R"pb( + variables { domain: [ 0, 1 ] } + variables { domain: [ 0, 1 ] } + variables { domain: [ 0, 1 ] } + variables { domain: [ 0, 2 ] } + constraints { + enforcement_literal: 0 + linear { + vars: [ 3 ] + coeffs: [ 1 ] + domain: [ 0, 0 ] + } + } + constraints { + enforcement_literal: 1 + linear { + vars: [ 3 ] + coeffs: [ 1 ] + domain: [ 1, 1 ] + } + } + constraints { + enforcement_literal: 2 + linear { + vars: [ 3 ] + coeffs: [ 1 ] + domain: [ 2, 2 ] + } + } + constraints { bool_or { literals: [ 0, 1, 2 ] } } + constraints { at_most_one { literals: 
[ 0, 1, 2 ] } } + constraints { + linear { + vars: [ 2, 3 ] + coeffs: [ 1, 1 ] + domain: [ 0, 3 ] + } + } + objective { + vars: [ 1 ] + coeffs: [ 2 ] + } + )pb"); + Model model; + CpModelProto mapping_model; + PresolveContext context(&model, &model_proto, &mapping_model); + context.InitializeNewDomains(); + context.ReadObjectiveFromProto(); + context.UpdateNewConstraintsVariableUsage(); + const std::vector local_models = + CreateVariableEncodingLocalModels(&context); + ASSERT_EQ(local_models.size(), 1); + ASSERT_EQ(local_models[0].var, 3); + EXPECT_THAT(local_models[0].linear1_constraints, ElementsAre(0, 1, 2)); + EXPECT_THAT(local_models[0].constraints_linking_two_encoding_booleans, + ElementsAre(3, 4)); + EXPECT_THAT(local_models[0].bools_only_used_inside_the_local_model, + UnorderedElementsAre(0)); +} + +TEST(CreateVariableEncodingLocalModelsTest, TwoVars) { + CpModelProto model_proto = ParseTestProto(R"pb( + variables { domain: [ 0, 1 ] } + variables { domain: [ 0, 1 ] } + variables { domain: [ 0, 1 ] } + variables { domain: [ 0, 2 ] } + variables { domain: [ 0, 2 ] } + variables { domain: [ 0, 1 ] } + constraints { + enforcement_literal: 0 + linear { + vars: [ 3 ] + coeffs: [ 1 ] + domain: [ 0, 0 ] + } + } + constraints { + enforcement_literal: -1 + linear { + vars: [ 3 ] + coeffs: [ 1 ] + domain: [ 1, 1 ] + } + } + constraints { + enforcement_literal: 1 + linear { + vars: [ 3 ] + coeffs: [ 1 ] + domain: [ 1, 1 ] + } + } + constraints { + enforcement_literal: 1 + linear { + vars: [ 4 ] + coeffs: [ 1 ] + domain: [ 2, 2 ] + } + } + constraints { + enforcement_literal: 2 + linear { + vars: [ 4 ] + coeffs: [ 1 ] + domain: [ 2, 2 ] + } + } + constraints { bool_or { literals: [ 0, 1, 5 ] } } + constraints { at_most_one { literals: [ 0, 1, 2 ] } } + constraints { + linear { + vars: [ 2, 3 ] + coeffs: [ 1, 1 ] + domain: [ 0, 3 ] + } + } + objective { + vars: [ 3 ] + coeffs: [ 2 ] + } + )pb"); + Model model; + CpModelProto mapping_model; + PresolveContext 
context(&model, &model_proto, &mapping_model); + context.InitializeNewDomains(); + context.ReadObjectiveFromProto(); + context.UpdateNewConstraintsVariableUsage(); + std::vector local_models = + CreateVariableEncodingLocalModels(&context); + ASSERT_EQ(local_models.size(), 2); + ASSERT_EQ(local_models[0].var, 3); + ASSERT_EQ(local_models[1].var, 4); + EXPECT_THAT(local_models[0].linear1_constraints, ElementsAre(0, 1, 2)); + EXPECT_THAT(local_models[1].linear1_constraints, ElementsAre(3, 4)); + EXPECT_THAT(local_models[0].constraints_linking_two_encoding_booleans, + ElementsAre(5, 6)); + EXPECT_THAT(local_models[1].constraints_linking_two_encoding_booleans, + ElementsAre(6)); + EXPECT_THAT(local_models[0].bools_only_used_inside_the_local_model, + UnorderedElementsAre(0)); + EXPECT_THAT(local_models[1].bools_only_used_inside_the_local_model, + UnorderedElementsAre()); + EXPECT_EQ(local_models[0].variable_coeff_in_objective, 2); + EXPECT_EQ(local_models[1].variable_coeff_in_objective, 0); + + absl::flat_hash_map fully_encoded_domains; + bool changed = false; + CHECK(BasicPresolveAndGetFullyEncodedDomains( + &context, local_models[0], &fully_encoded_domains, &changed)); + EXPECT_THAT( + fully_encoded_domains, + UnorderedElementsAre(Pair(0, Domain(0, 0)), Pair(-1, Domain(1, 1)))); +} + +TEST(BasicPresolveAndGetFullyEncodedDomainsTest, EncodingWithBoolOr) { + CpModelProto model_proto = ParseTestProto(R"pb( + variables { domain: [ 0, 1 ] } + variables { domain: [ 0, 1 ] } + variables { domain: [ 0, 1 ] } + variables { domain: [ 0, 2 ] } + variables { domain: [ 0, 2 ] } + constraints { + enforcement_literal: 0 + linear { + vars: [ 3 ] + coeffs: [ 1 ] + domain: [ 0, 0 ] + } + } + constraints { + enforcement_literal: 1 + linear { + vars: [ 3 ] + coeffs: [ 1 ] + domain: [ 1, 1 ] + } + } + constraints { + enforcement_literal: 2 + linear { + vars: [ 3 ] + coeffs: [ 1 ] + domain: [ 2, 2 ] + } + } + constraints { + enforcement_literal: 0 + linear { + vars: [ 4 ] + coeffs: [ 1 ] + 
domain: [ 2, 2 ] + } + } + constraints { bool_or { literals: [ 0, 1, 2 ] } } + constraints { + linear { + vars: [ 2, 3 ] + coeffs: [ 1, 1 ] + domain: [ 0, 3 ] + } + } + objective { + vars: [ 3 ] + coeffs: [ 2 ] + } + )pb"); + Model model; + CpModelProto mapping_model; + PresolveContext context(&model, &model_proto, &mapping_model); + context.InitializeNewDomains(); + context.ReadObjectiveFromProto(); + context.UpdateNewConstraintsVariableUsage(); + std::vector local_models = + CreateVariableEncodingLocalModels(&context); + + absl::flat_hash_map fully_encoded_domains; + bool changed = false; + CHECK(BasicPresolveAndGetFullyEncodedDomains( + &context, local_models[0], &fully_encoded_domains, &changed)); + EXPECT_THAT(fully_encoded_domains, + UnorderedElementsAre(Pair(0, Domain(0)), Pair(1, Domain(1)), + Pair(2, Domain(2)), + Pair(-1, Domain::FromValues({1, 2})), + Pair(-2, Domain::FromValues({0, 2})), + Pair(-3, Domain::FromValues({0, 1})))); + ConstraintProto expected_exactly_one = ParseTestProto(R"pb( + exactly_one { literals: [ 0, 1, 2 ] } + )pb"); + EXPECT_THAT(context.working_model->constraints(), + testing::Contains(testing::EqualsProto(expected_exactly_one))); +} + +TEST(DetectAllEncodedComplexDomainTest, BasicTest) { + CpModelProto model_proto = ParseTestProto(R"pb( + variables { domain: [ 0, 1 ] } + variables { domain: [ 0, 1 ] } + variables { domain: [ 0, 1 ] } + variables { domain: [ 0, 5 ] } + variables { domain: [ 0, 2 ] } + constraints { + enforcement_literal: 0 + linear { + vars: [ 3 ] + coeffs: [ 1 ] + domain: [ 0, 1 ] + } + } + constraints { + enforcement_literal: -1 + linear { + vars: [ 3 ] + coeffs: [ 1 ] + domain: [ 2, 5 ] + } + } + # Note that the var=3 is missing from both this encoding and its negation. 
+ constraints { + enforcement_literal: 1 + linear { + vars: [ 3 ] + coeffs: [ 1 ] + domain: [ 1, 2 ] + } + } + constraints { + enforcement_literal: -2 + linear { + vars: [ 3 ] + coeffs: [ 1 ] + domain: [ 0, 0, 4, 5 ] + } + } + constraints { + enforcement_literal: 2 + linear { + vars: [ 3 ] + coeffs: [ 1 ] + domain: [ 2, 2 ] + } + } + constraints { at_most_one { literals: [ 0, 1, 2 ] } } + constraints { + linear { + vars: [ 2, 3 ] + coeffs: [ 1, 1 ] + domain: [ 0, 3 ] + } + } + objective { + vars: [ 3 ] + coeffs: [ 2 ] + } + )pb"); + Model model; + CpModelProto mapping_model; + PresolveContext context(&model, &model_proto, &mapping_model); + context.InitializeNewDomains(); + context.ReadObjectiveFromProto(); + context.UpdateNewConstraintsVariableUsage(); + std::vector local_models = + CreateVariableEncodingLocalModels(&context); + ASSERT_TRUE(DetectAllEncodedComplexDomain(&context, local_models[0])); + context.WriteVariableDomainsToProto(); + const CpModelProto expected_model = ParseTestProto(R"pb( + variables { domain: [ 0, 1 ] } + variables { domain: [ 0, 1 ] } + variables { domain: [ 0, 1 ] } + # var=1 is forbidden by the at_most_one + variables { domain: [ 0, 0, 2, 2, 4, 5 ] } + variables { domain: [ 0, 2 ] } + variables { domain: [ 0, 1 ] } + constraints {} + constraints {} + constraints {} + constraints {} + constraints { + enforcement_literal: 2 + linear { + vars: [ 3 ] + coeffs: [ 1 ] + domain: [ 2, 2 ] + } + } + constraints { at_most_one { literals: [ 2, 5 ] } } + constraints { + linear { + vars: [ 2, 3 ] + coeffs: [ 1, 1 ] + domain: [ 0, 3 ] + } + } + constraints { + enforcement_literal: 5 + linear { + vars: [ 3 ] + coeffs: [ 1 ] + domain: [ 0, 0, 2, 2 ] + } + } + constraints { + enforcement_literal: -6 + linear { + vars: [ 3 ] + coeffs: [ 1 ] + domain: [ 4, 5 ] + } + } + objective { + vars: [ 3 ] + coeffs: [ 2 ] + } + )pb"); + EXPECT_THAT(context.working_model, testing::EqualsProto(expected_model)); +} + +} // namespace +} // namespace sat +} // namespace 
operations_research diff --git a/ortools/sat/sat_parameters.proto b/ortools/sat/sat_parameters.proto index b7ebe2454cf..56b07417190 100644 --- a/ortools/sat/sat_parameters.proto +++ b/ortools/sat/sat_parameters.proto @@ -24,7 +24,7 @@ option java_multiple_files = true; // Contains the definitions for all the sat algorithm parameters and their // default values. // -// NEXT TAG: 355 +// NEXT TAG: 356 message SatParameters { // In some context, like in a portfolio of search, it makes sense to name a // given parameters set for logging purpose. @@ -153,6 +153,10 @@ message SatParameters { // clause with it. optional int32 eagerly_subsume_last_n_conflicts = 343 [default = 4]; + // If we remove clause that we now are "implied" by others. Note that this + // might not always be good as we might loose some propagation power. + optional bool subsume_during_vivification = 355 [default = true]; + // If true, try to backtrack as little as possible on conflict and re-imply // the clauses later. // This means we discard less propagation than traditional backjumping, but diff --git a/ortools/sat/stat_tables.cc b/ortools/sat/stat_tables.cc index bded127e7fd..48190cb5ef8 100644 --- a/ortools/sat/stat_tables.cc +++ b/ortools/sat/stat_tables.cc @@ -120,7 +120,8 @@ void SharedStatTables::AddClausesStat(absl::string_view name, Model* model) { if (vivify_table_.empty()) { vivify_table_.push_back({"Vivification", "Clauses", "Decisions", "LitTrue", - "Subsumed", "LitRemoved", "DecisionReused"}); + "Subsumed", "LitRemoved", "DecisionReused", + "Conflicts"}); } vivify_table_.push_back({FormatName(name), FormatCounter(vivify_counters.num_clauses_vivified), @@ -128,7 +129,8 @@ void SharedStatTables::AddClausesStat(absl::string_view name, Model* model) { FormatCounter(vivify_counters.num_true), FormatCounter(vivify_counters.num_subsumed), FormatCounter(vivify_counters.num_removed_literals), - FormatCounter(vivify_counters.num_reused)}); + FormatCounter(vivify_counters.num_reused), + 
FormatCounter(vivify_counters.num_conflicts)}); // Track reductions of Boolean variables. if (bool_var_table_.empty()) { diff --git a/ortools/sat/vivification.cc b/ortools/sat/vivification.cc index fbfc1500057..a0ef539964e 100644 --- a/ortools/sat/vivification.cc +++ b/ortools/sat/vivification.cc @@ -18,8 +18,10 @@ #include #include "absl/algorithm/container.h" +#include "absl/cleanup/cleanup.h" #include "absl/container/btree_set.h" #include "absl/log/check.h" +#include "absl/log/log.h" #include "absl/types/span.h" #include "ortools/sat/clause.h" #include "ortools/sat/sat_base.h" @@ -31,7 +33,7 @@ namespace operations_research::sat { bool Vivifier::MinimizeByPropagation(bool log_info, double dtime_budget, bool minimize_new_clauses_only) { PresolveTimer timer("Vivification", logger_, time_limit_); - timer.OverrideLogging(log_info); + timer.OverrideLogging(log_info || VLOG_IS_ON(2)); sat_solver_->AdvanceDeterministicTime(time_limit_); const double threshold = @@ -44,34 +46,34 @@ bool Vivifier::MinimizeByPropagation(bool log_info, double dtime_budget, // Tricky: we don't want TryToMinimizeClause() to delete to_minimize // while we are processing it. sat_solver_->BlockClauseDeletion(true); + absl::Cleanup unblock_clause_deletion = [&] { + sat_solver_->BlockClauseDeletion(false); + }; const auto old_counter = counters_; - int num_resets = 0; - while (!time_limit_->LimitReached() && - time_limit_->GetElapsedDeterministicTime() < threshold) { - SatClause* to_minimize = clause_manager_->NextNewClauseToMinimize(); - if (!minimize_new_clauses_only && to_minimize == nullptr) { - to_minimize = clause_manager_->NextClauseToMinimize(); - } + const int num_resets = clause_manager_->NumToMinimizeIndexResets(); + while (!time_limit_->LimitReached()) { + // Abort if we used our budget. 
+ sat_solver_->AdvanceDeterministicTime(time_limit_); + if (time_limit_->GetElapsedDeterministicTime() >= threshold) break; - if (to_minimize != nullptr) { - if (!TryToMinimizeClause(to_minimize)) { - sat_solver_->BlockClauseDeletion(false); - return false; - } - } else if (minimize_new_clauses_only) { - break; - } else { - ++num_resets; - if (log_info) { - SOLVER_LOG(logger_, - "Minimized all clauses, restarting from first one."); + // Also abort if we did more than one loop over all the clause. + if (clause_manager_->NumToMinimizeIndexResets() > num_resets + 1) break; + + // First minimize clauses that where never minimized before. + { + SatClause* to_minimize = clause_manager_->NextNewClauseToMinimize(); + if (to_minimize != nullptr) { + if (!TryToMinimizeClause(to_minimize)) return false; + continue; } - clause_manager_->ResetToMinimizeIndex(); - if (num_resets > 1) break; + if (minimize_new_clauses_only) break; // We are done. } - sat_solver_->AdvanceDeterministicTime(time_limit_); + SatClause* clause = clause_manager_->NextClauseToMinimize(); + if (clause != nullptr) { + if (!TryToMinimizeClause(clause)) return false; + } } // Note(user): In some corner cases, the function above might find a @@ -85,8 +87,8 @@ bool Vivifier::MinimizeByPropagation(bool log_info, double dtime_budget, counters_.num_removed_literals - old_counter.num_removed_literals; timer.AddCounter("num_vivifed", last_num_vivified_); timer.AddCounter("literals_removed", last_num_literals_removed_); + timer.AddCounter("loops", clause_manager_->NumToMinimizeIndexResets()); - sat_solver_->BlockClauseDeletion(false); clause_manager_->DeleteRemovedClauses(); return result; } @@ -171,6 +173,7 @@ bool Vivifier::SubsumptionIsInteresting(BooleanVariable variable, // that we can reuse the trail from previous calls in case there are overlaps. 
bool Vivifier::TryToMinimizeClause(SatClause* clause) { CHECK(clause != nullptr); + if (clause->empty()) return true; ++counters_.num_clauses_vivified; // TODO(user): Make sure clause do not contain any redundant literal before @@ -229,8 +232,10 @@ bool Vivifier::TryToMinimizeClause(SatClause* clause) { } CHECK_EQ(candidate.size(), clause->size()); - if (!sat_solver_->BacktrackAndPropagateReimplications(longest_valid_prefix)) + if (!sat_solver_->BacktrackAndPropagateReimplications(longest_valid_prefix)) { return false; + } + absl::btree_set moved_last; while (!sat_solver_->ModelIsUnsat()) { // We want each literal in candidate to appear last once in our propagation @@ -240,8 +245,9 @@ bool Vivifier::TryToMinimizeClause(SatClause* clause) { const int target_level = MoveOneUnprocessedLiteralLast( moved_last, sat_solver_->CurrentDecisionLevel(), &candidate); if (target_level == -1) break; - if (!sat_solver_->BacktrackAndPropagateReimplications(target_level)) + if (!sat_solver_->BacktrackAndPropagateReimplications(target_level)) { return false; + } fixed_false_literals.clear(); fixed_true_literal = kNoLiteralIndex; @@ -273,9 +279,8 @@ bool Vivifier::TryToMinimizeClause(SatClause* clause) { // Replace the clause with the reason for the literal being true, plus // the literal itself. candidate.clear(); - for (Literal lit : sat_solver_->GetDecisionsFixing( - trail_->Reason(literal.Variable()))) { - candidate.push_back(lit.Negated()); + for (const Literal l : sat_solver_->GetDecisionsFixing({literal})) { + candidate.push_back(l.Negated()); } } else { candidate.resize(variable_level); @@ -289,7 +294,8 @@ bool Vivifier::TryToMinimizeClause(SatClause* clause) { // clauses. If we can subsume this clause by making only 1 additional // clause permanent and that clause is no longer than this one, we will // do so. 
- if (clause_manager_->ReasonClauseOrNull(literal.Variable()) != clause && + if (parameters_.subsume_during_vivification() && + clause_manager_->ReasonClauseOrNull(literal.Variable()) != clause && SubsumptionIsInteresting(literal.Variable(), candidate.size())) { counters_.num_subsumed++; counters_.num_removed_literals += clause->size(); @@ -305,7 +311,10 @@ bool Vivifier::TryToMinimizeClause(SatClause* clause) { sat_solver_->EnqueueDecisionAndBackjumpOnConflict(literal.Negated()); if (sat_solver_->ModelIsUnsat()) return false; if (clause->IsRemoved()) return true; + if (sat_solver_->CurrentDecisionLevel() < level) { + ++counters_.num_conflicts; + // There was a conflict, consider the conflicting literal next so we // should be able to exploit the conflict in the next iteration. // TODO(user): I *think* this is sufficient to ensure pushing @@ -321,6 +330,9 @@ bool Vivifier::TryToMinimizeClause(SatClause* clause) { sat_solver_->NotifyThatModelIsUnsat(); return false; } + + // TODO(user): To use this, we need to proove and rewrite the clause + // on each of its modification. if (!parameters_.inprocessing_minimization_use_all_orderings()) break; moved_last.insert(candidate.back().Index()); } @@ -396,15 +408,11 @@ bool Vivifier::TryToMinimizeClause(SatClause* clause) { sat_solver_->NotifyThatModelIsUnsat(); return false; } - // Adding a unit clause can cause additional propagation, but there is also an - // edge case where binary_clauses_->PropagationIsDone() may return - // false after we add the first binary clause, even if nothing has been added - // to the trail. Either way, we can just check if the implication graph thinks - // it is done to propagate only when required. - if (!binary_clauses_->PropagationIsDone(*trail_)) { - return sat_solver_->FinishPropagation(); - } - return true; + + // Adding a unit clause can cause additional propagation. 
There is also an + // edge case where we added the first binary clause of the model by + // strenghtening a normal clause. + return sat_solver_->FinishPropagation(); } } // namespace operations_research::sat diff --git a/ortools/sat/vivification.h b/ortools/sat/vivification.h index 63ac4bf9ae5..9064f80424b 100644 --- a/ortools/sat/vivification.h +++ b/ortools/sat/vivification.h @@ -45,6 +45,7 @@ class Vivifier { trail_(model->GetOrCreate()), binary_clauses_(model->GetOrCreate()), clause_manager_(model->GetOrCreate()), + clause_id_generator_(model->GetOrCreate()), lrat_proof_handler_(model->Mutable()) {} // Minimize a batch of clauses using propagation. @@ -67,6 +68,7 @@ class Vivifier { int64_t num_subsumed = 0; int64_t num_removed_literals = 0; int64_t num_reused = 0; + int64_t num_conflicts = 0; }; Counters counters() const { return counters_; } @@ -92,7 +94,7 @@ class Vivifier { Trail* trail_; BinaryImplicationGraph* binary_clauses_; ClauseManager* clause_manager_; - + ClauseIdGenerator* clause_id_generator_; LratProofHandler* lrat_proof_handler_ = nullptr; Counters counters_; diff --git a/ortools/sat/work_assignment.cc b/ortools/sat/work_assignment.cc index 30b1aa94a24..7cfdb794ff2 100644 --- a/ortools/sat/work_assignment.cc +++ b/ortools/sat/work_assignment.cc @@ -301,7 +301,6 @@ bool SharedTreeManager::SyncTree(ProtoTrail& path) { // We don't rely on these being empty, but we expect them to be. DCHECK(to_close_.empty()); DCHECK(to_update_.empty()); - path.NormalizeImplications(); int prev_level = -1; for (const auto& [node, level] : nodes) { if (level == prev_level) { @@ -1367,6 +1366,8 @@ bool SharedTreeWorker::SyncWithSharedTree() { !decision_policy_->GetBestPartialAssignment().empty()) { assigned_tree_.ClearTargetPhase(); for (Literal lit : decision_policy_->GetBestPartialAssignment()) { + // Skip anything assigned at level 0. 
+ if (trail_->Assignment().LiteralIsAssigned(lit)) continue; // If `lit` was last assigned at a shared level, it is implied in the // tree, no need to share its phase. if (trail_->Info(lit.Variable()).level <= assigned_tree_.MaxLevel()) { @@ -1396,6 +1397,12 @@ bool SharedTreeWorker::SyncWithSharedTree() { decision_policy_->SetTargetPolarityIfUnassigned(DecodeDecision(lit)); } decision_policy_->ResetActivitiesToFollowBestPartialAssignment(); + // This seems bizzare after just setting the best partial assignment, + // but this makes phase sharing work even when there is no stable phase in + // the restart strategy, and makes no real difference if there is, since + // the first dive will still try to follow this assignment until the first + // conflict regardless of the restart strategy. + decision_policy_->ClearBestPartialAssignment(); } } // If we commit to this subtree, keep it for at least 1s of dtime. diff --git a/ortools/util/logging.cc b/ortools/util/logging.cc index e063fc2f1fd..669deff3920 100644 --- a/ortools/util/logging.cc +++ b/ortools/util/logging.cc @@ -112,6 +112,8 @@ void SolverLogger::FlushPendingThrottledLogs(bool ignore_rates) { PresolveTimer::~PresolveTimer() { time_limit_->AdvanceDeterministicTime(work_); + const double dtime = + time_limit_->GetElapsedDeterministicTime() - dtime_at_start_; std::string counter_string; for (const auto& [counter_name, count] : counters_) { @@ -124,7 +126,7 @@ PresolveTimer::~PresolveTimer() { logger_->LogInfo( __FILE__, __LINE__, absl::StrCat(absl::StrFormat(" %.2es", timer_.Get()), - absl::StrFormat(" %.2ed", work_), + absl::StrFormat(" %.2ed", dtime), (WorkLimitIsReached() ? 
" *" : " "), "[", name_, "]", counter_string, " ", absl::StrJoin(extra_infos_, " "))); } diff --git a/ortools/util/logging.h b/ortools/util/logging.h index 4516a3628e0..0884b390104 100644 --- a/ortools/util/logging.h +++ b/ortools/util/logging.h @@ -127,7 +127,10 @@ class SolverLogger { class PresolveTimer { public: PresolveTimer(std::string name, SolverLogger* logger, TimeLimit* time_limit) - : name_(std::move(name)), logger_(logger), time_limit_(time_limit) { + : name_(std::move(name)), + dtime_at_start_(time_limit->GetElapsedDeterministicTime()), + logger_(logger), + time_limit_(time_limit) { timer_.Start(); } @@ -164,6 +167,7 @@ class PresolveTimer { private: const std::string name_; + const double dtime_at_start_; WallTimer timer_; SolverLogger* logger_; From f72f9eea98e941b7f1e89aa70e7ca0505d2ee3ae Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Wed, 7 Jan 2026 16:19:03 +0100 Subject: [PATCH 103/111] sat: backport README from main --- ortools/sat/colab/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ortools/sat/colab/README.md b/ortools/sat/colab/README.md index 1a5dc85d724..f4de2f358c7 100644 --- a/ortools/sat/colab/README.md +++ b/ortools/sat/colab/README.md @@ -12,7 +12,7 @@ Below you'll find three examples of Google's CP-SAT solver. Build and run locally: ``` -bazel run -c opt --cxxopt=-std=c++17 ortools/python:ortools_notebook +bazel run -c opt ortools/python:ortools_notebook ``` This will open a jupyter notebook in your browser. @@ -20,7 +20,7 @@ This will open a jupyter notebook in your browser. To use it as a server only, use the command ``` -bazel run -c opt --cxxopt=-std=c++17 ortools/python:ortools_notebook -- --no-browser +bazel run -c opt ortools/python:ortools_notebook -- --no-browser ``` And paste the resulting url in your favorite environment, like visual studio code. 
From 708262c71618acfb8441a7d52777e8127bf5e5a2 Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Wed, 7 Jan 2026 17:46:34 +0100 Subject: [PATCH 104/111] sat: fix compilation error --- ortools/sat/probing.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ortools/sat/probing.cc b/ortools/sat/probing.cc index a7c84315849..fa3193bab58 100644 --- a/ortools/sat/probing.cc +++ b/ortools/sat/probing.cc @@ -103,8 +103,8 @@ class TrailCopy { } trail_index_[var] = i; trail_literals_.push_back(literal); - trail_info_.emplace_back(info.level, assignment_type, reason, - reason_clause); + trail_info_.push_back({info.level, assignment_type, reason, + reason_clause}); } const int num_decisions = trail_.CurrentDecisionLevel(); From 6a603cc183439eb6827acc23f474b4b247b99613 Mon Sep 17 00:00:00 2001 From: Laurent Perron Date: Thu, 8 Jan 2026 13:07:57 +0100 Subject: [PATCH 105/111] [CP-SAT] fix rare crash --- ortools/sat/clause.cc | 43 ++++++++++++++------ ortools/sat/clause.h | 1 + ortools/sat/linear_constraint.cc | 11 ----- ortools/sat/linear_constraint.h | 3 -- ortools/sat/linear_constraint_test.cc | 8 ---- ortools/sat/linear_programming_constraint.cc | 4 +- ortools/sat/sat_inprocessing.cc | 8 ++-- ortools/sat/sat_solver.cc | 1 + 8 files changed, 38 insertions(+), 41 deletions(-) diff --git a/ortools/sat/clause.cc b/ortools/sat/clause.cc index e463b8e5df3..67427cd2235 100644 --- a/ortools/sat/clause.cc +++ b/ortools/sat/clause.cc @@ -244,21 +244,25 @@ bool ClauseManager::Propagate(Trail* trail) { } } - reasons_[trail->Index()] = it->clause; - if (propagation_level == 0 && lrat_proof_handler_ != nullptr) { - const ClauseId clause_id = GetClauseId(it->clause); - const int size = it->clause->size(); - std::vector& unit_ids = clause_ids_scratchpad_; - unit_ids.clear(); - for (int i = 1; i < size; ++i) { - unit_ids.push_back(trail_->GetUnitClauseId(literals[i].Variable())); + if (propagation_level == 0) { + if (lrat_proof_handler_ != nullptr) { + std::vector& 
unit_ids = clause_ids_scratchpad_; + unit_ids.clear(); + const int size = it->clause->size(); + for (int i = 1; i < size; ++i) { + unit_ids.push_back( + trail_->GetUnitClauseId(literals[i].Variable())); + } + unit_ids.push_back(GetClauseId(it->clause)); + const ClauseId new_clause_id = clause_id_generator_->GetNextId(); + lrat_proof_handler_->AddInferredClause( + new_clause_id, {other_watched_literal}, unit_ids); + helper.EnqueueWithUnitReason(other_watched_literal, new_clause_id); + } else { + trail_->EnqueueWithUnitReason(other_watched_literal); } - unit_ids.push_back(clause_id); - const ClauseId new_clause_id = clause_id_generator_->GetNextId(); - lrat_proof_handler_->AddInferredClause( - new_clause_id, {other_watched_literal}, unit_ids); - helper.EnqueueWithUnitReason(other_watched_literal, new_clause_id); } else { + reasons_[trail->Index()] = it->clause; helper.EnqueueAtLevel(other_watched_literal, propagation_level); } *new_it++ = *it; @@ -296,6 +300,7 @@ SatClause* ClauseManager::ReasonClauseOrNull(BooleanVariable var) const { if (!trail_->Assignment().VariableIsAssigned(var)) return nullptr; if (trail_->AssignmentType(var) != propagator_id_) return nullptr; SatClause* result = reasons_[trail_->Info(var).trail_index]; + DCHECK(result != nullptr) << trail_->Info(var).DebugString(); // Tricky: In some corner case, that clause was subsumed, so we don't want // to check it nor use it. @@ -306,6 +311,7 @@ SatClause* ClauseManager::ReasonClauseOrNull(BooleanVariable var) const { bool ClauseManager::ClauseIsUsedAsReason(SatClause* clause) const { DCHECK(clause != nullptr); + if (clause->empty()) return false; return clause == ReasonClauseOrNull(clause->PropagatedLiteral().Variable()); } @@ -642,6 +648,16 @@ void ClauseManager::CleanUpWatchers() { void ClauseManager::DeleteRemovedClauses() { if (!is_clean_) CleanUpWatchers(); + if (DEBUG_MODE) { + // This help debug issues, as it is easier to check for nullptr rather than + // detect a pointer that has been deleted. 
+ for (int i = 0; i < reasons_.size(); ++i) { + if (reasons_[i] != nullptr && reasons_[i]->empty()) { + reasons_[i] = nullptr; + } + } + } + int new_size = 0; const int old_size = clauses_.size(); for (int i = 0; i < old_size; ++i) { @@ -649,6 +665,7 @@ void ClauseManager::DeleteRemovedClauses() { if (i == to_first_minimize_index_) to_first_minimize_index_ = new_size; if (i == to_probe_index_) to_probe_index_ = new_size; if (clauses_[i]->IsRemoved()) { + DCHECK(!clauses_info_.contains(clauses_[i])); delete clauses_[i]; } else { clauses_[new_size++] = clauses_[i]; diff --git a/ortools/sat/clause.h b/ortools/sat/clause.h index 5aab10b4a44..6bdff62f203 100644 --- a/ortools/sat/clause.h +++ b/ortools/sat/clause.h @@ -313,6 +313,7 @@ class ClauseManager : public SatPropagator { // start so the clauses will be returned in round-robin order. // Note that we only minimize clauses kept forever. SatClause* NextClauseToMinimize(); + // Returns the next clause to probe in round-robin order. SatClause* NextClauseToProbe(); diff --git a/ortools/sat/linear_constraint.cc b/ortools/sat/linear_constraint.cc index c6b099b3f24..744bea31ee5 100644 --- a/ortools/sat/linear_constraint.cc +++ b/ortools/sat/linear_constraint.cc @@ -296,17 +296,6 @@ void DivideByGCD(LinearConstraint* constraint) { } } -void MakeAllCoefficientsPositive(LinearConstraint* constraint) { - const int size = constraint->num_terms; - for (int i = 0; i < size; ++i) { - const IntegerValue coeff = constraint->coeffs[i]; - if (coeff < 0) { - constraint->coeffs[i] = -coeff; - constraint->vars[i] = NegationOf(constraint->vars[i]); - } - } -} - void MakeAllVariablesPositive(LinearConstraint* constraint) { const int size = constraint->num_terms; for (int i = 0; i < size; ++i) { diff --git a/ortools/sat/linear_constraint.h b/ortools/sat/linear_constraint.h index 72a0441c837..a5123403136 100644 --- a/ortools/sat/linear_constraint.h +++ b/ortools/sat/linear_constraint.h @@ -350,9 +350,6 @@ double ScalarProduct(const 
LinearConstraint& constraint1, // also tighten the constraint bounds assuming all the variables are integer. void DivideByGCD(LinearConstraint* constraint); -// Makes all coefficients positive by transforming a variable to its negation. -void MakeAllCoefficientsPositive(LinearConstraint* constraint); - // Makes all variables "positive" by transforming a variable to its negation. void MakeAllVariablesPositive(LinearConstraint* constraint); diff --git a/ortools/sat/linear_constraint_test.cc b/ortools/sat/linear_constraint_test.cc index 2a541e285e6..7164bd84eb6 100644 --- a/ortools/sat/linear_constraint_test.cc +++ b/ortools/sat/linear_constraint_test.cc @@ -181,14 +181,6 @@ TEST(LinearConstraintCopyTest, BasicBehavior) { EXPECT_EQ(ct, other); } -TEST(MakeAllCoefficientsPositiveTest, BasicBehavior) { - // Note that this relies on the fact that the negation of an IntegerVariable - // var is is the one with IntegerVariable(var.value() ^ 1); - LinearConstraint ct = CreateUbConstraintForTest({-2, 0, -7, 0}, 10); - MakeAllCoefficientsPositive(&ct); - EXPECT_EQ(ct, CreateUbConstraintForTest({0, 2, 0, 7}, 10)); -} - TEST(LinearConstraintBuilderTest, DuplicateCoefficient) { Model model; model.GetOrCreate(); diff --git a/ortools/sat/linear_programming_constraint.cc b/ortools/sat/linear_programming_constraint.cc index 6fa3794c9ce..9b0bb04adb5 100644 --- a/ortools/sat/linear_programming_constraint.cc +++ b/ortools/sat/linear_programming_constraint.cc @@ -218,8 +218,8 @@ LinearConstraint ScatteredIntegerVector::ConvertToLinearConstraint( result.ub = upper_bound; if (extra_term != std::nullopt) { - result.vars[new_size] += extra_term->first; - result.coeffs[new_size] += extra_term->second; + result.vars[new_size] = extra_term->first; + result.coeffs[new_size] = extra_term->second; ++new_size; } diff --git a/ortools/sat/sat_inprocessing.cc b/ortools/sat/sat_inprocessing.cc index 1baddcf50cf..04927991ea7 100644 --- a/ortools/sat/sat_inprocessing.cc +++ 
b/ortools/sat/sat_inprocessing.cc @@ -1487,10 +1487,9 @@ bool BoundedVariableElimination::DoOneRound(bool log_info) { DCHECK( std::all_of(marked_.begin(), marked_.end(), [](bool b) { return !b; })); - // TODO(user): add a local dtime limit for the corner case where this take too - // much time. We can adapt the limit depending on how much we want to spend on + // TODO(user): adapt the dtime limit depending on how much we want to spend on // inprocessing. - while (!time_limit_->LimitReached() && !queue_.IsEmpty()) { + while (!time_limit_->LimitReached() && !queue_.IsEmpty() && dtime_ < 10.0) { const BooleanVariable top = queue_.Top().var; queue_.Pop(); @@ -1538,7 +1537,6 @@ bool BoundedVariableElimination::DoOneRound(bool log_info) { literal_to_clauses_.clear(); literal_to_num_clauses_.clear(); - dtime_ += 1e-8 * num_inspected_literals_; time_limit_->AdvanceDeterministicTime(dtime_); log_info |= VLOG_IS_ON(2); LOG_IF(INFO, log_info) << "BVE." @@ -1885,10 +1883,12 @@ bool BoundedVariableElimination::CrossProduct(BooleanVariable var) { for (const ClauseIndex i : literal_to_clauses_[lit]) { const auto c = clauses_[i]->AsSpan(); if (!c.empty()) score += clause_weight + c.size(); + dtime_ += 1e-8 * c.size(); } for (const ClauseIndex i : literal_to_clauses_[not_lit]) { const auto c = clauses_[i]->AsSpan(); if (!c.empty()) score += clause_weight + c.size(); + dtime_ += 1.0e-8 * c.size(); } // Compute the new score after BVE. diff --git a/ortools/sat/sat_solver.cc b/ortools/sat/sat_solver.cc index cc6a95ad653..81a60aaf970 100644 --- a/ortools/sat/sat_solver.cc +++ b/ortools/sat/sat_solver.cc @@ -3030,6 +3030,7 @@ void SatSolver::CleanClauseDatabaseIfNeeded() { std::vector entries; auto& clauses_info = *(clauses_propagator_->mutable_clauses_info()); for (auto& entry : clauses_info) { + DCHECK(!entry.first->empty()); // Should have been deleted ! 
entry.second.num_cleanup_rounds_since_last_bumped++; if (clauses_propagator_->ClauseIsUsedAsReason(entry.first)) continue; From 204b31a748dda752835c1300556dc56b2a223343 Mon Sep 17 00:00:00 2001 From: Guillaume Chatelet Date: Wed, 7 Jan 2026 15:49:29 +0100 Subject: [PATCH 106/111] Refactor MPSolver interface registration to include runtime readiness checks. (#4973) This PR is removing the need for `linear_solver` to depend on `GurobiIsCorrectlyInstalled` and `XpressIsCorrectlyInstalled`. --- ortools/linear_solver/gurobi_interface.cc | 6 ++- ortools/linear_solver/linear_solver.cc | 46 ++++++++++------------- ortools/linear_solver/linear_solver.h | 22 +++++++---- ortools/linear_solver/xpress_interface.cc | 6 ++- 4 files changed, 42 insertions(+), 38 deletions(-) diff --git a/ortools/linear_solver/gurobi_interface.cc b/ortools/linear_solver/gurobi_interface.cc index af035792ccb..6b2e8b1614d 100644 --- a/ortools/linear_solver/gurobi_interface.cc +++ b/ortools/linear_solver/gurobi_interface.cc @@ -1417,7 +1417,8 @@ namespace { const void* const kRegisterGurobiLp ABSL_ATTRIBUTE_UNUSED = [] { MPSolverInterfaceFactoryRepository::GetInstance()->Register( [](MPSolver* solver) { return new GurobiInterface(solver, false); }, - MPSolver::GUROBI_LINEAR_PROGRAMMING); + MPSolver::GUROBI_LINEAR_PROGRAMMING, + []() { return GurobiIsCorrectlyInstalled(); }); return nullptr; }(); @@ -1425,7 +1426,8 @@ const void* const kRegisterGurobiLp ABSL_ATTRIBUTE_UNUSED = [] { const void* const kRegisterGurobiMip ABSL_ATTRIBUTE_UNUSED = [] { MPSolverInterfaceFactoryRepository::GetInstance()->Register( [](MPSolver* solver) { return new GurobiInterface(solver, true); }, - MPSolver::GUROBI_MIXED_INTEGER_PROGRAMMING); + MPSolver::GUROBI_MIXED_INTEGER_PROGRAMMING, + []() { return GurobiIsCorrectlyInstalled(); }); return nullptr; }(); diff --git a/ortools/linear_solver/linear_solver.cc b/ortools/linear_solver/linear_solver.cc index 74c14c54cfb..824128d5108 100644 --- 
a/ortools/linear_solver/linear_solver.cc +++ b/ortools/linear_solver/linear_solver.cc @@ -412,26 +412,10 @@ MPSolver::MPSolver(const std::string& name, MPSolver::~MPSolver() { Clear(); } -extern bool GurobiIsCorrectlyInstalled(); -extern bool XpressIsCorrectlyInstalled(); - // static bool MPSolver::SupportsProblemType(OptimizationProblemType problem_type) { - if (!MPSolverInterfaceFactoryRepository::GetInstance()->Supports( - problem_type)) { - return false; - } - switch (problem_type) { - case GUROBI_LINEAR_PROGRAMMING: - case GUROBI_MIXED_INTEGER_PROGRAMMING: - return GurobiIsCorrectlyInstalled(); - case XPRESS_LINEAR_PROGRAMMING: - case XPRESS_MIXED_INTEGER_PROGRAMMING: - return XpressIsCorrectlyInstalled(); - default: - break; - } - return true; + return MPSolverInterfaceFactoryRepository::GetInstance()->Supports( + problem_type); } // TODO(user): post c++ 14, instead use @@ -2238,9 +2222,14 @@ MPSolverInterfaceFactoryRepository::~MPSolverInterfaceFactoryRepository() { void MPSolverInterfaceFactoryRepository::Register( MPSolverInterfaceFactory factory, - MPSolver::OptimizationProblemType problem_type) { + MPSolver::OptimizationProblemType problem_type, + std::function is_runtime_ready) { absl::MutexLock lock(mutex_); - map_[problem_type] = std::move(factory); + if (!is_runtime_ready) is_runtime_ready = []() { return true; }; + map_[problem_type] = Entry{ + .factory = std::move(factory), + .is_runtime_ready = std::move(is_runtime_ready), + }; } bool MPSolverInterfaceFactoryRepository::Unregister( @@ -2252,17 +2241,20 @@ bool MPSolverInterfaceFactoryRepository::Unregister( MPSolverInterface* MPSolverInterfaceFactoryRepository::Create( MPSolver* solver) const { absl::MutexLock lock(mutex_); - const MPSolverInterfaceFactory factory = - gtl::FindWithDefault(map_, solver->ProblemType(), nullptr); - if (!factory) { - return nullptr; - } - return factory(solver); + const Entry* entry = gtl::FindOrNull(map_, solver->ProblemType()); + CHECK(entry != nullptr) << "No 
factory registered for problem type " + << ToString(solver->ProblemType()); + CHECK(entry->is_runtime_ready()) + << "Solver for problem type " << ToString(solver->ProblemType()) + << " is not ready."; + return entry->factory(solver); } bool MPSolverInterfaceFactoryRepository::Supports( MPSolver::OptimizationProblemType problem_type) const { - return map_.count(problem_type) > 0; + const Entry* entry = gtl::FindOrNull(map_, problem_type); + if (entry == nullptr) return false; + return entry->is_runtime_ready(); } std::vector diff --git a/ortools/linear_solver/linear_solver.h b/ortools/linear_solver/linear_solver.h index 4656ac13698..087ce547895 100644 --- a/ortools/linear_solver/linear_solver.h +++ b/ortools/linear_solver/linear_solver.h @@ -152,6 +152,7 @@ #include "absl/container/flat_hash_map.h" #include "absl/flags/declare.h" #include "absl/log/check.h" +#include "absl/log/log.h" #include "absl/status/status.h" #include "absl/strings/str_format.h" #include "absl/strings/string_view.h" @@ -1959,17 +1960,20 @@ class MPSolverInterfaceFactoryRepository { public: static MPSolverInterfaceFactoryRepository* GetInstance(); - // Maps the given factory to the given problem type. If a factory was already - // assigned to this problem type, it will be replaced. + // Maps the given factory to the given problem type. For solver needing + // runtime checks an additional `is_runtime_ready` argument can be set. If + // a factory was already assigned to this problem type, it will be replaced. void Register(MPSolverInterfaceFactory factory, - MPSolver::OptimizationProblemType problem_type); + MPSolver::OptimizationProblemType problem_type, + std::function is_runtime_ready = {}); - // Invokes the factory associated to the given solver's problem type, - // or return NULL if no factory was found for it. + // Invokes the factory associated to the given solver's problem type and fails + // if no factory is registered or its runtime is not ready. 
+ // Use `Supports` below to check if `Create` succeeds. MPSolverInterface* Create(MPSolver* solver) const; // Whether the implementation associated to the given problem type is - // available. + // available and ready to use. bool Supports(MPSolver::OptimizationProblemType problem_type) const; // List all the problem types. @@ -1991,7 +1995,11 @@ class MPSolverInterfaceFactoryRepository { ~MPSolverInterfaceFactoryRepository(); mutable absl::Mutex mutex_; - std::map map_; + struct Entry { + MPSolverInterfaceFactory factory; + std::function is_runtime_ready; + }; + std::map map_; }; } // namespace operations_research diff --git a/ortools/linear_solver/xpress_interface.cc b/ortools/linear_solver/xpress_interface.cc index 40b14845dfe..0e09df60534 100644 --- a/ortools/linear_solver/xpress_interface.cc +++ b/ortools/linear_solver/xpress_interface.cc @@ -2289,7 +2289,8 @@ namespace { const void* const kRegisterXpress ABSL_ATTRIBUTE_UNUSED = [] { MPSolverInterfaceFactoryRepository::GetInstance()->Register( [](MPSolver* const solver) { return new XpressInterface(solver, false); }, - MPSolver::XPRESS_LINEAR_PROGRAMMING); + MPSolver::XPRESS_LINEAR_PROGRAMMING, + []() { return XpressIsCorrectlyInstalled(); }); return nullptr; }(); @@ -2297,7 +2298,8 @@ const void* const kRegisterXpress ABSL_ATTRIBUTE_UNUSED = [] { const void* const kRegisterXpressMip ABSL_ATTRIBUTE_UNUSED = [] { MPSolverInterfaceFactoryRepository::GetInstance()->Register( [](MPSolver* const solver) { return new XpressInterface(solver, true); }, - MPSolver::XPRESS_MIXED_INTEGER_PROGRAMMING); + MPSolver::XPRESS_MIXED_INTEGER_PROGRAMMING, + []() { return XpressIsCorrectlyInstalled(); }); return nullptr; }(); From 87e8e340d82e53fa02493ec442821ae157e3a213 Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Thu, 8 Jan 2026 15:52:28 +0100 Subject: [PATCH 107/111] sat: disable deprecated warnings in cp_model.py User can reenable them using: ```py from ortools.sat.python import cp_model cp_model.enable_warnings = 
True ``` --- ortools/sat/python/cp_model.py | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/ortools/sat/python/cp_model.py b/ortools/sat/python/cp_model.py index 822dcef2200..b2e4c382959 100644 --- a/ortools/sat/python/cp_model.py +++ b/ortools/sat/python/cp_model.py @@ -184,7 +184,7 @@ # Helper functions. - +enable_warnings = False # warnings.deprecated is python3.13+. Not compatible with Open Source (3.10+). # pylint: disable=g-bare-generic @@ -193,12 +193,13 @@ def deprecated(message: str) -> Callable[[Callable], Callable]: def deprecated_decorator(func) -> Callable: def deprecated_func(*args, **kwargs): - warnings.warn( - f"{func.__name__} is a deprecated function. {message}", - category=DeprecationWarning, - stacklevel=2, - ) - warnings.simplefilter("default", DeprecationWarning) + if enable_warnings: + warnings.warn( + f"{func.__name__} is a deprecated function. {message}", + category=DeprecationWarning, + stacklevel=2, + ) + warnings.simplefilter("default", DeprecationWarning) return func(*args, **kwargs) return deprecated_func @@ -210,12 +211,13 @@ def deprecated_method(func, old_name: str) -> Callable: """Wrapper that warns about a deprecated method.""" def deprecated_func(*args, **kwargs) -> Any: - warnings.warn( - f"{old_name} is a deprecated function. Use {func.__name__} instead.", - category=DeprecationWarning, - stacklevel=2, - ) - warnings.simplefilter("default", DeprecationWarning) + if enable_warnings: + warnings.warn( + f"{old_name} is a deprecated function. 
Use {func.__name__} instead.", + category=DeprecationWarning, + stacklevel=2, + ) + warnings.simplefilter("default", DeprecationWarning) return func(*args, **kwargs) return deprecated_func From 1ed9a17980d23a86323b1794b1930cedde5e5888 Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Thu, 8 Jan 2026 09:04:19 -0800 Subject: [PATCH 108/111] sat: fix probing.cc compilation on windows --- ortools/sat/probing.cc | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/ortools/sat/probing.cc b/ortools/sat/probing.cc index fa3193bab58..073d63d5e6e 100644 --- a/ortools/sat/probing.cc +++ b/ortools/sat/probing.cc @@ -14,6 +14,7 @@ #include "ortools/sat/probing.h" #include +#include #include #include #include @@ -103,8 +104,8 @@ class TrailCopy { } trail_index_[var] = i; trail_literals_.push_back(literal); - trail_info_.push_back({info.level, assignment_type, reason, - reason_clause}); + trail_info_.emplace_back(info.level, assignment_type, reason, + reason_clause); } const int num_decisions = trail_.CurrentDecisionLevel(); @@ -232,7 +233,7 @@ class TrailCopy { } struct TrailInfo { - int level; + uint32_t level; int assignment_type; // For literals propagated by the BinaryImplicationGraph, the negation of // the original reason. 
For literals propagated by the ClauseManager, *all* From e0f74bd698c32b2463513edf0b7c084477f5817c Mon Sep 17 00:00:00 2001 From: Corentin Le Molgat Date: Fri, 9 Jan 2026 10:34:14 +0100 Subject: [PATCH 109/111] sat: fix probing.cc implem against stdc++17 note: Invented public ctor `A(a, b, v)` not defined in c++17 ref: https://www.open-std.org/jtc1/sc22/wg21/docs/papers/2019/p0960r2.html --- ortools/sat/probing.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ortools/sat/probing.cc b/ortools/sat/probing.cc index 073d63d5e6e..7f2473c0f6b 100644 --- a/ortools/sat/probing.cc +++ b/ortools/sat/probing.cc @@ -104,8 +104,8 @@ class TrailCopy { } trail_index_[var] = i; trail_literals_.push_back(literal); - trail_info_.emplace_back(info.level, assignment_type, reason, - reason_clause); + trail_info_.push_back( + {info.level, assignment_type, reason, reason_clause}); } const int num_decisions = trail_.CurrentDecisionLevel(); From 551ad10d94835c99e5e1e684500d3db398c0e345 Mon Sep 17 00:00:00 2001 From: Guillaume Chatelet Date: Fri, 9 Jan 2026 12:16:04 +0100 Subject: [PATCH 110/111] Update Python dependencies and remove black. (#4979) - Updated jupyter-server to 2.17.0 in notebook_requirements.in. - Updated virtualenv to 20.35.4 in ortools_requirements.in. - Removed black from both ortools_requirements.in and notebook_requirements.in. - Updated filelock>=3.20.1 to mitigate CVE-2025-68146. - Regenerated notebook_requirements.txt and ortools_requirements.txt with updated hashes and versions for various packages including debugpy, jupyter-client, platformdirs, tzdata, and urllib3. - Added comments in bazel/BUILD regarding the compile_pip_requirements rules and instructions for updating the requirements files. 
--- bazel/BUILD.bazel | 10 ++- bazel/notebook_requirements.in | 2 +- bazel/notebook_requirements.txt | 146 ++++++++++++-------------------- bazel/ortools_requirements.in | 3 +- bazel/ortools_requirements.txt | 76 ++++------------- 5 files changed, 81 insertions(+), 156 deletions(-) diff --git a/bazel/BUILD.bazel b/bazel/BUILD.bazel index 7f30b27ccdf..962f791284e 100644 --- a/bazel/BUILD.bazel +++ b/bazel/BUILD.bazel @@ -14,8 +14,8 @@ load("@rules_python//python:pip.bzl", "compile_pip_requirements") extra_args = [ - "--no-emit-index-url", - "--reuse-hashes", + "--no-emit-index-url", # Prevent emitting url in .txt file when using a proxy. + "--reuse-hashes", # Improve the speed of --generate-hashes by reusing the hashes from an existing output file. "--verbose", ] @@ -36,4 +36,10 @@ compile_pip_requirements( requirements_txt = "notebook_requirements.txt", ) +# To fully update `requirements.txt` files run the following commands: +# cat /dev/null > bazel/ortools_requirements.txt +# cat /dev/null > bazel/notebook_requirements.txt +# bazelisk-linux-amd64 run --config=ci //bazel:ortools_requirements.update +# bazelisk-linux-amd64 run --config=ci //bazel:notebook_requirements.update + package(default_visibility = ["//visibility:public"]) diff --git a/bazel/notebook_requirements.in b/bazel/notebook_requirements.in index 0dee0c339c5..178a1cee282 100644 --- a/bazel/notebook_requirements.in +++ b/bazel/notebook_requirements.in @@ -6,7 +6,7 @@ plotly==5.15.0 # Notebook jupyterlab==4.4.8 notebook==7.4.2 -jupyter-server==2.16.0 +jupyter-server==2.17.0 tornado==6.5.0 Pygments==2.19.1 jsonschema==4.23.0 diff --git a/bazel/notebook_requirements.txt b/bazel/notebook_requirements.txt index b5004f7658c..8cb2c17c7b8 100644 --- a/bazel/notebook_requirements.txt +++ b/bazel/notebook_requirements.txt @@ -72,30 +72,6 @@ beautifulsoup4==4.14.3 \ --hash=sha256:0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb \ 
--hash=sha256:6292b1c5186d356bba669ef9f7f051757099565ad9ada5dd630bd9de5fa7fb86 # via nbconvert -black==24.8.0 \ - --hash=sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6 \ - --hash=sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e \ - --hash=sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f \ - --hash=sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018 \ - --hash=sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e \ - --hash=sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd \ - --hash=sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4 \ - --hash=sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed \ - --hash=sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2 \ - --hash=sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42 \ - --hash=sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af \ - --hash=sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb \ - --hash=sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368 \ - --hash=sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb \ - --hash=sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af \ - --hash=sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed \ - --hash=sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47 \ - --hash=sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2 \ - --hash=sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a \ - --hash=sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c \ - --hash=sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920 \ - --hash=sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1 - # via -r bazel/ortools_requirements.in bleach[css]==6.3.0 \ 
--hash=sha256:6f3b91b1c0a02bb9a78b5a454c92506aa0fdf197e1d5e114d2e00c6f64306d22 \ --hash=sha256:fe10ec77c93ddf3d13a73b035abaac7a9f5e436513864ccdad516693213c65d6 @@ -308,45 +284,41 @@ charset-normalizer==3.4.4 \ --hash=sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e \ --hash=sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608 # via requests -click==8.3.1 \ - --hash=sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a \ - --hash=sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6 - # via black comm==0.2.3 \ --hash=sha256:2dc8048c10962d55d7ad693be1e7045d891b7ce8d999c97963a5e3e99c055971 \ --hash=sha256:c615d91d75f7f04f095b30d1c1711babd43bdc6419c1be9886a85f2f4e489417 # via ipykernel -debugpy==1.8.17 \ - --hash=sha256:045290c010bcd2d82bc97aa2daf6837443cd52f6328592698809b4549babcee1 \ - --hash=sha256:1440fd514e1b815edd5861ca394786f90eb24960eb26d6f7200994333b1d79e3 \ - --hash=sha256:17e456da14848d618662354e1dccfd5e5fb75deec3d1d48dc0aa0baacda55860 \ - --hash=sha256:24693179ef9dfa20dca8605905a42b392be56d410c333af82f1c5dff807a64cc \ - --hash=sha256:3a32c0af575749083d7492dc79f6ab69f21b2d2ad4cd977a958a07d5865316e4 \ - --hash=sha256:3bea3b0b12f3946e098cce9b43c3c46e317b567f79570c3f43f0b96d00788088 \ - --hash=sha256:5c59b74aa5630f3a5194467100c3b3d1c77898f9ab27e3f7dc5d40fc2f122670 \ - --hash=sha256:60c7dca6571efe660ccb7a9508d73ca14b8796c4ed484c2002abba714226cfef \ - --hash=sha256:6a4e9dacf2cbb60d2514ff7b04b4534b0139facbf2abdffe0639ddb6088e59cf \ - --hash=sha256:6c5cd6f009ad4fca8e33e5238210dc1e5f42db07d4b6ab21ac7ffa904a196420 \ - --hash=sha256:857c1dd5d70042502aef1c6d1c2801211f3ea7e56f75e9c335f434afb403e464 \ - --hash=sha256:893cba7bb0f55161de4365584b025f7064e1f88913551bcd23be3260b231429c \ - --hash=sha256:8deb4e31cd575c9f9370042876e078ca118117c1b5e1f22c32befcfbb6955f0c \ - --hash=sha256:a3aad0537cf4d9c1996434be68c6c9a6d233ac6f76c2a482c7803295b4e4f99a \ - 
--hash=sha256:b13eea5587e44f27f6c48588b5ad56dcb74a4f3a5f89250443c94587f3eb2ea1 \ - --hash=sha256:b532282ad4eca958b1b2d7dbcb2b7218e02cb934165859b918e3b6ba7772d3f4 \ - --hash=sha256:b69b6bd9dba6a03632534cdf67c760625760a215ae289f7489a452af1031fe1f \ - --hash=sha256:b75868b675949a96ab51abc114c7163f40ff0d8f7d6d5fd63f8932fd38e9c6d7 \ - --hash=sha256:bb1bbf92317e1f35afcf3ef0450219efb3afe00be79d8664b250ac0933b9015f \ - --hash=sha256:c41d2ce8bbaddcc0009cc73f65318eedfa3dbc88a8298081deb05389f1ab5542 \ - --hash=sha256:c6bdf134457ae0cac6fb68205776be635d31174eeac9541e1d0c062165c6461f \ - --hash=sha256:d3fce3f0e3de262a3b67e69916d001f3e767661c6e1ee42553009d445d1cd840 \ - --hash=sha256:e34ee844c2f17b18556b5bbe59e1e2ff4e86a00282d2a46edab73fd7f18f4a83 \ - --hash=sha256:e79a195f9e059edfe5d8bf6f3749b2599452d3e9380484cd261f6b7cd2c7c4da \ - --hash=sha256:e851beb536a427b5df8aa7d0c7835b29a13812f41e46292ff80b2ef77327355a \ - --hash=sha256:e8f8f61c518952fb15f74a302e068b48d9c4691768ade433e4adeea961993464 \ - --hash=sha256:eaa85bce251feca8e4c87ce3b954aba84b8c645b90f0e6a515c00394a9f5c0e7 \ - --hash=sha256:f14467edef672195c6f6b8e27ce5005313cb5d03c9239059bc7182b60c176e2d \ - --hash=sha256:f2ac8055a0c4a09b30b931100996ba49ef334c6947e7ae365cdd870416d7513e \ - --hash=sha256:fd723b47a8c08892b1a16b2c6239a8b96637c62a59b94bb5dab4bac592a58a8e +debugpy==1.8.18 \ + --hash=sha256:02551b1b84a91faadd2db9bc4948873f2398190c95b3cc6f97dc706f43e8c433 \ + --hash=sha256:0701d83c4c1a74ed2c9abdabce102b1daf24cf81e1802421980871c9ee41f371 \ + --hash=sha256:1b224887af5121fa702f9f542968170d104e3f9cac827d85fdefe89702dc235c \ + --hash=sha256:2721237f9456394943f75c4b6f7cf2aed6ab9c59b7beca4bf553621d37000115 \ + --hash=sha256:32dd56d50fe15c47d0f930a7f0b9d3e5eb8ed04770bc6c313fba6d226f87e1e8 \ + --hash=sha256:3dae1d65e581406a4d7c1bb44391f47e621b8c87c5639b6607e6007a5d823205 \ + --hash=sha256:46e4aa316f9c16fa7145f192bf0fd1c5c43effca13b8767270a99e7e7ac464f5 \ + 
--hash=sha256:4d26736dfabf404e9f3032015ec7b0189e7396d0664e29e5bdbe7ac453043c95 \ + --hash=sha256:530c38114725505a7e4ea95328dbc24aabb9be708c6570623c8163412e6d1d6b \ + --hash=sha256:63424eb602ccb2c158fbd40437404d29ce0da5f9552e8bab53fb265e19e686ee \ + --hash=sha256:636a5445a3336e4aba323a3545ca2bb373b04b0bc14084a4eb20c989db44429f \ + --hash=sha256:6da217ac8c1152d698b9809484d50c75bef9cc02fd6886a893a6df81ec952ff8 \ + --hash=sha256:6f97083b68f680b244a96c5923862a84aab32b486393c71deac152b1c001429b \ + --hash=sha256:714b61d753cfe3ed5e7bf0aad131506d750e271726ac86e3e265fd7eeebbe765 \ + --hash=sha256:75d14dd04b617ee38e46786394ec0dd5e1ac5e3d10ffb034fd6c7b72111174c2 \ + --hash=sha256:7e68ba950acbcf95ee862210133681f408cbb78d1c9badbb515230ec55ed6487 \ + --hash=sha256:8804d1288e6006629a87d53eb44b7b66e695d428ac529ffd75bfc7d730a9c821 \ + --hash=sha256:971965e264faed48ae961ff1e1ad2ce32d8e0cc550a4baa7643a25f1782b7125 \ + --hash=sha256:a114865099283cbed4c9330cb0c9cb7a04cfa92e803577843657302d526141ec \ + --hash=sha256:a69ef7d6050e5d26cf8e0081c6b591a41383dc18db734c4acafdd49568bb7a6f \ + --hash=sha256:ab8cf0abe0fe2dfe1f7e65abc04b1db8740f9be80c1274acb625855c5c3ece6e \ + --hash=sha256:be7f622d250fe3429571e84572eb771023f1da22c754f28d2c60a10d74a4cc1b \ + --hash=sha256:cab3abf0ee2328269c380f7a8a1c41ea1d80d6507404db9b005c8432bc6224a1 \ + --hash=sha256:d44e9c531f2519ec4b856ddde8f536615918f5b7886c658a81bf200c90315f77 \ + --hash=sha256:ded8a5a413bd0a249b3c0be9f43128f437755180ac431222a6354c7d76a76a54 \ + --hash=sha256:df6c1243dedcb6bf9a5dc1c5668009e2b5508b8525f27d9821be91da57827743 \ + --hash=sha256:df8bf7cd78019d5d155213bf5a1818b36403d0c3758d669e76827d4db026b840 \ + --hash=sha256:df93f78d6d031b6d2aae72fee7b000985bc88f6496d8eec2bd1bbfe7b61aa20a \ + --hash=sha256:e8431bc71a3903c6d7f39c91b550aed73f98f0e179967380f04f6f779b8171ee \ + --hash=sha256:f312871f85a30522bc31be6f52343de0420474fe467e2bfe38d6d4a4029db194 # via ipykernel decorator==5.2.1 \ 
--hash=sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360 \ @@ -368,9 +340,9 @@ fastjsonschema==2.21.2 \ --hash=sha256:1c797122d0a86c5cace2e54bf4e819c36223b552017172f32c5c024a6b77e463 \ --hash=sha256:b1eb43748041c880796cd077f1a07c3d94e93ae84bba5ed36800a33554ae05de # via nbformat -filelock==3.20.0 \ - --hash=sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2 \ - --hash=sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4 +filelock==3.20.1 \ + --hash=sha256:15d9e9a67306188a44baa72f569d2bfd803076269365fdea0934385da4dc361a \ + --hash=sha256:b8360948b351b80f420878d8516519a2204b07aefcdcfd24912a5d33127f188c # via virtualenv fqdn==1.5.1 \ --hash=sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f \ @@ -449,9 +421,9 @@ jsonschema-specifications==2025.9.1 \ --hash=sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe \ --hash=sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d # via jsonschema -jupyter-client==8.6.3 \ - --hash=sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419 \ - --hash=sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f +jupyter-client==8.7.0 \ + --hash=sha256:3357212d9cbe01209e59190f67a3a7e1f387a4f4e88d1e0433ad84d7b262531d \ + --hash=sha256:3671a94fd25e62f5f2f554f5e95389c2294d89822378a5f2dd24353e1494a9e0 # via # ipykernel # jupyter-server @@ -475,9 +447,9 @@ jupyter-lsp==2.3.0 \ --hash=sha256:458aa59339dc868fb784d73364f17dbce8836e906cd75fd471a325cba02e0245 \ --hash=sha256:e914a3cb2addf48b1c7710914771aaf1819d46b2e5a79b0f917b5478ec93f34f # via jupyterlab -jupyter-server==2.16.0 \ - --hash=sha256:3d8db5be3bc64403b1c65b400a1d7f4647a5ce743f3b20dbdefe8ddb7b55af9e \ - --hash=sha256:65d4b44fdf2dcbbdfe0aa1ace4a842d4aaf746a2b7b168134d5aaed35621b7f6 +jupyter-server==2.17.0 \ + --hash=sha256:c38ea898566964c888b4772ae1ed58eca84592e88251d2cfc4d171f81f7e99d5 \ + 
--hash=sha256:e8cb9c7db4251f51ed307e329b81b72ccf2056ff82d50524debde1ee1870e13f # via # -r bazel/notebook_requirements.in # jupyter-lsp @@ -640,9 +612,7 @@ mypy==1.6.1 \ mypy-extensions==1.1.0 \ --hash=sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505 \ --hash=sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558 - # via - # black - # mypy + # via mypy mypy-protobuf==3.5.0 \ --hash=sha256:0d0548c6b9a6faf14ce1a9ce2831c403a5c1f2a9363e85b1e2c51d5d57aa8393 \ --hash=sha256:21f270da0a9792a9dac76b0df463c027e561664ab6973c59be4e4d064dfe67dc @@ -755,15 +725,10 @@ numpy==2.3.5 \ # -r bazel/ortools_requirements.in # pandas # scipy -overrides==7.7.0 \ - --hash=sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a \ - --hash=sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49 - # via jupyter-server packaging==25.0 \ --hash=sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484 \ --hash=sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f # via - # black # ipykernel # jupyter-events # jupyter-server @@ -836,19 +801,14 @@ parso==0.8.5 \ --hash=sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a \ --hash=sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887 # via jedi -pathspec==0.12.1 \ - --hash=sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08 \ - --hash=sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712 - # via black pexpect==4.9.0 \ --hash=sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523 \ --hash=sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f # via ipython -platformdirs==4.5.0 \ - --hash=sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312 \ - --hash=sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3 +platformdirs==4.5.1 \ + 
--hash=sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda \ + --hash=sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31 # via - # black # jupyter-core # virtualenv plotly==5.15.0 \ @@ -1396,9 +1356,9 @@ typing-extensions==4.15.0 \ # beautifulsoup4 # mypy # referencing -tzdata==2025.2 \ - --hash=sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8 \ - --hash=sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9 +tzdata==2025.3 \ + --hash=sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1 \ + --hash=sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7 # via # arrow # pandas @@ -1406,13 +1366,13 @@ uri-template==1.3.0 \ --hash=sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7 \ --hash=sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363 # via jsonschema -urllib3==2.6.0 \ - --hash=sha256:c90f7a39f716c572c4e3e58509581ebd83f9b59cced005b7db7ad2d22b0db99f \ - --hash=sha256:cb9bcef5a4b345d5da5d145dc3e30834f58e8018828cbc724d30b4cb7d4d49f1 +urllib3==2.6.3 \ + --hash=sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed \ + --hash=sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4 # via requests -virtualenv==20.28.1 \ - --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \ - --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329 +virtualenv==20.35.4 \ + --hash=sha256:643d3914d73d3eeb0c552cbb12d7e82adf0e504dbf86a3182f8771a153a1971c \ + --hash=sha256:c21c9cede36c9753eeade68ba7d523529f228a403463376cf821eaae2b650f1b # via -r bazel/ortools_requirements.in wcwidth==0.2.14 \ --hash=sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605 \ diff --git a/bazel/ortools_requirements.in b/bazel/ortools_requirements.in index 9f6f3965dc9..e15ee208da9 100644 --- a/bazel/ortools_requirements.in +++ b/bazel/ortools_requirements.in @@ 
-10,8 +10,7 @@ typing-extensions==4.15.0 # OR-Tools build dependencies mypy==1.6.1 mypy-protobuf==3.5.0 -virtualenv==20.28.1 -black==24.8.0 +virtualenv==20.35.4 # Example dependencies pandas==2.3.3 diff --git a/bazel/ortools_requirements.txt b/bazel/ortools_requirements.txt index e52667ee381..2902cad2c12 100644 --- a/bazel/ortools_requirements.txt +++ b/bazel/ortools_requirements.txt @@ -8,30 +8,6 @@ absl-py==2.3.1 \ --hash=sha256:a97820526f7fbfd2ec1bce83f3f25e3a14840dac0d8e02a0b71cd75db3f77fc9 \ --hash=sha256:eeecf07f0c2a93ace0772c92e596ace6d3d3996c042b2128459aaae2a76de11d # via -r bazel/ortools_requirements.in -black==24.8.0 \ - --hash=sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6 \ - --hash=sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e \ - --hash=sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f \ - --hash=sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018 \ - --hash=sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e \ - --hash=sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd \ - --hash=sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4 \ - --hash=sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed \ - --hash=sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2 \ - --hash=sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42 \ - --hash=sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af \ - --hash=sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb \ - --hash=sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368 \ - --hash=sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb \ - --hash=sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af \ - --hash=sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed \ - 
--hash=sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47 \ - --hash=sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2 \ - --hash=sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a \ - --hash=sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c \ - --hash=sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920 \ - --hash=sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1 - # via -r bazel/ortools_requirements.in certifi==2025.11.12 \ --hash=sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b \ --hash=sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316 @@ -151,17 +127,13 @@ charset-normalizer==3.4.4 \ --hash=sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e \ --hash=sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608 # via requests -click==8.3.1 \ - --hash=sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a \ - --hash=sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6 - # via black distlib==0.4.0 \ --hash=sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16 \ --hash=sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d # via virtualenv -filelock==3.20.0 \ - --hash=sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2 \ - --hash=sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4 +filelock==3.20.1 \ + --hash=sha256:15d9e9a67306188a44baa72f569d2bfd803076269365fdea0934385da4dc361a \ + --hash=sha256:b8360948b351b80f420878d8516519a2204b07aefcdcfd24912a5d33127f188c # via virtualenv idna==3.11 \ --hash=sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea \ @@ -203,9 +175,7 @@ mypy==1.6.1 \ mypy-extensions==1.1.0 \ --hash=sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505 \ 
--hash=sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558 - # via - # black - # mypy + # via mypy mypy-protobuf==3.5.0 \ --hash=sha256:0d0548c6b9a6faf14ce1a9ce2831c403a5c1f2a9363e85b1e2c51d5d57aa8393 \ --hash=sha256:21f270da0a9792a9dac76b0df463c027e561664ab6973c59be4e4d064dfe67dc @@ -289,10 +259,6 @@ numpy==2.3.5 \ # -r bazel/ortools_requirements.in # pandas # scipy -packaging==25.0 \ - --hash=sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484 \ - --hash=sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f - # via black pandas==2.3.3 \ --hash=sha256:0242fe9a49aa8b4d78a4fa03acb397a58833ef6199e9aa40a95f027bb3a1b6e7 \ --hash=sha256:1611aedd912e1ff81ff41c745822980c49ce4a7907537be8692c8dbc31924593 \ @@ -350,16 +316,10 @@ pandas==2.3.3 \ --hash=sha256:f086f6fe114e19d92014a1966f43a3e62285109afe874f067f5abbdcbb10e59c \ --hash=sha256:f8bfc0e12dc78f777f323f55c58649591b2cd0c43534e8355c51d3fede5f4dee # via -r bazel/ortools_requirements.in -pathspec==0.12.1 \ - --hash=sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08 \ - --hash=sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712 - # via black -platformdirs==4.5.0 \ - --hash=sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312 \ - --hash=sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3 - # via - # black - # virtualenv +platformdirs==4.5.1 \ + --hash=sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda \ + --hash=sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31 + # via virtualenv protobuf==6.32.0 \ --hash=sha256:15eba1b86f193a407607112ceb9ea0ba9569aed24f93333fe9a497cf2fda37d3 \ --hash=sha256:501fe6372fd1c8ea2a30b4d9be8f87955a64d6be9c88a973996cef5ef6f0abf1 \ @@ -464,15 +424,15 @@ typing-extensions==4.15.0 \ # via # -r bazel/ortools_requirements.in # mypy -tzdata==2025.2 \ - 
--hash=sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8 \ - --hash=sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9 +tzdata==2025.3 \ + --hash=sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1 \ + --hash=sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7 # via pandas -urllib3==2.6.0 \ - --hash=sha256:c90f7a39f716c572c4e3e58509581ebd83f9b59cced005b7db7ad2d22b0db99f \ - --hash=sha256:cb9bcef5a4b345d5da5d145dc3e30834f58e8018828cbc724d30b4cb7d4d49f1 +urllib3==2.6.3 \ + --hash=sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed \ + --hash=sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4 # via requests -virtualenv==20.28.1 \ - --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \ - --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329 - # via -r bazel/ortools_requirements.in \ No newline at end of file +virtualenv==20.35.4 \ + --hash=sha256:643d3914d73d3eeb0c552cbb12d7e82adf0e504dbf86a3182f8771a153a1971c \ + --hash=sha256:c21c9cede36c9753eeade68ba7d523529f228a403463376cf821eaae2b650f1b + # via -r bazel/ortools_requirements.in From 100c6671b6c3b7a81f4be17438424c9a1df77287 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Jan 2026 22:30:18 +0000 Subject: [PATCH 111/111] build(deps): bump the github-actions group with 2 updates Bumps the github-actions group with 2 updates: [actions/cache](https://github.com/actions/cache) and [bazel-contrib/setup-bazel](https://github.com/bazel-contrib/setup-bazel). 
Updates `actions/cache` from 4 to 5 - [Release notes](https://github.com/actions/cache/releases) - [Changelog](https://github.com/actions/cache/blob/main/RELEASES.md) - [Commits](https://github.com/actions/cache/compare/v4...v5) Updates `bazel-contrib/setup-bazel` from 0.15.0 to 0.18.0 - [Release notes](https://github.com/bazel-contrib/setup-bazel/releases) - [Commits](https://github.com/bazel-contrib/setup-bazel/compare/0.15.0...0.18.0) --- updated-dependencies: - dependency-name: actions/cache dependency-version: '5' dependency-type: direct:production update-type: version-update:semver-major dependency-group: github-actions - dependency-name: bazel-contrib/setup-bazel dependency-version: 0.18.0 dependency-type: direct:production update-type: version-update:semver-minor dependency-group: github-actions ... Signed-off-by: dependabot[bot] --- .github/workflows/amd64_linux_bazel.yml | 4 ++-- .github/workflows/amd64_linux_cmake_coinor_off.yml | 12 ++++++------ .github/workflows/amd64_linux_cmake_cpp.yml | 12 ++++++------ .github/workflows/amd64_linux_cmake_dotnet.yml | 12 ++++++------ .github/workflows/amd64_linux_cmake_glpk_on.yml | 12 ++++++------ .github/workflows/amd64_linux_cmake_java.yml | 12 ++++++------ .github/workflows/amd64_linux_cmake_python.yml | 12 ++++++------ .github/workflows/amd64_linux_cmake_scip_off.yml | 12 ++++++------ .github/workflows/amd64_macos_bazel.yml | 4 ++-- .github/workflows/amd64_macos_cmake_cpp.yml | 12 ++++++------ .github/workflows/amd64_macos_cmake_dotnet.yml | 12 ++++++------ .github/workflows/amd64_macos_cmake_java.yml | 12 ++++++------ .github/workflows/amd64_macos_cmake_python.yml | 12 ++++++------ .github/workflows/amd64_windows_bazel.yml | 4 ++-- .github/workflows/amd64_windows_cmake_cpp.yml | 4 ++-- .github/workflows/amd64_windows_cmake_dotnet.yml | 4 ++-- .github/workflows/amd64_windows_cmake_java.yml | 4 ++-- .github/workflows/amd64_windows_cmake_python.yml | 4 ++-- .github/workflows/arm64_macos_bazel.yml | 4 ++-- 
.github/workflows/arm64_macos_cmake_cpp.yml | 12 ++++++------ .github/workflows/arm64_macos_cmake_dotnet.yml | 12 ++++++------ .github/workflows/arm64_macos_cmake_java.yml | 12 ++++++------ .github/workflows/arm64_macos_cmake_python.yml | 12 ++++++------ .github/workflows/presubmit.yml | 2 +- 24 files changed, 107 insertions(+), 107 deletions(-) diff --git a/.github/workflows/amd64_linux_bazel.yml b/.github/workflows/amd64_linux_bazel.yml index f53347522af..702e4fef85e 100644 --- a/.github/workflows/amd64_linux_bazel.yml +++ b/.github/workflows/amd64_linux_bazel.yml @@ -63,7 +63,7 @@ jobs: sed -i -e 's/\(DEFAULT_PYTHON =\) "3.[0-9]\+"/\1 "${{matrix.python.version}}"/g' MODULE.bazel cat MODULE.bazel - name: Restore bazel cache - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: bazel-cache-restore with: key: ${{env.cache-name}} @@ -73,7 +73,7 @@ jobs: - name: Test run: bazel --output_user_root=${{env.cache-root}} test --config=ci //ortools/... //examples/... - name: Save Bazel cache for branch main even when Build and Test fail. 
- uses: actions/cache/save@v4 + uses: actions/cache/save@v5 # https://docs.github.com/en/actions/reference/workflows-and-actions/dependency-caching#restrictions-for-accessing-a-cache if: always() && github.ref == 'refs/heads/main' with: diff --git a/.github/workflows/amd64_linux_cmake_coinor_off.yml b/.github/workflows/amd64_linux_cmake_coinor_off.yml index c6812d5cc1f..311a2cadc4e 100644 --- a/.github/workflows/amd64_linux_cmake_coinor_off.yml +++ b/.github/workflows/amd64_linux_cmake_coinor_off.yml @@ -50,13 +50,13 @@ jobs: # RESTORING CACHES - name: Restore CMake dependency source code - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_src_restore with: key: ${{env.deps_src_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: ${{github.workspace}}/build/_deps/*-src - name: Restore CMake dependency build - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_build_restore with: key: ${{env.deps_build_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} @@ -64,7 +64,7 @@ jobs: ${{github.workspace}}/build/_deps/*-build ${{github.workspace}}/build/_deps/*-subbuild - name: Restore CCache - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: ccache_restore with: key: ${{env.ccache_key}}-${{github.sha}} @@ -109,13 +109,13 @@ jobs: # SAVING CACHES - name: Save CCache if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.ccache_restore.outputs.cache-primary-key}} path: ${{env.CCACHE_DIR}} - name: Save CMake dependency build if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_build_restore.outputs.cache-primary-key}} path: | @@ -123,7 +123,7 @@ jobs: ${{github.workspace}}/build/_deps/*-subbuild - name: Save CMake dependency source code if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: 
${{steps.deps_src_restore.outputs.cache-primary-key}} path: ${{github.workspace}}/build/_deps/*-src diff --git a/.github/workflows/amd64_linux_cmake_cpp.yml b/.github/workflows/amd64_linux_cmake_cpp.yml index 497f0887d37..3a2858e61af 100644 --- a/.github/workflows/amd64_linux_cmake_cpp.yml +++ b/.github/workflows/amd64_linux_cmake_cpp.yml @@ -42,13 +42,13 @@ jobs: # RESTORING CACHES - name: Restore CMake dependency source code - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_src_restore with: key: ${{env.deps_src_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: ${{github.workspace}}/build/_deps/*-src - name: Restore CMake dependency build - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_build_restore with: key: ${{env.deps_build_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} @@ -56,7 +56,7 @@ jobs: ${{github.workspace}}/build/_deps/*-build ${{github.workspace}}/build/_deps/*-subbuild - name: Restore CCache - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: ccache_restore with: key: ${{env.ccache_key}}-${{github.sha}} @@ -99,13 +99,13 @@ jobs: # SAVING CACHES - name: Save CCache if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.ccache_restore.outputs.cache-primary-key}} path: ${{env.CCACHE_DIR}} - name: Save CMake dependency build if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_build_restore.outputs.cache-primary-key}} path: | @@ -113,7 +113,7 @@ jobs: ${{github.workspace}}/build/_deps/*-subbuild - name: Save CMake dependency source code if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_src_restore.outputs.cache-primary-key}} path: ${{github.workspace}}/build/_deps/*-src diff --git a/.github/workflows/amd64_linux_cmake_dotnet.yml b/.github/workflows/amd64_linux_cmake_dotnet.yml 
index 354cd816f21..9ac0ee8d1c5 100644 --- a/.github/workflows/amd64_linux_cmake_dotnet.yml +++ b/.github/workflows/amd64_linux_cmake_dotnet.yml @@ -48,13 +48,13 @@ jobs: # RESTORING CACHES - name: Restore CMake dependency source code - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_src_restore with: key: ${{env.deps_src_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: ${{github.workspace}}/build/_deps/*-src - name: Restore CMake dependency build - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_build_restore with: key: ${{env.deps_build_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} @@ -62,7 +62,7 @@ jobs: ${{github.workspace}}/build/_deps/*-build ${{github.workspace}}/build/_deps/*-subbuild - name: Restore CCache - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: ccache_restore with: key: ${{env.ccache_key}}-${{github.sha}} @@ -107,13 +107,13 @@ jobs: # SAVING CACHES - name: Save CCache if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.ccache_restore.outputs.cache-primary-key}} path: ${{env.CCACHE_DIR}} - name: Save CMake dependency build if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_build_restore.outputs.cache-primary-key}} path: | @@ -121,7 +121,7 @@ jobs: ${{github.workspace}}/build/_deps/*-subbuild - name: Save CMake dependency source code if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_src_restore.outputs.cache-primary-key}} path: ${{github.workspace}}/build/_deps/*-src diff --git a/.github/workflows/amd64_linux_cmake_glpk_on.yml b/.github/workflows/amd64_linux_cmake_glpk_on.yml index 1844adf6e1f..641dcadef15 100644 --- a/.github/workflows/amd64_linux_cmake_glpk_on.yml +++ b/.github/workflows/amd64_linux_cmake_glpk_on.yml @@ -50,13 +50,13 @@ jobs: # RESTORING CACHES - 
name: Restore CMake dependency source code - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_src_restore with: key: ${{env.deps_src_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: ${{github.workspace}}/build/_deps/*-src - name: Restore CMake dependency build - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_build_restore with: key: ${{env.deps_build_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} @@ -64,7 +64,7 @@ jobs: ${{github.workspace}}/build/_deps/*-build ${{github.workspace}}/build/_deps/*-subbuild - name: Restore CCache - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: ccache_restore with: key: ${{env.ccache_key}}-${{github.sha}} @@ -109,13 +109,13 @@ jobs: # SAVING CACHES - name: Save CCache if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.ccache_restore.outputs.cache-primary-key}} path: ${{env.CCACHE_DIR}} - name: Save CMake dependency build if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_build_restore.outputs.cache-primary-key}} path: | @@ -123,7 +123,7 @@ jobs: ${{github.workspace}}/build/_deps/*-subbuild - name: Save CMake dependency source code if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_src_restore.outputs.cache-primary-key}} path: ${{github.workspace}}/build/_deps/*-src diff --git a/.github/workflows/amd64_linux_cmake_java.yml b/.github/workflows/amd64_linux_cmake_java.yml index 796c3767157..82582a5ce2a 100644 --- a/.github/workflows/amd64_linux_cmake_java.yml +++ b/.github/workflows/amd64_linux_cmake_java.yml @@ -37,13 +37,13 @@ jobs: # RESTORING CACHES - name: Restore CMake dependency source code - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_src_restore with: key: ${{env.deps_src_key}}-${{hashFiles('CMakeLists.txt', 
'cmake/**')}} path: ${{github.workspace}}/build/_deps/*-src - name: Restore CMake dependency build - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_build_restore with: key: ${{env.deps_build_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} @@ -51,7 +51,7 @@ jobs: ${{github.workspace}}/build/_deps/*-build ${{github.workspace}}/build/_deps/*-subbuild - name: Restore CCache - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: ccache_restore with: key: ${{env.ccache_key}}-${{github.sha}} @@ -95,13 +95,13 @@ jobs: # SAVING CACHES - name: Save CCache if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.ccache_restore.outputs.cache-primary-key}} path: ${{env.CCACHE_DIR}} - name: Save CMake dependency build if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_build_restore.outputs.cache-primary-key}} path: | @@ -109,7 +109,7 @@ jobs: ${{github.workspace}}/build/_deps/*-subbuild - name: Save CMake dependency source code if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_src_restore.outputs.cache-primary-key}} path: ${{github.workspace}}/build/_deps/*-src diff --git a/.github/workflows/amd64_linux_cmake_python.yml b/.github/workflows/amd64_linux_cmake_python.yml index 9b0c082d18a..92045b37200 100644 --- a/.github/workflows/amd64_linux_cmake_python.yml +++ b/.github/workflows/amd64_linux_cmake_python.yml @@ -52,13 +52,13 @@ jobs: # RESTORING CACHES - name: Restore CMake dependency source code - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_src_restore with: key: ${{env.deps_src_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: ${{github.workspace}}/build/_deps/*-src - name: Restore CMake dependency build - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_build_restore with: 
key: ${{env.deps_build_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} @@ -66,7 +66,7 @@ jobs: ${{github.workspace}}/build/_deps/*-build ${{github.workspace}}/build/_deps/*-subbuild - name: Restore CCache - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: ccache_restore with: key: ${{env.ccache_key}}-${{github.sha}} @@ -110,13 +110,13 @@ jobs: # SAVING CACHES - name: Save CCache if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.ccache_restore.outputs.cache-primary-key}} path: ${{env.CCACHE_DIR}} - name: Save CMake dependency build if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_build_restore.outputs.cache-primary-key}} path: | @@ -124,7 +124,7 @@ jobs: ${{github.workspace}}/build/_deps/*-subbuild - name: Save CMake dependency source code if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_src_restore.outputs.cache-primary-key}} path: ${{github.workspace}}/build/_deps/*-src diff --git a/.github/workflows/amd64_linux_cmake_scip_off.yml b/.github/workflows/amd64_linux_cmake_scip_off.yml index 6689b1bd929..31fe0eecf6a 100644 --- a/.github/workflows/amd64_linux_cmake_scip_off.yml +++ b/.github/workflows/amd64_linux_cmake_scip_off.yml @@ -50,13 +50,13 @@ jobs: # RESTORING CACHES - name: Restore CMake dependency source code - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_src_restore with: key: ${{env.deps_src_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: ${{github.workspace}}/build/_deps/*-src - name: Restore CMake dependency build - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_build_restore with: key: ${{env.deps_build_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} @@ -64,7 +64,7 @@ jobs: ${{github.workspace}}/build/_deps/*-build ${{github.workspace}}/build/_deps/*-subbuild - 
name: Restore CCache - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: ccache_restore with: key: ${{env.ccache_key}}-${{github.sha}} @@ -109,13 +109,13 @@ jobs: # SAVING CACHES - name: Save CCache if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.ccache_restore.outputs.cache-primary-key}} path: ${{env.CCACHE_DIR}} - name: Save CMake dependency build if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_build_restore.outputs.cache-primary-key}} path: | @@ -123,7 +123,7 @@ jobs: ${{github.workspace}}/build/_deps/*-subbuild - name: Save CMake dependency source code if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_src_restore.outputs.cache-primary-key}} path: ${{github.workspace}}/build/_deps/*-src diff --git a/.github/workflows/amd64_macos_bazel.yml b/.github/workflows/amd64_macos_bazel.yml index 6a431f70cd8..5d2cb0fd291 100644 --- a/.github/workflows/amd64_macos_bazel.yml +++ b/.github/workflows/amd64_macos_bazel.yml @@ -57,7 +57,7 @@ jobs: sed -i '' -e 's/\(DEFAULT_PYTHON =\) "3.[0-9]*"/\1 "${{matrix.python.version}}"/g' MODULE.bazel cat MODULE.bazel - name: Restore bazel cache - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: bazel-cache-restore with: key: ${{env.cache-name}} @@ -67,7 +67,7 @@ jobs: - name: Test run: bazel --output_user_root=${{env.cache-root}} test --config=ci //ortools/... //examples/... - name: Save Bazel cache for branch main even when Build and Test fail. 
- uses: actions/cache/save@v4 + uses: actions/cache/save@v5 # https://docs.github.com/en/actions/reference/workflows-and-actions/dependency-caching#restrictions-for-accessing-a-cache if: always() && github.ref == 'refs/heads/main' with: diff --git a/.github/workflows/amd64_macos_cmake_cpp.yml b/.github/workflows/amd64_macos_cmake_cpp.yml index 28964ae5a2f..4743a068d8e 100644 --- a/.github/workflows/amd64_macos_cmake_cpp.yml +++ b/.github/workflows/amd64_macos_cmake_cpp.yml @@ -39,13 +39,13 @@ jobs: # RESTORING CACHES - name: Restore CMake dependency source code - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_src_restore with: key: ${{env.deps_src_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: ${{github.workspace}}/build/_deps/*-src - name: Restore CMake dependency build - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_build_restore with: key: ${{env.deps_build_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} @@ -53,7 +53,7 @@ jobs: ${{github.workspace}}/build/_deps/*-build ${{github.workspace}}/build/_deps/*-subbuild - name: Restore CCache - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: ccache_restore with: key: ${{env.ccache_key}}-${{github.sha}} @@ -96,13 +96,13 @@ jobs: # SAVING CACHES - name: Save CCache if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.ccache_restore.outputs.cache-primary-key}} path: ${{env.CCACHE_DIR}} - name: Save CMake dependency build if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_build_restore.outputs.cache-primary-key}} path: | @@ -110,7 +110,7 @@ jobs: ${{github.workspace}}/build/_deps/*-subbuild - name: Save CMake dependency source code if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_src_restore.outputs.cache-primary-key}} path: 
${{github.workspace}}/build/_deps/*-src diff --git a/.github/workflows/amd64_macos_cmake_dotnet.yml b/.github/workflows/amd64_macos_cmake_dotnet.yml index 455fc76f82a..09b20f6fe19 100644 --- a/.github/workflows/amd64_macos_cmake_dotnet.yml +++ b/.github/workflows/amd64_macos_cmake_dotnet.yml @@ -47,13 +47,13 @@ jobs: # RESTORING CACHES - name: Restore CMake dependency source code - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_src_restore with: key: ${{env.deps_src_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: ${{github.workspace}}/build/_deps/*-src - name: Restore CMake dependency build - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_build_restore with: key: ${{env.deps_build_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} @@ -61,7 +61,7 @@ jobs: ${{github.workspace}}/build/_deps/*-build ${{github.workspace}}/build/_deps/*-subbuild - name: Restore CCache - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: ccache_restore with: key: ${{env.ccache_key}}-${{github.sha}} @@ -105,13 +105,13 @@ jobs: # SAVING CACHES - name: Save CCache if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.ccache_restore.outputs.cache-primary-key}} path: ${{env.CCACHE_DIR}} - name: Save CMake dependency build if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_build_restore.outputs.cache-primary-key}} path: | @@ -119,7 +119,7 @@ jobs: ${{github.workspace}}/build/_deps/*-subbuild - name: Save CMake dependency source code if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_src_restore.outputs.cache-primary-key}} path: ${{github.workspace}}/build/_deps/*-src diff --git a/.github/workflows/amd64_macos_cmake_java.yml b/.github/workflows/amd64_macos_cmake_java.yml index 80e680ecb79..fdcb3fe4a82 100644 --- 
a/.github/workflows/amd64_macos_cmake_java.yml +++ b/.github/workflows/amd64_macos_cmake_java.yml @@ -43,13 +43,13 @@ jobs: # RESTORING CACHES - name: Restore CMake dependency source code - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_src_restore with: key: ${{env.deps_src_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: ${{github.workspace}}/build/_deps/*-src - name: Restore CMake dependency build - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_build_restore with: key: ${{env.deps_build_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} @@ -57,7 +57,7 @@ jobs: ${{github.workspace}}/build/_deps/*-build ${{github.workspace}}/build/_deps/*-subbuild - name: Restore CCache - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: ccache_restore with: key: ${{env.ccache_key}}-${{github.sha}} @@ -101,13 +101,13 @@ jobs: # SAVING CACHES - name: Save CCache if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.ccache_restore.outputs.cache-primary-key}} path: ${{env.CCACHE_DIR}} - name: Save CMake dependency build if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_build_restore.outputs.cache-primary-key}} path: | @@ -115,7 +115,7 @@ jobs: ${{github.workspace}}/build/_deps/*-subbuild - name: Save CMake dependency source code if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_src_restore.outputs.cache-primary-key}} path: ${{github.workspace}}/build/_deps/*-src diff --git a/.github/workflows/amd64_macos_cmake_python.yml b/.github/workflows/amd64_macos_cmake_python.yml index 72b961b4807..6d138739a2e 100644 --- a/.github/workflows/amd64_macos_cmake_python.yml +++ b/.github/workflows/amd64_macos_cmake_python.yml @@ -57,13 +57,13 @@ jobs: # RESTORING CACHES - name: Restore CMake dependency source code - uses: 
actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_src_restore with: key: ${{env.deps_src_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: ${{github.workspace}}/build/_deps/*-src - name: Restore CMake dependency build - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_build_restore with: key: ${{env.deps_build_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} @@ -71,7 +71,7 @@ jobs: ${{github.workspace}}/build/_deps/*-build ${{github.workspace}}/build/_deps/*-subbuild - name: Restore CCache - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: ccache_restore with: key: ${{env.ccache_key}}-${{github.sha}} @@ -115,13 +115,13 @@ jobs: # SAVING CACHES - name: Save CCache if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.ccache_restore.outputs.cache-primary-key}} path: ${{env.CCACHE_DIR}} - name: Save CMake dependency build if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_build_restore.outputs.cache-primary-key}} path: | @@ -129,7 +129,7 @@ jobs: ${{github.workspace}}/build/_deps/*-subbuild - name: Save CMake dependency source code if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_src_restore.outputs.cache-primary-key}} path: ${{github.workspace}}/build/_deps/*-src diff --git a/.github/workflows/amd64_windows_bazel.yml b/.github/workflows/amd64_windows_bazel.yml index 1f06be32600..0988214d3f4 100644 --- a/.github/workflows/amd64_windows_bazel.yml +++ b/.github/workflows/amd64_windows_bazel.yml @@ -52,7 +52,7 @@ jobs: - name: Check Bazel run: bazel version - name: Restore bazel cache - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: bazel-cache-restore with: key: ${{env.cache-name}} @@ -62,7 +62,7 @@ jobs: - name: Test run: bazel --output_user_root=${{env.cache-root}} test 
--config=ci //ortools/... //examples/... - name: Save Bazel cache for branch main even when Build and Test fail. - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 # https://docs.github.com/en/actions/reference/workflows-and-actions/dependency-caching#restrictions-for-accessing-a-cache if: always() && github.ref == 'refs/heads/main' with: diff --git a/.github/workflows/amd64_windows_cmake_cpp.yml b/.github/workflows/amd64_windows_cmake_cpp.yml index 20457d19a35..9c36a94d4d0 100644 --- a/.github/workflows/amd64_windows_cmake_cpp.yml +++ b/.github/workflows/amd64_windows_cmake_cpp.yml @@ -33,12 +33,12 @@ jobs: # CONFIGURING CACHES - name: Cache CMake dependency source code - uses: actions/cache@v4 + uses: actions/cache@v5 with: key: ${{env.deps_src_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: ${{github.workspace}}/build/_deps/*-src - name: Cache CMake dependency build - uses: actions/cache@v4 + uses: actions/cache@v5 with: key: ${{env.deps_build_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: | diff --git a/.github/workflows/amd64_windows_cmake_dotnet.yml b/.github/workflows/amd64_windows_cmake_dotnet.yml index 47269dbc654..62f5202657e 100644 --- a/.github/workflows/amd64_windows_cmake_dotnet.yml +++ b/.github/workflows/amd64_windows_cmake_dotnet.yml @@ -38,12 +38,12 @@ jobs: # CONFIGURING CACHES - name: Cache CMake dependency source code - uses: actions/cache@v4 + uses: actions/cache@v5 with: key: ${{env.deps_src_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: ${{github.workspace}}/build/_deps/*-src - name: Cache CMake dependency build - uses: actions/cache@v4 + uses: actions/cache@v5 with: key: ${{env.deps_build_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: | diff --git a/.github/workflows/amd64_windows_cmake_java.yml b/.github/workflows/amd64_windows_cmake_java.yml index c67b1a72e87..dae335fd89f 100644 --- a/.github/workflows/amd64_windows_cmake_java.yml +++ b/.github/workflows/amd64_windows_cmake_java.yml @@ 
-57,12 +57,12 @@ jobs: # CONFIGURING CACHES - name: Cache CMake dependency source code - uses: actions/cache@v4 + uses: actions/cache@v5 with: key: ${{env.deps_src_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: ${{github.workspace}}/build/_deps/*-src - name: Cache CMake dependency build - uses: actions/cache@v4 + uses: actions/cache@v5 with: key: ${{env.deps_build_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: | diff --git a/.github/workflows/amd64_windows_cmake_python.yml b/.github/workflows/amd64_windows_cmake_python.yml index 7a1beb4c766..a82bfdf3f3c 100644 --- a/.github/workflows/amd64_windows_cmake_python.yml +++ b/.github/workflows/amd64_windows_cmake_python.yml @@ -49,12 +49,12 @@ jobs: # CONFIGURING CACHES - name: Cache CMake dependency source code - uses: actions/cache@v4 + uses: actions/cache@v5 with: key: ${{env.deps_src_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: ${{github.workspace}}/build/_deps/*-src - name: Cache CMake dependency build - uses: actions/cache@v4 + uses: actions/cache@v5 with: key: ${{env.deps_build_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: | diff --git a/.github/workflows/arm64_macos_bazel.yml b/.github/workflows/arm64_macos_bazel.yml index 3a6ec458a82..96ebe7b5712 100644 --- a/.github/workflows/arm64_macos_bazel.yml +++ b/.github/workflows/arm64_macos_bazel.yml @@ -57,7 +57,7 @@ jobs: sed -i '' -e 's/\(DEFAULT_PYTHON =\) "3.[0-9]*"/\1 "${{matrix.python.version}}"/g' MODULE.bazel cat MODULE.bazel - name: Restore bazel cache - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: bazel-cache-restore with: key: ${{env.cache-name}} @@ -67,7 +67,7 @@ jobs: - name: Test run: bazel --output_user_root=${{env.cache-root}} test --config=ci //ortools/... //examples/... - name: Save Bazel cache for branch main even when Build and Test fail. 
- uses: actions/cache/save@v4 + uses: actions/cache/save@v5 # https://docs.github.com/en/actions/reference/workflows-and-actions/dependency-caching#restrictions-for-accessing-a-cache if: always() && github.ref == 'refs/heads/main' with: diff --git a/.github/workflows/arm64_macos_cmake_cpp.yml b/.github/workflows/arm64_macos_cmake_cpp.yml index f9ee7eb27d5..1faccd2966b 100644 --- a/.github/workflows/arm64_macos_cmake_cpp.yml +++ b/.github/workflows/arm64_macos_cmake_cpp.yml @@ -39,13 +39,13 @@ jobs: # RESTORING CACHES - name: Restore CMake dependency source code - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_src_restore with: key: ${{env.deps_src_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: ${{github.workspace}}/build/_deps/*-src - name: Restore CMake dependency build - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_build_restore with: key: ${{env.deps_build_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} @@ -53,7 +53,7 @@ jobs: ${{github.workspace}}/build/_deps/*-build ${{github.workspace}}/build/_deps/*-subbuild - name: Restore CCache - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: ccache_restore with: key: ${{env.ccache_key}}-${{github.sha}} @@ -96,13 +96,13 @@ jobs: # SAVING CACHES - name: Save CCache if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.ccache_restore.outputs.cache-primary-key}} path: ${{env.CCACHE_DIR}} - name: Save CMake dependency build if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_build_restore.outputs.cache-primary-key}} path: | @@ -110,7 +110,7 @@ jobs: ${{github.workspace}}/build/_deps/*-subbuild - name: Save CMake dependency source code if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_src_restore.outputs.cache-primary-key}} path: 
${{github.workspace}}/build/_deps/*-src diff --git a/.github/workflows/arm64_macos_cmake_dotnet.yml b/.github/workflows/arm64_macos_cmake_dotnet.yml index 8b54e39e6cc..458bd0fbfd1 100644 --- a/.github/workflows/arm64_macos_cmake_dotnet.yml +++ b/.github/workflows/arm64_macos_cmake_dotnet.yml @@ -47,13 +47,13 @@ jobs: # RESTORING CACHES - name: Restore CMake dependency source code - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_src_restore with: key: ${{env.deps_src_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: ${{github.workspace}}/build/_deps/*-src - name: Restore CMake dependency build - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_build_restore with: key: ${{env.deps_build_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} @@ -61,7 +61,7 @@ jobs: ${{github.workspace}}/build/_deps/*-build ${{github.workspace}}/build/_deps/*-subbuild - name: Restore CCache - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: ccache_restore with: key: ${{env.ccache_key}}-${{github.sha}} @@ -105,13 +105,13 @@ jobs: # SAVING CACHES - name: Save CCache if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.ccache_restore.outputs.cache-primary-key}} path: ${{env.CCACHE_DIR}} - name: Save CMake dependency build if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_build_restore.outputs.cache-primary-key}} path: | @@ -119,7 +119,7 @@ jobs: ${{github.workspace}}/build/_deps/*-subbuild - name: Save CMake dependency source code if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_src_restore.outputs.cache-primary-key}} path: ${{github.workspace}}/build/_deps/*-src diff --git a/.github/workflows/arm64_macos_cmake_java.yml b/.github/workflows/arm64_macos_cmake_java.yml index d859e8fa3fe..4c9509bc387 100644 --- 
a/.github/workflows/arm64_macos_cmake_java.yml +++ b/.github/workflows/arm64_macos_cmake_java.yml @@ -43,13 +43,13 @@ jobs: # RESTORING CACHES - name: Restore CMake dependency source code - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_src_restore with: key: ${{env.deps_src_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: ${{github.workspace}}/build/_deps/*-src - name: Restore CMake dependency build - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_build_restore with: key: ${{env.deps_build_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} @@ -57,7 +57,7 @@ jobs: ${{github.workspace}}/build/_deps/*-build ${{github.workspace}}/build/_deps/*-subbuild - name: Restore CCache - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: ccache_restore with: key: ${{env.ccache_key}}-${{github.sha}} @@ -101,13 +101,13 @@ jobs: # SAVING CACHES - name: Save CCache if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.ccache_restore.outputs.cache-primary-key}} path: ${{env.CCACHE_DIR}} - name: Save CMake dependency build if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_build_restore.outputs.cache-primary-key}} path: | @@ -115,7 +115,7 @@ jobs: ${{github.workspace}}/build/_deps/*-subbuild - name: Save CMake dependency source code if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_src_restore.outputs.cache-primary-key}} path: ${{github.workspace}}/build/_deps/*-src diff --git a/.github/workflows/arm64_macos_cmake_python.yml b/.github/workflows/arm64_macos_cmake_python.yml index 0fa219c609b..d4e2b062c3e 100644 --- a/.github/workflows/arm64_macos_cmake_python.yml +++ b/.github/workflows/arm64_macos_cmake_python.yml @@ -57,13 +57,13 @@ jobs: # RESTORING CACHES - name: Restore CMake dependency source code - uses: 
actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_src_restore with: key: ${{env.deps_src_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} path: ${{github.workspace}}/build/_deps/*-src - name: Restore CMake dependency build - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: deps_build_restore with: key: ${{env.deps_build_key}}-${{hashFiles('CMakeLists.txt', 'cmake/**')}} @@ -71,7 +71,7 @@ jobs: ${{github.workspace}}/build/_deps/*-build ${{github.workspace}}/build/_deps/*-subbuild - name: Restore CCache - uses: actions/cache/restore@v4 + uses: actions/cache/restore@v5 id: ccache_restore with: key: ${{env.ccache_key}}-${{github.sha}} @@ -115,13 +115,13 @@ jobs: # SAVING CACHES - name: Save CCache if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.ccache_restore.outputs.cache-primary-key}} path: ${{env.CCACHE_DIR}} - name: Save CMake dependency build if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_build_restore.outputs.cache-primary-key}} path: | @@ -129,7 +129,7 @@ jobs: ${{github.workspace}}/build/_deps/*-subbuild - name: Save CMake dependency source code if: github.ref == 'refs/heads/main' - uses: actions/cache/save@v4 + uses: actions/cache/save@v5 with: key: ${{steps.deps_src_restore.outputs.cache-primary-key}} path: ${{github.workspace}}/build/_deps/*-src diff --git a/.github/workflows/presubmit.yml b/.github/workflows/presubmit.yml index 4fbd6cd7708..7e89f3785c0 100644 --- a/.github/workflows/presubmit.yml +++ b/.github/workflows/presubmit.yml @@ -40,7 +40,7 @@ jobs: - uses: actions/setup-python@v6 with: python-version: ${{env.PYTHON_VERSION}} - - uses: bazel-contrib/setup-bazel@0.15.0 + - uses: bazel-contrib/setup-bazel@0.18.0 - name: Build run: bazel test --config=ci //ortools/... shell: bash