From f6fa781fa49118fd83dede4a36d9551ba41b3264 Mon Sep 17 00:00:00 2001 From: Laurent Perron Date: Tue, 28 Sep 2021 15:58:37 +0200 Subject: [PATCH] split ortools/data into ortools/scheduling and ortools/packing --- cmake/cpp.cmake | 10 +- cmake/python.cmake | 9 +- examples/cpp/BUILD | 4 +- examples/cpp/jobshop_sat.cc | 18 +- examples/python/rcpsp_sat.py | 2 +- makefiles/Makefile.cpp.mk | 61 +-- makefiles/Makefile.gen.mk | 136 +++--- makefiles/Makefile.python.mk | 68 +-- ortools/data/set_covering_data.h | 59 --- ortools/data/set_covering_parser.cc | 207 --------- ortools/data/set_covering_parser.h | 113 ----- ortools/packing/BUILD | 3 + ortools/packing/CMakeLists.txt | 21 + ortools/{data => packing}/__init__.py | 2 +- ortools/packing/arc_flow_builder.cc | 408 ++++++++++++++++++ ortools/packing/arc_flow_builder.h | 92 ++++ ortools/packing/arc_flow_solver.cc | 192 +++++++++ .../arc_flow_solver.h} | 33 +- .../vector_bin_packing.proto | 6 +- .../vector_bin_packing_parser.cc | 8 +- .../vector_bin_packing_parser.h | 14 +- ortools/{data => scheduling}/BUILD | 0 ortools/{data => scheduling}/CMakeLists.txt | 4 +- ortools/scheduling/__init__.py | 15 + .../jobshop_scheduling.proto | 6 +- .../jobshop_scheduling_parser.cc | 8 +- .../jobshop_scheduling_parser.h | 12 +- .../python/CMakeLists.txt | 4 +- ortools/{data => scheduling}/python/rcpsp.i | 22 +- ortools/{data => scheduling}/rcpsp.proto | 6 +- ortools/{data => scheduling}/rcpsp_parser.cc | 8 +- ortools/{data => scheduling}/rcpsp_parser.h | 14 +- tools/generate_all_deps.sh | 4 +- 33 files changed, 956 insertions(+), 613 deletions(-) delete mode 100644 ortools/data/set_covering_data.h delete mode 100644 ortools/data/set_covering_parser.cc delete mode 100644 ortools/data/set_covering_parser.h create mode 100644 ortools/packing/BUILD create mode 100644 ortools/packing/CMakeLists.txt rename ortools/{data => packing}/__init__.py (97%) create mode 100644 ortools/packing/arc_flow_builder.cc create mode 100644 
ortools/packing/arc_flow_builder.h create mode 100644 ortools/packing/arc_flow_solver.cc rename ortools/{data/set_covering_data.cc => packing/arc_flow_solver.h} (53%) rename ortools/{data => packing}/vector_bin_packing.proto (96%) rename ortools/{data => packing}/vector_bin_packing_parser.cc (95%) rename ortools/{data => packing}/vector_bin_packing_parser.h (83%) rename ortools/{data => scheduling}/BUILD (100%) rename ortools/{data => scheduling}/CMakeLists.txt (83%) create mode 100644 ortools/scheduling/__init__.py rename ortools/{data => scheduling}/jobshop_scheduling.proto (97%) rename ortools/{data => scheduling}/jobshop_scheduling_parser.cc (99%) rename ortools/{data => scheduling}/jobshop_scheduling_parser.h (89%) rename ortools/{data => scheduling}/python/CMakeLists.txt (94%) rename ortools/{data => scheduling}/python/rcpsp.i (53%) rename ortools/{data => scheduling}/rcpsp.proto (97%) rename ortools/{data => scheduling}/rcpsp_parser.cc (99%) rename ortools/{data => scheduling}/rcpsp_parser.h (87%) diff --git a/cmake/cpp.cmake b/cmake/cpp.cmake index 649422642e..633005c0cb 100644 --- a/cmake/cpp.cmake +++ b/cmake/cpp.cmake @@ -139,13 +139,14 @@ set(PROTO_SRCS) file(GLOB_RECURSE proto_files RELATIVE ${PROJECT_SOURCE_DIR} "ortools/bop/*.proto" "ortools/constraint_solver/*.proto" - "ortools/data/*.proto" "ortools/glop/*.proto" "ortools/graph/*.proto" "ortools/linear_solver/*.proto" - "ortools/sat/*.proto" - "ortools/util/*.proto" "ortools/linear_solver/*.proto" + "ortools/packing/*.proto" + "ortools/sat/*.proto" + "ortools/scheduling/*.proto" + "ortools/util/*.proto" ) if(USE_SCIP) file(GLOB_RECURSE gscip_proto_files RELATIVE ${PROJECT_SOURCE_DIR} "ortools/gscip/*.proto") @@ -209,7 +210,6 @@ foreach(SUBPROJECT IN ITEMS base bop constraint_solver - data ${GSCIP_DIR} glop graph @@ -217,8 +217,10 @@ foreach(SUBPROJECT IN ITEMS init linear_solver lp_data + packing port sat + scheduling util) add_subdirectory(ortools/${SUBPROJECT}) 
#target_link_libraries(${PROJECT_NAME} PRIVATE ${PROJECT_NAME}_${SUBPROJECT}) diff --git a/cmake/python.cmake b/cmake/python.cmake index c746de386a..dad8afceb1 100644 --- a/cmake/python.cmake +++ b/cmake/python.cmake @@ -68,10 +68,11 @@ search_python_module(mypy-protobuf) set(PROTO_PYS) file(GLOB_RECURSE proto_py_files RELATIVE ${PROJECT_SOURCE_DIR} "ortools/constraint_solver/*.proto" - "ortools/data/*.proto" "ortools/linear_solver/*.proto" + "ortools/packing/*.proto" "ortools/sat/*.proto" "ortools/util/*.proto" + "ortools/scheduling/*.proto" ) list(REMOVE_ITEM proto_py_files "ortools/constraint_solver/demon_profiler.proto") foreach(PROTO_FILE IN LISTS proto_py_files) @@ -106,7 +107,7 @@ if(USE_COINOR) endif() list(APPEND CMAKE_SWIG_FLAGS ${FLAGS} "-I${PROJECT_SOURCE_DIR}") -foreach(SUBPROJECT IN ITEMS init algorithms graph linear_solver constraint_solver sat data util) +foreach(SUBPROJECT IN ITEMS init algorithms graph linear_solver constraint_solver sat scheduling util) add_subdirectory(ortools/${SUBPROJECT}/python) endforeach() @@ -122,7 +123,7 @@ file(COPY ortools/__init__.py DESTINATION python/${PROJECT_NAME}/linear_solver) file(COPY ortools/__init__.py DESTINATION python/${PROJECT_NAME}/constraint_solver) file(COPY ortools/__init__.py DESTINATION python/${PROJECT_NAME}/sat) file(COPY ortools/__init__.py DESTINATION python/${PROJECT_NAME}/sat/python) -file(COPY ortools/__init__.py DESTINATION python/${PROJECT_NAME}/data) +file(COPY ortools/__init__.py DESTINATION python/${PROJECT_NAME}/scheduling) file(COPY ortools/__init__.py DESTINATION python/${PROJECT_NAME}/util) file(COPY @@ -167,7 +168,7 @@ add_custom_target(python_package ALL COMMAND ${CMAKE_COMMAND} -E copy $ ${PROJECT_NAME}/constraint_solver COMMAND ${CMAKE_COMMAND} -E copy $ ${PROJECT_NAME}/linear_solver COMMAND ${CMAKE_COMMAND} -E copy $ ${PROJECT_NAME}/sat - COMMAND ${CMAKE_COMMAND} -E copy $ ${PROJECT_NAME}/data + COMMAND ${CMAKE_COMMAND} -E copy $ ${PROJECT_NAME}/scheduling COMMAND 
${CMAKE_COMMAND} -E copy $ ${PROJECT_NAME}/util #COMMAND ${Python3_EXECUTABLE} setup.py bdist_egg bdist_wheel COMMAND ${Python3_EXECUTABLE} setup.py bdist_wheel diff --git a/examples/cpp/BUILD b/examples/cpp/BUILD index 5eaa63d565..b6ae1b265d 100644 --- a/examples/cpp/BUILD +++ b/examples/cpp/BUILD @@ -240,8 +240,6 @@ cc_binary( "//ortools/base:file", "@com_google_absl//absl/strings", "//ortools/base:timer", - "//ortools/data:jobshop_scheduling_cc_proto", - "//ortools/data:jobshop_scheduling_parser", "//ortools/sat:cp_model", "//ortools/sat:cp_model_solver", "//ortools/sat:disjunctive", @@ -251,6 +249,8 @@ cc_binary( "//ortools/sat:optimization", "//ortools/sat:precedences", "//ortools/sat:sat_solver", + "//ortools/scheduling:jobshop_scheduling_cc_proto", + "//ortools/scheduling:jobshop_scheduling_parser", ], ) diff --git a/examples/cpp/jobshop_sat.cc b/examples/cpp/jobshop_sat.cc index 15236248d6..fc749744d9 100644 --- a/examples/cpp/jobshop_sat.cc +++ b/examples/cpp/jobshop_sat.cc @@ -26,12 +26,12 @@ #include "google/protobuf/wrappers.pb.h" #include "ortools/base/logging.h" #include "ortools/base/timer.h" -#include "ortools/data/jobshop_scheduling.pb.h" -#include "ortools/data/jobshop_scheduling_parser.h" #include "ortools/graph/connected_components.h" #include "ortools/sat/cp_model.h" #include "ortools/sat/cp_model.pb.h" #include "ortools/sat/model.h" +#include "ortools/scheduling/jobshop_scheduling.pb.h" +#include "ortools/scheduling/jobshop_scheduling_parser.h" ABSL_FLAG(std::string, input, "", "Jobshop data file name."); ABSL_FLAG(std::string, params, "", "Sat parameters in text proto format."); @@ -49,12 +49,12 @@ ABSL_FLAG(bool, display_model, false, "Display jobshop proto before solving."); ABSL_FLAG(bool, display_sat_model, false, "Display sat proto before solving."); ABSL_FLAG(int, horizon, -1, "Override horizon computation."); -using operations_research::data::jssp::Job; -using operations_research::data::jssp::JobPrecedence; -using 
operations_research::data::jssp::JsspInputProblem; -using operations_research::data::jssp::Machine; -using operations_research::data::jssp::Task; -using operations_research::data::jssp::TransitionTimeMatrix; +using operations_research::scheduling::jssp::Job; +using operations_research::scheduling::jssp::JobPrecedence; +using operations_research::scheduling::jssp::JsspInputProblem; +using operations_research::scheduling::jssp::Machine; +using operations_research::scheduling::jssp::Task; +using operations_research::scheduling::jssp::TransitionTimeMatrix; namespace operations_research { namespace sat { @@ -871,7 +871,7 @@ int main(int argc, char** argv) { LOG(FATAL) << "Please supply a data file with --input="; } - operations_research::data::jssp::JsspParser parser; + operations_research::scheduling::jssp::JsspParser parser; CHECK(parser.ParseFile(absl::GetFlag(FLAGS_input))); operations_research::sat::Solve(parser.problem()); return EXIT_SUCCESS; diff --git a/examples/python/rcpsp_sat.py b/examples/python/rcpsp_sat.py index 57e9855a23..a8c83b8c11 100644 --- a/examples/python/rcpsp_sat.py +++ b/examples/python/rcpsp_sat.py @@ -18,7 +18,7 @@ import collections from absl import app from absl import flags from google.protobuf import text_format -from ortools.data import pywraprcpsp +from ortools.scheduling import pywraprcpsp from ortools.sat.python import cp_model FLAGS = flags.FLAGS diff --git a/makefiles/Makefile.cpp.mk b/makefiles/Makefile.cpp.mk index 234c383fe0..61cc709f2e 100644 --- a/makefiles/Makefile.cpp.mk +++ b/makefiles/Makefile.cpp.mk @@ -64,9 +64,6 @@ $(GEN_DIR)/ortools/bop: | $(GEN_DIR)/ortools $(GEN_DIR)/ortools/constraint_solver: | $(GEN_DIR)/ortools -$(MKDIR) $(GEN_PATH)$Sortools$Sconstraint_solver -$(GEN_DIR)/ortools/data: | $(GEN_DIR)/ortools - -$(MKDIR) $(GEN_PATH)$Sortools$Sdata - $(GEN_DIR)/ortools/flatzinc: | $(GEN_DIR)/ortools -$(MKDIR) $(GEN_PATH)$Sortools$Sflatzinc @@ -79,18 +76,24 @@ $(GEN_DIR)/ortools/graph: | $(GEN_DIR)/ortools 
$(GEN_DIR)/ortools/gscip: | $(GEN_DIR)/ortools -$(MKDIR) $(GEN_PATH)$Sortools$Sgscip +$(GEN_DIR)/ortools/init: | $(GEN_DIR)/ortools + -$(MKDIR) $(GEN_PATH)$Sortools$Sinit + $(GEN_DIR)/ortools/linear_solver: | $(GEN_DIR)/ortools -$(MKDIR) $(GEN_PATH)$Sortools$Slinear_solver +$(GEN_DIR)/ortools/packing: | $(GEN_DIR)/ortools + -$(MKDIR) $(GEN_PATH)$Sortools$Spacking + $(GEN_DIR)/ortools/sat: | $(GEN_DIR)/ortools -$(MKDIR) $(GEN_PATH)$Sortools$Ssat +$(GEN_DIR)/ortools/scheduling: | $(GEN_DIR)/ortools + -$(MKDIR) $(GEN_PATH)$Sortools$Sscheduling + $(GEN_DIR)/ortools/util: | $(GEN_DIR)/ortools -$(MKDIR) $(GEN_PATH)$Sortools$Sutil -$(GEN_DIR)/ortools/init: | $(GEN_DIR)/ortools - -$(MKDIR) $(GEN_PATH)$Sortools$Sinit - $(GEN_DIR)/examples: | $(GEN_DIR) -$(MKDIR) $(GEN_PATH)$Sexamples @@ -118,9 +121,6 @@ $(OBJ_DIR)/bop: | $(OBJ_DIR) $(OBJ_DIR)/constraint_solver: | $(OBJ_DIR) -$(MKDIR_P) $(OBJ_DIR)$Sconstraint_solver -$(OBJ_DIR)/data: | $(OBJ_DIR) - -$(MKDIR_P) $(OBJ_DIR)$Sdata - $(OBJ_DIR)/flatzinc: | $(OBJ_DIR) -$(MKDIR_P) $(OBJ_DIR)$Sflatzinc @@ -142,12 +142,18 @@ $(OBJ_DIR)/linear_solver: | $(OBJ_DIR) $(OBJ_DIR)/lp_data: | $(OBJ_DIR) -$(MKDIR_P) $(OBJ_DIR)$Slp_data +$(OBJ_DIR)/packing: | $(OBJ_DIR) + -$(MKDIR_P) $(OBJ_DIR)$Spacking + $(OBJ_DIR)/port: | $(OBJ_DIR) -$(MKDIR_P) $(OBJ_DIR)$Sport $(OBJ_DIR)/sat: | $(OBJ_DIR) -$(MKDIR_P) $(OBJ_DIR)$Ssat +$(OBJ_DIR)/scheduling: | $(OBJ_DIR) + -$(MKDIR_P) $(OBJ_DIR)$Sscheduling + $(OBJ_DIR)/util: | $(OBJ_DIR) -$(MKDIR_P) $(OBJ_DIR)$Sutil @@ -159,24 +165,24 @@ $(OBJ_DIR)/swig: | $(OBJ_DIR) ############### # build from: $> grep "pb\.h:" makefiles/Makefile.gen.mk PROTO_DEPS = \ -$(GEN_DIR)/ortools/util/optional_boolean.pb.h \ -$(GEN_DIR)/ortools/data/jobshop_scheduling.pb.h \ -$(GEN_DIR)/ortools/data/rcpsp.pb.h \ -$(GEN_DIR)/ortools/data/vector_bin_packing.pb.h \ -$(GEN_DIR)/ortools/glop/parameters.pb.h \ -$(GEN_DIR)/ortools/graph/flow_problem.pb.h \ -$(GEN_DIR)/ortools/sat/boolean_problem.pb.h \ 
-$(GEN_DIR)/ortools/sat/cp_model.pb.h \ -$(GEN_DIR)/ortools/sat/sat_parameters.pb.h \ $(GEN_DIR)/ortools/bop/bop_parameters.pb.h \ -$(GEN_DIR)/ortools/linear_solver/linear_solver.pb.h \ $(GEN_DIR)/ortools/constraint_solver/assignment.pb.h \ $(GEN_DIR)/ortools/constraint_solver/demon_profiler.pb.h \ $(GEN_DIR)/ortools/constraint_solver/routing_enums.pb.h \ $(GEN_DIR)/ortools/constraint_solver/routing_parameters.pb.h \ $(GEN_DIR)/ortools/constraint_solver/search_limit.pb.h \ $(GEN_DIR)/ortools/constraint_solver/search_stats.pb.h \ -$(GEN_DIR)/ortools/constraint_solver/solver_parameters.pb.h +$(GEN_DIR)/ortools/constraint_solver/solver_parameters.pb.h \ +$(GEN_DIR)/ortools/glop/parameters.pb.h \ +$(GEN_DIR)/ortools/graph/flow_problem.pb.h \ +$(GEN_DIR)/ortools/linear_solver/linear_solver.pb.h \ +$(GEN_DIR)/ortools/packing/vector_bin_packing.pb.h \ +$(GEN_DIR)/ortools/sat/boolean_problem.pb.h \ +$(GEN_DIR)/ortools/sat/cp_model.pb.h \ +$(GEN_DIR)/ortools/sat/sat_parameters.pb.h \ +$(GEN_DIR)/ortools/scheduling/jobshop_scheduling.pb.h \ +$(GEN_DIR)/ortools/scheduling/rcpsp.pb.h \ +$(GEN_DIR)/ortools/util/optional_boolean.pb.h include $(OR_ROOT)makefiles/Makefile.gen.mk @@ -607,7 +613,8 @@ install_ortools_dirs: install_dirs -$(MKDIR) "$(DESTDIR)$(prefix)$Sinclude$Sortools$Sport" -$(MKDIR) "$(DESTDIR)$(prefix)$Sinclude$Sortools$Ssat" -$(MKDIR) "$(DESTDIR)$(prefix)$Sinclude$Sortools$Sutil" - -$(MKDIR) "$(DESTDIR)$(prefix)$Sinclude$Sortools$Sdata" + -$(MKDIR) "$(DESTDIR)$(prefix)$Sinclude$Sortools$Sscheduling" + -$(MKDIR) "$(DESTDIR)$(prefix)$Sinclude$Sortools$Spacking" .PHONY: install_cc # Install C++ OR-Tools to $(DESTDIR)$(prefix) install_cc: install_libortools install_third_party install_doc @@ -633,8 +640,10 @@ install_libortools: $(OR_TOOLS_LIBS) install_ortools_dirs $(COPY) $(GEN_PATH)$Sortools$Ssat$S*.pb.h "$(DESTDIR)$(prefix)$Sinclude$Sortools$Ssat" $(COPY) ortools$Sutil$S*.h "$(DESTDIR)$(prefix)$Sinclude$Sortools$Sutil" $(COPY) $(GEN_PATH)$Sortools$Sutil$S*.pb.h 
"$(DESTDIR)$(prefix)$Sinclude$Sortools$Sutil" - $(COPY) ortools$Sdata$S*.h "$(DESTDIR)$(prefix)$Sinclude$Sortools$Sdata" - $(COPY) $(GEN_PATH)$Sortools$Sdata$S*.pb.h "$(DESTDIR)$(prefix)$Sinclude$Sortools$Sdata" + $(COPY) ortools$Spacking$S*.h "$(DESTDIR)$(prefix)$Sinclude$Sortools$Spacking" + $(COPY) $(GEN_PATH)$Sortools$Spacking$S*.pb.h "$(DESTDIR)$(prefix)$Sinclude$Sortools$Spacking" + $(COPY) ortools$Sscheduling$S*.h "$(DESTDIR)$(prefix)$Sinclude$Sortools$Sscheduling" + $(COPY) $(GEN_PATH)$Sortools$Sscheduling$S*.pb.h "$(DESTDIR)$(prefix)$Sinclude$Sortools$Sscheduling" $(COPY) $(LIB_DIR)$S$(LIB_PREFIX)ortools.$L "$(DESTDIR)$(prefix)$Slib" .PHONY: install_third_party @@ -782,7 +791,8 @@ clean_cc: -$(DEL) $(OBJ_DIR)$Sbase$S*.$O -$(DEL) $(OBJ_DIR)$Sbop$S*.$O -$(DEL) $(OBJ_DIR)$Sconstraint_solver$S*.$O - -$(DEL) $(OBJ_DIR)$Sdata$S*.$O + -$(DEL) $(OBJ_DIR)$Spacking$S*.$O + -$(DEL) $(OBJ_DIR)$Sscheduling$S*.$O -$(DEL) $(OBJ_DIR)$Sflatzinc$S*.$O -$(DEL) $(OBJ_DIR)$Sglop$S*.$O -$(DEL) $(OBJ_DIR)$Sgraph$S*.$O @@ -800,7 +810,8 @@ clean_cc: -$(DEL) $(CC_TESTS) -$(DEL) $(GEN_PATH)$Sortools$Sbop$S*.pb.* -$(DEL) $(GEN_PATH)$Sortools$Sconstraint_solver$S*.pb.* - -$(DEL) $(GEN_PATH)$Sortools$Sdata$S*.pb.* + -$(DEL) $(GEN_PATH)$Sortools$Spacking$S*.pb.* + -$(DEL) $(GEN_PATH)$Sortools$Sscheduling$S*.pb.* -$(DEL) $(GEN_PATH)$Sortools$Sflatzinc$S*.tab.* -$(DEL) $(GEN_PATH)$Sortools$Sflatzinc$S*.yy.* -$(DEL) $(GEN_PATH)$Sortools$Sflatzinc$Sparser.* diff --git a/makefiles/Makefile.gen.mk b/makefiles/Makefile.gen.mk index 6d98eff025..e0d329bff6 100644 --- a/makefiles/Makefile.gen.mk +++ b/makefiles/Makefile.gen.mk @@ -360,108 +360,91 @@ INIT_LIB_OBJS = \ DATA_DEPS = \ - $(SRC_DIR)/ortools/data/jobshop_scheduling_parser.h \ - $(SRC_DIR)/ortools/data/rcpsp_parser.h \ - $(SRC_DIR)/ortools/data/set_covering_data.h \ - $(SRC_DIR)/ortools/data/set_covering_parser.h \ - $(SRC_DIR)/ortools/data/vector_bin_packing_parser.h \ - $(GEN_DIR)/ortools/data/jobshop_scheduling.pb.h \ - 
$(GEN_DIR)/ortools/data/rcpsp.pb.h \ - $(GEN_DIR)/ortools/data/vector_bin_packing.pb.h + $(SRC_DIR)/ortools/scheduling/jobshop_scheduling_parser.h \ + $(SRC_DIR)/ortools/scheduling/rcpsp_parser.h \ + $(SRC_DIR)/ortools/packing/vector_bin_packing_parser.h \ + $(GEN_DIR)/ortools/scheduling/jobshop_scheduling.pb.h \ + $(GEN_DIR)/ortools/scheduling/rcpsp.pb.h \ + $(GEN_DIR)/ortools/packing/vector_bin_packing.pb.h DATA_LIB_OBJS = \ - $(OBJ_DIR)/data/jobshop_scheduling_parser.$O \ - $(OBJ_DIR)/data/rcpsp_parser.$O \ - $(OBJ_DIR)/data/set_covering_data.$O \ - $(OBJ_DIR)/data/set_covering_parser.$O \ - $(OBJ_DIR)/data/vector_bin_packing_parser.$O \ - $(OBJ_DIR)/data/jobshop_scheduling.pb.$O \ - $(OBJ_DIR)/data/rcpsp.pb.$O \ - $(OBJ_DIR)/data/vector_bin_packing.pb.$O + $(OBJ_DIR)/scheduling/jobshop_scheduling_parser.$O \ + $(OBJ_DIR)/scheduling/rcpsp_parser.$O \ + $(OBJ_DIR)/packing/vector_bin_packing_parser.$O \ + $(OBJ_DIR)/scheduling/jobshop_scheduling.pb.$O \ + $(OBJ_DIR)/scheduling/rcpsp.pb.$O \ + $(OBJ_DIR)/packing/vector_bin_packing.pb.$O -objs/data/jobshop_scheduling_parser.$O: \ - ortools/data/jobshop_scheduling_parser.cc \ - ortools/data/jobshop_scheduling_parser.h ortools/base/integral_types.h \ - ortools/gen/ortools/data/jobshop_scheduling.pb.h \ +objs/scheduling/jobshop_scheduling_parser.$O: \ + ortools/scheduling/jobshop_scheduling_parser.cc \ + ortools/scheduling/jobshop_scheduling_parser.h ortools/base/integral_types.h \ + ortools/gen/ortools/scheduling/jobshop_scheduling.pb.h \ ortools/base/commandlineflags.h ortools/base/filelineiter.h \ ortools/base/file.h ortools/base/logging.h ortools/base/log_severity.h \ ortools/base/logging_export.h ortools/base/macros.h \ - ortools/base/vlog_is_on.h | $(OBJ_DIR)/data - $(CCC) $(CFLAGS) -c $(SRC_DIR)$Sortools$Sdata$Sjobshop_scheduling_parser.cc $(OBJ_OUT)$(OBJ_DIR)$Sdata$Sjobshop_scheduling_parser.$O + ortools/base/vlog_is_on.h | $(OBJ_DIR)/scheduling + $(CCC) $(CFLAGS) -c 
$(SRC_DIR)$Sortools$Sscheduling$Sjobshop_scheduling_parser.cc $(OBJ_OUT)$(OBJ_DIR)$Sscheduling$Sjobshop_scheduling_parser.$O -objs/data/rcpsp_parser.$O: ortools/data/rcpsp_parser.cc \ - ortools/data/rcpsp_parser.h ortools/base/integral_types.h \ - ortools/gen/ortools/data/rcpsp.pb.h ortools/base/filelineiter.h \ +objs/scheduling/rcpsp_parser.$O: ortools/scheduling/rcpsp_parser.cc \ + ortools/scheduling/rcpsp_parser.h ortools/base/integral_types.h \ + ortools/gen/ortools/scheduling/rcpsp.pb.h ortools/base/filelineiter.h \ ortools/base/file.h ortools/base/logging.h \ ortools/base/commandlineflags.h ortools/base/log_severity.h \ ortools/base/logging_export.h ortools/base/macros.h \ - ortools/base/vlog_is_on.h | $(OBJ_DIR)/data - $(CCC) $(CFLAGS) -c $(SRC_DIR)$Sortools$Sdata$Srcpsp_parser.cc $(OBJ_OUT)$(OBJ_DIR)$Sdata$Srcpsp_parser.$O + ortools/base/vlog_is_on.h | $(OBJ_DIR)/scheduling + $(CCC) $(CFLAGS) -c $(SRC_DIR)$Sortools$Sscheduling$Srcpsp_parser.cc $(OBJ_OUT)$(OBJ_DIR)$Sscheduling$Srcpsp_parser.$O -objs/data/set_covering_data.$O: ortools/data/set_covering_data.cc \ - ortools/data/set_covering_data.h ortools/base/integral_types.h | $(OBJ_DIR)/data - $(CCC) $(CFLAGS) -c $(SRC_DIR)$Sortools$Sdata$Sset_covering_data.cc $(OBJ_OUT)$(OBJ_DIR)$Sdata$Sset_covering_data.$O - -objs/data/set_covering_parser.$O: ortools/data/set_covering_parser.cc \ - ortools/data/set_covering_parser.h ortools/base/integral_types.h \ - ortools/data/set_covering_data.h ortools/base/filelineiter.h \ - ortools/base/file.h ortools/base/logging.h \ - ortools/base/commandlineflags.h ortools/base/log_severity.h \ - ortools/base/logging_export.h ortools/base/macros.h \ - ortools/base/vlog_is_on.h | $(OBJ_DIR)/data - $(CCC) $(CFLAGS) -c $(SRC_DIR)$Sortools$Sdata$Sset_covering_parser.cc $(OBJ_OUT)$(OBJ_DIR)$Sdata$Sset_covering_parser.$O - -objs/data/vector_bin_packing_parser.$O: \ - ortools/data/vector_bin_packing_parser.cc \ - ortools/data/vector_bin_packing_parser.h ortools/base/integral_types.h \ - 
ortools/gen/ortools/data/vector_bin_packing.pb.h \ +objs/packing/vector_bin_packing_parser.$O: \ + ortools/packing/vector_bin_packing_parser.cc \ + ortools/packing/vector_bin_packing_parser.h ortools/base/integral_types.h \ + ortools/gen/ortools/packing/vector_bin_packing.pb.h \ ortools/base/filelineiter.h ortools/base/file.h ortools/base/logging.h \ ortools/base/commandlineflags.h ortools/base/log_severity.h \ ortools/base/logging_export.h ortools/base/macros.h \ - ortools/base/vlog_is_on.h | $(OBJ_DIR)/data - $(CCC) $(CFLAGS) -c $(SRC_DIR)$Sortools$Sdata$Svector_bin_packing_parser.cc $(OBJ_OUT)$(OBJ_DIR)$Sdata$Svector_bin_packing_parser.$O + ortools/base/vlog_is_on.h | $(OBJ_DIR)/packing + $(CCC) $(CFLAGS) -c $(SRC_DIR)$Sortools$Spacking$Svector_bin_packing_parser.cc $(OBJ_OUT)$(OBJ_DIR)$Spacking$Svector_bin_packing_parser.$O -ortools/data/jobshop_scheduling.proto: ; +ortools/scheduling/jobshop_scheduling.proto: ; -$(GEN_DIR)/ortools/data/jobshop_scheduling.pb.cc: \ - $(SRC_DIR)/ortools/data/jobshop_scheduling.proto | $(GEN_DIR)/ortools/data - $(PROTOC) --experimental_allow_proto3_optional --proto_path=$(INC_DIR) $(PROTOBUF_PROTOC_INC) --cpp_out=$(GEN_PATH) $(SRC_DIR)/ortools/data/jobshop_scheduling.proto +$(GEN_DIR)/ortools/scheduling/jobshop_scheduling.pb.cc: \ + $(SRC_DIR)/ortools/scheduling/jobshop_scheduling.proto | $(GEN_DIR)/ortools/scheduling + $(PROTOC) --experimental_allow_proto3_optional --proto_path=$(INC_DIR) $(PROTOBUF_PROTOC_INC) --cpp_out=$(GEN_PATH) $(SRC_DIR)/ortools/scheduling/jobshop_scheduling.proto -$(GEN_DIR)/ortools/data/jobshop_scheduling.pb.h: \ - $(GEN_DIR)/ortools/data/jobshop_scheduling.pb.cc - $(TOUCH) $(GEN_PATH)$Sortools$Sdata$Sjobshop_scheduling.pb.h +$(GEN_DIR)/ortools/scheduling/jobshop_scheduling.pb.h: \ + $(GEN_DIR)/ortools/scheduling/jobshop_scheduling.pb.cc + $(TOUCH) $(GEN_PATH)$Sortools$Sscheduling$Sjobshop_scheduling.pb.h -$(OBJ_DIR)/data/jobshop_scheduling.pb.$O: \ - $(GEN_DIR)/ortools/data/jobshop_scheduling.pb.cc | 
$(OBJ_DIR)/data - $(CCC) $(CFLAGS) -c $(GEN_PATH)$Sortools$Sdata$Sjobshop_scheduling.pb.cc $(OBJ_OUT)$(OBJ_DIR)$Sdata$Sjobshop_scheduling.pb.$O +$(OBJ_DIR)/scheduling/jobshop_scheduling.pb.$O: \ + $(GEN_DIR)/ortools/scheduling/jobshop_scheduling.pb.cc | $(OBJ_DIR)/scheduling + $(CCC) $(CFLAGS) -c $(GEN_PATH)$Sortools$Sscheduling$Sjobshop_scheduling.pb.cc $(OBJ_OUT)$(OBJ_DIR)$Sscheduling$Sjobshop_scheduling.pb.$O -ortools/data/rcpsp.proto: ; +ortools/scheduling/rcpsp.proto: ; -$(GEN_DIR)/ortools/data/rcpsp.pb.cc: \ - $(SRC_DIR)/ortools/data/rcpsp.proto | $(GEN_DIR)/ortools/data - $(PROTOC) --experimental_allow_proto3_optional --proto_path=$(INC_DIR) $(PROTOBUF_PROTOC_INC) --cpp_out=$(GEN_PATH) $(SRC_DIR)/ortools/data/rcpsp.proto +$(GEN_DIR)/ortools/scheduling/rcpsp.pb.cc: \ + $(SRC_DIR)/ortools/scheduling/rcpsp.proto | $(GEN_DIR)/ortools/scheduling + $(PROTOC) --experimental_allow_proto3_optional --proto_path=$(INC_DIR) $(PROTOBUF_PROTOC_INC) --cpp_out=$(GEN_PATH) $(SRC_DIR)/ortools/scheduling/rcpsp.proto -$(GEN_DIR)/ortools/data/rcpsp.pb.h: \ - $(GEN_DIR)/ortools/data/rcpsp.pb.cc - $(TOUCH) $(GEN_PATH)$Sortools$Sdata$Srcpsp.pb.h +$(GEN_DIR)/ortools/scheduling/rcpsp.pb.h: \ + $(GEN_DIR)/ortools/scheduling/rcpsp.pb.cc + $(TOUCH) $(GEN_PATH)$Sortools$Sscheduling$Srcpsp.pb.h -$(OBJ_DIR)/data/rcpsp.pb.$O: \ - $(GEN_DIR)/ortools/data/rcpsp.pb.cc | $(OBJ_DIR)/data - $(CCC) $(CFLAGS) -c $(GEN_PATH)$Sortools$Sdata$Srcpsp.pb.cc $(OBJ_OUT)$(OBJ_DIR)$Sdata$Srcpsp.pb.$O +$(OBJ_DIR)/scheduling/rcpsp.pb.$O: \ + $(GEN_DIR)/ortools/scheduling/rcpsp.pb.cc | $(OBJ_DIR)/scheduling + $(CCC) $(CFLAGS) -c $(GEN_PATH)$Sortools$Sscheduling$Srcpsp.pb.cc $(OBJ_OUT)$(OBJ_DIR)$Sscheduling$Srcpsp.pb.$O -ortools/data/vector_bin_packing.proto: ; +ortools/packing/vector_bin_packing.proto: ; -$(GEN_DIR)/ortools/data/vector_bin_packing.pb.cc: \ - $(SRC_DIR)/ortools/data/vector_bin_packing.proto | $(GEN_DIR)/ortools/data - $(PROTOC) --experimental_allow_proto3_optional --proto_path=$(INC_DIR) 
$(PROTOBUF_PROTOC_INC) --cpp_out=$(GEN_PATH) $(SRC_DIR)/ortools/data/vector_bin_packing.proto +$(GEN_DIR)/ortools/packing/vector_bin_packing.pb.cc: \ + $(SRC_DIR)/ortools/packing/vector_bin_packing.proto | $(GEN_DIR)/ortools/packing + $(PROTOC) --experimental_allow_proto3_optional --proto_path=$(INC_DIR) $(PROTOBUF_PROTOC_INC) --cpp_out=$(GEN_PATH) $(SRC_DIR)/ortools/packing/vector_bin_packing.proto -$(GEN_DIR)/ortools/data/vector_bin_packing.pb.h: \ - $(GEN_DIR)/ortools/data/vector_bin_packing.pb.cc - $(TOUCH) $(GEN_PATH)$Sortools$Sdata$Svector_bin_packing.pb.h +$(GEN_DIR)/ortools/packing/vector_bin_packing.pb.h: \ + $(GEN_DIR)/ortools/packing/vector_bin_packing.pb.cc + $(TOUCH) $(GEN_PATH)$Sortools$Spacking$Svector_bin_packing.pb.h -$(OBJ_DIR)/data/vector_bin_packing.pb.$O: \ - $(GEN_DIR)/ortools/data/vector_bin_packing.pb.cc | $(OBJ_DIR)/data - $(CCC) $(CFLAGS) -c $(GEN_PATH)$Sortools$Sdata$Svector_bin_packing.pb.cc $(OBJ_OUT)$(OBJ_DIR)$Sdata$Svector_bin_packing.pb.$O +$(OBJ_DIR)/packing/vector_bin_packing.pb.$O: \ + $(GEN_DIR)/ortools/packing/vector_bin_packing.pb.cc | $(OBJ_DIR)/packing + $(CCC) $(CFLAGS) -c $(GEN_PATH)$Sortools$Spacking$Svector_bin_packing.pb.cc $(OBJ_OUT)$(OBJ_DIR)$Spacking$Svector_bin_packing.pb.$O LP_DATA_DEPS = \ $(SRC_DIR)/ortools/lp_data/lp_data.h \ @@ -5206,4 +5189,3 @@ $(GEN_DIR)/ortools/constraint_solver/solver_parameters.pb.h: \ $(OBJ_DIR)/constraint_solver/solver_parameters.pb.$O: \ $(GEN_DIR)/ortools/constraint_solver/solver_parameters.pb.cc | $(OBJ_DIR)/constraint_solver $(CCC) $(CFLAGS) -c $(GEN_PATH)$Sortools$Sconstraint_solver$Ssolver_parameters.pb.cc $(OBJ_OUT)$(OBJ_DIR)$Sconstraint_solver$Ssolver_parameters.pb.$O - diff --git a/makefiles/Makefile.python.mk b/makefiles/Makefile.python.mk index 48e9fd6b3c..a8f63503b6 100644 --- a/makefiles/Makefile.python.mk +++ b/makefiles/Makefile.python.mk @@ -47,7 +47,7 @@ PYGRAPH_LIBS = $(LIB_DIR)/_pywrapgraph.$(SWIG_PYTHON_LIB_SUFFIX) PYCP_LIBS = 
$(LIB_DIR)/_pywrapcp.$(SWIG_PYTHON_LIB_SUFFIX) PYLP_LIBS = $(LIB_DIR)/_pywraplp.$(SWIG_PYTHON_LIB_SUFFIX) PYSAT_LIBS = $(LIB_DIR)/_pywrapsat.$(SWIG_PYTHON_LIB_SUFFIX) -PYDATA_LIBS = $(LIB_DIR)/_pywraprcpsp.$(SWIG_PYTHON_LIB_SUFFIX) +PYSCHEDULING_LIBS = $(LIB_DIR)/_pywraprcpsp.$(SWIG_PYTHON_LIB_SUFFIX) PYSORTED_INTERVAL_LIST_LIBS = $(LIB_DIR)/_sorted_interval_list.$(SWIG_PYTHON_LIB_SUFFIX) PYTHON_OR_TOOLS_LIBS = \ $(GEN_DIR)/ortools/__init__.py \ @@ -57,7 +57,7 @@ PYTHON_OR_TOOLS_LIBS = \ $(PYCP_LIBS) \ $(PYLP_LIBS) \ $(PYSAT_LIBS) \ - $(PYDATA_LIBS) \ + $(PYSCHEDULING_LIBS) \ $(PYSORTED_INTERVAL_LIST_LIBS) # Main target @@ -455,38 +455,38 @@ ifeq ($(PLATFORM),MACOSX) PYRCPSP_LDFLAGS = -install_name @rpath/_pywraprcpsp.$(SWIG_PYTHON_LIB_SUFFIX) # endif -$(GEN_DIR)/ortools/data/rcpsp_pb2.py: \ - $(SRC_DIR)/ortools/data/rcpsp.proto \ +$(GEN_DIR)/ortools/scheduling/rcpsp_pb2.py: \ + $(SRC_DIR)/ortools/scheduling/rcpsp.proto \ $(PROTOBUF_PYTHON_DESC) \ - | $(GEN_DIR)/ortools/data + | $(GEN_DIR)/ortools/scheduling $(PROTOC) --proto_path=$(INC_DIR) --python_out=$(GEN_PATH) $(MYPY_OUT) \ - $(SRC_DIR)/ortools/data/rcpsp.proto + $(SRC_DIR)/ortools/scheduling/rcpsp.proto -$(GEN_DIR)/ortools/data/pywraprcpsp.py: \ - $(SRC_DIR)/ortools/data/rcpsp_parser.h \ +$(GEN_DIR)/ortools/scheduling/pywraprcpsp.py: \ + $(SRC_DIR)/ortools/scheduling/rcpsp_parser.h \ $(SRC_DIR)/ortools/base/base.i \ - $(SRC_DIR)/ortools/data/python/rcpsp.i \ - $(GEN_DIR)/ortools/data/rcpsp_pb2.py \ + $(SRC_DIR)/ortools/scheduling/python/rcpsp.i \ + $(GEN_DIR)/ortools/scheduling/rcpsp_pb2.py \ $(DATA_DEPS) \ $(PROTOBUF_PYTHON_DESC) \ - | $(GEN_DIR)/ortools/data + | $(GEN_DIR)/ortools/scheduling $(SWIG_BINARY) $(SWIG_INC) -I$(INC_DIR) -c++ -python $(SWIG_PYTHON3_FLAG) \ - -o $(GEN_PATH)$Sortools$Sdata$Srcpsp_python_wrap.cc \ + -o $(GEN_PATH)$Sortools$Sscheduling$Srcpsp_python_wrap.cc \ -module pywraprcpsp \ - $(SRC_DIR)/ortools/data$Spython$Srcpsp.i + $(SRC_DIR)/ortools/scheduling$Spython$Srcpsp.i 
-$(GEN_DIR)/ortools/data/rcpsp_python_wrap.cc: \ - $(GEN_DIR)/ortools/data/pywraprcpsp.py +$(GEN_DIR)/ortools/scheduling/rcpsp_python_wrap.cc: \ + $(GEN_DIR)/ortools/scheduling/pywraprcpsp.py $(OBJ_DIR)/swig/rcpsp_python_wrap.$O: \ - $(GEN_DIR)/ortools/data/rcpsp_python_wrap.cc \ + $(GEN_DIR)/ortools/scheduling/rcpsp_python_wrap.cc \ $(DATA_DEPS) \ | $(OBJ_DIR)/swig $(CCC) $(CFLAGS) $(PYTHON_INC) $(PYTHON3_CFLAGS) \ - -c $(GEN_PATH)$Sortools$Sdata$Srcpsp_python_wrap.cc \ + -c $(GEN_PATH)$Sortools$Sscheduling$Srcpsp_python_wrap.cc \ $(OBJ_OUT)$(OBJ_DIR)$Sswig$Srcpsp_python_wrap.$O -$(PYDATA_LIBS): $(OBJ_DIR)/swig/rcpsp_python_wrap.$O $(OR_TOOLS_LIBS) +$(PYSCHEDULING_LIBS): $(OBJ_DIR)/swig/rcpsp_python_wrap.$O $(OR_TOOLS_LIBS) $(DYNAMIC_LD) \ $(PYRCPSP_LDFLAGS) \ $(LD_OUT)$(LIB_DIR)$S_pywraprcpsp.$(SWIG_PYTHON_LIB_SUFFIX) \ @@ -496,9 +496,9 @@ $(PYDATA_LIBS): $(OBJ_DIR)/swig/rcpsp_python_wrap.$O $(OR_TOOLS_LIBS) $(PYTHON_LNK) \ $(PYTHON_LDFLAGS) ifeq ($(SYSTEM),win) - copy $(LIB_DIR)$S_pywraprcpsp.$(SWIG_PYTHON_LIB_SUFFIX) $(GEN_PATH)\\ortools\\data\\_pywraprcpsp.pyd + copy $(LIB_DIR)$S_pywraprcpsp.$(SWIG_PYTHON_LIB_SUFFIX) $(GEN_PATH)\\ortools\\scheduling\\_pywraprcpsp.pyd else - cp $(PYDATA_LIBS) $(GEN_PATH)/ortools/data + cp $(PYSCHEDULING_LIBS) $(GEN_PATH)/ortools/scheduling endif # sorted_interval_list @@ -890,7 +890,7 @@ MISSING_PYPI_FILES = \ $(PYPI_ARCHIVE_TEMP_DIR)/ortools/ortools/constraint_solver \ $(PYPI_ARCHIVE_TEMP_DIR)/ortools/ortools/linear_solver \ $(PYPI_ARCHIVE_TEMP_DIR)/ortools/ortools/sat \ - $(PYPI_ARCHIVE_TEMP_DIR)/ortools/ortools/data \ + $(PYPI_ARCHIVE_TEMP_DIR)/ortools/ortools/scheduling \ $(PYPI_ARCHIVE_TEMP_DIR)/ortools/ortools/util \ $(PYPI_ARCHIVE_LIBS) @@ -991,12 +991,12 @@ $(PYPI_ARCHIVE_TEMP_DIR)/ortools/ortools/sat: $(PYSAT_LIBS) | $(PYPI_ARCHIVE_TEM -$(MKDIR) $(PYPI_ARCHIVE_TEMP_DIR)$Sortools$Sortools$Ssat$Spython $(COPY) ortools$Ssat$Spython$S*.py $(PYPI_ARCHIVE_TEMP_DIR)$Sortools$Sortools$Ssat$Spython 
-$(PYPI_ARCHIVE_TEMP_DIR)/ortools/ortools/data: $(PYDATA_LIBS) | $(PYPI_ARCHIVE_TEMP_DIR)/ortools/ortools - -$(DELREC) $(PYPI_ARCHIVE_TEMP_DIR)$Sortools$Sortools$Sdata - -$(MKDIR) $(PYPI_ARCHIVE_TEMP_DIR)$Sortools$Sortools$Sdata - $(TOUCH) $(PYPI_ARCHIVE_TEMP_DIR)$Sortools$Sortools$Sdata$S__init__.py - $(COPY) $(GEN_PATH)$Sortools$Sdata$S*.py* $(PYPI_ARCHIVE_TEMP_DIR)$Sortools$Sortools$Sdata - $(COPY) $(GEN_PATH)$Sortools$Sdata$S_pywraprcpsp.* $(PYPI_ARCHIVE_TEMP_DIR)$Sortools$Sortools$Sdata +$(PYPI_ARCHIVE_TEMP_DIR)/ortools/ortools/scheduling: $(PYSCHEDULING_LIBS) | $(PYPI_ARCHIVE_TEMP_DIR)/ortools/ortools + -$(DELREC) $(PYPI_ARCHIVE_TEMP_DIR)$Sortools$Sortools$Sscheduling + -$(MKDIR) $(PYPI_ARCHIVE_TEMP_DIR)$Sortools$Sortools$Sscheduling + $(TOUCH) $(PYPI_ARCHIVE_TEMP_DIR)$Sortools$Sortools$Sscheduling$S__init__.py + $(COPY) $(GEN_PATH)$Sortools$Sscheduling$S*.py* $(PYPI_ARCHIVE_TEMP_DIR)$Sortools$Sortools$Sscheduling + $(COPY) $(GEN_PATH)$Sortools$Sscheduling$S_pywraprcpsp.* $(PYPI_ARCHIVE_TEMP_DIR)$Sortools$Sortools$Sscheduling $(PYPI_ARCHIVE_TEMP_DIR)/ortools/ortools/util: $(PYSORTED_INTERVAL_LIST_LIBS) | $(PYPI_ARCHIVE_TEMP_DIR)/ortools/ortools -$(DELREC) $(PYPI_ARCHIVE_TEMP_DIR)$Sortools$Sortools$Sutil @@ -1190,13 +1190,13 @@ clean_python: -$(DELREC) ortools$Ssat$Spython$S__pycache__ -$(DEL) $(GEN_PATH)$Sortools$Ssat$S*_python_wrap.* -$(DEL) $(GEN_PATH)$Sortools$Ssat$S_pywrap* - -$(DEL) $(GEN_PATH)$Sortools$Sdata$S*.py - -$(DEL) $(GEN_PATH)$Sortools$Sdata$S*.pyc - -$(DELREC) $(GEN_PATH)$Sortools$Sdata$S__pycache__ - -$(DEL) ortools$Sdata$S*.pyc - -$(DELREC) ortools$Sdata$S__pycache__ - -$(DEL) $(GEN_PATH)$Sortools$Sdata$S*_python_wrap.* - -$(DEL) $(GEN_PATH)$Sortools$Sdata$S_pywrap* + -$(DEL) $(GEN_PATH)$Sortools$Sscheduling$S*.py + -$(DEL) $(GEN_PATH)$Sortools$Sscheduling$S*.pyc + -$(DELREC) $(GEN_PATH)$Sortools$Sscheduling$S__pycache__ + -$(DEL) ortools$Sscheduling$S*.pyc + -$(DELREC) ortools$Sscheduling$S__pycache__ + -$(DEL) 
$(GEN_PATH)$Sortools$Sscheduling$S*_python_wrap.* + -$(DEL) $(GEN_PATH)$Sortools$Sscheduling$S_pywrap* -$(DEL) $(GEN_PATH)$Sortools$Sutil$S*.py -$(DEL) $(GEN_PATH)$Sortools$Sutil$S*.pyc -$(DELREC) $(GEN_PATH)$Sortools$Sutil$S__pycache__ diff --git a/ortools/data/set_covering_data.h b/ortools/data/set_covering_data.h deleted file mode 100644 index 4756f9e8fb..0000000000 --- a/ortools/data/set_covering_data.h +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2010-2021 Google LLC -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef OR_TOOLS_DATA_SET_COVERING_DATA_H_ -#define OR_TOOLS_DATA_SET_COVERING_DATA_H_ - -#include - -#include "ortools/base/integral_types.h" - -namespace operations_research { -namespace scp { - -class ScpData { - public: - ScpData() : is_set_partitioning_(false) {} - // Getters. - int num_rows() const { return columns_per_row_.size(); } - int num_columns() const { return rows_per_column_.size(); } - // columns_per_row[i][j] returns the index of the jth column covering row i. - const std::vector>& columns_per_row() const { - return columns_per_row_; - } - // rows_per_column[i][j] returns the index of the jth row covering column i. - const std::vector>& rows_per_column() const { - return rows_per_column_; - } - const std::vector& column_costs() const { return column_costs_; } - - bool is_set_partitioning() const { return is_set_partitioning_; } - void set_is_set_partitioning(bool v) { is_set_partitioning_ = v; } - - // Builders. 
- // Calling SetProblemSize() will clear all previous data. - void SetProblemSize(int num_rows, int num_columns); - void SetColumnCost(int column_id, int cost); - void AddRowInColumn(int row, int column); - - private: - std::vector> columns_per_row_; - std::vector> rows_per_column_; - std::vector column_costs_; - bool is_set_partitioning_; -}; - -} // namespace scp -} // namespace operations_research - -#endif // OR_TOOLS_DATA_SET_COVERING_DATA_H_ diff --git a/ortools/data/set_covering_parser.cc b/ortools/data/set_covering_parser.cc deleted file mode 100644 index 047d6ca0c5..0000000000 --- a/ortools/data/set_covering_parser.cc +++ /dev/null @@ -1,207 +0,0 @@ -// Copyright 2010-2021 Google LLC -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "ortools/data/set_covering_parser.h" - -#include - -#include "absl/strings/numbers.h" -#include "absl/strings/str_split.h" -#include "ortools/base/filelineiter.h" - -namespace operations_research { -namespace scp { - -ScpParser::ScpParser() : section_(INIT), line_(0), remaining_(0), current_(0) {} - -bool ScpParser::LoadProblem(const std::string& filename, Format format, - ScpData* data) { - section_ = INIT; - line_ = 0; - remaining_ = 0; - current_ = 0; - - for (const std::string& line : FileLines(filename)) { - ProcessLine(line, format, data); - if (section_ == ERROR) return false; - } - return section_ == END; -} - -void ScpParser::ProcessLine(const std::string& line, Format format, - ScpData* data) { - line_++; - const std::vector words = - absl::StrSplit(line, absl::ByAnyChar(" :\t\r"), absl::SkipEmpty()); - switch (section_) { - case INIT: { - if (words.size() != 2) { - LogError(line, "Problem reading the size of the problem"); - return; - } - const int num_rows = strtoint32(words[0]); - const int num_columns = strtoint32(words[1]); - data->SetProblemSize(num_rows, num_columns); - current_ = 0; - switch (format) { - case SCP_FORMAT: { - section_ = COSTS; - break; - } - case RAILROAD_FORMAT: { - section_ = COLUMN; - break; - } - case TRIPLET_FORMAT: { - section_ = COLUMN; - break; - } - case SPP_FORMAT: { - section_ = COLUMN; - data->set_is_set_partitioning(true); - break; - } - } - break; - } - case COSTS: { - const int num_items = words.size(); - if (current_ + num_items > data->num_columns()) { - LogError(line, "Too many cost items"); - return; - } - for (int i = 0; i < num_items; ++i) { - data->SetColumnCost(current_++, strtoint32(words[i])); - } - if (current_ == data->num_columns()) { - section_ = NUM_COLUMNS_IN_ROW; - current_ = 0; - } - break; - } - case COLUMN: { - switch (format) { - case SCP_FORMAT: { - LogError(line, "Wrong state in the loader"); - return; - } - case RAILROAD_FORMAT: - ABSL_FALLTHROUGH_INTENDED; - case SPP_FORMAT: { - 
if (words.size() < 2) { - LogError(line, "Column declaration too short"); - return; - } - const int cost = strtoint32(words[0]); - data->SetColumnCost(current_, cost); - const int num_items = strtoint32(words[1]); - if (words.size() != 2 + num_items) { - LogError(line, "Mistatch in column declaration"); - return; - } - for (int i = 0; i < num_items; ++i) { - const int row = strtoint32(words[i + 2]) - 1; // 1 based. - data->AddRowInColumn(row, current_); - } - current_++; - if (current_ == data->num_columns()) { - section_ = format == RAILROAD_FORMAT ? END : NUM_NON_ZEROS; - } - break; - } - case TRIPLET_FORMAT: { - if (words.size() != 3) { - LogError(line, "Column declaration does not contain 3 rows"); - break; - } - data->SetColumnCost(current_, 1); - for (int i = 0; i < 3; ++i) { - const int row = strtoint32(words[i]) - 1; // 1 based. - data->AddRowInColumn(row, current_); - } - current_++; - if (current_ == data->num_columns()) { - section_ = END; - } - break; - } - } - break; - } - case NUM_COLUMNS_IN_ROW: { - if (words.size() != 1) { - LogError(line, "The header of a column should be one number"); - return; - } - remaining_ = strtoint32(words[0]); - section_ = ROW; - break; - } - case ROW: { - const int num_items = words.size(); - if (num_items > remaining_) { - LogError(line, "Too many columns in a row declaration"); - return; - } - for (const std::string& w : words) { - remaining_--; - const int column = strtoint32(w) - 1; // 1 based. 
- data->AddRowInColumn(current_, column); - } - if (remaining_ == 0) { - current_++; - if (current_ == data->num_rows()) { - section_ = END; - } else { - section_ = NUM_COLUMNS_IN_ROW; - } - } - break; - } - case NUM_NON_ZEROS: { - if (words.size() != 1) { - LogError(line, "The header of a column should be one number"); - return; - } - section_ = END; - break; - } - case END: { - break; - } - case ERROR: { - break; - } - } -} - -void ScpParser::LogError(const std::string& line, const std::string& message) { - LOG(ERROR) << "Error on line " << line_ << ": " << message << "(" << line - << ")"; - section_ = ERROR; -} - -int ScpParser::strtoint32(const std::string& word) { - int result; - CHECK(absl::SimpleAtoi(word, &result)); - return result; -} - -int64_t ScpParser::strtoint64(const std::string& word) { - int64_t result; - CHECK(absl::SimpleAtoi(word, &result)); - return result; -} - -} // namespace scp -} // namespace operations_research diff --git a/ortools/data/set_covering_parser.h b/ortools/data/set_covering_parser.h deleted file mode 100644 index 7501b4d7d7..0000000000 --- a/ortools/data/set_covering_parser.h +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright 2010-2021 Google LLC -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#ifndef OR_TOOLS_DATA_SET_COVERING_PARSER_H_ -#define OR_TOOLS_DATA_SET_COVERING_PARSER_H_ - -#include -#include -#include - -#include "ortools/base/integral_types.h" -#include "ortools/data/set_covering_data.h" - -namespace operations_research { -namespace scp { - -// Set covering problem. -// -// We have a list of subsets of a set. Each subset has a cost. The -// goal is to select of solution set of subsets such that (1) all elements -// of the set belongs to at least one subset of the solution set, and (2) -// the sum of the cost of each subset in the solution set is minimal. -// -// To follow the standard literature, each element is called a row, and each -// subset is called a column. - -class ScpParser { - public: - enum Section { - INIT, - COSTS, - COLUMN, - NUM_COLUMNS_IN_ROW, - ROW, - NUM_NON_ZEROS, - END, - ERROR, - }; - - enum Format { - // The original scp format of these problem is: - // - // number of rows (m), number of columns (n) - // - // the cost of each column c(j),j=1,...,n - // - // for each row i (i=1,...,m): the number of columns which cover row - // i followed by a list of the columns which cover row i. - // - // The original problems (scp*) from the OR-LIB follow this format. - SCP_FORMAT, - // The railroad format is: - // number of rows (m), number of columns (n) - // - // for each column j (j=1,...,n): the cost of the column, the number - // of rows that it covers followed by a list of the rows that it - // covers. - // - // The railroad problems follow this format. - RAILROAD_FORMAT, - // The triplet format is: - // - // number of rows (m), number of columns (n) - // - // for each column, the 3 rows it contains. Note that the cost of - // each column is 1. - // - // The Steiner triple covering problems follow this format. 
- TRIPLET_FORMAT, - // The spp format is: - // number of rows (m), number of columns (n) - // - // for each column j (j=1,...,n): the cost of the column, the number - // of rows that it covers followed by a list of the rows that it - // covers. - // - // number of non_zeros - // - // The set partitioning problems follow this format. - SPP_FORMAT - }; - - ScpParser(); - - // This will clear the data before importing the file. - bool LoadProblem(const std::string& filename, Format format, ScpData* data); - - private: - void ProcessLine(const std::string& line, Format format, ScpData* data); - void LogError(const std::string& line, const std::string& error_message); - int strtoint32(const std::string& word); - int64_t strtoint64(const std::string& word); - - Section section_; - int line_; - int remaining_; - int current_; -}; - -} // namespace scp -} // namespace operations_research - -#endif // OR_TOOLS_DATA_SET_COVERING_PARSER_H_ diff --git a/ortools/packing/BUILD b/ortools/packing/BUILD new file mode 100644 index 0000000000..9e40fb978c --- /dev/null +++ b/ortools/packing/BUILD @@ -0,0 +1,3 @@ +load("@rules_cc//cc:defs.bzl", "cc_proto_library") + +package(default_visibility = ["//visibility:public"]) diff --git a/ortools/packing/CMakeLists.txt b/ortools/packing/CMakeLists.txt new file mode 100644 index 0000000000..3636dd06a1 --- /dev/null +++ b/ortools/packing/CMakeLists.txt @@ -0,0 +1,21 @@ +file(GLOB _SRCS "*.h" "*.cc") +set(NAME ${PROJECT_NAME}_packing) + +# Will be merge in libortools.so +#add_library(${NAME} STATIC ${_SRCS}) +add_library(${NAME} OBJECT ${_SRCS}) +set_target_properties(${NAME} PROPERTIES + CXX_STANDARD 17 + CXX_STANDARD_REQUIRED ON + CXX_EXTENSIONS OFF + POSITION_INDEPENDENT_CODE ON + ) +target_include_directories(${NAME} PRIVATE + ${PROJECT_SOURCE_DIR} + ${PROJECT_BINARY_DIR}) +target_link_libraries(${NAME} PRIVATE + absl::flags + absl::strings + protobuf::libprotobuf + ${PROJECT_NAME}::proto) +#add_library(${PROJECT_NAME}::packing ALIAS 
${NAME}) diff --git a/ortools/data/__init__.py b/ortools/packing/__init__.py similarity index 97% rename from ortools/data/__init__.py rename to ortools/packing/__init__.py index 695694b10c..cb63857e22 100644 --- a/ortools/data/__init__.py +++ b/ortools/packing/__init__.py @@ -11,5 +11,5 @@ # See the License for the specific language governing permissions and # limitations under the License. import os as _os -__path__.append(_os.path.join(__path__[0], '..', 'gen', 'ortools', 'data')) +__path__.append(_os.path.join(__path__[0], '..', 'gen', 'ortools', 'packing')) __path__.append(_os.path.join(__path__[0], '..', '..', 'lib')) diff --git a/ortools/packing/arc_flow_builder.cc b/ortools/packing/arc_flow_builder.cc new file mode 100644 index 0000000000..3c79050ef9 --- /dev/null +++ b/ortools/packing/arc_flow_builder.cc @@ -0,0 +1,408 @@ +// Copyright 2010-2021 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "ortools/packing/arc_flow_builder.h" + +#include +#include + +#include "absl/container/flat_hash_map.h" +#include "absl/strings/str_cat.h" +#include "absl/strings/str_join.h" +#include "ortools/base/commandlineflags.h" +#include "ortools/base/map_util.h" +#include "ortools/base/stl_util.h" +#include "ortools/graph/topologicalsorter.h" + +namespace operations_research { +namespace packing { +namespace { + +class ArcFlowBuilder { + public: + // Same arguments as BuildArcFlowGraph(): see the .h. 
+ ArcFlowBuilder(const std::vector& bin_dimensions, + const std::vector>& item_dimensions_by_type, + const std::vector& demand_by_type); + + // Builds the arc-flow graph. + ArcFlowGraph BuildVectorBinPackingGraph(); + + // For debugging purposes.tring( + // Returns the number of states explored in the dynamic programming phase. + int64_t NumDpStates() const; + + private: + // All items data, regrouped for sorting purposes. + struct Item { + std::vector dimensions; + int demand; + int original_index; + + // Used to sort items by relative size. + double NormalizedSize(const std::vector& bin_dimensions) const; + }; + + // State of the dynamic programming algorithm. + struct DpState { + int cur_item_index; + int cur_item_quantity; + std::vector used_dimensions; + // DP State indices of the states that can be obtained by moving + // either "right" to (cur_item_index, cur_item_quantity++) or "up" + // to (cur_item_index++, cur_item_quantity=0). -1 if impossible. + int right_child; + int up_child; + }; + + // Add item iteratively to create all possible nodes in a forward pass. + void ForwardCreationPass(DpState* dp_state); + // Scan DP-nodes backward to relabels each nodes by increasing them as much + // as possible. + void BackwardCompressionPass(int state_index); + // Relabel nodes by decreasing them as much as possible. + void ForwardCompressionPass(const std::vector& source_node); + + // Can we fit one more item in the bin? + bool CanFitNewItem(const std::vector& used_dimensions, int item) const; + // Create a new used_dimensions that is used_dimensions + item dimensions. + std::vector AddItem(const std::vector& used_dimensions, + int item) const; + + // DpState helpers. 
+ int LookupOrCreateDpState(int item, int quantity, + const std::vector& used_dimensions); + + const std::vector bin_dimensions_; + std::vector items_; + + typedef absl::flat_hash_map, int> VectorIntIntMap; + int GetOrCreateNode(const std::vector& used_dimensions); + + // We store all DP states in a dense vector, and remember their index + // in the dp_state_index_ map (we use a tri-dimensional indexing because + // it's faster for the hash part). + std::vector dp_states_; + std::vector> dp_state_index_; + + // The ArcFlowGraph will have nodes which will correspond to "some" + // of the vector representing the partial bin usages encountered during + // the algo. These two data structures map one to the other (note that nodes + // are dense integers). + absl::flat_hash_map, int> node_indices_; + std::vector> nodes_; + + std::set arcs_; +}; + +double ArcFlowBuilder::Item::NormalizedSize( + const std::vector& bin_dimensions) const { + double size = 0.0; + for (int i = 0; i < bin_dimensions.size(); ++i) { + size += static_cast(dimensions[i]) / bin_dimensions[i]; + } + return size; +} + +int64_t ArcFlowBuilder::NumDpStates() const { + int64_t res = 1; // We do not store the initial state. + for (const auto& it1 : dp_state_index_) { + for (const auto& it2 : it1) { + res += it2.size(); + } + } + return res; +} + +ArcFlowBuilder::ArcFlowBuilder( + const std::vector& bin_dimensions, + const std::vector>& item_dimensions_by_type, + const std::vector& demand_by_type) + : bin_dimensions_(bin_dimensions) { + // Checks dimensions. 
+ for (int i = 0; i < bin_dimensions.size(); ++i) { + CHECK_GT(bin_dimensions[i], 0); + } + + const int num_items = item_dimensions_by_type.size(); + items_.resize(num_items); + for (int i = 0; i < num_items; ++i) { + items_[i].dimensions = item_dimensions_by_type[i]; + items_[i].demand = demand_by_type[i]; + items_[i].original_index = i; + } + std::sort(items_.begin(), items_.end(), [&](const Item& a, const Item& b) { + return a.NormalizedSize(bin_dimensions_) > + b.NormalizedSize(bin_dimensions_); + }); +} + +bool ArcFlowBuilder::CanFitNewItem(const std::vector& used_dimensions, + int item) const { + for (int d = 0; d < bin_dimensions_.size(); ++d) { + if (used_dimensions[d] + items_[item].dimensions[d] > bin_dimensions_[d]) { + return false; + } + } + return true; +} + +std::vector ArcFlowBuilder::AddItem( + const std::vector& used_dimensions, int item) const { + DCHECK(CanFitNewItem(used_dimensions, item)); + std::vector result = used_dimensions; + for (int d = 0; d < bin_dimensions_.size(); ++d) { + result[d] += items_[item].dimensions[d]; + } + return result; +} + +int ArcFlowBuilder::GetOrCreateNode(const std::vector& used_dimensions) { + const auto& it = node_indices_.find(used_dimensions); + if (it != node_indices_.end()) { + return it->second; + } + const int index = node_indices_.size(); + node_indices_[used_dimensions] = index; + nodes_.push_back(used_dimensions); + return index; +} + +ArcFlowGraph ArcFlowBuilder::BuildVectorBinPackingGraph() { + // Initialize the DP states map. + dp_state_index_.resize(items_.size()); + for (int i = 0; i < items_.size(); ++i) { + dp_state_index_[i].resize(items_[i].demand + 1); + } + + // Explore all possible DP states (starting from the initial 'empty' state), + // and remember their ancestry. 
+ std::vector zero(bin_dimensions_.size(), 0); + dp_states_.push_back(new DpState({0, 0, zero, -1, -1})); + for (int i = 0; i < dp_states_.size(); ++i) { + ForwardCreationPass(dp_states_[i]); + } + + // We can clear the dp_state_index map as it will not be used anymore. + // From now on, we will use the dp_states.used_dimensions to store the new + // labels in the backward pass. + const int64_t num_dp_states = NumDpStates(); + dp_state_index_.clear(); + + // Backwards pass: "push" the bin dimensions as far as possible. + const int num_states = dp_states_.size(); + std::vector> flat_deps; + for (int i = 0; i < dp_states_.size(); ++i) { + if (dp_states_[i]->up_child != -1) { + flat_deps.push_back(std::make_pair(dp_states_[i]->up_child, i)); + } + if (dp_states_[i]->right_child != -1) { + flat_deps.push_back(std::make_pair(dp_states_[i]->right_child, i)); + } + } + const std::vector sorted_work = + util::graph::DenseIntStableTopologicalSortOrDie(num_states, flat_deps); + for (const int w : sorted_work) { + BackwardCompressionPass(w); + } + + // ForwardCreationPass again, push the bin dimensions as low as possible. + const std::vector source_node = dp_states_[0]->used_dimensions; + // We can now delete the states stored in dp_states_. + gtl::STLDeleteElements(&dp_states_); + ForwardCompressionPass(source_node); + + // We need to connect all nodes that corresponds to at least one item selected + // to the sink node. 
+ const int sink_node_index = nodes_.size() - 1; + for (int node = 1; node < sink_node_index; ++node) { + arcs_.insert({node, sink_node_index, -1}); + } + + ArcFlowGraph result; + result.arcs.assign(arcs_.begin(), arcs_.end()); + result.nodes.assign(nodes_.begin(), nodes_.end()); + result.num_dp_states = num_dp_states; + return result; +} + +int ArcFlowBuilder::LookupOrCreateDpState( + int item, int quantity, const std::vector& used_dimensions) { + VectorIntIntMap& map = dp_state_index_[item][quantity]; + const int index = + map.insert({used_dimensions, dp_states_.size()}).first->second; + if (index == dp_states_.size()) { + dp_states_.push_back( + new DpState({item, quantity, used_dimensions, -1, -1})); + } + return index; +} + +void ArcFlowBuilder::ForwardCreationPass(DpState* dp_state) { + const int item = dp_state->cur_item_index; + const int quantity = dp_state->cur_item_quantity; + const std::vector& used_dimensions = dp_state->used_dimensions; + + // Explore path up. + if (item < items_.size() - 1) { + dp_state->up_child = LookupOrCreateDpState(item + 1, 0, used_dimensions); + } else { + dp_state->up_child = -1; + } + + // Explore path right. + if (quantity < items_[item].demand && CanFitNewItem(used_dimensions, item)) { + const std::vector added = AddItem(used_dimensions, item); + dp_state->right_child = LookupOrCreateDpState(item, quantity + 1, added); + } else { + dp_state->right_child = -1; + } +} + +void ArcFlowBuilder::BackwardCompressionPass(int state_index) { + // The goal of this function is to fill this. + std::vector& result = dp_states_[state_index]->used_dimensions; + + // Inherit our result from the result one step up. + const int up_index = dp_states_[state_index]->up_child; + const std::vector& result_up = + up_index == -1 ? bin_dimensions_ : dp_states_[up_index]->used_dimensions; + result = result_up; + + // Adjust our result from the result one step right. 
+ const int right_index = dp_states_[state_index]->right_child; + if (right_index == -1) return; // We're done. + const std::vector& result_right = + dp_states_[right_index]->used_dimensions; + const Item& item = items_[dp_states_[state_index]->cur_item_index]; + for (int d = 0; d < bin_dimensions_.size(); ++d) { + result[d] = std::min(result[d], result_right[d] - item.dimensions[d]); + } + + // Insert the arc from the node to the "right" node. + const int node = GetOrCreateNode(result); + const int right_node = GetOrCreateNode(result_right); + DCHECK_NE(node, right_node); + arcs_.insert({node, right_node, item.original_index}); + // Also insert the 'dotted' arc from the node to the "up" node (if different). + if (result != result_up) { + const int up_node = GetOrCreateNode(result_up); + arcs_.insert({node, up_node, -1}); + } +} + +// Reverse version of the backward pass. +// Revisit states forward, and relabel nodes with the longest path in each +// dimensions from the source. The only meaningfull difference is that we use +// arcs and nodes, instead of dp_states. +void ArcFlowBuilder::ForwardCompressionPass( + const std::vector& source_node) { + const int num_nodes = node_indices_.size(); + const int num_dims = bin_dimensions_.size(); + std::set new_arcs; + std::vector> new_nodes; + VectorIntIntMap new_node_indices; + std::vector node_remap(num_nodes, -1); + // We need to revert the sorting of items as arcs store the original index. 
+ std::vector reverse_item_index_map(items_.size(), -1); + for (int i = 0; i < items_.size(); ++i) { + reverse_item_index_map[items_[i].original_index] = i; + } + + std::vector> forward_deps; + std::vector> incoming_arcs(num_nodes); + for (const ArcFlowGraph::Arc& arc : arcs_) { + forward_deps.push_back(std::make_pair(arc.source, arc.destination)); + incoming_arcs[arc.destination].push_back(arc); + } + + const std::vector sorted_work = + util::graph::DenseIntStableTopologicalSortOrDie(num_nodes, forward_deps); + + const int old_source_node = GetOrCreateNode(source_node); + const int old_sink_node = GetOrCreateNode(bin_dimensions_); + CHECK_EQ(sorted_work.front(), old_source_node); + CHECK_EQ(sorted_work.back(), old_sink_node); + + // Process nodes in order and remap state to max(previous_state + item + // dimensions). + for (const int w : sorted_work) { + std::vector new_used(num_dims, 0); + if (w == sorted_work.back()) { // Do not compress the sink node. + new_used = bin_dimensions_; + } else { + for (const ArcFlowGraph::Arc& arc : incoming_arcs[w]) { + const int item = + arc.item_index == -1 ? -1 : reverse_item_index_map[arc.item_index]; + const int prev_node = node_remap[arc.source]; + const std::vector& prev = new_nodes[prev_node]; + DCHECK_NE(prev_node, -1); + for (int d = 0; d < num_dims; ++d) { + if (item != -1) { + new_used[d] = + std::max(new_used[d], prev[d] + items_[item].dimensions[d]); + } else { + new_used[d] = std::max(new_used[d], prev[d]); + } + } + } + } + const auto& it = new_node_indices.find(new_used); + if (it != new_node_indices.end()) { + node_remap[w] = it->second; + } else { + const int new_index = new_nodes.size(); + new_nodes.push_back(new_used); + new_node_indices[new_used] = new_index; + node_remap[w] = new_index; + } + } + // Remap arcs. + for (const ArcFlowGraph::Arc& arc : arcs_) { + CHECK_NE(node_remap[arc.source], -1); + CHECK_NE(node_remap[arc.destination], -1); + // Remove loss arcs between merged nodes. 
+ if (arc.item_index == -1 && + node_remap[arc.source] == node_remap[arc.destination]) + continue; + new_arcs.insert( + {node_remap[arc.source], node_remap[arc.destination], arc.item_index}); + } + VLOG(1) << "Reduced nodes from " << num_nodes << " to " << new_nodes.size(); + VLOG(1) << "Reduced arcs from " << arcs_.size() << " to " << new_arcs.size(); + nodes_ = new_nodes; + arcs_ = new_arcs; + CHECK_NE(node_remap[old_source_node], -1); + CHECK_EQ(0, node_remap[old_source_node]); + CHECK_NE(node_remap[old_sink_node], -1); + CHECK_EQ(nodes_.size() - 1, node_remap[old_sink_node]); +} + +} // namespace + +bool ArcFlowGraph::Arc::operator<(const ArcFlowGraph::Arc& other) const { + if (source != other.source) return source < other.source; + if (destination != other.destination) return destination < other.destination; + return item_index < other.item_index; +} + +ArcFlowGraph BuildArcFlowGraph( + const std::vector& bin_dimensions, + const std::vector>& item_dimensions_by_type, + const std::vector& demand_by_type) { + ArcFlowBuilder afb(bin_dimensions, item_dimensions_by_type, demand_by_type); + return afb.BuildVectorBinPackingGraph(); +} + +} // namespace packing +} // namespace operations_research diff --git a/ortools/packing/arc_flow_builder.h b/ortools/packing/arc_flow_builder.h new file mode 100644 index 0000000000..1cb0bc1e31 --- /dev/null +++ b/ortools/packing/arc_flow_builder.h @@ -0,0 +1,92 @@ +// Copyright 2010-2021 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +// This code builds an arc-flow generator for vector-bin-packing problems. +// see https://people.math.gatech.edu/~tetali/PUBLIS/CKPT.pdf +// It implements a non-recursive version of algorithm 1 described in: +// http://www.dcc.fc.up.pt/~fdabrandao/papers/arcflow_manuscript.pdf +// And in (poster version): +// http://www.dcc.fc.up.pt/~fdabrandao/papers/arcflow_poster.pdf +// Available at: +// https://drive.google.com/open?id=1y-Vs1orv-QHO4lb2sjVWrZr9GQd5d2st +// https://drive.google.com/open?id=1fsWRqgNJ_3ClrhoKIeVc1EOd5s8Mj33i (poster) +// Some improvements are not yet implemented: +// - Lifted stated: when storing a state of the dynamic programming forward +// pass, one can lift a state. A lifted state of a state S is a maximal +// increase of S that does not lose any state in the forward pass. +// A simple example is the following: +// bin, 1 dimension, capacity 5 +// 2 item of size 2. +// After adding item 1 in the DP pass, the state is (2). +// The lifted state is (3) that is (5) - (2) which is the maximal increase +// of (2) that does not loose any state. +// To limit time spent computing this, one can lift a state only if the +// remaining number of item is below a threshold. +// - Disable the backward pass (compress state towards the bin capacity). +// Although this reduces the graph a lot, this simplication is not valid +// when the cost is not the number of bins, but a function of the capacity +// used (useful for fair allocation). + +#ifndef OR_TOOLS_PACKING_ARC_FLOW_BUILDER_H_ +#define OR_TOOLS_PACKING_ARC_FLOW_BUILDER_H_ + +#include +#include +#include + +#include "absl/container/flat_hash_map.h" +#include "ortools/base/integral_types.h" + +namespace operations_research { +namespace packing { + +// Arc flow gragh built from a vector bin packing problem. +// The first node will always be the source. 
The last will always be the sink +// of the arc-flow graph. +struct ArcFlowGraph { + struct Arc { + int source; + int destination; + int item_index; + + // Needed for std::set. + bool operator<(const Arc& other) const; + }; + + std::vector arcs; + // All the nodes explored during the DP phase. + // In the forward pass, these are the consumed capacity of the bin at this + // state. In the backward pass, this is pushed up towards the max capacity + // of the bin. In the final compression phase, this is pushed down towards + // the initial zero state. + std::vector> nodes; + // Debug info. + int64_t num_dp_states; +}; + +// Main method. + +// Arc flow builder. The input must enforce the following constraints: +// - item_dimensions_by_type.size() == demand_by_type.size() == num types +// - for each type t: +// item_dimensions_by_type[t].size() == bin_dimensions.size() == +// num_dimensions +ArcFlowGraph BuildArcFlowGraph( + const std::vector& bin_dimensions, + const std::vector>& item_dimensions_by_type, + const std::vector& demand_by_type); + +} // namespace packing +} // namespace operations_research + +#endif // OR_TOOLS_PACKING_ARC_FLOW_BUILDER_H_ diff --git a/ortools/packing/arc_flow_solver.cc b/ortools/packing/arc_flow_solver.cc new file mode 100644 index 0000000000..248e751a76 --- /dev/null +++ b/ortools/packing/arc_flow_solver.cc @@ -0,0 +1,192 @@ +// Copyright 2010-2021 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "ortools/packing/arc_flow_solver.h" + +#include "absl/flags/flag.h" +#include "ortools/base/commandlineflags.h" +#include "ortools/base/file.h" +#include "ortools/base/timer.h" +#include "ortools/packing/arc_flow_builder.h" + +ABSL_FLAG(std::string, arc_flow_dump_model, "", + "File to store the solver specific optimization proto."); + +namespace operations_research { +namespace packing { + +namespace { +double ConvertVectorBinPackingProblem(const vbp::VectorBinPackingProblem& input, + ArcFlowGraph* graph) { + WallTimer timer; + timer.Start(); + const int num_items = input.item_size(); + const int num_dims = input.resource_capacity_size(); + + // Collect problem data. + std::vector> shapes(num_items); + std::vector demands(num_items); + std::vector capacities(num_dims); + for (int i = 0; i < num_items; ++i) { + shapes[i].assign(input.item(i).resource_usage().begin(), + input.item(i).resource_usage().end()); + demands[i] = input.item(i).num_copies(); + } + for (int i = 0; i < num_dims; ++i) { + capacities[i] = input.resource_capacity(i); + } + + // Add extra dimensions to encode max_number_of_copies_per_bin. + for (int i = 0; i < num_items; ++i) { + const int max_copies = input.item(i).max_number_of_copies_per_bin(); + if (max_copies == 0 || max_copies >= demands[i]) continue; + capacities.push_back(max_copies); + for (int j = 0; j < num_items; ++j) { + shapes[j].push_back(i == j); + } + } + + *graph = BuildArcFlowGraph(capacities, shapes, demands); + const double arc_flow_time = timer.Get(); + + VLOG(1) << "The arc-flow grah has " << graph->nodes.size() << " nodes, and " + << graph->arcs.size() << " arcs. 
It was created by exploring " + << graph->num_dp_states + << " states in the dynamic programming phase in " << arc_flow_time + << " s"; + return arc_flow_time; +} +} // namespace + +vbp::VectorBinPackingSolution SolveVectorBinPackingWithArcFlow( + const vbp::VectorBinPackingProblem& problem, + MPSolver::OptimizationProblemType solver_type, + const std::string& mip_params, double time_limit, int num_threads, + bool log_statistics) { + ArcFlowGraph graph; + const double arc_flow_time = ConvertVectorBinPackingProblem(problem, &graph); + + int max_num_bins = 0; + for (const auto& item : problem.item()) { + max_num_bins += item.num_copies(); + } + const int num_types = problem.item_size(); + std::vector> incoming_vars(graph.nodes.size()); + std::vector> outgoing_vars(graph.nodes.size()); + std::vector arc_to_var(graph.arcs.size()); + std::vector> item_to_vars(num_types); + + MPSolver solver("VectorBinPacking", solver_type); + CHECK_OK(solver.SetNumThreads(num_threads)); + + for (int v = 0; v < graph.arcs.size(); ++v) { + const ArcFlowGraph::Arc& arc = graph.arcs[v]; + MPVariable* const var = + solver.MakeIntVar(0, max_num_bins, absl::StrCat("a", v)); + incoming_vars[arc.destination].push_back(var); + outgoing_vars[arc.source].push_back(var); + if (arc.item_index != -1) { + item_to_vars[arc.item_index].push_back(var); + } + } + + // Per item demand constraint. + for (int i = 0; i < num_types; ++i) { + MPConstraint* const ct = solver.MakeRowConstraint( + problem.item(i).num_copies(), problem.item(i).num_copies()); + for (MPVariable* const var : item_to_vars[i]) { + ct->SetCoefficient(var, 1.0); + } + } + + // Flow conservation. + for (int n = 1; n < graph.nodes.size() - 1; ++n) { // Ignore source and sink. 
+ MPConstraint* const ct = solver.MakeRowConstraint(0.0, 0.0); + for (MPVariable* const var : incoming_vars[n]) { + ct->SetCoefficient(var, 1.0); + } + for (MPVariable* const var : outgoing_vars[n]) { + ct->SetCoefficient(var, -1.0); + } + } + + MPVariable* const obj_var = solver.MakeIntVar(0, max_num_bins, "obj_var"); + { // Source. + MPConstraint* const ct = solver.MakeRowConstraint(0.0, 0.0); + for (MPVariable* const var : outgoing_vars[/*source*/ 0]) { + ct->SetCoefficient(var, 1.0); + } + ct->SetCoefficient(obj_var, -1.0); + } + + { // Sink. + MPConstraint* const ct = solver.MakeRowConstraint(0.0, 0.0); + const int sink_node = graph.nodes.size() - 1; + for (MPVariable* const var : incoming_vars[sink_node]) { + ct->SetCoefficient(var, 1.0); + } + ct->SetCoefficient(obj_var, -1.0); + } + + MPObjective* const objective = solver.MutableObjective(); + objective->SetCoefficient(obj_var, 1.0); + + if (!absl::GetFlag(FLAGS_arc_flow_dump_model).empty()) { + MPModelProto output_model; + solver.ExportModelToProto(&output_model); + CHECK_OK(file::SetTextProto(absl::GetFlag(FLAGS_arc_flow_dump_model), + output_model, file::Defaults())); + } + + solver.EnableOutput(); + solver.SetSolverSpecificParametersAsString(mip_params); + solver.SetTimeLimit(absl::Seconds(time_limit)); + const MPSolver::ResultStatus result_status = solver.Solve(); + + vbp::VectorBinPackingSolution solution; + solution.set_solve_time_in_seconds(solver.wall_time() / 1000.0); + solution.set_arc_flow_time_in_seconds(arc_flow_time); + // Check that the problem has an optimal solution. + if (result_status == MPSolver::OPTIMAL) { + solution.set_status(vbp::OPTIMAL); + solution.set_objective_value(objective->Value()); + } else if (result_status == MPSolver::FEASIBLE) { + solution.set_status(vbp::FEASIBLE); + solution.set_objective_value(objective->Value()); + } else if (result_status == MPSolver::INFEASIBLE) { + solution.set_status(vbp::INFEASIBLE); + } + + // TODO(user): Fill bins in the solution proto. 
+
+  if (log_statistics) {
+    const bool has_solution = result_status == MPSolver::OPTIMAL ||
+                              result_status == MPSolver::FEASIBLE;
+    absl::PrintF("%-12s: %s\n", "Status",
+                 MPSolverResponseStatus_Name(
+                     static_cast<MPSolverResponseStatus>(result_status))
+                     .c_str());
+    absl::PrintF("%-12s: %15.15e\n", "Objective",
+                 has_solution ? solver.Objective().Value() : 0.0);
+    absl::PrintF("%-12s: %15.15e\n", "BestBound",
+                 has_solution ? solver.Objective().BestBound() : 0.0);
+    absl::PrintF("%-12s: %d\n", "Iterations", solver.iterations());
+    absl::PrintF("%-12s: %d\n", "Nodes", solver.nodes());
+    absl::PrintF("%-12s: %-6.4g\n", "Time", solution.solve_time_in_seconds());
+  }
+
+  return solution;
+}
+
+}  // namespace packing
+}  // namespace operations_research
diff --git a/ortools/data/set_covering_data.cc b/ortools/packing/arc_flow_solver.h
similarity index 53%
rename from ortools/data/set_covering_data.cc
rename to ortools/packing/arc_flow_solver.h
index eb41ea149a..394b81cfe4 100644
--- a/ortools/data/set_covering_data.cc
+++ b/ortools/packing/arc_flow_solver.h
@@ -11,27 +11,22 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include "ortools/data/set_covering_data.h" +#ifndef OR_TOOLS_PACKING_ARC_FLOW_SOLVER_H_ +#define OR_TOOLS_PACKING_ARC_FLOW_SOLVER_H_ + +#include "ortools/linear_solver/linear_solver.h" +#include "ortools/packing/vector_bin_packing.pb.h" namespace operations_research { -namespace scp { +namespace packing { -void ScpData::SetProblemSize(int num_rows, int num_columns) { - columns_per_row_.clear(); - columns_per_row_.resize(num_rows); - rows_per_column_.clear(); - rows_per_column_.resize(num_columns); - column_costs_.resize(num_columns, 0); -} +vbp::VectorBinPackingSolution SolveVectorBinPackingWithArcFlow( + const vbp::VectorBinPackingProblem& problem, + MPSolver::OptimizationProblemType solver_type, + const std::string& mip_params, double time_limit, int num_threads, + bool log_statistics); -void ScpData::SetColumnCost(int column_id, int cost) { - column_costs_[column_id] = cost; -} - -void ScpData::AddRowInColumn(int row_id, int column_id) { - rows_per_column_[column_id].push_back(row_id); - columns_per_row_[row_id].push_back(column_id); -} - -} // namespace scp +} // namespace packing } // namespace operations_research + +#endif // OR_TOOLS_PACKING_ARC_FLOW_SOLVER_H_ diff --git a/ortools/data/vector_bin_packing.proto b/ortools/packing/vector_bin_packing.proto similarity index 96% rename from ortools/data/vector_bin_packing.proto rename to ortools/packing/vector_bin_packing.proto index d1f4614df5..ff0d1471a3 100644 --- a/ortools/data/vector_bin_packing.proto +++ b/ortools/packing/vector_bin_packing.proto @@ -36,11 +36,11 @@ syntax = "proto3"; -option java_package = "com.google.ortools.data.vbp"; +option java_package = "com.google.ortools.packing.vbp"; option java_multiple_files = true; -option csharp_namespace = "Google.OrTools.Data.Vbp"; +option csharp_namespace = "Google.OrTools.Packing.Vbp"; -package operations_research.data.vbp; +package operations_research.packing.vbp; message Item { // Optional name. This is only used for display/debugging purposes. 
diff --git a/ortools/data/vector_bin_packing_parser.cc b/ortools/packing/vector_bin_packing_parser.cc similarity index 95% rename from ortools/data/vector_bin_packing_parser.cc rename to ortools/packing/vector_bin_packing_parser.cc index 5d1da4e0b5..6df8a4ce2c 100644 --- a/ortools/data/vector_bin_packing_parser.cc +++ b/ortools/packing/vector_bin_packing_parser.cc @@ -11,17 +11,17 @@ // See the License for the specific language governing permissions and // limitations under the License. -#include "ortools/data/vector_bin_packing_parser.h" +#include "ortools/packing/vector_bin_packing_parser.h" #include #include "absl/strings/numbers.h" #include "absl/strings/str_split.h" #include "ortools/base/filelineiter.h" -#include "ortools/data/vector_bin_packing.pb.h" +#include "ortools/packing/vector_bin_packing.pb.h" namespace operations_research { -namespace data { +namespace packing { namespace vbp { bool VbpParser::ParseFile(const std::string& data_filename) { @@ -117,5 +117,5 @@ int64_t VbpParser::strtoint64(const std::string& word) { } } // namespace vbp -} // namespace data +} // namespace packing } // namespace operations_research diff --git a/ortools/data/vector_bin_packing_parser.h b/ortools/packing/vector_bin_packing_parser.h similarity index 83% rename from ortools/data/vector_bin_packing_parser.h rename to ortools/packing/vector_bin_packing_parser.h index 848e5385a8..9d6a1aad3c 100644 --- a/ortools/data/vector_bin_packing_parser.h +++ b/ortools/packing/vector_bin_packing_parser.h @@ -17,18 +17,18 @@ // - vector packing solver: (.vbp files) // http://www.dcc.fc.up.pt/~fdabrandao/Vector_Packing_Solver -#ifndef OR_TOOLS_DATA_VECTOR_BIN_PACKING_PARSER_H_ -#define OR_TOOLS_DATA_VECTOR_BIN_PACKING_PARSER_H_ +#ifndef OR_TOOLS_PACKING_VECTOR_BIN_PACKING_PARSER_H_ +#define OR_TOOLS_PACKING_VECTOR_BIN_PACKING_PARSER_H_ #include #include #include #include "ortools/base/integral_types.h" -#include "ortools/data/vector_bin_packing.pb.h" +#include 
"ortools/packing/vector_bin_packing.pb.h" namespace operations_research { -namespace data { +namespace packing { namespace vbp { class VbpParser { @@ -38,7 +38,7 @@ class VbpParser { bool ParseFile(const std::string& data_filename); // We keep the fully qualified name for SWIG. - ::operations_research::data::vbp::VectorBinPackingProblem problem() const { + ::operations_research::packing::vbp::VectorBinPackingProblem problem() const { return vbp_; } @@ -65,7 +65,7 @@ class VbpParser { }; } // namespace vbp -} // namespace data +} // namespace packing } // namespace operations_research -#endif // OR_TOOLS_DATA_VECTOR_BIN_PACKING_PARSER_H_ +#endif // OR_TOOLS_PACKING_VECTOR_BIN_PACKING_PARSER_H_ diff --git a/ortools/data/BUILD b/ortools/scheduling/BUILD similarity index 100% rename from ortools/data/BUILD rename to ortools/scheduling/BUILD diff --git a/ortools/data/CMakeLists.txt b/ortools/scheduling/CMakeLists.txt similarity index 83% rename from ortools/data/CMakeLists.txt rename to ortools/scheduling/CMakeLists.txt index 141a971fa1..e0c3a831df 100644 --- a/ortools/data/CMakeLists.txt +++ b/ortools/scheduling/CMakeLists.txt @@ -1,5 +1,5 @@ file(GLOB _SRCS "*.h" "*.cc") -set(NAME ${PROJECT_NAME}_data) +set(NAME ${PROJECT_NAME}_scheduling) # Will be merge in libortools.so #add_library(${NAME} STATIC ${_SRCS}) @@ -17,4 +17,4 @@ target_link_libraries(${NAME} PRIVATE absl::strings protobuf::libprotobuf ${PROJECT_NAME}::proto) -#add_library(${PROJECT_NAME}::data ALIAS ${NAME}) +#add_library(${PROJECT_NAME}::scheduling ALIAS ${NAME}) diff --git a/ortools/scheduling/__init__.py b/ortools/scheduling/__init__.py new file mode 100644 index 0000000000..1ecf401ebf --- /dev/null +++ b/ortools/scheduling/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2010-2021 Google LLC +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os as _os
+__path__.append(_os.path.join(__path__[0], '..', 'gen', 'ortools', 'scheduling'))
+__path__.append(_os.path.join(__path__[0], '..', '..', 'lib'))
diff --git a/ortools/data/jobshop_scheduling.proto b/ortools/scheduling/jobshop_scheduling.proto
similarity index 97%
rename from ortools/data/jobshop_scheduling.proto
rename to ortools/scheduling/jobshop_scheduling.proto
index a8f91ee425..faf30dd73e 100644
--- a/ortools/data/jobshop_scheduling.proto
+++ b/ortools/scheduling/jobshop_scheduling.proto
@@ -39,11 +39,11 @@
 syntax = "proto3";
-option java_package = "com.google.ortools.data.jssp";
+option java_package = "com.google.ortools.scheduling.jssp";
 option java_multiple_files = true;
-option csharp_namespace = "Google.OrTools.Data.Jssp";
+option csharp_namespace = "Google.OrTools.Scheduling.Jssp";
-package operations_research.data.jssp;
+package operations_research.scheduling.jssp;
 import "google/protobuf/wrappers.proto";
diff --git a/ortools/data/jobshop_scheduling_parser.cc b/ortools/scheduling/jobshop_scheduling_parser.cc
similarity index 99%
rename from ortools/data/jobshop_scheduling_parser.cc
rename to ortools/scheduling/jobshop_scheduling_parser.cc
index 5d1c82e06b..765277b898 100644
--- a/ortools/data/jobshop_scheduling_parser.cc
+++ b/ortools/scheduling/jobshop_scheduling_parser.cc
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include "ortools/data/jobshop_scheduling_parser.h" +#include "ortools/scheduling/jobshop_scheduling_parser.h" #include #include @@ -24,13 +24,13 @@ #include "ortools/base/filelineiter.h" #include "ortools/base/integral_types.h" #include "ortools/base/logging.h" -#include "ortools/data/jobshop_scheduling.pb.h" +#include "ortools/scheduling/jobshop_scheduling.pb.h" ABSL_FLAG(int64_t, jssp_scaling_up_factor, 100000L, "Scaling factor for floating point penalties."); namespace operations_research { -namespace data { +namespace scheduling { namespace jssp { void JsspParser::SetJobs(int job_count) { @@ -541,5 +541,5 @@ int64_t JsspParser::strtoint64(const std::string& word) { } } // namespace jssp -} // namespace data +} // namespace scheduling } // namespace operations_research diff --git a/ortools/data/jobshop_scheduling_parser.h b/ortools/scheduling/jobshop_scheduling_parser.h similarity index 89% rename from ortools/data/jobshop_scheduling_parser.h rename to ortools/scheduling/jobshop_scheduling_parser.h index 9d99849ef9..dc3d404103 100644 --- a/ortools/data/jobshop_scheduling_parser.h +++ b/ortools/scheduling/jobshop_scheduling_parser.h @@ -11,17 +11,17 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-#ifndef OR_TOOLS_DATA_JOBSHOP_SCHEDULING_PARSER_H_ -#define OR_TOOLS_DATA_JOBSHOP_SCHEDULING_PARSER_H_ +#ifndef OR_TOOLS_SCHEDULING_JOBSHOP_SCHEDULING_PARSER_H_ +#define OR_TOOLS_SCHEDULING_JOBSHOP_SCHEDULING_PARSER_H_ #include #include "absl/strings/match.h" #include "ortools/base/integral_types.h" -#include "ortools/data/jobshop_scheduling.pb.h" +#include "ortools/scheduling/jobshop_scheduling.pb.h" namespace operations_research { -namespace data { +namespace scheduling { namespace jssp { class JsspParser { @@ -87,7 +87,7 @@ class JsspParser { }; } // namespace jssp -} // namespace data +} // namespace scheduling } // namespace operations_research -#endif // OR_TOOLS_DATA_JOBSHOP_SCHEDULING_PARSER_H_ +#endif // OR_TOOLS_SCHEDULING_JOBSHOP_SCHEDULING_PARSER_H_ diff --git a/ortools/data/python/CMakeLists.txt b/ortools/scheduling/python/CMakeLists.txt similarity index 94% rename from ortools/data/python/CMakeLists.txt rename to ortools/scheduling/python/CMakeLists.txt index d603d95ac2..6e25e09729 100644 --- a/ortools/data/python/CMakeLists.txt +++ b/ortools/scheduling/python/CMakeLists.txt @@ -5,7 +5,7 @@ set_property(SOURCE rcpsp.i PROPERTY COMPILE_DEFINITIONS swig_add_library(pywraprcpsp TYPE SHARED LANGUAGE python - OUTPUT_DIR ${PROJECT_BINARY_DIR}/python/${PROJECT_NAME}/data + OUTPUT_DIR ${PROJECT_BINARY_DIR}/python/${PROJECT_NAME}/scheduling SOURCES rcpsp.i) target_include_directories(pywraprcpsp PRIVATE ${Python3_INCLUDE_DIRS}) @@ -31,4 +31,4 @@ target_link_libraries(pywraprcpsp PRIVATE ortools::ortools) # see: https://cmake.org/cmake/help/git-stage/command/target_link_libraries.html#command:target_link_libraries if(MSVC) target_link_libraries(pywraprcpsp PRIVATE ${Python3_LIBRARIES}) -endif() +endif() \ No newline at end of file diff --git a/ortools/data/python/rcpsp.i b/ortools/scheduling/python/rcpsp.i similarity index 53% rename from ortools/data/python/rcpsp.i rename to ortools/scheduling/python/rcpsp.i index af47c86323..f1f4982cef 100644 --- 
a/ortools/data/python/rcpsp.i +++ b/ortools/scheduling/python/rcpsp.i @@ -15,24 +15,24 @@ %include "ortools/util/python/proto.i" %{ -#include "ortools/data/rcpsp_parser.h" -#include "ortools/data/rcpsp.pb.h" +#include "ortools/scheduling/rcpsp_parser.h" +#include "ortools/scheduling/rcpsp.pb.h" %} -PY_PROTO_TYPEMAP(ortools.data.rcpsp_pb2, +PY_PROTO_TYPEMAP(ortools.scheduling.rcpsp_pb2, RcpspProblem, - ::operations_research::data::rcpsp::RcpspProblem); + ::operations_research::scheduling::rcpsp::RcpspProblem); %ignoreall %unignore operations_research; -%unignore operations_research::data; -%unignore operations_research::data::rcpsp; -%unignore operations_research::data::rcpsp::RcpspParser; -%unignore operations_research::data::rcpsp::RcpspParser::RcpspParser; -%rename (Problem) operations_research::data::rcpsp::RcpspParser::problem; -%unignore operations_research::data::rcpsp::RcpspParser::ParseFile; +%unignore operations_research::scheduling; +%unignore operations_research::scheduling::rcpsp; +%unignore operations_research::scheduling::rcpsp::RcpspParser; +%unignore operations_research::scheduling::rcpsp::RcpspParser::RcpspParser(); +%rename (Problem) operations_research::scheduling::rcpsp::RcpspParser::problem; +%unignore operations_research::scheduling::rcpsp::RcpspParser::ParseFile(const std::string& file_name); -%include "ortools/data/rcpsp_parser.h" +%include "ortools/scheduling/rcpsp_parser.h" %unignoreall diff --git a/ortools/data/rcpsp.proto b/ortools/scheduling/rcpsp.proto similarity index 97% rename from ortools/data/rcpsp.proto rename to ortools/scheduling/rcpsp.proto index ef973e4417..3acad14b54 100644 --- a/ortools/data/rcpsp.proto +++ b/ortools/scheduling/rcpsp.proto @@ -67,11 +67,11 @@ syntax = "proto3"; -option java_package = "com.google.ortools.data.rcpsp"; +option java_package = "com.google.ortools.scheduling.rcpsp"; option java_multiple_files = true; -option csharp_namespace = "Google.OrTools.Data.Rcpsp"; +option csharp_namespace = 
"Google.OrTools.Scheduling.Rcpsp"; -package operations_research.data.rcpsp; +package operations_research.scheduling.rcpsp; message Resource { // The max capacity of the cumulative. diff --git a/ortools/data/rcpsp_parser.cc b/ortools/scheduling/rcpsp_parser.cc similarity index 99% rename from ortools/data/rcpsp_parser.cc rename to ortools/scheduling/rcpsp_parser.cc index ca857a7493..97c74b1a15 100644 --- a/ortools/data/rcpsp_parser.cc +++ b/ortools/scheduling/rcpsp_parser.cc @@ -11,7 +11,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -#include "ortools/data/rcpsp_parser.h" +#include "ortools/scheduling/rcpsp_parser.h" #include @@ -19,10 +19,10 @@ #include "absl/strings/numbers.h" #include "absl/strings/str_split.h" #include "ortools/base/filelineiter.h" -#include "ortools/data/rcpsp.pb.h" +#include "ortools/scheduling/rcpsp.pb.h" namespace operations_research { -namespace data { +namespace scheduling { namespace rcpsp { RcpspParser::RcpspParser() @@ -605,5 +605,5 @@ int64_t RcpspParser::strtoint64(const std::string& word) { } } // namespace rcpsp -} // namespace data +} // namespace scheduling } // namespace operations_research diff --git a/ortools/data/rcpsp_parser.h b/ortools/scheduling/rcpsp_parser.h similarity index 87% rename from ortools/data/rcpsp_parser.h rename to ortools/scheduling/rcpsp_parser.h index 692118bf35..2c3658bf01 100644 --- a/ortools/data/rcpsp_parser.h +++ b/ortools/scheduling/rcpsp_parser.h @@ -14,17 +14,17 @@ // A Project Scheduling Library parser. // See: http://www.om-db.wi.tum.de/psplib/ # PSP-Lib homepage. 
-#ifndef OR_TOOLS_DATA_RCPSP_PARSER_H_ -#define OR_TOOLS_DATA_RCPSP_PARSER_H_ +#ifndef OR_TOOLS_SCHEDULING_RCPSP_PARSER_H_ +#define OR_TOOLS_SCHEDULING_RCPSP_PARSER_H_ #include #include #include "ortools/base/integral_types.h" -#include "ortools/data/rcpsp.pb.h" +#include "ortools/scheduling/rcpsp.pb.h" namespace operations_research { -namespace data { +namespace scheduling { namespace rcpsp { // RCPSP parser. @@ -35,7 +35,7 @@ class RcpspParser { RcpspParser(); // We keep the fully qualified name for swig. - ::operations_research::data::rcpsp::RcpspProblem problem() const { + ::operations_research::scheduling::rcpsp::RcpspProblem problem() const { return rcpsp_; } @@ -78,7 +78,7 @@ class RcpspParser { }; } // namespace rcpsp -} // namespace data +} // namespace scheduling } // namespace operations_research -#endif // OR_TOOLS_DATA_RCPSP_PARSER_H_ +#endif // OR_TOOLS_SCHEDULING_RCPSP_PARSER_H_ diff --git a/tools/generate_all_deps.sh b/tools/generate_all_deps.sh index 3cef98ea33..0a5b0918aa 100755 --- a/tools/generate_all_deps.sh +++ b/tools/generate_all_deps.sh @@ -4,15 +4,15 @@ tools/generate_deps.sh BASE base tools/generate_deps.sh PORT port base tools/generate_deps.sh UTIL util base port tools/generate_deps.sh INIT init -tools/generate_deps.sh DATA data base port util +tools/generate_deps.sh SCHEDULING scheduling base port util tools/generate_deps.sh LP_DATA lp_data util base algorithms linear_solver tools/generate_deps.sh GLOP glop util base lp_data linear_solver tools/generate_deps.sh GRAPH graph base util tools/generate_deps.sh ALGORITHMS algorithms base util graph linear_solver tools/generate_deps.sh SAT sat base util algorithms graph lp_data glop linear_solver +tools/generate_deps.sh PACKING packing base graph port util linear_solver tools/generate_deps.sh BOP bop base util lp_data glop sat tools/generate_deps.sh GSCIP gscip base port tools/generate_deps.sh GUROBI gurobi base tools/generate_deps.sh LP linear_solver base util lp_data glop bop gscip gurobi 
tools/generate_deps.sh CP constraint_solver base util graph linear_solver sat -