@inproceedings{ChmielaKhalilGleixneretal.2021,
  author = {Chmiela, Antonia and Khalil, Elias B. and Gleixner, Ambros and Lodi, Andrea and Pokutta, Sebastian},
  title = {Learning to Schedule Heuristics in Branch and Bound},
  booktitle = {Thirty-fifth Conference on Neural Information Processing Systems, NeurIPS 2021},
  year = {2021},
  abstract = {Primal heuristics play a crucial role in exact solvers for Mixed Integer Programming (MIP). While solvers are guaranteed to find optimal solutions given sufficient time, real-world applications typically require finding good solutions early on in the search to enable fast decision-making. While much of MIP research focuses on designing effective heuristics, the question of how to manage multiple MIP heuristics in a solver has not received equal attention. Generally, solvers follow hard-coded rules derived from empirical testing on broad sets of instances. Since the performance of heuristics is instance-dependent, using these general rules for a particular problem might not yield the best performance. In this work, we propose the first data-driven framework for scheduling heuristics in an exact MIP solver. By learning from data describing the performance of primal heuristics, we obtain a problem-specific schedule of heuristics that collectively find many solutions at minimal cost. We provide a formal description of the problem and propose an efficient algorithm for computing such a schedule. Compared to the default settings of a state-of-the-art academic MIP solver, we are able to reduce the average primal integral by up to 49\% on a class of challenging instances.},
  language = {en}
}

@article{MuellerMunozGasseetal,
  author = {M{\"u}ller, Benjamin and Mu{\~n}oz, Gonzalo and Gasse, Maxime and Gleixner, Ambros and Lodi, Andrea and Serrano, Felipe},
  title = {On generalized surrogate duality in mixed-integer nonlinear programming},
  journal = {Mathematical Programming},
  volume = {192},
  number = {1},
  pages = {89--118},
  doi = {10.1007/s10107-021-01691-6},
  abstract = {The most important ingredient for solving mixed-integer nonlinear programs (MINLPs) to global $\epsilon$-optimality with spatial branch and bound is a tight, computationally tractable relaxation. Due to both theoretical and practical considerations, relaxations of MINLPs are usually required to be convex. Nonetheless, current optimization solvers can often successfully handle a moderate presence of nonconvexities, which opens the door for the use of potentially tighter nonconvex relaxations. In this work, we exploit this fact and make use of a nonconvex relaxation obtained via aggregation of constraints: a surrogate relaxation. These relaxations were actively studied for linear integer programs in the 70s and 80s, but they have been scarcely considered since. We revisit these relaxations in an MINLP setting and show the computational benefits and challenges they can have. Additionally, we study a generalization of such relaxation that allows for multiple aggregations simultaneously and present the first algorithm that is capable of computing the best set of aggregations.
  We propose a multitude of computational enhancements for improving its practical performance and evaluate the algorithm's ability to generate strong dual bounds through extensive computational experiments.},
  language = {en}
}

@inproceedings{GasseBowlyCappartetal,
  author = {Gasse, Maxime and Bowly, Simon and Cappart, Quentin and Charfreitag, Jonas and Charlin, Laurent and Ch{\'e}telat, Didier and Chmiela, Antonia and Dumouchelle, Justin and Gleixner, Ambros and Kazachkov, Aleksandr M. and Khalil, Elias and Lichocki, Pawel and Lodi, Andrea and Lubin, Miles and Maddison, Chris J. and Morris, Christopher and Papageorgiou, Dimitri J. and Parjadis, Augustin and Pokutta, Sebastian and Prouvost, Antoine and Scavuzzo, Lara and Zarpellon, Giulia and Yang, Linxin and Lai, Sha and Wang, Akang and Luo, Xiaodong and Zhou, Xiang and Huang, Haohan and Shao, Shengcheng and Zhu, Yuanming and Zhang, Dong and Quan, Tao and Cao, Zixuan and Xu, Yang and Huang, Zhewei and Zhou, Shuchang and Chen, Binbin and He, Minggui and Hao, Hao and Zhang, Zhiyu and An, Zhiwu and Mao, Kun},
  title = {The Machine Learning for Combinatorial Optimization Competition (ML4CO): Results and Insights},
  booktitle = {Proceedings of the Conference on Neural Information Processing Systems},
  language = {en}
}