Here you can find a consolidated (a.k.a. slowly updated) list of my publications. A frequently updated (and possibly noisy) list of works is available on my Google Scholar profile.
Below is a short list of highlighted publications from my recent activity.
Georgiev, Dobrik; Numeroso, Danilo; Bacciu, Davide; Liò, Pietro Neural Algorithmic Reasoning for Combinatorial Optimisation Proceedings PMLR, 2023. Numeroso, Danilo; Bacciu, Davide; Veličković, Petar Dual Algorithmic Reasoning Conference Proceedings of the Eleventh International Conference on Learning Representations (ICLR 2023), 2023, (Notable Spotlight paper). Numeroso, Danilo; Bacciu, Davide; Veličković, Petar Learning heuristics for A* Workshop ICLR 2022 Workshop on Anchoring Machine Learning in Classical Algorithmic Theory (GroundedML 2022), 2022.
@inproceedings{Georgiev2023,
  title     = {Neural Algorithmic Reasoning for Combinatorial Optimisation},
  author    = {Georgiev, Dobrik and Numeroso, Danilo and Bacciu, Davide and Liò, Pietro},
  year      = {2023},
  date      = {2023-11-27},
  urldate   = {2023-11-27},
  booktitle = {Proceedings of the Learning on Graphs Conference (LOG 2023)},
  publisher = {PMLR},
  abstract  = { Solving NP-hard/complete combinatorial problems with neural networks is a challenging research area that aims to surpass classical approximate algorithms. The long-term objective is to outperform hand-designed heuristics for NP-hard/complete problems by learning to generate superior solutions solely from training data. Current neural-based methods for solving CO problems often overlook the inherent "algorithmic" nature of the problems. In contrast, heuristics designed for CO problems, e.g. TSP, frequently leverage well-established algorithms, such as those for finding the minimum spanning tree. In this paper, we propose leveraging recent advancements in neural algorithmic reasoning to improve the learning of CO problems. Specifically, we suggest pre-training our neural model on relevant algorithms before training it on CO instances. Our results demonstrate that, using this learning setup, we achieve superior performance compared to non-algorithmically informed deep learning models.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
@inproceedings{Numeroso2023,
  title     = {Dual Algorithmic Reasoning},
  author    = {Numeroso, Danilo and Bacciu, Davide and Veličković, Petar},
  url       = {https://openreview.net/pdf?id=hhvkdRdWt1F},
  year      = {2023},
  date      = {2023-05-01},
  urldate   = {2023-05-01},
  booktitle = {Proceedings of the Eleventh International Conference on Learning Representations (ICLR 2023)},
  abstract  = {Neural Algorithmic Reasoning is an emerging area of machine learning which seeks to infuse algorithmic computation in neural networks, typically by training neural models to approximate steps of classical algorithms. In this context, much of the current work has focused on learning reachability and shortest path graph algorithms, showing that joint learning on similar algorithms is beneficial for generalisation. However, when targeting more complex problems, such "similar" algorithms become more difficult to find. Here, we propose to learn algorithms by exploiting duality of the underlying algorithmic problem. Many algorithms solve optimisation problems. We demonstrate that simultaneously learning the dual definition of these optimisation problems in algorithmic learning allows for better learning and qualitatively better solutions. Specifically, we exploit the max-flow min-cut theorem to simultaneously learn these two algorithms over synthetically generated graphs, demonstrating the effectiveness of the proposed approach. We then validate the real-world utility of our dual algorithmic reasoner by deploying it on a challenging brain vessel classification task, which likely depends on the vessels’ flow properties. We demonstrate a clear performance gain when using our model within such a context, and empirically show that learning the max-flow and min-cut algorithms together is critical for achieving such a result.},
  note      = {Notable Spotlight paper},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
@inproceedings{Numeroso2022,
  title     = {Learning Heuristics for {A*}},
  author    = {Numeroso, Danilo and Bacciu, Davide and Veličković, Petar},
  year      = {2022},
  date      = {2022-04-29},
  urldate   = {2022-04-29},
  booktitle = {ICLR 2022 Workshop on Anchoring Machine Learning in Classical Algorithmic Theory (GroundedML 2022)},
  abstract  = {Path finding in graphs is one of the most studied classes of problems in computer science. In this context, search algorithms are often extended with heuristics for a more efficient search of target nodes. In this work we combine recent advancements in Neural Algorithmic Reasoning to learn efficient heuristic functions for path finding problems on graphs. At training time, we exploit multi-task learning to learn jointly the Dijkstra's algorithm and a \emph{consistent} heuristic function for the A* search algorithm. At inference time, we plug our learnt heuristics into the A* algorithm. Results show that running A* over the learnt heuristics value can greatly speed up target node searching compared to Dijkstra, while still finding minimal-cost paths.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {workshop}
}