Here you can find a consolidated (i.e., slowly updated) list of my publications. A frequently updated (and possibly noisy) list of works is available on my Google Scholar profile.
Below is a short list of highlighted publications from my recent activity.
Bacciu, Davide; Conte, Alessio; Landolfi, Francesco Generalizing Downsampling from Regular Data to Graphs Conference Proceedings of the Thirty-Seventh AAAI Conference on Artificial Intelligence, 2023. Bacciu, Davide; Conte, Alessio; Grossi, Roberto; Landolfi, Francesco; Marino, Andrea K-Plex Cover Pooling for Graph Neural Networks Journal Article In: Data Mining and Knowledge Discovery, 2021, (Accepted also as paper to the European Conference on Machine Learning and Principles and Practice of Knowledge Discovery in Databases (ECML-PKDD 2021)). Bacciu, Davide; Conte, Alessio; Grossi, Roberto; Landolfi, Francesco; Marino, Andrea K-plex Cover Pooling for Graph Neural Networks Workshop 34th Conference on Neural Information Processing Systems (NeurIPS 2020), Workshop on Learning Meets Combinatorial Algorithms, 2020.

@conference{Bacciu2023,
  title      = {Generalizing Downsampling from Regular Data to Graphs},
  author     = {Bacciu, Davide and Conte, Alessio and Landolfi, Francesco},
  url        = {https://arxiv.org/abs/2208.03523, Arxiv},
  eprint     = {2208.03523},
  eprinttype = {arXiv},
  year       = {2023},
  date       = {2023-02-07},
  urldate    = {2023-02-07},
  booktitle  = {Proceedings of the Thirty-Seventh {AAAI} Conference on Artificial Intelligence},
  abstract   = {Downsampling produces coarsened, multi-resolution representations of data and it is used, for example, to produce lossy compression and visualization of large images, reduce computational costs, and boost deep neural representation learning. Unfortunately, due to their lack of a regular structure, there is still no consensus on how downsampling should apply to graphs and linked data. Indeed reductions in graph data are still needed for the goals described above, but reduction mechanisms do not have the same focus on preserving topological structures and properties, while allowing for resolution-tuning, as is the case in regular data downsampling. In this paper, we take a step in this direction, introducing a unifying interpretation of downsampling in regular and graph data. In particular, we define a graph coarsening mechanism which is a graph-structured counterpart of controllable equispaced coarsening mechanisms in regular data. We prove theoretical guarantees for distortion bounds on path lengths, as well as the ability to preserve key topological properties in the coarsened graphs. We leverage these concepts to define a graph pooling mechanism that we empirically assess in graph classification tasks, providing a greedy algorithm that allows efficient parallel implementation on GPUs, and showing that it compares favorably against pooling methods in literature.},
  keywords   = {},
  pubstate   = {published},
  tppubtype  = {conference}
}
@article{Bacciu2021b,
  title     = {{K-Plex} Cover Pooling for Graph Neural Networks},
  author    = {Bacciu, Davide and Conte, Alessio and Grossi, Roberto and Landolfi, Francesco and Marino, Andrea},
  editor    = {Appice, Annalisa and Escalera, Sergio and Gámez, José A. and Trautmann, Heike},
  url       = {https://link.springer.com/article/10.1007/s10618-021-00779-z, Published version},
  doi       = {10.1007/s10618-021-00779-z},
  year      = {2021},
  date      = {2021-09-13},
  urldate   = {2021-09-13},
  journal   = {Data Mining and Knowledge Discovery},
  abstract  = {Graph pooling methods provide mechanisms for structure reduction that are intended to ease the diffusion of context between nodes further in the graph, and that typically leverage community discovery mechanisms or node and edge pruning heuristics. In this paper, we introduce a novel pooling technique which borrows from classical results in graph theory that is non-parametric and generalizes well to graphs of different nature and connectivity patterns. Our pooling method, named KPlexPool, builds on the concepts of graph covers and k-plexes, i.e. pseudo-cliques where each node can miss up to k links. The experimental evaluation on benchmarks on molecular and social graph classification shows that KPlexPool achieves state of the art performances against both parametric and non-parametric pooling methods in the literature, despite generating pooled graphs based solely on topological information.},
  note      = {Accepted also as paper to the European Conference on Machine Learning and Principles and Practice of Knowledge Discovery in Databases (ECML-PKDD 2021)},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
@workshop{kplexWS2020,
  title     = {{K-plex} Cover Pooling for Graph Neural Networks},
  author    = {Bacciu, Davide and Conte, Alessio and Grossi, Roberto and Landolfi, Francesco and Marino, Andrea},
  year      = {2020},
  date      = {2020-12-11},
  urldate   = {2020-12-11},
  booktitle = {34th Conference on Neural Information Processing Systems (NeurIPS 2020), Workshop on Learning Meets Combinatorial Algorithms},
  abstract  = {We introduce a novel pooling technique which borrows from classical results in graph theory that is non-parametric and generalizes well to graphs of different nature and connectivity pattern. Our pooling method, named KPlexPool, builds on the concepts of graph covers and $k$-plexes, i.e. pseudo-cliques where each node can miss up to $k$ links. The experimental evaluation on molecular and social graph classification shows that KPlexPool achieves state of the art performances, supporting the intuition that well-founded graph-theoretic approaches can be effectively integrated in learning models for graphs.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {workshop}
}