Here you can find a consolidated (a.k.a. slowly updated) list of my publications. A frequently updated (and possibly noisy) list of works is available on my Google Scholar profile.
Please find below a short list of highlight publications for my recent activity.
De Caro, Valerio; Di Mauro, Antonio; Bacciu, Davide; Gallicchio, Claudio Communication-Efficient Ridge Regression in Federated Echo State Networks Conference Proceedings of the 31st European Symposium on Artificial Neural Networks, Computational Intelligence and Machine Learning, 2023. De Caro, Valerio; Bacciu, Davide; Gallicchio, Claudio Decentralized Plasticity in Reservoir Dynamical Networks for Pervasive Environments Workshop Proceedings of the 2023 ICML Workshop on Localized Learning: Decentralized Model Updates via Non-Global Objectives, 2023.
De Caro, Valerio; Gallicchio, Claudio; Bacciu, Davide Continual adaptation of federated reservoirs in pervasive environments Journal Article In: Neurocomputing, pp. 126638, 2023, ISSN: 0925-2312. De Caro, Valerio; Gallicchio, Claudio; Bacciu, Davide Federated Adaptation of Reservoirs via Intrinsic Plasticity Conference Proceedings of the 30th European Symposium on Artificial Neural Networks, Computational Intelligence and Machine Learning (ESANN 2022), 2022. Schoitsch, Erwin; Mylonas, Georgios (Ed.) Supporting Privacy Preservation by Distributed and Federated Learning on the Edge Periodical ERCIM News, vol. 127, 2021, visited: 30.09.2021. Bacciu, Davide; Akarmazyan, Siranush; Armengaud, Eric; Bacco, Manlio; Bravos, George; Calandra, Calogero; Carlini, Emanuele; Carta, Antonio; Cassara, Pietro; Coppola, Massimo; Davalas, Charalampos; Dazzi, Patrizio; Degennaro, Maria Carmela; Di Sarli, Daniele; Dobaj, Jürgen; Gallicchio, Claudio; Girbal, Sylvain; Gotta, Alberto; Groppo, Riccardo; Lomonaco, Vincenzo; Macher, Georg; Mazzei, Daniele; Mencagli, Gabriele; Michail, Dimitrios; Micheli, Alessio; Peroglio, Roberta; Petroni, Salvatore; Potenza, Rosaria; Pourdanesh, Farank; Sardianos, Christos; Tserpes, Konstantinos; Tagliabò, Fulvio; Valtl, Jakob; Varlamis, Iraklis; Veledar, Omar TEACHING - Trustworthy autonomous cyber-physical applications through human-centred intelligence Conference Proceedings of the 2021 IEEE International Conference on Omni-Layer Intelligent Systems (COINS), 2021. Bacciu, Davide; Di Sarli, Daniele; Faraji, Pouria; Gallicchio, Claudio; Micheli, Alessio Federated Reservoir Computing Neural Networks Conference Proceedings of the International Joint Conference on Neural Networks (IJCNN 2021), IEEE, 2021.@conference{Caro2023,
title = {Communication-Efficient Ridge Regression in Federated Echo State Networks},
author = {De Caro, Valerio and Di Mauro, Antonio and Bacciu, Davide and Gallicchio, Claudio},
editor = {Verleysen, Michel},
year = {2023},
date = {2023-10-04},
urldate = {2023-10-04},
booktitle = {Proceedings of the 31st European Symposium on Artificial Neural Networks, Computational Intelligence and Machine Learning},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
@workshop{Caro2023b,
title = {Decentralized Plasticity in Reservoir Dynamical Networks for Pervasive Environments},
author = {De Caro, Valerio and Bacciu, Davide and Gallicchio, Claudio},
url = {https://openreview.net/forum?id=5hScPOeDaR, PDF},
year = {2023},
date = {2023-07-29},
urldate = {2023-07-29},
booktitle = {Proceedings of the 2023 ICML Workshop on Localized Learning: Decentralized Model Updates via Non-Global Objectives},
keywords = {},
pubstate = {published},
tppubtype = {workshop}
}
@article{DECARO2023126638,
title = {Continual adaptation of federated reservoirs in pervasive environments},
author = {De Caro, Valerio and Gallicchio, Claudio and Bacciu, Davide},
url = {https://www.sciencedirect.com/science/article/pii/S0925231223007610},
doi = {10.1016/j.neucom.2023.126638},
issn = {0925-2312},
year = {2023},
date = {2023-01-01},
urldate = {2023-01-01},
journal = {Neurocomputing},
pages = {126638},
abstract = {When performing learning tasks in pervasive environments, the main challenge arises from the need of combining federated and continual settings. The former comes from the massive distribution of devices with privacy-regulated data. The latter is required by the low resources of the participating devices, which may retain data for short periods of time. In this paper, we propose a setup for learning with Echo State Networks (ESNs) in pervasive environments. Our proposal focuses on the use of Intrinsic Plasticity (IP), a gradient-based method for adapting the reservoir’s non-linearity. First, we extend the objective function of IP to include the uncertainty arising from the distribution of the data over space and time. Then, we propose Federated Intrinsic Plasticity (FedIP), which is intended for client–server federated topologies with stationary data, and adapts the learning scheme provided by Federated Averaging (FedAvg) to include the learning rule of IP. Finally, we further extend this algorithm for learning to Federated Continual Intrinsic Plasticity (FedCLIP) to equip clients with CL strategies for dealing with continuous data streams. We evaluate our approach on an incremental setup built upon real-world datasets from human monitoring, where we tune the complexity of the scenario in terms of the distribution of the data over space and time. Results show that both our algorithms improve the representation capabilities and the performance of the ESN, while being robust to catastrophic forgetting.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
@conference{Caro2022,
title = {Federated Adaptation of Reservoirs via Intrinsic Plasticity},
author = {De Caro, Valerio and Gallicchio, Claudio and Bacciu, Davide},
editor = {Verleysen, Michel},
url = {https://arxiv.org/abs/2206.11087, Arxiv},
year = {2022},
date = {2022-10-05},
urldate = {2022-10-05},
booktitle = {Proceedings of the 30th European Symposium on Artificial Neural Networks, Computational Intelligence and Machine Learning (ESANN 2022)},
abstract = {We propose a novel algorithm for performing federated learning with Echo State Networks (ESNs) in a client-server scenario. In particular, our proposal focuses on the adaptation of reservoirs by combining Intrinsic Plasticity with Federated Averaging. The former is a gradient-based method for adapting the reservoir's non-linearity in a local and unsupervised manner, while the latter provides the framework for learning in the federated scenario. We evaluate our approach on real-world datasets from human monitoring, in comparison with the previous approach for federated ESNs existing in literature. Results show that adapting the reservoir with our algorithm provides a significant improvement on the performance of the global model.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
@periodical{Bacciu2021e,
title = {Supporting Privacy Preservation by Distributed and Federated Learning on the Edge},
author = {Bacciu, Davide and Dazzi, Patrizio and Gotta, Alberto},
editor = {Schoitsch, Erwin and Mylonas, Georgios},
url = {https://ercim-news.ercim.eu/en127/r-i/supporting-privacy-preservation-by-distributed-and-federated-learning-on-the-edge},
year = {2021},
date = {2021-09-30},
urldate = {2021-09-30},
issuetitle = {ERCIM News},
volume = {127},
keywords = {},
pubstate = {published},
tppubtype = {periodical}
}
@conference{Bacciu2021d,
title = {TEACHING - Trustworthy autonomous cyber-physical applications through human-centred intelligence},
author = {Bacciu, Davide and Akarmazyan, Siranush and Armengaud, Eric and Bacco, Manlio and Bravos, George and Calandra, Calogero and Carlini, Emanuele and Carta, Antonio and Cassara, Pietro and Coppola, Massimo and Davalas, Charalampos and Dazzi, Patrizio and Degennaro, Maria Carmela and Di Sarli, Daniele and Dobaj, Jürgen and Gallicchio, Claudio and Girbal, Sylvain and Gotta, Alberto and Groppo, Riccardo and Lomonaco, Vincenzo and Macher, Georg and Mazzei, Daniele and Mencagli, Gabriele and Michail, Dimitrios and Micheli, Alessio and Peroglio, Roberta and Petroni, Salvatore and Potenza, Rosaria and Pourdanesh, Farank and Sardianos, Christos and Tserpes, Konstantinos and Tagliabò, Fulvio and Valtl, Jakob and Varlamis, Iraklis and Veledar, Omar},
doi = {10.1109/COINS51742.2021.9524099},
year = {2021},
date = {2021-08-23},
urldate = {2021-08-23},
booktitle = {Proceedings of the 2021 IEEE International Conference on Omni-Layer Intelligent Systems (COINS)},
abstract = {This paper discusses the perspective of the H2020 TEACHING project on the next generation of autonomous applications running in a distributed and highly heterogeneous environment comprising both virtual and physical resources spanning the edge-cloud continuum. TEACHING puts forward a human-centred vision leveraging the physiological, emotional, and cognitive state of the users as a driver for the adaptation and optimization of the autonomous applications. It does so by building a distributed, embedded and federated learning system complemented by methods and tools to enforce its dependability, security and privacy preservation. The paper discusses the main concepts of the TEACHING approach and singles out the main AI-related research challenges associated with it. Further, we provide a discussion of the design choices for the TEACHING system to tackle the aforementioned challenges},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
@conference{BacciuIJCNN2021,
title = {Federated Reservoir Computing Neural Networks},
author = {Bacciu, Davide and Di Sarli, Daniele and Faraji, Pouria and Gallicchio, Claudio and Micheli, Alessio},
doi = {10.1109/IJCNN52387.2021.9534035},
year = {2021},
date = {2021-07-18},
urldate = {2021-07-18},
booktitle = {Proceedings of the International Joint Conference on Neural Networks (IJCNN 2021)},
publisher = {IEEE},
abstract = {A critical aspect in Federated Learning is the aggregation strategy for the combination of multiple models, trained on the edge, into a single model that incorporates all the knowledge in the federation. Common Federated Learning approaches for Recurrent Neural Networks (RNNs) do not provide guarantees on the predictive performance of the aggregated model. In this paper we show how the use of Echo State Networks (ESNs), which are efficient state-of-the-art RNN models for time-series processing, enables a form of federation that is optimal in the sense that it produces models mathematically equivalent to the corresponding centralized model. Furthermore, the proposed method is compliant with privacy constraints. The proposed method, which we denote as Incremental Federated Learning, is experimentally evaluated against an averaging strategy on two datasets for human state and activity recognition.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}