@inproceedings{488f1360893f41ceb3c9557e8fbc2b0e,
title = "Fingerprint attack: client de-anonymization in federated learning",
abstract = "Federated Learning allows collaborative training without data sharing in settings where participants do not trust the central server and one another. Privacy can be further improved by ensuring that communication between the participants and the server is anonymized through a shuffle, decoupling the participants' identities from their data. This paper seeks to examine whether such a defense is adequate to guarantee anonymity, by proposing a novel fingerprinting attack over gradients sent by the participants to the server. We show that clustering of gradients can easily break the anonymization in an empirical study of learning federated language models on two language corpora. We then show that training with differential privacy can provide a practical defense against our fingerprint attack.",
author = "Qiongkai Xu and Trevor Cohn and Olga Ohrimenko",
note = "26th European Conference on Artificial Intelligence, ECAI 2023; Conference date: 30-09-2023 Through 04-10-2023",
year = "2023",
doi = "10.3233/FAIA230590",
language = "English",
isbn = "9781643684369",
series = "Frontiers in Artificial Intelligence and Applications",
publisher = "IOS Press",
pages = "2792--2801",
editor = "Kobi Gal and Ann Now{\'e} and Grzegorz J. Nalepa and Roy Fairstein and Roxana R{\u a}dulescu",
booktitle = "ECAI 2023",
address = "Netherlands",
}