@inproceedings{d4f1e6ae714a4cdf94fc4458da37e6d3,
  title     = {A Communication-Concerned Federated Learning Framework Based on Clustering Selection},
  author    = {Sun, Weifeng and Wang, Ailian and Gao, Zunjing and Zhou, Yipeng},
  editor    = {Sheng, Quan Z. and Dobbie, Gill and Jiang, Jing and Zhang, Xuyun and Zhang, Wei Emma and Manolopoulos, Yannis and Wu, Jia and Mansoor, Wathiq and Ma, Congbo},
  booktitle = {Advanced Data Mining and Applications},
  series    = {Lecture Notes in Computer Science},
  publisher = {Springer Nature},
  address   = {Singapore},
  year      = {2025},
  pages     = {285--300},
  doi       = {10.1007/978-981-96-0814-0_19},
  isbn      = {978-981-96-0813-3},
  language  = {English},
  keywords  = {Federated Learning, Communication, Cluster Selection},
  abstract  = {In federated learning, devices and edge server can jointly train a global model to mine data distributed on different devices. However, the model transmission between them consumes lots of network communication resources. Data heterogeneity and heterogeneous computation capacity cause slow convergence of model and low accuracy. To solve these problems, a cluster selection enhanced federated learning method named FedCS is proposed. Using dynamic clustering method kmeans++, FedCS divides devices with similar data distribution into the same group and performs unbiased sampling. A regularization term is added to prevent the local model from betraying the global model. We distinguish the heterogeneous computation capacity of devices based on dot product between local model updates and aggregated model updates from the same group. Each device is selected dynamically based on the dot product. Simulation results show that FedCS achieves higher accuracy and less communication rounds compared to FedAvg, FedProx, FedNova and FedMMD.},
  note      = {20th International Conference on Advanced Data Mining Applications, ADMA 2024 ; Conference date: 03-12-2024 Through 05-12-2024},
}