@article{MF4D823B2,
  title    = "Autoencoder-Based Model Compression Schemes for Federated Learning",
  journal  = "The Journal of Korean Institute of Communications and Information Sciences",
  year     = "2023",
  issn     = "1226-4717",
  doi      = "10.7840/kics.2023.48.3.295",
  author   = "Do-Yun Lee and Hoon Lee",
  keywords = "Federated Learning, FedAvg, model compression, autoencoder, deep learning, parameter sharing",
  abstract = "Edge intelligence has emerged as a key enabler of intelligent 6G networks. The federated learning (FL) algorithm is regarded as a promising solution for realizing remote, decentralized training of artificial intelligence (AI) models distributed over multiple clients. In an FL system, AI model parameters must be exchanged between the server and clients, which incurs a prohibitive communication cost. To overcome this challenge, this paper proposes an autoencoder approach to compress AI model parameters in the FL system. The inference steps of the proposed autoencoder are carefully designed so that the weighted averaging operations of the FL algorithm can be injected into the end-to-end compression-reconstruction process. Numerical results demonstrate the effectiveness of the proposed method over conventional schemes."
}