@article{MB7D222C7,
  author   = {Seo, Sang-Jin and Cho, You-Ze},
  title    = {Performance Evaluation of {DQN}-Based Congestion Control Algorithm for {TCP}},
  journal  = {The Journal of Korean Institute of Communications and Information Sciences},
  year     = {2024},
  issn     = {1226-4717},
  doi      = {10.7840/kics.2024.49.4.567},
  keywords = {TCP Congestion Control, TCP CUBIC, Deep Q-Network, Reinforcement Learning},
  abstract = {The existing TCP congestion control suffers from the problem of slow congestion window (cwnd) increase, leading to underutilization of available bandwidth in environments where there is either a very large link bandwidth or frequent changes in channel characteristics. To address these issues, research on adaptive TCP congestion control using machine learning has been consistently progressing. In this paper, we propose DQN-based NewReno and DQN-based CUBIC, which enhance performance by applying a type of reinforcement learning, Deep-Q Network (DQN) to TCP congestion control algorithms. The implemented algorithms underwent performance evaluation using the Network Simulator 3 (NS3). Experimental results reveal that DQN-based CUBIC, in particular, demonstrates higher throughput compared to traditional congestion control. Additionally, fairness between different congestion control and round-trip time (RTT) fairness is also improved.},
}