@article{AnagnostopoulosTeymuriSeratietal.2023,
  author    = {Anagnostopoulos, Nikolaos Athanasios and Teymuri, Benyamin and Serati, Reza and Rasti, Mehdi},
  title     = {LP-MAB: Improving the Energy Efficiency of LoRaWAN Using a Reinforcement-Learning-Based Adaptive Configuration Algorithm},
  journal   = {Sensors},
  volume    = {23},
  number    = {4},
  editor    = {Xie, Bin and Wang, Ning and Gu, Yi and Stefanidis, Angelos},
  publisher = {MDPI},
  address   = {Basel, Switzerland},
  issn      = {1424-8220},
  doi       = {10.3390/s23042363},
  url       = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus4-11853},
  pages     = {22 pages},
  year      = {2023},
  abstract  = {In the Internet of Things (IoT), Low-Power Wide-Area Networks (LPWANs) are designed to provide low energy consumption while maintaining a long communication range for End Devices (EDs). LoRa is a communication protocol that can cover a wide range with low energy consumption. The efficiency of the LoRa Wide-Area Network (LoRaWAN) can be evaluated against three criteria, namely the Packet Delivery Rate (PDR), Energy Consumption (EC), and coverage area. A set of transmission parameters has to be configured to establish a communication link. These parameters affect the data rate, noise resistance, receiver sensitivity, and EC. The Adaptive Data Rate (ADR) algorithm is a mechanism for configuring the transmission parameters of EDs with the aim of improving the PDR. We therefore introduce a new algorithm based on the Multi-Armed Bandit (MAB) technique that configures the EDs' transmission parameters in a centralized manner on the Network Server (NS) side, while also improving the EC. The performance of the proposed algorithm, the Low-Power Multi-Armed Bandit (LP-MAB), is evaluated through simulation and compared with other approaches in different scenarios. The simulation results indicate that LP-MAB outperforms other algorithms in terms of EC while maintaining a relatively high PDR in various circumstances.},
  language  = {en}
}