@article{GroteRammLanuschnyLorenzenetal.2023,
  author    = {Grote-Ramm, Wolfgang and Lanuschny, David and Lorenzen, Finn and Oliveira Brito, Marcel and Sch{\"o}nig, Felix},
  title     = {Continual learning for neural regression networks to cope with concept drift in industrial processes using convex optimisation},
  journal   = {Engineering Applications of Artificial Intelligence},
  volume    = {120},
  publisher = {Elsevier},
  address   = {New York},
  issn      = {1873-6769},
  doi       = {10.1016/j.engappai.2023.105927},
  pages     = {10},
  year      = {2023},
  abstract  = {Process models in industrial applications, e.g. predictive maintenance or automation, are subject to both divergence from the underlying system due to their time-variant nature and to high complexity resulting from a wide operational range being covered. Hence, regression models require high accuracy for the present system state and at the same time need to be valid across the whole system operating space. While accuracy for the current system state can be gained by updating the model on the current data, the overall validity must often be retrieved from historical or design data. We propose a method to find an appropriate compromise for these two demands. A pre-trained artificial neural network (ANN) is continually updated on the current sensor data stream using convex optimisation. Thus, a unique and optimal solution is generated in each update step, while robust regression accuracy on the domain that is not covered by the arriving data subset is maintained. This is achieved by introducing a data management system to provide some historical data, constraining the optimisation problem and manipulating the architecture of the ANN. Models updated with this method show reasonable stability but display plastic behaviour at the current operating point.},
  language  = {en}
}