@article{BarzSeligerMarxetal., author = {Barz, Tilman and Seliger, Dominik and Marx, Klemens and Sommer, Andreas and Walter, Sebastian F. and Bock, Hans Georg and Koerkel, Stefan}, title = {State and state of charge estimation for a latent heat storage}, volume = {72}, journal = {Control Engineering Practice}, publisher = {Pergamon-Elsevier Science Ltd}, doi = {10.1016/j.conengprac.2017.11.006}, pages = {151 -- 166}, abstract = {A nonlinear state observer is designed for a thermal energy storage with solid/liquid phase change material (PCM). Using a physical 2D dynamic model, the observer reconstructs transient spatial temperature fields inside the storage and estimates the stored energy and the state of charge. The observer has been successfully tested with a lab-scale latent heat storage with a single-pass tube bundle and the phase change material located in a shell around each tube. It turns out that the observer robustly tracks the real process data with as few as four internal PCM temperature sensors.}, language = {en} }
@inproceedings{SchwindlVolbertBock, author = {Schwindl, Tobias and Volbert, Klaus and Bock, Sebastian}, title = {Fast and Reliable Update Protocols in WSNs During Software Development, Testing and Deployment}, booktitle = {Proceedings of the 7th International Conference on Sensor Networks - SENSORNETS, Funchal, January 22-24, 2018, Madeira, Portugal}, editor = {Fleury, Eric and Ahrens, Andreas and Benavente-Peces, C{\'e}sar and Cam-Winget, Nancy}, publisher = {SCITEPRESS - Science and Technology Publications Lda}, address = {Set{\'u}bal, Portugal}, organization = {Institute for Systems and Technologies of Information, Control and Communication}, isbn = {978-989-758-284-4}, doi = {10.5220/0006534400190030}, pages = {19 -- 30}, abstract = {A lot of research has been done in the area of Wireless Sensor Networks over the past years. Today, Wireless Sensor Networks are deployed in the field in many different applications (e.g. energy management services, heat and water billing, smoke detectors). Nevertheless, research and development in this area continues. After a network is deployed, software updates are performed very rarely, but during development and testing one typical, frequently performed task is deploying new firmware to thousands of nodes. In this paper, we consider such a software update for a special, but well-known and frequently used sensor network platform. Some interesting research papers on updating sensor nodes exist, but our special focus is on the technical update process. In this context, we show why these existing update processes do not cover our challenges. Our goal is to allow a developer to update thousands of nodes reliably and very fast during development and testing. For this purpose, performing the most energy-efficient update is not essential. We do not need a multi-hop protocol, because all devices are in range, e.g., in a laboratory. In our work, we present a model of the update process and give very fast protocols to solve it. The results of our extensive simulations show that the developed protocols achieve fast, scalable and reliable updates.}, language = {en} }
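The SENSORNETS abstract above describes a single-hop, broadcast-based firmware update for thousands of in-range nodes. As a rough illustration of that setting (not the authors' actual protocol), the following Python sketch simulates a chunked broadcast with lossy reception and NACK-driven retransmission of whatever chunks any node still misses; the loss model, parameter values and all names are illustrative assumptions.

import random

def simulate_update(num_nodes=1000, num_chunks=512, loss_prob=0.01, seed=0):
    """Simulate a single-hop chunked firmware broadcast with retransmission.

    Each round rebroadcasts every chunk that some node still lacks; each
    node independently loses each broadcast chunk with probability
    loss_prob. Returns the number of rounds and total chunk transmissions.
    """
    rng = random.Random(seed)
    missing = [set(range(num_chunks)) for _ in range(num_nodes)]
    pending = set(range(num_chunks))       # chunks some node still lacks
    rounds, transmissions = 0, 0
    while pending:
        rounds += 1
        transmissions += len(pending)
        for node in missing:
            # a node keeps a chunk in its missing set only if the broadcast was lost
            node -= {c for c in pending if rng.random() >= loss_prob}
        pending = set().union(*missing)    # union of all nodes' remaining gaps
    return rounds, transmissions

if __name__ == "__main__":
    rounds, tx = simulate_update()
    print(f"update finished after {rounds} rounds, {tx} chunk transmissions")

Because all nodes are within range, one broadcast serves every node at once, and retransmissions are driven only by the union of the missing chunks; this is what makes the single-hop setting cheaper than a general multi-hop dissemination protocol.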
@unpublished{BockWeissLocalConvergence, author = {Bock, Sebastian and Weiß, Martin Georg}, title = {Local Convergence of Adaptive Gradient Descent Optimizers}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:898-opus4-7546}, abstract = {Adaptive Moment Estimation (ADAM) is a very popular training algorithm for deep neural networks and belongs to the family of adaptive gradient descent optimizers. However, to the best of the authors' knowledge, no complete convergence analysis exists for ADAM. The contribution of this paper is a method for the local convergence analysis in batch mode for a deterministic fixed training set, which gives necessary conditions for the hyperparameters of the ADAM algorithm. Due to the local nature of the arguments, the objective function can be non-convex but must be at least twice continuously differentiable. We then apply this procedure to other adaptive gradient descent algorithms and show local convergence with hyperparameter bounds for most of them.}, language = {en} }
@inproceedings{BockWeissAdamProof, author = {Bock, Sebastian and Weiß, Martin Georg}, title = {A Proof of Local Convergence for the Adam Optimizer}, volume = {2019}, booktitle = {Proceedings of the 2019 International Joint Conference on Neural Networks (IJCNN), 2019, Budapest, Hungary, July 14-19}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:898-opus4-501}, pages = {1 -- 8}, abstract = {Adaptive Moment Estimation (Adam) is a very popular training algorithm for deep neural networks, implemented in many machine learning frameworks. To the best of the authors' knowledge, no complete convergence analysis exists for Adam. The contribution of this paper is a method for the local convergence analysis in batch mode for a deterministic fixed training set, which gives necessary conditions for the hyperparameters of the Adam algorithm. Due to the local nature of the arguments, the objective function can be non-convex but must be at least twice continuously differentiable.}, subject = {Neuronales Netz}, language = {en} }
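Both convergence papers above analyze the Adam iteration of Kingma and Ba in batch mode on a deterministic training set. For reference, here is a minimal NumPy sketch of that iteration, with the usual bias correction, run on an illustrative quadratic objective; the objective, step count and hyperparameter values are stand-ins, not the papers' experimental setup.

import numpy as np

def adam_step(theta, m, v, grad, t, alpha=1e-3, beta1=0.9, beta2=0.999, eps=1e-8):
    """One Adam step with bias correction, following Kingma and Ba (2015)."""
    m = beta1 * m + (1 - beta1) * grad            # first moment estimate
    v = beta2 * v + (1 - beta2) * grad**2         # second moment estimate
    m_hat = m / (1 - beta1**t)                    # bias-corrected moments
    v_hat = v / (1 - beta2**t)
    theta = theta - alpha * m_hat / (np.sqrt(v_hat) + eps)
    return theta, m, v

# batch mode on the deterministic objective f(theta) = 0.5 * theta^2
theta, m, v = np.array([1.0]), np.zeros(1), np.zeros(1)
for t in range(1, 2001):
    grad = theta                                  # exact full-batch gradient
    theta, m, v = adam_step(theta, m, v, grad, t)
print(theta)   # should approach the minimizer 0 for these standard hyperparameters

In batch mode the gradient is evaluated on the whole (fixed) training set, so the iteration is a deterministic dynamical system; this is the setting in which the papers derive their hyperparameter conditions for local convergence.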
@unpublished{BockWeissRotation, author = {Bock, Sebastian and Weiß, Martin Georg}, title = {Rotation Detection of Components with Convolutional Neural Networks}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:898-opus4-4120}, abstract = {The main issues in many image processing applications are object recognition and object detection, which answer the questions of whether an object is present and, if it is present, where it is located. Popular object detection algorithms like YOLO use a regression formulation for the whole problem, especially for the bounding box parameters. In industrial production the setting is usually different: the object type is known, and one rather wants to know with high precision where the object is. We study a prototype application in this area in which we identify the rotation of an object in a plane. To solve this problem we use a regression approach with a CNN architecture as a function approximator. We compare our results to standard image processing algorithms, which do not use neural networks, and present quantitative results on the accuracy. CNNs appear at least competitive with classical image processing.}, language = {en} }
@inproceedings{BockWeissLimitCycles, author = {Bock, Sebastian and Weiß, Martin Georg}, title = {Non-Convergence and Limit Cycles in the Adam Optimizer}, booktitle = {Proceedings of the 28th International Conference on Artificial Neural Networks, 2019, Munich, Germany, September 17-19}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:898-opus4-490}, pages = {232 -- 243}, abstract = {One of the most popular training algorithms for deep neural networks is the Adaptive Moment Estimation (Adam) introduced by Kingma and Ba. Despite its success in many applications, there is no satisfactory convergence analysis: only local convergence can be shown for batch mode under some restrictions on the hyperparameters, and counterexamples exist for incremental mode. Recent results show that for simple quadratic objective functions limit cycles of period 2 exist in batch mode, but only for atypical hyperparameters, and only for the algorithm without bias correction. We extend the convergence analysis to all choices of the hyperparameters for quadratic functions. This finally answers the question of convergence for Adam in batch mode in the negative. We analyze the stability of these limit cycles and relate our analysis to other results where approximate convergence was shown, but under the additional assumption of bounded gradients, which does not apply to quadratic functions. The investigation relies heavily on computer algebra due to the complexity of the equations.}, subject = {Neuronales Netz}, language = {en} }
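The limit cycle result above concerns Adam in batch mode on quadratic objectives. The following Python sketch is one way to inspect the asymptotic behaviour of the iterates numerically on f(theta) = 0.5 * theta^2, here without bias correction, matching the setting of the earlier period-2 results cited in the abstract. The hyperparameter values below are illustrative assumptions; per the abstract, whether the tail of the trajectory alternates between two values (a period-2 cycle) or shrinks toward 0 (convergence) depends on their choice.

import math

def adam_tail(theta0=1.0, alpha=0.1, beta1=0.9, beta2=0.999,
              eps=1e-8, steps=50000, tail=6):
    """Iterate Adam without bias correction on f(theta) = 0.5 * theta^2
    and return the last `tail` iterates for inspection."""
    theta, m, v = theta0, 0.0, 0.0
    history = []
    for _ in range(steps):
        g = theta                                  # exact batch-mode gradient
        m = beta1 * m + (1 - beta1) * g
        v = beta2 * v + (1 - beta2) * g * g
        theta -= alpha * m / (math.sqrt(v) + eps)  # update without bias correction
        history.append(theta)
    return history[-tail:]

# inspect the trajectory tail for two illustrative hyperparameter settings
for params in [dict(alpha=0.001, beta1=0.9, beta2=0.999),
               dict(alpha=0.5, beta1=0.0, beta2=0.5)]:
    print(params, adam_tail(**params))

Because the quadratic has an unbounded gradient, the bounded-gradient assumption used in the approximate convergence results mentioned in the abstract does not apply here, which is why this simple objective is informative.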