@article{StritterFreilingKoenigetal.,
  author   = {Stritter, Benjamin and Freiling, Felix and K{\"o}nig, Hartmut and Rietz, Ren{\'e} and Ullrich, Steffen and Gernler, Alexander von and Erlacher, Felix},
  title    = {Cleaning up Web 2.0's Security Mess - at Least Partly},
  journal  = {IEEE Security \& Privacy},
  volume   = {14},
  number   = {2},
  issn     = {1540-7993},
  pages    = {48--71},
  language = {en}
}

@inproceedings{RietzKoenigUllrichetal.,
  author    = {Rietz, Ren{\'e} and K{\"o}nig, Hartmut and Ullrich, Steffen and Stritter, Benjamin},
  title     = {Firewalls for the Web 2.0},
  booktitle = {Proceedings of the 2016 IEEE International Conference on Software Quality, Reliability and Security (QRS), 1-3 Aug. 2016},
  publisher = {IEEE},
  address   = {Piscataway, NJ},
  year      = {2016},
  isbn      = {978-1-5090-4127-5},
  doi       = {10.1109/QRS.2016.36},
  pages     = {242--253},
  abstract  = {The widespread use of Web 2.0 technologies poses an increasing threat to users and related systems. Modern web applications and online services are nowadays based on Web 2.0 technologies, such as JavaScript and AJAX, and thus on the execution of active content in the users' browsers. Firewalls are a common means of connecting securely to the internet. In this paper, we propose a novel perimeter firewall architecture for web applications that addresses the entire process chain, from the data transfer via HTTP, through the analysis of manipulated web documents, to the extraction and analysis of active content. The basic idea is to allow only a restricted set of web applications to pass the firewall, based on a model of their HTML and JavaScript structure. We evaluate the capability of the resulting models to identify the underlying web applications and to ward off additional malicious inputs.},
  language  = {en}
}