@techreport{AzcuyBecquerHeinrich2023, author = {Azcuy Becquer, Claudia and Heinrich, Horst-Alfred}, title = {Data documentation on history visualisations on the covers of all issues of Der Spiegel between 1965 and 2021}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus4-12678}, pages = {126 Seiten}, year = {2023}, abstract = {To answer the research question, all SPIEGEL covers from 1965 to 2021 were examined for references to history topics. The report documents the assignment of the 533 recorded covers to the categories of history narrative, politics of memory and politics of the past. Main article: https://doi.org/10.3167/jemms.2023.150107}, language = {en} } @phdthesis{Stuedlein2022, author = {St{\"u}dlein, Nadine}, title = {Data as a Common Good: Essays on Data Portability and B2B Industrial Data Sharing}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus4-11205}, school = {Universit{\"a}t Passau}, pages = {iii, 96 Seiten}, year = {2022}, abstract = {Data is an important resource in our economy and society, substantially improving overall business efficiency, innovativeness and competitiveness, and shaping our everyday lives. Yet, to leverage the data's full potential, its access and availability are vital. Thus, data sharing across organizations is of particular importance. This thesis examines the role of data sharing in the digital economy and contributes to a better understanding of why data sharing matters, why it is still underutilized, and how data sharing can be encouraged. In doing so, the thesis contributes to the ongoing academic debate as well as the practical and political efforts on how to promote data sharing. The thesis comprises three studies. Study 1 examines personal data sharing among (competing) online services. In particular, it investigates the consequences of Article 20 of the General Data Protection Regulation (GDPR, May 2018), ensuring the right to data portability. This relatively new right allows online service users to transfer any personal data from one service provider to another. Focusing on a) the amount of data provided by users and b) the amount of user data disclosed to third-party data brokers by service providers, the study investigates the right to data portability's effect on competitiveness and consumer surplus. Study 2 and Study 3 focus on non-personal data sharing among competing firms. Study 2 examines the literature to identify and classify barriers to non-personal, machine-generated data sharing. The study explains firms' reluctance to share data and discusses policy and managerial implications for overcoming the data sharing barriers. Study 3 focuses on data sharing via platforms. It investigates the design implications of Business-to-Business (B2B) data sharing platforms for promoting industrial data sharing. In particular, Study 3 investigates the dimensions of control and transparency with regard to their effect on eliciting cooperation and encouraging data sharing among firms. In summary, this thesis examines and reveals how the access and availability of data can be increased by creating beneficial data sharing conditions in B2B relationships.
In particular, the thesis contributes to the understanding of a) the implications of data sharing laws, as defined in the GDPR for personal data, and b) the challenges of non-personal data sharing, which has not yet been successfully established, and measures to address them.}, language = {en} } @phdthesis{Fischer2001, author = {Fischer, Bernd}, title = {Deduction-Based Software Component Retrieval}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-231}, school = {Universit{\"a}t Passau}, year = {2001}, abstract = {Deduction-based software component retrieval is a software reuse technique that uses formal specifications as component descriptors and as search keys; matching components are identified using an automated theorem prover. This dissertation contains a detailed theoretical investigation of the concept as well as the first substantial experimental evaluation of its technical feasibility.}, subject = {Software engineering}, language = {en} } @phdthesis{Braumandl2002, author = {Braumandl, Reinhard}, title = {Quality of Service and Optimization in Data Integration Systems}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-279}, school = {Universit{\"a}t Passau}, year = {2002}, abstract = {This work presents techniques for the construction of a global data integration system. Similar to distributed databases, this system allows declarative queries in order to express user-specific information needs. Scalability towards global data integration systems and openness were major design goals for the architecture and techniques developed in this work. It is shown how service composition, extensibility and quality of service can be supported in an open system of providers for data, functionality for query processing operations, and computing power.}, subject = {Dienstg{\"u}te}, language = {en} } @phdthesis{Baehne2015, author = {B{\"a}hne, Katharina}, title = {The Will to Play. Performance and Construction of Royal Masculinity in Early Modern History Plays}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus4-3329}, school = {Universit{\"a}t Passau}, pages = {415}, year = {2015}, abstract = {This thesis examines concepts of masculinity in the early modern period, with particular attention to the dramatic construction of the figure of the king. Drawing on ten history plays of the 1590s, it first investigates the discursive complexity of royal masculinity in the Renaissance and then, building on this, analyses its performative representation. The theoretical part discusses masculinity and rule in Elizabethan England on the basis of contemporary texts and extends this discussion through gender discourse and the performativity of gender. The subsequent methodological part develops a semiotics of royal masculinity from these insights, which is then evaluated in the analytical part against the selected history plays.}, subject = {M{\"a}nnlichkeit}, language = {en} } @phdthesis{Zukowski2001, author = {Zukowski, Ulrich}, title = {Flexible Computation of the Well-Founded Semantics of Normal Logic Programs}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-226}, school = {Universit{\"a}t Passau}, year = {2001}, abstract = {The well-founded semantics has been accepted as the most relevant semantics for logic-based information systems.
In this dissertation, a framework based on a set of program transformations is presented that generalizes all major computation approaches for the well-founded semantics using a common data structure and provides a common language to describe their evaluation strategies. This rewriting system gives the formal background to analyze and combine different evaluation strategies in a common framework, or to design new algorithms and prove the correctness of their implementations at a high level simply by changing the order of program transformations.}, subject = {Logische Programmierung}, language = {en} } @phdthesis{Wichert2000, author = {Wichert, Carl-Alexander}, title = {ULTRA - A Logic Transaction Programming Language}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-105}, school = {Universit{\"a}t Passau}, year = {2000}, abstract = {A rule-based language for the specification of complex database updates and transactions, with a formal treatment of its syntax and declarative semantics.}, subject = {Programmierlogik}, language = {en} } @phdthesis{Dolzmann2000, author = {Dolzmann, Andreas}, title = {Algorithmic strategies for applicable real quantifier elimination}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-64}, school = {Universit{\"a}t Passau}, year = {2000}, abstract = {One of the most important algorithms for real quantifier elimination is the quantifier elimination by virtual substitution introduced by Weispfenning in 1988. In this thesis we present numerous algorithmic approaches for optimizing this quantifier elimination algorithm. Optimization goals are the actual running time of the implementation of the algorithm and the size of the output formula. Strategies for achieving these goals include simplification of first-order formulas, reduction of the size of the computed elimination set, and condensing, a new replacement for the virtual substitution. Local quantifier elimination computes formulas that are equivalent to the input formula only near a given point. We can make use of this restriction for further optimizing the quantifier elimination by virtual substitution. Finally, we discuss how to solve a large class of scheduling problems by real quantifier elimination. To optimize our algorithm for solving scheduling problems, we make use of the special form of the input formula and of additional information given by the description of the scheduling problem.}, subject = {Quantorenelimination}, language = {en} } @phdthesis{Seidl2006, author = {Seidl, Andreas}, title = {Cylindrical Decomposition Under Application-Oriented Paradigms}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-816}, school = {Universit{\"a}t Passau}, year = {2006}, abstract = {Quantifier elimination (QE) is a powerful tool for problem solving. Once a problem is expressed as a formula, such a method converts it to a simpler, quantifier-free equivalent, thus solving the problem. Particularly many problems live in the domain of real numbers, which makes real QE very interesting. Among the methods implemented so far, QE by cylindrical algebraic decomposition (CAD) is the most important complete method. The aim of this thesis is to develop CAD-based algorithms, which can solve more problems in practice and/or provide more interesting information as output. An algorithm that satisfies these standards would concentrate on generic cases and postpone special and degenerate ones to be treated separately or to be abandoned completely.
It would give a solution that is locally correct for a region the user is interested in. It would give answers that can provide much valuable information, in particular for decision problems. It would combine these methods with more specialized ones for subcases that allow for it. It would exploit degrees of freedom in the algorithms by deciding to proceed in a way that promises to be efficient. It is the focus of this dissertation to treat these challenges. The algorithms described here are implemented in the computer logic system REDLOG and ship with the computer algebra system REDUCE.}, subject = {Quantorenelimination}, language = {en} } @phdthesis{Ellmenreich2004, author = {Ellmenreich, Nils}, title = {PolyAPM: Comparative Parallel Programming with Abstract Parallel Machines}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-447}, school = {Universit{\"a}t Passau}, year = {2004}, abstract = {A parallelising compilation consists of many translation and optimisation stages. The programmer may steer the compiler through these stages by supplying directives with the source code or setting compiler switches. However, for an evaluation of the effects of individual stages, their selection and their best order, this approach is not optimal. To solve this problem, we propose the following method. The compilation is cast as a sequence of program transformations. Each intermediate program runs on an Abstract Parallel Machine (APM), while the program generated by the final transformation runs on the target architecture. Our intermediate programs are all in the same language, Haskell. Thus, each program is executable and still abstract enough to be legible, which enables the evaluation of the transformation that generated it. This evaluation is supported by a cost model, which makes a performance prediction of the abstract program for a real machine. Our project, PolyAPM, provides an acyclic directed graph -- usually a tree -- of APMs whose traversal specifies different combinations and orders of transformations. From one source program, several target programs can be constructed. Their run time characteristics can be evaluated and compared. The goal of PolyAPM is not to support the one-off construction of parallel application programs. For the method's overhead to pay off, the project aims rather at supporting the construction and comparison of many similar variations of a parallel program and a comparative evaluation of parallelisation techniques. With the automation of transformations, PolyAPM can also be used to construct semi-automatic compilation systems.}, subject = {Parallelverarbeitung}, language = {en} } @phdthesis{Forster2004, author = {Forster, Michael}, title = {Crossings in Clustered Level Graphs}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-481}, school = {Universit{\"a}t Passau}, year = {2004}, abstract = {Clustered graphs are an enhanced graph model with a recursive clustering of the vertices according to a given nesting relation. This prime technique for expressing coherence of certain parts of the graph is used in many applications, such as biochemical pathways and UML class diagrams. For directed clustered graphs, level drawings are usually used, leading to clustered level graphs.
In this thesis we analyze the interrelation of clusters and levels and their influence on edge crossings and cluster/edge crossings.}, subject = {Graphenzeichnen}, language = {en} } @phdthesis{Prakash2025, author = {Prakash, Jyoti}, title = {Static Analyses of Interlanguage Interoperations}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus4-15736}, school = {Universit{\"a}t Passau}, pages = {ix, 126 Seiten}, year = {2025}, abstract = {Software developers are moving towards multilingual development, where they combine two languages in a single application to harness the strengths of each language. For example, performance-critical components of a Java application can be implemented in C. This provides flexibility; at the same time, it becomes difficult to statically analyze these applications. The amalgamation of two languages in a single application may introduce bugs ranging from type mismatches to security vulnerabilities. Therefore, it is necessary to develop static analysis techniques to aid developers in multilingual development. In this thesis, we develop techniques to study and analyze these applications. In the first part of the thesis, we study the prevalence of security and privacy vulnerabilities in hybrid apps. Hybrid apps are Android apps that combine both Java and JavaScript components, where the Android part is secured (on the device), while the JavaScript part is exposed to the web. Additionally, some of the Java functions are available to the JavaScript component through an interface called the bridge interface. In pursuit of this goal, we adopt static backtracking of data dependencies to determine the flow of information from the Android component to the web component. Our study revealed the potential sources of unsoundness in the existing static analyses. Static backtracking also induces imprecision in the analysis, i.e., there might be some flows that are not possible at runtime but are nevertheless reported by the analysis. These were mitigated through manual verification. This work reveals that Android-web hybridization can lead to (potential) vulnerabilities that might impact the confidentiality as well as the integrity properties of these apps. From the communication patterns occurring in Android WebView, we noticed that it is feasible for an attacker to jeopardize the integrity of apps by corrupting some value, say an input on the web, through bridge interfaces. Motivated by this, we define an information flow analysis of the bridge interfaces and the associated data flows in hybrid apps. In the first step, we propose a novel threat model in which we model the attacker as someone who wants to influence the behavior of the Android app, i.e., cause an integrity violation. Based on this threat model, we then propose a demand-driven analysis technique to detect confidentiality and integrity violations. Our analysis leverages a demand-driven technique that only analyzes the relevant part of the app for the information flow analysis with the help of function summaries --- avoiding the need for a whole-program analysis. In the second part of the thesis, we generalize the approach to static analysis of multilingual applications. To this end, we investigate the question of combining existing single-language analyses to analyze multilingual programs. To provide an affirmative answer, we define an analysis that leverages single-language analyses for call-graph and pointer analysis of multilingual programs.
Our analysis takes two existing unilingual analyses and analyzes the complete multilingual program. It uses a novel summary specialization technique that resolves the information flows at the bridge interfaces by utilizing independent pre-analyses (modulo foreign function interfaces) of each language component. We apply this technique to analyze Android-NDK and GraalVM Java-Python multilingual applications for generating call-graphs. In summary, we have developed novel techniques for information flow and call-graph analysis of multilingual programs. With this, we motivate the need for static analyses of multilingual applications and their applications, which include vulnerability detection and program understanding, amongst others.}, language = {en} } @phdthesis{Zimmermann2023, author = {Zimmermann, Johanna Anita}, title = {Taking the First Step on the Consumer Privacy Journey - Three Essays on Consumers' Privacy-Related Decision-Making}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus4-13761}, school = {Universit{\"a}t Passau}, pages = {IX, 183 Seiten}, year = {2023}, abstract = {Data has become a necessary resource for firm operations in the modern digital world, explaining firms' growing data gathering efforts. Due to this development, consumers are confronted with decisions to disclose personal data on a daily basis, and have become increasingly intentional about data sharing. While this reluctance to disclose personal data poses challenges for firms, at the same time, it also creates new opportunities for improving privacy-related interactions with customers. This dissertation advocates for a more holistic perspective on consumers' privacy-related decision-making and introduces the consumer privacy journey consisting of three successive phases: pre-data disclosure, data disclosure, and post-data disclosure. In three independent essays, I stress the importance of investigating data requests (i.e., the first step of this journey) as they represent a largely neglected yet potentially powerful means to influence consumers' decision-making and decision-evaluation processes. Based on dual-processing models of decision-making, this dissertation focuses on both consumers' cognitive and affective evaluations of privacy-related information: First, Essay 1 offers novel conceptualizations and operationalizations of consumers' perceived behavioral control over personal data (i.e., cognitive processing) in the context of Artificial Intelligence (AI)-based data disclosure processes. Next, Essay 2 examines consumers' cognitive and affective processing of a data request that entails relevance arguments as well as relevance-illustrating game elements. Finally, Essay 3 categorizes affective cues that trigger consumers' affective processing of a data request and proposes that such cues need to fit with a specific data disclosure situation to foster long-term decision satisfaction.
Collectively, my findings provide research and practice with new insights into consumers' privacy perceptions and behaviors, which are particularly valuable in the context of complex, new (technology-enabled) data disclosure situations.}, language = {en} } @phdthesis{Ihl2020, author = {Ihl, Andreas}, title = {Four investigations of arising phenomena in contemporary work settings: the cases of mindfulness practices and crowdworking online platforms}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus4-8696}, school = {Universit{\"a}t Passau}, pages = {148 Seiten}, year = {2020}, abstract = {Newly arising phenomena in the occupational realm strongly shape contemporary work settings. These developments heavily affect how individuals work within and beyond organizational boundaries. Two phenomena associated with the changing nature of work have been especially prevalent in work settings and intensively discussed in public debates. First, organizations started to introduce mindfulness practices to their workforce. Rooted in spirituality and formerly used in clinical therapy, mindfulness is applied as a human resource development practice to train employees and managers to cope with increased work intensification. Second, digitization and the importance of individualization opened up the path for work settings beyond organizational boundaries on crowdworking online platforms. On these online platforms, workers process tasks independently and remotely. Research has only just started to address the implications and meaning of mindfulness practices in organizations and the rise of crowdworking platforms. Several questions remain unanswered. This dissertation addresses unanswered but pressing questions related to these two phenomena shaping contemporary work settings. Structured in four essays, the first two essays address the application and meaning of mindfulness practices. The first essay analyzes the meaning and interpretations of these new practices within organizations. The second essay takes contextual factors of the organizational environment into account and investigates their relevance for the successful implementation of mindfulness practices. The last two essays are dedicated to work attitudes and behavior on crowdworking online platforms. Essay three captures individuals' motivations for working on such platforms and their effects on workers' work performance. The last essay deals with the role of professional crowdworking online communities in the work experience and assesses the effects of social support in these communities on occupational identification, work meaningfulness and finally on work engagement. Each essay in this dissertation generates new insights into arising phenomena in contemporary work settings.
They address several timely yet unanswered research questions for these rising phenomena and thereby offer a deeper and more nuanced understanding of the role mindfulness practices and crowdworking online platforms play in the context of the future of work.}, subject = {Organisation}, language = {en} } @article{UrhahneWijnia2023, author = {Urhahne, Detlef and Wijnia, Lisette}, title = {Theories of Motivation in Education: an Integrative Framework}, series = {Educational Psychology Review}, volume = {35}, journal = {Educational Psychology Review}, number = {2}, publisher = {Springer Nature}, address = {Berlin}, doi = {10.1007/s10648-023-09767-9}, url = {http://nbn-resolving.de/urn:nbn:de:101:1-2023092911454443208560}, pages = {1 -- 35}, year = {2023}, abstract = {Several major theories have been established in research on motivation in education to describe, explain, and predict the direction, initiation, intensity, and persistence of learning behaviors. The most commonly cited theories of academic motivation include expectancy-value theory, social cognitive theory, self-determination theory, interest theory, achievement goal theory, and attribution theory. To gain a deeper understanding of the similarities and differences among these prominent theories, we present an integrative framework based on an action model (Heckhausen \& Heckhausen, 2018). The basic model is deliberately parsimonious, consisting of six stages of action: the situation, the self, the goal, the action, the outcome, and the consequences. Motivational constructs from each major theory are related to these determinants in the course of action, mainly revealing differences and to a lesser extent commonalities. In the integrative model, learning outcomes represent a typical indicator of goal-directed behavior. Associated recent meta-analyses demonstrate the empirical relationship between the motivational constructs of the six central theories and academic achievement. They provide evidence for the explanatory value of each theory for students' learning.}, language = {en} } @unpublished{Yakouchyk2018, author = {Yakouchyk, Katsiaryna}, title = {Belarusian State Ideology: A Strategy of Flexible Adaptation}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus4-6028}, pages = {20 Seiten}, year = {2018}, abstract = {While in some Eastern European countries a wave of colored revolutions challenged existing political orders, Belarus has remained largely untouched by mass protests. In Minsk, the diffusion of democratic ideas leading to the mobilization of the population meets a stable authoritarian regime. Nevertheless, the stagnating democratization process cannot only be attributed to strong authoritarian rule and abuse of power. Indeed, Belarusian president Alexander Lukashenko still enjoys popularity among a large part of the population. Although international observers report that elections in Belarus have never been free and fair, few commentators doubt that Lukashenko would have won even in democratic elections. This evidence suggests that the regime succeeded in building a strong legitimizing basis, which has not been seriously challenged during the last two decades. This paper explores authoritarian stability in Belarus by looking at the patterns of state ideology. The government has been effectively spreading state ideology since the early 2000s. Ideology departments have been created in almost all state institutions.
The education sector has been affected by the introduction of the compulsory course "The Fundamentals of Belarusian State Ideology" at all universities, and by increasing attention to patriotic education in schools. Based on document analysis, I trace the creation of the "ideological vertical" in Belarus and focus on the issue of ideology in the education and youth policy sectors.}, language = {en} } @phdthesis{ELKhoury2014, author = {EL-Khoury, Vanessa}, title = {Semantic Protection and Personalization of Video Content. PIAF: MPEG Compliant Adaptation Framework Preserving the User Perceived Quality}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-27360}, school = {Universit{\"a}t Passau}, year = {2014}, abstract = {UME is the notion that a user should receive informative adapted content anytime and anywhere. Personalization of videos, which adapts their content according to user preferences, is a vital aspect of achieving the UME vision. User preferences can be translated into several types of constraints that must be considered by the adaptation process, including semantic constraints directly related to the content of the video. To deal with these semantic constraints, a fine-grained adaptation, which can go down to the level of video objects, is necessary. The overall goal of this adaptation process is to provide users with adapted content that maximizes their Quality of Experience (QoE). This QoE depends simultaneously on the level of the user's satisfaction in perceiving the adapted content, the amount of knowledge assimilated by the user, and the adaptation execution time. In video adaptation frameworks, the Adaptation Decision Taking Engine (ADTE), which can be considered the "brain" of the adaptation engine, is responsible for achieving this goal. The task of the ADTE is challenging, as many adaptation operations can satisfy the same semantic constraint, thus giving rise to several feasible adaptation plans. Indeed, for each entity undergoing the adaptation process, the ADTE must decide on the adequate adaptation operator that satisfies the user's preferences while maximizing his/her quality of experience. The first challenge is to objectively measure the quality of the adapted video, taking into consideration the multiple aspects of the QoE. The second challenge is to assess this quality beforehand in order to choose the most appropriate adaptation plan among all possible plans. The third challenge is to resolve conflicting or overlapping semantic constraints, in particular conflicts arising from constraints expressed by the owner's intellectual property rights concerning the modification of the content. In this thesis, we tackled the aforementioned challenges by proposing a Utility Function (UF), which integrates semantic concerns with the user's perceptual considerations. This UF models the relationships among adaptation operations, user preferences, and the quality of the video content. We integrated this UF into an ADTE. This ADTE performs a multi-level piecewise reasoning to choose the adaptation plan that maximizes the user-perceived quality. Furthermore, we included intellectual property rights in the adaptation process. Thereby, we modeled content owner constraints. We dealt with the problem of conflicting user and owner constraints by mapping it to a known optimization problem. Moreover, we developed the SVCAT, which produces structural and high-level semantic annotation according to an original object-based video content model.
We also modeled the user's preferences, proposing extensions to MPEG-7 and MPEG-21. All the developed contributions were carried out as part of a coherent framework called PIAF. PIAF is a complete, modular, MPEG-standard-compliant framework that covers the whole process of semantic video adaptation. We validated this research with qualitative and quantitative evaluations, which assess the performance and the efficiency of the proposed adaptation decision-taking engine within PIAF. The experimental results show that the proposed UF has a high correlation with subjective video quality evaluation.}, subject = {MPEG-Standard}, language = {en} } @phdthesis{Poliakoff2025, author = {Poliakoff, Serge}, title = {From Faking Online Content to Orchestrating Its Creation by Public Workers: Examining Russian Disinformation Production Organisations through Curriculum Vitae Analysis (2013-2024)}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus4-16100}, school = {Universit{\"a}t Passau}, pages = {259 Seiten}, year = {2025}, abstract = {This dissertation examines the evolution of Russian digital authoritarianism, focusing on its organisations that produce disinformation, the transition from the Internet Research Agency to the Patriot Media Group and the emergence of ANO Dialog. It shows how the Internet Research Agency operated as a sophisticated "troll farm" during the early stages of the Russo-Ukrainian war, with activities similar to those of a PR firm. The Patriot Media Group absorbed these activities, blurring the lines between media and disinformation and prioritising metrics-driven propaganda over journalistic professionalism. This dissertation identifies the organisation ANO Dialog as a new model, heavily integrated with state structures, combining digital surveillance, repression, and Soviet-style agitation adapted to the digital age. Using a novel methodology of career profile collection and analysis, my research illustrates the institutionalisation and regional expansion of disinformation tactics in Russian digital authoritarianism, highlighting its operational adaptability and evolving infrastructure.}, language = {en} } @phdthesis{Gruber2025, author = {Gruber, Martin}, title = {Tackling Test Flakiness: Understanding the Problem and Providing Practical Mitigations}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus4-15549}, school = {Universit{\"a}t Passau}, pages = {127 Seiten}, year = {2025}, abstract = {"Software is eating the world". With this phrase from his 2011 Wall Street Journal interview, Marc Andreessen predicted a decade of disruptive software-based innovations affecting various industries. Today, over ten years later, many of his predictions have come true: six of the seven most valuable companies worldwide are computer technology firms, and more than half of the world's population has access to the internet and owns a smartphone, with numbers still growing rapidly. The increasing importance of software has also changed software development. To ensure product quality despite high complexity and fast product cycles, software developers started to adopt continuous integration and regression testing practices: each change to an existing system is automatically tested and reverted in case it breaks any existing functionality. As a result, large software projects are conducting millions of test executions each day. One obstacle to such extensive testing is non-deterministic tests that can pass and fail without any changes to the underlying system or the test itself.
These tests are commonly referred to as flaky tests. Flaky tests break regression testing, as they cause test failures that are unrelated to the changes that are being tested. Developers are forced to investigate these intermittent failures, wasting their time and decreasing their trust in testing. This thesis presents our research that aims at understanding and mitigating test flakiness. To comprehend the nature of flaky tests, we conducted both code-based studies on open-source projects, as well as a developer survey. All our investigations confirmed that flakiness is a frequently occurring and severe issue. The causes of flakiness, however, depend on the domain of the project and the source of the test: while asynchronous waiting and concurrency are overall the most prevalent causes aside from test order dependencies, Python projects tend to experience more flakiness caused by networking and randomness. Flaky tests that were not written by developers but generated automatically tend to be more often caused by randomness or unspecified behavior. To avoid test flakiness in generated tests, developers can use existing flakiness suppression mechanisms of test generation frameworks, which we found to be effective. In general, however, most developers currently address the issue of test flakiness by rerunning failing tests. Nevertheless, they would like more support when dealing with test flakiness, namely better visualizations, automated detection and debugging techniques, and education on the topic. In response to this feedback, we developed and evaluated a generic flakiness prediction approach, as well as an automated flakiness debugging technique. Our flakiness prediction method is easy to use and widely applicable. In contrast to previous techniques, it avoids any form of static or dynamic analysis. Instead, it relies solely on a test's execution result history and version control information, two commonly available artifacts. Additionally, it aims to classify real-world failures as either caused by flakiness or a regression. Previous techniques mainly focused on identifying potential flaky test cases in test suites, a related but less actionable question. An evaluation on a large-scale automotive software project yielded positive results. Our approach showed a strong predictive performance (95.5\% F1-score), outperforming the previously used heuristic. We also introduced Spectrum-based Flaky Fault Localization (SFFL), an automated debugging technique that aims to pinpoint the specific lines in the source code that cause a flaky test's non-deterministic behavior. SFFL extends traditional Spectrum-based Fault Localization (SFL) by considering multiple coverage behaviors of the same test case, a highly common phenomenon among flaky tests. Our evaluation on 101 flaky Python tests showed that SFFL outperforms traditional SFL and was able to narrow down the flaky fault's location to 3.5\% of a project's code base on average.}, subject = {Softwareentwicklung}, language = {en} } @phdthesis{NorbertoSales2022, author = {Norberto Sales, Juliano Efson}, title = {An Explainable Semantic Parser for End-User Development}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus4-10718}, school = {Universit{\"a}t Passau}, pages = {xvi, 165 Seiten}, year = {2022}, abstract = {Programming is a key skill in a world where businesses are driven by digital transformations. 
Although much of the programming demand can be addressed by a simple set of instructions composing libraries and services available on the web, non-technical professionals, such as domain experts and analysts, are still unable to construct their own programs due to the intrinsic complexity of coding. Among other types of end-user development, natural language programming has emerged to allow users to program without the formalism of traditional programming languages, where a tailored semantic parser can translate a natural language utterance to a formal command representation that can be processed by a computational machine. Currently, semantic parsers are typically built on top of a learning method that defines its behaviour based on the patterns behind large training data, whose production is frequently costly and time-consuming. Our research is devoted to studying and proposing a semantic parser for natural language commands targeting a scenario with low availability of training data. Our proposed semantic parser follows a multi-component architecture, composed of a specialised shallow parser that associates natural language commands to predicate-argument structures, integrated with a distributional ranking model that matches the command to a function signature available from an API knowledge base. Systems developed with statistical learning models and complex linguistic resources, such as the proposed semantic parser, do not natively provide an easy way to associate a single feature of the input data with its impact on system behaviour. In this scenario, end-user explanations for intelligent systems have become a strong requirement to increase user confidence and system literacy. Thus, our research designed an explanation model for the proposed semantic parser that fits the heterogeneity of its multi-component architecture. The explanation model explores a hierarchical representation in an increasing degree of technical depth, providing higher-level explanations in the initial layers and moving gradually to those that demand technical knowledge, applying different explanation strategies to better express the approach behind each component. With the support of a user-centred experiment, we compared the utility of different types of explanations and the impact of background knowledge on their preferences.}, language = {en} } @phdthesis{Sui2018, author = {Sui, Zhiyuan}, title = {Security and Privacy Schemes for Demand Response in Smart Grids}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus4-5809}, school = {Universit{\"a}t Passau}, pages = {xx, 153 Seiten}, year = {2018}, abstract = {Smart Grids integrate currently isolated power and communications networks, while introducing several new technologies on the hardware and software sides. One of the most important ingredients is the potential for demand-response programs, which offer the possibility of sending instructions to consumers to adapt their power consumption over a certain period of time. However, high-frequency data collection exposes consumers' usage behaviors, leading to security and privacy challenges for Smart Grids. In this thesis, three cryptographic schemes are constructed for different demand-response programs. In the mandatory incentive-based demand-response program, privacy preservation depends on the power consumption of consumers. An anonymous authentication scheme is constructed for overload auditing and privacy preservation. Consumers' identities are anonymous during normal operation.
The operation center defines an acceptable consumption threshold at times of power shortage. Consumers must follow this instruction and curtail their power consumption to meet the threshold. If they do so, the consumers keep their anonymity, while disobedient consumers, whose power consumption exceeds the threshold, can be identified. Security analysis demonstrates that the constructed anonymous authentication scheme is secure in a random oracle model. In the voluntary incentive-based demand-response program, consumers are categorized as either obedient or disobedient consumers according to their consumption curtailment. Consumers utilize a homomorphic encryption algorithm to encrypt their usage and report the ciphertexts to the operation center periodically. At a time of grid instability, the obedient consumers reduce their consumption and prove their curtailment by using a range proof. Both the usage reports and the proofs from obedient consumers concerning their consumption are reported without leaking private information. In order to meet the real-time requirement, a security model is proposed and a batch verification algorithm is constructed, which is proved to be secure in the defined oracle model. Apart from reward and penalty detection in demand-response programs, theft detection is also an important requirement in Smart Grids. In order to achieve theft detection, this thesis employs dynamic k-times anonymous authentication and blind signatures to create an efficient theft detection mechanism in the prepaid card system, where consumers pay for their consumption in advance and obtain credentials. A consumer sends the credentials anonymously and obtains corresponding credentials during times of consumption. If a thief tries to send reused credentials to steal electricity, his anonymity will be revoked. Finally, this thesis proves that the proposed mechanism finds the real identities of power thieves, without sacrificing the privacy of honest consumers under the random oracle model.}, subject = {Intelligentes Stromnetz}, language = {en} } @phdthesis{Walsh2024, author = {Walsh, Florian}, title = {Computing the Binomial Part of Polynomial Ideals}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus4-15096}, school = {Universit{\"a}t Passau}, pages = {vi, 131 Seiten}, year = {2024}, abstract = {Given an ideal in a polynomial ring over a field, we present a complete algorithm to compute its binomial part.}, language = {en} } @phdthesis{Hintz2009, author = {Hintz, Martin}, title = {Micro-Impact: Deconstructing the complex impact process of a simple microinsurance product in Indonesia}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-20389}, school = {Universit{\"a}t Passau}, year = {2009}, abstract = {This thesis analyses the social impact of Payung Keluarga, an obligatory enhanced credit life microinsurance product launched by Allianz in Indonesia in 2006. Payung Keluarga automatically insures micro-borrowers who take out microcredits from microfinance institutions. In case of death, the outstanding credit balance is canceled and the beneficiary receives twice the original loan as additional payout. Payung Keluarga was conceived to ameliorate the assumed post-mortem financial crisis of low-asset families. Through qualitative-explorative field research from 2006 until 2008, I investigated whether this developmental intention was realized. It is the first impact analysis of microinsurance in Indonesia. In the research process, I took the position of an observing participant.
As operational project leader for Allianz in Indonesia I was virtually doing research on my own work. The resulting challenge to research neutrality is primarily mitigated by the sobering to discerning social impact which was eventually revealed. The majority of insured were married female Muslim petty traders in urban and semi-urban areas around Jakarta. Socio-economically these women stand at the upper end of the low-asset stratum. Their husbands were generally the main bread-winners of the family, and it was mostly them who received the insurance payouts. It could therefore be said that Payung Keluarga benefited the main breadwinner instead of insuring him. The study found that norms of a moral economy are still exerting significant clout on the insured. The moral economy aims at providing "subsistence insurance" for all community members through an intricate collective system of balanced exchanges. The corresponding "premium" is a denouncement of self-interested material asset accumulation. Next to structural reasons, it was this moral restriction that saw the businesses of the women stagnate at low and socially inconspicuous levels. Payung Keluarga did not help to overcome the assumed post-mortem financial crisis. In reality, such crisis did not exist since community and family support among low-asset Muslim Indonesians is normally strong enough to largely provide for the bereft family. This support is driven by the perception of death as a collective risk in the light of the moral economy and hinged on principles of balanced reciprocity. For cultural and religious reasons, the beneficiaries used most of the insurance payouts for funeral ceremonies and repayment of informal debt. With the advent of Payung Keluarga familial post-mortem assistance has been reduced. Funeral costs also seem to have been inflated by the product. It has thereby promoted a long-term societal shift from equality-seeking balanced reciprocity towards status-seeking and socially diversifying general reciprocity. In effect, Payung Keluarga has attacked cooperative social cohesion head-on where it is still strongest in a rapidly modernizing Indonesian society. This discerning and unintended impact of Payung Keluarga is hardly offset by a positive increase in financial literacy among the insured. Furthermore, the effect on "peace of mind" on the insured is ambivalent: while most insured stated to feel safer, some declared to feel less secure with their obligatory coverage for fear of interference with divine predetermination. Its overall developmental impact can be literally described as "micro". Instead of protecting the status-quo of the family, Payung Keluarga has assumed the role of an actor of social change. Not only because it has changed the funeral pattern of the beneficiaries, but also because it promotes a far-reaching conceptual paradigm shift from balanced reciprocity, which forms a core pillar of the insured's social structure, towards general reciprocity. The thesis hypothesizes that with sufficient insurance coverage provided, the insured will increasingly opt out of the coercively egalitarian "subsistence insurance" system. Such opt out will allow the insured to pursue a more aggressive economic asset accumulation strategy, particularly in combination with micro-credit. For the individual, this can be seen as a "liberating fortune" that would induce more women to grow their businesses to significant sizes. In parallel, it would deal a blow to cooperative social cohesion. 
I propose to call this the "double fortune / double blow" dilemma of microfinance. Although this thesis is exemplary, some of its findings can be generalized: The impact of microinsurance is highly dependent on cultural, religious and socio-demographic context. Any microinsurance intervention concerned with social impact should be preceded by a thick contextualization going beyond the usual demand assessments. In turn, microinsurance likewise impacts context as an actor of ambivalent social change. The complex influence of context and the role of microinsurance as an actor of social change have so far been hardly discussed in the development discourse.}, subject = {Versicherung}, language = {en} } @phdthesis{RehnSonigo2009, author = {Rehn-Sonigo, Veronika}, title = {Multi-criteria Mapping and Scheduling of Workflow Applications onto Heterogeneous Platforms}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-22249}, school = {Universit{\"a}t Passau}, year = {2009}, abstract = {The results summarized in this thesis deal with the mapping and scheduling of workflow applications on heterogeneous platforms. In this context, we focus on three different types of streaming applications: * Replica placement in tree networks * In this kind of application, clients are issuing requests to some servers and the question is where to place replicas in the network such that all requests can be processed. We discuss and compare several policies to place replicas in tree networks, subject to server capacity, Quality of Service (QoS) and bandwidth constraints. The client requests are known beforehand, while the number and location of the servers have to be determined. The standard approach in the literature is to enforce that all requests of a client be served by the closest server in the tree. We introduce and study two new policies. One major contribution of this work is to assess the impact of these new policies on the total replication cost. Another important goal is to assess the impact of server heterogeneity, both from a theoretical and a practical perspective. We establish several new complexity results, and provide several efficient polynomial heuristics for NP-complete instances of the problem. * Pipeline workflow applications * We consider workflow applications that can be expressed as linear pipeline graphs. An example for this application type is digital image processing, where images are treated in steady-state mode. Several antagonist criteria should be optimized, such as throughput and latency (or a combination) as well as latency and reliability (i.e., the probability that the computation will be successful) of the application. While simple polynomial algorithms can be found for fully homogeneous platforms, the problem becomes NP-hard when tackling heterogeneous platforms. We present an integer linear programming formulation for this latter problem. Furthermore, we provide several efficient polynomial bi-criteria heuristics, whose relative performances are evaluated through extensive simulation. As a case-study, we provide simulations and MPI experimental results for the JPEG encoder application pipeline on a cluster of workstations. * Complex streaming applications * We consider the execution of applications structured as trees of operators, i.e., the application of one or several trees of operators in steady-state to multiple data objects that are continuously updated at various locations in a network. 
A first goal is to provide the user with a set of processors that should be bought or rented in order to ensure that the application achieves a minimum steady-state throughput, and with the objective of minimizing platform cost. We then extend our model to multiple applications: several concurrent applications are executed at the same time in a network, and one has to ensure that all applications can reach their application throughput. Another contribution of this work is to provide complexity results for different instances of the basic problem, as well as integer linear program formulations of various problem instances. The third contribution is the design of several polynomial-time heuristics, for both application models. One of the primary objectives of the heuristics for concurrent applications is to reuse intermediate results shared by multiple applications.}, language = {en} } @phdthesis{Faber2007, author = {Faber, Peter}, title = {Code Optimization in the Polyhedron Model - Improving the Efficiency of Parallel Loop Nests}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-12512}, school = {Universit{\"a}t Passau}, year = {2007}, abstract = {A safe basis for automatic loop parallelization is the polyhedron model which represents the iteration domain of a loop nest as a polyhedron in \$\mathbb{Z}^n\$. However, turning the parallel loop program in the model to efficient code meets with several obstacles, due to which performance may deteriorate seriously -- especially on distributed memory architectures. We introduce a fine-grained model of the computation performed and show how this model can be applied to create efficient code.}, subject = {Polyeder}, language = {en} } @phdthesis{Weitl2007, author = {Weitl, Franz}, title = {Document Verification with Temporal Description Logics}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-12528}, school = {Universit{\"a}t Passau}, year = {2007}, abstract = {The thesis proposes a new formal framework for checking the content of web documents along individual reading paths. It is vital for the readability of web documents that their content is consistent and coherent along the possible browsing paths through the document. Manually ensuring the coherence of content along the possibly huge number of different browsing paths in a web document is time-consuming and error-prone. Existing methods for document validation and verification are not sufficiently expressive and efficient. The innovative core idea of this thesis is to combine the temporal logic CTL and description logic ALC for the representation of consistency criteria. The resulting new temporal description logics ALCCTL can - in contrast to existing specification formalisms - compactly represent coherence criteria on documents. Verification of web documents is modelled as a model checking problem of ALCCTL. The decidability and polynomial complexity of the ALCCTL model checking problem is proven and a sound, complete, and optimal model checking algorithm is presented. Case studies on real and realistic web documents demonstrate the performance and adequacy of the proposed methods. 
Existing methods such as symbolic model checking or XML-based document validation are outperformed in both expressiveness and speed.}, subject = {Verifikation}, language = {en} } @phdthesis{Nawichai2008, author = {Nawichai, Patcharin}, title = {Ethnic Group Livelihood Strategies and State Integration: Moken and the Hill People in Negotiation with the State}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-12456}, school = {Universit{\"a}t Passau}, year = {2008}, abstract = {In the past, the Moken and hill people held little attraction for external actors like the state because they lived in peripheral areas. As a result, they could maintain their customary way of life. However, as a result of the emergence of national security as a relevant issue, resulting from communist insurgencies, opium cultivation and migration, and of conservation awareness due to the rapid degradation of natural resources in connection with shifting cultivation, deforestation and overexploitation, the Moken and hill people came into the focus of the state. One of the first measures a state takes when integrating new regions is registering the population and trying to control their activities. Registration provides the possibility of becoming a Thai citizen and thereby receiving the civil rights common to Thais. But integration into Thai society implies integration into the state administration as well as the economy. The Moken and hill people face new circumstances that have a far-reaching impact on their livelihood. The perspectives of proper practices as seen by the state contrast with the views of the ethnic minorities. For example, environmental protection policies prohibit traditional practices like gathering sea snails in the case of the Moken, or shifting cultivation as practiced by most hill people. Consequently, the people have to find ways to cope with new situations. One means is to readily adapt to the administrative regulations. More common, however, is to negotiate, which means finding and defining arenas and spaces. This study focuses mainly on how the Moken and hill people apply the issues of citizenship, space and ethnic identity to negotiate with the state in order to maintain their livelihood. It is found that in negotiation with the state, the hill people want to be accepted as Thai and try to establish a positive image instead of being seen as destroyers of the environment, drug producers, uneducated, etc. This attempt to be seen as Thai is accompanied by the demand for recognition of cultural differences. This might be due to the fact that for the hill people it is important to distinguish themselves from other minorities within the same region. In the case of the Moken this interest to establish a positive image is far less pronounced. This is possibly because they are not seen as a threat like the hill people, but only as stupid and primitive. They do not attempt to obliterate the stereotypes of being poor and stupid because these facilitate the avoidance of regulations and provide exceptional treatment.
At the same time, they also want to be recognized as Thai in order to have equal rights with the majority.}, subject = {Nationale Minderheit}, language = {en} } @unpublished{Kreitmeier2008, author = {Kreitmeier, Wolfgang}, title = {Optimal quantization for uniform distributions on Cantor-like sets}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-12449}, year = {2008}, abstract = {In this paper, the problem of optimal quantization is solved for uniform distributions on some higher dimensional, not necessarily self-similar \$N\$-adic Cantor-like sets. The optimal codebooks are determined and the optimal quantization error is calculated. The existence of the quantization dimension is characterized and it is shown that the quantization coefficient does not exist. The special case of self-similarity is also discussed. The conditions imposed are a separation property of the distribution and strict monotonicity of the first \$N\$ quantization error differences. Criteria for these conditions are proved and as special examples modified versions of classical fractal distributions are discussed.}, subject = {Maßtheorie}, language = {en} } @phdthesis{Guppenberger2010, author = {Guppenberger, Michael}, title = {Enhancing Information Systems with Event-Handling - A Non-Invasive Approach}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-22485}, school = {Universit{\"a}t Passau}, year = {2010}, abstract = {Due to the immense advance of widely accessible information systems in industrial applications, science, education and everyday use, it becomes more and more difficult for users of those information systems to keep track of new and updated information. An approach to cope with this problem is to go beyond traditional search facilities and instead use the users' profiles to monitor data changes and to actively inform them about these updates - an aspect that has to be explicitly developed and integrated into a variety of information systems. This is traditionally done in an individual way, depending on the application and its platform. In this dissertation, we present a novel approach to model the semantic interrelations that specify which users to inform about which updates, based on the underlying model of the respective information system. For the first time, a meta-model that allows information system designers to tag an arbitrary data model and thus specify the event-handling semantics is presented. A formal specification of how to interpret meta-models to determine the receivers of the events completes the presented concept. For the practical realization of this new concept, model-driven architecture (MDA) proves to be an ideal technical means. Using our newly developed UML profile based on data-modelling standards, an implementation of the event-handling specification can automatically be generated for a variety of different target platforms, such as relational databases, using triggers. This meta-approach makes the proposed solution ideal with respect to maintainability and genericity. Our solution significantly reduces the overall development efforts for an event-handling facility. In addition, the enhanced model of the information system can be used to generate an implementation that also fulfils non-functional requirements like high performance and extensibility. The overall framework, consisting of the domain specific language (i.e.
the meta-model), formal and technical transformations of how to interpret the enhanced information system model and a cost-based optimizing strategy, constitutes an integrated approach, offering several advantages over traditional implementation techniques: our framework can be applied to new information systems as well as to legacy applications without having to modify existing systems; it offers an extensible, easy-to-use, generic and thus re-usable solution; and it can be tailored to and optimized for many use cases, as the practical evaluation presented in this dissertation verifies.}, subject = {Notifikation}, language = {en} } @phdthesis{Berl2011, author = {Berl, Andreas}, title = {Energy Efficiency in Office Computing Environments}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-22516}, school = {Universit{\"a}t Passau}, year = {2011}, abstract = {The increasing cost of energy and the worldwide desire to reduce CO2 emissions have raised concern about the energy efficiency of information and communication technology. Whilst research has recently focused on data centres, this thesis identifies office computing environments as significant consumers of energy. Office computing environments offer great potential for energy savings: on the one hand, such environments consist of a large number of hosts. On the other hand, these hosts often remain turned on 24~hours per day while being underutilised or even idle. This thesis analyzes the energy consumption within office computing environments and suggests an energy-efficient virtualized office environment. The office environment is virtualized to achieve flexible virtualized office resources that enable an energy-based resource management. This resource management stops idle services and idle hosts from consuming resources within the office and consolidates utilised office services on office hosts. This increases the utilisation of some hosts while other hosts are turned off to save energy. The suggested architecture is based on a decentralized approach that can be applied to all kinds of office computing environments, even if no centralized data centre infrastructure is available. The thesis develops the architecture of the virtualized office environment together with an energy consumption model that is able to estimate the energy consumption of hosts and networks within office environments. The model enables the energy-related comparison of ordinary and virtualized office environments, considering the energy-efficient management of services. Furthermore, this thesis evaluates the energy efficiency and overhead of the suggested approach. First, it theoretically proves the energy efficiency of the virtualized office environment with respect to the energy consumption model. Second, it uses Markov processes to evaluate the impact of user behaviour on the suggested architecture. Finally, the thesis develops a discrete-event simulation that enables the simulation and evaluation of office computing environments with respect to varying virtualization approaches, resource management parameters, user behaviour, and office equipment. 
The evaluation shows that the virtualized office environment saves more than half of the energy consumption within office computing environments, depending on user behaviour and office equipment.}, subject = {Energieeffizienz}, language = {en} } @unpublished{Kreitmeier2011, author = {Kreitmeier, Wolfgang}, title = {Optimal vector quantization in terms of Wasserstein distance}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-22502}, year = {2011}, abstract = {The optimal quantizer in memory-size constrained vector quantization induces a quantization error which is equal to a Wasserstein distortion. However, for the optimal (Shannon-)entropy constrained quantization error, a proof for a similar identity is still missing. Relying on principal results of optimal mass transportation theory, we will prove that the optimal quantization error is equal to a Wasserstein distance. Since we will state the quantization problem in a very general setting, our approach includes the R\'enyi-\$\alpha\$-entropy as a complexity constraint, which includes the special case of (Shannon-)entropy constrained \$(\alpha = 1)\$ and memory-size constrained \$(\alpha = 0)\$ quantization. Additionally, we will derive for certain distance functions codecell convexity for quantizers with a finite codebook. Using other methods, this regularity in codecell geometry has already been proved earlier by Gy\"{o}rgy and Linder.}, subject = {Maßtheorie}, language = {en} } @phdthesis{Stoerzer2006, author = {St{\"o}rzer, Maximilian}, title = {Impact Analysis for AspectJ - A Critical Analysis and Tool-Based Approach to AOP}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-897}, school = {Universit{\"a}t Passau}, year = {2006}, abstract = {Aspect-Oriented Programming (AOP) has been promoted as a solution for modularization problems known in the literature as the tyranny of the dominant decomposition. However, when analyzing AOP languages, it can be doubted that uncontrolled AOP is indeed a silver bullet. The contributions of the work presented in this thesis are twofold. First, we critically analyze AOP language constructs and their effects on program semantics to sensitize programmers and researchers to resulting problems. We further demonstrate that AOP—as available in AspectJ and similar languages—can easily result in less understandable, less evolvable, and thus error-prone code—quite opposite to its claims. Second, we examine how tools relying on both static and dynamic program analysis can help to detect problematic usage of aspect-oriented constructs. We propose to use change impact analysis techniques to both automatically determine the impact of aspects and deal with AOP system evolution. We further introduce an analysis technique to detect potential semantic issues related to undefined advice precedence. 
The thesis concludes with an overview of available open-source AspectJ systems and an assessment of aspect-oriented programming considering both fundamentals of software engineering and the contents of this thesis.}, subject = {Modularit{\"a}t}, language = {en} } @unpublished{Kreitmeier2005, author = {Kreitmeier, Wolfgang}, title = {Optimal Quantization for Dyadic Homogeneous Cantor Distributions}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-3845}, year = {2005}, abstract = {For a large class of dyadic homogeneous Cantor distributions in \$\mathbb{R}\$, which are not necessarily self-similar, we determine the optimal quantizers, give a characterization for the existence of the quantization dimension, and show the non-existence of the quantization coefficient. The class contains all self-similar dyadic Cantor distributions with contraction factor less than or equal to \$\frac{1}{3}\$. For these distributions we calculate the quantization errors explicitly.}, subject = {Maßtheorie}, language = {en} } @phdthesis{Hoelbling2011, author = {H{\"o}lbling, G{\"u}nther}, title = {Personalized Means of Interacting with Multimedia Content}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-24210}, school = {Universit{\"a}t Passau}, year = {2011}, abstract = {Today the world of multimedia is almost completely device- and content-centered. It focuses its energy nearly exclusively on technical issues such as computing power, network specifics or content and device characteristics and capabilities. In most multimedia systems, the presentation of multimedia content and the basic controls for playback are the main issues. Because of this, a very passive user experience, comparable to that of traditional TV, is most often provided. In the face of recent developments and changes in the realm of multimedia and mass media, this "traditional" focus seems outdated. The increasing use of multimedia content on mobile devices, along with the continuous growth in the amount and variety of content available, makes an urgent re-orientation of this domain necessary. In order to highlight the depth of the increasingly difficult situation faced by users of such systems, it is only logical that these individuals be brought to the center of attention. In this thesis we consider these trends and developments by applying concepts and mechanisms to multimedia systems that were first introduced in the domain of user-centrism. Central to the concept of user-centrism is that devices should provide users with an easy way to access services and applications. Thus, the current challenge is to combine mobility, additional services and easy access in a single and user-centric approach. This thesis presents a framework for introducing and supporting several of the key concepts of user-centrism in multimedia systems. Additionally, a new definition of a user-centric multimedia framework has been developed and implemented. To satisfy the user's need for mobility and flexibility, our framework makes seamless media and service consumption possible. The main aim of session mobility is to help people cope with the increasing number of different devices in use. Using a mobile agent system, multimedia sessions can be transferred between different devices in a context-sensitive way. The use of the international standard MPEG-21 guarantees extensibility and the integration of content adaptation mechanisms. 
Furthermore, a concept is presented that allows for individualized and personalized selection and addresses the need to find appropriate content; all of this can be done, using this approach, in an easy and intuitive way. Especially in the realm of television, the demand that such systems cater to the needs of the audience is constantly growing. Our approach combines content-filtering methods, state-of-the-art classification techniques and mechanisms well known from the area of information retrieval and text mining. These are all utilized for the generation of recommendations in a promising new way. Additionally, concepts from the area of collaborative tagging systems are also used. An extensive experimental evaluation resulted in several interesting findings and proves the applicability of our approach. In contrast to the "lean-back" experience of traditional media consumption, interactive media services offer a solution to make possible the active participation of the audience. Thus, we present a concept which enables the use of interactive media services on mobile devices in a personalized way. Finally, a use case for enriching TV with additional content and services demonstrates the feasibility of this concept.}, subject = {Empfehlungssystem}, language = {en} } @phdthesis{Johns2009, author = {Johns, Martin}, title = {Code Injection Vulnerabilities in Web Applications - Exemplified at Cross-site Scripting}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-23626}, school = {Universit{\"a}t Passau}, year = {2009}, abstract = {The majority of all security problems in today's Web applications is caused by string-based code injection, with Cross-site Scripting (XSS) being the dominant representative of this vulnerability class. This thesis discusses XSS and suggests defense mechanisms. We do so in three stages: First, we conduct a thorough analysis of JavaScript's capabilities and explain how these capabilities are utilized in XSS attacks. We subsequently design a systematic, hierarchical classification of XSS payloads. In addition, we present a comprehensive survey of publicly documented XSS payloads which is structured according to our proposed classification scheme. Second, we explore defensive mechanisms which dynamically prevent the execution of some payload types without eliminating the actual vulnerability. More specifically, we discuss the design and implementation of countermeasures against the XSS payloads ``Session Hijacking'', ``Cross-site Request Forgery'', and attacks that target intranet resources. We build upon this and introduce a general methodology for developing such countermeasures: We determine a necessary set of basic capabilities an adversary needs for successfully executing an attack through an analysis of the targeted payload type. The resulting countermeasure relies on revoking one of these capabilities, which in turn renders the payload infeasible. Finally, we present two language-based approaches that prevent XSS and related vulnerabilities: We identify the implicit mixing of data and code during string-based syntax assembly as the root cause of string-based code injection attacks. Consequently, we explore data/code separation in web applications. For this purpose, we propose a novel methodology for token-level data/code partitioning of a computer language's syntactical elements. 
This forms the basis for our two distinct techniques: For one, we present an approach to detect data/code confusion at run-time and demonstrate how this can be used for attack prevention. Furthermore, we show how vulnerabilities can be avoided through altering the underlying programming language. We introduce a dedicated datatype for syntax assembly instead of using string datatypes themselves for this purpose. We develop a formal, type-theoretical model of the proposed datatype and prove that it provides reliable separation between data and code, hence preventing code injection vulnerabilities. We verify our approach's applicability utilizing a practical implementation for the J2EE application server.}, subject = {Computersicherheit}, language = {en} } @phdthesis{Rabl2011, author = {Rabl, Tilmann}, title = {Efficiency in Cluster Database Systems - Dynamic and Workload-Aware Scaling and Allocation}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-25821}, school = {Universit{\"a}t Passau}, year = {2011}, abstract = {Database systems have been vital in all forms of data processing for a long time. In recent years, the amount of processed data has been growing dramatically, even in small projects. Nevertheless, database management systems tend to be static in terms of size and performance, which makes scaling a difficult and expensive task. Because of performance and especially cost advantages, more and more installed systems have a shared-nothing cluster architecture. Due to the massive parallelism of the hardware, programming paradigms from high-performance computing are translated into data processing. Database research struggles to keep up with this trend. A key feature of traditional database systems is to provide transparent access to the stored data. This introduces data dependencies and increases system complexity and inter-process communication. Therefore, many developers are exchanging this feature for better scalability. However, explicitly managing the data distribution and data flow requires a deep understanding of the distributed system and reduces the possibilities for automatic and autonomic optimization. In this thesis we present an approach for database system scaling and allocation that features good scalability although it keeps the data distribution transparent. The first part of this thesis analyzes the challenges and opportunities for self-scaling database management systems in cluster environments. Scalability is a major concern of Internet-based applications. Access peaks that overload the application are a financial risk. Therefore, systems are usually configured to be able to process peaks at any given moment. As a result, server systems often have a very low utilization. In distributed systems the efficiency can be increased by adapting the number of nodes to the current workload. We propose a processing model and an architecture that allow efficient self-scaling of cluster database systems. In the second part we consider different allocation approaches. To increase the efficiency we present a workload-aware, query-centric model. The approach is formalized; optimal and heuristic algorithms are presented. The algorithms optimize the data distribution for local query execution and balance the workload according to the query history. We present different query classification schemes for different forms of partitioning. The approach is evaluated for OLTP- and OLAP-style workloads. 
It is shown that variants of the approach scale well for both fields of application. The third part of the thesis considers benchmarks for large, adaptive systems. First, we present a data generator for cloud-sized applications. Due to its architecture, the data generator can easily be extended and configured. A key feature is the high degree of parallelism that makes linear speedup for arbitrary numbers of nodes possible. To simulate systems with user interaction, we have analyzed a productive online e-learning management system. Based on our findings, we present a model for workload generation that considers the temporal dependency of user interaction.}, subject = {Verteiltes Datenbanksystem}, language = {en} } @phdthesis{Ali2011, author = {Ali, Rashid}, title = {Weyl Gr{\"o}bner Basis Cryptosystems}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-23195}, school = {Universit{\"a}t Passau}, year = {2011}, abstract = {In this thesis, we shall consider a certain class of algebraic cryptosystems called Gr{\"o}bner Basis Cryptosystems. In 1994, Koblitz introduced the Polly Cracker cryptosystem that is based on the theory of Gr{\"o}bner bases in commutative polynomial rings. The security of this cryptosystem relies on the fact that the computation of Gr{\"o}bner bases is, in general, EXPSPACE-hard. Cryptanalysis of these commutative Polly Cracker type cryptosystems is possible by using attacks that do not require the computation of Gr{\"o}bner bases for breaking the system, for example, attacks based on linear algebra. To secure these (commutative) Gr{\"o}bner basis cryptosystems against various attacks, among others, Ackermann and Kreuzer introduced a general class of Gr{\"o}bner Basis Cryptosystems that are based on the difficulty of computing module Gr{\"o}bner bases over general non-commutative rings. The objective of this research is to describe a special class of such cryptosystems by introducing the Weyl Gr{\"o}bner Basis Cryptosystems. We divide this class of cryptosystems into two parts, namely the (left) Weyl Gr{\"o}bner Basis Cryptosystems and the Two-Sided Weyl Gr{\"o}bner Basis Cryptosystems. We suggest using Gr{\"o}bner bases for left and two-sided ideals in Weyl algebras to construct specific instances of such cryptosystems. We analyse the resistance of these cryptosystems to the standard attacks and provide computational evidence that secure Weyl Gr{\"o}bner Basis Cryptosystems can be built using left (resp. two-sided) Gr{\"o}bner bases in Weyl algebras.}, subject = {Gr{\"o}bner-Basis}, language = {en} } @phdthesis{Dietz2011, author = {Dietz, Sebastian}, title = {Autoregressive Neural Network Processes - Univariate, Multivariate and Cointegrated Models with Application to the German Automobile Industry}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-22524}, school = {Universit{\"a}t Passau}, year = {2011}, abstract = {Prediction of economic variables is a basic component not only for economic models, but also for many business decisions. Nevertheless, it is difficult to produce accurate predictions in times of economic crises, which cause nonlinear effects in the data. In this dissertation a nonlinear model for the analysis of time series with nonlinear effects is introduced. Linear autoregressive processes are extended by neural networks to overcome the problem of nonlinearity. This idea is based on the universal approximation property of single hidden layer feedforward neural networks of Hornik (1993). 
Univariate Autoregressive Neural Network Processes (AR-NN) as well as Vector Autoregressive Neural Network Processes (VAR-NN) and Neural Network Vector Error Correction Models (NN-VEC) are introduced. Various methods for variable selection, parameter estimation and inference are discussed. AR-NNs as well as an NN-VEC are used for prediction and analysis of the relationships between four variables related to the German automobile industry: the US Dollar to Euro exchange rate, the industrial output of the German automobile industry, the sales of imported cars in the USA and an index of shares of German automobile manufacturing companies. Prediction results are compared to various linear and nonlinear univariate and multivariate models.}, subject = {Nichtlineare Zeitreihenanalyse}, language = {en} } @phdthesis{Pescher2011, author = {Pescher, Christian}, title = {Social Networks in New Product Forecasts and Marketing}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-24348}, school = {Universit{\"a}t Passau}, year = {2011}, abstract = {Consumers interact with each other and within their social networks. Influentials have a disproportionately large influence on other consumers' preferences and choices, thus having relevant implications for product development, marketing planning and strategic marketing. An important question that previous research has not yet analyzed is whether and how to capture their influence on other consumers in preference-based market forecasts. This study analyzes these aspects for a representative sample of the German mobile phone market. It finds that assigning higher weights to the preferences of influentials significantly increases forecast accuracy. Other chapters of this thesis analyze the role of brokers in consumer networks and the decision process for seeding points in viral marketing campaigns.}, subject = {Marketing}, language = {en} } @phdthesis{Tigges2009, author = {Tigges, Ruth}, title = {Professionals and governance: juggling actors in the reform of the Cambodian administration}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-17923}, school = {Universit{\"a}t Passau}, year = {2009}, abstract = {After two decades of almost complete isolation, Cambodia was rather suddenly integrated in 1991 into the global 'free world'. As a result, the function and the role of the administration had to be modified. These transformations in the name of modernization and development have led to a significant rise of professionals, as these provide the necessary knowledge to manage the ongoing socio-economic and political transformations. Due to their educational socialization in urban centers and overseas, they consider themselves the most modern section of their society. Their common objective is to make the organizations they work for less rigid and to reform them along the lines of technical-pragmatic concepts. Especially for the professionals working within the Cambodian state, the movement towards more efficiency and effectiveness in the administration and the strengthening of the rule of law is seen as a means to increase the credibility of the Government on the one hand and to attract foreign investment on the other. However, due to their functional specialization and expertise, professionals in Cambodia are marginalized from the overall socio-political changes in their environment. This has led to a reduction of their ability to communicate and organize effectively. 
Instead, their knowledge is applied to serve the requirements of their organizations/patron-client networks and not as a resource to form alliances on a national or on a transnational level. A potential exists, though, for the professionals working in the state administration to either form a strategic group through processes of hybridization, or to form a 'neutral' rational administration after the ceding of the 'old guard' and thus their 'disentanglement' from old clientele structures.}, subject = {Governance}, language = {en} } @phdthesis{Houyou2009, author = {Houyou, Amine Mohamed}, title = {Context-Aware Mobility: A Distributed Approach to Context Management}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-17975}, school = {Universit{\"a}t Passau}, year = {2009}, abstract = {The recent development of a whole plethora of new wireless technologies, such as IEEE 802.11, IEEE 802.15, IEEE 802.16, UMTS, and more recently LTE, etc., has triggered several efforts to integrate these technologies in a converged world of transparent and ubiquitous wireless connectivity. Most of these technologies have evolved around a certain use case and with some user behaviour being assumed; however, there is still no holistic solution that adapts access to user needs in an automatic and transparent manner. One major problem that has to be addressed first is mobility management between heterogeneous wireless networks. Current mobility management solutions mostly originate from cellular networking systems, which are operator-specific, centralised, and focused on a single link technology. In order to deal with the wireless diversity of the future wireless and mobile Internet, a new approach is needed. Adaptive wireless connectivity that is tailored to user needs and capabilities is named context-aware mobility management. Context refers to the information describing the surroundings of the user as well as his/her behaviour, and additional semantic information that could optimise the adaptation process. Context management normally entails discovering and tracking context, reasoning based on the discovered information, then adapting (or acting) upon the context-aware application or system. This context management chain is adapted throughout the thesis to the task of context-aware mobility management. The added complexity is necessary to adapt the ubiquitous access to the condition of both the user and the surrounding networks, while assuming that overlapping wireless networks could still be managed in separate management domains. Linking these management domains and aggregating this composite information in the form of a network context is one of the major contributions of this work. An overlay-based solution takes into account this scattered nature of the context management system, which is modelled as a decentralised dynamic location-based service. The proposed architecture is generalised to support ubiquitous location-based services, and a design methodology is proposed to ensure the localised impact of mobility-led context retrieval overhead.}, subject = {Netze}, language = {en} } @phdthesis{Groesslinger2009, author = {Gr{\"o}ßlinger, Armin}, title = {The Challenges of Non-linear Parameters and Variables in Automatic Loop Parallelisation}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-17893}, school = {Universit{\"a}t Passau}, year = {2009}, abstract = {With the rise of manycore processors, parallelism is becoming a mainstream necessity. 
Unfortunately, parallel programming is inherently more difficult than sequential programming; therefore, techniques for automatic parallelisation will become indispensable. We aim at extending the well-known polyhedron model, which promises this automation, beyond some of its current restrictions. Up to now, loop bounds and array subscripts in the modelled codes must be expressions linear in both the variables and the parameters. We lift this restriction and allow certain polynomial expressions instead of linear ones. With our extensions, we are able to handle more programs in all phases of the parallelisation process (dependence analysis, transformation of the program model, code generation). We extend Banerjee's classical dependence analysis to handle one non-linear parameter p, i.e., we are able to determine precisely the solutions of the system of conflict equalities for input programs with non-linear array accesses like A[p*i] depending on the residue class of p. We make contributions to three transformations desirable in automatic parallelisation. First, we show that using a generalised Simplex algorithm, which we have developed, schedules with non-linear parameters like \$\theta(i)=\lfloor i/n \rfloor\$ can be computed. In addition, such schedules can be expressed easily as a quantifier elimination problem, but this approach turns out to be computationally less efficient with the available implementation. As a second transformation, we study parametric tiling, which is used to adapt a parallelised program to the number of available processors at run time. Third, we present a localisation technique to exploit scratchpad memories on architectures on which data caching has to be handled by software. We transform a given code such that it keeps values which are reused in successive iterations of a sequential loop in the scratchpad. An access to a value written in an earlier iteration is served from the scratchpad to accelerate the access. In general, this transformation introduces non-linear loop bounds in the transformed model. Finally, we present an algorithm for generating code for arbitrary semi-algebraic iteration sets, i.e., for iteration sets described by polynomial inequalities in the variables and parameters. This is a vast generalisation of existing polyhedral code generation techniques. Although our algorithm is less efficient than polyhedral code generators, this paves the way for a code generator that can handle arbitrary parametric tilings and other transformations which introduce non-linear parameters (like non-linear schedules and the localisation we present) or even non-linear variables.}, subject = {Automatische Parallelisierung}, language = {en} } @unpublished{Kreitmeier2009, author = {Kreitmeier, Wolfgang}, title = {Hausdorff measure of uniform self-similar fractals}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-17948}, year = {2009}, abstract = {Let \$d \geq 1\$ be an integer and E a self-similar fractal set, which is the attractor of a uniform contracting iterated function system (UIFS) on \$\mathbb{R}^d\$. Denote by D the Hausdorff dimension, by \$H^D(E)\$ the Hausdorff measure and by \$\mathrm{diam}(E)\$ the diameter of E. If the UIFS is parametrised by its contracting factor c, while the set \$\omega\$ of fixed points of the UIFS does not depend on c, we will show the existence of a positive constant depending only on \$\omega\$, such that the Hausdorff dimension is smaller than one and \$H^D(E) = \mathrm{diam}(E)^D\$ if c is smaller than this constant. We apply our result to modified versions of various classical fractals. 
Moreover, we present a parametrised UIFS where \$\omega\$ depends on c and \$H^D(E) < \mathrm{diam}(E)^D\$ if c is small enough.}, subject = {Maßtheorie}, language = {en} } @phdthesis{Bajec2008, author = {Bajec, Luka}, title = {Three Essays On The Bank Lending Channel}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-12827}, school = {Universit{\"a}t Passau}, year = {2008}, abstract = {Monetary policy is commonly assumed to impact commodity demand via relative prices. The bank lending channel (BLC) proposes an additional effect via the quantity of loans. This has found its way into economic textbooks, although it remains empirically controversial. I present various theoretical criticisms of the BLC and its building block, the formal model by Bernanke and Blinder (1988). This model operates with lopsided loan demand, money demand and money supply functions. The logic of the BLC is valid for individual investors who are affected by a cut in bank loans. For a whole sector with a given level of interest rates, a reduction of loans does not, however, dry up investment, but only the holding of money. Since 1988, academics have been using the model by Bernanke and Blinder as a workhorse to empirically address the question of the quantitative relevance of the BLC. Cecchetti (1995) and Hubbard (1995) summarize the overall evolution of the controversial debate up to then. The data used for the research is mainly from the United States. In this literature review, I mainly focus on the next and more recent cohort of empirical investigations on the BLC in Europe that follow papers by Kashyap and Stein (1995, 2000) and Kishan and Opiela (2000) on U.S. transmission mechanisms. It is crucial that these authors are the first to address the question using individual bank balance sheet data for the U.S. Until now, empirical research has produced largely inconsistent results. This is all the more revealing as many of these investigations have deficiencies in controlling for other transmission channels that relate to relative prices. The debate on how monetary policy works has not ended: the BLC, which stresses the importance of potential changes in the supply of loans as a result of monetary policy, and its subsequent impact on aggregate demand, became prominent recently, but conclusive empirical evidence is absent. I attempt to contribute to this debate by conducting a cross-section and panel data analysis of developed and developing countries and by choosing the availability of bank loans as a dependent variable. The latter circumvents identification problems that appear when analyzing the response of aggregated bank loans to monetary policy changes. This evidence finds no support for the prediction of the BLC that there is an additional channel of the monetary transmission mechanism.}, subject = {Bankkredit}, language = {en} } @unpublished{Kreitmeier2009, author = {Kreitmeier, Wolfgang}, title = {Error bounds for high-resolution quantization with R{\´e}nyi-\$\alpha\$-entropy constraints}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-16647}, year = {2009}, abstract = {We consider the problem of optimal quantization with norm exponent \$r > 0\$ for Borel probabilities on \$\mathbb{R}^d\$ under constrained R{\´e}nyi-\$\alpha\$-entropy of the quantizers. If the bound on the entropy becomes large, then sharp asymptotics for the optimal quantization error are well-known in the special cases \$\alpha = 0\$ (memory-constrained quantization) and \$\alpha = 1\$ (Shannon-entropy-constrained quantization). 
In this paper we determine sharp asymptotics for the optimal quantization error under a large entropy bound with entropy parameter \$\alpha \in [1+r/d, \infty]\$. For \$\alpha \in [0, 1+r/d[\$ we specify the asymptotic order of the optimal quantization error under a large entropy bound. The optimal quantization error decays exponentially fast with the entropy bound and the exact decay rate is determined for all \$\alpha \in [0, \infty]\$.}, subject = {Maßtheorie}, language = {en} } @book{Weithmann2003, author = {Weithmann, Michael W.}, title = {Lawrence of Bavaria. The English writer D.H. Lawrence in Bavaria and beyond. Collected Essays. Reisen David Herbert Lawrences in Bayern und in die Alpenl{\"a}nder}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-596}, publisher = {Universit{\"a}t Passau}, year = {2003}, abstract = {The collection of various texts on D. H. Lawrence (1885-1930) concerns the English writer's first journey abroad, which led the young and receptive teacher - already deeply influenced by German philosophy - into Bavaria and the Tyrol. Vividly featured in his novel "Mr Noon" - unpublished during his lifetime - the stay in Germany and Bavaria in the years 1912 and 1913 and the people he met there were to provide the plot of Lawrence's main works. In Munich, Lawrence and his later German wife Frieda von Richthofen (1879-1956) were part of the so-called Schwabing-Boh{\`e}me. In these circles of artists, poets, social reformers, as well as of heroines of free love, anarchists and early fascists, the author received his ideas about sex and eroticism, which found expression in his famous novel "Lady Chatterley's Lover" in 1927/1928. The impact on Lawrence's work of the Austrian doctor Otto Gross (1877-1920), a former lover of Frieda Lawrence, who tried to connect Friedrich Nietzsche's "Will to Power" and Sigmund Freud's psychoanalysis, is especially remarkable. The studies also follow Lawrence's tracks into the Tyrol and his and Frieda's wandering across the Alps to Northern Italy (1912-1913), an adventure that provided the real setting of his novel "Women in Love" of 1920 and is described in his essays "Twilight in Italy" (1916).}, subject = {Lawrence}, language = {en} } @phdthesis{Schwarzfischer2004, author = {Schwarzfischer, Thomas}, title = {Quality and Utility - On the Use of Time-Value Functions to Integrate Quality and Timeliness Flexible Aspects in a Dynamic Real-Time Scheduling Environment}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-619}, school = {Universit{\"a}t Passau}, year = {2004}, abstract = {Scheduling methodologies for real-time applications have been of keen interest to diverse research communities for several decades. Depending on the application area, algorithms have been developed that are tailored to specific requirements with respect to both the individual components of which an application is made up and the computational platform on which it is to be executed. Many real-time scheduling algorithms base their decisions solely or partly on timing constraints expressed by deadlines which must be met even under worst-case conditions. The increasing complexity of computing hardware means that worst-case execution time analysis becomes increasingly pessimistic. Scheduling hard real-time computations according to their worst-case execution times (which is common practice) will thus result, on average, in an increasing amount of spare capacity. 
The main goal of flexible real-time scheduling is to exploit this otherwise wasted capacity. Flexible scheduling schemes have been proposed to increase the ability of a real-time system to adapt to changing requirements and nondeterminism in the application behaviour. These models can be categorised as those whose source of flexibility is the quality of computations and those which are flexible regarding their timing constraints. This work describes a novel model which allows the specification of both flexible timing constraints and quality profiles for an application. Furthermore, it demonstrates the applicability of this specification method to real-world examples and suggests a set of feasible scheduling algorithms for the proposed problem class.}, subject = {Echtzeitsystem}, language = {en} } @phdthesis{Karoly2007, author = {Karoly, Andrea}, title = {Investment Strategies under Uncertainty: Theory and evidence of preemption in case of geographical market entrance}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-12000}, school = {Universit{\"a}t Passau}, year = {2007}, abstract = {This thesis develops an equilibrium framework for the strategic exercise of a geographical market entry option. The theoretical model analyses the impact of asymmetries between the competing firms, such as a follower entry barrier and asymmetric profitability, on the optimal market entry timing and firm values. The duopoly model shows the existence of three types of equilibrium strategies and expresses the critical level of asymmetry which separates the equilibrium regions. The analysis proves that softer competition does not force the stronger firm to enter the market at its preemption point and, as a consequence, rent equalisation between the firms does not occur. However, it is also shown that the critical level of asymmetry is mitigated or strengthened by common economic factors such as the host market profit volatility and the interest rate. Extending the duopoly model to the oligopoly case, the results show that each additional competitor delays the first market entrance compared to the duopolist leader's preemption point. Hence, one additional competitor accelerates the first market entry if the number of competing firms excluding him is odd and has the reverse impact if it is even. It is further observed that continuation may disappear in some subgames of the market entry game in an oligopoly, as a result of which no closed-loop market entry strategy set exists. The equilibrium results of the theoretical models are tested empirically by applying the Cox proportional hazard model to the entry behaviour of 61 retailers into 6 Eastern European countries from 1989 until 2005. The results explain why retailers entered certain markets earlier and why some firms were more successful in seizing the entry opportunity. 
The results show that, driven by the development of demand potential on the host market and by the intensity of competition, foreign retailers had a limited period of time - defined as the "window of opportunity" - to carry out their market entry.}, subject = {Auslandsinvestition}, language = {en} } @phdthesis{Engelen2007, author = {Engelen, Christian}, title = {Three Essays on Intra-Creditor Coordination Failures in Sovereign Debt Restructuring}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-11960}, school = {Universit{\"a}t Passau}, year = {2007}, abstract = {This work comprises three essays that attempt to contribute to the task of reviewing the prevailing (solely market-based) contractual approach for sovereign debt restructuring. These essays particularly focus on aspects of intra-creditor coordination. Although the content of these essays is interconnected, each unit is a stand-alone entity. Essay I: The latest Argentinean debt restructuring was the first time the resolution of a modern financial crisis was completely handed over to the private financial markets without official intervention by public institutions. This essay argues that the resulting haircut for private creditors - the harshest in history - can be at least partially related to an assurance game played by creditors. It shows that incentive schemes provided by the Argentinean government were factors facilitating this haircut. The analysis suggests that, contrary to what is recognized in the literature, the effects of Collective Action Clauses and Exit Consents within a restructuring process are not equal. In the case of Argentina, the inclusion of Collective Action Clauses in the defaulted bonds could have benefited the holdout creditors. Essay II: Experience from events of sovereign debt restructuring over the last decade shows that the prevailing process is mainly shaped by exchange offers launched by the debtor. This suggests that negotiations for changing the repayment terms of the debt take place in an ultimatum game which centers virtually all bargaining power on the debtor side. Creditors vote according to reservation values that might be influenced by fairness considerations both vis-{\`a}-vis the debtor and their fellow creditors. And, as fairness is usually a highly subjective influence, this can result in a heterogeneity of reservation values which might impede effective intra-creditor coordination for the benefit of the debtor. Essay III: Mitigating intra-creditor coordination failures has always been crucial in any proposal for an institutionalized process of restructuring sovereign bonds. However, one source of failure in creditor coordination has not been taken into consideration. The current process of sovereign debt restructuring enables the debtor to launch an exchange offer which provides incentives to inter-temporally discriminate among creditors with different reservation values. Only a creditor representation that can effectively bind in all different creditor types will mitigate this failure and thereby prevent potential conflicts of interest among creditors. Enhancing the current proposal of creditor groups so that creditors can effectively pre-commit can shield the process from this kind of coordination failure. This essay concludes with a proposal for the creation of a creditor representation body which exhibits a mode of operation similar to that of a celebrated institutionalized creditor representation body in the penultimate century. 
To summarize the conclusions drawn from these essays, the contractual approach is not yet able to guarantee effective creditor coordination due to the lack of a comprehensive and forceful permanent creditor representation. Establishing such a permanent representation body would replicate the institutional development experienced during the last heyday of bonds as a source of emerging market financing. This would lead to a significant improvement in creditor coordination. Moreover, since the result of a potential debt restructuring feeds back into the ex-ante lending decision by the individual investor, this improvement could contribute to the welfare-enhancing effects of external financing by private creditors for developing economies.}, subject = {{\"O}ffentliche Schulden}, language = {en} } @unpublished{Kreitmeier2007, author = {Kreitmeier, Wolfgang}, title = {Optimal quantization of probabilities concentrated on small balls}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:739-opus-12010}, year = {2007}, abstract = {We consider probability distributions which are uniformly distributed on a disjoint union of balls with equal radius. For a small enough radius, the optimal quantization error is calculated explicitly in terms of the ball centroids. We apply the results to special self-similar measures.}, subject = {Maßtheorie}, language = {en} }