@inproceedings{EichhornWettachMueller,
  author    = {Eichhorn, Elisabeth and Wettach, Reto and M{\"u}ller, Boris},
  title     = {Recording emotions with MyInnerLife},
  booktitle = {3rd International Conference on Affective Computing and Intelligent Interaction and Workshops (ACII 2009)},
  publisher = {IEEE},
  year      = {2009},
  isbn      = {978-1-4244-4800-5},
  doi       = {10.1109/ACII.2009.5349520},
  pages     = {1--2},
  abstract  = {Our project is a system to express emotions and record them on a long-term basis. In contrast to a lot of the research in the field of affective computing, our project is not dedicated to enabling machines to detect human emotions but to allowing new input methods. This demo presents 'MyInnerLife', a physical input device to express and record emotions non-verbally.},
  subject   = {User interface},
  language  = {en}
}

@inproceedings{Eichhorn,
  author    = {Eichhorn, Elisabeth},
  title     = {Recording inner life},
  booktitle = {TEI '10: Proceedings of the Fourth International Conference on Tangible, Embedded, and Embodied Interaction},
  publisher = {Association for Computing Machinery},
  address   = {New York},
  year      = {2010},
  isbn      = {978-1-60558-841-4},
  doi       = {10.1145/1709886.1709959},
  pages     = {313--314},
  abstract  = {"Recording Inner Life" is the title of my master's thesis. It is a system to express the "inner life" (feelings, affects, and emotions) and record it on a long-term basis in a computational system. In contrast to a lot of the research in the field of affective computing, this project is not dedicated to enabling machines to detect human emotions but to allowing an adequate input method for emotions. It presents "MyInnerLife", a physical input device to express and record emotions non-verbally.},
  subject   = {Emotion},
  language  = {en}
}

@inproceedings{EichhornWettachHornecker,
  author    = {Eichhorn, Elisabeth and Wettach, Reto and Hornecker, Eva},
  title     = {A stroking device for spatially separated couples},
  booktitle = {MobileHCI '08: Proceedings of the 10th International Conference on Human-Computer Interaction with Mobile Devices and Services},
  publisher = {Association for Computing Machinery},
  address   = {New York},
  year      = {2008},
  isbn      = {978-1-59593-952-4},
  doi       = {10.1145/1409240.1409274},
  pages     = {303--306},
  abstract  = {In this paper we present a device to support the communication of couples in long-distance relationships. While the synchronous exchange of factual information over distance is supported by telephone, e-mail, and chat systems, the transmission of nonverbal aspects of communication is still unsatisfactory. Video calls let us see the partner's facial expression in real time. However, to experience a more intimate conversation, physical closeness is needed. Stroking while holding hands is a special and emotional gesture for couples. Hence, we developed a device that enables couples to exchange the physical gesture of stroking regardless of distance and location. The device allows both sending and receiving. A user test supported our concept and provided new insights for future development.},
  subject   = {Design},
  language  = {en}
}