<?xml version="1.0" encoding="utf-8"?>
<export-example>
  <doc>
    <id>3792</id>
    <completedYear/>
    <publishedYear>2018</publishedYear>
    <thesisYearAccepted/>
    <language>eng</language>
    <pageFirst>837</pageFirst>
    <pageLast>842</pageLast>
    <pageNumber/>
    <edition/>
    <issue/>
    <volume/>
    <type>conferenceobject</type>
    <publisherName>IEEE</publisherName>
    <publisherPlace/>
    <creatingCorporation/>
    <contributingCorporation/>
    <belongsToBibliography>0</belongsToBibliography>
    <completedDate/>
    <publishedDate/>
    <thesisDateAccepted/>
    <title language="eng">Effects on User Experience During Human-Robot Collaboration in Industrial Scenarios</title>
    <abstract language="eng">In smart manufacturing environments robots collaborate with human operators as peers. They even share the same working space and time. An intuitive interaction with different input modalities is decisive to reduce workload and training periods for collaboration. We introduce our interaction system that is able to recognize gestures, actions and objects in a typical smart working scenario. As key aspect, this article considers an empirical investigation of input modalities (touch, gesture), individual differences (performance, recognition rate, previous knowledge) and boundary conditions (level of automation) on user experience. Therefore, answers from 31 participants within two experiments are collected. We show that the arrangement of the human-robot collaboration (input modalities, boundary conditions) has a significant effect on user experience in real-world environments. This effect and the individual differences between participants can be measured utilizing recognition rates and standardized usability questionnaires.</abstract>
    <parentTitle language="eng">2018 IEEE International Conference on Systems, Man, and Cybernetics (SMC), 7-10 Oct. 2018, Miyazaki, Japan</parentTitle>
    <identifier type="doi">10.1109/SMC.2018.00150</identifier>
    <enrichment key="opus.import.date">2022-04-28T19:43:52+00:00</enrichment>
    <enrichment key="opus.source">sword</enrichment>
    <enrichment key="opus.import.user">importuser</enrichment>
    <enrichment key="BegutachtungStatus">peer-reviewed</enrichment>
    <enrichment key="opus.doi.autoCreate">false</enrichment>
    <enrichment key="opus.urn.autoCreate">false</enrichment>
    <licence>No licence - German copyright law applies: § 53 UrhG</licence>
    <author>Clemens Pohlt</author>
    <author>Franz Haubner</author>
    <author>Jonas Lang</author>
    <author>Sandra Rochholz</author>
    <author>Thomas Schlegl</author>
    <author>Sven Wachsmuth</author>
    <subject>
      <language>eng</language>
      <type>uncontrolled</type>
      <value>Collaboration</value>
    </subject>
    <subject>
      <language>eng</language>
      <type>uncontrolled</type>
      <value>Gesture recognition</value>
    </subject>
    <subject>
      <language>eng</language>
      <type>uncontrolled</type>
      <value>Robots</value>
    </subject>
    <subject>
      <language>eng</language>
      <type>uncontrolled</type>
      <value>Task analysis</value>
    </subject>
    <subject>
      <language>eng</language>
      <type>uncontrolled</type>
      <value>Three-dimensional displays</value>
    </subject>
    <subject>
      <language>eng</language>
      <type>uncontrolled</type>
      <value>Training</value>
    </subject>
    <subject>
      <language>eng</language>
      <type>uncontrolled</type>
      <value>Tutorials</value>
    </subject>
    <collection role="institutes" number="FAKMB">Fakultät Maschinenbau</collection>
    <collection role="othforschungsschwerpunkt" number="16316">Produktion und Systeme</collection>
  </doc>
  <doc>
    <id>2704</id>
    <completedYear/>
    <publishedYear>2020</publishedYear>
    <thesisYearAccepted/>
    <language>eng</language>
    <pageFirst>8381</pageFirst>
    <pageLast>8386</pageLast>
    <pageNumber/>
    <edition/>
    <issue/>
    <volume/>
    <type>conferenceobject</type>
    <publisherName>IEEE</publisherName>
    <publisherPlace/>
    <creatingCorporation/>
    <contributingCorporation/>
    <belongsToBibliography>0</belongsToBibliography>
    <completedDate/>
    <publishedDate/>
    <thesisDateAccepted/>
    <title language="eng">Weakly-Supervised Learning for Multimodal Human Activity Recognition in Human-Robot Collaboration Scenarios</title>
    <abstract language="eng">The ability to synchronize expectations among human-robot teams and understand discrepancies between expectations and reality is essential for human-robot collaboration scenarios. To ensure this, human activities and intentions must be interpreted quickly and reliably by the robot using various modalities. In this paper we propose a multimodal recognition system designed to detect physical interactions as well as nonverbal gestures. Existing approaches feature high post-transfer recognition rates which, however, can only be achieved based on well-prepared and large datasets. Unfortunately, the acquisition and preparation of domain-specific samples especially in industrial context is time consuming and expensive. To reduce this effort we introduce a weakly-supervised classification approach. Therefore, we learn a latent representation of the human activities with a variational autoencoder network. Additional modalities and unlabeled samples are incorporated by a scalable product-of-expert sampling approach. The applicability in industrial context is evaluated by two domain-specific collaborative robot datasets. Our results demonstrate, that we can keep the number of labeled samples constant while increasing the network performance by providing additional unprocessed information.</abstract>
    <parentTitle language="eng">2020 IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS): October 25-29, 2020, Las Vegas, NV, USA (virtual)</parentTitle>
    <identifier type="doi">10.1109/IROS45743.2020.9340788</identifier>
    <enrichment key="opus.import.date">2022-02-04T08:58:18+00:00</enrichment>
    <enrichment key="opus.source">sword</enrichment>
    <enrichment key="opus.import.user">importuser</enrichment>
    <enrichment key="BegutachtungStatus">peer-reviewed</enrichment>
    <licence>No licence - German copyright law applies: § 53 UrhG</licence>
    <author>Clemens Pohlt</author>
    <author>Thomas Schlegl</author>
    <author>Sven Wachsmuth</author>
    <subject>
      <language>eng</language>
      <type>uncontrolled</type>
      <value>Activity recognition</value>
    </subject>
    <subject>
      <language>eng</language>
      <type>uncontrolled</type>
      <value>Collaboration</value>
    </subject>
    <subject>
      <language>eng</language>
      <type>uncontrolled</type>
      <value>Intelligent robots</value>
    </subject>
    <subject>
      <language>eng</language>
      <type>uncontrolled</type>
      <value>Reliability</value>
    </subject>
    <subject>
      <language>eng</language>
      <type>uncontrolled</type>
      <value>Service robots</value>
    </subject>
    <subject>
      <language>eng</language>
      <type>uncontrolled</type>
      <value>Synchronization</value>
    </subject>
    <collection role="institutes" number="FakEI">Fakultät Elektro- und Informationstechnik</collection>
    <collection role="institutes" number="FAKMB">Fakultät Maschinenbau</collection>
    <collection role="othforschungsschwerpunkt" number="16316">Produktion und Systeme</collection>
  </doc>
</export-example>
