@inproceedings{SalehSahuZafaretal.,
  author    = {Saleh, Salah and Sahu, Manish and Zafar, Zuhair and Berns, Karsten},
  title     = {A Multimodal Nonverbal Human-Robot Communication System},
  series    = {VI International Conference on Computational Bioengineering},
  booktitle = {VI International Conference on Computational Bioengineering},
  abstract  = {A socially interactive robot needs human-like behaviors and capabilities to be accepted as a member of human society. The environment in which such a robot operates is everyday human life. The interaction capabilities of current robots are still limited because of the complexity of inter-human interaction. Humans use various verbal and nonverbal cues in their communication; facial expressions and head movements are typical nonverbal cues used as feedback. This paper presents a biologically inspired system for Human-Robot Interaction (HRI). The system is based on the interactive model of inter-human communication proposed by Schramm, in which the robot and its interaction partner can send and receive information at the same time. For example, while the robot is talking, it also perceives the human's feedback via his/her nonverbal cues. In this work, we focus on recognizing human facial expressions. The proposed facial expression recognition technique is based on machine learning: multi-class SVMs are used to recognize the six basic emotions in addition to the neutral expression. The technique uses only depth information of the human face, acquired by a Kinect sensor.},
  language  = {en}
}