@misc{13383,
  author       = {Ehm, Thomas},
  keywords     = {Berührungslose Interaktion, Gestensteuerung, Webcam-Tracking, MediaPipe, Babylon.js, Web-Game, User Interface},
  pages        = {71},
  publisher    = {Technische Hochschule Ostwestfalen-Lippe},
  title        = {{Berührungslose Interaktion: Konzeption und Entwicklung eines Videospiels mit kamerabasierter Gestensteuerung}},
  year         = {2026},
}

@inproceedings{4096,
  abstract     = {Projection-based assistive systems have shown to be a promising technology to support workers during manual assembly processes in industrial manufacturing by projecting instructions into the working area. While existing studies have investigated various aspects of these systems, little research has been conducted regarding the way in which the user accesses the provided instructions. In this paper we analyze the eye movements of users during the repeated execution of an assembly task at a projection-based assistive system in order to gain insights into the utilization of the presented instructions. For this purpose, we analyzed eye tracking recordings from a user study with 15 participants to investigate the sequences in which the respective instructions are observed by the users. The results show a significantly lower number of nonlinear gaze sequences as well as a significantly higher number of steps without observing the instructions during the repeated use of the assistive system. In addition, there was a significantly lower task completion time during repeated use of the assistive system.},
  author       = {Heinz, Mario and Büttner, Sebastian and Röcker, Carsten},
  booktitle    = {22nd International Conference on Human-Computer Interaction},
  isbn         = {978-3-030-50343-7},
  keywords     = {Assistive systems, Eye tracking, Human behavior},
  location     = {Copenhagen, Denmark},
  pages        = {259--272},
  publisher    = {Springer},
  title        = {Exploring Users' Eye Movements When Using Projection-based Assembly Assistive Systems},
  doi          = {10.1007/978-3-030-50344-4_19},
  volume       = {12203},
  year         = {2020},
}

@inproceedings{4259,
  abstract     = {This paper presents a prototype of an intelligent assistive system for workers in stationary manual assembly using projection-based augmented reality (AR) and intelligent hand tracking. By using depth cameras, the system can track the hands of the user and makes the user aware of wrong picking actions or errors in the assembly process. The system automatically adapts the digital projection-based overlay according to the current work situation. The main research contribution of our work is the presentation of a novel hand-tracking algorithm. In addition, we present the results of an user study of the system that shows the challenges and opportunities of our system and the hand-tracking algorithm in particular. We assume that our results will inform the future design of assistive systems in manual assembly.},
  author       = {Büttner, Sebastian and Sand, Oliver and Röcker, Carsten},
  booktitle    = {European Conference on Ambient Intelligence},
  isbn         = {978-3-319-56996-3},
  keywords     = {Augmented reality, Mobile projection, Hand tracking, Manufacturing, Industry 4.0},
  location     = {Malaga, Spain},
  pages        = {33--45},
  publisher    = {Springer},
  title        = {Exploring Design Opportunities for Intelligent Worker Assistance: A New Approach Using Projection-Based {AR} and a Novel Hand-Tracking Algorithm},
  doi          = {10.1007/978-3-319-56997-0_3},
  volume       = {10217},
  year         = {2017},
}

@misc{796,
  author       = {Kelm, Jasper},
  keywords     = {2D-Tracking, 3D-Tracking, Integration, Kamera, Rekonstruktion},
  pages        = {89},
  publisher    = {Hochschule Ostwestfalen-Lippe},
  title        = {{3D-Tracking: Gegenüberstellung verschiedener Implementierungen in Labor- und Praxisanwendung}},
  year         = {2017},
}

@inproceedings{4331,
  abstract     = {When a user enters a personal identification number (PIN) into an automated teller machine or a point of sale terminal, there is a risk of some one watching from behind, trying to guess the PIN code. Such shoulder-surfing is a major security threat. In order to overcome this problem different PIN entry methods have been suggested. In this regard, gaze interaction methods are receiving attention in recent years, owing to the lowering cost of eye tracking technology. In this paper, we present SafetyPIN - an eye tracking based PIN entry system - which is aimed at making the PIN entry more secure with the help of an eye tracking device. We discuss the implementation and the initial evaluation of this system.},
  author       = {Seetharama, Mythreya and Paelke, Volker and Röcker, Carsten},
  booktitle    = {Human Aspects of Information Security, Privacy, and Trust},
  editor       = {Tryfonas, Theo and Askoxylakis, Ioannis},
  isbn         = {978-3-319-20375-1},
  keywords     = {PIN entry, Eye tracking, Security, Usability, Point of sale terminals},
  location     = {Los Angeles, CA, USA},
  pages        = {426--435},
  publisher    = {Springer},
  title        = {{SafetyPIN}: Secure {PIN} Entry through Eye Tracking},
  doi          = {10.1007/978-3-319-20376-8_38},
  volume       = {9190},
  year         = {2015},
}

@inproceedings{4373,
  abstract     = {Today, usability measures for the evaluation of systems and interfaces are mostly assessed in an isolated way. This paper addresses the question whether an integrative multi-dimensional feature evaluation can lead to different and more holistic results. We combined traditional measures (e.g., time to task completion) with advanced measures, like eye tracking, biosignal data logging and assessment of user emotions. For the evaluation of emotions, we used verbal methods (PAD Semantic Scale and a questionnaire) and a nonverbal method with EmoCards. The overall goal was to document and analyze the interaction as completely as possible (including effectiveness, efficiency and user satisfaction), focusing especially on objective measurements. Furthermore, the recording and assessment of emotions, which are part of the user experience, should give insights into user satisfaction.},
  author       = {Ermes, Verena and Janß, Armin and Radermacher, Klaus and Röcker, Carsten},
  booktitle    = {Proceedings of the 8th International Conference on Pervasive Computing Technologies for Healthcare},
  editor       = {Hein, Andreas},
  keywords     = {Usability Measures, User Experience, Eye Tracking, Biosignal Data Logging, Emotion Evaluation, Risk Analysis, Medical Devices, Integrated Usability Evaluation},
  location     = {Oldenburg},
  pages        = {227--230},
  publisher    = {ICST (Institute for Computer Sciences, Social-Informatics and Telecommunications Engineering)},
  title        = {Analyzing the Benefits of Integrative Multi-Dimensional Assessments of Usability Features in Interaction-Centered User Studies},
  doi          = {10.4108/icst.pervasivehealth.2014.255142},
  year         = {2014},
}

