@inproceedings{4094,
  abstract     = {Projection-based assistive systems that guide users through assembly work are on their way to industrial application. Previous research work investigated how people can be supported with such systems. However, there has been little work on the question on how to generate and author sequential instructions for assistive systems. In this paper, we present a new concept and a prototypical implementation of an assistive system that can be taught by demonstrating an assembly process. By using a combination of RGB and depth cameras, we can generate an assembly instruction of Lego Duplo bricks based on the demonstration of a user. This generated manual can later on be used for assisting other users in the assembly process. By our prototype system, we show the technological feasibility of assistive systems that can learn from users.},
  author       = {Büttner, Sebastian and Peda, Andreas and Heinz, Mario and Röcker, Carsten},
  booktitle    = {22nd International Conference on Human-Computer Interaction},
  isbn         = {978-3-030-50343-7},
  keywords     = {Assistive system, Authoring, Instruction generation, Computer vision, Teaching by demonstration},
  location     = {Copenhagen, Denmark},
  pages        = {153--163},
  publisher    = {Springer},
  title        = {Teaching by Demonstrating -- How Smart Assistive Systems Can Learn from Users},
  doi          = {10.1007/978-3-030-50344-4_12},
  volume       = {12203},
  year         = {2020},
}

@inproceedings{4095,
  abstract     = {Using remote control transmitters is a common way to control a drone. For the future, we envision drones that are intuitively controllable with new input devices. One possibility could be the use of one-hand controllers. Here, we present an exploration of using a 3-D mouse as a controller for human-drone interaction. We ran a pre-study that investigated the users’ natural spatial mapping between controller and drone dimensions. Based on these results we developed our prototype that shows the feasibility of our concept. A series of flight tests were conducted and the mapping between controller and flight movements were iteratively improved. In this paper, we present our development process and the implementation of our prototype.},
  author       = {Büttner, Sebastian and Zaitoon, Rami and Heinz, Mario and Röcker, Carsten},
  booktitle    = {22nd International Conference on Human-Computer Interaction},
  isbn         = {978-3-030-49061-4},
  keywords     = {Human-Drone Interaction, Unmanned Aerial Vehicle, 3-D mouse, Spatial mapping, Prototyping},
  location     = {Copenhagen, Denmark},
  pages        = {535--548},
  publisher    = {Springer},
  title        = {One-hand Controller for Human-Drone Interaction -- a Human-centered Prototype Development},
  doi          = {10.1007/978-3-030-49062-1_36},
  volume       = {12203},
  year         = {2020},
}

@inproceedings{4096,
  abstract     = {Projection-based assistive systems have shown to be a promising technology to support workers during manual assembly processes in industrial manufacturing by projecting instructions into the working area. While existing studies have investigated various aspects of these systems, little research has been conducted regarding the way in which the user accesses the provided instructions. In this paper we analyze the eye movements of users during the repeated execution of an assembly task at a projection-based assistive system in order to gain insights into the utilization of the presented instructions. For this purpose, we analyzed eye tracking recordings from a user study with 15 participants to investigate the sequences in which the respective instructions are observed by the users. The results show a significantly lower number of nonlinear gaze sequences as well as a significantly higher number of steps without observing the instructions during the repeated use of the assistive system. In addition, there was a significantly lower task completion time during repeated use of the assistive system.},
  author       = {Heinz, Mario and Büttner, Sebastian and Röcker, Carsten},
  booktitle    = {22nd International Conference on Human-Computer Interaction},
  isbn         = {978-3-030-50343-7},
  keywords     = {Assistive systems, Eye tracking, Human behavior},
  location     = {Copenhagen, Denmark},
  pages        = {259--272},
  publisher    = {Springer},
  title        = {Exploring Users' Eye Movements When Using Projection-based Assembly Assistive Systems},
  doi          = {10.1007/978-3-030-50344-4_19},
  volume       = {12203},
  year         = {2020},
}

@inproceedings{4104,
  author       = {Büttner, Sebastian and Heinz, Mario and Zaitoon, Rami and Röcker, Carsten},
  booktitle    = {Proceedings of the International Workshop on Human-Drone Interaction (iHDI'19) as part of the ACM Conference on Human Factors in Computing Systems (CHI '19)},
  location     = {Glasgow, Scotland, UK},
  pages        = {76--83},
  title        = {Investigating Users' Natural Spatial Mapping between Drone Dimensions and One-Hand Drone Controllers},
  year         = {2019},
  internal-note = {NOTE(review): probable duplicate of entry 4107 (same title, venue, year); author lists differ (Zaitoon here vs. Zanini in 4107) -- verify against the published paper and merge},
}

@inproceedings{4106,
  abstract     = {Der steigende Automatisierungsgrad in der Produktion führt dazu, dass einzelne Mitarbeiter für eine wachsende Zahl an Maschinen verantwortlich sind. Um Informationen von Maschinen in einer verteilten Industrieumgebung zu Mitarbeitern zu bringen, setzt Phoenix Contact seit kurzer Zeit Smartwatches ein. Im Rahmen dieses Beitrags wird der Entwicklungsprozess sowie das Ergebnis der entsprechenden Smartwatch-Anwendung vorgestellt. Um eine hohe Gebrauchstauglichkeit und die Akzeptanz der neuen Technologie bei der Belegschaft zu erreichen, wurden von Beginn an Nutzer in die Entwicklung der Anwendung einbezogen. Durch Kontextanalysen, Diskussionen von Storyboards sowie die iterative Prototypen-Erstellung und -Evaluierung wurde die Interaktion zwischen Mensch und Produktionsumgebung mit Hilfe von Smartwatches optimiert.},
  author       = {Bröring, Andre and Büttner, Sebastian and Heinz, Mario and Röcker, Carsten},
  booktitle    = {Mensch und Computer 2019 - Usability Professionals},
  editor       = {Fischer, H. and Hess, S.},
  location     = {Hamburg},
  pages        = {228--235},
  publisher    = {Gesellschaft für Informatik e.V. und German UPA e.V.},
  title        = {Smartwatches zur Unterstützung von Produktionsmitarbeitern},
  doi          = {10.18420/muc2019-up-0324},
  year         = {2019},
  internal-note = {NOTE(review): duplicate of entry 4110 (identical DOI); author lists differ (4110 adds Fast, Arno) -- verify and merge},
}

@inproceedings{4107,
  abstract     = {Using remote control transmitters is a common way to control a drone. For the future, we envision drones that are intuitively controllable with new input devices. One possibility could be the use of one-hand controllers, e.g. 3-D mice. While developing such a device, we investigated the users’ natural spatial mapping between controller and drone dimensions. In this paper we present our insights about this mapping and show why relative position control is an important control concept for novice users.},
  author       = {Büttner, Sebastian and Heinz, Mario and Zanini, Paulo and Röcker, Carsten},
  booktitle    = {Proceedings of the International Workshop on Human-Drone Interaction (iHDI'19) as part of the ACM Conference on Human Factors in Computing Systems (CHI '19)},
  location     = {Glasgow, Scotland, UK},
  title        = {Investigating Users' Natural Spatial Mapping between Drone Dimensions and One-Hand Drone Controllers},
  year         = {2019},
  internal-note = {NOTE(review): probable duplicate of entry 4104 (same title, venue, year); author lists differ (Zanini here vs. Zaitoon in 4104); 4104 additionally has pages 76--83 -- verify and merge},
}

@inproceedings{4108,
  abstract     = {In this paper, we present a conceptual approach and the first prototype of a mobile training system to provide non-expert users with helpful information about the functionality of complex automated industrial systems. The system uses an augmented reality (AR) tablet application to visualize information about internal processes, sensor states, settings and hidden parts of a production system directly in the field of view of a user. The available information can be accessed via four different methods which combine elements of step-by-step tutorials and open exploration. Our prototype aims to support users to better understand automated systems. While such systems will become more complex in future, we believe that augmented reality is a key concept that could help humans to better understand and experience automated systems and its consequences in general.},
  author       = {Heinz, Mario and Büttner, Sebastian and Röcker, Carsten},
  booktitle    = {Proceedings of the 12th ACM International Conference on Pervasive Technologies Related to Assistive Environments (PETRA'19)},
  editor       = {Makedon, Fillia},
  location     = {Rhodes, Greece},
  pages        = {398--401},
  publisher    = {ACM},
  title        = {Exploring Training Modes for Industrial Augmented Reality Learning},
  doi          = {10.1145/3316782.3322753},
  year         = {2019},
}

@inproceedings{4110,
  abstract     = {Der steigende Automatisierungsgrad in der Produktion führt dazu, dass einzelne Mitarbeiter für eine wachsende Zahl an Maschinen verantwortlich sind. Um Informationen von Maschinen in einer verteilten Industrieumgebung zu Mitarbeitern zu bringen, setzt Phoenix Contact seit kurzer Zeit Smartwatches ein. Im Rahmen dieses Beitrags wird der Entwicklungsprozess sowie das Ergebnis der entsprechenden Smartwatch-Anwendung vorgestellt. Um eine hohe Gebrauchstauglichkeit und die Akzeptanz der neuen Technologie bei der Belegschaft zu erreichen, wurden von Beginn an Nutzer in die Entwicklung der Anwendung einbezogen. Durch Kontextanalysen, Diskussionen von Storyboards sowie die iterative Prototypen-Erstellung und -Evaluierung wurde die Interaktion zwischen Mensch und Produktionsumgebung mit Hilfe von Smartwatches optimiert.},
  author       = {Bröring, Andre and Fast, Arno and Büttner, Sebastian and Heinz, Mario and Röcker, Carsten},
  booktitle    = {Mensch und Computer 2019 - Usability Professionals},
  location     = {Hamburg},
  publisher    = {Gesellschaft für Informatik e.V. und German UPA e.V.},
  title        = {Smartwatches zur Unterstützung von Produktionsmitarbeitern},
  doi          = {10.18420/muc2019-up-0324},
  year         = {2019},
  internal-note = {NOTE(review): duplicate of entry 4106 (identical DOI); author lists differ (Fast, Arno only listed here) -- verify and merge},
}

@inproceedings{4111,
  author       = {Heinz, Mario and Büttner, Sebastian and Röcker, Carsten},
  booktitle    = {Workshop "Everyday Automation Experience" at ACM CHI Conference on Human Factors in Computing Systems (CHI 2019)},
  location     = {Glasgow, Scotland, UK},
  publisher    = {ACM},
  title        = {Exploring Augmented Reality Training for Automated Systems},
  year         = {2019},
}

@inproceedings{4315,
  abstract     = {Recent advances in the field of industrial digitization and automation lead to an increasing need for assistance systems to support workers in various fields of activity, such as assembly, logistics and maintenance. Current assistance systems for the maintenance area are usually based on a single visualization technology. However, in our view, this is not practicable in terms of real activities, as these operations involve various subtasks for which different interaction concepts would be advantageous. Therefore, in this paper, we propose a concept for a multi-device assistive system, which combines multiple devices to provide workers with relevant information over different subtasks of a maintenance operation and present our first prototype for such a system.},
  author       = {Heinz, Mario and Dhiman, Hitesh and Röcker, Carsten},
  booktitle    = {International Cross-Domain Conference for Machine Learning and Knowledge Extraction},
  location     = {Canterbury, United Kingdom},
  publisher    = {Springer},
  title        = {A Multi-Device Assistive System for Industrial Maintenance Operations},
  year         = {2019},
  internal-note = {NOTE(review): probable duplicate of entry 4318 (same title and authors; 4318 is the 2018 published CD-MAKE version with pages and DOI) -- verify year/venue and merge},
}

@inproceedings{4183,
  abstract     = {The localization of employees in the industrial environment plays a major role in the development of future intelligent user interfaces and systems. Yet, localizing people also raises ethical, legal and social issues. While a precise localization is essential for context-aware systems and real-time optimization of processes, a permanently high localization accuracy creates opportunities for surveillance and therefore has a negative impact on workplace privacy. In this paper, we propose a new concept of a multi-level localization system which tries to find a way to meet both the technical requirements for a localization with a high accuracy as well as the interests of employees in terms of privacy. Depending on the users’ location, different localization technologies are used, that restrict the accuracy to the least required level by design. Furthermore, we present a prototypical implementation of the concept that shows the feasibility of our multi-level localization concept. Using this system, intelligent systems become able to react on employees based on their location without permanently monitoring the precise user location.},
  author       = {Heinz, Mario and Büttner, Sebastian and Wegerich, Martin and Marek, Frank and Röcker, Carsten},
  booktitle    = {Distributed, Ambient and Pervasive Interactions: Understanding Humans: 6th International Conference, DAPI 2018, Held as Part of HCI International 2018, Las Vegas, NV, USA, July 15--20, 2018, Proceedings, Part I},
  editor       = {Streitz, Norbert and Konomi, Shin’ichi},
  isbn         = {978-3-319-91130-4},
  keywords     = {Indoor localization, Intelligent user interface, Process planning},
  location     = {Las Vegas, NV, USA},
  pages        = {3--47},
  publisher    = {Springer},
  title        = {A Multi-level Localization System for Intelligent User Interfaces},
  doi          = {10.1007/978-3-319-91131-1_3},
  volume       = {10921},
  year         = {2018},
  internal-note = {NOTE(review): pages 3--47 implies a 45-page chapter, which looks implausible for a conference paper -- verify the page range against the publisher},
}

@inproceedings{4318,
  abstract     = {Recent advances in the field of industrial digitization and automation lead to an increasing need for assistance systems to support workers in various fields of activity, such as assembly, logistics and maintenance. Current assistance systems for the maintenance area are usually based on a single visualization technology. However, in our view, this is not practicable in terms of real activities, as these operations involve various subtasks for which different interaction concepts would be advantageous. Therefore, in this paper, we propose a concept for a multi-device assistive system, which combines multiple devices to provide workers with relevant information over different subtasks of a maintenance operation and present our first prototype for such a system.},
  author       = {Heinz, Mario and Dhiman, Hitesh and Röcker, Carsten},
  booktitle    = {Machine Learning and Knowledge Extraction: Second IFIP TC 5, TC 8/WG 8.4, 8.9, TC 12/WG 12.9 International Cross-Domain Conference, CD-MAKE 2018},
  editor       = {Holzinger, Andreas and Kieseberg, Peter and Tjoa, A Min and Weippl, Edgar},
  isbn         = {978-3-319-99739-1},
  keywords     = {Human-machine-interaction, Multimodal feedback, Assistive systems, Augmented-reality, Smart factory},
  location     = {Hamburg},
  pages        = {239--247},
  publisher    = {Springer},
  title        = {A Multi-Device Assistive System for Industrial Maintenance Operations},
  doi          = {10.1007/978-3-319-99740-7_16},
  volume       = {11015},
  year         = {2018},
}

@inproceedings{4324,
  abstract     = {On the long term, the current wave of digitization and automation in the industrial environment will result in a progressively higher complexity and heterogeneity in the industrial environment. In this context, a growing need arises for the development of digital assistance systems to support workers in various fields of activities. Current systems are generally limited to visualizations and visual feedback. Therefore, in the scope of this paper, we take a look at the major challenges and opportunities for the integration of multimodal feedback systems in today’s and future industrial environments. It shows that the integration of multimodal feedback is subject to a complex combination of technical, user-centric and legal aspects.},
  author       = {Heinz, Mario and Röcker, Carsten},
  booktitle    = {Machine Learning and Knowledge Extraction: Second IFIP TC 5, TC 8/WG 8.4, 8.9, TC 12/WG 12.9 International Cross-Domain Conference, CD-MAKE 2018},
  editor       = {Holzinger, Andreas and Kieseberg, Peter and Tjoa, A Min and Weippl, Edgar},
  isbn         = {978-3-319-99739-1},
  keywords     = {Human-machine-interaction, Multimodal feedback, Assistive systems, Augmented-reality, Smart factory},
  location     = {Hamburg},
  publisher    = {Springer},
  title        = {Feedback Presentation for Workers in Industrial Environments -- Challenges and Opportunities},
  doi          = {10.1007/978-3-319-99740-7_17},
  volume       = {11015},
  year         = {2018},
}

@inproceedings{4327,
  abstract     = {In ever changing world, the industrial systems become more and more complex. Machine feedback in the form of alarms and notifications, due to its growing volume, becomes overwhelming for the operator. In addition, expectations in relation to system availability are growing as well. Therefore, there exists strong need for new solutions guaranteeing fast troubleshooting of problems that arise during system operation. The approach proposed in this study uses advantages of the Asset Administration Shell, machine learning, and human-machine interaction in order to create the assistance system which holistically addresses the issue of troubleshooting complex industrial systems.},
  author       = {Lang, Dorota and Wunderlich, Paul and Heinz, Mario and Wisniewski, Lukasz and Jasperneite, Jürgen and Niggemann, Oliver and Röcker, Carsten},
  booktitle    = {14th IEEE International Workshop on Factory Communication Systems (WFCS)},
  keywords     = {Maintenance engineering, Adaptation models, Machine learning, Data models, Standards, Software, Bayes methods},
  location     = {Imperia, Italy},
  publisher    = {IEEE},
  title        = {Assistance System to Support Troubleshooting of Complex Industrial Systems},
  doi          = {10.1109/WFCS.2018.8402380},
  year         = {2018},
}

@inproceedings{4254,
  abstract     = {The current trend of integrating machines and factories into cyber-physical systems (CPS) creates an enormous complexity for operators of such systems. Especially the search for the root cause of cascading failures becomes highly time-consuming. Within this paper, we address the question on how to help human users to better and faster understand root causes of such situations. We propose a concept of interactive alarm flood reduction and present the implementation of a first vertical prototype for such a system. We consider this prototype as a first artifact to be discussed by the research community and aim towards an incremental further development of the system in order to support humans in complex error situations.},
  author       = {Büttner, Sebastian and Wunderlich, Paul and Heinz, Mario and Niggemann, Oliver and Röcker, Carsten},
  booktitle    = {Machine Learning and Knowledge Extraction: First IFIP TC 5, WG 8.4, 8.9, 12.9 International Cross-Domain Conference, CD-MAKE 2017, Reggio, Italy, August 29 -- September 1, 2017, Proceedings},
  editor       = {Holzinger, Andreas},
  isbn         = {978-3-319-66807-9},
  keywords     = {Alarm flood reduction, Machine learning, Assistive system},
  location     = {Reggio, Italy},
  pages        = {69--82},
  publisher    = {Springer},
  title        = {Managing Complexity: Towards Intelligent Error-Handling Assistance Trough Interactive Alarm Flood Reduction},
  volume       = {10410},
  year         = {2017},
  internal-note = {NOTE(review): "Trough" in the title may be a typo for "Through", but the typo may also appear in the published version -- verify against the publisher before changing; entry also lacks a DOI},
}

