@inproceedings{4092,
  abstract     = {{The use of augmented reality (AR) guidance is seen as an opportunity to address the growing complexity of industrial tasks. Previous research showed benefits of AR for different industrial tasks especially for novice users, while other research suggests that AR was not superior to other means for novices. However, there is not much work that looks at the relation between initial exposure of users to AR (that is, if users have never used AR before) and different types of tasks. In this paper, addressing the field of car maintenance and repair, we look into the question of how AR support impacts the performance in familiar and unfamiliar task if the AR user has never used AR before. By running an experiment under field conditions, we investigate whether the familiarity of a specific repair task has an impact on the performance under AR guidance compared to a traditional repair guideline. Our experiment reveals interesting insights. First, we show that familiarity and routine have an important impact on adherence to (all) repair guidelines, which should be regarded in future studies. Second, despite its novelty and the corresponding added time to deal with AR, we found that guidance via AR worked better for unfamiliar tasks. This shows the potential of AR for guidance of industrial tasks in practice, and it brings up design suggestions for the implementation of this guidance in practice.}},
  author       = {{Hoffmann, Clemens and Büttner, Sebastian and Wundram, Kai and Prilla, Michael}},
  booktitle    = {{Mensch und Computer 2020}},
  location     = {{Magdeburg}},
  pages        = {{279--289}},
  publisher    = {{Gesellschaft für Informatik e.V.}},
  title        = {{{Impact of Augmented Reality Guidance for Car Repairs on Novice Users of AR – A Field Experiment on Familiar and Unfamiliar Tasks}}},
  doi          = {{10.1145/3404983.3405594}},
  year         = {{2020}},
}

@inproceedings{4093,
  abstract     = {{Im Kontext von Industrie 4.0 werden gegenwärtig Möglichkeiten intelligenter Assistenzsysteme diskutiert, deren Einsatz große Chancen zur Steigerung der Produktivität manueller Tätigkeiten bedeuten könnte. Während diese Systeme große Chancen für die Unternehmen bieten, bedeuten sie für Arbeitnehmer*innen auch eine potenzielle Kontrolle und Überwachung. In diesem Positionsbeitrag wollen wir daher für eine partizipative Gestaltung solcher Systeme plädieren. Der vorliegende Beitrag beschreibt zunächst die Zusammenarbeit von Assistenzsystemen und Beschäftigten im industriellen Umfeld. Anschließend wird die Kontrolle und Überwachung von Mitarbeiter*innen betrachtet, zunächst im historischen Kontext, dann im aktuellen. Es wird aufgezeigt inwieweit Beschäftigte durch Assistenzsysteme überwacht beziehungsweise kontrolliert werden können. Im Spannungsfeld zwischen Effizienzsteigerung und Arbeitnehmer*innen-Interessen plädieren wir für die Einbeziehung von System-Nutzer*innen sowie weiteren Interessensvertretungen, wie z. B. Gewerkschaften, in den Entwicklungsprozess, damit ein ethisch vertretbarer Einsatz von Assistenzsystemen gelingen kann.}},
  author       = {{Pitz, Nina and Büttner, Sebastian and Röcker, Carsten}},
  booktitle    = {{Mensch und Computer 2020 Workshopband}},
  keywords     = {{Assistenzsystem, Digitalisierung, Industrie 4.0, Partizipative Gestaltung, Ethik}},
  location     = {{Magdeburg}},
  publisher    = {{Gesellschaft für Informatik e.V.}},
  title        = {{{Assistenzsysteme im Kontext von Industrie 4.0 – Partizipative Technologiegestaltung zur Wahrung der Arbeitnehmer*innen-Interessen}}},
  doi          = {{10.18420/muc2020-ws109-275}},
  year         = {{2020}},
}

@inproceedings{4094,
  abstract     = {{Projection-based assistive systems that guide users through assembly work are on their way to industrial application. Previous research work investigated how people can be supported with such systems. However, there has been little work on the question on how to generate and author sequential instructions for assistive systems. In this paper, we present a new concept and a prototypical implementation of an assistive system that can be taught by demonstrating an assembly process. By using a combination of RGB and depth cameras, we can generate an assembly instruction of Lego Duplo bricks based on the demonstration of a user. This generated manual can later on be used for assisting other users in the assembly process. By our prototype system, we show the technological feasibility of assistive systems that can learn from users.}},
  author       = {{Büttner, Sebastian and Peda, Andreas and Heinz, Mario and Röcker, Carsten}},
  booktitle    = {{22nd International Conference on Human-Computer Interaction}},
  isbn         = {{978-3-030-50343-7}},
  keywords     = {{Assistive system, Authoring, Instruction generation, Computer vision, Teaching by demonstration}},
  location     = {{Copenhagen, Denmark}},
  pages        = {{153--163}},
  publisher    = {{Springer}},
  title        = {{{Teaching by Demonstrating – How Smart Assistive Systems Can Learn from Users}}},
  doi          = {{10.1007/978-3-030-50344-4_12}},
  volume       = {{12203}},
  year         = {{2020}},
}

@inproceedings{4095,
  abstract     = {{Using remote control transmitters is a common way to control a drone. For the future, we envision drones that are intuitively controllable with new input devices. One possibility could be the use of one-hand controllers. Here, we present an exploration of using a 3-D mouse as a controller for human-drone interaction. We ran a pre-study that investigated the users’ natural spatial mapping between controller and drone dimensions. Based on these results we developed our prototype that shows the feasibility of our concept. A series of flight tests were conducted and the mapping between controller and flight movements were iteratively improved. In this paper, we present our development process and the implementation of our prototype.}},
  author       = {{Büttner, Sebastian and Zaitoon, Rami and Heinz, Mario and Röcker, Carsten}},
  booktitle    = {{22nd International Conference on Human-Computer Interaction}},
  isbn         = {{978-3-030-49061-4}},
  keywords     = {{Human-Drone Interaction, Unmanned Aerial Vehicle, 3-D mouse, Spatial mapping, Prototyping}},
  location     = {{Copenhagen, Denmark}},
  pages        = {{535--548}},
  publisher    = {{Springer}},
  title        = {{{One-hand Controller for Human-Drone Interaction – a Human-centered Prototype Development}}},
  doi          = {{10.1007/978-3-030-49062-1_36}},
  volume       = {{12203}},
  year         = {{2020}},
}

@inproceedings{4096,
  abstract     = {{Projection-based assistive systems have shown to be a promising technology to support workers during manual assembly processes in industrial manufacturing by projecting instructions into the working area. While existing studies have investigated various aspects of these systems, little research has been conducted regarding the way in which the user accesses the provided instructions. In this paper we analyze the eye movements of users during the repeated execution of an assembly task at a projection-based assistive system in order to gain insights into the utilization of the presented instructions. For this purpose, we analyzed eye tracking recordings from a user study with 15 participants to investigate the sequences in which the respective instructions are observed by the users. The results show a significantly lower number of nonlinear gaze sequences as well as a significantly higher number of steps without observing the instructions during the repeated use of the assistive system. In addition, there was a significantly lower task completion time during repeated use of the assistive system.}},
  author       = {{Heinz, Mario and Büttner, Sebastian and Röcker, Carsten}},
  booktitle    = {{22nd International Conference on Human-Computer Interaction}},
  isbn         = {{978-3-030-50343-7}},
  keywords     = {{Assistive systems, Eye tracking, Human behavior}},
  location     = {{Copenhagen, Denmark}},
  pages        = {{259--272}},
  publisher    = {{Springer}},
  title        = {{{Exploring Users' Eye Movements When Using Projection-based Assembly Assistive Systems}}},
  doi          = {{10.1007/978-3-030-50344-4_19}},
  volume       = {{12203}},
  year         = {{2020}},
}

@inproceedings{4097,
  abstract     = {{The capabilities of object detection are well known, but many projects don’t use them, despite potential benefit. Even though the use of object detection algorithms is facilitated through frameworks and publications, a big issue is the creation of the necessary training data. To tackle this issue, this work shows the design and evaluation of a prototype, which allows users to create synthetic datasets for object detection in images. The prototype is evaluated using YOLOv3 as the underlying detector and shows that the generated datasets are equally good in quality as manually created data. This encourages a wide adoption of object detection algorithms in different areas, since image creation and labeling is often the most time consuming step.}},
  author       = {{Besginow, Andreas and Büttner, Sebastian and Röcker, Carsten}},
  booktitle    = {{22nd International Conference on Human-Computer Interaction}},
  isbn         = {{978-3-030-50343-7}},
  keywords     = {{Object detection, Synthetic datasets, Machine learning, Deep learning}},
  location     = {{Copenhagen, Denmark}},
  pages        = {{178--192}},
  publisher    = {{Springer}},
  title        = {{{Making Object Detection Available to Everyone - A Hardware Prototype for Semi-automatic Synthetic Data Generation}}},
  doi          = {{10.1007/978-3-030-50344-4_14}},
  volume       = {{12203}},
  year         = {{2020}},
}

@inproceedings{4098,
  abstract     = {{Augmented Reality (AR) systems are on their way to industrial application, e.g. projection-based AR is used to enhance assembly work. Previous studies showed advantages of the systems in permanent-use scenarios, such as faster assembly times. In this paper, we investigate whether such systems are suitable for training purposes. Within an experiment, we observed the training with a projection-based AR system over multiple sessions and compared it with a personal training and a paper manual training. Our study shows that projection-based AR systems offer only small benefits in the training scenario. While a systematic mislearning of content is prevented through immediate feedback, our results show that the AR training does not reach the personal training in terms of speed and recall precision after 24 hours. Furthermore, we show that once an assembly task is properly trained, there are no differences in the long-term recall precision, regardless of the training method.}},
  author       = {{Büttner, Sebastian and Prilla, Michael and Röcker, Carsten}},
  booktitle    = {{ACM CHI Conference on Human Factors in Computing Systems (CHI 2020)}},
  editor       = {{Bernhaupt, Regina and Mueller, Florian "Floyd"}},
  location     = {{Honolulu, HI, USA}},
  publisher    = {{ACM}},
  title        = {{{Augmented Reality Training for Industrial Assembly Work – Are Projection-based AR Assistive Systems an Appropriate Tool for Assembly Training?}}},
  doi          = {{10.1145/3313831.3376720}},
  year         = {{2020}},
}

@inproceedings{4102,
  abstract     = {{Complexity is a fundamental part of product design and manufacturing today, owing to increased demands for customization and advances in digital design techniques. Assembling and repairing such an enormous variety of components means that workers are cognitively challenged, take longer to search for the relevant information and are prone to making mistakes. Although in recent years deep learning approaches to object recognition have seen rapid advances, the combined potential of deep learning and augmented reality in the industrial domain remains relatively under explored. In this paper we introduce AR-ProMO, a combined hardware/software solution that provides a generalizable assistance system for identifying mistakes during product assembly and repair.}},
  author       = {{Dhiman, Hitesh and Büttner, Sebastian and Röcker, Carsten and Reisch, Raphael}},
  booktitle    = {{Proceedings of the 31st Australian Conference on Human-Computer-Interaction (OzCHI'19) : 2nd Dec.-5th Dec. 2019, Perth/Fremantle, WA, Australia}},
  isbn         = {{978-1-4503-7696-9}},
  keywords     = {{Augmented Reality, Deep Learning}},
  location     = {{Perth/Fremantle, WA, Australia}},
  pages        = {{518--522}},
  publisher    = {{ACM}},
  title        = {{{Handling Work Complexity with AR/Deep Learning}}},
  doi          = {{10.1145/3369457.3370919}},
  year         = {{2019}},
}

% NOTE(review): likely the same work as entry 4107 (identical title, same workshop) — confirm and deduplicate.
@inproceedings{4104,
  author       = {{Büttner, Sebastian and Heinz, Mario and Zaitoon, Rami and Röcker, Carsten}},
  booktitle    = {{Proceedings of the International workshop on Human-Drone Interaction (iHDI'19) as part of the ACM Conference on Human Factors in Computing Systems (CHI '19)}},
  location     = {{Glasgow, Scotland, UK}},
  pages        = {{76--83}},
  title        = {{{Investigating Users' Natural Spatial Mapping between Drone Dimensions and One-Hand Drone Controllers}}},
  year         = {{2019}},
}

% NOTE(review): same title and DOI as entry 4109 but with a different author list — confirm which record is correct and deduplicate.
@inproceedings{4105,
  abstract     = {{Seit dem Verkaufsstart von Microsofts Augmented-Reality-Brille HoloLens hat Augmented Reality (AR) den Massenmarkt erreicht. So setzen beispielsweise Museen auf die erweiterte Realität zur digitalen Erweiterung ihrer physischen Ausstellungsstücke. Im Kontext von Industrie 4.0 wird eine Vielzahl verschiedener Anwendungsszenarien diskutiert. Um das Potential von AR zu evaluieren, wurde bei Phoenix Contact ein personalloser Werksrundgang mittels AR-Brille durch ein Produktionsgebäude entwickelt. Die HoloLens-Implementierung erfolgte über die Spiele-Engine Unity und nutzt markerbasiertes Tracking für die Darstellung von Inhalten. Dabei werden im Gebäude unterschiedliche Hotspots definiert, welche distanzabhängig getrackt werden können, um den Nutzer mit Hilfe von 3D-Objekten, Animationen und Texten Informationen zu liefern. In diesem Beitrag präsentieren wir die konzeptionelle und technische Realisierung der Anwendung.}},
  author       = {{Rohde, Raphael and Büttner, Sebastian and Röcker, Carsten}},
  booktitle    = {{Mensch und Computer 2019 - Usability Professionals}},
  editor       = {{Fischer, H. and Hess, S.}},
  location     = {{Hamburg}},
  pages        = {{236--244}},
  publisher    = {{Gesellschaft für Informatik e.V.}},
  title        = {{{Verwendung von Augmented Reality im Industrieumfeld}}},
  doi          = {{10.18420/muc2019-up-0336}},
  year         = {{2019}},
}

% NOTE(review): same title (modulo typo) and DOI as entry 4110 but with a different author list — confirm and deduplicate.
@inproceedings{4106,
  abstract     = {{Der steigende Automatisierungsgrad in der Produktion führt dazu, dass einzelne Mitarbeiter für eine wachsende Zahl an Maschinen verantwortlich sind. Um Informationen von Maschinen in einer verteilten Industrieumgebung zu Mitarbeitern zu bringen, setzt Phoenix Contact seit kurzer Zeit Smartwatches ein. Im Rahmen dieses Beitrags wird der Entwicklungsprozess sowie das Ergebnis der entsprechenden Smartwatch-Anwendung vorgestellt. Um eine hohe Gebrauchstauglichkeit und die Akzeptanz der neuen Technologie bei der Belegschaft zu erreichen, wurden von Beginn an Nutzer in die Entwicklung der Anwendung einbezogen. Durch Kontextanalysen, Diskussionen von Storyboards sowie die iterative Prototypen-Erstellung und -Evaluierung wurde die Interaktion zwischen Mensch und Produktionsumgebung mit Hilfe von Smartwatches optimiert.}},
  author       = {{Bröring, Andre and Büttner, Sebastian and Heinz, Mario and Röcker, Carsten}},
  booktitle    = {{Mensch und Computer 2019 - Usability Professionals}},
  editor       = {{Fischer, H. and Hess, S.}},
  location     = {{Hamburg}},
  pages        = {{228--235}},
  publisher    = {{Gesellschaft für Informatik e.V.}},
  title        = {{{Smartwatches zur Unterstützung von Produktionsmitarbeitern}}},
  doi          = {{10.18420/muc2019-up-0324}},
  year         = {{2019}},
}

% NOTE(review): likely the same work as entry 4104 (identical title, same workshop) — confirm and deduplicate.
@inproceedings{4107,
  abstract     = {{Using remote control transmitters is a common way to control a drone. For the future, we envision drones that are intuitively controllable with new input devices. One possibility could be the use of one-hand controllers, e.g. 3-D mice. While developing such a device, we investigated the users’ natural spatial mapping between controller and drone dimensions. In this paper we present our insights about this mapping and show why relative position control is an important control concept for novice users.}},
  author       = {{Büttner, Sebastian and Heinz, Mario and Zanini, Paulo and Röcker, Carsten}},
  booktitle    = {{Proceedings of the International workshop on Human-Drone Interaction (iHDI'19) as part of the ACM Conference on Human Factors in Computing Systems (CHI '19)}},
  location     = {{Glasgow, Scotland, UK}},
  title        = {{{Investigating Users' Natural Spatial Mapping between Drone Dimensions and One-Hand Drone Controllers}}},
  year         = {{2019}},
}

@inproceedings{4108,
  abstract     = {{In this paper, we present a conceptual approach and the first prototype of a mobile training system to provide non-expert users with helpful information about the functionality of complex automated industrial systems. The system uses an augmented reality (AR) tablet application to visualize information about internal processes, sensor states, settings and hidden parts of a production system directly in the field of view of a user. The available information can be accessed via four different methods which combine elements of step-by-step tutorials and open exploration. Our prototype aims to support users to better understand automated systems. While such systems will become more complex in future, we believe that augmented reality is a key concept that could help humans to better understand and experience automated systems and its consequences in general.}},
  author       = {{Heinz, Mario and Büttner, Sebastian and Röcker, Carsten}},
  booktitle    = {{Proceedings of the 12th ACM International Conference on Pervasive Technologies Related to Assistive Environments (PETRA'19)}},
  editor       = {{Makedon, Fillia}},
  location     = {{Rhodes, Greece}},
  pages        = {{398--401}},
  publisher    = {{ACM}},
  title        = {{{Exploring Training Modes for Industrial Augmented Reality Learning}}},
  doi          = {{10.1145/3316782.3322753}},
  year         = {{2019}},
}

% NOTE(review): same title and DOI as entry 4105 but with a different author list — confirm which record is correct and deduplicate.
@inproceedings{4109,
  abstract     = {{Seit dem Verkaufsstart von Microsofts Augmented-Reality-Brille HoloLens hat Augmented Reality (AR) den Massenmarkt erreicht. So setzen beispielsweise Museen auf die erweiterte Realität zur digitalen Erweiterung ihrer physischen Ausstellungsstücke. Im Kontext von Industrie 4.0 wird eine Vielzahl verschiedener Anwendungsszenarien diskutiert. Um das Potential von AR zu evaluieren, wurde bei Phoenix Contact ein personalloser Werksrundgang mittels AR-Brille durch ein Produktionsgebäude entwickelt. Die HoloLens-Implementierung erfolgte über die Spiele-Engine Unity und nutzt markerbasiertes Tracking für die Darstellung von Inhalten. Dabei werden im Gebäude unterschiedliche Hotspots definiert, welche distanzabhängig getrackt werden können, um den Nutzer mit Hilfe von 3D-Objekten, Animationen und Texten Informationen zu liefern. In diesem Beitrag präsentieren wir die konzeptionelle und technische Realisierung der Anwendung.}},
  author       = {{Töberg, Jan-Philipp and Rohde, Raphael and Büttner, Sebastian and Röcker, Carsten}},
  booktitle    = {{Mensch und Computer 2019 - Usability Professionals}},
  location     = {{Hamburg}},
  publisher    = {{Gesellschaft für Informatik e.V. Und German UPA e.V.}},
  title        = {{{Verwendung von Augmented Reality im Industrieumfeld}}},
  doi          = {{10.18420/muc2019-up-0336}},
  year         = {{2019}},
}

% NOTE(review): same title and DOI as entry 4106 but with a different author list — confirm which record is correct and deduplicate.
@inproceedings{4110,
  abstract     = {{Der steigende Automatisierungsgrad in der Produktion führt dazu, dass einzelne Mitarbeiter für eine wachsende Zahl an Maschinen verantwortlich sind. Um Informationen von Maschinen in einer verteilten Industrieumgebung zu Mitarbeitern zu bringen, setzt Phoenix Contact seit kurzer Zeit Smartwatches ein. Im Rahmen dieses Beitrags wird der Entwicklungsprozess sowie das Ergebnis der entsprechenden Smartwatch-Anwendung vorgestellt. Um eine hohe Gebrauchstauglichkeit und die Akzeptanz der neuen Technologie bei der Belegschaft zu erreichen, wurden von Beginn an Nutzer in die Entwicklung der Anwendung einbezogen. Durch Kontextanalysen, Diskussionen von Storyboards sowie die iterative Prototypen-Erstellung und -Evaluierung wurde die Interaktion zwischen Mensch und Produktionsumgebung mit Hilfe von Smartwatches optimiert.}},
  author       = {{Bröring, Andre and Fast, Arno and Büttner, Sebastian and Heinz, Mario and Röcker, Carsten}},
  booktitle    = {{Mensch und Computer 2019 - Usability Professionals}},
  location     = {{Hamburg}},
  publisher    = {{Gesellschaft für Informatik e.V. Und German UPA e.V.}},
  title        = {{{Smartwatches zur Unterstützung von Produktionsmitarbeitern}}},
  doi          = {{10.18420/muc2019-up-0324}},
  year         = {{2019}},
}

% CHI 2019 workshop position paper; no DOI or page range present in this record.
@inproceedings{4111,
  author       = {{Heinz, Mario and Büttner, Sebastian and Röcker, Carsten}},
  booktitle    = {{Workshop "Everyday Automation Experience" at ACM CHI Conference on Human Factors in Computing Systems (CHI 2019)}},
  location     = {{Glasgow}},
  publisher    = {{ACM}},
  title        = {{{Exploring Augmented Reality Training for Automated Systems}}},
  year         = {{2019}},
}

@inbook{4178,
  abstract     = {{Im Folgenden werden die Pilotprojekte des ReSerW-Projekts vorgestellt. Diese Pilotprojekte sollen die Fähigkeit der in den Querschnittsprojekten (siehe Abb. 4.1) erarbeiteten Methoden nachweisen und die Industrietauglichkeit der Lösungen sicherstellen. Aus diesem Grund fließen die Ergebnisse jedes einzelnen Querschnittsprojekts in die vier Pilotprojekte ein.}},
  author       = {{Wöhler, Mathias and Blum, Alexandra and Bringewatt, Wilhelm and Büttner, Sebastian and Dellnitz, Michael and Gräler, Manuel and Just, Viktor and Kummert, Franz and Mucha, Henrik and Peitz, Sebastian and Röcker, Carsten and Sielermann, Jürgen and Tschirner, Christian and Wittrowski, Jens}},
  booktitle    = {{Ressourceneffiziente Selbstoptimierende Wäscherei}},
  editor       = {{Trächtler, Ansgar}},
  isbn         = {{978-3-662-56389-2}},
  issn         = {{2523-3637}},
  pages        = {{105--158}},
  publisher    = {{Springer}},
  title        = {{{Pilotprojekte}}},
  doi          = {{10.1007/978-3-662-56390-8_4}},
  year         = {{2018}},
}

% NOTE(review): no DOI in this record, unlike the sibling chapter (entry 4178) — confirm whether one exists.
@inbook{4179,
  abstract     = {{Für die Weiterentwicklung der klassischen Wäschereitechnik zu intelligenten technischen Systemen ist ein strukturiertes Vorgehen unerlässlich. In dem Projekt wurden wissenschaftliche Methoden genutzt und auf die Wäschereitechnik adaptiert, um das Ziel einer ressourcenschonenden Wäscherei zu erreichen. Neben der modellbasierten Entwurfstechnik für intelligente Systeme wurden ebenfalls Methoden für die Bildverarbeitung, mathematische Optimierung und Usability von Maschinen in dem Projekt verwendet. Diese werden in den folgenden Abschnitten näher vorgestellt.}},
  author       = {{Kummert, Franz and Albers, Alexander A. and Bremer, Christian and Büttner, Sebastian and Dellnitz, Michael and Dumitrescu, Roman and Gräler, Manuel and Just, Viktor and Mucha, Henrik and Peitz, Sebastian and Röcker, Carsten and Trächtler, Ansgar and Tschirner, Christian and Wang, Shuo and Wittrowski, Jens}},
  booktitle    = {{Ressourceneffiziente Selbstoptimierende Wäscherei}},
  editor       = {{Trächtler, Ansgar}},
  isbn         = {{978-3-662-56389-2}},
  issn         = {{2523-3637}},
  pages        = {{41--104}},
  publisher    = {{Springer}},
  title        = {{{Eingesetzte wissenschaftliche Methoden}}},
  year         = {{2018}},
}

% 5th Smart Factories workshop paper at Mensch und Computer 2018 (companion series to entry 4251 from MuC 2017).
@inproceedings{4180,
  abstract     = {{In this paper we give an overview of features and use cases that Intelligent Adaptive Assistance Systems (IAAS) in the literature commonly provide. For this, a literature research has been executed where 29 papers were selected for inspection. In the course of this inspection, most common features are noted, compared and assessed against the definitions we gave for an IAAS. It showed that the development of IAAS can benefit from an intensified research in cooperation with machine learning experts to further develop the intelligence and adaptivity of future IAAS.}},
  author       = {{Besginow, Andreas and Büttner, Sebastian and Röcker, Carsten}},
  booktitle    = {{5. Workshop zu Smart Factories: Mitarbeiter-zentrierte Informationssysteme für die Zusammenarbeit der Zukunft, Mensch und Computer 2018}},
  location     = {{Dresden}},
  publisher    = {{Gesellschaft für Informatik e.V.}},
  title        = {{{Intelligent Adaptive Assistance Systems in an Industrial Context – Overview of Use Cases and Features}}},
  doi          = {{10.18420/muc2018-ws18-0533}},
  year         = {{2018}},
}

% VARECo workshop paper at Mensch und Computer 2018; no page range present in this record.
@inproceedings{4181,
  abstract     = {{Projection-based Augmented Reality (AR) might change the interactions with digital systems in future work environments. A lot of stationary projection-based AR assistive systems have been presented that might support future work processes. However, not much research has been done beyond stationary settings. With moving towards mobile settings, fast and robust object recognition algorithms are required that allow real-time tracking of physical objects as targets for the projected digital overlay. With this work, we present a portable projection-based AR platform that recognizes objects in real time and overlays physical objects with in-situ projections of digital content. We consider our system as a precursor to a future mobile projection-based assistive system. By presenting the system, we want to start a discussion in the HCI community about the potential of mobile projection-based AR in future work environments.}},
  author       = {{Büttner, Sebastian and Besginow, Andreas and Prilla, Michael and Röcker, Carsten}},
  booktitle    = {{Workshop on Virtual and Augmented Reality in Everyday Context (VARECo), Mensch und Computer 2018}},
  location     = {{Dresden}},
  publisher    = {{Gesellschaft für Informatik e.V.}},
  title        = {{{Mobile Projection-based Augmented Reality in Work Environments – an Exploratory Approach}}},
  doi          = {{10.18420/muc2018-ws07-0364}},
  year         = {{2018}},
}

% NOTE(review): pages 3--47 spans 45 pages, unusually long for a DAPI conference chapter — verify against the published volume.
@inbook{4183,
  abstract     = {{The localization of employees in the industrial environment plays a major role in the development of future intelligent user interfaces and systems. Yet, localizing people also raises ethical, legal and social issues. While a precise localization is essential for context-aware systems and real-time optimization of processes, a permanently high localization accuracy creates opportunities for surveillance and therefore has a negative impact on workplace privacy. In this paper, we propose a new concept of a multi-level localization system which tries to find a way to meet both the technical requirements for a localization with a high accuracy as well as the interests of employees in terms of privacy. Depending on the users’ location, different localization technologies are used, that restrict the accuracy to the least required level by design. Furthermore, we present a prototypical implementation of the concept that shows the feasibility of our multi-level localization concept. Using this system, intelligent systems become able to react on employees based on their location without permanently monitoring the precise user location.}},
  author       = {{Heinz, Mario and Büttner, Sebastian and Wegerich, Martin and Marek, Frank and Röcker, Carsten}},
  booktitle    = {{Distributed, Ambient and Pervasive Interactions: Understanding Humans: 6th International Conference, DAPI 2018, Held as Part of HCI International 2018, Las Vegas, NV, USA, July 15–20, 2018, Proceedings, Part I}},
  editor       = {{Streitz, Norbert and Konomi, Shin’ichi}},
  isbn         = {{978-3-319-91130-4}},
  keywords     = {{Indoor localization, Intelligent user interface, Process planning}},
  location     = {{Las Vegas, NV, USA}},
  pages        = {{3--47}},
  publisher    = {{Springer}},
  title        = {{{A Multi-level Localization System for Intelligent User Interfaces}}},
  doi          = {{10.1007/978-3-319-91131-1_3}},
  volume       = {{10921}},
  year         = {{2018}},
}

% NOTE(review): title reads "Angewandte Forschung Entwicklung" — possibly missing "und" ("Angewandte Forschung und Entwicklung"); confirm against the published version before changing.
@inproceedings{4251,
  abstract     = {{Im Kontext von Industrie 4.0 nimmt die Gestaltung der Mensch-Maschine-Schnittstelle eine herausragende Stellung ein. Folglich müssen neue technologische und menschzentrierte Ansätze zur Unterstützung von Menschen in Smart Factories entwickelt und erforscht werden. In Lemgo wurde zu Forschungs- und Demonstrationszwecken die SmartFactoryOWL errichtet. In diesem Beitrag geben wir einen Überblick über die SmartFactoryOWL und ihren Beitrag zur hiesigen HCI-Forschung. Wir zeigen, wie diese Einrichtung unsere Forschung an neuen Systemen, wie z. B. Systemen zur Informations- und Wissensvernetzung in Smart Factories beiträgt. Weiterhin präsentieren wir in diesem Beitrag AR-Cube, eine durchgängige Lösung für eine Smart-Factory-Lernumgebung, welche die Remote-Interaktion zwischen Produktionssystemen innerhalb und außerhalb der SmartFactoryOWL ermöglicht. Mit der Workshop-Teilnahme wollen wir unsere hier skizzierten Konzepte und Visionen gerne in der HCI-Community diskutieren und die Grundlage für zukünftige Weiterentwicklungen legen.}},
  author       = {{Büttner, Sebastian and Mucha, Henrik and Robert, Sebastian and Hellweg, Fabian and Röcker, Carsten}},
  booktitle    = {{4. Workshop zu Smart Factories: Mitarbeiter-zentrierte Informationssysteme für die Zusammenarbeit der Zukunft, Mensch und Computer 2017}},
  location     = {{Regensburg}},
  publisher    = {{Gesellschaft für Informatik e.V.}},
  title        = {{{HCI in der SmartFactoryOWL – Angewandte Forschung Entwicklung}}},
  doi          = {{10.18420/muc2017-ws04-0389}},
  year         = {{2017}},
}

% Doctoral colloquium contribution at Mensch und Computer 2017; no DOI, pages, or abstract present in this record.
@inproceedings{4252,
  author       = {{Büttner, Sebastian}},
  booktitle    = {{Doktorandenseminar, Mensch und Computer 2017}},
  location     = {{Regensburg}},
  publisher    = {{Gesellschaft für Informatik e.V.}},
  title        = {{{Menschzentrierte Dienste in der Fabrik der Zukunft – ein Framework für adaptive projektionsbasierte AR-Assistenz}}},
  year         = {{2017}},
}

@inproceedings{4254,
  abstract     = {{The current trend of integrating machines and factories into cyber-physical systems (CPS) creates an enormous complexity for operators of such systems. Especially the search for the root cause of cascading failures becomes highly time-consuming. Within this paper, we address the question on how to help human users to better and faster understand root causes of such situations. We propose a concept of interactive alarm flood reduction and present the implementation of a first vertical prototype for such a system. We consider this prototype as a first artifact to be discussed by the research community and aim towards an incremental further development of the system in order to support humans in complex error situations.}},
  author       = {{Büttner, Sebastian and Wunderlich, Paul and Heinz, Mario and Niggemann, Oliver and Röcker, Carsten}},
  booktitle    = {{Machine Learning and Knowledge Extraction: First IFIP TC 5, WG 8.4, 8.9, 12.9 International Cross-Domain Conference, CD-MAKE 2017, Reggio, Italy, August 29 – September 1, 2017, Proceedings}},
  editor       = {{Holzinger, Andreas}},
  isbn         = {{978-3-319-66807-9}},
  keywords     = {{Alarm flood reduction, Machine learning, Assistive system}},
  location     = {{Reggio, Italy}},
  pages        = {{69--82}},
  publisher    = {{Springer}},
  title        = {{{Managing Complexity: Towards Intelligent Error-Handling Assistance Through Interactive Alarm Flood Reduction}}},
  volume       = {{10410}},
  year         = {{2017}},
}

@inproceedings{4255,
  abstract     = {{Increasingly, production processes are enabled and controlled by Information Technology (IT), a development being also referred to as “Industry 4.0”. IT thereby contributes to flexible and adaptive production processes, and in this sense factories become “smart factories”. In line with this, IT also more and more supports human workers via various assistance systems. This support aims to both support workers to better execute their tasks and to reduce the effort and time required when working. However, due to the large spectrum of assistance systems, it is hard to acquire an overview and to select an adequate system for a smart factory based on meaningful criteria. We therefore synthesize a set of comparison criteria into a consistent framework and demonstrate the application of our framework by classifying three examples.}},
  author       = {{Fellmann, Michael and Robert, Sebastian and Büttner, Sebastian and Mucha, Henrik and Röcker, Carsten}},
  booktitle    = {{Machine Learning and Knowledge Extraction: First IFIP TC 5, WG 8.4, 8.9, 12.9 International Cross-Domain Conference, CD-MAKE 2017, Reggio, Italy, August 29 – September 1, 2017, Proceedings}},
  editor       = {{Holzinger, Andreas}},
  isbn         = {{978-3-319-66807-9}},
  keywords     = {{Assistance systems, Smart factory, Production processes}},
  location     = {{Reggio, Italy}},
  pages        = {{59--68}},
  publisher    = {{Springer}},
  title        = {{{Towards a Framework for Assistance Systems to Support Work Processes in Smart Factories}}},
  doi          = {{10.1007/978-3-319-66808-6_5}},
  volume       = {{10410}},
  year         = {{2017}},
}

@inproceedings{4257,
  abstract     = {{The selection of suitable display technologies for industrial augmented reality (AR) applications is becoming increasingly relevant as such applications move from the proof-of-concept to the application stage. To support project managers, designers and developers in the critical selection process we have developed a checklist of important aspects and related evaluation hints that helps to speed up and improve the selection process. The checklist presented in this paper was designed to be useful for both researchers and practitioners. It combines pertinent information from relevant standards like ISO 9241-210 with results from current research literature and experience from several AR projects in industrial contexts. It can be applied both in collaboration with AR experts, where it helps to prepare relevant information for the collaboration and thus streamlines the process, or stand-alone, as a guideline for the evaluation of different options by a design team.}},
  author       = {{Paelke, Volker and Büttner, Sebastian and Mucha, Henrik and Röcker, Carsten}},
  booktitle    = {{Advances in Ergonomics of Manufacturing: Managing the Enterprise of the Future}},
  editor       = {{Trzcielinski, Stefan}},
  isbn         = {{978-3-319-60473-2}},
  issn         = {{2194-5365}},
  keywords     = {{Augmented reality, Displays, Evaluation, Checklist, Context of use, Human-Computer interaction}},
  location     = {{Los Angeles, California, USA}},
  pages        = {{225--234}},
  publisher    = {{Springer}},
  title        = {{{A Checklist-Based Approach for Evaluating Augmented Reality Displays in Industrial Applications}}},
  doi          = {{10.1007/978-3-319-60474-9_21}},
  volume       = {{606}},
  year         = {{2017}},
}

@inproceedings{4258,
  abstract     = {{Research on how to take advantage of Augmented Reality and Virtual Reality applications and technologies in the domain of manufacturing has brought forward a great number of concepts, prototypes, and working systems. Although comprehensive surveys have taken account of the state of the art, the design space of industrial augmented and virtual reality keeps diversifying. We propose a visual approach towards assessing this space and present an interactive, community-driven tool which supports interested researchers and practitioners in gaining an overview of the aforementioned design space. Using such a framework we collected and classified relevant publications in terms of application areas and technology platforms. This tool shall facilitate initial research activities as well as the identification of research opportunities. Thus, we lay the groundwork, forthcoming workshops and discussions shall address the refinement.}},
  author       = {{Büttner, Sebastian and Mucha, Henrik and Funk, Markus and Kosch, Thomas and Aehnelt, Mario and Robert, Sebastian and Röcker, Carsten}},
  booktitle    = {{10th ACM International Conference on PErvasive Technologies Related to Assistive Environments (PETRA '17)}},
  keywords     = {{Applied computing, Operations research, Human-centered computing, Human computer interaction (HCI), Interaction paradigms}},
  location     = {{Rhodes, Greece}},
  pages        = {{433--440}},
  publisher    = {{ACM}},
  title        = {{{The Design Space of Augmented and Virtual Reality Applications for Assistive Environments in Manufacturing: A Visual Approach}}},
  doi          = {{10.1145/3056540.3076193}},
  year         = {{2017}},
}

@inproceedings{4259,
  abstract     = {{This paper presents a prototype of an intelligent assistive system for workers in stationary manual assembly using projection-based augmented reality (AR) and intelligent hand tracking. By using depth cameras, the system can track the hands of the user and makes the user aware of wrong picking actions or errors in the assembly process. The system automatically adapts the digital projection-based overlay according to the current work situation. The main research contribution of our work is the presentation of a novel hand-tracking algorithm. In addition, we present the results of an user study of the system that shows the challenges and opportunities of our system and the hand-tracking algorithm in particular. We assume that our results will inform the future design of assistive systems in manual assembly.}},
  author       = {{Büttner, Sebastian and Sand, Oliver and Röcker, Carsten}},
  booktitle    = {{European Conference on Ambient Intelligence}},
  isbn         = {{978-3-319-56996-3}},
  keywords     = {{Augmented reality, Mobile projection, Hand tracking, Manufacturing, Industry 4.0}},
  location     = {{Malaga, Spain}},
  pages        = {{33--45}},
  publisher    = {{Springer}},
  title        = {{{Exploring Design Opportunities for Intelligent Worker Assistance: A New Approach Using Projection-Based AR and a Novel Hand-Tracking Algorithm}}},
  doi          = {{10.1007/978-3-319-56997-0_3}},
  volume       = {{10217}},
  year         = {{2017}},
}

@incollection{4298,
  abstract     = {{In this paper, we present the current state-of-the-art of decision making (DM) and machine learning (ML) and bridge the two research domains to create an integrated approach of complex problem solving based on human and computational agents. We present a novel classification of ML, emphasizing the human-in-the-loop in interactive ML (iML) and more specific on collaborative interactive ML (ciML), which we understand as a deep integrated version of iML, where humans and algorithms work hand in hand to solve complex problems. Both humans and computers have specific strengths and weaknesses and integrating humans into machine learning processes might be a very efficient way for tackling problems. This approach bears immense research potential for various domains, e.g., in health informatics or in industrial applications. We outline open questions and name future challenges that have to be addressed by the research community to enable the use of collaborative interactive machine learning for problem solving in a large scale.}},
  author       = {{Robert, Sebastian and Büttner, Sebastian and Röcker, Carsten and Holzinger, Andreas}},
  booktitle    = {{Machine Learning for Health Informatics: State-of-the-Art and Future Challenges}},
  editor       = {{Holzinger, Andreas}},
  isbn         = {{978-3-319-50477-3}},
  keywords     = {{Decision making, Reasoning, Interactive machine learning, Collaborative interactive machine learning}},
  pages        = {{357--376}},
  publisher    = {{Springer}},
  title        = {{{Reasoning Under Uncertainty: Towards Collaborative Interactive Machine Learning}}},
  doi          = {{10.1007/978-3-319-50478-0_18}},
  volume       = {{9605}},
  year         = {{2016}},
}

@inproceedings{4299,
  abstract     = {{With the advances of technology, intelligent assistive systems that adapt to individual humans will become feasible. However, application areas of such systems are rarely discussed within the Industry 4.0 community. Most assistive systems that have been presented have been developed for specific task, such as assembly support or warehouse picking, but there might be a huge uncovered design space to be explored. With this work, we want to step back from existing systems and analyze the design opportunities of assistive systems especially for small and medium-size enterprises (SME). To achieve this goal we conducted a study in four SME, consisting of observations and interviews. Here we present our findings about the potential future application areas of human-centered assistive systems.}},
  author       = {{Mucha, Henrik and Büttner, Sebastian and Röcker, Carsten}},
  booktitle    = {{Human-Computer Interaction – Perspectives on Industry 4.0. Workshop at i-KNOW 2016}},
  keywords     = {{Industry 4.0, Assistive Systems, Small and Medium-Sized Enterprises, SME, Human-Centered Design, Human-Computer Interaction for Industry}},
  location     = {{Graz, Austria}},
  publisher    = {{TU}},
  title        = {{{Application Areas for Human-Centered Assistive Systems}}},
  year         = {{2016}},
}

@inproceedings{4300,
  author       = {{Büttner, Sebastian and Röcker, Carsten}},
  title        = {{{Applying Human-Centered Design Methods in Industry – a Field Report}}},
  abstract     = {{Human-centered design methods are nowadays widely used in the design of consumer products. These methods aim at designing products with a high usability and a positive user experience (UX). However, in the domain of the design of industrial machines, design is often driven by functional requirements mostly neglecting the usability and user experience of products. Together with a medium-sized manufacturer of industrial laundry machines we applied the human-centered design process in an industrial context. In this field report, we describe the human-centered design methods applied in the project, the adaptations we had to make in the process and the challenges and opportunities for applying human-centered design in an industrial environment in general. }},
  booktitle    = {{Human-Computer Interaction – Perspectives on Industry 4.0. Workshop at i-KNOW 2016}},
  keywords     = {{Design Methods, Human-Centered Design, Industrial Human-Computer Interaction, Industry 4.0}},
  location     = {{Graz, Austria}},
  publisher    = {{TU}},
  year         = {{2016}},
}

@inproceedings{4302,
  abstract     = {{In this paper we present smARt.assembly – a projection-based augmented reality (AR) assembly assistance system for industrial applications. Our system projects digital guidance information in terms of picking information and assembly data into the physical workspace of a user. By using projections, we eliminate the use of smart glasses that have drawbacks such as a limited field of view or low wearing comfort. With smARt.assembly, users are able to assemble products without previous knowledge and without any other assistance.}},
  author       = {{Sand, Oliver and Büttner, Sebastian and Paelke, Volker and Röcker, Carsten}},
  booktitle    = {{18th International Conference on Human-Computer Interaction (HCII '16)}},
  editor       = {{Lackey, Stefanie and Shumaker, Randall}},
  isbn         = {{978-3-319-39906-5}},
  keywords     = {{Augmented reality, Projection, Assembly work, Manual assembly}},
  location     = {{Toronto, Canada}},
  pages        = {{643--652}},
  publisher    = {{Springer}},
  title        = {{{smARt.Assembly: Projection-Based Augmented Reality for Supporting Assembly Workers}}},
  doi          = {{10.1007/978-3-319-39907-2_61}},
  volume       = {{9740}},
  year         = {{2016}},
}

@inproceedings{4303,
  abstract     = {{The increasing demand to customize products affects production workers in many industries, as assembly tasks become more complex due to higher product variety. Assistive systems providing instructions at the workplace have been proposed to overcome increasing cognitive demand during assembly tasks. Commercially available assistive systems provide spatially registered instructions, either by using in-situ projections or head-mounted displays (HMDs). As there is little empirical knowledge about the individual advantages and disadvantages of both approaches, we are interested in comparing both types of systems. Through a user study at a manual assembly workplace, we compare both approaches to a paper baseline. Our results reveal that both in-situ instructions and paper instructions lead to significantly faster task completion times and significantly fewer errors than HMDs. Using additional questionnaires and interviews, we are able to identify the shortcomings of HMD-based instructions and discuss the possibilities of using flexible in-situ instructions for worker assistance.}},
  author       = {{Büttner, Sebastian and Funk, Markus and Sand, Oliver and Röcker, Carsten}},
  booktitle    = {{9th ACM International Conference on PErvasive Technologies Related to Assistive Environments (PETRA '16)}},
  isbn         = {{978-1-4503-4337-4}},
  keywords     = {{Spatial Augmented Reality, Industrial Augmented Reality, Projection-based Augmented Reality, Head-Mounted Display, Manufacturing, Assistive System}},
  location     = {{Corfu, Greece}},
  pages        = {{1--8}},
  publisher    = {{ACM}},
  title        = {{{Using Head-Mounted Displays and In-Situ Projection for Assistive Systems: A Comparison}}},
  doi          = {{10.1145/2910674.2910679}},
  year         = {{2016}},
}

@inproceedings{4304,
  author       = {{Behlen, Manuel and Büttner, Sebastian and Schmidt, Sebastian and Pyritz, Sarah and Röcker, Carsten}},
  booktitle    = {{Automation 2016 : secure & reliable in the digital world : 17. Branchentreff der Mess- und Automatisierungstechnik : Kongresshaus Baden-Baden, 07. und 08. Juni 2016 }},
  isbn         = {{978-3-18-092284-3}},
  location     = {{Baden-Baden}},
  pages        = {{169--1970}},
  internal-note = {{NOTE(review): pages value 169--1970 looks like a typo (perhaps 169--170); verify against VDI-Berichte 2284 before fixing}},
  publisher    = {{VDI}},
  title        = {{{Multitouch im industriellen Umfeld: Evaluierung bestehender Systeme, identifizierte Anwendungsszenarien und Handlungsempfehlungen für zukünftige Systeme}}},
  volume       = {{2284}},
  year         = {{2016}},
}

@inproceedings{687,
  author       = {{Büttner, Sebastian and Sand, Oliver and Röcker, Carsten}},
  booktitle    = {{Proceedings of the 17th International Conference on Human-Computer Interaction with Mobile Devices and Services Adjunct}},
  isbn         = {{978-1-4503-3653-6}},
  keywords     = {{Augmented Reality, Manufacturing, Mobile Projection}},
  location     = {{Copenhagen, Denmark}},
  pages        = {{1130--1133}},
  publisher    = {{ACM}},
  title        = {{{Extending the Design Space in Industrial Manufacturing Through Mobile Projection}}},
  doi          = {{10.1145/2786567.2794342}},
  year         = {{2015}},
}

@article{688,
  abstract     = {{In this paper, we analyze the specific requirements of interacting with cyber-physical systems and propose a design approach that is driven by user needs and makes use of an expanded toolbox that contains state-of-the-art interaction technologies including Smart Glasses and Wearables. We present several examples of assistance systems in industrial production that use these interaction technologies and discuss the corresponding usability and implementation aspects.}},
  author       = {{Paelke, Volker and Röcker, Carsten and Koch, Nils and Flatt, Holger and Büttner, Sebastian}},
  issn         = {{2196-677X}},
  journal      = {{at - Automatisierungstechnik}},
  keywords     = {{User centred design, user interfaces, user experience, cyber-physical systems, smart glasses, wearables}},
  number       = {{10}},
  pages        = {{833--843}},
  publisher    = {{De Gruyter Oldenbourg}},
  title        = {{{User Interfaces for Cyber-Physical Systems : Expanding the Designer’s Toolbox}}},
  doi          = {{10.1515/auto-2015-0016}},
  volume       = {{63}},
  year         = {{2015}},
}

