@misc{8824,
  author       = {Müller, Dustin},
  keywords     = {Metaverse, Digitalisierung, Virtual Reality, Augmented Reality},
  pages        = {43},
  publisher    = {Technische Hochschule Ostwestfalen-Lippe},
  title        = {{Metaverse} -- wie sich die {Welt} digital und virtuell verändert ({Chancen} und {Risiken} des {Metaverse})},
  year         = {2022},
}

@article{9162,
  abstract     = {The German manufacturing industry has been carrying out new developments towards the next industrial revolution, focusing on smart manufacturing environments. Our work emphasizes human-centered control rooms in the context of production plants. Increased automation does not have to come with less human control. Therefore, we report on multimodal collaborative interaction techniques to augment industrial control rooms. In particular, we include mobile workers who use the control room while being in the production hall using tablets or specifically mixed reality glasses. Collaborative annotation dashboards support discussions and a shared understanding among analysts. Manufacturing-related data can be integrated into business analytics environments so that holistic analyses can be performed. Multimodal interaction techniques can support effective interaction with the control room based on the users’ preferences. Immersive experience through mixed reality-based three-dimensional visualizations and interaction possibilities support users in obtaining a clear understanding of the underlying data.},
  author       = {Rubart, Jessica and Grimm, Valentin and Potthast, Jonas},
  journal      = {Future Internet},
  issn         = {1999-5903},
  keywords     = {control room, multimodal interaction, augmented reality, mixed reality},
  number       = {8},
  pages        = {1--18},
  publisher    = {MDPI},
  title        = {Augmenting Industrial Control Rooms with Multimodal Collaborative Interaction Techniques},
  doi          = {10.3390/fi14080224},
  volume       = {14},
  year         = {2022},
}

@inproceedings{4102,
  abstract     = {Complexity is a fundamental part of product design and manufacturing today, owing to increased demands for customization and advances in digital design techniques. Assembling and repairing such an enormous variety of components means that workers are cognitively challenged, take longer to search for the relevant information and are prone to making mistakes. Although in recent years deep learning approaches to object recognition have seen rapid advances, the combined potential of deep learning and augmented reality in the industrial domain remains relatively under explored. In this paper we introduce AR-ProMO, a combined hardware/software solution that provides a generalizable assistance system for identifying mistakes during product assembly and repair.},
  author       = {Dhiman, Hitesh and Büttner, Sebastian and Röcker, Carsten and Reisch, Raphael},
  booktitle    = {Proceedings of the 31st Australian Conference on Human-Computer-Interaction (OzCHI'19): 2nd Dec.-5th Dec. 2019, Perth/Fremantle, WA, Australia},
  isbn         = {978-1-4503-7696-9},
  keywords     = {Augmented Reality, Deep Learning},
  location     = {Perth/Fremantle, WA, Australia},
  pages        = {518--522},
  publisher    = {ACM},
  title        = {Handling Work Complexity with {AR}/Deep Learning},
  doi          = {10.1145/3369457.3370919},
  year         = {2019},
}

@inproceedings{4311,
  abstract     = {Recent trends towards digitization in the industrial domain are also driving profound socio-technical changes. On the one hand, these technologies enable shorter product lifecycles and servitization, but on the other hand, the increasing technical complexity of the equipment makes its operation and maintenance a challenge for workers. Assistance systems using pervasive technologies can bridge the gap between the abilities of the workers and the demands of handling technical complexity by enriching workplace activities with relevant, context-dependent information. In this paper, we present an application that replaces a conventional, paper-based maintenance manual with digital, Augmented Reality based instructions that are delivered at the appropriate place and time.},
  author       = {Dhiman, Hitesh and Röcker, Carsten},
  booktitle    = {2019 IEEE International Conference on Pervasive Computing and Communications Workshops (PerCom Workshops)},
  isbn         = {978-1-5386-9151-9},
  keywords     = {Industry 4.0, Cyber Physical Systems, Augmented Reality, Complexity, Maintenance, HoloLens},
  location     = {Kyoto, Japan},
  pages        = {95--100},
  publisher    = {IEEE},
  title        = {Worker Assistance in Smart Production Environments using Pervasive Technologies},
  doi          = {10.1109/PERCOMW.2019.8730771},
  year         = {2019},
}

@inproceedings{4312,
  abstract     = {Computer-aided assistance systems are entering the world of work and production. Such systems utilize augmented- and virtual-reality for operator training and live guidance as well as mobile maintenance and support. This is particularly important in the modern production reality of ever-changing products and `lot size one' customization of production. This paper focuses on the application of machine learning approach to extend the functionality of assistance systems. Machine learning provides tools to analyse large amounts of data and extract meaningful information. The goal here is to recognize the movement of an operator which would enable automatic display of instructions relevant to them. We present the challenges facing machine learning applications in human-centered assistance systems and a framework to assess machine learning approaches feasible for this scenario. The approach is assessed on a historical data set and then deployed in a work station for live testing. The post-hoc, or historical, analysis yields promising results. The ad-hoc, or live, analysis is a complex task and the results are affected by multiple factors, most of which are introduced by the human influence. The contribution of this paper is an approach to adapt state- of-the-art machine learning to operator movement recognition with a special focus on approaches to spatial time series data pre-processing. Presented experiment results validate the approach and show that it performs well in a real-world scenario.},
  author       = {Fullen, Marta and Maier, Alexander and Nazarenko, Arthur and Jenderny, Sascha and Röcker, Carsten},
  booktitle    = {2019 IEEE 17th International Conference on Industrial Informatics (INDIN)},
  isbn         = {978-1-7281-2927-3},
  issn         = {2378-363X},
  keywords     = {augmented reality, computer based training, data handling, industrial training, learning (artificial intelligence), time series},
  location     = {Helsinki, Finland},
  pages        = {296--302},
  publisher    = {IEEE},
  title        = {Machine Learning for Assistance Systems: Pattern-Based Approach to Online Step Recognition},
  doi          = {10.1109/INDIN41052.2019.8972122},
  year         = {2019},
}

@inproceedings{4318,
  abstract     = {Recent advances in the field of industrial digitization and automation lead to an increasing need for assistance systems to support workers in various fields of activity, such as assembly, logistics and maintenance. Current assistance systems for the maintenance area are usually based on a single visualization technology. However, in our view, this is not practicable in terms of real activities, as these operations involve various subtasks for which different interaction concepts would be advantageous. Therefore, in this paper, we propose a concept for a multi-device assistive system, which combines multiple devices to provide workers with relevant information over different subtasks of a maintenance operation and present our first prototype for such a system.},
  author       = {Heinz, Mario and Dhiman, Hitesh and Röcker, Carsten},
  booktitle    = {Machine Learning and Knowledge Extraction: Second IFIP TC 5, TC 8/WG 8.4, 8.9, TC 12/WG 12.9 International Cross-Domain Conference, CD-MAKE 2018},
  editor       = {Holzinger, Andreas and Kieseberg, Peter and Tjoa, A Min and Weippl, Edgar},
  isbn         = {978-3-319-99739-1},
  keywords     = {Human-machine-interaction, Multimodal feedback, Assistive systems, Augmented-reality, Smart factory},
  location     = {Hamburg},
  pages        = {239--247},
  publisher    = {Springer},
  title        = {A Multi-Device Assistive System for Industrial Maintenance Operations},
  doi          = {10.1007/978-3-319-99740-7_16},
  volume       = {11015},
  year         = {2018},
}

@inproceedings{4322,
  abstract     = {This paper presents a test platform for the systematic evaluation of head-mounted displays (HMDs). The focus is on an augmented reality (AR) test application for assembly tasks, which supports tests that are flexible in terms of complexity and scope, thus enabling the realistic assessment of usability, comfort and ergonomics by the test users.},
  author       = {Paelke, Volker and Bulk, Jendrik and Röcker, Carsten},
  booktitle    = {International Conference on Applied Human Factors and Ergonomics},
  isbn         = {978-3-319-94195-0},
  keywords     = {Head-Mounted Displays (HMDs), Augmented Reality (AR), Test platform, Evaluation, Assembly},
  location     = {Orlando, Florida, USA},
  pages        = {25--35},
  publisher    = {Springer},
  title        = {A Test Platform for the Evaluation of Augmented Reality Head Mounted Displays in Industrial Applications},
  doi          = {10.1007/978-3-319-94196-7_3},
  volume       = {793},
  year         = {2018},
}

@inproceedings{4324,
  abstract     = {On the long term, the current wave of digitization and automation in the industrial environment will result in a progressively higher complexity and heterogeneity in the industrial environment. In this context, a growing need arises for the development of digital assistance systems to support workers in various fields of activities. Current systems are generally limited to visualizations and visual feedback. Therefore, in the scope of this paper, we take a look at the major challenges and opportunities for the integration of multimodal feedback systems in today’s and future industrial environments. It shows that the integration of multimodal feedback is subject to a complex combination of technical, user-centric and legal aspects.},
  author       = {Heinz, Mario and Röcker, Carsten},
  booktitle    = {Machine Learning and Knowledge Extraction: Second IFIP TC 5, TC 8/WG 8.4, 8.9, TC 12/WG 12.9 International Cross-Domain Conference, CD-MAKE 2018},
  editor       = {Holzinger, Andreas and Kieseberg, Peter and Tjoa, A Min and Weippl, Edgar},
  isbn         = {978-3-319-99739-1},
  keywords     = {Human-machine-interaction, Multimodal feedback, Assistive systems, Augmented-reality, Smart factory},
  location     = {Hamburg},
  publisher    = {Springer},
  title        = {Feedback Presentation for Workers in Industrial Environments -- Challenges and Opportunities},
  doi          = {10.1007/978-3-319-99740-7_17},
  volume       = {11015},
  year         = {2018},
}

@inproceedings{4257,
  abstract     = {The selection of suitable display technologies for industrial augmented reality (AR) applications is becoming increasingly relevant as such applications move from the proof-of-concept to the application stage. To support project managers, designers and developers in the critical selection process we have developed a checklist of important aspects and related evaluation hints that helps to speed up and improve the selection process. The checklist presented in this paper was designed to be useful for both researchers and practitioners. It combines pertinent information from relevant standards like ISO 9241-210 with results from current research literature and experience from several AR projects in industrial contexts. It can be applied both in collaboration with AR experts, where it helps to prepare relevant information for the collaboration and thus streamlines the process, or stand-alone, as a guideline for the evaluation of different options by a design team.},
  author       = {Paelke, Volker and Büttner, Sebastian and Mucha, Henrik and Röcker, Carsten},
  booktitle    = {Advances in Ergonomics of Manufacturing: Managing the Enterprise of the Future},
  editor       = {Trzcielinski, Stefan},
  isbn         = {978-3-319-60473-2},
  issn         = {2194-5365},
  keywords     = {Augmented reality, Displays, Evaluation, Checklist, Context of use, Human-Computer interaction},
  location     = {Los Angeles, California, USA},
  pages        = {225--234},
  publisher    = {Springer},
  title        = {A Checklist-Based Approach for Evaluating Augmented Reality Displays in Industrial Applications},
  doi          = {10.1007/978-3-319-60474-9_21},
  volume       = {606},
  year         = {2017},
}

@inproceedings{4259,
  abstract     = {This paper presents a prototype of an intelligent assistive system for workers in stationary manual assembly using projection-based augmented reality (AR) and intelligent hand tracking. By using depth cameras, the system can track the hands of the user and makes the user aware of wrong picking actions or errors in the assembly process. The system automatically adapts the digital projection-based overlay according to the current work situation. The main research contribution of our work is the presentation of a novel hand-tracking algorithm. In addition, we present the results of an user study of the system that shows the challenges and opportunities of our system and the hand-tracking algorithm in particular. We assume that our results will inform the future design of assistive systems in manual assembly.},
  author       = {Büttner, Sebastian and Sand, Oliver and Röcker, Carsten},
  booktitle    = {European Conference on Ambient Intelligence},
  isbn         = {978-3-319-56996-3},
  keywords     = {Augmented reality, Mobile projection, Hand tracking, Manufacturing, Industry 4.0},
  location     = {Malaga, Spain},
  pages        = {33--45},
  publisher    = {Springer},
  title        = {Exploring Design Opportunities for Intelligent Worker Assistance: A New Approach Using Projection-Based {AR} and a Novel Hand-Tracking Algorithm},
  doi          = {10.1007/978-3-319-56997-0_3},
  volume       = {10217},
  year         = {2017},
}

@inproceedings{4302,
  abstract     = {In this paper we present smARt.assembly – a projection-based augmented reality (AR) assembly assistance system for industrial applications. Our system projects digital guidance information in terms of picking information and assembly data into the physical workspace of a user. By using projections, we eliminate the use of smart glasses that have drawbacks such as a limited field of view or low wearing comfort. With smARt.assembly, users are able to assemble products without previous knowledge and without any other assistance.},
  author       = {Sand, Oliver and Büttner, Sebastian and Paelke, Volker and Röcker, Carsten},
  booktitle    = {18th International Conference on Human-Computer Interaction (HCII '16)},
  editor       = {Lackey, Stefanie and Shumaker, Randall},
  isbn         = {978-3-319-39906-5},
  keywords     = {Augmented reality, Projection, Assembly work, Manual assembly},
  location     = {Toronto, Canada},
  pages        = {643--652},
  publisher    = {Springer},
  title        = {{smARt.Assembly}: Projection-Based Augmented Reality for Supporting Assembly Workers},
  doi          = {10.1007/978-3-319-39907-2_61},
  volume       = {9740},
  year         = {2016},
}

@inproceedings{4303,
  abstract     = {The increasing demand to customize products affects production workers in many industries, as assembly tasks become more complex due to higher product variety. Assistive systems providing instructions at the workplace have been proposed to overcome increasing cognitive demand during assembly tasks. Commercially available assistive systems provide spatially registered instructions, either by using in-situ projections or head-mounted displays (HMDs). As there is little empirical knowledge about the individual advantages and disadvantages of both approaches, we are interested in comparing both types of systems. Through a user study at a manual assembly workplace, we compare both approaches to a paper baseline. Our results reveal that both in-situ instructions and paper instructions lead to significantly faster task completion times and significantly fewer errors than HMDs. Using additional questionnaires and interviews, we are able to identify the shortcomings of HMD-based instructions and discuss the possibilities of using flexible in-situ instructions for worker assistance.},
  author       = {Büttner, Sebastian and Funk, Markus and Sand, Oliver and Röcker, Carsten},
  booktitle    = {9th ACM International Conference on PErvasive Technologies Related to Assistive Environments (PETRA '16)},
  isbn         = {978-1-4503-4337-4},
  keywords     = {Spatial Augmented Reality, Industrial Augmented Reality, Projection-based Augmented Reality, Head-Mounted Display, Manufacturing, Assistive System},
  location     = {Corfu, Greece},
  pages        = {1--8},
  publisher    = {ACM},
  title        = {Using Head-Mounted Displays and In-Situ Projection for Assistive Systems: A Comparison},
  doi          = {10.1145/2910674.2910679},
  year         = {2016},
}

@inproceedings{4329,
  abstract     = {The term Industrie 4.0 carries the vision of smart factories, which automatically adapt to changes and assist the human as much as possible during operation and maintenance. This includes smart human machine interfaces, which reduce the chances of errors and help to make the right decisions. This paper presents an approach to equip the maintenance software running on a tablet PC with augmented reality functionality to be able to place virtual sticky notes at production modules. Additionally, these sticky notes are enriched with position information. The central element of this approach is an ontology-based context-aware framework, which aggregates and processes data from different sources. As a result, a tablet PC application was implemented, which allows displaying maintenance information as well as live plant process data in the form of augmented reality. More than 100 of those sticky notes can be placed using this system, whereas each note requires a file size of 12 to 16 kilo bytes. After placing a sticky note, the system recognizes it even if the camera's position is not exactly the same as during the placing process.},
  author       = {Flatt, Holger and Koch, Nils and Guenter, Andrei and Röcker, Carsten and Jasperneite, Jürgen},
  booktitle    = {2015 IEEE 20th Conference on Emerging Technologies \& Factory Automation (ETFA)},
  keywords     = {Maintenance engineering, Augmented reality, Context, Context modeling, Production facilities, Cameras},
  location     = {Luxembourg, Luxembourg},
  publisher    = {IEEE},
  title        = {A Context-Aware Assistance System for Maintenance Applications in Smart Factories based on Augmented Reality and Indoor Localization},
  doi          = {10.1109/ETFA.2015.7301586},
  year         = {2015},
}

@inproceedings{4330,
  abstract     = {Catchwords such as “Cyber-Physical-Systems” and “Industry 4.0” describe the current development of systems with embedded intelligence. These systems can be characterized by an increasing technical complexity that must be addressed in the user interface. In this paper we analyze the specific requirements posed by the interaction with cyber-physical-systems, present a coordinated approach to these requirements and illustrate our approach with a practical example of an assistance system for assembly workers in an industrial production environment.},
  author       = {Paelke, Volker and Röcker, Carsten},
  booktitle    = {Design, User Experience, and Usability: Design Discourse},
  isbn         = {978-3-319-20885-5},
  keywords     = {Industrial IT, User-Centered design, Usability, User interfaces, Cyber-Physical-Systems, Industry 4.0, Augmented reality, Development processes and methods},
  location     = {Los Angeles, CA, USA},
  pages        = {75--85},
  publisher    = {Springer},
  title        = {User Interfaces for Cyber-Physical Systems: Challenges and Possible Approaches},
  doi          = {10.1007/978-3-319-20886-2_8},
  volume       = {9186},
  year         = {2015},
}

@inproceedings{687,
  author       = {Büttner, Sebastian and Sand, Oliver and Röcker, Carsten},
  booktitle    = {Proceedings of the 17th International Conference on Human-Computer Interaction with Mobile Devices and Services Adjunct},
  isbn         = {978-1-4503-3653-6},
  keywords     = {Augmented Reality, Manufacturing, Mobile Projection},
  location     = {Copenhagen, Denmark},
  pages        = {1130--1133},
  publisher    = {ACM},
  title        = {Extending the Design Space in Industrial Manufacturing Through Mobile Projection},
  doi          = {10.1145/2786567.2794342},
  year         = {2015},
}

