@inproceedings{13334,
  abstract     = {{Retrieval-augmented generation (RAG) based on large language models (LLMs) has established itself as a key technology for combining domain-specific information with generative language skills, thereby providing transparent, up-to-date information. Many firms are already piloting such LLM-based information systems, but report a high degree of complexity in planning and implementation. A generally accepted regulatory framework that consistently maps key decisions is not yet available to companies. This article therefore presents a multi-level system that organizes design decisions throughout the configuration process. This framework is intended to support users in the planning, realizing, evaluation, and further development of an LLM-based information system. To achieve this goal, a qualitative-empirical research design was chosen. First, publications from the period 2022 to 2025 were identified and selected using a systematic literature search in accordance with the PRISMA guideline. The selected publications were then evaluated using a qualitative content analysis. The result is a system that was reviewed, revised and finalized at an expert workshop.}},
  author       = {{Ullrich, Dominik and Wallys, Jens and Hinrichsen, Sven}},
  booktitle    = {{Intelligent Human Systems Integration (IHSI 2026): Disruptive and Innovative Technologies}},
  editor       = {{Ahram, Tareq and Karwowski, Waldemar and Giraldi, Laura and Benelli, Elisabetta}},
  isbn         = {{978-1-964867-76-2}},
  issn         = {{2771-0718}},
  keywords     = {{Retrieval-Augmented Generation, LLM-Based Information System, Conceptual Framework}},
  location     = {{Florence}},
  pages        = {{63--73}},
  publisher    = {{AHFE International}},
  title        = {{{Conceptual Framework for Designing Domain-Specific LLM-Based Information Systems}}},
  doi          = {{10.54941/ahfe1007065}},
  volume       = {{200}},
  year         = {{2026}},
}

@inproceedings{13291,
  abstract     = {{The application of Large Language Models (LLMs) for the automated generation of assembly instructions shows significant potential for improving work preparation in production processes. However, challenges remain regarding the overall information quality and precision of the generated instructions. In light of these challenges, this study explores how the information quality of automatically generated assembly instructions can be enhanced through the targeted provision of structured input data, such as Assembly and Quantity BOMs (Bills of Materials), as well as the use of optimized prompt chaining techniques. The methodology employs ChatGPT-4o in combination with Retrieval Augmented Generation (RAG) within the Microsoft Azure environment. The results demonstrate that structured data inputs, particularly the use of Assembly BOMs with defined Tool-to-Component relations, significantly improve the precision and relevance of the generated instructions. Despite these advancements, achieving consistent information quality remains a barrier to broader practical implementation. Therefore, feedback loops should be integrated into the assembly instruction generation process to ensure continuous refinement and reliability. Future research should investigate the use of RAG or similar frameworks, focusing on optimizing data structures and implementing feedback mechanisms to enhance the automated generation of assembly instructions.}},
  author       = {{Herbort, Robin and Green, Dominik and Hinrichsen, Sven}},
  booktitle    = {{Intelligent Human Systems Integration (IHSI 2025): Integrating People and Intelligent Systems}},
  editor       = {{Ahram, Tareq and Karwowski, Waldemar and Martino, Carlo and Di Bucchianico, Giuseppe and Maselli, Vincenzo}},
  isbn         = {{978-1-964867-36-6}},
  issn         = {{2771-0718}},
  keywords     = {{Assembly Instruction, Retrieval Augmented Generation (RAG), Large Language Model (LLM)}},
  location     = {{Rome, Italy}},
  pages        = {{765--775}},
  publisher    = {{AHFE}},
  title        = {{{Automatic Creation of Assembly Instructions by Using Retrieval Augmented Generation}}},
  doi          = {{10.54941/ahfe1005883}},
  volume       = {{160}},
  year         = {{2025}},
}

@inproceedings{13292,
  abstract     = {{The application of Large Language Models (LLMs) for the automated generation of assembly instructions shows significant potential for improving work preparation in production processes. However, challenges remain regarding the overall information quality and precision of the generated instructions. In light of these challenges, this study explores how the information quality of automatically generated assembly instructions can be enhanced through the targeted provision of structured input data, such as Assembly and Quantity BOMs (Bills of Materials), as well as the use of optimized prompt chaining techniques. The methodology employs ChatGPT-4o in combination with Retrieval Augmented Generation (RAG) within the Microsoft Azure environment. The results demonstrate that structured data inputs, particularly the use of Assembly BOMs with defined Tool-to-Component relations, significantly improve the precision and relevance of the generated instructions. Despite these advancements, achieving consistent information quality remains a barrier to broader practical implementation. Therefore, feedback loops should be integrated into the assembly instruction generation process to ensure continuous refinement and reliability. Future research should investigate the use of RAG or similar frameworks, focusing on optimizing data structures and implementing feedback mechanisms to enhance the automated generation of assembly instructions.}},
  author       = {{Herbort, Robin and Green, Dominik and Hinrichsen, Sven}},
  booktitle    = {{Intelligent Human Systems Integration (IHSI 2025): Integrating People and Intelligent Systems}},
  editor       = {{Ahram, Tareq and Karwowski, Waldemar and Martino, Carlo and Di Bucchianico, Giuseppe and Maselli, Vincenzo}},
  isbn         = {{978-1-964867-36-6}},
  issn         = {{2771-0718}},
  keywords     = {{Retrieval Augmented Generation, Large Language Model, Assembly Instructions}},
  location     = {{Rome}},
  pages        = {{765--775}},
  publisher    = {{AHFE}},
  internal-note = {{NOTE(review): apparent duplicate of entry 13291 (identical DOI, title, authors); pages taken from that entry -- confirm and consider merging the two records}},
  title        = {{{Automatic Creation of Assembly Instructions by Using Retrieval Augmented Generation}}},
  doi          = {{10.54941/ahfe1005883}},
  volume       = {{160}},
  year         = {{2025}},
}

@inproceedings{13293,
  abstract     = {{The performance of large language models (LLMs) has improved significantly in recent years, with the result that they are now used in many companies in various industries. However, the design of a company-specific information system involving an LLM is associated with a large number of decisions. This leads to a high level of complexity in the design task. Against this background, companies need a structured approach that methodically supports the planning, development, implementation and long-term maintenance of LLM-based information systems so that domain- and company-specific requirements are taken into account as a result. This article therefore describes a method that supports the design, introduction and maintenance process of an LLM-based information system. The method consists of a process model and a list of design principles, which are also referred to as success factors. The process model developed is based on the proven six-stage REFA planning system. To identify and describe success factors, a systematic literature search was carried out. Based on an analysis of the contents of individual literature sources, success factors for the design of LLM-based information systems were identified. These success factors relate, for example, to the quality of the data provided, data security, user-centered system design and feedback mechanisms for improving information output.}},
  author       = {{Hinrichsen, Sven and Herbort, Robin and Green, Dominik and Adrian, Benjamin}},
  booktitle    = {{Human Interaction and Emerging Technologies (IHIET 2025)}},
  editor       = {{Ahram, Tareq and Motschnig, Renate}},
  isbn         = {{978-1-964867-73-1}},
  issn         = {{2771-0718}},
  keywords     = {{Large language model, Information system, Retrieval augmented generation}},
  location     = {{Vienna}},
  publisher    = {{AHFE}},
  title        = {{{How to Design an Operation-Specific LLM-Based Information System}}},
  doi          = {{10.54941/ahfe1006709}},
  volume       = {{197}},
  year         = {{2025}},
}

@article{12991,
  abstract     = {{Introduction: This study examines the perception of presence among students using virtual reality (VR) compared to iPads. The research aimed to provide deeper insights into students' immersive experiences and identify factors influencing perceived presence.

Method and results: Using a comparative approach, we show a significant difference between the two groups, with students using VR reporting a heightened sense of immersion. Additionally, participant's previous experience with immersive VR affect the presence significantly, while we report no detectable effects of age and gender.

Discussion: These findings contribute to the discussion on innovative teaching methods, supporting the development of more effective and inclusive virtual learning environments.}},
  author       = {{Austermann, Christine and von Blanckenburg, Florin and von Blanckenburg, Korbinian and Utesch, Till}},
  journal      = {{Frontiers in Education}},
  issn         = {{2504-284X}},
  keywords     = {{virtual reality (VR), presence perception, immersion, learning environment, classroom experiment}},
  publisher    = {{Frontiers Media}},
  title        = {{{Exploring the impact of virtual reality on presence: findings from a classroom experiment}}},
  doi          = {{10.3389/feduc.2025.1560626}},
  volume       = {{10}},
  year         = {{2025}},
}

@misc{8824,
  author       = {{Müller, Dustin}},
  keywords     = {{Metaverse, Digitalisierung, Virtual Reality, Augmented Reality}},
  internal-note = {{NOTE(review): publisher is a university and no booktitle/journal is given -- this looks like a student thesis; consider @mastersthesis (or biblatex @thesis with a type field) and a school field instead of publisher -- confirm against the original record}},
  pages        = {{43}},
  publisher    = {{Technische Hochschule Ostwestfalen-Lippe}},
  title        = {{{Metaverse - wie sich die Welt digital und virtuell verändert (Chancen und Risiken des Metaverse)}}},
  year         = {{2022}},
}

@article{9162,
  abstract     = {{The German manufacturing industry has been carrying out new developments towards the next industrial revolution, focusing on smart manufacturing environments. Our work emphasizes human-centered control rooms in the context of production plants. Increased automation does not have to come with less human control. Therefore, we report on multimodal collaborative interaction techniques to augment industrial control rooms. In particular, we include mobile workers who use the control room while being in the production hall using tablets or specifically mixed reality glasses. Collaborative annotation dashboards support discussions and a shared understanding among analysts. Manufacturing-related data can be integrated into business analytics environments so that holistic analyses can be performed. Multimodal interaction techniques can support effective interaction with the control room based on the users’ preferences. Immersive experience through mixed reality-based three-dimensional visualizations and interaction possibilities support users in obtaining a clear understanding of the underlying data.}},
  author       = {{Rubart, Jessica and Grimm, Valentin and Potthast, Jonas}},
  journal      = {{Future Internet}},
  issn         = {{1999-5903}},
  keywords     = {{control room, multimodal interaction, augmented reality, mixed reality}},
  number       = {{8}},
  pages        = {{1--18}},
  publisher    = {{MDPI}},
  title        = {{{Augmenting Industrial Control Rooms with Multimodal Collaborative Interaction Techniques}}},
  doi          = {{10.3390/fi14080224}},
  volume       = {{14}},
  year         = {{2022}},
}

@misc{6081,
  abstract     = {{Die vorliegende Bachelorbeit beschäftigt sich mit dem Aufbau und der Gestaltung von 360°-Rundgängen am Beispiel der von uns im praktischen Teil erstellten digitalen Hofführung für die Sentana Stiftung. Trotz der vorrübergehenden Schließung des Hofes, soll der 360°-Rundgang in der Pandemie ergänzend als Marketinginstrument eingesetzt werden, neue Sponsoren und Patenschaften akquirieren und zusätzlich die Bekanntheit der Stiftung fördern. Dabei ist es wichtig eine informative digitale Hofführung zu erstellen, die den Alltag auf dem Hof näher bringt, die Geschichten der Tiere erzählt und den Besuchern einen persönlichen, liebevollen Eindruck der Stiftung vermittelt. Hierzu gehört nicht nur die Erstellung und Gestaltung des Rundgangs, sondern auch die Entwicklung einer Strategie, um die Besucher bestmöglichst zu informieren und aufzuklären. Daher wird im theoretischen Teil herausgearbeitet, welche zentralen Elemente es gibt, um unseren Rundgang für die Besucher möglichst interessant zu gestalten und ob der 360°-Rundgang als Ergänzung auf der Webseite der Sentana Stiftung ein nützliches Marketinginstrument ist. Mithilfe einer Umfrage wird untersucht, ob die digitale Hofführung von Interessenten und möglichen Besuchern des Hofes angenommen wird. Auf Grundlage der Ergebnisse der Online-Befragung und des praktischen Teils, soll im Anschluss eine Empfehlung für die Gestaltung von 360°-Rundgängen erarbeitet werden, welcher zukünftigen Medienproduzenten dabei helfen kann zu entscheiden, welche zentralen Elemente die Passenden für ihren geplanten 360°-Rundgang sind.}},
  author       = {{Dietz, Jenny}},
  internal-note = {{NOTE(review): the abstract identifies this as a Bachelor thesis ("Bachelorbeit" [sic]) at TH OWL; consider a thesis entry type (biblatex @thesis with type={{Bachelor thesis}}) and a school field instead of publisher -- confirm}},
  keywords     = {{Virtual Reality, 360-Grad-Rundgang, Gnadenhof, Videos, Virtual Tour, Panorama Tour, Sentana Stiftung}},
  pages        = {{145}},
  publisher    = {{Technische Hochschule Ostwestfalen-Lippe}},
  title        = {{{Virtual Reality als Marketinginstrument - Eine digitale Hofführung auf dem Gnaden- und Begegnungshof der Sentana Stiftung}}},
  year         = {{2021}},
}

@inproceedings{4102,
  abstract     = {{Complexity is a fundamental part of product design and manufacturing today, owing to increased demands for customization and advances in digital design techniques. Assembling and repairing such an enormous variety of components means that workers are cognitively challenged, take longer to search for the relevant information and are prone to making mistakes. Although in recent years deep learning approaches to object recognition have seen rapid advances, the combined potential of deep learning and augmented reality in the industrial domain remains relatively under explored. In this paper we introduce AR-ProMO, a combined hardware/software solution that provides a generalizable assistance system for identifying mistakes during product assembly and repair.}},
  author       = {{Dhiman, Hitesh and Büttner, Sebastian and Röcker, Carsten and Reisch, Raphael}},
  booktitle    = {{Proceedings of the 31st Australian Conference on Human-Computer-Interaction (OzCHI'19): 2nd Dec.-5th Dec. 2019, Perth/Fremantle, WA, Australia}},
  isbn         = {{978-1-4503-7696-9}},
  keywords     = {{Augmented Reality, Deep Learning}},
  location     = {{Perth/Fremantle, WA, Australia}},
  pages        = {{518--522}},
  publisher    = {{ACM}},
  title        = {{{Handling Work Complexity with AR/Deep Learning}}},
  doi          = {{10.1145/3369457.3370919}},
  year         = {{2019}},
}

@inproceedings{4311,
  abstract     = {{Recent trends towards digitization in the industrial domain are also driving profound socio-technical changes. On the one hand, these technologies enable shorter product lifecycles and servitization, but on the other hand, the increasing technical complexity of the equipment makes its operation and maintenance a challenge for workers. Assistance systems using pervasive technologies can bridge the gap between the abilities of the workers and the demands of handling technical complexity by enriching workplace activities with relevant, context-dependent information. In this paper, we present an application that replaces a conventional, paper-based maintenance manual with digital, Augmented Reality based instructions that are delivered at the appropriate place and time.}},
  author       = {{Dhiman, Hitesh and Röcker, Carsten}},
  booktitle    = {{2019 IEEE International Conference on Pervasive Computing and Communications Workshops (PerCom Workshops)}},
  isbn         = {{978-1-5386-9151-9}},
  keywords     = {{Industry 4.0, Cyber Physical Systems, Augmented Reality, Complexity, Maintenance, HoloLens}},
  location     = {{Kyoto, Japan}},
  pages        = {{95--100}},
  publisher    = {{IEEE}},
  title        = {{{Worker Assistance in Smart Production Environments using Pervasive Technologies}}},
  doi          = {{10.1109/PERCOMW.2019.8730771}},
  year         = {{2019}},
}

@inproceedings{4312,
  abstract     = {{Computer-aided assistance systems are entering the world of work and production. Such systems utilize augmented- and virtual-reality for operator training and live guidance as well as mobile maintenance and support. This is particularly important in the modern production reality of ever-changing products and `lot size one' customization of production.This paper focuses on the application of machine learning approach to extend the functionality of assistance systems. Machine learning provides tools to analyse large amounts of data and extract meaningful information. The goal here is to recognize the movement of an operator which would enable automatic display of instructions relevant to them.We present the challenges facing machine learning applications in human-centered assistance systems and a framework to assess machine learning approaches feasible for this scenario. The approach is assessed on a historical data set and then deployed in a work station for live testing. The post-hoc, or historical, analysis yields promising results. The ad-hoc, or live, analysis is a complex task and the results are affected by multiple factors, most of which are introduced by the human influence.The contribution of this paper is an approach to adapt state- of-the-art machine learning to operator movement recognition with a special focus on approaches to spatial time series data pre-processing. Presented experiment results validate the approach and show that it performs well in a real-world scenario.}},
  author       = {{Fullen, Marta and Maier, Alexander and Nazarenko, Arthur and Jenderny, Sascha and Röcker, Carsten}},
  booktitle    = {{2019 IEEE 17th International Conference on Industrial Informatics (INDIN)}},
  isbn         = {{978-1-7281-2927-3}},
  issn         = {{2378-363X}},
  keywords     = {{augmented reality, computer based training, data handling, industrial training, learning (artificial intelligence), time series}},
  location     = {{Helsinki, Finland}},
  pages        = {{296--302}},
  publisher    = {{IEEE}},
  title        = {{{Machine Learning for Assistance Systems: Pattern-Based Approach to Online Step Recognition}}},
  doi          = {{10.1109/INDIN41052.2019.8972122}},
  year         = {{2019}},
}

@article{13578,
  abstract     = {{In recent years, virtual reality (VR) technology has found its way into nearly all fields of psychology. Previous studies indicated that virtual reality adaptations of the TSST are less potent in stimulating HPA-axis responses, with lower salivary cortisol responses recorded as compared to the in-vivo TSST. (TSST-IV). In the present experiment we tested the stress-induction potential of a refined version of the TSST-VR using a fully orthogonal experimental design in which ninety-three healthy males were either assigned to the TSST condition or a corresponding control condition in a real or virtual environment. We found a significant increase of endocrine, autonomic and self-reported stress markers in both stress conditions. Notably, we found a robust rise in salivary cortisol to the TSST-VR comparable to that observed in the TSST-IV. Despite subtle differences in response between virtual and in vivo settings, we conclude that VR adaptations of in-vivo stressors have the potential to induce real physiological and subjective reactions.}},
  author       = {{Zimmer, Patrick and Buttlar, Benjamin and Halbeisen, Georg and Walther, Eva and Domes, Gregor}},
  journal      = {{Psychoneuroendocrinology}},
  issn         = {{1873-3360}},
  keywords     = {{Psychological stress, Virtual reality, Trier Social Stress Test, Free salivary cortisol, Hypothalamus-pituitary-adrenal axis, Alpha amylase}},
  number       = {{3}},
  pages        = {{186--192}},
  publisher    = {{Elsevier}},
  title        = {{{Virtually stressed? A refined virtual reality adaptation of the Trier Social Stress Test (TSST) induces robust endocrine responses}}},
  doi          = {{10.1016/j.psyneuen.2018.11.010}},
  volume       = {{101}},
  year         = {{2019}},
}

@inproceedings{4318,
  abstract     = {{Recent advances in the field of industrial digitization and automation lead to an increasing need for assistance systems to support workers in various fields of activity, such as assembly, logistics and maintenance. Current assistance systems for the maintenance area are usually based on a single visualization technology. However, in our view, this is not practicable in terms of real activities, as these operations involve various subtasks for which different interaction concepts would be advantageous. Therefore, in this paper, we propose a concept for a multi-device assistive system, which combines multiple devices to provide workers with relevant information over different subtasks of a maintenance operation and present our first prototype for such a system.}},
  author       = {{Heinz, Mario and Dhiman, Hitesh and Röcker, Carsten}},
  booktitle    = {{Machine Learning and Knowledge Extraction: Second IFIP TC 5, TC 8/WG 8.4, 8.9, TC 12/WG 12.9 International Cross-Domain Conference, CD-MAKE 2018}},
  editor       = {{Holzinger, Andreas and Kieseberg, Peter and Tjoa, A Min and Weippl, Edgar}},
  isbn         = {{978-3-319-99739-1}},
  keywords     = {{Human-machine-interaction, Multimodal feedback, Assistive systems, Augmented-reality, Smart factory}},
  location     = {{Hamburg}},
  pages        = {{239--247}},
  publisher    = {{Springer}},
  title        = {{{A Multi-Device Assistive System for Industrial Maintenance Operations}}},
  doi          = {{10.1007/978-3-319-99740-7_16}},
  volume       = {{11015}},
  year         = {{2018}},
}

@inproceedings{4322,
  abstract     = {{This paper presents a test platform for the systematic evaluation of head-mounted displays (HMDs). The focus is on an augmented reality (AR) test application for assembly tasks, which supports tests that are flexible in terms of complexity and scope, thus enabling the realistic assessment of usability, comfort and ergonomics by the test users.}},
  author       = {{Paelke, Volker and Bulk, Jendrik and Röcker, Carsten}},
  booktitle    = {{International Conference on Applied Human Factors and Ergonomics}},
  isbn         = {{978-3-319-94195-0}},
  keywords     = {{Head-Mounted Displays (HMDs), Augmented Reality (AR), Test platform, Evaluation, Assembly}},
  location     = {{Orlando, Florida, USA}},
  pages        = {{25--35}},
  publisher    = {{Springer}},
  title        = {{{A Test Platform for the Evaluation of Augmented Reality Head Mounted Displays in Industrial Applications}}},
  doi          = {{10.1007/978-3-319-94196-7_3}},
  volume       = {{793}},
  year         = {{2018}},
}

@inproceedings{4324,
  abstract     = {{On the long term, the current wave of digitization and automation in the industrial environment will result in a progressively higher complexity and heterogeneity in the industrial environment. In this context, a growing need arises for the development of digital assistance systems to support workers in various fields of activities. Current systems are generally limited to visualizations and visual feedback. Therefore, in the scope of this paper, we take a look at the major challenges and opportunities for the integration of multimodal feedback systems in today’s and future industrial environments. It shows that the integration of multimodal feedback is subject to a complex combination of technical, user-cenric and legal aspects.}},
  author       = {{Heinz, Mario and Röcker, Carsten}},
  booktitle    = {{Machine Learning and Knowledge Extraction: Second IFIP TC 5, TC 8/WG 8.4, 8.9, TC 12/WG 12.9 International Cross-Domain Conference, CD-MAKE 2018}},
  editor       = {{Holzinger, Andreas and Kieseberg, Peter and Tjoa, A Min and Weippl, Edgar}},
  isbn         = {{978-3-319-99739-1}},
  keywords     = {{Human-machine-interaction, Multimodal feedback, Assistive systems, Augmented-reality, Smart factory}},
  location     = {{Hamburg}},
  publisher    = {{Springer}},
  title        = {{{Feedback Presentation for Workers in Industrial Environments--Challenges and Opportunities}}},
  doi          = {{10.1007/978-3-319-99740-7_17}},
  volume       = {{11015}},
  year         = {{2018}},
}

@inproceedings{4257,
  abstract     = {{The selection of suitable display technologies for industrial augmented reality (AR) applications is becoming increasingly relevant as such applications move from the proof-of-concept to the application stage. To support project managers, designers and developers in the critical selection process we have developed a checklist of important aspects and related evaluation hints that helps to speed up and improve the selection process. The checklist presented in this paper was designed to be useful for both researchers and practitioners. It combines pertinent information from relevant standards like ISO 9241-210 with results from current research literature and experience from several AR projects in industrial contexts. It can be applied both in collaboration with AR experts, where it helps to prepare relevant information for the collaboration and thus streamlines the process, or stand-alone, as a guideline for the evaluation of different options by a design team.}},
  author       = {{Paelke, Volker and Büttner, Sebastian and Mucha, Henrik and Röcker, Carsten}},
  booktitle    = {{Advances in Ergonomics of Manufacturing: Managing the Enterprise of the Future}},
  editor       = {{Trzcielinski, Stefan}},
  isbn         = {{978-3-319-60473-2}},
  issn         = {{2194-5365}},
  keywords     = {{Augmented reality, Displays, Evaluation, Checklist, Context of use, Human-Computer interaction}},
  location     = {{Los Angeles, California, USA}},
  pages        = {{225--234}},
  publisher    = {{Springer}},
  title        = {{{A Checklist-Based Approach for Evaluating Augmented Reality Displays in Industrial Applications}}},
  doi          = {{10.1007/978-3-319-60474-9_21}},
  volume       = {{606}},
  year         = {{2017}},
}

@inproceedings{4259,
  abstract     = {{This paper presents a prototype of an intelligent assistive system for workers in stationary manual assembly using projection-based augmented reality (AR) and intelligent hand tracking. By using depth cameras, the system can track the hands of the user and makes the user aware of wrong picking actions or errors in the assembly process. The system automatically adapts the digital projection-based overlay according to the current work situation. The main research contribution of our work is the presentation of a novel hand-tracking algorithm. In addition, we present the results of an user study of the system that shows the challenges and opportunities of our system and the hand-tracking algorithm in particular. We assume that our results will inform the future design of assistive systems in manual assembly.}},
  author       = {{Büttner, Sebastian and Sand, Oliver and Röcker, Carsten}},
  booktitle    = {{European Conference on Ambient Intelligence}},
  isbn         = {{978-3-319-56996-3}},
  keywords     = {{Augmented reality, Mobile projection, Hand tracking, Manufacturing, Industry 4.0}},
  location     = {{Malaga, Spain}},
  pages        = {{33--45}},
  publisher    = {{Springer}},
  title        = {{{Exploring Design Opportunities for Intelligent Worker Assistance: A New Approach Using Projection-Based AR and a Novel Hand-Tracking Algorithm}}},
  doi          = {{10.1007/978-3-319-56997-0_3}},
  volume       = {{10217}},
  year         = {{2017}},
}

@inproceedings{4302,
  abstract     = {{In this paper we present smARt.assembly – a projection-based augmented reality (AR) assembly assistance system for industrial applications. Our system projects digital guidance information in terms of picking information and assembly data into the physical workspace of a user. By using projections, we eliminate the use of smart glasses that have drawbacks such as a limited field of view or low wearing comfort. With smARt.assembly, users are able to assemble products without previous knowledge and without any other assistance.}},
  author       = {{Sand, Oliver and Büttner, Sebastian and Paelke, Volker and Röcker, Carsten}},
  booktitle    = {{18th International Conference on Human-Computer Interaction (HCII '16)}},
  editor       = {{Lackey, Stefanie and Shumaker, Randall}},
  isbn         = {{978-3-319-39906-5}},
  keywords     = {{Augmented reality, Projection, Assembly work, Manual assembly}},
  location     = {{Toronto, Canada}},
  pages        = {{643--652}},
  publisher    = {{Springer}},
  title        = {{{smARt.Assembly: Projection-Based Augmented Reality for Supporting Assembly Workers}}},
  doi          = {{10.1007/978-3-319-39907-2_61}},
  volume       = {{9740}},
  year         = {{2016}},
}

@inproceedings{4303,
  abstract     = {{The increasing demand to customize products affects production workers in many industries, as assembly tasks become more complex due to higher product variety. Assistive systems providing instructions at the workplace have been proposed to overcome increasing cognitive demand during assembly tasks. Commercially available assistive systems provide spatially registered instructions, either by using in-situ projections or head-mounted displays (HMDs). As there is little empirical knowledge about the individual advantages and disadvantages of both approaches, we are interested in comparing both types of systems. Through a user study at a manual assembly workplace, we compare both approaches to a paper baseline. Our results reveal that both in-situ instructions and paper instructions lead to significantly faster task completion times and significantly fewer errors than HMDs. Using additional questionnaires and interviews, we are able to identify the shortcomings of HMD-based instructions and discuss the possibilities of using flexible in-situ instructions for worker assistance.}},
  author       = {{Büttner, Sebastian and Funk, Markus and Sand, Oliver and Röcker, Carsten}},
  booktitle    = {{9th ACM International Conference on PErvasive Technologies Related to Assistive Environments (PETRA '16)}},
  isbn         = {{978-1-4503-4337-4}},
  keywords     = {{Spatial Augmented Reality, Industrial Augmented Reality, Projection-based Augmented Reality, Head-Mounted Display, Manufacturing, Assistive System}},
  location     = {{Corfu, Greece}},
  pages        = {{1--8}},
  publisher    = {{ACM}},
  title        = {{{Using Head-Mounted Displays and In-Situ Projection for Assistive Systems : A Comparison}}},
  doi          = {{10.1145/2910674.2910679}},
  year         = {{2016}},
}

@inproceedings{687,
  author       = {{Büttner, Sebastian and Sand, Oliver and Röcker, Carsten}},
  booktitle    = {{Proceedings of the 17th International Conference on Human-Computer Interaction with Mobile Devices and Services Adjunct}},
  isbn         = {{978-1-4503-3653-6}},
  keywords     = {{Augmented Reality, Manufacturing, Mobile Projection}},
  location     = {{Copenhagen, Denmark}},
  pages        = {{1130--1133}},
  publisher    = {{ACM}},
  title        = {{{Extending the Design Space in Industrial Manufacturing Through Mobile Projection}}},
  doi          = {{10.1145/2786567.2794342}},
  year         = {{2015}},
}

@inproceedings{4329,
  abstract     = {{The term Industrie 4.0 carries the vision of smart factories, which automatically adapt to changes and assist the human as much as possible during operation and maintenance. This includes smart human machine interfaces, which reduce the chances of errors and help to make the right decisions. This paper presents an approach to equip the maintenance software running on a tablet PC with augmented reality functionality to be able to place virtual sticky notes at production modules. Additionally, these sticky notes are enriched with position information. The central element of this approach is an ontology-based context-aware framework, which aggregates and processes data from different sources. As a result, a tablet PC application was implemented, which allows displaying maintenance information as well as live plant process data in the form of augmented reality. More than 100 of those sticky notes can be placed using this system, whereas each note requires a file size of 12 to 16 kilo bytes. After placing a sticky note, the system recognizes it even if the camera's position is not exactly the same as during the placing process.}},
  author       = {{Flatt, Holger and Koch, Nils and Guenter, Andrei and Röcker, Carsten and Jasperneite, Jürgen}},
  booktitle    = {{2015 IEEE 20th Conference on Emerging Technologies \& Factory Automation (ETFA)}},
  keywords     = {{Maintenance engineering, Augmented reality, Context, Context modeling, Production facilities, Cameras}},
  location     = {{Luxembourg, Luxembourg}},
  publisher    = {{IEEE}},
  title        = {{{A Context-Aware Assistance System for Maintenance Applications in Smart Factories based on Augmented Reality and Indoor Localization}}},
  doi          = {{10.1109/ETFA.2015.7301586}},
  year         = {{2015}},
}

@inproceedings{4330,
  abstract     = {{Catchwords such as “Cyber-Physical-Systems” and “Industry 4.0” describe the current development of systems with embedded intelligence. These systems can be characterized by an increasing technical complexity that must be addressed in the user interface. In this paper we analyze the specific requirements posed by the interaction with cyber-physical-systems, present a coordinated approach to these requirements and illustrate our approach with a practical example of an assistance system for assembly workers in an industrial production environment.}},
  author       = {{Paelke, Volker and Röcker, Carsten}},
  booktitle    = {{Design, User Experience, and Usability: Design Discourse}},
  isbn         = {{978-3-319-20885-5}},
  keywords     = {{Industrial IT, User-Centered design, Usability, User interfaces, Cyber-Physical-Systems, Industry 4.0, Augmented reality, Development processes and methods}},
  location     = {{Los Angeles, CA, USA}},
  pages        = {{75--85}},
  publisher    = {{Springer}},
  title        = {{{User Interfaces for Cyber-Physical Systems: Challenges and Possible Approaches}}},
  doi          = {{10.1007/978-3-319-20886-2_8}},
  volume       = {{9186}},
  year         = {{2015}},
}

@inproceedings{4395,
  abstract     = {{This paper presents the evaluation of a mixed reality communication system for the home domain, called roomXT. The system uses a wall-sized display that is seamlessly integrated into a living lab, to create a 'life-like' video communication experience. In order to demonstrate the potential of this approach, we conducted a living lab study comparing the developed prototype with a desktop-based system. A special video communication application, which enables spatially separated users to have a joint dinner experience, served as a common basis for the different test conditions. Results of the study show that the overall concept of roomXT was well received by users of a wide age range and that the developed prototype system seems to be preferred to commercially available video communication solutions with respect to the tested quality dimensions.}},
  author       = {{Kasugai, Kai and Heidrich, Felix and Röcker, Carsten and Russell, Peter and Ziefle, Martina}},
  booktitle    = {{Proceedings of the 2012 International Symposium on Pervasive Displays}},
  editor       = {{José, Rui}},
  isbn         = {{978-1-4503-1414-5}},
  keywords     = {{human-computer interaction, mixed-reality, large displays, co-dining, interactive media, family communication, co-presence, architecture}},
  location     = {{Porto, Portugal}},
  pages        = {{1--6}},
  publisher    = {{ACM}},
  title        = {{{Perspective Views in Video Communication Systems: An Analysis of Fundamental User Requirements}}},
  doi          = {{10.1145/2307798.2307811}},
  year         = {{2012}},
}

@inproceedings{4494,
  abstract     = {{This paper gives an overview of how the role of architecture, design, and HCI is to be redefined in a world of ubiquitous computing and the recent advances in display technology. It describes the future care lab and our take on research in this field. We elaborate on how wall-sized displays can form an integral part of an ambient intelligence space that aims to assist the elderly and empower them to stay longer in their familiar surroundings before moving into nursing homes. We propose a classification of different concepts that virtually extend a real environment and describe some possible scenarios.}},
  author       = {{Kasugai, Kai and Ziefle, Martina and Röcker, Carsten and Russell, Peter}},
  booktitle    = {{Proceedings of Create’10 - Innovative Interactions}},
  editor       = {{Bonner, J. and O’Neill, M. and Mival, O.}},
  keywords     = {{mixed reality, immersive co-location, ambient assisted living, architecture, smart spaces, spatial extension}},
  location     = {{Loughborough, UK}},
  pages        = {{40--45}},
  title        = {{{Creating Spatio-Temporal Contiguities Between Real and Virtual Rooms in an Assistive Living Environment}}},
  doi          = {{10.14236/ewic/CREATE2010.8}},
  year         = {{2010}},
}

@inbook{4765,
  abstract     = {{This book gives an introduction and overview of the field of Pervasive Gaming, an emerging genre in which traditional, real-world games are augmented with computing functionality, or, depending on the perspective, purely virtual computer entertainment is brought back to the real world. We try to both inform about the developments and influential works in the field of Pervasive Games, and also help laying the foundation for shaping the future of this application domain. Consequently, we focus on concepts and theoretical works that help understanding and sharpening the rationale behind the field (the "what") as well as on technological means of actually realizing working prototypes of Pervasive Games (the "how"). Both perspectives have their right and need to go hand in hand. The first four of the twelve chapters contributed by leading experts in the field address the theoretical foundations. The individual authors' views sometimes complement each other, sometimes they express similar ideas and concepts, and sometimes they even contradict each other severely. Since we are currently far from having established a common ground about what this emerging genre clearly defines and separates from related fields, we suggest treating the individual chapters with equal respect and being prepared to contribute to the field with one's own thoughts and ideas that might help further shape and clarify the important issues of Pervasive Games.}},
  author       = {{Magerkurth, Carsten and Röcker, Carsten}},
  booktitle    = {{A reader for pervasive gaming research / Vol. 1. Concepts and technologies for pervasive games}},
  isbn         = {{978-3-8322-6223-5}},
  keywords     = {{Pervasive Gaming Applications, User Interfaces, Smart Home Environments, Mixed Reality}},
  pages        = {{5--9}},
  publisher    = {{Shaker}},
  title        = {{{Towards the Emergence of a New Application Domain}}},
  year         = {{2007}},
}

@inbook{4771,
  abstract     = {{Pervasive Games have become popular in recent years. Their ambitious goal is to bring the computer into the world in order to augment existing games or to even come up with hitherto impossible or unthought-of forms of entertainment. This paper reflects on the various approaches to define what Pervasive Games consist of, how they relate to playing and games, and how different terms and views can be integrated. The paper contributes a unifying and integrating classification of the respective terms that relates different states of reality to the relevant dimensions and game elements.}},
  author       = {{Hinske, Steve and Lampe, Matthias and Magerkurth, Carsten and Röcker, Carsten}},
  booktitle    = {{A reader for pervasive gaming research / Vol. 1. Concepts and technologies for pervasive games}},
  editor       = {{Magerkurth, Carsten and Röcker, Carsten}},
  isbn         = {{978-3-8322-6223-5}},
  keywords     = {{Pervasive Gaming Applications, User Interfaces, Smart Home Environments, Mixed Reality}},
  pages        = {{11--38}},
  publisher    = {{Shaker}},
  title        = {{{Classifying Pervasive Games: On Pervasive Computing and Mixed Reality}}},
  year         = {{2007}},
}

@inbook{4772,
  abstract     = {{Pervasive Gaming Applications; User Interfaces; Smart Home Environments; Novel Gaming Applications; Mixed Reality; Social and Physical Entertainment Systems; Augmented Reality Games; Design Aspects; Pervasive Multiplayer Game; Mapping; Exertion Interfaces; REXplorer; Pervasive Games; Socially Adaptable Games}},
  internal-note = {{NOTE(review): the abstract field above looks like a keyword/index-term dump, not the chapter's actual abstract -- TODO replace with the real abstract from the book. Field is ignored by BibTeX styles.}},
  author       = {{Magerkurth, Carsten and Engelke, Timo and Röcker, Carsten}},
  booktitle    = {{A reader for pervasive gaming research / Vol. 2. Pervasive gaming applications}},
  editor       = {{Magerkurth, Carsten and Röcker, Carsten}},
  isbn         = {{978-3-8322-6223-5}},
  keywords     = {{Pervasive Gaming Applications, User Interfaces, Smart Home Environments, Mixed Reality}},
  pages        = {{85--106}},
  publisher    = {{Shaker}},
  title        = {{{Novel Gaming Applications for Smart Home Environments}}},
  year         = {{2007}},
}

@inbook{4773,
  abstract     = {{Welcome to the second volume of the reader on Pervasive Gaming Research! With this volume we focus mainly on actual implementations and prototypes of Pervasive Games. The foundations, both in technical and conceptual terms, were laid in the first volume. Now, we apply these foundations to real Pervasive Games and explore the issues related to actually realizing such Pervasive Games. Clearly, the combination of virtual, physical, and social aspects in Pervasive Gaming contributes to a new complexity in game design, since games are no longer confined to the narrowness of a computer screen, but integrate with our real environments that consist of many things beyond our control.}},
  author       = {{Magerkurth, Carsten and Röcker, Carsten}},
  booktitle    = {{A reader for pervasive gaming research / Vol. 2. Pervasive gaming applications}},
  editor       = {{Magerkurth, Carsten and Röcker, Carsten}},
  isbn         = {{978-3-8322-6223-5}},
  keywords     = {{Pervasive Gaming Applications, User Interfaces, Smart Home Environments, Mixed Reality}},
  pages        = {{5--10}},
  publisher    = {{Shaker}},
  title        = {{{Prototyping Pervasive Games: A New Dimension of Complexity}}},
  year         = {{2007}},
}

