@article{12167,
  abstract     = {Deployment of Level 3 and Level 4 autonomous vehicles (AVs) in urban environments is significantly constrained by adverse weather conditions, limiting their operation to clear weather due to safety concerns. Ensuring that AVs remain within their designated Operational Design Domain (ODD) is a formidable challenge, making boundary monitoring strategies essential for safe navigation. This study explores the critical role of an ODD monitoring system (OMS) in addressing these challenges. It reviews various methodologies for designing an OMS and presents a comprehensive visualization framework incorporating trigger points for ODD exits. These trigger points serve as essential references for effective OMS design. The study also delves into a specific use case concerning ODD exits: the reduction in road friction due to adverse weather conditions. It emphasizes the importance of contactless computer vision-based methods for road condition estimation (RCE), particularly using vision sensors such as cameras. The study details a timeline of methods involving classical machine learning and deep learning feature extraction techniques, identifying contemporary challenges such as class imbalance, lack of comprehensive datasets, annotation methods, and the scarcity of generalization techniques. Furthermore, it provides a factual comparison of two state-of-the-art RCE datasets. In essence, the study aims to address and explore ODD exits due to weather-induced road conditions, decoding the practical solutions and directions for future research in the realm of AVs.},
  author       = {Subramanian, Ramakrishnan and Büker, Ulrich},
  journal      = {Eng : advances in engineering},
  issn         = {2673-4117},
  keywords     = {autonomous vehicles, operational design domain, computer vision, machine learning, road surface detection},
  number       = {4},
  pages        = {2778--2804},
  publisher    = {MDPI AG},
  title        = {Study of Contactless Computer Vision-Based Road Condition Estimation Methods Within the Framework of an {Operational Design Domain} Monitoring System},
  doi          = {10.3390/eng5040145},
  volume       = {5},
  year         = {2024},
}

@inproceedings{13003,
  abstract     = {In light of the rising importance of data transparency and open data guidelines (e.g. OGP Local1), Open Data Portals became standard in Smart City strategies (Van Oosterhout et al. 2020). While it is clear that these tools can serve as a valuable way for internal administration processes, at the same time it is critical that data is not only openly available following standard formats limited to computer-readability but above all also largely understandable for average citizens. For this reason, it is researched how open data can not only be made available but also visualized in an accessible way to all citizens. Moreover, the aim is to simultaneously boost private behavior changes which are inevitable to achieve locally-set goals in sustainability (Barr et al. 2011, TWI 2050 2018). To do so, we draw on the principle of nudging. Following
the tradition of behavioral economics, nudging is defined as a positive intervention that induces a voluntary change in behavior without resulting in external (negative) consequences (Thaler & Sunstein 2008) and thus contrasting interventions like commands or bans because freedom of choice is maintained (Mongin & Cozic 2020, Ranchordás 2020).
This paper discusses an installation that explored the potential of combining nudging and situated visualization to improve data transparency and support individual decision-making in urban public spaces. During the Detmold Design Week 2023, an event showcasing creative works in various locations, the visitor numbers at nine locations were captured using computer vision. Visitors then received on-site suggestions in real-time for the next place to visit based on the occupancy. A survey was conducted to evaluate visitors’ willingness to follow these data-informed suggestions. Findings highlight the importance of balancing between simplicity, relevance and privacy in data visualization. The results of the field test provide the foundation for the installation of interactive interfaces in Detmold's public spaces in the next years, in particular for communicating smart city topics focusing on mobility and urban climate protection.},
  author       = {Licht, Mareile and Barbosa Jardim, Amanda and Müh, Maximilian and Häusler, Axel},
  booktitle    = {Keep on Planning for the Real World. Climate Change calls for Nature-based Solutions and Smart Technologies : Proceedings of {REAL CORP} 2024, 29th International Conference on Urban Development, Regional Planning and Information Society},
  editor       = {Schrenk, Manfred and Popovich, Tatiana and Zeile, Peter and Elisei, Pietro and Beyer, Clemens and Ryser, Judith and Kaufmann, Hans Rüdiger},
  issn         = {2521-3938},
  keywords     = {Situated Visualisation, Nudging, Planning, Computer Vision, Human-computer interaction},
  location     = {Mannheim},
  pages        = {645--653},
  publisher    = {CORP - Competence Center of Urban and Regional Planning},
  title        = {The Use of Situated Visualized Data to Nudge Visitor’s Paths: A Case Study at the {Detmold Design Week} 2023},
  doi          = {10.48494/REALCORP2024.9052},
  year         = {2024},
}

@inproceedings{4094,
  abstract     = {Projection-based assistive systems that guide users through assembly work are on their way to industrial application. Previous research work investigated how people can be supported with such systems. However, there has been little work on the question on how to generate and author sequential instructions for assistive systems. In this paper, we present a new concept and a prototypical implementation of an assistive system that can be taught by demonstrating an assembly process. By using a combination of RGB and depth cameras, we can generate an assembly instruction of Lego Duplo bricks based on the demonstration of a user. This generated manual can later on be used for assisting other users in the assembly process. By our prototype system, we show the technological feasibility of assistive systems that can learn from users.},
  author       = {Büttner, Sebastian and Peda, Andreas and Heinz, Mario and Röcker, Carsten},
  booktitle    = {22nd International Conference on Human-Computer Interaction},
  isbn         = {978-3-030-50343-7},
  keywords     = {Assistive system, Authoring, Instruction generation, Computer vision, Teaching by demonstration},
  location     = {Copenhagen, Denmark},
  pages        = {153--163},
  publisher    = {Springer},
  series       = {Lecture Notes in Computer Science},
  title        = {Teaching by Demonstrating – How Smart Assistive Systems Can Learn from Users},
  doi          = {10.1007/978-3-030-50344-4_12},
  volume       = {12203},
  year         = {2020},
}

