@mastersthesis{13397,
  abstract     = {Diese Bachelorarbeit untersucht den historischen Wandel weiblicher Hauptfiguren im Disney-Animationsfilm und analysiert, inwiefern sich deren Darstellung von passiven, objektifizierten Rollen hin zu selbstbestimmten Heldinnen entwickelt hat. Ausgangspunkt der Arbeit bilden feministische filmtheoretische Ansätze, insbesondere Laura Mulveys Konzept des Male Gaze, ergänzt durch Theorien zu Female Agency, Role Model Theory, medienpsychologische Wirkungsforschung sowie ökofeministische Perspektiven.

Im analytischen Teil werden ausgewählte Disney-Prinzessinnen aus unterschiedlichen Produktionsphasen, Schneewittchen (1937), Ariel (1989) und Moana (2016), anhand zentraler Filmszenen vergleichend untersucht. Dabei stehen Handlungsmacht, Naturbezug, narrative Funktion und Vorbildwirkung im Fokus. Die Analyse zeigt, dass sich weibliche Figuren zwar zunehmend von passiven Rollenbildern lösen, zugleich jedoch neue Spannungsfelder entstehen, etwa durch romantische Erzählmuster oder eine Tendenz zur Hyperindividualisierung.
Auf Grundlage dieser Erkenntnisse wird mit der Figur Elara ein theoretisches Modell einer zeitgemäßen weiblichen Heldin entwickelt. Elara fungiert als Brückenfigur zwischen Natur und Gesellschaft und verkörpert Handlungsmacht durch Verantwortung, Reflexion und Ambivalenz statt durch Überlegenheit oder Rettungsnarrative. Die Figur wird sowohl narrativ als auch visuell konzipiert und dient als gestalterische Umsetzung der theoretischen Ergebnisse.},
  author       = {Schlewing, Nina},
  keywords     = {Disney-Prinzessinnen, Female Agency, Male Gaze, Rollenbilder im Animationsfilm, Role Model Theory, Ökofeminismus, Figurenanalyse, Zeitgemäße Heldinnen, Charakterdesign},
  pages        = {61},
  school       = {Technische Hochschule Ostwestfalen-Lippe},
  type         = {Bachelor's Thesis},
  title        = {{Zwischen Natur, Tradition und Selbstbestimmung: Von Disney-Prinzessinnen zu neuen weiblichen Role Models -- eine Analyse am Beispiel der Figur Elara}},
  year         = {2026},
}

@inproceedings{13291,
  abstract     = {The application of Large Language Models (LLMs) for the automated generation of assembly instructions shows significant potential for improving work preparation in production processes. However, challenges remain regarding the overall information quality and precision of the generated instructions. In light of these challenges, this study explores how the information quality of automatically generated assembly instructions can be enhanced through the targeted provision of structured input data, such as Assembly and Quantity BOMs (Bills of Materials), as well as the use of optimized prompt chaining techniques. The methodology employs ChatGPT-4o in combination with Retrieval Augmented Generation (RAG) within the Microsoft Azure environment. The results demonstrate that structured data inputs, particularly the use of Assembly BOMs with defined Tool-to-Component relations, significantly improve the precision and relevance of the generated instructions. Despite these advancements, achieving consistent information quality remains a barrier to broader practical implementation. Therefore, feedback loops should be integrated into the assembly instruction generation process to ensure continuous refinement and reliability. Future research should investigate the use of RAG or similar frameworks, focusing on optimizing data structures and implementing feedback mechanisms to enhance the automated generation of assembly instructions.},
  author       = {Herbort, Robin and Green, Dominik and Hinrichsen, Sven},
  booktitle    = {Intelligent Human Systems Integration ({IHSI} 2025): Integrating People and Intelligent Systems},
  editor       = {Ahram, Tareq and Karwowski, Waldemar and Martino, Carlo and Di Bucchianico, Giuseppe and Maselli, Vincenzo},
  isbn         = {978-1-964867-36-6},
  issn         = {2771-0718},
  keywords     = {Assembly Instruction, Retrieval Augmented Generation (RAG), Large Language Model (LLM)},
  location     = {Rome, Italy},
  pages        = {765--775},
  publisher    = {AHFE International},
  title        = {Automatic Creation of Assembly Instructions by Using {Retrieval Augmented Generation}},
  doi          = {10.54941/ahfe1005883},
  volume       = {160},
  year         = {2025},
}

@inproceedings{13292,
  abstract     = {The application of Large Language Models (LLMs) for the automated generation of assembly instructions shows significant potential for improving work preparation in production processes. However, challenges remain regarding the overall information quality and precision of the generated instructions. In light of these challenges, this study explores how the information quality of automatically generated assembly instructions can be enhanced through the targeted provision of structured input data, such as Assembly and Quantity BOMs (Bills of Materials), as well as the use of optimized prompt chaining techniques. The methodology employs ChatGPT-4o in combination with Retrieval Augmented Generation (RAG) within the Microsoft Azure environment. The results demonstrate that structured data inputs, particularly the use of Assembly BOMs with defined Tool-to-Component relations, significantly improve the precision and relevance of the generated instructions. Despite these advancements, achieving consistent information quality remains a barrier to broader practical implementation. Therefore, feedback loops should be integrated into the assembly instruction generation process to ensure continuous refinement and reliability. Future research should investigate the use of RAG or similar frameworks, focusing on optimizing data structures and implementing feedback mechanisms to enhance the automated generation of assembly instructions.},
  author       = {Herbort, Robin and Green, Dominik and Hinrichsen, Sven},
  booktitle    = {Intelligent Human Systems Integration ({IHSI} 2025): Integrating People and Intelligent Systems},
  editor       = {Ahram, Tareq and Karwowski, Waldemar and Martino, Carlo and Di Bucchianico, Giuseppe and Maselli, Vincenzo},
  isbn         = {978-1-964867-36-6},
  issn         = {2771-0718},
  keywords     = {Retrieval Augmented Generation, Large Language Model, Assembly Instructions},
  location     = {Rome, Italy},
  pages        = {765--775},
  publisher    = {AHFE International},
  title        = {Automatic Creation of Assembly Instructions by Using {Retrieval Augmented Generation}},
  doi          = {10.54941/ahfe1005883},
  volume       = {160},
  year         = {2025},
  internal-note = {NOTE(review): apparent duplicate of entry 13291 (identical title, authors, and DOI) -- confirm and merge, keeping one citation key.},
}

@inproceedings{13293,
  abstract     = {The performance of large language models (LLMs) has improved significantly in recent years, with the result that they are now used in many companies in various industries. However, the design of a company-specific information system involving an LLM is associated with a large number of decisions. This leads to a high level of complexity in the design task. Against this background, companies need a structured approach that methodically supports the planning, development, implementation and long-term maintenance of LLM-based information systems so that domain- and company-specific requirements are taken into account as a result. This article therefore describes a method that supports the design, introduction and maintenance process of an LLM-based information system. The method consists of a process model and a list of design principles, which are also referred to as success factors. The process model developed is based on the proven six-stage REFA planning system. To identify and describe success factors, a systematic literature search was carried out. Based on an analysis of the contents of individual literature sources, success factors for the design of LLM-based information systems were identified. These success factors relate, for example, to the quality of the data provided, data security, user-centered system design and feedback mechanisms for improving information output.},
  author       = {Hinrichsen, Sven and Herbort, Robin and Green, Dominik and Adrian, Benjamin},
  booktitle    = {Human Interaction and Emerging Technologies ({IHIET} 2025)},
  editor       = {Ahram, Tareq and Motschnig, Renate},
  isbn         = {978-1-964867-73-1},
  issn         = {2771-0718},
  keywords     = {Large language model, Information system, Retrieval augmented generation},
  location     = {Vienna},
  publisher    = {AHFE International},
  title        = {How to Design an Operation-Specific {LLM}-Based Information System},
  doi          = {10.54941/ahfe1006709},
  volume       = {197},
  year         = {2025},
}

@inproceedings{13294,
  abstract     = {Die Leistungsfähigkeit von Large Language Models konnte in den letzten Jahren deutlich verbessert werden, so dass viele Unternehmen solche Modelle bereits einsetzen oder ihren Einsatz planen. Die Gestaltung eines betriebsspezifischen Informationssystems unter Einbeziehung eines Large Language Model (LLM) ist allerdings mit einer Vielzahl an Entscheidungen verbunden. Vor diesem Hintergrund wird in diesem Beitrag eine Methode beschrieben, die bei der Gestaltung und Einführung eines LLM-basierten Informationssystems unterstützen kann, um im Ergebnis eine möglichst anforderungsgerechte Lösung zu entwickeln. Diese Methode besteht dabei aus einem Vorgehensmodell und einer Liste mit Gestaltungsprinzipien, die auch als Erfolgsfaktoren bezeichnet werden.},
  author       = {Hinrichsen, Sven and Herbort, Robin and Green, Dominik and Adrian, Benjamin},
  booktitle    = {Arbeit 5.0: Menschzentrierte Innovationen für die Zukunft der Arbeit},
  isbn         = {978-3-936804-36-2},
  keywords     = {Large Language Model, Informationssystem, Methode},
  location     = {Aachen},
  pages        = {642--647},
  publisher    = {GfA-Press},
  title        = {{Vorgehensmodell zur Entwicklung und Implementierung von LLM-basierten Informationssystemen}},
  year         = {2025},
}

@article{12853,
  abstract     = {Lentic waters integrate atmosphere and catchment processes, and thus ultimately capture climate signals. However, studies of climate warming effects on lentic waters usually do not sufficiently account for a change in heat flux from the catchment through altered inflow temperature and discharge under climate change. This is particularly relevant for reservoirs, which are highly impacted by catchment hydrology and may be affected by upstream reservoirs or pre-dams. This study explicitly quantified how the catchment and pre-dams modify the thermal response of Rappbode Reservoir, Germany's largest drinking water reservoir system, to climate change. We established a catchment-lake modeling chain in the main reservoir and its two pre-dams utilizing the lake model GOTM, the catchment model mHM, and the stream temperature model Air2stream, forced by an ensemble of climate projections under RCP2.6 and 8.5 warming scenarios. Results exhibited a warming of 0.27/0.15°C decade$^{-1}$ for the surface/bottom temperatures of the main reservoir, with approximately 8\%/24\% of this warming attributed to the catchment warming, respectively. The catchment warming amplified the deep water warming more than at the surface, contrary to the atmospheric warming effect, and advanced stratification by about 1 week, while having a minor impact on stratification intensity. On the other hand, pre-dams reduced the inflow temperature into the main reservoir in spring, and consequently lowered the hypolimnetic temperature and postponed stratification onset. This shielded the main reservoir from climate warming, although overall the contribution of pre-dams was minimal. Altogether, our study highlights the importance of catchment alterations and seasonality when projecting reservoir warming, and provides insights into catchment-reservoir coupling under climate change.},
  author       = {Gai, Bo and Kumar, Rohini and Hüesker, Frank and Mi, Chenxi and Kong, Xiangzhen and Boehrer, Bertram and Rinke, Karsten and Shatwell, Tom},
  journal      = {Water Resources Research},
  issn         = {1944-7973},
  keywords     = {climate change, coupled catchment-lake model, thermal characteristics, drinking water reservoir management, GOTM, stratification},
  number       = {1},
  publisher    = {American Geophysical Union (AGU)},
  title        = {Catchments Amplify Reservoir Thermal Response to Climate Warming},
  doi          = {10.1029/2023wr036808},
  volume       = {61},
  year         = {2025},
}

@inproceedings{11330,
  abstract     = {With the increasing complexity in manual assembly and a demographic decline in skilled workforce, the importance of well-documented processes through assembly instructions has grown. Creating these instructions is a time-consuming and knowledge-intensive task that typically relies on experienced employees. Although various automation solutions have been proposed to assist in generating assembly instructions, they often fall short in providing detailed textual guidance. With the rise of generative artificial intelligence (AI), new potentials arise in this domain. Therefore, this paper explores these potentials by employing various large language models (LLMs), prompting techniques and input data in an experimental setup for generating detailed assembly instructions, including the planning of assembly sequences as well as textual guidance on tools, assembly activities, and quality assurance measures. The findings reveal promising opportunities in leveraging LLMs but also substantial challenges, particularly in assembly sequence planning. To improve the reliability of generating assembly instructions, we propose a multi-agent concept that decomposes the complex task into simpler subtasks, each managed by specialized agents.},
  author       = {Meyer, Frederic and Freitag, Lennart and Hinrichsen, Sven and Niggemann, Oliver},
  booktitle    = {2024 {IEEE} 29th International Conference on Emerging Technologies and Factory Automation ({ETFA})},
  isbn         = {979-8-3503-6123-0},
  keywords     = {assembly instruction, GPT, large language model, LLM, prompt},
  location     = {Padova, Italy},
  publisher    = {IEEE},
  title        = {Potentials of Large Language Models for Generating Assembly Instructions},
  doi          = {10.1109/ETFA61755.2024.10710806},
  volume       = {78},
  year         = {2024},
}

@article{12207,
  abstract     = {Lakes represent a vital source of freshwater, accounting for 87\% of the Earth's accessible surface freshwater resources and providing a range of ecosystem services, including water for human consumption. As climate change continues to unfold, understanding the potential evaporative water losses from lakes becomes crucial for effective water management strategies. Here we investigate the impacts of climate change on the evaporation rates of 23 European lakes and reservoirs of varying size during the warm season (July–September). To assess the evaporation trends, we employ a 12-member ensemble of model projections, utilizing three one-dimensional process-based lake models. These lake models were driven by bias-corrected climate simulations from four General Circulation Models (GCMs), considering both a historical (1970–2005) and future (2006–2099) period. Our findings reveal a consistent projection of increased warm-season evaporation across all lakes this century, though the magnitude varies depending on specific factors. By the end of this century (2070–2099), we estimate a 21\%, 30\% and 42\% average increase in evaporation rates in the studied European lakes under RCP (Representative Concentration Pathway) 2.6, 6.0 and 8.5, respectively. Moreover, future projections of the relationship between precipitation (P) and evaporation (E) in the studied lakes, suggest that P-E will decrease this century, likely leading to a deficit in the availability of surface water. The projected increases in evaporation rates underscore the significance of adapting strategic management approaches for European lakes to cope with the far-reaching consequences of climate change.},
  author       = {La Fuente, Sofia and Jennings, Eleanor and Lenters, John D. and Verburg, Piet and Kirillin, Georgiy and Shatwell, Tom and Couture, Raoul-Marie and Côté, Marianne and Vinnå, C. Love Råman and Woolway, R. Iestyn},
  journal      = {Climatic Change},
  issn         = {1573-1480},
  keywords     = {Multi-model, Water availability, Europe, Ensemble, Lake evaporation, Latent heat flux},
  number       = {12},
  publisher    = {Springer},
  title        = {Increasing warm-season evaporation rates across {European} lakes under climate change},
  doi          = {10.1007/s10584-024-03830-2},
  volume       = {177},
  year         = {2024},
}

@article{12212,
  abstract     = {Water quality and hypoxia in lakes and reservoirs are strongly associated with water renewal. Yet vertical water exchange is still not fully understood and challenging to evaluate in highly dynamic systems. Here, we applied a scaling approach using the vertical timescale, vertical water age (VWA), defined as time since a water parcel last touched the water surface. We established a 3D hydrodynamic-based age model to quantify the vertical water renewal in Xiangxi Bay, a tributary bay of the Three Gorges Dam. The integrated effects of hydrodynamic processes like stratification, intruding density currents from the mainstream, and upstream inflow on the vertical renewal were accounted for. Results indicated that the spatial–temporal distribution of VWA in Xiangxi Bay depended on stratification and forms of intruding density currents. Age was large in spring and summer, and small in autumn and winter, reaching a maximum of 25 days in April. The vertical water renewal was faster during bottom intrusions from the mainstream than during middle and surface intrusions. At times, the epilimnion contained old water due to circulations, and the hypolimnion contained young water due to upstream flushing. In contrast to natural lakes, the bottom water was often younger than overlying intermediate waters. This demonstrated that mixed layer depth was insufficient to fully capture the vertical exchange in riverine systems with significant surface/bottom intrusion. The findings suggested VWA as a quantitative measure of vertical water transport in highly dynamic systems and its usability for environmental water management.},
  author       = {Gai, Bo and Boehrer, Bertram and Sun, Jian and Li, Yuanyi and Lin, Binliang and Shatwell, Tom},
  journal      = {Journal of Hydrology},
  issn         = {1879-2707},
  keywords     = {Vertical water renewal, Water age, Thermal stratification, Hypoxia, 3D hydrodynamic-based age model, Water environmental management},
  number       = {3},
  publisher    = {Elsevier},
  title        = {Vertical water age and water renewal in a large riverine reservoir},
  doi          = {10.1016/j.jhydrol.2024.130701},
  volume       = {631},
  year         = {2024},
}

@article{12402,
  abstract     = {In anaerobic technology, pH values are crucial for targeted volatile fatty acid production. While pH dynamics can be modeled using the Anaerobic Digestion Model No. 1 (ADM1), simulation results may be biased. To address this issue, the pH prediction routine of Visual Water, a specialized water chemistry simulator, was validated. Unlike ADM1, it accounts for ionic strength and activities while also providing an automated uncertainty analysis. The analysis revealed Visual Water simulations to better fit measured pH data from acidic solutions in a miniaturized stirred-tank reactor.},
  author       = {Kosse, Pascal and Hernández Rodriguez, Tanja and Frahm, Björn and Lübken, Manfred and Wichern, Marc},
  journal      = {Chemie Ingenieur Technik},
  issn         = {1522-2640},
  keywords     = {Anaerobic Digestion Model No. 1 (ADM1), Anaerobic technology, pH simulation, Uncertainty assessment, Visual Water},
  number       = {4},
  pages        = {528--534},
  publisher    = {Wiley},
  title        = {Comparative Analysis of {pH} Prediction Routines in {ADM1} and a Specialized Water Chemistry Simulator},
  doi          = {10.1002/cite.202300188},
  volume       = {96},
  year         = {2024},
}

@article{12761,
  abstract     = {For modern machines, factories and electric and autonomous vehicles, the importance of reliable electrical connectors cannot be overstated. With an increasing number of connectors being used in machines, factories and vehicles, ensuring their reliability is crucial for comfort and safety alike. One of the key indicators of reliability is the lifetime of connectors. To evaluate the lifetime of electrical connectors, a testing method and a model for calculating their lifetime based on the test data were developed. The results from these tests were compared to failure analysis data from long-term field operations. The findings indicate that the laboratory tests can accurately reproduce the main failures observed in the field. However, such lifetime tests can be time- and labor-intensive. To address this challenge, a data-driven method is proposed that predicts the lifetime of electrical connectors using statistical analysis of electrical contact resistance data collected from short-term tests. The predictions from this method were compared to actual results obtained from long-term tests. A strong correlation was observed between the contact resistance development in short-term tests and the number of failures in later stages of testing. Thus, apart from predicting the lifetime of connectors, this method can also be applied for failure prognosis in real-time operations.},
  author       = {Song, Jian and Shukla, Abhay Rammurti and Probst, Roman},
  journal      = {Machines},
  issn         = {2075-1702},
  keywords     = {electrical connectors, accelerated life testing, statistical model, lifetime prognosis, reliability, state of health},
  number       = {7},
  pages        = {474},
  publisher    = {MDPI},
  title        = {The State of Health of Electrical Connectors},
  doi          = {10.3390/machines12070474},
  volume       = {12},
  year         = {2024},
}

@article{12855,
  abstract     = {Global warming is shifting the thermal dynamics of lakes, with resulting climatic variability heavily affecting their mixing dynamics. We present a dual ensemble workflow coupling climate models with lake models. We used a large set of simulations across multiple domains, multi-scenario, and multi GCM-RCM combinations from CORDEX data. We forced a set of multiple hydrodynamic lake models by these multiple climate simulations to explore climate change impacts on lakes. We also quantified the contributions from the different models to the overall uncertainty. We employed this workflow to investigate the effects of climate change on Lake Sevan (Armenia). We predicted for the end of the 21st century, under RCP 8.5, a sharp increase in surface temperature and substantial bottom warming, longer stratification periods (+55 days) and disappearance of ice cover leading to a shift in mixing regime. Increased insufficient cooling during warmer winters points to the vulnerability of Lake Sevan to climate change. Our workflow leverages the strengths of multiple models at several levels of the model chain to provide a more robust projection and at the same time a better uncertainty estimate that accounts for the contributions of the different model levels to overall uncertainty. Although for specific variables, for example, summer bottom temperature, single lake models may perform better, the full ensemble provides a robust estimate of thermal dynamics that has a high transferability so that our workflow can be a blueprint for climate impact studies in other systems.},
  author       = {Shikhani, Muhammed and Feldbauer, Johannes and Ladwig, Robert and Mercado-Bettín, Daniel and Moore, Tadhg N. and Gevorgyan, Artur and Misakyan, Amalya and Mi, Chenxi and Schultze, Martin and Boehrer, Bertram and Shatwell, Tom and Barfus, Klemens and Rinke, Karsten},
  journal      = {Water Resources Research},
  issn         = {1944-7973},
  keywords     = {multi model ensemble (MME), CORDEX, LakeEnsemblR, lake modeling, climate change impacts, variance decomposition},
  number       = {11},
  publisher    = {American Geophysical Union (AGU)},
  title        = {Combining a Multi-Lake Model Ensemble and a Multi-Domain {CORDEX} Climate Data Ensemble for Assessing Climate Change Impacts on {Lake Sevan}},
  doi          = {10.1029/2023wr036511},
  volume       = {60},
  year         = {2024},
}

@inproceedings{11360,
  abstract     = {The state of health and lifetime estimation process of electrical connectors via lifetime tests is a time and labor intensive process. In our previous work, a correlation between the contact resistance developments in the early stages of lifetime tests of electrical connectors with the final results was established using a data driven statistical process based on probability distribution. Also, state of health indicators for prognosis of lifetime were introduced. In this work, the state of health indicators have been optimized. The sensitivity analysis is performed with regards to the selection of the appropriate amount of test data based on test duration for the reliable prognosis of the state of health and the characteristic lifetime. Through this the possibility of further reduction of test duration required for the reliable prognosis of state of health is investigated. Based on the results of analysis, a guideline for the determination of the duration of lifetime tests which lead to a reliable prediction of lifetime of connectors can be provided. Also, an assessment of the state of art in the prognosis of the lifetime of electrical connectors has been presented.},
  author       = {Song, Jian and Shukla, Abhay Rammurti and Probst, Roman},
  booktitle    = {2023 {IEEE} 68th Holm Conference on Electrical Contacts ({HOLM})},
  isbn         = {979-8-3503-4244-4},
  issn         = {2158-9992},
  keywords     = {accelerated life testing, test duration, contact resistance, statistical model, connector reliability},
  location     = {Seattle},
  pages        = {200--208},
  publisher    = {IEEE},
  title        = {Advances in Evaluation of State of Health of Electrical Connectors},
  doi          = {10.1109/holm56075.2023.10352279},
  year         = {2023},
}

@inproceedings{10585,
  abstract     = {Low-code programming allows the creation of software applications using a graphical user interface with minimal classical programming code ("low code") and without requiring extensive programming knowledge. This puts it in contrast to previous generations of programming languages. The advantages of low-code development are manifold, including the increase of software development capacities through a partial decentralization of the development process, speeding up software development through the low-code approach, and designing software with a strong user-centric focus. Using a low-code development platform can help companies adapt their own business processes to changing requirements more quickly and to make complexity resulting, for example, from heterogeneous customer wishes, manageable. Since many low-code development platforms are available, it is not easy for companies to select and successfully introduce a platform that meets their requirements. For this reason, this article presents a procedure model that assists in the process of selecting and implementing a platform.},
  author       = {Hinrichsen, Sven and Nikolenko, Alexander and Becker, Kai Leon and Adrian, Benjamin},
  booktitle    = {Human Systems Engineering and Design ({IHSED} 2023): Future Trends and Applications},
  editor       = {Karwowski, Waldemar and Ahram, Tareq and Milicevic, Mario and Etinger, Darko and Zubrinic, Krunoslav},
  isbn         = {978-1-958651-88-9},
  issn         = {2771-0718},
  keywords     = {Complexity Management, Low-Code Development Platform, Process Model for Selection and Implementation},
  location     = {Dubrovnik},
  publisher    = {AHFE International},
  title        = {How to Select and Implement a Suitable {Low-Code} Development Platform},
  doi          = {10.54941/ahfe1004155},
  year         = {2023},
}

@article{12214,
  abstract     = {A precise understanding of the mechanisms causing phytoplankton blooms in reservoirs is still lacking, especially in large riverine reservoirs. To better understand these blooms, the role of the complex hydrodynamics caused by dam operation must be quantified. Here we examine how synergistic hydrodynamic processes, rather than individual metrics, trigger blooms in Xiangxi Bay, a typical tributary bay of the Three Gorges Reservoir, China. We used a 3D ecological-hydrodynamic model, which integrated hydrodynamics with the abiotic factors that limit phytoplankton growth to simulate one whole year (2010). By implementing a scaling criterion, we quantified the contribution of local phytoplankton growth and hydrodynamic processes, including advection transport and vertical mixing, on bloom dynamics. Results indicated vertical mixing was the main process inhibiting blooms in colder months (from October to February) but horizontal advection, which flushed and diluted blooms, was dominant in warmer months (from May to July) when stratification was intense and nutrients were replete. Accordingly, blooms occurred when both vertical mixing and horizontal advection were low. We suggested a potential dam operation strategy to mitigate blooms during stratification, which involves withdrawing the warm surface water from upstream reservoirs to increase horizontal flows in the surface layer. Extending the application of critical turbulence model, our study shows how vertical mixing and horizontal advection rate interact with phytoplankton growth rate to drive blooms in highly dynamic riverine systems.},
  author       = {Gai, Bo and Sun, Jian and Lin, Binliang and Li, Yuanyi and Mi, Chenxi and Shatwell, Tom},
  journal      = {Journal of Hydrology},
  issn         = {1879-2707},
  keywords     = {Phytoplankton bloom dynamics, Vertical mixing, Advection transport, Three-dimensional ecological-hydrodynamic model, Three Gorges Reservoir, Dam operation},
  number       = {B},
  publisher    = {Elsevier},
  title        = {Vertical mixing and horizontal transport unravel phytoplankton blooms in a large riverine reservoir},
  doi          = {10.1016/j.jhydrol.2023.130430},
  volume       = {627},
  year         = {2023},
}

@misc{12785,
  abstract     = {{Due to the demographic aging of society, the demand for skilled caregiving is increasing. However, the already existing shortage of professional caregivers will exacerbate in the future. As a result, family caregivers must shoulder a heavier share of the care burden. To ease the burden and promote a better work-life balance, we developed the Digital Case Manager. This tool uses machine learning algorithms to learn the relationship between a care situation and the next care steps and helps family caregivers balance their professional and private lives so that they are able to continue caring for their family members without sacrificing their own jobs and personal ambitions. The data for the machine learning model are generated by means of a questionnaire based on professional assessment instruments. We implemented a proof-of-concept of the Digital Case Manager and initial tests show promising results. It offers a quick and easy-to-use tool for family caregivers in the early stages of a care situation.}},
  author       = {{Wunderlich, Paul and Wiegräbe, Frauke and Dörksen, Helene}},
  booktitle    = {{INTERNATIONAL JOURNAL OF ENVIRONMENTAL RESEARCH AND PUBLIC HEALTH}},
  issn         = {{1660-4601}},
  keywords     = {{machine learning, healthcare, case management, caring, multi-label classification}},
  number       = {{2}},
  publisher    = {{MDPI}},
  title        = {{{Digital Case Manager-A Data-Driven Tool to Support Family Caregivers with Initial Guidance}}},
  doi          = {{10.3390/ijerph20021215}},
  volume       = {{20}},
  year         = {{2023}},
}

@misc{12875,
  abstract     = {{Manufacturing systems based on Industry 4.0 concepts provide a greater availability of data and have modular characteristics enabling frequent changes. This raises the need for new security engineering concepts that cover the increasing complexity and frequency of mandatory security risk assessments. In contrast, the current standardization landscape used for the assessment of these systems only offers abstract, static, manual, and resource-intensive procedures. Therefore, this work proposes a method that further specifies the IEC 62443 aiming to automate the security risk assessments in such a way that manual efforts can be reduced and a consistent quality can be achieved. The methodology is presented using network segmentation as a guiding example and consists of four main steps: Information collection based on a process analysis, information formalisation with a semi-formal model, information usage applying first order logic to extract expert knowledge, and information access using the concept of the digital twin. In addition, the applicability of the IEC 62443 standard to the risk assessment of modular manufacturing systems is evaluated.}},
  author       = {{Ehrlich, Marco and Bröring, Andre and Diedrich, Christian and Jasperneite, Jürgen}},
  booktitle    = {{Automatisierungstechnik : AT }},
  issn         = {{2196-677X}},
  keywords     = {{asset administration shell, automation, information model, modular manufacturing system, risk assessment, security}},
  number       = {{6}},
  pages        = {{453--466}},
  publisher    = {{Walter de Gruyter GmbH}},
  title        = {{{Towards automated risk assessments for modular manufacturing systems}}},
  doi          = {{10.1515/auto-2022-0098}},
  volume       = {{71}},
  year         = {{2023}},
}

@misc{11377,
  abstract     = {{Development and optimization of biopharmaceutical production processes with cell cultures is cost- and time-consuming and often performed rather empirically. Efficient optimization of multiple objectives such as process time, viable cell density, number of operating steps & cultivation scales, required medium, amount of product as well as product quality depicts a promising approach. This contribution presents a workflow which couples uncertainty-based upstream simulation and Bayes optimization using Gaussian processes. Its application is demonstrated in a simulation case study for a relevant industrial task in process development, the design of a robust cell culture expansion process (seed train), meaning that despite uncertainties and variabilities concerning cell growth, low variations of viable cell density during the seed train are obtained. Compared to a non-optimized reference seed train, the optimized process showed much lower deviation rates regarding viable cell densities (<10% instead of 41.7%) using five or four shake flask scales and seed train duration could be reduced by 56 h from 576 h to 520 h. Overall, it is shown that applying Bayes optimization allows for optimization of a multi-objective optimization function with several optimizable input variables and under a considerable amount of constraints with a low computational effort. This approach provides the potential to be used in the form of a decision tool, e.g., for the choice of an optimal and robust seed train design or for further optimization tasks within process development.}},
  author       = {{Hernández Rodriguez, Tanja and Sekulic, Anton and Lange-Hegermann, Markus and Frahm, Björn}},
  booktitle    = {{Processes}},
  issn         = {{2227-9717}},
  keywords     = {{Gaussian processes, Bayes optimization, Pareto optimization, multi-objective, cell culture, seed train}},
  number       = {{5}},
  publisher    = {{MDPI AG}},
  title        = {{{Designing Robust Biotechnological Processes Regarding Variabilities Using Multi-Objective Optimization Applied to a Biopharmaceutical Seed Train Design}}},
  doi          = {{10.3390/pr10050883}},
  volume       = {{10}},
  year         = {{2022}},
}

@inbook{10193,
  abstract     = {{Development and optimization of biopharmaceutical production processes with cell cultures is cost- and time-consuming and often performed rather empirically. Efficient optimization of multiple objectives such as process time, viable cell density, number of operating steps & cultivation scales, required medium, amount of product as well as product quality depicts a promising approach. This contribution presents a workflow which couples uncertainty-based upstream simulation and Bayes optimization using Gaussian processes. Its application is demonstrated in a simulation case study for a relevant industrial task in process development, the design of a robust cell culture expansion process (seed train), meaning that despite uncertainties and variabilities concerning cell growth, low variations of viable cell density during the seed train are obtained. Compared to a non-optimized reference seed train, the optimized process showed much lower deviation rates regarding viable cell densities (<10% instead of 41.7%) using five or four shake flask scales and seed train duration could be reduced by 56 h from 576 h to 520 h. Overall, it is shown that applying Bayes optimization allows for optimization of a multi-objective optimization function with several optimizable input variables and under a considerable amount of constraints with a low computational effort. This approach provides the potential to be used in the form of a decision tool, e.g., for the choice of an optimal and robust seed train design or for further optimization tasks within process development.}},
  author       = {{Hernández Rodriguez, Tanja and Sekulic, Anton and Lange-Hegermann, Markus and Frahm, Björn}},
  booktitle    = {{Bioprocess Systems Engineering Applications in Pharmaceutical Manufacturing}},
  editor       = {{Pörtner, Ralf and Möller, Johannes}},
  isbn         = {{978-3-0365-5210-1}},
  issn         = {{2227-9717}},
  keywords     = {{Gaussian processes, Bayes optimization, Pareto optimization, multi-objective, cell culture, seed train}},
  pages        = {{21--48}},
  publisher    = {{MDPI}},
  title        = {{{Designing robust biotechnological processes regarding variabilities using multi-objective optimization applied to a biopharmaceutical seed train design}}},
  doi          = {{10.3390/pr10050883}},
  volume       = {{special issue}},
  year         = {{2022}},
}

@misc{8344,
  abstract     = {{BACKGROUND: The future of work in Germany is shaped by megatrends like globalization, automatization, digitization, and the demographic change. Furthermore, mass customization and the increasing usage of AI even in manual assembly offers new opportunities as well as it creates new challenges. OBJECTIVE: The trend towards mass customization in turn leads to increased complexity in production, which results in additional mental workload. This effect will continue in the foreseeable future. METHOD: Especially for small and medium sized companies, the backbone of Germany’s economy, automatization and Human-Robot-Collaboration will take time to develop. Information assistance systems are and will be a bridging technology to help organizations to manage increasing complexity and the mental workload of their employees to not only boost productivity but also keep their workforce healthy. The ongoing demographic change further underlines the need to use information assistance systems to compensate possible age-associated deficits, but also keep older employees committed to their work and avoid effects of disengagement or disenfranchisement through participatory ergonomics. RESULTS: Information assistance systems can only develop their inherent potential if they are designed to support employees of varying age, competence levels, and affinity for technology. Participatory development and early engagement are key factors for an increased acceptance and usage of the systems as well as the individualization to make it suitable for each individual employee. CONCLUSION: Expanding the functionalities to an adaptive assistance system, using physiological correlates of mental workload as an input, is conceivable in the future.}},
  author       = {{Bläsing, Dominic and Hinrichsen, Sven and Wurm, Susanne and Bornewasser, Manfred}},
  booktitle    = {{Work}},
  issn         = {{1875-9270 }},
  keywords     = {{Cognitive ergonomics, aging workforce, complexity, mixed-model assembly}},
  number       = {{4}},
  pages        = {{1535--1548}},
  publisher    = {{IOS Press}},
  title        = {{{Information assistance systems as preventive mediators between increasing customization and mental workload}}},
  doi          = {{10.3233/WOR-211283}},
  volume       = {{72}},
  year         = {{2022}},
}

@misc{11376,
  abstract     = {{For the manufacturing of complex biopharmaceuticals using bioreactors with cultivated mammalian cells, high product concentration is an important objective. The phenotype of the cells in a reactor plays an important role. Are clonal cell populations showing high cell-specific growth rates more favorable than cell lines with higher cell-specific productivities or vice versa? Five clonal Chinese hamster ovary cell populations were analyzed based on the data of a 3-month-stability study. We adapted a mechanistic cell culture model to the experimental data of one such clonally derived cell population. Uncertainties and prior knowledge concerning model parameters were considered using Bayesian parameter estimations. This model was used then to define an inoculum train protocol. Based on this, we subsequently simulated the impacts of differences in growth rates (±10%) and production rates (±10% and ±50%) on the overall cultivation time, including making the inoculum train cultures; the final production phase, the volumetric titer in that bioreactor and the ratio of both, defined as overall process productivity. We showed thus unequivocally that growth rates have a higher impact (up to three times) on overall process productivity and for product output per year, whereas cells with higher productivity can potentially generate higher product concentrations in the production vessel.}},
  author       = {{Hernández Rodriguez, Tanja and Morerod, Sophie and Pörtner, Ralf and Wurm, Florian M. and Frahm, Björn}},
  booktitle    = {{Processes}},
  issn         = {{2227-9717}},
  keywords     = {{clonal cell population, phenotypic diversity, inoculum train, uncertainty-based, cell culture model, biopharmaceutical manufacturing}},
  number       = {{6}},
  publisher    = {{MDPI AG}},
  title        = {{{Considerations of the Impacts of Cell-Specific Growth and Production Rate on Clone Selection—A Simulation Study}}},
  doi          = {{10.3390/pr9060964}},
  volume       = {{9}},
  year         = {{2021}},
}

@article{6925,
  abstract     = {{The compatibility concept is widely used in psychology and ergonomics. It describes the fit between elements of a sociotechnical system which is a prerequisite to successfully cooperate towards a common goal. For at least three decades, cognitive compatibility is of increasing importance. It describes the fit of externally presented information, information processing, and the required motor action. However, with increasing system complexity, probability for incompatibility increases, too, leading to time losses, errors and overall degraded performance. The elimination of cognitive incompatibilities through ergonomic measures at the workplace requires a lot of creativity and effort. Using practical examples from mixed-model assembly, improved information management and the use of informational assistance systems are discussed as promising ergonomic approaches. The ultimate goal is to avoid cognitive overload, for example in part picking or assembly tools choosing. To find a fit between externally mediated work instructions via displays and the subjectively used internal models and competencies is a challenging task. Only if this fit is given the system is perceived as beneficial. To achieve this, the assistance system should be configurable to fit individual needs as far as possible. Successful system design requires early participation and comprehensive integration of the assistance systems into the existing IT infrastructure.

Practical relevance: Varied manual assembly requires a high degree of cognitive work. A rise in complexity of the assembly task increases the risk that cognitive incompatibility and thus cognitive overload will occur more frequently. It is shown that such unhealthy conditions can be countered by better information presentation and by the use of individually adaptable informational assistance systems.}},
  author       = {{Bläsing, Dominic and Bornewasser, Manfred and Hinrichsen, Sven}},
  issn         = {{0340-2444}},
  journal      = {{Zeitschrift für Arbeitswissenschaft}},
  keywords     = {{Compatibility, Mental model, Mental workload, Multi-model assembly, Informational assistance systems}},
  publisher    = {{Springer}},
  title        = {{{Cognitive compatibility in modern manual mixed-model assembly systems}}},
  doi          = {{10.1007/s41449-021-00296-1}},
  year         = {{2021}},
}

@inbook{7983,
  abstract     = {{For the manufacturing of complex biopharmaceuticals using bioreactors with cultivated mammalian cells, high product concentration is an important objective. The phenotype of the cells in a reactor plays an important role. Are clonal cell populations showing high cell-specific growth rates more favorable than cell lines with higher cell-specific productivities or vice versa? Five clonal Chinese hamster ovary cell populations were analyzed based on the data of a 3-month-stability study. We adapted a mechanistic cell culture model to the experimental data of one such clonally derived cell population. Uncertainties and prior knowledge concerning model parameters were considered using Bayesian parameter estimations. This model was used then to define an inoculum train protocol. Based on this, we subsequently simulated the impacts of differences in growth rates (±10%) and production rates (±10% and ±50%) on the overall cultivation time, including making the inoculum train cultures; the final production phase, the volumetric titer in that bioreactor and the ratio of both, defined as overall process productivity. We showed thus unequivocally that growth rates have a higher impact (up to three times) on overall process productivity and for product output per year, whereas cells with higher productivity can potentially generate higher product concentrations in the production vessel.}},
  author       = {{Hernández Rodriguez, Tanja and Morerod, Sophie and Pörtner, Ralf and Wurm, Florian M. and Frahm, Björn}},
  booktitle    = {{Bioprocess Systems Engineering Applications in Pharmaceutical Manufacturing}},
  isbn         = {{978-3-0365-5210-1}},
  issn         = {{2227-9717 }},
  keywords     = {{clonal cell population, phenotypic diversity, inoculum train, uncertainty-based, cell culture model, biopharmaceutical manufacturing}},
  pages        = {{49--74}},
  publisher    = {{MDPI}},
  title        = {{{Considerations of the impacts of cell-specific growth and production rate on clone selection – a simulation study}}},
  doi          = {{10.3390/pr9060964}},
  volume       = {{special issue}},
  year         = {{2021}},
}

@misc{7986,
  abstract     = {{Bioprocess development and optimization are still cost- and time-intensive due to the enormous number of experiments involved. In this study, the recently introduced model-assisted Design of Experiments (mDoE) concept (Möller et al. in Bioproc Biosyst Eng 42(5):867, https://doi.org/10.1007/s00449-019-02089-7, 2019) was extended and implemented into a software (“mDoE-toolbox”) to significantly reduce the number of required cultivations. The application of the toolbox is exemplary shown in two case studies with Saccharomyces cerevisiae. In the first case study, a fed-batch process was optimized with respect to the pH value and linearly rising feeding rates of glucose and nitrogen source. Using the mDoE-toolbox, the biomass concentration was increased by 30% compared to previously performed experiments. The second case study was the whole-cell biocatalysis of ethyl acetoacetate (EAA) to (S)-ethyl-3-hydroxybutyrate (E3HB), for which the feeding rates of glucose, nitrogen source, and EAA were optimized. An increase of 80% compared to a previously performed experiment with similar initial conditions was achieved for the E3HB concentration.}},
  author       = {{Moser, André and Kuchemüller, Kim B. and Deppe, Sahar and Hernández Rodriguez, Tanja and Frahm, Björn and Pörtner, Ralf and Hass, Volker C. and Möller, Johannes}},
  booktitle    = {{Bioprocess and Biosystems Engineering}},
  isbn         = {{1615-7591}},
  issn         = {{1615-7605}},
  keywords     = {{Biocatalysis, Monte Carlo methods, Fed-batch strategy, Model-assisted design of experiments, Quality by design}},
  number       = {{4}},
  pages        = {{683--700}},
  publisher    = {{Springer}},
  title        = {{{Model-assisted DoE software: Optimization of growth and biocatalysis in Saccharomyces cerevisiae bioprocesses}}},
  doi          = {{10.1007/s00449-020-02478-3}},
  volume       = {{44}},
  year         = {{2021}},
}

@article{4897,
  abstract     = {{Assistance is becoming increasingly relevant in carrying out industrial work in the context of cyber-physical production systems (CPPSs) and Industry 4.0. While assistance in a single task via a single interaction modality has been explored previously, crossdevice interaction could improve the quality of assistance, especially given the concurrent and distributed nature of work in CPPSs. In this paper, we present the theoretical foundations and implementation of MiWSICx (Middleware for Work Support in Industrial Contexts), a middleware that showcases how multiple interactive computing devices such as tablets, smartphones, augmented/virtual reality glasses, and wearables could be combined to provide crossdevice industrial assistance. Based on activity theory, MiWSICx models human work as activities combining multiple users, artifacts, and cyber-physical objects. MiWSICx is developed using the actor model for deployment on a variety of hardware alongside a CPPS to provide multiuser, crossdevice, multiactivity assistance.}},
  author       = {{Dhiman, Hitesh and Röcker, Carsten}},
  issn         = {{2288-4300 }},
  journal      = {{Journal of Computational Design and Engineering}},
  keywords     = {{human–technology interaction, human–computer interaction, crossdevice interaction, cyber-physical systems, assistance, smart factory, middleware, actor model, information system design, industry 4.0}},
  number       = {{1}},
  pages        = {{428--451}},
  publisher    = {{Oxford University Press}},
  title        = {{{Middleware for providing activity-driven assistance in cyber-physical production systems}}},
  doi          = {{10.1093/jcde/qwaa088}},
  volume       = {{8}},
  year         = {{2021}},
}

@misc{12225,
  abstract     = {{Lake Sevan is the largest freshwater body in the Caucasus region, situated at an altitude of 1,900 m asl. While it is a major water resource in the whole region, Lake Sevan has received little attention in international limnological literature. Although recent studies pointed to algal blooms and negative impacts of climate change and eutrophication, the physical controls on thermal dynamics have not been characterized and model-based assessments of climate change impacts are lacking. We compiled a decade of historical data for meteorological conditions and temperature dynamics in Lake Sevan and used a one-dimensional hydrodynamic model (GLM 3.1) in order to study thermal structure, the stratification phenology and their meteorological drivers in this large mountain lake. We then evaluated the representativeness of meteorological data products covering almost 4 decades (EWEMBI-dataset: 1979-2016) for driving the model and found that these data are well suited to restore long term thermal dynamics in Lake Sevan. This established model setting allowed us to identify major changes in Lake Sevan’s stratification in response to changing meteorological conditions as expected from ongoing climate change. Our results point to a changing mixing type from dimictic to monomictic as Lake Sevan will experience prolonged summer stratification periods and more stable stratification. These projected changes in stratification must be included in long-term management perspectives as they will intensify water quality deteriorations like surface algal blooms or deep water anoxia.}},
  author       = {{Shikhani, Muhammed and Mi, Chenxi and Gevorgyan, Artur and Gevorgyan, Gor and Misakyan, Amalya and Azizyan, Levon and Barfus, Klemens and Schulze, Martin and Shatwell, Tom and Rinke, Karsten}},
  booktitle    = {{Journal of Limnology}},
  issn         = {{1723-8633}},
  keywords     = {{General Lake Model (GLM), Lake Sevan, temperature stratification, EWEMBI, climate warming}},
  number       = {{s1}},
  publisher    = {{Istituto per lo Studio degli Ecosistemi (Verbania) }},
  title        = {{{Simulating thermal dynamics of the largest lake in the Caucasus region: The mountain Lake Sevan}}},
  doi          = {{10.4081/jlimnol.2021.2024}},
  volume       = {{81}},
  year         = {{2021}},
}

@misc{12230,
  abstract     = {{Model ensembles have several benefits compared to single-model applications but are not frequently used within the lake modelling community. Setting up and running multiple lake models can be challenging and time consuming, despite the many similarities between the existing models (forcing data, hypsograph, etc.). Here we present an R package, LakeEnsemblR, that facilitates running ensembles of five different vertical one-dimensional hydrodynamic lake models (FLake, GLM, GOTM, Simstrat, MyLake). The package requires input in a standardised format and a single configuration file. LakeEnsemblR formats these files to the input required by each model, and provides functions to run and calibrate the models. The outputs of the different models are compiled into a single file, and several post-processing operations are supported. LakeEnsemblR's workflow standardisation can simplify model benchmarking and uncertainty quantification, and improve collaborations between scientists. We showcase the successful application of LakeEnsemblR for two different lakes.}},
  author       = {{Moore, Tadhg N. and Mesman, Jorrit P. and Ladwig, Robert and Feldbauer, Johannes and Olsson, Freya and Pilla, Rachel M. and Shatwell, Tom and Venkiteswaran, Jason J. and Delany, Austin D. and Dugan, Hilary and Rose, Kevin C. and Read, Jordan S.}},
  booktitle    = {{Environmental modelling & software with environment data news}},
  issn         = {{1873-6726}},
  keywords     = {{Ensemble modeling, Vertical one-dimensional lake model, R package, Calibration, Thermal structure, Hydrodynamics}},
  publisher    = {{Elsevier BV}},
  title        = {{{LakeEnsemblR: An R package that facilitates ensemble modelling of lakes}}},
  doi          = {{10.1016/j.envsoft.2021.105101}},
  volume       = {{143}},
  year         = {{2021}},
}

@misc{8384,
  abstract     = {{Dynamic simulation models are widely utilized to evaluate complex technical components and systems like electric drives or machines. They can support the development process of a production machine by avoiding an inadequate layout of components or an erroneous control design. However, the effort for building them is often too high for this purpose (lot size one). An automated model generation can be utilized to overcome the gap between efforts and advantages of dynamic simulations.

This contribution presents an approach for simplifying the dynamic model generation of production machines by using the so-called Asset Administration Shell defined by the initiative Platform Industrie 4.0. The Asset Administration Shell was developed to aggregate all data necessary for maintaining the product across its life cycle. This includes component data and models as well as structural information about a machine. The generation process is performed by using the common FMI standard and a two-step procedure which allows the linkage of different simulation tools. The model generation is demonstrated by an example layout of a machine's internal direct current grid.}},
  author       = {{Göllner, D. and Pawlik, Thomas and Schulte, Thomas}},
  booktitle    = {{2021 IEEE International Conference on Industrial Engineering and Engineering Management (IEEM)}},
  isbn         = {{978-1-6654-3772-1 }},
  issn         = {{2157-3611}},
  keywords     = {{Digital Twin, Asset Administration Shell, Dynamic Simulation Model, Industry 4.0, Automated Model Generation}},
  location     = {{Online  (Singapore)}},
  pages        = {{808--812}},
  publisher    = {{IEEE}},
  title        = {{{Utilization of the Asset Administration Shell for the Generation of Dynamic Simulation Models}}},
  doi          = {{10.1109/IEEM50564.2021.9673089}},
  year         = {{2021}},
}

@techreport{2184,
  abstract     = {{Material flows and energy flows can be correlated with sufficient data, e.g. on production energies, annual production quantities and degrees of dissipation with temperature increases in the atmosphere, volumes of molten ice or sea level increases, as well as with probability statements, information densities and management recommendations. All these quantities can be described by the comprehensive term entropy.
In order to consider the efficiency of material and energy flows, the difficulty to understand the concept of entropy with its different definitions can be summarized simply and easily in a model related to ice cubes.
The quality of a model containing percentage probability statements, statements on dissipation in material flow models in connection with statements on information density and its description by the ice cube model is still to be determined in practice in suitable material flow models. Such projects should show the types of mathematical correlations between dissipation degrees, entropy increase, increase of molten ice and sea level rise.
}},
  author       = {{Sietz, Manfred and Wrenger, Burkhard}},
  keywords     = {{Entropy, Ice Cube, Sustainability, Sea Level Rise, Information Density, Probability Statements, Material Flow Model, Dissipation}},
  title        = {{{Entropie eines Eiswürfels, Wahrscheinlichkeitsaussagen und Meeresspiegelerhöhung}}},
  doi          = {{10.25644/76E5-PC61}},
  year         = {{2020}},
}

@article{5424,
  abstract     = {{Near infrared spectroscopy in combination with a transflection probe was investigated as inline measurement in a continuous flash pasteurizer system with a sugar-water model solution. Robustness and reproducibility of fluctuations of recorded spectra as well as trueness of the chemometric analysis were compared under different process parameter settings. Variable parameters were the flow rate (from laminar flow at 30 L/h to turbulent flow at 90 L/h), temperature (20 to 100 degrees C) and the path length of the transflection probe (2 and 4 mm) while the pressure was kept constant at 2.5 bar. Temperature and path length were identified as the most affecting parameters, in case of homogenous test medium. In case of particle containing systems, the flow rate could have an impact as well. However, the application of a PLS model, which includes a broad temperature range, and the correction of prediction results by applying a polynomial regression function for prediction errors, was able to compensate these effects. Also, a path length of 2 mm leads to a higher accuracy. The applied strategy shows that by the identification of relevant process parameters and settings as well as the establishment of a compensation strategy, near infrared spectroscopy is a powerful process analytical tool for continuous flash pasteurization systems.}},
  author       = {{Weishaupt, Imke and Zimmer, Manuel and Neubauer, Peter and Schneider, Jan}},
  isbn         = {{0022-1147}},
  issn         = {{1750-3841}},
  journal      = {{Journal of Food Science}},
  keywords     = {{flash pasteurization, inline near infrared spectroscopy, multivariate data analysis, process condition influences, sugar-water-solution model beverage}},
  number       = {{7}},
  pages        = {{2020--2031}},
  title        = {{{Model based optimization of transflection near infrared spectroscopy as a process analytical tool in a continuous flash pasteurizer}}},
  doi          = {{10.1111/1750-3841.15307}},
  volume       = {{85}},
  year         = {{2020}},
}

@inproceedings{1911,
  abstract     = {{Reading about small and medium-sized manufacturing companies in newspapers and magazines, one might get the impression that integrating and exploiting new digital technologies is one of the most urgent challenges for managers today. Although many different industry 4.0 and Internet of Things (IoT) tests have been published, a framework to structure the content and the process of digital transformation is missing. Therefore, comprehensive research of available literature has been implemented; the results show that the framework from Appelfeller and Feldmann is the most suitable for application in companies. Due to the fact that the authors present no acquired experience in the application of the framework in practice, an evaluation study was set up in companies. The results of a case study are presented and an overview is given how to implement this specific check in a company.}},
  author       = {{Tackenberg, Sven and Jungkind, Wilfried and Feldmann, C. and Appelfeller, W.}},
  booktitle    = {{Production Engineering and Management}},
  editor       = {{Padoano, Elio and Villmer, Franz-Josef}},
  keywords     = {{Digital transformation, Maturity model, Check}},
  location     = {{Trieste, Italy}},
  pages        = {{281--290}},
  title        = {{{Digital Transformation of Companies: Experience gained in the Implementation of an IOT Check}}},
  year         = {{2019}},
}

@inproceedings{6003,
  abstract     = {{In industriellen Szenarien wächst seit Längerem der Bedarf an drahtloser und zuverlässiger Kommunikation. Es gibt hierzu einige aus der IT-Welt adaptierte Standards, welche in vielen Anwendungen im industriellen Umfeld nicht ausreichen. Daher wurde bei der Spezifikation des neuen 5G-Standards explizit die Nutzung im industriellen Umfeld mit berücksichtigt. Diese Arbeit beschäftigt sich mit den Vorteilen der Multi-Connectivity (MC), welche sich durch die Architektur des neuen 5G-NR Standards in der Automatisierungstechnik ergeben können. Hierzu wurden im ersten Schritt Funkkanalmessungen in einer industrienahen Umgebung – der SmartFactoryOWL – durchgeführt, und im weiteren Verlauf die MC in einen Kommunikationsstack integriert und die Zuverlässigkeit (Reliability) (REL) bei verschiedenen MC-Konfigurationen ermittelt. Im praktischen Aufbau konnte eine erhöhte REL der drahtlosen Kommunikation im industriellen Kontext unter Einsatz der MC nachgewiesen werden.}},
  author       = {{Fliedner, Niels Hendrik and Heß, Roland and Witte, Stefan}},
  booktitle    = {{KommA 2019 : Kommunikation in der Automation : 20.-21.11.2019 : 10. Jahreskolloquium "Kommunikation in der Automation"}},
  editor       = {{Jumar, Ulrich and Jasperneite, Jürgen}},
  isbn         = {{978-3-944722-85-6}},
  keywords     = {{5G, Multi-Connectivity, Zuverlässigkeit}},
  location     = {{Magdeburg}},
  publisher    = {{Otto-von-Guericke-Universität Magdeburg. R-PÖ}},
  title        = {{{Multi-Connectivity in 5G Industrial Environments}}},
  year         = {{2019}},
}

@inproceedings{12803,
  abstract     = {{The increasing amount of alarms and information for an operator in a modern plant becomes a significant safety risk. Although the notifications are a valuable support, they also lead to the curse of overloading with information for the operator. Due to the huge amount of alarms it is almost impossible to separate the crucial information from the insignificant ones. Therefore, new procedures are required to reduce these alarm floods and support the operator to minimize the safety risk. One approach is based on learning a causal model that represents the relationships between the alarms. This allows alarm sequences that are causally implied to be reduced to the root cause alarm. Fundamental element of this approach is the causal model. Therefore in this work, different probabilistic graphical models are considered and evaluated on the basis of appropriate criteria. A real use case of a bottle filling module serves as a benchmark for how well they are suitable as a causal model for the application in alarm flood reduction.}},
  author       = {{Wunderlich, Paul and Hranisavljevic, Nemanja}},
  booktitle    = {{2019 IEEE 17th International Conference on Industrial Informatics (INDIN)}},
  isbn         = {{978-1-7281-2928-0}},
  keywords     = {{probabilistic graphical model, causal model, alarm flood reduction, Bayesian network, Markov chain, restricted Boltzmann machine, automata}},
  location     = {{Helsinki, Finland}},
  pages        = {{1285--1290}},
  publisher    = {{IEEE}},
  title        = {{{Comparison of Different Probabilistic Graphical Models as Causal Models in Alarm Flood Reduction}}},
  doi          = {{10.1109/indin41052.2019.8972251}},
  year         = {{2019}},
}

@article{11746,
  abstract     = {{Objectives: Integrating Electronic Health Record (EHR) systems into the field of clinical trials still contains several challenges and obstacles. Heterogeneous standards and specifications are used to represent healthcare and clinical trial information. Therefore, this work investigates the mapping and data interoperability between healthcare and research standards: EN13606 used for the EHRs and the Clinical Data Interchange Standards Consortium Operational Data Model (CDISC ODM) used for clinical research.

Methods: Based on the specifications of CDISC ODM 1.3.2 and EN13606, a mapping between the structure and components of both standards has been performed. Archetype Definition Language (ADL) forms built with the EN13606 editor were transformed to ODM XML and reviewed. As a proof of concept, clinical sample data has been transformed into ODM and imported into an electronic data capture system. Reverse transformation from ODM to ADL has also been performed and finally reviewed concerning map-ability.

Results: The mapping between EN13606 and CDISC ODM shows the similarities and differences between the components and overall record structure of the two standards. An EN13606 archetype corresponds with a group of items within CDISC ODM. Transformations of element names, descriptions, different languages, datatypes, cardinality, optionality, units, value range and terminology codes are possible from EN13606 to CDISC ODM and vice versa.

Conclusion: It is feasible to map data elements between EN13606 and CDISC ODM and transformation of forms between ADL and ODM XML format is possible with only minor limitations. EN13606 can accommodate clinical information in a more structured manner with more constraints, whereas CDISC ODM is more suitable and specific for clinical trials and studies. It is feasible to transform EHR data in the EN13606 form to ODM to transfer it into research database. The attempt to use EN13606 to build a study protocol (that was already built with CDISC ODM) also suggests the possibility of using EN13606 standard in place of CDISC ODM if needed to avoid transformations.}},
  author       = {{Tapuria, Archana and Bruland, Philipp and Delaney, Brendan and Kalra, Dipak and Curcin, Vasa}},
  journal      = {{Digital Health}},
  issn         = {{2055-2076}},
  keywords     = {{EN13606, Operational data model, clinical archetypes, electronic health records, interoperability}},
  publisher    = {{SAGE Publications}},
  title        = {{{Comparison and transformation between CDISC ODM and EN13606 EHR standards in connecting EHR data with clinical trial research data}}},
  doi          = {{10.1177/2055207618777676}},
  volume       = {{4}},
  year         = {{2018}},
}

@inproceedings{2005,
  abstract     = {{We present a method for the fast and robust linear classification of badly conditioned data. In our considerations, badly conditioned data are such data which are numerically difficult to handle. Due to, e.g. a large number of features or a large number of objects representing classes as well as noise, outliers or incompleteness, the common software computation of the discriminating linear combination of features between classes fails or is extremely time consuming. The theoretical foundations of our approach are based on the single feature ranking, which allows fast calculation of the approximative initial classification boundary. For the increasing of classification accuracy of this boundary, the refinement is performed in the lower dimensional space. Our approach is tested on several datasets from UCI Repository. Experimental results indicate high classification accuracy of the approach. For the modern real industrial applications such a method is especially suitable in the Cyber-Physical-System environments and provides a part of the workflow for the automated classifier design.}},
  author       = {{Dörksen, Helene and Lohweg, Volker}},
  booktitle    = {{23rd IEEE International Conference on Emerging Technologies and Factory Automation (ETFA)}},
  keywords     = {{Task analysis, Software, Linear discriminant analysis, Dimensionality reduction, Mathematical model, Covariance matrices, Measurement}},
  location     = {{Turin, Italy}},
  title        = {{{Linear Classification of Badly Conditioned Data}}},
  doi          = {{10.1109/ETFA.2018.8502485}},
  year         = {{2018}},
}

@inproceedings{549,
  author       = {{Springer, André and Nothdurft, Sarah and Lahdo, Rabi and Seffer, Oliver}},
  booktitle    = {{8th International Conference on Production Engineering and Management}},
  isbn         = {{978-3-946856-03-0}},
  keywords     = {{Dissimilar metal joints, Laser processes, Multi-material components}},
  location     = {{Lemgo}},
  publisher    = {{Technische Hochschule Ostwestfalen-Lippe}},
  title        = {{{Dissimilar Metal Joints - Laser Based Manufacturing Processes for Components of Tomorrow}}},
  year         = {{2018}},
}

@inproceedings{551,
  abstract     = {{Companies that use product lifecycle management (PLM) systems need to configure them individually. Such configuration is considered as a software development process. This article demonstrates how the software development process for PLM configuration can be improved by applying application lifecycle management (ALM) concepts. This paper explains how such a concept design can be created and implemented. The concept was evaluated in a real industrial case study. By this, it provides valuable insights useable for any company, facing similar challenges as depicted in this paper.}},
  author       = {{Heister, Martin and Deuter, Andreas and Schrader, B.}},
  booktitle    = {{Production Engineering and Management}},
  editor       = {{Villmer, Franz-Josef and Padoano, Elio}},
  isbn         = {{978-3-946856-03-0}},
  keywords     = {{PLM, ALM, Software engineering, V-model, Scrum}},
  location     = {{Lemgo}},
  number       = {{1}},
  pages        = {{31--41}},
  title        = {{{Design of an ALM-Based Process for Configuring PLM Systems}}},
  year         = {{2018}},
}

@article{12239,
  abstract     = {{The modelling community has identified challenges for the integration and assessment of lake models due to the diversity of modelling approaches and lakes. In this study, we develop and assess a one-dimensional lake model and apply it to 32 lakes from a global observatory network. The data set included lakes over broad ranges in latitude, climatic zones, size, residence time, mixing regime and trophic level. Model performance was evaluated using several error assessment metrics, and a sensitivity analysis was conducted for nine parameters that governed the surface heat exchange and mixing efficiency. There was low correlation between input data uncertainty and model performance and predictions of temperature were less sensitive to model parameters than prediction of thermocline depth and Schmidt stability. The study provides guidance to where the general model approach and associated assumptions work, and cases where adjustments to model parameterisations and/or structure are required.}},
  author       = {{Bruce, Louise C. and Frassl, Marieke A. and Arhonditsis, George B. and Gal, Gideon and Hamilton, David P. and Hanson, Paul C. and Hetherington, Amy L. and Melack, John M. and Read, Jordan S. and Rinke, Karsten and Rigosi, Anna and Trolle, Dennis and Winslow, Luke and Adrian, Rita and Ayala, Ana I. and Bocaniov, Serghei A. and Boehrer, Bertram and Boon, Casper and Brookes, Justin D. and Bueche, Thomas and Busch, Brendan D. and Copetti, Diego and Cortés, Alicia and de Eyto, Elvira and Elliott, J. Alex and Gallina, Nicole and Gilboa, Yael and Guyennon, Nicolas and Huang, Lei and Kerimoglu, Onur and Lenters, John D. and MacIntyre, Sally and Makler-Pick, Vardit and McBride, Chris G. and Moreira, Santiago and Özkundakci, Deniz and Pilotti, Marco and Rueda, Francisco J. and Rusak, James A. and Samal, Nihar R. and Schmid, Martin and Shatwell, Tom and Snorthheim, Craig and Soulignac, Frédéric and Valerio, Giulia and van der Linden, Leon and Vetter, Mark and Vinçon-Leite, Brigitte and Wang, Junbo and Weber, Michael and Wickramaratne, Chaturangi and Woolway, R. Iestyn and Yao, Huaxia and Hipsey, Matthew R.}},
  journal      = {{Environmental Modelling \& Software with Environment Data News}},
  issn         = {{1873-6726}},
  keywords     = {{Lake model, Stratification, GLM, Model assessment, Global observatory data, Network science}},
  number       = {{4}},
  pages        = {{274--291}},
  publisher    = {{Elsevier Science}},
  title        = {{{A multi-lake comparative analysis of the General Lake Model (GLM): Stress-testing across a global observatory network}}},
  doi          = {{10.1016/j.envsoft.2017.11.016}},
  volume       = {{102}},
  year         = {{2018}},
}

@inproceedings{12856,
  abstract     = {{Multimodal interfaces provide users with a number of different ways of interacting with a system. This paper proposes the integration of an explicit interaction model as an extension of the well-known MVC architectural pattern. It builds upon previous extensions regarding explicit structure, user interface (UI), and environmental models. In addition, this paper proposes an explicit UI layer in a service-oriented hypermedia infrastructure to address requirements from multimodal interfaces.}},
  author       = {{Rubart, Jessica}},
  booktitle    = {{Proceedings of the 1st Workshop on Human Factors in Hypertext}},
  editor       = {{Atzenbeck, Claus}},
  isbn         = {{978-1-4503-5658-9}},
  keywords     = {{Multimodal interaction, hypermedia structures, hypertext infrastructure, structure model, interaction model, MVC, digital boardroom}},
  location     = {{Baltimore, MD}},
  pages        = {{17--21}},
  publisher    = {{ACM}},
  title        = {{{Multimodal Interaction with Hypermedia Structures}}},
  doi          = {{10.1145/3215611.3215613}},
  year         = {{2018}},
}

@inproceedings{570,
  abstract     = {{Additive manufacturing (AM) has matured rapidly during the last years due to the advancement of AM machines and materials. Nevertheless, the widespread adoption of AM is still challenged by producing parts with reliable quality. The aim of this paper is to introduce a first approach to apply in-situ monitoring for quality evaluation of produced parts. Based on the monitored data, a model is developed, in order to predict the quality of ready built parts.}},
  author       = {{Scheideler, Eva and Huxol, Andrea and Villmer, Franz-Josef}},
  booktitle    = {{Production Engineering and Management}},
  editor       = {{Padoano, Elio and Villmer, Franz-Josef}},
  isbn         = {{978-3-946856-01-6}},
  keywords     = {{Nondestructive quality control, Predictive analytics, Metal model, Additive manufacturing}},
  location     = {{Pordenone, Italy}},
  number       = {{1}},
  pages        = {{89--100}},
  title        = {{{Nondestructive Quality Check of Additive Manufactured Parts Using Empirical Models}}},
  year         = {{2017}},
}

@inproceedings{578,
  abstract     = {{Challenges of companies are presented by an increasing number of product variants or a growing product complexity in combination with a reduction of lot size. Therefore the scope of the work in the field of manual assembly will be more complex. This situation leads to a need of assistance systems. With these systems, the assembly workers will be qualified to execute their work tasks within the requirements. This approach set up on a further implementation of an assistance system at a great device manufacturer. The main focus of this implementation was the technical and functional design of the assistance system, but a successful implementation requires also an active handling of the change process. The purpose of this paper is the presentation of design principles in form of a process model for the implementation of digital assistance systems. The development of the design principles takes place in a participative approach. Executives, work councils and workers develop the project results together with external project members. Project managers will be able to manage implementation processes with the results and take all the success factors into account.}},
  author       = {{Kleineberg, Tim and Eichelberg, Matthias and Hinrichsen, Sven}},
  booktitle    = {{Production Engineering and Management}},
  editor       = {{Padoano, Elio and Villmer, Franz-Josef}},
  isbn         = {{978-3-946856-01-6}},
  keywords     = {{Assistance systems, Change management, Success factors, Process model}},
  location     = {{Pordenone, Italy}},
  number       = {{1}},
  pages        = {{25--36}},
  title        = {{{Participative Development of an Implementation Process for Worker Assistance Systems}}},
  year         = {{2017}},
}

@inproceedings{593,
  abstract     = {{Due to the increased individualization of customer demands in the last 20 years, the production systems are required to be more flexible and scalable. It is the same for the material flow system with automated guided vehicles (AGVs). To realize the flexibility and scalability, it is recommended to decentralized control the vehicles. As an attempt, a concept of swarm intelligence with Radio Frequency Identification (RFID) is proposed and introduced in this article. The concept is supposed to be used for automated guided vehicle systems in which objects have to be transported from place to place. Therefore the object has to be self-organized and has to manage its own transport. In this context the vehicles have to choose the most optimal transportation. Swarm intelligence is a topic which deserves a high level of attention as a method to realize high flexibility and scalability.}},
  author       = {{Cantauw, Alisa Maria and Li, Li}},
  booktitle    = {{Department of Production Engineering and Management}},
  editor       = {{Villmer, Franz-Josef and Padoano, Elio}},
  isbn         = {{978-3-946856-00-9}},
  keywords     = {{Swarm intelligence, Automated guided vehicle system, RFID, Internet of things, Multi-agent system}},
  location     = {{Lemgo}},
  number       = {{1}},
  pages        = {{133--143}},
  title        = {{{Application of Swarm Intelligence for Automated Guided Vehicle Systems}}},
  year         = {{2016}},
}

@inproceedings{594,
  abstract     = {{Due to steadily increased demand for customized products, as well as their enhanced complexity and shorter product lifecycles, companies in all industries require a reliable prediction of the expected product development costs from the very start of product realization. Incorrectly estimated project costs may lead to serious consequences in the course of a development project. For example, offers are most often based on such early cost estimations and consequently, a major safety margin has to be added, which may result in the refusal of an order. A too low estimation of the costs of a product development project, on the other hand, may result in a loss for the project. In this paper, a software tool is presented for the prediction of product development costs which offers the user the ability to create a more accurate prediction of project costs on the basis of a minimum of retrograde project information. By combining a parametric cost model and cost result with stochastic character, based on the Monte Carlo method, in one software system, it is possible to significantly improve project cost estimations.}},
  author       = {{Otte, Andreas and Scheideler, Eva and Villmer, Franz-Josef}},
  booktitle    = {{Department of Production Engineering and Management}},
  editor       = {{Villmer, Franz-Josef and Padoano, Elio}},
  isbn         = {{978-3-946856-00-9}},
  keywords     = {{Cost prediction, Product realization projects, Monte Carlo method, Parametric cost model, Software tool}},
  location     = {{Lemgo}},
  number       = {{1}},
  pages        = {{281--292}},
  title        = {{{Project Cost Estimator - A Parameter-Based Tool to Predict Product Realization Costs at a Very Early Stage}}},
  year         = {{2016}},
}

@inbook{10066,
  abstract     = {{Namibia is the most arid country in Sub-Saharan Africa. The Cuvelai-Etosha Basin (CEB) in central northern Namibia in particular is experiencing various ecological and social-ecological challenges such as high climate variability, saline groundwater, dependence on Angola for freshwater supply, high population growth and density, and increasing urbanisation. These challenges make water supply and management difficult and threaten the livelihood of the local population and the health of the ecosystem. Facing up to these challenges, the German-Namibian research project CuveWaters has developed, adapted and set up different technologies as pilot plants. The Integrated Water Resources Management (IWRM) concept of CuveWaters is based on a multi-resource-mix in which water is obtained from different sources (rainwater, floodwater, groundwater and wastewater) and used for various purposes. High quality water is used as drinking water; water of a relatively low quality is used for irrigation. In cooperation with the residents of four villages and one small town, the project partners are implementing different technologies to collect and store, produce, treat and reuse water. The implemented technologies are rain- and floodwater harvesting, groundwater desalination, and the combination of sanitation, wastewater treatment and water reuse. The aim is to improve peoples’ livelihood through research on innovative and adapted solutions which contribute to a successful and adapted application of IWRM. To this end, the project integrates science, technology and societal aspects in a transdisciplinary research approach by linking scientific knowledge from natural, engineering and social sciences with the everyday practices and know-how of the stakeholders involved. Thus, the technical aspects are complemented by a wide range of societal and scientific components, such as capacity development, monitoring, participation or knowledge management.
These ensure societal embedding of the technologies and knowledge transfer. This paper will illustrate the transdisciplinary approach, implemented technologies and accompanying measures as well as key results.}},
  author       = {{Lier, Stephan and Brenda, Maria and Cornel, Peter and Deffner, Juta and Felmeden, Jörg and Jokisch, Alexander and Kluge, Thomas and Müller, Katherina and Röhrig, Julian and Stibitz, Vanessa and Urban, Wilhelm}},
  booktitle    = {{Integrated Water Resources Management: Concept, Research and Implementation}},
  editor       = {{Borchardt, Dietrich}},
  isbn         = {{978-3-319-79729-8}},
  keywords     = {{Multi-resource-mix, Rainwater and floodwater harvesting, Solar-coupled groundwater desalination, Sanitation and water reuse, Transdisciplinarity}},
  pages        = {{683--717}},
  publisher    = {{Springer}},
  title        = {{{From the Concept to the Tap - Integrated Water Resources Management in Northern Namibia}}},
  doi          = {{10.1007/978-3-319-25071-7_26}},
  year         = {{2016}},
}

@inproceedings{597,
  abstract     = {{This paper is aimed to discuss current research using data mining techniques and industry statistics in production environments. The general research approach is based on the idea of using data mining processes and techniques of industry statistics to find rare and hidden patterns behind failures of complex components. A case study will be applied to illustrate how the technique is carried out and where the limits of this approach occur. The case study deals with a component supplier of printing machines, which received an increasing number of client complaints, all related to one distinct problem. The observed failures seem to occur only among clients with very high quality standards. The affected component undergoes a very complex production process with several steps in different departments. Every single production unit records data information from multiple process variables and at different points in time. In the beginning there was no understanding of the failure causes in production at all. Therefore a huge amount of production data had to be analyzed to find the pattern that discloses the failure.
The data mining process starts with a first step in which the given data sets are prepared and then cleaned. Followed up by building a prediction model. The aim is to detect the root causes for failures and to predict potential failures in affected components. This paper shows how to use data mining to get the answer on pressing production failures.}},
  author       = {{Scheideler, Eva and Ahlemeyer-Stubbe, Andrea}},
  booktitle    = {{Production engineering and management : proceedings, 5th international conference, October 1 and 2, 2015, Trieste, Italy}},
  editor       = {{Padoano, Elio and Villmer, Franz-Josef}},
  isbn         = {{978-3-941645-11-0}},
  keywords     = {{Data mining, production failure, multi-variant analysis, multivariate process control, predictive modelling, case study}},
  location     = {{Trieste, Italy}},
  number       = {{1}},
  pages        = {{163--174}},
  publisher    = {{Hochschule Ostwestfalen-Lippe}},
  title        = {{{Data Mining: A Potential Detector to Find Failure in Complex Components}}},
  year         = {{2015}},
}

@inproceedings{4371,
  abstract     = {{A major challenge in modern data-centric medicine is the increasing amount of time-dependent data, which requires efficient user-friendly solutions for dealing with such data. To create an effective and efficient knowledge discovery process, it is important to support common data manipulation tasks by creating quick, responsive and intuitive interaction methods. In this paper we describe some methods for interactive longitudinal data visualization with focus on the usage of mobile multi-touch devices as interaction medium, based on our design and development experiences. We argue that when it comes to longitudinal data this device category offers remarkable additional interaction benefits compared to standard point-and-click desktop computer devices. An important advantage of multi-touch devices arises when interacting with particularly large longitudinal data sets: Complex, coupled interactions such as zooming into a region and scrolling around almost simultaneously is more easily achieved with the possibilities of a multi-touch device than compared to a regular mouse-based interaction device.}},
  author       = {{Holzinger, Andreas and Schwarz, Michael and Ofner, Bernhard and Jeanquartier, Fleur and Calero-Valdez, Andre and Röcker, Carsten and Ziefle, Martina}},
  booktitle    = {{Availability, Reliability, and Security in Information Systems}},
  editor       = {{Teufel, Stephanie and Tjoa, A Min and You, Ilsun and Weippl, Edgar}},
  isbn         = {{978-3-319-10974-9}},
  keywords     = {{Data Visualization, Longitudinal Data, Time Series, Multi-Touch, Mobile Computing}},
  location     = {{Fribourg, Switzerland}},
  pages        = {{124--137}},
  publisher    = {{Springer}},
  title        = {{{Towards Interactive Visualization of Longitudinal Data to Support Knowledge Discovery on Multi-Touch Tablet Computers}}},
  doi          = {{10.1007/978-3-319-10975-6_9}},
  volume       = {{8708}},
  year         = {{2014}},
}

@article{4377,
  abstract     = {{Within the last years the concept of trust has attracted increased attention in the field of smart home environments. However, little is known about what determines trustworthiness in this context. For this reason the objective was to examine mental models in terms of anthropomorphic perception of smart home environments and its relation to trustworthiness. Two studies (N=36) were carried out in the Future Care Lab, a simulated intelligent home environment. We used the teach-back method to help participants to talk about the smart home environment technology and asked to generate a metaphor of an experienced home-monitoring scenario. Finally, we applied linguistic analysis of responses to detect anthropomorphic characteristics. In general, results demonstrate inspiring metaphors related to the personal assistance system, e.g. "like an airbag…" or "like a family member…", which might be useful for future interface designs and approaches of communication in the context of smart home environments. However, no relation of anthropomorphism and trustworthiness could be found. Therefore, we suggest an anthropomorphic threshold, which should be investigated by using an improved method and trust scale.}},
  author       = {{Sack, Oliver and Röcker, Carsten}},
  issn         = {{2368-6103}},
  journal      = {{International Journal of Virtual Worlds and Human Computer Interaction}},
  keywords     = {{Smart environment, e-health, user study, mental model, anthropomorphism, metaphor, technology acceptance, trust, evaluation}},
  number       = {{1}},
  pages        = {{28--36}},
  publisher    = {{Avestia Publishing, International ASET Inc.}},
  title        = {{{“Like a Family Member Who Takes Care of Me” – Users’ Anthropomorphic Representations and Trustworthiness of Smart Home Environments}}},
  doi          = {{10.11159/vwhci.2014.004}},
  volume       = {{2}},
  year         = {{2014}},
}

@article{4381,
  abstract     = {{This article reports on two user studies exploring the knowledge of end users about technical processes of technology-enhanced home environments, which are often assumed to play an important role for attitudes such as privacy and security. In the first study (n=12 participants between 19-71 years of age), we analyzed user knowledge about technical processes using the teach-back methodology. In the second study, we additionally applied new developed questionnaires and analyzed participants’ data (n=24 participants between 19-76 years of age) regarding relations of user factors, users’ knowledge about technical processes and attitudes such as privacy and security of technology-enhanced environments. In contrast to existing assumptions, the results showed that general structural knowledge about technical processes was not related with attitudes such as privacy and security. Additionally, we found that most participants had only relatively superficial knowledge about technical processes, which was further influenced by age and technology experience.}},
  author       = {{Sack, Oliver and Röcker, Carsten}},
  issn         = {{2332-3485}},
  journal      = {{Universal Journal of Psychology}},
  keywords     = {{Technology-enhanced Environment, Ambient Assisted Living, Mental Model, Teach Back, Privacy, Security}},
  number       = {{2}},
  pages        = {{72--83}},
  publisher    = {{Horizon Research Publishing}},
  title        = {{{Privacy and Security in Technology-Enhanced Environments: Exploring Users’ Knowledge about Technological Processes of Diverse User Groups}}},
  doi          = {{10.13189/ujp.2013.010207}},
  volume       = {{1}},
  year         = {{2013}},
}

@article{12249,
  abstract     = {{We investigated the combined effects of thermal pollution from a nuclear power plant (NPP) and regional climate warming on the thermal regime of a lake. For this purpose, we used the lake model FLake and analyzed 50 years of temperature data from Lake Stechlin, Germany, which served as the cooling water reservoir for the Rheinsberg NPP from 1966 until 1990. Both modeling and statistical data analysis revealed a strong influence of the NPP cooling water discharge on the lake water temperatures and the vertical stability of the water column. A remarkable effect of thermal pollution consisted of strong vertical mixing in winter produced by the discharge of warm water into the lake when ambient water temperatures were below 4 °C. This effect caused a significant increase in the deep hypolimnion temperatures and a corresponding decrease of the vertical stability in the summer. In turn, climate warming had the opposite effect on the summer stability by increasing lake surface temperatures. Both the thermal pollution and climate change increased the duration of the summer stratification period. Our results suggest that industrial thermal pollution in temperate lakes during winter is stored in the deep water column until the next winter, whereas heat added in the summer dissipates relatively rapidly into the atmosphere. Accordingly, the winter thermal pollution could have a long-lasting effect on the lake ecology by affecting benthic biogeochemical processes.}},
  author       = {{Kirillin, Georgiy and Shatwell, Tom and Kasprzak, Peter}},
  issn         = {{1879-2707}},
  journal      = {{Journal of Hydrology}},
  keywords     = {{Industrial thermal pollution, Global warming, Lake stratification, FLake model}},
  number       = {{7}},
  pages        = {{47--56}},
  publisher    = {{Elsevier BV}},
  title        = {{{Consequences of thermal pollution from a nuclear plant on lake temperature and mixing regime}}},
  doi          = {{10.1016/j.jhydrol.2013.05.023}},
  volume       = {{496}},
  year         = {{2013}},
}

@misc{1436,
  author       = {{Denninger, Fabian}},
  keywords     = {{Multi-Touch, Multitouch}},
  pages        = {{134}},
  publisher    = {{Hochschule Ostwestfalen-Lippe}},
  title        = {{{Konzeption und Entwicklung eines multimedialen Informationssystems auf Basis von Multitouch-Technologie am Beispiel der interaktiven Flash-Anwendung "Ökotouch".}}},
  year         = {{2008}},
}

