@book{13336,
  abstract     = {{Das Lehr- und Arbeitsbuch entspricht der Einführungsvorlesung der Elektrotechnik an Hochschulen und ist explizit für das Selbststudium konzipiert.
Von den physikalischen Grundlagen, elektrotechnischen Grundbegriffen und elektromagnetischen Feldern bis hin zu Fourier-Reihen und transienten Vorgängen werden in 22 Kapiteln grundlegende und vertiefende Vorlesungsinhalte der Elektrotechnik wiedergegeben. Aufgaben, Lösungen und kleinere Zusammenfassungen am Ende jedes Kapitels unterstützen beim selbstständigen Lernen und Erarbeiten der Inhalte.
Das Buch führt als Selbststudium leicht lesbar durch die Basis der Elektrotechnik. Das Lernen mit diesem Arbeitsbuch ist in einem Bachelor-Fernstudiengang Elektrotechnik erprobt.}},
  author       = {Meier, Uwe and Stübbe, Oliver},
  isbn         = {{978-3-658-49183-3}},
  keywords     = {{Automotive Engineering, Computer Engineering and Networks, Electrical and Electronic Engineering, Marine Engineering, Mechanical Engineering, Civil Engineering}},
  pages        = {{587}},
  publisher    = {{Springer Vieweg}},
  title        = {{{Elektrotechnik zum Selbststudium : Grundlagen und Vertiefung}}},
  year         = {{2026}},
}

@inproceedings{12993,
  abstract     = {{In computer science and related technical fields, researchers, educators, and practitioners are continuously automating recurring tasks for high efficiency in a wide variety of fields. In higher education, such tasks that educators face are the recurring review and assessment process of students' programming coursework. Thus, various attempts exist to automate the assessment and feedback generation for course homework and practicals in higher education. Those approaches for automated programming task assessment often comprise running automated tests to check for limited functional correctness and potentially style checking for various violations (LINTing). Educators familiar with large-scale automated task assessment are likely used to seeing hard-coded solutions specifically or accidentally designed to just pass the required tests, ignoring or misinterpreting the actual task requirements. Detecting such issues in arbitrary code is non-trivial and an ongoing research topic in software engineering. Software engineering research has yielded various semantic analysis frameworks, such as GitHub's CodeQL, which can be adapted for programming task assessment. We present a work-in-progress programming task analysis framework which employs CodeQL's analysis technology to identify the actual use of task-description-mandated syntactic and semantic elements such as loop structures or the use of mandated data blocks in branching conditions. This allows extending existing course work analysis frameworks to include a semantic check of an uploaded program which exceeds the relatively simple set of input-output test cases provided by unit tests. We use a running example of entry level programming tasks and several solution attempts to introduce and explain our proposed control flow and data flow -based analysis method. We discuss the benefits of including semantic analysis as an additional method in the automated programming task assessment toolbox.
Our main contribution is the adaptation of a semantic analysis code framework to analyse syntactic and semantic components in students' programming coursework.}},
  author       = {Wehmeier, Leon and Eilermann, Sebastian and Niggemann, Oliver and Deuter, Andreas},
  booktitle    = {{FIE 2023 : College Station, TX, USA, October 18-21, 2023 : conference proceedings / 2023 IEEE Frontiers in Education Conference (FIE)}},
  isbn         = {{979-8-3503-3643-6}},
  keywords     = {{Codes, Electronic learning, Soft sensors, Semantics, Education, Syntactics, Task analysis}},
  location     = {{College Station, TX, USA}},
  publisher    = {{IEEE}},
  title        = {{{Task-fidelity Assessment for Programming Tasks Using Semantic Code Analysis}}},
  doi          = {{10.1109/fie58773.2023.10342916}},
  year         = {{2024}},
}

@article{11348,
  abstract     = {{Lifetime is an important feature defining the reliability of electrical connectors. In general practice, the lifetime tests required for reliability estimation are time and labor intensive. In our previous work, a data driven method using a statistical process, with an application of probability distributions such as standard normal distribution and generalized extreme value (GEV) distribution with negative skewness to predict degradation paths, was introduced for estimation of the lifetime and FIT rate with the help of electrical contact resistance data collected from short term tests. The proposed method proved its significance by showing the possibility of drastic reduction in the lifetime test duration required for reliability determination. In this work, a non-parametric distribution free method using percentiles of actual measured contact resistances is used for determining the lifetime as against the percentiles of probability distribution used in previous work, thereby simplifying the process further and leading to an even more precise estimation. The lifetimes calculated from parametric and non-parametric methods are compared to highlight the significance of distribution free method in reliability estimation.}},
  author       = {Shukla, Abhay Rammurti and Martin, Robert and Probst, Roman and Song, Jian},
  journal      = {{Microelectronics Reliability}},
  issn         = {{0026-2714}},
  keywords     = {{Electrical and Electronic Engineering, Surfaces, Coatings and Films, Safety, Risk, Reliability and Quality, Condensed Matter Physics, Atomic and Molecular Physics, and Optics, Electronic, Optical and Magnetic Materials}},
  publisher    = {{Elsevier}},
  title        = {{{Comparison of different statistical methods for prediction of lifetime of electrical connectors with short term tests}}},
  doi          = {{10.1016/j.microrel.2023.115216}},
  volume       = {{150}},
  year         = {{2023}},
}

@inproceedings{10326,
  abstract     = {{In the food industry, and especially in wines as products thereof, ethanol and sulfur dioxide play an equally important role. Both substances are important wine quality characteristics as they influence the taste and odor. As both substances comprise volatile matter, electronic noses should be applicable to discriminate the different qualities of wines. Our study investigates the influence of alcohol and sulfur dioxide on the discrimination ability of wines (especially those of the same grape variety) using two different electronic nose systems. One system is equipped with metal oxide sensors and the other with quartz crystal microbalance sensors. Contrary to indications in literature, where the alcohol content is discussed to have a large influence on e-nose results, it was shown that a difference of 1 % ethanol was not sufficient to allow accurate discrimination using Linear Discriminant Analysis by any system. On the positive side, the analyzed concentrations of ethanol (about 12 %) did not superimpose other volatile information. So difference in sulfur dioxide content gave an accuracy for sample discrimination of up to 90.6 % with MOS nose. Thus, we are so far partially able to discriminate wines with electronic noses based on their volatile imprint.}},
  author       = {Wörner, Julius and Dörksen, Helene and Pein-Hackelbusch, Miriam},
  booktitle    = {{2023 IEEE 21st International Conference on Industrial Informatics (INDIN)}},
  keywords     = {{Ethanol, Pipelines, Metals, Nose, Electronic noses, Sensor systems, Sensors, Quartz crystals, Linear discriminant analysis, Sulfur}},
  location     = {{Lemgo}},
  publisher    = {{IEEE}},
  title        = {{{Key Indicators for the Discrimination of Wines by Electronic Noses}}},
  doi          = {{10.1109/INDIN51400.2023.10217912}},
  year         = {{2023}},
}

@article{11558,
  abstract     = {{Patient Reported Outcomes (PROs) provide essential clinical data for the diagnosis and treatment of patients. Mobile technologies enable rapid and structured collection of PROs with a high usability. MoPat is an electronic PRO system developed at the Münster University that enables patients to complete PROs in multiple languages. This research reports the further development of MoPat and the inclusion of features to document images electronically that will be evaluated in a multi-site clinical research.}},
  author       = {Soto-Rey, Iñaki and Hardt, Tobias and Hollenberg, Luca and Bruland, Philipp and Ständer, Sonja and Dugas, Martin and Storck, Michael},
  issn         = {{1879-8365}},
  journal      = {{Studies in Health Technology and Informatics}},
  keywords     = {{Electronic documentation, Mobile Health, Patient-reported Outcomes}},
  pages        = {{1779--1780}},
  publisher    = {{IOS Press}},
  title        = {{{Electronic Image Documentation of Patient Reported Outcomes Using Mobile Technologies.}}},
  doi          = {{10.3233/SHTI190644}},
  volume       = {{264}},
  year         = {{2019}},
}

@incollection{4313,
  abstract     = {{This paper reports on a study (N = 471) exploring the acceptance of video-based home monitoring systems as well as criteria influencing their acceptance. While most participants stated that they would use home monitoring solutions under certain conditions, the majority of participants is rather reluctant to use systems that transmit visual and acoustical information to remote medical personnel. Besides age, most user characteristics, which played important roles in technology acceptance research for many years, do not appear to be decisive factors for the acceptance of electronic home-monitoring services.}},
  author       = {Röcker, Carsten},
  booktitle    = {{Intelligent Human Systems Integration 2019}},
  editor       = {Karwowski, Waldemar and Ahram, Tareq},
  isbn         = {{978-3-030-11050-5}},
  keywords     = {{Active assisted living, Electronic homecare, e-health : Video-based monitoring, Technology acceptance, User-centered design, Study}},
  location     = {{San Diego, California, USA}},
  pages        = {{551--556}},
  publisher    = {{Springer}},
  title        = {{{Exploring the Acceptance of Video-Based Medical Support}}},
  doi          = {{10.1007/978-3-030-11051-2_83}},
  volume       = {{903}},
  year         = {{2019}},
}

@article{11746,
  abstract     = {{Objectives: Integrating Electronic Health Record (EHR) systems into the field of clinical trials still contains several challenges and obstacles. Heterogeneous standards and specifications are used to represent healthcare and clinical trial information. Therefore, this work investigates the mapping and data interoperability between healthcare and research standards: EN13606 used for the EHRs and the Clinical Data Interchange Standards Consortium Operational Data Model (CDISC ODM) used for clinical research.

Methods: Based on the specifications of CDISC ODM 1.3.2 and EN13606, a mapping between the structure and components of both standards has been performed. Archetype Definition Language (ADL) forms built with the EN13606 editor were transformed to ODM XML and reviewed. As a proof of concept, clinical sample data has been transformed into ODM and imported into an electronic data capture system. Reverse transformation from ODM to ADL has also been performed and finally reviewed concerning map-ability.

Results: The mapping between EN13606 and CDISC ODM shows the similarities and differences between the components and overall record structure of the two standards. An EN13606 archetype corresponds with a group of items within CDISC ODM. Transformations of element names, descriptions, different languages, datatypes, cardinality, optionality, units, value range and terminology codes are possible from EN13606 to CDISC ODM and vice versa.

Conclusion: It is feasible to map data elements between EN13606 and CDISC ODM and transformation of forms between ADL and ODM XML format is possible with only minor limitations. EN13606 can accommodate clinical information in a more structured manner with more constraints, whereas CDISC ODM is more suitable and specific for clinical trials and studies. It is feasible to transform EHR data in the EN13606 form to ODM to transfer it into research database. The attempt to use EN13606 to build a study protocol (that was already built with CDISC ODM) also suggests the possibility of using EN13606 standard in place of CDISC ODM if needed to avoid transformations.}},
  author       = {Tapuria, Archana and Bruland, Philipp and Delaney, Brendan and Kalra, Dipak and Curcin, Vasa},
  journal      = {{Digital health}},
  issn         = {{2055-2076}},
  keywords     = {{EN13606, Operational data model, clinical archetypes, electronic health records, interoperability}},
  publisher    = {{SAGE Publications}},
  title        = {{{Comparison and transformation between CDISC ODM and EN13606 EHR standards in connecting EHR data with clinical trial research data}}},
  doi          = {{10.1177/2055207618777676}},
  volume       = {{4}},
  year         = {{2018}},
}

@article{11590,
  abstract     = {{Reading centers provide centralized high-quality diagnostics in ophthalmic clinical trials. Since ophthalmic images are captured in electronic format at peripheral clinics, an integrated workflow for image transfer and creation of structured reports is needed, including quality assurance. The image portal and the study database are separate components. We assessed whether this integration is feasible with trial-related IT standards and built a prototype system as a proof-of-concept. CDISC ODM and OAuth authentication were used to integrate the image portal with x4T-EDC, facilitating automatic data transfer and single sign-on.}},
  author       = {Bruland, Philipp and Kathöfer, Ulrike and Treder, Maximilian and Eter, Nicole and Dugas, Martin},
  issn         = {{1879-8365}},
  journal      = {{Studies in Health Technology and Informatics}},
  keywords     = {{Electronic Data Capture, Reading Center, System Integration}},
  pages        = {{1254}},
  publisher    = {{IOS Press}},
  title        = {{{Integrating x4T-EDC into an Image-Portal to Establish an Ophthalmic Reading Center.}}},
  volume       = {{245}},
  year         = {{2017}},
}

@article{11745,
  abstract     = {{Background: Data capture is one of the most expensive phases during the conduct of a clinical trial and the increasing use of electronic health records (EHR) offers significant savings to clinical research. To facilitate these secondary uses of routinely collected patient data, it is beneficial to know what data elements are captured in clinical trials. Therefore our aim here is to determine the most commonly used data elements in clinical trials and their availability in hospital EHR systems.

Methods: Case report forms for 23 clinical trials in differing disease areas were analyzed. Through an iterative and consensus-based process of medical informatics professionals from academia and trial experts from the European pharmaceutical industry, data elements were compiled for all disease areas and with special focus on the reporting of adverse events. Afterwards, data elements were identified and statistics acquired from hospital sites providing data to the EHR4CR project.

Results: The analysis identified 133 unique data elements. Fifty elements were congruent with a published data inventory for patient recruitment and 83 new elements were identified for clinical trial execution, including adverse event reporting. Demographic and laboratory elements lead the list of available elements in hospitals EHR systems. For the reporting of serious adverse events only very few elements could be identified in the patient records.

Conclusions: Common data elements in clinical trials have been identified and their availability in hospital systems elucidated. Several elements, often those related to reimbursement, are frequently available whereas more specialized elements are ranked at the bottom of the data inventory list. Hospitals that want to obtain the benefits of reusing data for research from their EHR are now able to prioritize their efforts based on this common data element list.}},
  author       = {Bruland, Philipp and McGilchrist, Mark and Zapletal, Eric and Acosta, Dionisio and Proeve, Johann and Askin, Scott and Ganslandt, Thomas and Doods, Justin and Dugas, Martin},
  journal      = {{BMC Medical Research Methodology}},
  issn         = {{1471-2288}},
  keywords     = {{Clinical trials, Common data elements, Data quality, Electronic health records, Metadata, Secondary use}},
  number       = {{1}},
  publisher    = {{Springer Science and Business Media LLC}},
  title        = {{{Common data elements for secondary use of electronic health record data for clinical trial execution and serious adverse event reporting}}},
  doi          = {{10.1186/s12874-016-0259-3}},
  volume       = {{16}},
  year         = {{2016}},
}

@inproceedings{4823,
  abstract     = {{The purpose of this paper is to explore alternative approaches and strategies for email filtering and notification with the rationale of developing an unobtrusive notification interface that can adapt to the user's context.}},
  author       = {Röcker, Carsten and Bayon, Victor and Memisoglu, Maral and Streitz, Norbert},
  booktitle    = {{Proceedings of the 2005 International Conference on Active Media Technology}},
  editor       = {Tarumi, H. and Li, Y. and Yoshida, T.},
  isbn         = {{0-7803-9035-0}},
  keywords     = {{Displays, Calendars, Resumes, Filtering, Electronic mail, Personal digital assistants, Filters, Books, Data mining}},
  location     = {{Takamatsu, Kagawa, Japan}},
  pages        = {{137--138}},
  publisher    = {{IEEE}},
  title        = {{{Context-Dependent Email Notification Using Ambient Displays and Mobile Devices}}},
  doi          = {{10.1109/AMT.2005.1505288}},
  year         = {{2005}},
}

