@inproceedings{4329,
  author       = {Flatt, Holger and Koch, Nils and Guenter, Andrei and R{\"o}cker, Carsten and Jasperneite, J{\"u}rgen},
  title        = {A Context-Aware Assistance System for Maintenance Applications in Smart Factories Based on Augmented Reality and Indoor Localization},
  booktitle    = {2015 {IEEE} 20th Conference on Emerging Technologies \& Factory Automation ({ETFA})},
  location     = {Luxembourg, Luxembourg},
  publisher    = {IEEE},
  year         = {2015},
  doi          = {10.1109/ETFA.2015.7301586},
  keywords     = {Maintenance engineering, Augmented reality, Context, Context modeling, Production facilities, Cameras},
  abstract     = {The term Industrie 4.0 carries the vision of smart factories, which automatically adapt to changes and assist the human as much as possible during operation and maintenance. This includes smart human machine interfaces, which reduce the chances of errors and help to make the right decisions. This paper presents an approach to equip the maintenance software running on a tablet PC with augmented reality functionality to be able to place virtual sticky notes at production modules. Additionally, these sticky notes are enriched with position information. The central element of this approach is an ontology-based context-aware framework, which aggregates and processes data from different sources. As a result, a tablet PC application was implemented, which allows displaying maintenance information as well as live plant process data in the form of augmented reality. More than 100 of those sticky notes can be placed using this system, whereas each note requires a file size of 12 to 16 kilobytes. After placing a sticky note, the system recognizes it even if the camera's position is not exactly the same as during the placing process.},
}

@inproceedings{2155,
  author       = {Henning, Kai-Fabian and Fritze, Alexander and Gillich, Eugen and M{\"o}nks, Uwe and Lohweg, Volker},
  title        = {Stable Image Acquisition for Mobile Image Processing Applications},
  booktitle    = {{IS\&T/SPIE} Electronic Imaging 2015, Digital Photography and Mobile Imaging {XI}},
  publisher    = {SPIE},
  year         = {2015},
  pages        = {1--12},
  doi          = {10.1117/12.2076146},
  keywords     = {Image processing, Image acquisition, Mobile devices, Sensors, Image fusion, Motion estimation, Cameras},
  abstract     = {Today, mobile devices (smartphones, tablets, etc.) are widespread and of high importance for their users. Their performance as well as versatility increases over time. This leads to the opportunity to use such devices for more specific tasks like image processing in an industrial context. For the analysis of images requirements like image quality (blur, illumination, etc.) as well as a defined relative position of the object to be inspected are crucial. Since mobile devices are handheld and used in constantly changing environments the challenge is to fulfill these requirements. We present an approach to overcome the obstacles and stabilize the image capturing process such that image analysis becomes significantly improved on mobile devices. Therefore, image processing methods are combined with sensor fusion concepts. The approach consists of three main parts. First, pose estimation methods are used to guide a user moving the device to a defined position. Second, the sensors data and the pose information are combined for relative motion estimation. Finally, the image capturing process is automated. It is triggered depending on the alignment of the device and the object as well as the image quality that can be achieved under consideration of motion and environmental effects.},
}

