@incollection{EngemannDuKallweitetal.2020,
  author    = {Engemann, Heiko and Du, Shengzhi and Kallweit, Stephan and Ning, Chuanfang and Anwar, Saqib},
  title     = {{AutoSynPose}: Automatic Generation of Synthetic Datasets for {6D} Object Pose Estimation},
  series    = {Machine Learning and Artificial Intelligence. Proceedings of {MLIS} 2020},
  booktitle = {Machine Learning and Artificial Intelligence. Proceedings of {MLIS} 2020},
  publisher = {IOS Press},
  address   = {Amsterdam},
  isbn      = {978-1-64368-137-5},
  doi       = {10.3233/FAIA200770},
  pages     = {89--97},
  year      = {2020},
  abstract  = {We present an automated pipeline for the generation of synthetic datasets for six-dimension (6D) object pose estimation. Therefore, a completely automated generation process based on predefined settings is developed, which enables the user to create large datasets with a minimum of interaction and which is feasible for applications with a high object variance. The pipeline is based on the Unreal 4 (UE4) game engine and provides a high variation for domain randomization, such as object appearance, ambient lighting, camera-object transformation and distractor density. In addition to the object pose and bounding box, the metadata includes all randomization parameters, which enables further studies on randomization parameter tuning. The developed workflow is adaptable to other 3D objects and UE4 environments. An exemplary dataset is provided including five objects of the Yale-CMU-Berkeley (YCB) object set. The datasets consist of 6 million subsegments using 97 rendering locations in 12 different UE4 environments. Each dataset subsegment includes one RGB image, one depth image and one class segmentation image at pixel-level.},
  language  = {en}
}

@incollection{NiemuellerLakemeyerReuteretal.2017,
  author    = {Niemueller, Tim and Lakemeyer, Gerhard and Reuter, Sebastian and Jeschke, Sabina and Ferrein, Alexander},
  title     = {Benchmarking of Cyber-Physical Systems in Industrial Robotics: The {RoboCup} Logistics League as a {CPS} Benchmark Blueprint},
  series    = {Cyber-Physical Systems: Foundations, Principles and Applications},
  booktitle = {Cyber-Physical Systems: Foundations, Principles and Applications},
  publisher = {Academic Press},
  address   = {London},
  doi       = {10.1016/B978-0-12-803801-7.00013-4},
  pages     = {193--207},
  year      = {2017},
  abstract  = {In the future, we expect manufacturing companies to follow a new paradigm that mandates more automation and autonomy in production processes. Such smart factories will offer a variety of production technologies as services that can be combined ad hoc to produce a large number of different product types and variants cost-effectively even in small lot sizes. This is enabled by cyber-physical systems that feature flexible automated planning methods for production scheduling, execution control, and in-factory logistics. During development, testbeds are required to determine the applicability of integrated systems in such scenarios. Furthermore, benchmarks are needed to quantify and compare system performance in these industry-inspired scenarios at a comprehensible and manageable size which is, at the same time, complex enough to yield meaningful results. In this chapter, based on our experience in the RoboCup Logistics League (RCLL) as a specific example, we derive a generic blueprint for how a holistic benchmark can be developed, which combines a specific scenario with a set of key performance indicators as metrics to evaluate the overall integrated system and its components.},
  language  = {en}
}

@incollection{NiemuellerZwillingLakemeyeretal.2017,
  author    = {Niemueller, Tim and Zwilling, Frederik and Lakemeyer, Gerhard and L{\"o}bach, Matthias and Reuter, Sebastian and Jeschke, Sabina and Ferrein, Alexander},
  title     = {Cyber-Physical System Intelligence},
  series    = {Industrial Internet of Things},
  booktitle = {Industrial Internet of Things},
  publisher = {Springer},
  address   = {Cham},
  isbn      = {978-3-319-42559-7},
  doi       = {10.1007/978-3-319-42559-7_17},
  pages     = {447--472},
  year      = {2017},
  abstract  = {Cyber-physical systems are ever more common in manufacturing industries. Increasing their autonomy has been declared an explicit goal, for example, as part of the Industry 4.0 vision. To achieve this system intelligence, principled and software-driven methods are required to analyze sensing data, make goal-directed decisions, and eventually execute and monitor chosen tasks. In this chapter, we present a number of knowledge-based approaches to these problems and case studies with in-depth evaluation results of several different implementations for groups of autonomous mobile robots performing in-house logistics in a smart factory. We focus on knowledge-based systems because besides providing expressive languages and capable reasoning techniques, they also allow for explaining how a particular sequence of actions came about, for example, in the case of a failure.},
  language  = {en}
}

@incollection{NiemuellerReuterEwertetal.2015,
  author    = {Niemueller, Tim and Reuter, Sebastian and Ewert, Daniel and Ferrein, Alexander and Jeschke, Sabina and Lakemeyer, Gerhard},
  title     = {Decisive Factors for the Success of the {Carologistics} {RoboCup} Team in the {RoboCup} Logistics League 2014},
  series    = {RoboCup 2014: Robot World Cup XVIII},
  booktitle = {RoboCup 2014: Robot World Cup XVIII},
  publisher = {Springer},
  isbn      = {978-3-319-18615-3},
  pages     = {155--167},
  year      = {2015},
  language  = {en}
}

@incollection{GoeckelSchifferWagneretal.2015,
  author    = {Goeckel, Tom and Schiffer, Stefan and Wagner, Hermann and Lakemeyer, Gerhard},
  title     = {The Video Conference Tool Robot {ViCToR}},
  series    = {Intelligent Robotics and Applications : 8th International Conference, ICIRA 2015, Portsmouth, UK, August 24-27, 2015, Proceedings, Part II},
  booktitle = {Intelligent Robotics and Applications : 8th International Conference, ICIRA 2015, Portsmouth, UK, August 24-27, 2015, Proceedings, Part II},
  publisher = {Springer},
  isbn      = {978-3-319-22876-1},
  doi       = {10.1007/978-3-319-22876-1_6},
  pages     = {61--73},
  year      = {2015},
  abstract  = {We present a robotic tool that autonomously follows a conversation to enable remote presence in video conferencing. When humans participate in a meeting with the help of video conferencing tools, it is crucial that they are able to follow the conversation both with acoustic and visual input. To this end, we design and implement a video conferencing tool robot that uses binaural sound source localization as its main source to autonomously orient towards the currently talking speaker. To increase robustness of the acoustic cue against noise we supplement the sound localization with a source detection stage. Also, we include a simple onset detector to retain fast response times. Since we only use two microphones, we are confronted with ambiguities on whether a source is in front or behind the device. We resolve these ambiguities with the help of face detection and additional moves. We tailor the system to our target scenarios in experiments with a four minute scripted conversation. In these experiments we evaluate the influence of different system settings on the responsiveness and accuracy of the device.},
  language  = {en}
}

@incollection{KallweitGottschalkWalenta2016,
  author       = {Kallweit, Stephan and Gottschalk, Michael and Walenta, Robert},
  title        = {{ROS} based safety concept for collaborative robots in industrial applications},
  series       = {Advances in robot design and intelligent control : proceedings of the 24th International Conference on Robotics in Alpe-Adria-Danube Region (RAAD). (Advances in intelligent systems and computing ; 371)},
  booktitle    = {Advances in robot design and intelligent control : proceedings of the 24th International Conference on Robotics in Alpe-Adria-Danube Region (RAAD). (Advances in intelligent systems and computing ; 371)},
  publisher    = {Springer},
  address      = {Cham},
  organization = {International Conference on Robotics in Alpe-Adria-Danube Region <24, 2015, Bucharest>},
  isbn         = {978-3-319-21289-0 (Print) ; 978-3-319-21290-6 (E-Book)},
  doi          = {10.1007/978-3-319-21290-6_3},
  pages        = {27--35},
  year         = {2016},
  abstract     = {The production and assembly of customized products increases the demand for flexible automation systems. One approach is to remove the safety fences that separate human and industrial robot to combine their skills. This collaboration possesses a certain risk for the human co-worker, leading to numerous safety concepts to protect him. The human needs to be monitored and tracked by a safety system using different sensors. The proposed system consists of a RGBD camera for surveillance of the common working area, an array of optical distance sensors to compensate shadowing effects of the RGBD camera and a laser range finder to detect the co-worker when approaching the work cell. The software for collision detection, path planning, robot control and predicting the behaviour of the co-worker is based on the Robot Operating System (ROS). A first prototype of the work cell shows that with advanced algorithms from the field of mobile robotics a very flexible safety concept can be realized: the robot not simply stops its movement when detecting a collision, but plans and executes an alternative path around the obstacle.},
  language     = {en}
}