@inproceedings{RekePeterSchulteTiggesetal.2020,
  author    = {Reke, Michael and Peter, Daniel and Schulte-Tigges, Joschua and Schiffer, Stefan and Ferrein, Alexander and Walter, Thomas and Matheis, Dominik},
  title     = {A Self-Driving Car Architecture in ROS2},
  booktitle = {2020 International SAUPEC/RobMech/PRASA Conference, Cape Town, South Africa},
  publisher = {IEEE},
  address   = {New York, NY},
  isbn      = {978-1-7281-4162-6},
  doi       = {10.1109/SAUPEC/RobMech/PRASA48453.2020.9041020},
  pages     = {1--6},
  year      = {2020},
  abstract  = {In this paper we report on an architecture for a self-driving car that is based on ROS2. Self-driving cars have to make decisions based on their sensory input in real time, providing high reliability and meeting strong functional safety demands. In principle, self-driving cars are robots. However, typical robot software in general, and the previous version of the Robot Operating System (ROS) in particular, does not always meet these requirements. With its successor, ROS2, the situation has changed, and it can be considered a solution for automated and autonomous driving. Existing robotic software based on ROS was not ready for safety-critical applications like self-driving cars. We propose an architecture for using ROS2 for a self-driving car that enables safe and reliable real-time behaviour while keeping the advantages of ROS, such as a distributed architecture and standardised message types. First experiments with a real automated passenger car at lower and higher speed levels suggest that our approach is feasible for autonomous driving under the necessary real-time conditions.},
  language  = {en}
}

@article{FrankoDuKallweitetal.2020,
  author    = {Franko, Josef and Du, Shengzhi and Kallweit, Stephan and Duelberg, Enno Sebastian and Engemann, Heiko},
  title     = {Design of a Multi-Robot System for Wind Turbine Maintenance},
  journal   = {Energies},
  volume    = {13},
  number    = {10},
  publisher = {MDPI},
  address   = {Basel},
  issn      = {1996-1073},
  doi       = {10.3390/en13102552},
  pages     = {Article 2552},
  year      = {2020},
  abstract  = {The maintenance of wind turbines is of growing importance considering the transition to renewable energy. This paper presents a multi-robot approach for automated wind turbine maintenance, including a novel climbing robot. Currently, wind turbine maintenance remains a manual task, which is monotonous, dangerous, and physically demanding due to the large scale of wind turbines. Technical climbers are required to work at significant heights, even in bad weather conditions. Furthermore, a skilled labor force with sufficient knowledge of repairing fiber composite materials is rare. Autonomous mobile systems enable the digitization of the maintenance process. They can be designed for weather-independent operations. This work contributes to the development and experimental validation of a maintenance system consisting of multiple robotic platforms for a variety of tasks, such as wind turbine tower and rotor blade service. In this work, multicopters with vision and LiDAR sensors for global inspection are used to guide slower climbing robots. Lightweight magnetic climbers with surface contact were used to analyze structural parts with non-destructive inspection methods and to locally repair smaller defects. Localization was enabled by adapting odometry for conical surfaces, taking additional navigation sensors into account.
Magnets were suitable for clamping onto the surface of steel towers. A friction-based climbing ring robot (SMART: Scanning, Monitoring, Analyzing, Repair and Transportation) completed the set-up for higher payloads. The maintenance period could be extended by using weather-proofed maintenance robots. The multi-robot system ran the Robot Operating System (ROS). Additionally, first steps towards machine learning were taken, which in the future would enable maintenance staff to use pattern classification for fault diagnosis and to operate safely from the ground.},
  language  = {en}
}

@incollection{EngemannDuKallweitetal.2020a,
  author    = {Engemann, Heiko and Du, Shengzhi and Kallweit, Stephan and Ning, Chuanfang and Anwar, Saqib},
  title     = {AutoSynPose: Automatic Generation of Synthetic Datasets for 6D Object Pose Estimation},
  booktitle = {Machine Learning and Artificial Intelligence. Proceedings of MLIS 2020},
  publisher = {IOS Press},
  address   = {Amsterdam},
  isbn      = {978-1-64368-137-5},
  doi       = {10.3233/FAIA200770},
  pages     = {89--97},
  year      = {2020},
  abstract  = {We present an automated pipeline for the generation of synthetic datasets for six-dimensional (6D) object pose estimation. To this end, a completely automated generation process based on predefined settings is developed, which enables the user to create large datasets with a minimum of interaction and which is feasible for applications with a high object variance. The pipeline is based on the Unreal Engine 4 (UE4) and provides high variation for domain randomization, such as object appearance, ambient lighting, camera-object transformation and distractor density. In addition to the object pose and bounding box, the metadata includes all randomization parameters, which enables further studies on randomization parameter tuning. The developed workflow is adaptable to other 3D objects and UE4 environments. An exemplary dataset is provided including five objects of the Yale-CMU-Berkeley (YCB) object set. The dataset consists of 6 million subsegments using 97 rendering locations in 12 different UE4 environments. Each dataset subsegment includes one RGB image, one depth image and one class segmentation image at pixel level.},
  language  = {en}
}

@article{EngemannDuKallweitetal.2020b,
  author    = {Engemann, Heiko and Du, Shengzhi and Kallweit, Stephan and C{\"o}nen, Patrick and Dawar, Harshal},
  title     = {OMNIVIL - an autonomous mobile manipulator for flexible production},
  journal   = {Sensors},
  volume    = {20},
  number    = {24, art. no. 7249},
  publisher = {MDPI},
  address   = {Basel},
  issn      = {1424-8220},
  doi       = {10.3390/s20247249},
  pages     = {1--30},
  year      = {2020},
  language  = {en}
}

@inproceedings{UlmerBraunChengetal.2020,
  author    = {Ulmer, Jessica and Braun, Sebastian and Cheng, Chi-Tsun and Dowey, Steve and Wollert, J{\"o}rg},
  title     = {Gamified Virtual Reality Training Environment for the Manufacturing Industry},
  booktitle = {Proceedings of the 2020 19th International Conference on Mechatronics - Mechatronika (ME)},
  publisher = {IEEE},
  address   = {New York, NY},
  doi       = {10.1109/ME49197.2020.9286661},
  pages     = {1--6},
  year      = {2020},
  abstract  = {Industry 4.0 poses many challenges for manufacturing companies and their employees. Innovative and effective training strategies are required to cope with fast-changing production environments and new manufacturing technologies.
Virtual Reality (VR) offers new ways of on-the-job, on-demand, and off-premise training. A novel concept and evaluation system combining gamification and VR practice for flexible assembly tasks is proposed in this paper and compared to existing works. It is based on directed acyclic graphs and a leveling system. The concept enables a learning speed that is adjustable to the user's pace and dynamics, while the evaluation system facilitates adaptive work sequences and allows employee-specific task fulfillment. The concept was implemented and analyzed in the Industry 4.0 model factory at FH Aachen for mechanical assembly jobs.},
  language  = {en}
}