@incollection{EngemannDuKallweitetal.2020,
  author    = {Engemann, Heiko and Du, Shengzhi and Kallweit, Stephan and Ning, Chuanfang and Anwar, Saqib},
  title     = {AutoSynPose: Automatic Generation of Synthetic Datasets for 6D Object Pose Estimation},
  series    = {Machine Learning and Artificial Intelligence. Proceedings of MLIS 2020},
  booktitle = {Machine Learning and Artificial Intelligence. Proceedings of MLIS 2020},
  publisher = {IOS Press},
  address   = {Amsterdam},
  isbn      = {978-1-64368-137-5},
  doi       = {10.3233/FAIA200770},
  pages     = {89--97},
  year      = {2020},
  abstract  = {We present an automated pipeline for the generation of synthetic datasets for six-dimensional (6D) object pose estimation. To this end, a fully automated generation process based on predefined settings is developed, which enables the user to create large datasets with minimal interaction and which is suitable for applications with high object variance. The pipeline is based on the Unreal Engine 4 (UE4) game engine and provides a high degree of variation for domain randomization, such as object appearance, ambient lighting, camera-object transformation and distractor density. In addition to the object pose and bounding box, the metadata includes all randomization parameters, which enables further studies on randomization parameter tuning. The developed workflow is adaptable to other 3D objects and UE4 environments. An exemplary dataset is provided, including five objects of the Yale-CMU-Berkeley (YCB) object set. The dataset consists of 6 million subsegments generated at 97 rendering locations in 12 different UE4 environments. Each dataset subsegment includes one RGB image, one depth image and one class segmentation image at pixel level.},
  language  = {en}
}

@article{EngemannDuKallweitetal.2020a,
  author    = {Engemann, Heiko and Du, Shengzhi and Kallweit, Stephan and C{\"o}nen, Patrick and Dawar, Harshal},
  title     = {OMNIVIL - an autonomous mobile manipulator for flexible production},
  journal   = {Sensors},
  series    = {Sensors},
  volume    = {20},
  number    = {24, art. no. 7249},
  publisher = {MDPI},
  address   = {Basel},
  issn      = {1424-8220},
  doi       = {10.3390/s20247249},
  pages     = {1--30},
  year      = {2020},
  language  = {en}
}

@inproceedings{UlmerBraunChengetal.2020,
  author    = {Ulmer, Jessica and Braun, Sebastian and Cheng, Chi-Tsun and Dowey, Steve and Wollert, J{\"o}rg},
  title     = {Gamified Virtual Reality Training Environment for the Manufacturing Industry},
  series    = {Proceedings of the 2020 19th International Conference on Mechatronics - Mechatronika (ME)},
  booktitle = {Proceedings of the 2020 19th International Conference on Mechatronics - Mechatronika (ME)},
  publisher = {IEEE},
  address   = {New York, NY},
  doi       = {10.1109/ME49197.2020.9286661},
  pages     = {1--6},
  year      = {2020},
  abstract  = {Industry 4.0 poses many challenges for manufacturing companies and their employees. Innovative and effective training strategies are required to cope with fast-changing production environments and new manufacturing technologies. Virtual Reality (VR) offers new ways of on-the-job, on-demand, and off-premise training. A novel concept and evaluation system combining Gamification and VR practice for flexible assembly tasks is proposed in this paper and compared to existing works. It is based on directed acyclic graphs and a leveling system. The concept enables a learning speed which is adjustable to the users' pace and dynamics, while the evaluation system facilitates adaptive work sequences and allows employee-specific task fulfillment.
The concept was implemented and analyzed in the Industry 4.0 model factory at FH Aachen for mechanical assembly jobs.},
  language  = {en}
}