@inproceedings{ViehmannLimpertHofmannetal.2023,
  author = {Viehmann, Tarik and Limpert, Nicolas and Hofmann, Till and Henning, Mike and Ferrein, Alexander and Lakemeyer, Gerhard},
  title = {Winning the RoboCup logistics league with visual servoing and centralized goal reasoning},
  series = {RoboCup 2022},
  booktitle = {RoboCup 2022},
  editor = {Eguchi, Amy and Lau, Nuno and Paetzel-Pr{\"u}smann, Maike and Wanichanon, Thanapat},
  publisher = {Springer},
  address = {Cham},
  isbn = {978-3-031-28468-7},
  doi = {10.1007/978-3-031-28469-4_25},
  pages = {300 -- 312},
  year = {2023},
  abstract = {The RoboCup Logistics League (RCLL) is a robotics competition in a production logistics scenario in the context of a Smart Factory. In the competition, a team of three robots needs to assemble products to fulfill various orders that are requested online during the game. This year, the Carologistics team was able to win the competition with a new approach to multi-agent coordination as well as significant changes to the robot's perception unit and a pragmatic network setup using the cellular network instead of WiFi. In this paper, we describe the major components of our approach with a focus on the changes compared to the last physical competition in 2019.},
  language = {en}
}

@inproceedings{HofmannLimpertMatareetal.2019,
  author = {Hofmann, Till and Limpert, Nicolas and Matar{\'e}, Viktor and Ferrein, Alexander and Lakemeyer, Gerhard},
  title = {Winning the RoboCup Logistics League with Fast Navigation, Precise Manipulation, and Robust Goal Reasoning},
  series = {RoboCup 2019: Robot World Cup XXIII},
  booktitle = {RoboCup 2019: Robot World Cup XXIII},
  publisher = {Springer},
  address = {Cham},
  isbn = {978-3-030-35699-6},
  doi = {10.1007/978-3-030-35699-6_41},
  pages = {504 -- 516},
  year = {2019},
  language = {en}
}

@incollection{FerreinLakemeyer2005,
  author = {Ferrein, Alexander and Lakemeyer, Gerhard},
  title = {Wie Roboter die Welt sehen : Roboterfußball im Dienst der Wissenschaft},
  series = {Bild und Erkenntnis : Formen und Funktionen des Bildes in Wissenschaft und Technik},
  booktitle = {Bild und Erkenntnis : Formen und Funktionen des Bildes in Wissenschaft und Technik},
  editor = {Beyer, Andreas and Lohoff, Markus},
  publisher = {Deutscher Kunstverlag},
  address = {M{\"u}nchen},
  isbn = {3-422-06463-X},
  pages = {360 -- 361},
  year = {2005},
  language = {de}
}

@article{FerreinCalmesLakemeyeretal.2006,
  author = {Calmes, Laurent and Ferrein, Alexander and Lakemeyer, Gerhard and Wagner, Hermann},
  title = {Von Schleiereulen und fussballspielenden Robotern},
  journal = {RWTH Themen (2006)},
  issn = {0179-079X},
  pages = {30 -- 33},
  year = {2006},
  language = {de}
}

@article{FerreinFritzLakemeyer2005,
  author = {Ferrein, Alexander and Fritz, Christian and Lakemeyer, Gerhard},
  title = {Using Golog for Deliberation and Team Coordination in Robotic Soccer},
  journal = {K{\"u}nstliche Intelligenz (KI)},
  volume = {19},
  number = {1},
  issn = {0933-1875},
  pages = {24 -- 30},
  year = {2005},
  language = {en}
}
@inproceedings{FerreinJacobsLakemeyer2005,
  author = {Jacobs, Stefan and Ferrein, Alexander and Lakemeyer, Gerhard},
  title = {Unreal Golog Bots},
  series = {IJCAI-05 Workshop on Reasoning, Representation, and Learning in Computer Games},
  booktitle = {IJCAI-05 Workshop on Reasoning, Representation, and Learning in Computer Games},
  pages = {31 -- 36},
  year = {2005},
  language = {en}
}

@inproceedings{FerreinDyllaLakemeyeretal.2005,
  author = {Dylla, Frank and Ferrein, Alexander and Lakemeyer, Gerhard and Murray, Jan and Obst, Oliver and R{\"o}fer, Thomas and Stolzenburg, Frieder and Visser, Ubbo},
  title = {Towards a League-Independent Qualitative Soccer Theory for RoboCup},
  series = {RoboCup 2004: Robot Soccer World Cup VIII},
  booktitle = {RoboCup 2004: Robot Soccer World Cup VIII},
  editor = {Nardi, Daniele and others},
  publisher = {Springer},
  address = {Berlin},
  isbn = {978-3-540-25046-8},
  pages = {611 -- 618},
  year = {2005},
  language = {en}
}

@incollection{GoeckelSchifferWagneretal.2015,
  author = {Goeckel, Tom and Schiffer, Stefan and Wagner, Hermann and Lakemeyer, Gerhard},
  title = {The Video Conference Tool Robot ViCToR},
  series = {Intelligent Robotics and Applications : 8th International Conference, ICIRA 2015, Portsmouth, UK, August 24-27, 2015, Proceedings, Part II},
  booktitle = {Intelligent Robotics and Applications : 8th International Conference, ICIRA 2015, Portsmouth, UK, August 24-27, 2015, Proceedings, Part II},
  publisher = {Springer},
  isbn = {978-3-319-22876-1},
  doi = {10.1007/978-3-319-22876-1_6},
  pages = {61 -- 73},
  year = {2015},
  abstract = {We present a robotic tool that autonomously follows a conversation to enable remote presence in video conferencing. When humans participate in a meeting with the help of video conferencing tools, it is crucial that they are able to follow the conversation both with acoustic and visual input. To this end, we design and implement a video conferencing tool robot that uses binaural sound source localization as its main source to autonomously orient towards the currently talking speaker. To increase robustness of the acoustic cue against noise we supplement the sound localization with a source detection stage. Also, we include a simple onset detector to retain fast response times. Since we only use two microphones, we are confronted with ambiguities on whether a source is in front or behind the device. We resolve these ambiguities with the help of face detection and additional moves. We tailor the system to our target scenarios in experiments with a four minute scripted conversation. In these experiments we evaluate the influence of different system settings on the responsiveness and accuracy of the device.},
  language = {en}
}
@inproceedings{NiemuellerFerreinReuteretal.2015,
  author = {Niemueller, Tim and Ferrein, Alexander and Reuter, Sebastian and Jeschke, Sabina and Lakemeyer, Gerhard},
  title = {The RoboCup Logistics League as a Holistic Multi-Robot Smart Factory Benchmark},
  series = {Proceedings of the IROS 2015 Open forum on evaluation of results, replication of experiments and benchmarking in robotics research},
  booktitle = {Proceedings of the IROS 2015 Open forum on evaluation of results, replication of experiments and benchmarking in robotics research},
  pages = {3 S.},
  year = {2015},
  abstract = {With autonomous mobile robots receiving increased attention in industrial contexts, the need for benchmarks becomes more and more an urgent matter. The RoboCup Logistics League (RCLL) is one specific industry-inspired scenario focusing on production logistics within a Smart Factory. In this paper, we describe how the RCLL allows to assess the performance of a group of robots within the scenario as a whole, focusing specifically on the coordination and cooperation strategies and the methods and components to achieve them. We report on recent efforts to analyze performance of teams in 2014 to understand the implications of the current grading scheme, and derived criteria and metrics for performance assessment based on Key Performance Indicators (KPI) adapted from classic factory evaluation. We reflect on differences and compatibility towards RoCKIn, a recent major benchmarking European project.},
  language = {en}
}

@inproceedings{NiemuellerLakemeyerFerrein2015,
  author = {Niemueller, Tim and Lakemeyer, Gerhard and Ferrein, Alexander},
  title = {The RoboCup Logistics League as a Benchmark for Planning in Robotics},
  series = {Proceedings of the 3rd Workshop on Planning and Robotics (PlanRob-15); Jerusalem, Israel 7-8/6/2015},
  booktitle = {Proceedings of the 3rd Workshop on Planning and Robotics (PlanRob-15); Jerusalem, Israel 7-8/6/2015},
  editor = {Finzi, Alberto},
  pages = {63 -- 68},
  year = {2015},
  language = {en}
}