@inproceedings{NikolovskiRekeElsenetal.2021,
  author    = {Nikolovski, Gjorgji and Reke, Michael and Elsen, Ingo and Schiffer, Stefan},
  title     = {Machine Learning Based {3D} Object Detection for Navigation in Unstructured Environments},
  booktitle = {2021 IEEE Intelligent Vehicles Symposium Workshops (IV Workshops)},
  publisher = {IEEE},
  isbn      = {978-1-6654-7921-9},
  doi       = {10.1109/IVWorkshops54471.2021.9669218},
  pages     = {236--242},
  year      = {2021},
  abstract  = {In this paper we investigate the use of deep neural networks for 3D object detection in uncommon, unstructured environments such as in an open-pit mine. While neural nets are frequently used for object detection in regular autonomous driving applications, more unusual driving scenarios aside street traffic pose additional challenges. For one, the collection of appropriate data sets to train the networks is an issue. For another, testing the performance of trained networks often requires tailored integration with the particular domain as well. While there exist different solutions for these problems in regular autonomous driving, there are only very few approaches that work for special domains just as well. We address both the challenges above in this work. First, we discuss two possible ways of acquiring data for training and evaluation. That is, we evaluate a semi-automated annotation of recorded LIDAR data and we examine synthetic data generation. Using these datasets we train and test different deep neural network for the task of object detection. Second, we propose a possible integration of a ROS2 detector module for an autonomous driving platform. Finally, we present the performance of three state-of-the-art deep neural networks in the domain of 3D object detection on a synthetic dataset and a smaller one containing a characteristic object from an open-pit mine.},
  language  = {en},
}

@inproceedings{DeyElsenFerreinetal.2021,
  author    = {Dey, Thomas and Elsen, Ingo and Ferrein, Alexander and Frauenrath, Tobias and Reke, Michael and Schiffer, Stefan},
  title     = {{CO2} Meter: A Do-It-Yourself Carbon Dioxide Measuring Device for the Classroom},
  booktitle = {PETRA 2021: The 14th PErvasive Technologies Related to Assistive Environments Conference},
  editor    = {Makedon, Fillia},
  publisher = {Association for Computing Machinery},
  address   = {New York},
  isbn      = {978-1-4503-8792-7},
  doi       = {10.1145/3453892.3462697},
  pages     = {292--299},
  year      = {2021},
  abstract  = {In this paper we report on CO2 Meter, a do-it-yourself carbon dioxide measuring device for the classroom. Part of the current measures for dealing with the SARS-CoV-2 pandemic is proper ventilation in indoor settings. This is especially important in schools with students coming back to the classroom even with high incidents rates. Static ventilation patterns do not consider the individual situation for a particular class. Influencing factors like the type of activity, the physical structure or the room occupancy are not incorporated. Also, existing devices are rather expensive and often provide only limited information and only locally without any networking. This leaves the potential of analysing the situation across different settings untapped. Carbon dioxide level can be used as an indicator of air quality, in general, and of aerosol load in particular. Since, according to the latest findings, SARS-CoV-2 can be transmitted primarily in the form of aerosols, carbon dioxide may be used as a proxy for the risk of a virus infection. Hence, schools could improve the indoor air quality and potentially reduce the infection risk if they actually had measuring devices available in the classroom. Our device supports schools in ventilation and it allows for collecting data over the Internet to enable a detailed data analysis and model generation. First deployments in schools at different levels were received very positively. A pilot installation with a larger data collection and analysis is underway.},
  language  = {en},
}