@inproceedings{ThomaStiemerBraunetal.2023,
  author    = {Thoma, Andreas and Stiemer, Luc and Braun, Carsten and Fisher, Alex and Gardi, Alessandro G.},
  title     = {Potential of hybrid neural network local path planner for small UAV in urban environments},
  series    = {AIAA SCITECH 2023 Forum},
  booktitle = {AIAA SCITECH 2023 Forum},
  publisher = {AIAA},
  doi       = {10.2514/6.2023-2359},
  pages     = {13 pages},
  year      = {2023},
  abstract  = {This work proposes a hybrid algorithm combining an Artificial Neural Network (ANN) with a conventional local path planner to navigate UAVs efficiently in various unknown urban environments. The proposed method, a Hybrid Artificial Neural Network Avoidance System, is called HANNAS. The ANN analyses a video stream and classifies the current environment. This information about the current environment is used to set several control parameters of a conventional local path planner, the 3DVFH*. The local path planner then plans the path toward a specific goal point based on distance data from a depth camera. We trained and tested a state-of-the-art image segmentation algorithm, PP-LiteSeg. The proposed HANNAS method reaches a failure probability of 17\%, which is less than half the failure probability of the baseline and around half the failure probability of an improved, bio-inspired version of the 3DVFH*. The proposed HANNAS method does not show any disadvantages regarding flight time or flight distance.},
  language  = {en}
}

@article{StiemerThomaBraun2023,
  author    = {Stiemer, Luc Nicolas and Thoma, Andreas and Braun, Carsten},
  title     = {MBT3D: Deep learning based multi-object tracker for bumblebee 3D flight path estimation},
  series    = {PLoS ONE},
  volume    = {18},
  journal   = {PLoS ONE},
  number    = {9},
  publisher = {PLOS},
  address   = {San Francisco},
  issn      = {1932-6203},
  doi       = {10.1371/journal.pone.0291415},
  pages     = {e0291415},
  year      = {2023},
  abstract  = {This work presents the Multi-Bees-Tracker (MBT3D) algorithm, a Python framework implementing a deep association tracker for Tracking-By-Detection, to address the challenging task of tracking flight paths of bumblebees in a social group. While tracking algorithms for bumblebees exist, they often come with intensive restrictions, such as the need for sufficient lighting, high contrast between the animal and background, absence of occlusion, significant user input, etc. Tracking flight paths of bumblebees in a social group is challenging: they suddenly adjust their movements and change their appearance during different wing beat states while exhibiting significant similarities in their individual appearance. The MBT3D tracker, developed in this research, is an adaptation of an existing ant tracking algorithm for bumblebee tracking. It incorporates an offline trained appearance descriptor along with a Kalman Filter for appearance and motion matching. Different detector architectures for upstream detections (You Only Look Once (YOLOv5), Faster Region Proposal Convolutional Neural Network (Faster R-CNN), and RetinaNet) are investigated in a comparative study to optimize performance. The detection models were trained on a dataset containing 11359 labeled bumblebee images. YOLOv5 reaches an Average Precision of AP = 53.8\%, Faster R-CNN achieves AP = 45.3\%, and RetinaNet AP = 38.4\% on the bumblebee validation dataset, which consists of 1323 labeled bumblebee images. The tracker's appearance model is trained on 144 samples. The tracker (with Faster R-CNN detections) reaches a Multiple Object Tracking Accuracy MOTA = 93.5\% and a Multiple Object Tracking Precision MOTP = 75.6\% on a validation dataset containing 2000 images, competing with state-of-the-art computer vision methods. The framework allows reliable tracking of different bumblebees in the same video stream with rarely occurring identity switches (IDS). MBT3D has much lower IDS than other commonly used algorithms, with one of the lowest false positive rates, competing with state-of-the-art animal tracking algorithms. The developed framework reconstructs the 3-dimensional (3D) flight paths of the bumblebees by triangulation. It also handles and compares two alternative stereo camera pairs if desired.},
  language  = {en}
}

@article{GoettenHavermannBraunetal.2019,
  author    = {G{\"o}tten, Falk and Havermann, Marc and Braun, Carsten and Gomez, Francisco and Bil, Cees},
  title     = {RANS Simulation Validation of a Small Sensor Turret for UAVs},
  series    = {Journal of Aerospace Engineering},
  volume    = {32},
  journal   = {Journal of Aerospace Engineering},
  number    = {5},
  publisher = {ASCE},
  address   = {New York},
  issn      = {1943-5525},
  doi       = {10.1061/(ASCE)AS.1943-5525.0001055},
  pages     = {Article number 04019060},
  year      = {2019},
  abstract  = {Recent Unmanned Aerial Vehicle (UAV) design procedures rely on full aircraft steady-state Reynolds-Averaged Navier-Stokes (RANS) analyses in early design stages. Small sensor turrets are included in such simulations, even though their aerodynamic properties show highly unsteady behavior. Very little is known about the effects of this approach on the simulation outcomes of small turrets. Therefore, the flow around a model turret at a Reynolds number of 47,400 is simulated with a steady-state RANS approach and compared to experimental data. Lift, drag, and surface pressure show good agreement with the experiment. The RANS model predicts the separation location too far downstream and shows a larger recirculation region aft of the body. Both characteristic arch and horseshoe vortex structures are visualized and qualitatively match the ones found in the experiment. The Reynolds number dependence of the drag coefficient follows the trend of a sphere within a distinct range. The outcomes indicate that a steady-state RANS model of a small sensor turret is able to give results that are useful for UAV engineering purposes but might not be suited for detailed insight into flow properties.},
  language  = {en}
}