@inproceedings{SildatkeKarwanniKraftetal.2020,
author = {Sildatke, Michael and Karwanni, Hendrik and Kraft, Bodo and Schmidts, Oliver and Z{\"u}ndorf, Albert},
title = {Automated Software Quality Monitoring in Research Collaboration Projects},
series = {ICSEW'20: Proceedings of the IEEE/ACM 42nd International Conference on Software Engineering Workshops},
booktitle = {ICSEW'20: Proceedings of the IEEE/ACM 42nd International Conference on Software Engineering Workshops},
doi = {10.1145/3387940.3391478},
pages = {603 -- 610},
year = {2020},
language = {en}
}

@inproceedings{IomdinaKiselevaKotliaretal.2020,
author = {Iomdina, Elena N. and Kiseleva, Anna A. and Kotliar, Konstantin and Luzhnov, Petr V.},
title = {Quantification of Choroidal Blood Flow Using the OCT-A System Based on Voxel Scan Processing},
series = {2020 International Conference on Biomedical Innovations and Applications (BIA)},
booktitle = {2020 International Conference on Biomedical Innovations and Applications (BIA)},
isbn = {978-1-7281-7073-2},
doi = {10.1109/BIA50171.2020.9244511},
pages = {41 -- 44},
year = {2020},
language = {en}
}

@inproceedings{TranStaat2021,
author = {Tran, Ngoc Trinh and Staat, Manfred},
title = {FEM shakedown analysis of Kirchhoff-Love plates under uncertainty of strength},
series = {Proceedings of UNCECOMP 2021},
booktitle = {Proceedings of UNCECOMP 2021},
isbn = {978-618-85072-6-5},
doi = {10.7712/120221.8041.19047},
pages = {323 -- 338},
year = {2021},
abstract = {A new formulation to calculate the shakedown limit load of Kirchhoff plates under stochastic conditions of strength is developed. Direct structural reliability design by chance constrained programming is based on the prescribed failure probabilities, which is an effective approach of stochastic programming if it can be formulated as an equivalent deterministic optimization problem. We restrict uncertainty to strength; the loading is still deterministic. A new formulation is derived in the case of random strength with lognormal distribution. Upper bound and lower bound shakedown load factors are calculated simultaneously by a dual algorithm.},
language = {en}
}

@inproceedings{OlderogMohrBegingetal.2021,
author = {Olderog, M. and Mohr, P. and Beging, Stefan and Tsoumpas, C. and Ziemons, Karl},
title = {Simulation study on the role of tissue-scattered events in improving sensitivity for a compact time of flight Compton positron emission tomograph},
series = {2020 IEEE Nuclear Science Symposium and Medical Imaging Conference (NSS/MIC)},
booktitle = {2020 IEEE Nuclear Science Symposium and Medical Imaging Conference (NSS/MIC)},
publisher = {IEEE},
isbn = {978-1-7281-7693-2},
doi = {10.1109/NSS/MIC42677.2020.9507901},
pages = {4 pages},
year = {2021},
abstract = {In positron emission tomography, improving time, energy and spatial detector resolutions and using Compton kinematics introduces the possibility to reconstruct a radioactivity distribution image from scatter coincidences, thereby enhancing image quality. The number of single scattered coincidences alone is of the same order of magnitude as that of true coincidences. In this work, a compact Compton camera module based on monolithic scintillation material is investigated as a detector ring module. The detector interactions are simulated with the Monte Carlo package GATE. The scattering angle inside the tissue is derived from the energy of the scattered photon, which results in a set of possible scattering trajectories or a broken line of response.
The Compton kinematics collimation reduces the number of solutions. Additionally, the time of flight information helps localize the position of the annihilation. One question of this investigation is how the energy, spatial and temporal resolutions help confine the possible annihilation volume. A comparison of currently technically feasible detector resolutions (under laboratory conditions) demonstrates the influence on this annihilation volume and shows that energy and coincidence time resolution have a significant impact. An enhancement of the latter from 400 ps to 100 ps reduces the annihilation volume by around 50\%, while a change of the energy resolution in the absorber layer from 12\% to 4.5\% results in a reduction of 60\%. The inclusion of single tissue-scattered data has the potential to increase the sensitivity of a scanner by a factor of 2 to 3. The concept can be further optimized and extended for multiple scatter coincidences and subsequently validated by a reconstruction algorithm.},
language = {en}
}

@inproceedings{BornheimGriegerBialonski2021,
author = {Bornheim, Tobias and Grieger, Niklas and Bialonski, Stephan},
title = {FHAC at GermEval 2021: Identifying German toxic, engaging, and fact-claiming comments with ensemble learning},
series = {Proceedings of the GermEval 2021 Workshop on the Identification of Toxic, Engaging, and Fact-Claiming Comments : 17th Conference on Natural Language Processing KONVENS 2021},
booktitle = {Proceedings of the GermEval 2021 Workshop on the Identification of Toxic, Engaging, and Fact-Claiming Comments : 17th Conference on Natural Language Processing KONVENS 2021},
publisher = {Heinrich Heine University},
address = {D{\"u}sseldorf},
doi = {10.48415/2021/fhw5-x128},
pages = {105 -- 111},
year = {2021},
language = {en}
}

@inproceedings{BlaneckBornheimGriegeretal.2022,
author = {Blaneck, Patrick Gustav and Bornheim, Tobias and Grieger, Niklas and Bialonski, Stephan},
title = {Automatic readability assessment of German sentences with transformer ensembles},
series = {Proceedings of the GermEval 2022 Workshop on Text Complexity Assessment of German Text},
booktitle = {Proceedings of the GermEval 2022 Workshop on Text Complexity Assessment of German Text},
publisher = {Association for Computational Linguistics},
address = {Potsdam},
doi = {10.48550/arXiv.2209.04299},
pages = {57 -- 62},
year = {2022},
abstract = {Reliable methods for automatic readability assessment have the potential to impact a variety of fields, ranging from machine translation to self-informed learning. Recently, large language models for the German language (such as GBERT and GPT-2-Wechsel) have become available, allowing the development of deep-learning-based approaches that promise to further improve automatic readability assessment. In this contribution, we studied the ability of ensembles of fine-tuned GBERT and GPT-2-Wechsel models to reliably predict the readability of German sentences. We combined these models with linguistic features and investigated the dependence of prediction performance on ensemble size and composition. Mixed ensembles of GBERT and GPT-2-Wechsel performed better than ensembles of the same size consisting of only GBERT or GPT-2-Wechsel models. Our models were evaluated in the GermEval 2022 Shared Task on Text Complexity Assessment on data of German sentences.
On out-of-sample data, our best ensemble achieved a root mean squared error of 0.435.},
language = {en}
}

@inproceedings{PohleFroehlichDalitzRichteretal.2020,
author = {Pohle-Fr{\"o}hlich, Regina and Dalitz, Christoph and Richter, Charlotte and Hahnen, Tobias and St{\"a}udle, Benjamin and Albracht, Kirsten},
title = {Estimation of muscle fascicle orientation in ultrasonic images},
series = {VISIGRAPP 2020 - Proceedings of the 15th International Joint Conference on Computer Vision, Imaging and Computer Graphics Theory and Applications, 5},
booktitle = {VISIGRAPP 2020 - Proceedings of the 15th International Joint Conference on Computer Vision, Imaging and Computer Graphics Theory and Applications, 5},
pages = {79 -- 86},
year = {2020},
language = {en}
}

@inproceedings{HingleyDikta2019,
author = {Hingley, Peter and Dikta, Gerhard},
title = {Finding a well performing Box-Jenkins forecasting model for annualised patent filings counts},
series = {International Symposium on Forecasting, Thessaloniki, Greece, June 2019},
booktitle = {International Symposium on Forecasting, Thessaloniki, Greece, June 2019},
pages = {24 slides},
year = {2019},
language = {en}
}

@inproceedings{StaatTran2022,
author = {Staat, Manfred and Tran, Ngoc Trinh},
title = {Strain based brittle failure criteria for rocks},
series = {Proceedings of (NACOME2022) The 11th National Conference on Mechanics, Vol. 1. Solid Mechanics, Rock Mechanics, Artificial Intelligence, Teaching and Training, Hanoi, December 2-3, 2022},
booktitle = {Proceedings of (NACOME2022) The 11th National Conference on Mechanics, Vol. 1. Solid Mechanics, Rock Mechanics, Artificial Intelligence, Teaching and Training, Hanoi, December 2-3, 2022},
publisher = {Nha xuat ban Khoa hoc tu nhien va Cong nghe (Verlag Naturwissenschaft und Technik)},
address = {Hanoi},
isbn = {978-604-357-084-7},
pages = {500 -- 509},
year = {2022},
abstract = {When confining pressure is low or absent, extensional fractures are typical, with fractures occurring on unloaded planes in rock. These ``paradox'' fractures can be explained by a phenomenological extension strain failure criterion. In the past, a simple empirical criterion for fracture initiation in brittle rock has been developed. But this criterion makes unrealistic strength predictions in biaxial compression and tension. A new extension strain criterion overcomes this limitation by adding a weighted principal shear component. The weight is chosen such that the enriched extension strain criterion represents the same failure surface as the Mohr-Coulomb (MC) criterion. Thus, the MC criterion has been derived as an extension strain criterion predicting failure modes, which are unexpected in the understanding of the failure of cohesive-frictional materials. In progressive damage of rock, the most likely fracture direction is orthogonal to the maximum extension strain. The enriched extension strain criterion is proposed as a threshold surface for crack initiation (CI) and crack damage (CD) and as a failure surface at peak (P). Examples show that the enriched extension strain criterion predicts much lower volumes of damaged rock mass compared to the simple extension strain criterion.},
language = {en}
}

@inproceedings{Gaigall2022,
author = {Gaigall, Daniel},
title = {On Consistent Hypothesis Testing In General Hilbert Spaces},
publisher = {Avestia Publishing},
address = {Orl{\'e}ans, Canada},
doi = {10.11159/icsta22.157},
pages = {Paper No. 157},
year = {2022},
abstract = {Inference on the basis of high-dimensional data and of functional data are two topics which are discussed frequently in the current statistical literature. A possibility to include both topics in a single approach is to work on a very general space for the underlying observations, such as a separable Hilbert space. We propose a general method for consistent hypothesis testing on the basis of random variables with values in separable Hilbert spaces. We avoid concerns with the curse of dimensionality by means of a projection idea. We apply well-known test statistics from nonparametric inference to the projected data and integrate over all projections from a specific set and with respect to suitable probability measures. In contrast to classical methods, which are applicable for real-valued random variables or random vectors of dimensions lower than the sample size, the tests can be applied to random vectors of dimensions larger than the sample size or even to functional and high-dimensional data. In general, resampling procedures such as bootstrap or permutation are suitable to determine critical values. The idea can be extended to the case of incomplete observations. Moreover, we develop an efficient algorithm for implementing the method. Examples are given for testing goodness-of-fit in a one-sample situation in [1] or for testing marginal homogeneity on the basis of a paired sample in [2]. Here, the test statistics in use can be seen as generalizations of the well-known Cram{\'e}r-von-Mises test statistics in the one-sample and two-sample cases. The treatment of other testing problems is possible as well. By using the theory of U-statistics, for instance, asymptotic null distributions of the test statistics are obtained as the sample size tends to infinity. Standard continuity assumptions ensure the asymptotic exactness of the tests under the null hypothesis and that the tests detect any alternative in the limit. Simulation studies demonstrate size and power of the tests in the finite sample case, confirm the theoretical findings, and are used for the comparison with competing procedures. A possible application of the general approach is inference for stock market returns, also at high data frequencies. In the field of empirical finance, statistical inference of stock market prices usually takes place on the basis of related log-returns as data. In the classical models for stock prices, i.e., the exponential L{\'e}vy model, Black-Scholes model, and Merton model, properties such as independence and stationarity of the increments ensure an independent and identically distributed structure of the data. Specific trends during certain periods of the stock price processes can cause complications in this regard. In fact, our approach can compensate for those effects by treating the log-returns as random vectors or even as functional data.},
language = {en}
}