@article{EgliAyerPeteretal.2010, author = {Egli, Simon and Ayer, Fran{\c{c}}ois and Peter, Martina and Eilmann, Britta and Rigling, Andreas}, title = {Is forest mushroom productivity driven by tree growth? Results from a thinning experiment}, series = {Annals of Forest Science}, volume = {67}, journal = {Annals of Forest Science}, number = {5}, publisher = {Springer}, address = {Paris}, issn = {1286-4560 (Print)}, doi = {10.1051/forest/2010011}, pages = {509}, year = {2010}, abstract = {• Most of the edible forest mushrooms are mycorrhizal and depend on carbohydrates produced by the associated trees. Fruiting patterns of these fungi are not yet fully understood since climatic factors alone do not completely explain mushroom occurrence. • The objective of this study was to retrospectively find out if changing tree growth following an increment thinning has influenced the diversity patterns and productivity of associated forest mushrooms in the fungus reserve La Chan{\'e}az, Switzerland. • The results reveal a clear temporal relationship between the thinning, the growth reaction of trees and the reaction of the fungal community, especially for the ectomycorrhizal species. The tree-ring width of the formerly suppressed beech trees and the fruit body number increased after thinning, leading to a significantly positive correlation between fruit body numbers and tree-ring width. • Fruit body production was influenced by previous annual tree growth, the best accordance was found between fruit body production and the tree-ring width two years previously. • The results support the hypothesis that ectomycorrhizal fruit body production must be linked with the growth of the associated host trees. 
Moreover, the findings indicate the importance of including mycorrhizal fungi as important players when discussing a tree as a carbon source or sink.}, language = {en} } @article{DitzhausGaigall2022, author = {Ditzhaus, Marc and Gaigall, Daniel}, title = {Testing marginal homogeneity in Hilbert spaces with applications to stock market returns}, series = {Test}, volume = {31}, journal = {Test}, publisher = {Springer}, issn = {1863-8260}, doi = {10.1007/s11749-022-00802-5}, pages = {749 -- 770}, year = {2022}, abstract = {This paper considers a paired data framework and discusses the question of marginal homogeneity of bivariate high-dimensional or functional data. The related testing problem can be endowed into a more general setting for paired random variables taking values in a general Hilbert space. To address this problem, a Cram{\'e}r-von-Mises type test statistic is applied and a bootstrap procedure is suggested to obtain critical values and finally a consistent test. The desired properties of a bootstrap test can be derived that are asymptotic exactness under the null hypothesis and consistency under alternatives. Simulations show the quality of the test in the finite sample case. A possible application is the comparison of two possibly dependent stock market returns based on functional data. The approach is demonstrated based on historical data for different stock market indices.}, language = {en} } @article{BaringhausGaigall2018, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {Efficiency comparison of the Wilcoxon tests in paired and independent survey samples}, series = {Metrika}, volume = {81}, journal = {Metrika}, publisher = {Springer}, address = {Berlin}, issn = {1435-926X}, doi = {10.1007/s00184-018-0661-4}, pages = {891 -- 930}, year = {2018}, abstract = {The efficiency concepts of Bahadur and Pitman are used to compare the Wilcoxon tests in paired and independent survey samples. 
A comparison through the length of corresponding confidence intervals is also done. Simple conditions characterizing the dominance of a procedure are derived. Statistical tests for checking these conditions are suggested and discussed.}, language = {en} } @article{DuongNguyenStaat2015, author = {Duong, Minh Tuan and Nguyen, Nhu Huynh and Staat, Manfred}, title = {Physical response of hyperelastic models for composite materials and soft tissues}, series = {Asia pacific journal on computational engineering}, volume = {2}, journal = {Asia pacific journal on computational engineering}, number = {3}, issn = {2196-1166}, doi = {10.1186/s40540-015-0015-x}, pages = {1 -- 18}, year = {2015}, language = {en} } @article{WiesenTippkoetterMuffleretal.2015, author = {Wiesen, Sebastian and Tippk{\"o}tter, Nils and Muffler, Kai and Suck, Kirstin and Sohling, Ulrich and Ruf, Friedrich and Ulber, Roland}, title = {Adsorption of fatty acids to layered double hydroxides in aqueous systems}, series = {Adsorption}, volume = {21}, journal = {Adsorption}, number = {6-7}, publisher = {Springer}, address = {Berlin}, pages = {459 -- 466}, year = {2015}, abstract = {Due to their anion exchange characteristics, layered double hydroxides (LDHs) are suitable for the detoxification of aqueous, fatty acid containing fermentation substrates. The aim of this study is to examine the adsorption mechanism, using crude glycerol from plant oil esterification as a model system. Changes in the intercalation structure in relation to the amount of fatty acids adsorbed are monitored by X-ray diffraction and infra-red spectroscopy. Additionally, calcination of LDH is investigated in order to increase the binding capacity for fatty acids. Our data propose that, at ambient temperature, fatty acids can be bound to the hydrotalcite by adsorption or in addition by intercalation, depending on fatty acid concentration. The adsorption of fatty acids from crude glycerol shows a BET-like behavior. 
Above a fatty acid concentration of 3.5 g L-1, intercalation of fatty acids can be shown by the appearance of an increased interlayer spacing. This observation suggests a two phase adsorption process. Calcination of LDHs allows increasing the binding capacity for fatty acids by more than six times, mainly by reduction of structural CO32-.}, language = {en} } @incollection{PhamStaat2013, author = {Pham, Phu Tinh and Staat, Manfred}, title = {An Upper Bound Algorithm for Limit and Shakedown Analysis of Bounded Linearly Kinematic Hardening Structures}, series = {Limit State of Materials and Structures : Direct Methods 2. Saxc{\'e}, G{\'e}ry de (Hrsg.)}, booktitle = {Limit State of Materials and Structures : Direct Methods 2. Saxc{\'e}, G{\'e}ry de (Hrsg.)}, publisher = {Springer}, address = {Dordrecht}, isbn = {978-94-007-5424-9}, pages = {71 -- 87}, year = {2013}, language = {en} } @article{MonakhovaDiehl2021, author = {Monakhova, Yulia and Diehl, Bernd W. K.}, title = {Simplification of NMR Workflows by Standardization Using 2H Integral of Deuterated Solvent as Applied to Aloe vera Preparations}, series = {Applied Magnetic Resonance}, volume = {52}, journal = {Applied Magnetic Resonance}, number = {11}, publisher = {Springer}, address = {Cham}, issn = {1613-7507}, doi = {10.1007/s00723-021-01393-4}, pages = {1591 -- 1600}, year = {2021}, abstract = {In this study, a recently proposed NMR standardization approach by 2H integral of deuterated solvent for quantitative multicomponent analysis of complex mixtures is presented. As a proof of principle, the existing NMR routine for the analysis of Aloe vera products was modified. Instead of using absolute integrals of targeted compounds and internal standard (nicotinamide) from 1H-NMR spectra, quantification was performed based on the ratio of a particular 1H-NMR compound integral and 2H-NMR signal of deuterated solvent D2O. 
Validation characteristics (linearity, repeatability, accuracy) were evaluated and the results showed that the method has the same precision as internal standardization in case of multicomponent screening. Moreover, a dehydration process by freeze drying is not necessary for the new routine. Now, our NMR profiling of A. vera products needs only limited sample preparation and data processing. The new standardization methodology provides an appealing alternative for multicomponent NMR screening. In general, this novel approach, using standardization by 2H integral, benefits from reduced sample preparation steps and uncertainties, and is recommended in different application areas (purity determination, forensics, pharmaceutical analysis, etc.).}, language = {en} } @incollection{EggertZaehlWolfetal.2023, author = {Eggert, Mathias and Z{\"a}hl, Philipp M. and Wolf, Martin R. and Haase, Martin}, title = {Applying leaderboards for quality improvement in software development projects}, series = {Software Engineering for Games in Serious Contexts}, booktitle = {Software Engineering for Games in Serious Contexts}, editor = {Cooper, Kendra M. L. and Bucchiarone, Antonio}, publisher = {Springer}, address = {Cham}, isbn = {978-3-031-33337-8 (Print)}, doi = {10.1007/978-3-031-33338-5_11}, pages = {243 -- 263}, year = {2023}, abstract = {Software development projects often fail because of insufficient code quality. It is now well documented that the task of testing software, for example, is perceived as uninteresting and rather boring, leading to poor software quality and major challenges to software development companies. One promising approach to increase the motivation for considering software quality is the use of gamification. Initial research works already investigated the effects of gamification on software developers and come to promising results. Nevertheless, a lack of results from field experiments exists, which motivates the chapter at hand. 
By conducting a gamification experiment with five student software projects and by interviewing the project members, the chapter provides insights into the changing programming behavior of information systems students when confronted with a leaderboard. The results reveal a motivational effect as well as a reduction of code smells.}, language = {en} } @article{Kleefeld2021, author = {Kleefeld, Andreas}, title = {The hot spots conjecture can be false: some numerical examples}, series = {Advances in Computational Mathematics}, volume = {47}, journal = {Advances in Computational Mathematics}, publisher = {Springer}, address = {Dordrecht}, issn = {1019-7168}, doi = {10.1007/s10444-021-09911-5}, year = {2021}, abstract = {The hot spots conjecture is only known to be true for special geometries. This paper shows numerically that the hot spots conjecture can fail to be true for easy to construct bounded domains with one hole. The underlying eigenvalue problem for the Laplace equation with Neumann boundary condition is solved with boundary integral equations yielding a non-linear eigenvalue problem. Its discretization via the boundary element collocation method in combination with the algorithm by Beyn yields highly accurate results both for the first non-zero eigenvalue and its corresponding eigenfunction which is due to superconvergence. Additionally, it can be shown numerically that the ratio between the maximal/minimal value inside the domain and its maximal/minimal value on the boundary can be larger than 1 + 10- 3. 
Finally, numerical examples for easy to construct domains with up to five holes are provided which fail the hot spots conjecture as well.}, language = {en} } @inproceedings{KahraBreussKleefeldetal.2024, author = {Kahra, Marvin and Breuß, Michael and Kleefeld, Andreas and Welk, Martin}, title = {An Approach to Colour Morphological Supremum Formation Using the LogSumExp Approximation}, series = {Discrete Geometry and Mathematical Morphology}, booktitle = {Discrete Geometry and Mathematical Morphology}, editor = {Brunetti, Sara and Frosini, Andrea and Rinaldi, Simone}, publisher = {Springer}, address = {Cham}, isbn = {978-3-031-57793-2}, doi = {10.1007/978-3-031-57793-2_25}, pages = {325 -- 337}, year = {2024}, abstract = {Mathematical morphology is a part of image processing that has proven to be fruitful for numerous applications. Two main operations in mathematical morphology are dilation and erosion. These are based on the construction of a supremum or infimum with respect to an order over the tonal range in a certain section of the image. The tonal ordering can easily be realised in grey-scale morphology, and some morphological methods have been proposed for colour morphology. However, all of these have certain limitations. In this paper we present a novel approach to colour morphology extending upon previous work in the field based on the Loewner order. We propose to consider an approximation of the supremum by means of a log-sum exponentiation introduced by Maslov. We apply this to the embedding of an RGB image in a field of symmetric 2x2 matrices. In this way we obtain nearly isotropic matrices representing colours and the structural advantage of transitivity. 
In numerical experiments we highlight some remarkable properties of the proposed approach.}, language = {en} } @incollection{BaierBraunerBrillowskietal.2023, author = {Baier, Ralph and Brauner, Philipp and Brillowski, Florian and Dammers, Hannah and Liehner, Luca and P{\"u}tz, Sebastian and Schneider, Sebastian and Schollemann, Alexander and Steuer-Dankert, Linda and Vervier, Luisa and Gries, Thomas and Leicht-Scholten, Carmen and Mertens, Alexander and Nagel, Saskia K. and Schuh, G{\"u}nther and Ziefle, Martina and Nitsch, Verena}, title = {Human-centered work design for the internet of production}, series = {Internet of production - fundamentals, applications and proceedings}, booktitle = {Internet of production - fundamentals, applications and proceedings}, editor = {Brecher, Christian and Schuh, G{\"u}nther and van der Aalst, Wil and Jarke, Matthias and Piller, Frank T. and Padberg, Melanie}, publisher = {Springer}, address = {Cham}, isbn = {978-3-030-98062-7}, doi = {10.1007/978-3-030-98062-7_19-1}, pages = {1 -- 23}, year = {2023}, abstract = {Like all preceding transformations of the manufacturing industry, the large-scale usage of production data will reshape the role of humans within the sociotechnical production ecosystem. To ensure that this transformation creates work systems in which employees are empowered, productive, healthy, and motivated, the transformation must be guided by principles of and research on human-centered work design. Specifically, measures must be taken at all levels of work design, ranging from (1) the work tasks to (2) the working conditions to (3) the organizational level and (4) the supra-organizational level. We present selected research across all four levels that showcase the opportunities and requirements that surface when striving for human-centered work design for the Internet of Production (IoP). 
(1) On the work task level, we illustrate the user-centered design of human-robot collaboration (HRC) and process planning in the composite industry as well as user-centered design factors for cognitive assistance systems. (2) On the working conditions level, we present a newly developed framework for the classification of HRC workplaces. (3) Moving to the organizational level, we show how corporate data can be used to facilitate best practice sharing in production networks, and we discuss the implications of the IoP for new leadership models. Finally, (4) on the supra-organizational level, we examine overarching ethical dimensions, investigating, e.g., how the new work contexts affect our understanding of responsibility and normative values such as autonomy and privacy. Overall, these interdisciplinary research perspectives highlight the importance and necessary scope of considering the human factor in the IoP.}, language = {en} } @article{WeiheErnstRoethetal.2013, author = {Weihe, Stefan and Ernst, Ansgar and R{\"o}th, Thilo and Proksch, Johannes}, title = {Leichtmetall-Stahl-Verbundguss im Nutzfahrzeugbau}, series = {Lightweight Design}, volume = {6}, journal = {Lightweight Design}, number = {2}, publisher = {Springer}, address = {Berlin}, issn = {2192-8738 (Online)}, pages = {38 -- 43}, year = {2013}, abstract = {In modernen Fahrzeugkarosserien der Großserie kommen zunehmend Materialmischbauweisen zur Anwendung. In Zusammenarbeit der Daimler AG, der Tower Automotive Holding GmbH, der Imperia GmbH sowie der Partnerunternehmen KSM Castings GmbH und Schaufler Tooling GmbH \& Co. 
KG wird das Leichtbaupotenzial von Stahlblech-AluminiumverbundgussHybriden am Beispiel des vorderen Dachquertr{\"a}gers des Mercedes-Benz Viano/Vito ausf{\"u}hrlich untersucht.}, language = {de} } @article{EggertMoulen2020, author = {Eggert, Mathias and Moulen, Tobias}, title = {Selektion von Gesch{\"a}ftsprozessen zur Anwendung von Robotic Process Automation am Beispiel einer Versicherung}, series = {HMD Praxis der Wirtschaftsinformatik}, volume = {57}, journal = {HMD Praxis der Wirtschaftsinformatik}, number = {6}, publisher = {Springer}, address = {Weinheim}, issn = {1436-3011}, doi = {10.1365/s40702-020-00665-0}, pages = {1150 -- 1162}, year = {2020}, abstract = {H{\"a}ufig bremsen geringe IT-Ressourcen, fehlende Softwareschnittstellen oder eine veraltete und komplex gewachsene Systemlandschaft die Automatisierung von Gesch{\"a}ftsprozessen. Robotic Process Automation (RPA) ist eine vielversprechende Methode, um Gesch{\"a}ftsprozesse oberfl{\"a}chenbasiert und ohne gr{\"o}ßere Systemeingriffe zu automatisieren und Medienbr{\"u}che abzubauen. Die Auswahl der passenden Prozesse ist dabei f{\"u}r den Erfolg von RPA-Projekten entscheidend. Der vorliegende Beitrag liefert daf{\"u}r Selektionskriterien, die aus einer qualitativen Inhaltanalyse von elf Interviews mit RPA-Experten aus dem Versicherungsumfeld resultieren. Das Ergebnis umfasst eine gewichtetet Liste von sieben Dimensionen und 51 Prozesskriterien, welche die Automatisierung mit Softwarerobotern beg{\"u}nstigen bzw. deren Nichterf{\"u}llung eine Umsetzung erschweren oder sogar verhindern. Die drei wichtigsten Kriterien zur Auswahl von Gesch{\"a}ftsprozessen f{\"u}r die Automatisierung mittels RPA umfassen die Entlastung der an dem Prozess mitwirkenden Mitarbeiter (Arbeitnehmer{\"u}berlastung), die Ausf{\"u}hrbarkeit des Prozesses mittels Regeln (Regelbasierte Prozessteuerung) sowie ein positiver Kosten-Nutzen-Vergleich. 
Praktiker k{\"o}nnen diese Kriterien verwenden, um eine systematische Auswahl von RPA-relevanten Prozessen vorzunehmen. Aus wissenschaftlicher Perspektive stellen die Ergebnisse eine Grundlage zur Erkl{\"a}rung des Erfolgs und Misserfolgs von RPA-Projekten dar.}, language = {de} } @article{SchneiderSchwabedalBialonski2022, author = {Schneider, Jules and Schwabedal, Justus T. C. and Bialonski, Stephan}, title = {Schlafspindeln - Funktion, Detektion und Nutzung als Biomarker f{\"u}r die psychiatrische Diagnostik}, series = {Der Nervenarzt}, journal = {Der Nervenarzt}, publisher = {Springer}, address = {Berlin, Heidelberg}, issn = {1433-0407}, doi = {10.1007/s00115-022-01340-z}, pages = {1 -- 8}, year = {2022}, abstract = {Hintergrund: Die Schlafspindel ist ein Graphoelement des Elektroenzephalogramms (EEG), das im Leicht- und Tiefschlaf beobachtet werden kann. Ver{\"a}nderungen der Spindelaktivit{\"a}t wurden f{\"u}r verschiedene psychiatrische Erkrankungen beschrieben. Schlafspindeln zeigen aufgrund ihrer relativ konstanten Eigenschaften Potenzial als Biomarker in der psychiatrischen Diagnostik. Methode: Dieser Beitrag liefert einen {\"U}berblick {\"u}ber den Stand der Wissenschaft zu Eigenschaften und Funktionen der Schlafspindeln sowie {\"u}ber beschriebene Ver{\"a}nderungen der Spindelaktivit{\"a}t bei psychiatrischen Erkrankungen. Verschiedene methodische Ans{\"a}tze und Ausblicke zur Spindeldetektion werden hinsichtlich deren Anwendungspotenzial in der psychiatrischen Diagnostik erl{\"a}utert. Ergebnisse und Schlussfolgerung: W{\"a}hrend Ver{\"a}nderungen der Spindelaktivit{\"a}t bei psychiatrischen Erkrankungen beschrieben wurden, ist deren exaktes Potenzial f{\"u}r die psychiatrische Diagnostik noch nicht ausreichend erforscht. Diesbez{\"u}glicher Erkenntnisgewinn wird in der Forschung gegenw{\"a}rtig durch ressourcenintensive und fehleranf{\"a}llige Methoden zur manuellen oder automatisierten Spindeldetektion ausgebremst. 
Neuere Detektionsans{\"a}tze, die auf Deep-Learning-Verfahren basieren, k{\"o}nnten die Schwierigkeiten bisheriger Detektionsmethoden {\"u}berwinden und damit neue M{\"o}glichkeiten f{\"u}r die praktisch}, language = {de} } @article{SteinbauerFerrein2016, author = {Steinbauer, Gerald and Ferrein, Alexander}, title = {20 Years of RoboCup}, series = {KI - K{\"u}nstliche Intelligenz}, volume = {30}, journal = {KI - K{\"u}nstliche Intelligenz}, number = {3-4}, publisher = {Springer}, address = {Berlin}, issn = {1610-1987}, doi = {10.1007/s13218-016-0442-z}, pages = {221 -- 224}, year = {2016}, language = {en} } @article{HafnerDemetzWeickertetal.2014, author = {Hafner, David and Demetz, Oliver and Weickert, Joachim and Reißel, Martin}, title = {Mathematical Foundations and Generalisations of the Census Transform for Robust Optic Flow Computation}, series = {Journal of Mathematical Imaging and Vision}, journal = {Journal of Mathematical Imaging and Vision}, publisher = {Springer}, address = {New York}, issn = {1573-7683 (Online)}, doi = {10.1007/s10851-014-0529-9}, year = {2014}, language = {en} } @article{OehlenschlaegerVolkmarStiefelmaieretal.2024, author = {Oehlenschl{\"a}ger, Katharina and Volkmar, Marianne and Stiefelmaier, Judith and Langsdorf, Alexander and Holtmann, Dirk and Tippk{\"o}tter, Nils and Ulber, Roland}, title = {New insights into the influence of pre-culture on robust solvent production of C. acetobutylicum}, series = {Applied Microbiology and Biotechnology}, volume = {108}, journal = {Applied Microbiology and Biotechnology}, publisher = {Springer}, address = {Berlin, Heidelberg}, issn = {1432-0614}, doi = {10.1007/s00253-023-12981-8}, pages = {10 Seiten}, year = {2024}, abstract = {Clostridia are known for their solvent production, especially the production of butanol. Concerning the projected depletion of fossil fuels, this is of great interest. 
The cultivation of clostridia is known to be challenging, and it is difficult to achieve reproducible results and robust processes. However, existing publications usually concentrate on the cultivation conditions of the main culture. In this paper, the influence of cryo-conservation and pre-culture on growth and solvent production in the resulting main cultivation are examined. A protocol was developed that leads to reproducible cultivations of Clostridium acetobutylicum. Detailed investigation of the cell conservation in cryo-cultures ensured reliable cell growth in the pre-culture. Moreover, a reason for the acid crash in the main culture was found, based on the cultivation conditions of the pre-culture. The critical parameter to avoid the acid crash and accomplish the shift to the solventogenesis of clostridia is the metabolic phase in which the cells of the pre-culture were at the time of inoculation of the main culture; this depends on the cultivation time of the pre-culture. Using cells from the exponential growth phase to inoculate the main culture leads to an acid crash. To achieve the solventogenic phase with butanol production, the inoculum should consist of older cells which are in the stationary growth phase. 
Considering these parameters, which affect the entire cultivation process, reproducible results and reliable solvent production are ensured.}, language = {en} } @article{FontiEilmannGarciaGonzalezetal.2009, author = {Fonti, Patrick and Eilmann, Britta and Garc{\'\i}a-Gonz{\'a}lez, Ignacio and von Arx, Georg}, title = {Expeditious building of ring-porous earlywood vessel chronologies without loosing signal information}, series = {Trees : structure and function}, volume = {23}, journal = {Trees : structure and function}, number = {3}, publisher = {Springer}, address = {Berlin}, issn = {0931-1890 (Print)}, doi = {10.1007/s00468-008-0310-z}, pages = {665 -- 671}, year = {2009}, language = {en} } @article{KohlKraemerFohryetal.2024, author = {Kohl, Philipp and Kr{\"a}mer, Yoka and Fohry, Claudia and Kraft, Bodo}, title = {Scoping review of active learning strategies and their evaluation environments for entity recognition tasks}, series = {Deep learning theory and applications}, journal = {Deep learning theory and applications}, editor = {Fred, Ana and Hadjali, Allel and Gusikhin, Oleg and Sansone, Carlo}, publisher = {Springer}, address = {Cham}, isbn = {978-3-031-66694-0 (online ISBN)}, doi = {10.1007/978-3-031-66694-0_6}, pages = {84 -- 106}, year = {2024}, abstract = {We conducted a scoping review for active learning in the domain of natural language processing (NLP), which we summarize in accordance with the PRISMA-ScR guidelines as follows: Objective: Identify active learning strategies that were proposed for entity recognition and their evaluation environments (datasets, metrics, hardware, execution time). Design: We used Scopus and ACM as our search engines. We compared the results with two literature surveys to assess the search quality. We included peer-reviewed English publications introducing or comparing active learning strategies for entity recognition. Results: We analyzed 62 relevant papers and identified 106 active learning strategies. 
We grouped them into three categories: exploitation-based (60x), exploration-based (14x), and hybrid strategies (32x). We found that all studies used the F1-score as an evaluation metric. Information about hardware (6x) and execution time (13x) was only occasionally included. The 62 papers used 57 different datasets to evaluate their respective strategies. Most datasets contained newspaper articles or biomedical/medical data. Our analysis revealed that 26 out of 57 datasets are publicly accessible. Conclusion: Numerous active learning strategies have been identified, along with significant open questions that still need to be addressed. Researchers and practitioners face difficulties when making data-driven decisions about which active learning strategy to adopt. Conducting comprehensive empirical comparisons using the evaluation environment proposed in this study could help establish best practices in the domain.}, language = {en} } @incollection{HirtBleckBobzinetal.2011, author = {Hirt, Gerhard and Bleck, Wolfgang and Bobzin, Kirsten and Schleser, Markus and Brecher, Christian and B{\"u}hrig-Polazcek, Andreas and Haberstroh, Edmund and Klocke, Fritz and Loosen, Peter and Michaeli, Walter and Poprawe, Reinhart and Reisgen, Uwe and Arntz, Kristian and Bagcivan, Nazlim and Bambach, Markus and B{\"a}umler, Stephan and Beckemper, Stefan and Bergweiler, Georg and Breitbach, Tobias and Buchholz, Steffen and B{\"u}ltmann, Jan and Diettrich, J{\"o}rg and Do-Khac, Dennis and Eilbracht, Stephan and Emonts, Michael and Flock, Dustin and Gerhardt, Kai and Gillner, Arnold and G{\"o}ttmann, Alexander and Gr{\"o}nlund, Oliver and Hartmann, Claudia and Heinen, Daniel and Herfs, Werner and Hermani, Jan-Patrick and Holtkamp, Jens and Todor, Ivanov and Jakob, Matthias and Janssen, Andreas and Karlberger, Andreas and Klaiber, Fritz and Kutschmann, Pia and Neuß, Andreas and Prahl, Ulrich and Roderburg, Andreas and Rosen, Chris-J{\"o}rg and R{\"o}sner, Andreas and Saeed-Akbari, Alireza 
and Scharf, Micha and Scheik, Sven and Schleser, Markus and Sch{\"o}ngart, Maximilian and Stein, Lars and Steiners, Marius and Stollenwerk, Jochen and Araghi, Babak Taleb and Theiß, Sebastian and Wunderle, Johannes}, title = {Hybride Produktionssysteme}, series = {Integrative Produktionstechnik f{\"u}r Hochlohnl{\"a}nder / hrsg. von Christian Brecher}, booktitle = {Integrative Produktionstechnik f{\"u}r Hochlohnl{\"a}nder / hrsg. von Christian Brecher}, publisher = {Springer}, address = {Berlin [u.a.]}, isbn = {978-3-642-20692-4 (Print) ; 978-3-642-20693-1 (E-Book)}, doi = {10.1007/978-3-642-20693-1_5}, pages = {465 -- 745}, year = {2011}, abstract = {W{\"a}hrend die virtuelle Produktentstehungskette große Gestaltungsfreir{\"a}ume bietet, ist die reale Produktentstehungskette durch wesentlich mehr Randbedingungen gekennzeichnet, die nicht oder nur ansatzweise beeinflussbar sind. Die Realisierung des aus logistischer Sicht optimalen One-Piece-Flow bei gleichzeitiger Steigerung von Flexibilit{\"a}t und Produktivit{\"a}t sowie des Verschiebens der Grenze des technologisch Machbaren m{\"u}ssen in zuk{\"u}nftigen Forschungsans{\"a}tzen gleichermaßen betrachtet werden. Die Grenzverschiebung auf Basis der Integration von Technologien ist dabei ein viel versprechender Ansatz, der es in vielen F{\"a}llen erm{\"o}glicht, in allen genannten Zielrichtungen gleichermaßen Potentiale zu erschließen.}, language = {de} } @inproceedings{MaurerMiskiwAcostaetal.2023, author = {Maurer, Florian and Miskiw, Kim K. 
and Acosta, Rebeca Ramirez and Harder, Nick and Sander, Volker and Lehnhoff, Sebastian}, title = {Market abstraction of energy markets and policies - application in an agent-based modeling toolbox}, series = {EI.A 2023: Energy Informatics}, booktitle = {EI.A 2023: Energy Informatics}, editor = {Jorgensen, Bo Norregaard and Pereira da Silva, Luiz Carlos and Ma, Zheng}, publisher = {Springer}, address = {Cham}, isbn = {978-3-031-48651-7 (Print)}, doi = {10.1007/978-3-031-48652-4_10}, pages = {139 -- 157}, year = {2023}, abstract = {In light of emerging challenges in energy systems, markets are prone to changing dynamics and market design. Simulation models are commonly used to understand the changing dynamics of future electricity markets. However, existing market models were often created with specific use cases in mind, which limits their flexibility and usability. This can impose challenges for using a single model to compare different market designs. This paper introduces a new method of defining market designs for energy market simulations. The proposed concept makes it easy to incorporate different market designs into electricity market models by using relevant parameters derived from analyzing existing simulation tools, morphological categorization and ontologies. These parameters are then used to derive a market abstraction and integrate it into an agent-based simulation framework, allowing for a unified analysis of diverse market designs. Furthermore, we showcase the usability of integrating new types of long-term contracts and over-the-counter trading. To validate this approach, two case studies are demonstrated: a pay-as-clear market and a pay-as-bid long-term market. 
These examples demonstrate the capabilities of the proposed framework.}, language = {en} } @book{Heuermann2024, author = {Heuermann, Holger}, title = {Microwave technology: field simulation, non-linear circuit technology, components and subsystems, plasma technology, antennas and propagation}, publisher = {Springer}, address = {Wiesbaden}, isbn = {978-3-658-45685-6}, doi = {10.1007/978-3-658-45686-3}, pages = {XII, 391 Seiten}, year = {2024}, abstract = {The book covers various numerical field simulation methods, nonlinear circuit technology and its MF-S- and X-parameters, as well as state-of-the-art power amplifier techniques. It also describes newly presented oscillators and the emerging field of GHz plasma technology. Furthermore, it addresses aspects such as waveguides, mixers, phase-locked loops, antennas, and propagation effects, in combination with the bachelor's book 'High-Frequency Engineering,' encompassing all aspects related to the current state of GHz technology.}, language = {en} } @article{OrzadaFiedlerBitzetal.2020, author = {Orzada, Stephan and Fiedler, Thomas M. and Bitz, Andreas and Ladd, Mark E. and Quick, Harald H.}, title = {Local SAR compression with overestimation control to reduce maximum relative SAR overestimation and improve multi-channel RF array performance}, series = {Magnetic Resonance Materials in Physics, Biology and Medicine}, volume = {34}, journal = {Magnetic Resonance Materials in Physics, Biology and Medicine}, publisher = {Springer}, address = {Heidelberg}, issn = {1352-8661}, doi = {10.1007/s10334-020-00890-0}, pages = {153 -- 164}, year = {2020}, abstract = {Objective In local SAR compression algorithms, the overestimation is generally not linearly dependent on actual local SAR. This can lead to large relative overestimation at low actual SAR values, unnecessarily constraining transmit array performance. Method Two strategies are proposed to reduce maximum relative overestimation for a given number of VOPs. 
The first strategy uses an overestimation matrix that roughly approximates actual local SAR; the second strategy uses a small set of pre-calculated VOPs as the overestimation term for the compression. Result Comparison with a previous method shows that for a given maximum relative overestimation the number of VOPs can be reduced by around 20\% at the cost of a higher absolute overestimation at high actual local SAR values. Conclusion The proposed strategies outperform a previously published strategy and can improve the SAR compression where maximum relative overestimation constrains the performance of parallel transmission.}, language = {en} } @article{SchifferFerreinLakemeyer2012, author = {Schiffer, Stefan and Ferrein, Alexander and Lakemeyer, Gerhard}, title = {Caesar: an intelligent domestic service robot}, series = {Intelligent service robotics}, volume = {5}, journal = {Intelligent service robotics}, number = {4}, publisher = {Springer}, address = {Berlin}, issn = {1861-2776}, doi = {10.1007/s11370-012-0118-y}, pages = {259 -- 276}, year = {2012}, abstract = {In this paper we present CAESAR, an intelligent domestic service robot. In domestic settings for service robots complex tasks have to be accomplished. Those tasks benefit from deliberation, from robust action execution and from flexible methods for human-robot interaction that account for qualitative notions used in natural language as well as human fallibility. Our robot CAESAR deploys AI techniques on several levels of its system architecture. On the low-level side, system modules for localization or navigation make, for instance, use of path-planning methods, heuristic search, and Bayesian filters. For face recognition and human-machine interaction, random trees and well-known methods from natural language processing are deployed. 
For deliberation, we use the robot programming and plan language READYLOG, which was developed for the high-level control of agents and robots; it allows combining programming the behaviour using planning to find a course of action. READYLOG is a variant of the robot programming language Golog. We extended READYLOG to be able to cope with qualitative notions of space frequently used by humans, such as "near" and "far". This facilitates human-robot interaction by bridging the gap between human natural language and the numerical values needed by the robot. Further, we use READYLOG to increase the flexible interpretation of human commands with decision-theoretic planning. We give an overview of the different methods deployed in CAESAR and show the applicability of a system equipped with these AI techniques in domestic service robotics.}, language = {en} } @article{SchaeferHoefkenSchuba2011, author = {Schaefer, Thomas and H{\"o}fken, Hans-Wilhelm and Schuba, Marko}, title = {Windows Phone 7 from a Digital Forensics' Perspective}, publisher = {Springer}, address = {Berlin}, year = {2011}, language = {en} } @article{BergmannGoettenBraunetal.2022, author = {Bergmann, Ole and G{\"o}tten, Falk and Braun, Carsten and Janser, Frank}, title = {Comparison and evaluation of blade element methods against RANS simulations and test data}, series = {CEAS Aeronautical Journal}, volume = {13}, journal = {CEAS Aeronautical Journal}, publisher = {Springer}, address = {Wien}, issn = {1869-5590 (Online)}, doi = {10.1007/s13272-022-00579-1}, pages = {535 -- 557}, year = {2022}, abstract = {This paper compares several blade element theory (BET) method-based propeller simulation tools, including an evaluation against static propeller ground tests and high-fidelity Reynolds-Averaged Navier-Stokes (RANS) simulations. Two proprietary propeller geometries for paraglider applications are analysed in static and flight conditions.
The RANS simulations are validated with the static test data and used as a reference for comparing the BET in flight conditions. The comparison includes the analysis of varying 2D aerodynamic airfoil parameters and different induced velocity calculation methods. The evaluation of the BET propeller simulation tools shows the strength of the BET tools compared to RANS simulations. The RANS simulations underpredict static experimental data within 10\% relative error, while appropriate BET tools overpredict the RANS results by 15-20\% relative error. A variation in 2D aerodynamic data depicts the need for highly accurate 2D data for accurate BET results. The nonlinear BET coupled with XFOIL for the 2D aerodynamic data matches best with RANS in static operation and flight conditions. The novel BET tool PropCODE combines both approaches and offers further correction models for highly accurate static and flight condition results.}, language = {en} } @article{Ferrein2010, author = {Ferrein, Alexander}, title = {Robot controllers for highly dynamic environments with real-time constraints}, series = {K{\"u}nstliche Intelligenz : KI}, volume = {24}, journal = {K{\"u}nstliche Intelligenz : KI}, number = {2}, publisher = {Springer}, address = {Heidelberg}, issn = {1610-1987}, doi = {10.1007/s13218-010-0041-3}, pages = {175 -- 178}, year = {2010}, abstract = {In this extended abstract we describe the robot programming and planning language READYLOG, a GOLOG dialect which was developed to support the decision making of robots acting in dynamic real-time domains like robotic soccer. The formal framework of READYLOG, which is based on the situation calculus, features imperative control structures like loops and procedures, allows for decision-theoretic planning, and accounts for a continuously changing world. 
We developed high-level controllers in READYLOG for our soccer robots in RoboCup's Middle-size league, but also for service robots and for autonomous agents in interactive computer games.}, language = {en} } @article{RensVarzinczakMeyeretal.2010, author = {Rens, Gavin and Varzinczak, Ivan and Meyer, Thomas and Ferrein, Alexander}, title = {A Logic for Reasoning about Actions and Explicit Observations}, series = {AI 2010: Advances in Artificial Intelligence 23rd Australasian Joint Conference, Adelaide, Australia, December 7-10, 2010. Proceedings}, journal = {AI 2010: Advances in Artificial Intelligence 23rd Australasian Joint Conference, Adelaide, Australia, December 7-10, 2010. Proceedings}, publisher = {Springer}, address = {Berlin}, doi = {10.1007/978-3-642-17432-2_40}, pages = {395 -- 404}, year = {2010}, language = {en} } @article{MoehrenBergmannJanseretal.2024, author = {M{\"o}hren, Felix and Bergmann, Ole and Janser, Frank and Braun, Carsten}, title = {Assessment of structural mechanical effects related to torsional deformations of propellers}, series = {CEAS Aeronautical Journal}, journal = {CEAS Aeronautical Journal}, publisher = {Springer}, address = {Wien}, issn = {1869-5590 (Online)}, doi = {10.1007/s13272-024-00737-7}, pages = {22 Seiten}, year = {2024}, abstract = {Lifting propellers are of increasing interest for Advanced Air Mobility. All propellers and rotors are initially twisted beams, showing significant extension-twist coupling and centrifugal twisting. Torsional deformations severely impact aerodynamic performance. This paper presents a novel approach to assess different reasons for torsional deformations. A reduced-order model runs large parameter sweeps with algebraic formulations and numerical solution procedures. Generic beams represent three different propeller types for General Aviation, Commercial Aviation, and Advanced Air Mobility.
Simulations include solid and hollow cross-sections made of aluminum, steel, and carbon fiber-reinforced polymer. The investigation shows that centrifugal twisting moments depend on both the elastic and initial twist. The determination of the centrifugal twisting moment solely based on the initial twist suffers from errors exceeding 5\% in some cases. The nonlinear parts of the torsional rigidity do not significantly impact the overall torsional rigidity for the investigated propeller types. The extension-twist coupling related to the initial and elastic twist in combination with tension forces significantly impacts the net cross-sectional torsional loads. While the increase in torsional stiffness due to initial twist contributes to the overall stiffness for General and Commercial Aviation propellers, its contribution to the lift propeller's stiffness is limited. The paper closes with the presentation of approximations for each effect identified as significant. Numerical evaluations are necessary to determine each effect for inhomogeneous cross-sections made of anisotropic material.}, language = {en} } @article{BelavyAlbrachtBrueggemannetal.2016, author = {Belavy, Daniel L. and Albracht, Kirsten and Br{\"u}ggemann, Gert-Peter and Vergroesen, Pieter-Paul A. and Dieen, Jaap H. van}, title = {Can exercise positively influence the intervertebral disc?}, series = {Sports Medicine}, volume = {46}, journal = {Sports Medicine}, number = {4}, publisher = {Springer}, address = {Berlin}, issn = {1179-2035}, doi = {10.1007/s40279-015-0444-2}, pages = {473 -- 485}, year = {2016}, abstract = {To better understand what kinds of sports and exercise could be beneficial for the intervertebral disc (IVD), we performed a review to synthesise the literature on IVD adaptation with loading and exercise. The state of the literature did not permit a systematic review; therefore, we performed a narrative review. 
The majority of the available data come from cell or whole-disc loading models and animal exercise models. However, some studies have examined the impact of specific sports on IVD degeneration in humans and acute exercise on disc size. Based on the data available in the literature, loading types that are likely beneficial to the IVD are dynamic, axial, at slow to moderate movement speeds, and of a magnitude experienced in walking and jogging. Static loading, torsional loading, flexion with compression, rapid loading, high-impact loading and explosive tasks are likely detrimental for the IVD. Reduced physical activity and disuse appear to be detrimental for the IVD. We also consider the impact of genetics and the likelihood of a 'critical period' for the effect of exercise in IVD development. The current review summarises the literature to increase awareness amongst exercise, rehabilitation and ergonomic professionals regarding IVD health and provides recommendations on future directions in research.}, language = {en} } @article{NiemuellerFerreinLakemeyer2010, author = {Niem{\"u}ller, Tim and Ferrein, Alexander and Lakemeyer, Gerhard}, title = {A Lua-based behavior engine for controlling the humanoid robot Nao}, series = {RoboCup 2009: Robot Soccer World Cup XIII}, journal = {RoboCup 2009: Robot Soccer World Cup XIII}, publisher = {Springer}, address = {Heidelberg}, doi = {10.1007/978-3-642-11876-0_21}, pages = {240 -- 251}, year = {2010}, language = {en} } @article{SchollSchubertZieneretal.2010, author = {Scholl, Ingrid and Schubert, Nicole and Ziener, Pascal and Pietrzyk, Uwe}, title = {GPU-basiertes Volumenrendering von multimodalen medizinischen Bilddaten in Echtzeit}, series = {Bildverarbeitung f{\"u}r die Medizin 2010}, journal = {Bildverarbeitung f{\"u}r die Medizin 2010}, publisher = {Springer}, address = {Berlin}, isbn = {978-3-642-11967-5}, pages = {400 -- 404}, year = {2010}, abstract = {Die vorliegende Arbeit zeichnet sich dadurch aus, dass registrierte 
unsegmentierte Volumina aus multimodalen Bilddatens{\"a}tzen (z.B. MRT, PET) gleichzeitig in einer 3D-Rekonstruktion visualisiert werden und in Echtzeit manipuliert werden k{\"o}nnen. Erm{\"o}glicht wird die Echtzeitf{\"a}higkeit durch die Programmierung der Algorithmen zur direkten Volumenvisualisierung auf der Grafikkarte mittels der neuen CUDA-Technologie. Die Zuordnung der Farbeigenschaften wird {\"u}ber 1D-Transferfunktionen f{\"u}r jedes Volumen getrennt gesteuert. So k{\"o}nnen durch die interaktive Ver{\"a}nderung der 1D-Transferfunktion Detailinformationen aus den zwei Bilddatens{\"a}tzen getrennt kontrolliert werden und die Vorteile der verschiedenen Bildmodalit{\"a}ten in einer Visualisierung genutzt werden. Mittels dieses interaktiven Frameworks k{\"o}nnen neue Erkenntnisse insbesondere {\"u}ber neurodegenerativen Erkrankungen gewonnen werden.}, language = {de} } @article{BerretzSkorupaSanderetal.2011, author = {Berretz, Frank and Skorupa, Sascha and Sander, Volker and Belloum, Adam and Bubak, Marian}, title = {Actor-Driven Workflow Execution in Distributed Environments}, series = {Euro-Par 2010 Parallel Processing Workshops : HeteroPAR, HPCC, HiBB, CoreGrid, UCHPC, HPCF, PROPER, CCPI, VHPC ; Ischia, Italy, August 31 - September 3, 2010 ; revised selected papers / Mario R. Guarracino ... (eds.)}, journal = {Euro-Par 2010 Parallel Processing Workshops : HeteroPAR, HPCC, HiBB, CoreGrid, UCHPC, HPCF, PROPER, CCPI, VHPC ; Ischia, Italy, August 31 - September 3, 2010 ; revised selected papers / Mario R. Guarracino ... (eds.)}, publisher = {Springer}, address = {Berlin}, isbn = {978-3-642-21877-4}, pages = {287 -- 294}, year = {2011}, language = {en} }