@article{ŠakićMarinkovićButenwegetal.2023, author = {Šakić, Bogdan and Marinković, Marko and Butenweg, Christoph and Klinkel, Sven}, title = {Influence of slab deflection on the out-of-plane capacity of unreinforced masonry partition walls}, series = {Engineering Structures}, volume = {276}, journal = {Engineering Structures}, editor = {Yang, J.}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0141-0296}, doi = {10.1016/j.engstruct.2022.115342}, year = {2023}, abstract = {Severe damage of non-structural elements is noticed in previous earthquakes, causing high economic losses and posing a life threat for the people. Masonry partition walls are one of the most commonly used non-structural elements. Therefore, their behaviour under earthquake loading in out-of-plane (OOP) direction is investigated by several researchers in the past years. However, none of the existing experimental campaigns or analytical approaches consider the influence of prior slab deflection on OOP response of partition walls. Moreover, none of the existing construction techniques for the connection of partition walls with surrounding reinforced concrete (RC) is investigated for the combined slab deflection and OOP loading. However, the inevitable time-dependent behaviour of RC slabs leads to high values of final slab deflections which can further influence boundary conditions of partition walls. Therefore, a comprehensive study on the influence of slab deflection on the OOP capacity of masonry partitions is conducted. In the first step, experimental tests are carried out. Results of experimental tests are further used for the calibration of the numerical model employed for a parametric study. Based on the results, behaviour under combined loading for different construction techniques is explained. The results show that slab deflection leads either to severe damage or to a high reduction of OOP capacity. Existing practical solutions do not account for these effects. In this contribution, recommendations to overcome the problems of combined slab deflection and OOP loading on masonry partition walls are given. Possible interaction of in-plane (IP) loading, with the combined slab deflection and OOP loading on partition walls, is not investigated in this study.}, language = {en} } @article{KubalskiButenwegElDeib2022, author = {Kubalski, Thomas and Butenweg, Christoph and El-Deib, Khaled}, title = {Vereinfachte Ber{\"u}cksichtigung der Rahmentragwirkung in Mauerwerksgeb{\"a}uden}, series = {Bautechnik}, volume = {99}, journal = {Bautechnik}, number = {12}, editor = {Jesse, Dirk}, publisher = {Ernst \& Sohn}, address = {Berlin}, issn = {0932-8351}, doi = {10.1002/bate.202200081}, pages = {865 -- 928}, year = {2022}, abstract = {Aufgrund der gestiegenen Anforderungen durch h{\"o}here Einwirkungen aus Wind und Erdbeben ist eine Verbesserung und Optimierung der Berechnungs- und Bemessungsans{\"a}tze f{\"u}r Mauerwerksbauten erforderlich. Eine bessere Ausnutzung der Tragwerksreserven ist durch die Ber{\"u}cksichtigung der Rahmentragwirkung mit einer Aktivierung der Deckenscheiben in den Rechenmodellen m{\"o}glich, die in der Praxis aufgrund der Komplexit{\"a}t der Wand-Decken-Interaktion bislang nicht ausgenutzt wird. Im vorliegenden Aufsatz wird ein vereinfachter Ansatz auf Grundlage der mitwirkenden Plattenbreite von Schubw{\"a}nden aus Mauerwerk vorgestellt, der die wesentlichen Einflussfaktoren in parametrisierten Tabellen erfasst.
Damit steht den Tragwerksplanern ein einfach anwendbares Werkzeug zur Verf{\"u}gung, um die Rahmentragwirkung in der Mauerwerksbemessung anzusetzen.}, language = {de} } @article{ButenwegMarinkovicPhlippetal.2022, author = {Butenweg, Christoph and Marinkovic, Marko and Phlipp, Michel and Lins, Robin and Renaut, Philipp}, title = {Isolierung und BIM-basiertes Bauwerksmonitoring des neuen Geb{\"a}udekomplexes f{\"u}r das BioSense-Institut in Novi Sad, Serbien}, series = {Bauingenieur}, volume = {97}, journal = {Bauingenieur}, number = {6}, editor = {Haghsheno, Shervin}, publisher = {VDI Fachmedien}, address = {D{\"u}sseldorf}, issn = {1436-4867}, doi = {10.37544/0005-6650-2022-06-28}, pages = {S3 -- S5}, year = {2022}, abstract = {Im Norden von Serbien erfolgt in Novi Sad der Neubau eines modernen Forschungsgeb{\"a}udes f{\"u}r das BioSense-Institut mit finanzieller Unterst{\"u}tzung durch die Europ{\"a}ische Union. Der Geb{\"a}udeteil mit Laboren wird zum Schutz und zur Sicherstellung des reibungslosen Betriebs der sensiblen und kapitalintensiven technischen Einbauten mit einer Erdbebenisolierung mit integrierter K{\"o}rperschallisolation versehen. Zus{\"a}tzlich wird der entkoppelte Laborteil des Forschungsgeb{\"a}udes mit einem BIM-basierten Bauwerksmonitoring versehen, um {\"A}nderungen des Geb{\"a}udezustands jederzeit abfragen und beurteilen zu k{\"o}nnen.}, language = {de} } @article{MorandiButenwegBreisetal.2022, author = {Morandi, Paolo and Butenweg, Christoph and Breis, Khaled and Beyer, Katrin and Magenes, Guido}, title = {Latest findings on the behaviour factor q for the seismic design of URM buildings}, series = {Bulletin of Earthquake Engineering}, volume = {20}, journal = {Bulletin of Earthquake Engineering}, number = {11}, editor = {Ansal, Atilla}, publisher = {Springer Nature}, address = {Cham}, issn = {1573-1456}, doi = {10.1007/s10518-022-01419-7}, pages = {5797 -- 5848}, year = {2022}, abstract = {Recent earthquakes such as the 2012 Emilia earthquake sequence showed that recently built unreinforced masonry (URM) buildings behaved much better than expected and sustained, despite maximum PGA values ranging between 0.20 and 0.30 g, either minor damage or structural damage that is deemed repairable. Especially low-rise residential and commercial masonry buildings with a code-conforming seismic design and detailing behaved in general very well without substantial damages. The low damage grades of modern masonry buildings that were observed during this earthquake series highlighted again that codified design procedures based on linear analysis can be rather conservative. Although advances in simulation tools make nonlinear calculation methods more readily accessible to designers, linear analyses will still be the standard design method for years to come. The present paper aims to improve the linear seismic design method by providing a proper definition of the q-factor of URM buildings. These q-factors are derived for low-rise URM buildings with rigid diaphragms which represent recent construction practice in low to moderate seismic areas of Italy and Germany. The behaviour factor components for deformation and energy dissipation capacity and for overstrength due to the redistribution of forces are derived by means of pushover analyses. Furthermore, considerations on the behaviour factor component due to other sources of overstrength in masonry buildings are presented.
As a result of the investigations, rationally based values of the behaviour factor q to be used in linear analyses in the range of 2.0-3.0 are proposed.}, language = {en} } @article{Timme2022, author = {Timme, Michael}, title = {Beweislast beim gutgl{\"a}ubigen Erwerb eines Kraftfahrzeugs ohne Erhalt der Zulassungsbescheinigung Teil II — Zugleich eine Besprechung von BGH, Urt. v. 23.9.2022 - V ZR 148/21, MDR 2022, 1542}, series = {Monatsschrift f{\"u}r Deutsches Recht}, volume = {77}, journal = {Monatsschrift f{\"u}r Deutsches Recht}, number = {1}, publisher = {Verlag Dr. Otto Schmidt}, address = {K{\"o}ln}, issn = {0340-1812}, doi = {10.9785/mdtr-2023-770102}, pages = {r5 -- r7}, year = {2022}, abstract = {Im Handel mit Kraftfahrzeugen geh{\"o}ren Aspekte des gutgl{\"a}ubigen Erwerbs zu den beinahe allt{\"a}glichen Standardproblemen. Der BGH f{\"u}gt in seiner Entscheidung v. 23.9.2022 - V ZR 148/21, MDR 2022, 1541 diesem im Detail breit gef{\"a}cherten Themenfeld einen weiteren Mosaikstein hinzu: Der Erwerber erhielt das verkaufte Kfz ohne {\"U}bergabe einer Zulassungsbescheinigung Teil II, behauptet aber, diese Bescheinigung sei dem von ihm eingeschalteten Vermittler bei Erwerb (als F{\"a}lschung) vorgelegt worden. Tats{\"a}chlich befand sich das Original durchg{\"a}ngig beim wahren Eigent{\"u}mer, der nunmehr Herausgabe des Fahrzeugs verlangt. Der BGH sch{\"u}tzt in dieser Gestaltung im Ergebnis den Erwerber. Die Entscheidung ist in mehrfacher Hinsicht bemerkenswert.}, language = {de} } @article{MarinkovićButenweg2022, author = {Marinković, Marko and Butenweg, Christoph}, title = {Experimental testing of decoupled masonry infills with steel anchors for out-of-plane support under combined in-plane and out-of-plane seismic loading}, series = {Construction and Building Materials}, volume = {318}, journal = {Construction and Building Materials}, number = {1}, editor = {Ford, Michael C.}, publisher = {Elsevier}, address = {Amsterdam}, issn = {1879-0526}, doi = {10.1016/j.conbuildmat.2021.126041}, year = {2022}, abstract = {Because of simple construction process, high energy efficiency, significant fire resistance and excellent sound isolation, masonry infilled reinforced concrete (RC) frame structures are very popular in most of the countries in the world, as well as in seismic active areas. However, many RC frame structures with masonry infills were seriously damaged during earthquake events, as the traditional infills are generally constructed with direct contact to the RC frame which brings undesirable infill/frame interaction. This interaction leads to the activation of the equivalent diagonal strut in the infill panel, due to the RC frame deformation, and combined with seismically induced loads perpendicular to the infill panel often causes total collapses of the masonry infills and heavy damages to the RC frames. This fact was the motivation for developing different approaches for improving the behaviour of masonry infills, where infill isolation (decoupling) from the frame has been more intensively studied in the last decade. In-plane isolation of the infill wall reduces infill activation, but causes the need for additional measures to restrain out-of-plane movements. This can be provided by installing steel anchors, as proposed by some researchers.
Within the framework of the European research project INSYSME (Innovative Systems for Earthquake Resistant Masonry Enclosures in Reinforced Concrete Buildings) the system based on the use of elastomers for in-plane decoupling and steel anchors for out-of-plane restraint was tested. This constructive solution was tested and deeply investigated during the experimental campaign where traditional and decoupled masonry infilled RC frames with anchors were subjected to separate and combined in-plane and out-of-plane loading. Based on a detailed evaluation and comparison of the test results, the performance and effectiveness of the developed system are illustrated.}, language = {en} } @article{ElDeibButenwegKlinkel2021, author = {El-Deib, Khaled and Butenweg, Christoph and Klinkel, Sven}, title = {Erdbebennachweis von Mauerwerksbauten mit realistischen Modellen und erh{\"o}hten Verhaltensbeiwerten}, series = {Mauerwerk}, volume = {2021}, journal = {Mauerwerk}, number = {3}, editor = {Jesse, Dirk}, publisher = {Wiley}, address = {Weinheim}, issn = {1437-1022}, doi = {10.1002/dama.202110014}, pages = {110 -- 119}, year = {2021}, abstract = {Die Anwendung des linearen Nachweiskonzepts auf Mauerwerksbauten f{\"u}hrt dazu, dass bereits heute Standsicherheitsnachweise f{\"u}r Geb{\"a}ude mit {\"u}blichen Grundrissen in Gebieten mit moderaten Erdbebeneinwirkungen nicht mehr gef{\"u}hrt werden k{\"o}nnen. Diese Problematik wird sich in Deutschland mit der Einf{\"u}hrung kontinuierlicher probabilistischer Erdbebenkarten weiter versch{\"a}rfen. Aufgrund der Erh{\"o}hung der seismischen Einwirkungen, die sich vielerorts ergibt, ist es erforderlich, die vorhandenen, bislang nicht ber{\"u}cksichtigten Tragf{\"a}higkeitsreserven in nachvollziehbaren Nachweiskonzepten in der Baupraxis verf{\"u}gbar zu machen. Der vorliegende Beitrag stellt ein Konzept f{\"u}r die geb{\"a}udespezifische Ermittlung von erh{\"o}hten Verhaltensbeiwerten vor. Die Verhaltensbeiwerte setzen sich aus drei Anteilen zusammen, mit denen die Lastumverteilung im Grundriss, die Verformungsf{\"a}higkeit und Energiedissipation sowie die {\"U}berfestigkeiten ber{\"u}cksichtigt werden. F{\"u}r die rechnerische Ermittlung dieser drei Anteile wird ein nichtlineares Nachweiskonzept auf Grundlage von Pushover-Analysen vorgeschlagen, in denen die Interaktionen von W{\"a}nden und Geschossdecken durch einen Einspanngrad beschrieben werden. F{\"u}r die Bestimmung der Einspanngrade wird ein nichtlinearer Modellierungsansatz eingef{\"u}hrt, mit dem die Interaktion von W{\"a}nden und Decken abgebildet werden kann. Die Anwendung des Konzepts mit erh{\"o}hten geb{\"a}udespezifischen Verhaltensbeiwerten wird am Beispiel eines Mehrfamilienhauses aus Kalksandsteinen demonstriert. Die Ergebnisse der linearen Nachweise mit erh{\"o}hten Verhaltensbeiwerten f{\"u}r dieses Geb{\"a}ude liegen deutlich n{\"a}her an den Ergebnissen nichtlinearer Nachweise und somit bleiben {\"u}bliche Grundrisse in Erdbebengebieten mit den traditionellen linearen Rechenans{\"a}tzen nachweisbar.}, language = {de} } @article{RossiWinandsButenweg2022, author = {Rossi, Leonardo and Winands, Mark H. M.
and Butenweg, Christoph}, title = {Monte Carlo Tree Search as an intelligent search tool in structural design problems}, series = {Engineering with Computers : An International Journal for Simulation-Based Engineering}, volume = {38}, journal = {Engineering with Computers : An International Journal for Simulation-Based Engineering}, number = {4}, editor = {Zhang, Jessica}, publisher = {Springer Nature}, address = {Cham}, issn = {1435-5663}, doi = {10.1007/s00366-021-01338-2}, pages = {3219 -- 3236}, year = {2022}, abstract = {Monte Carlo Tree Search (MCTS) is a search technique that in the last decade emerged as a major breakthrough for Artificial Intelligence applications regarding board- and video-games. In 2016, AlphaGo, an MCTS-based software agent, outperformed the human world champion of the board game Go. This game was for long considered almost infeasible for machines, due to its immense search space and the need for a long-term strategy. Since this historical success, MCTS is considered as an effective new approach for many other scientific and technical problems. Interestingly, civil structural engineering, as a discipline, offers many tasks whose solution may benefit from intelligent search and in particular from adopting MCTS as a search tool. In this work, we show how MCTS can be adapted to search for suitable solutions of a structural engineering design problem. The problem consists of choosing the load-bearing elements in a reference reinforced concrete structure, so to achieve a set of specific dynamic characteristics. In the paper, we report the results obtained by applying both a plain and a hybrid version of single-agent MCTS. The hybrid approach consists of an integration of both MCTS and classic Genetic Algorithm (GA), the latter also serving as a term of comparison for the results. The study's outcomes may open new perspectives for the adoption of MCTS as a design tool for civil engineers.}, language = {en} } @article{ButenwegKubalskiElDeibetal.2021, author = {Butenweg, Christoph and Kubalski, Thomas and El-Deib, Khaled and Gellert, Christoph}, title = {Erdbebennachweis von Mauerwerksbauten nach DIN EN 1998-1/NA-2021}, series = {Bautechnik : Zeitschrift f{\"u}r den gesamten Ingenieurbau}, volume = {98}, journal = {Bautechnik : Zeitschrift f{\"u}r den gesamten Ingenieurbau}, number = {11}, editor = {Jesse, Dirk}, publisher = {Ernst \& Sohn}, address = {Berlin}, issn = {1437-0999}, doi = {10.1002/bate.202100064}, pages = {852 -- 863}, year = {2021}, abstract = {Mauerwerksbauten in Deutschland sind mit Einf{\"u}hrung des nationalen Anwendungsdokuments DIN EN 1998-1/NA auf Grundlage einer neuen probabilistischen Erdbebenkarte nachzuweisen. F{\"u}r erfolgreiche Erdbebennachweise {\"u}blicher Grundrissformen von Mauerwerksbauten stehen in dem zuk{\"u}nftigen Anwendungsdokument neue rechnerische Nachweism{\"o}glichkeiten zur Verf{\"u}gung, mit denen die Tragf{\"a}higkeitsreserven von Mauerwerksbauten in der Baupraxis mit einem {\"u}berschaubaren Aufwand besser in Ansatz gebracht werden k{\"o}nnen. Das Standardrechenverfahren ist weiterhin der kraftbasierte Nachweis, der nun mit h{\"o}heren Verhaltensbeiwerten im Vergleich zur DIN 4149 durchgef{\"u}hrt werden kann. Die h{\"o}heren Verhaltensbeiwerte basieren auf der besseren Ausnutzung der geb{\"a}udespezifischen Verformungsf{\"a}higkeit und Energiedissipation sowie der Lastumverteilung der Schubkr{\"a}fte im Grundriss mit Ansatz von Rahmentragwirkung durch Wand-Deckeninteraktionen. 
Alternativ dazu kann ein nichtlinearer Nachweis auf Grundlage von Pushover-Analysen zur Anwendung kommen. Vervollst{\"a}ndigt werden die Regelungen f{\"u}r Mauerwerksbauten durch neue Regelungen f{\"u}r nichttragende Innenw{\"a}nde und Außenmauerschalen. Der vorliegende Beitrag stellt die Grundlagen und Hintergr{\"u}nde der neuen rechnerischen Nachweise in DIN EN 1998-1/NA vor und demonstriert deren Anwendung an einem Beispiel aus der Praxis.}, language = {de} } @article{ButenwegBursiPaolaccietal.2021, author = {Butenweg, Christoph and Bursi, Oreste S. and Paolacci, Fabrizio and Marinković, Marko and Lanese, Igor and Nardin, Chiara and Quinci, Gianluca}, title = {Seismic performance of an industrial multi-storey frame structure with process equipment subjected to shake table testing}, series = {Engineering Structures}, volume = {243}, journal = {Engineering Structures}, number = {15}, editor = {Yang, J.}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0141-0296}, doi = {10.1016/j.engstruct.2021.112681}, year = {2021}, abstract = {Past earthquakes demonstrated the high vulnerability of industrial facilities equipped with complex process technologies leading to serious damage of process equipment and multiple and simultaneous release of hazardous substances. Nonetheless, current standards for seismic design of industrial facilities are considered inadequate to guarantee proper safety conditions against exceptional events entailing loss of containment and related consequences. On these premises, the SPIF project -Seismic Performance of Multi-Component Systems in Special Risk Industrial Facilities- was proposed within the framework of the European H2020 SERA funding scheme. In detail, the objective of the SPIF project is the investigation of the seismic behaviour of a representative industrial multi-storey frame structure equipped with complex process components by means of shaking table tests. Along this main vein and in a performance-based design perspective, the issues investigated in depth are the interaction between a primary moment resisting frame (MRF) steel structure and secondary process components that influence the performance of the whole system; and a proper check of floor spectra predictions. The evaluation of experimental data clearly shows a favourable performance of the MRF structure, some weaknesses of local details due to the interaction between floor crossbeams and process components and, finally, the overconservatism of current design standards w.r.t. floor spectra predictions.}, language = {en} } @article{BaringhausGaigall2017, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {On Hotelling's T² test in a special paired sample case}, series = {Communications in Statistics - Theory and Methods}, volume = {48}, journal = {Communications in Statistics - Theory and Methods}, number = {2}, publisher = {Taylor \& Francis}, address = {London}, issn = {1532-415X}, doi = {10.1080/03610926.2017.1408828}, pages = {257 -- 267}, year = {2017}, abstract = {In a special paired sample case, Hotelling's T² test based on the differences of the paired random vectors is the likelihood ratio test for testing the hypothesis that the paired random vectors have the same mean; with respect to a special group of affine linear transformations it is the uniformly most powerful invariant test for the general alternative of a difference in mean. We present an elementary straightforward proof of this result. 
The likelihood ratio test for testing the hypothesis that the covariance structure is of the assumed special form is derived and discussed. Applications to real data are given.}, language = {en} } @article{BaringhausGaigall2017a, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {Hotelling's T² tests in paired and independent survey samples: An efficiency comparison}, series = {Journal of Multivariate Analysis}, volume = {2017}, journal = {Journal of Multivariate Analysis}, number = {154}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0047-259X}, doi = {10.1016/j.jmva.2016.11.004}, pages = {177 -- 198}, year = {2017}, abstract = {Hotelling's T² tests in paired and independent survey samples are compared using the traditional asymptotic efficiency concepts of Hodges-Lehmann, Bahadur and Pitman, as well as through criteria based on the volumes of corresponding confidence regions. Conditions characterizing the superiority of a procedure are given in terms of population canonical correlation type coefficients. Statistical tests for checking these conditions are developed. Test statistics based on the eigenvalues of a symmetrized sample cross-covariance matrix are suggested, as well as test statistics based on sample canonical correlation type coefficients.}, language = {en} } @article{BaringhausGaigall2018, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {Efficiency comparison of the Wilcoxon tests in paired and independent survey samples}, series = {Metrika}, volume = {2018}, journal = {Metrika}, number = {81}, publisher = {Springer}, address = {Berlin}, issn = {1435-926X}, doi = {10.1007/s00184-018-0661-4}, pages = {891 -- 930}, year = {2018}, abstract = {The efficiency concepts of Bahadur and Pitman are used to compare the Wilcoxon tests in paired and independent survey samples. A comparison through the length of corresponding confidence intervals is also done. Simple conditions characterizing the dominance of a procedure are derived. Statistical tests for checking these conditions are suggested and discussed.}, language = {en} } @article{BaringhausGaigall2019, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {On an asymptotic relative efficiency concept based on expected volumes of confidence regions}, series = {Statistics - A Journal of Theoretical and Applied Statistics}, volume = {53}, journal = {Statistics - A Journal of Theoretical and Applied Statistics}, number = {6}, publisher = {Taylor \& Francis}, address = {London}, issn = {1029-4910}, doi = {10.1080/02331888.2019.1683560}, pages = {1396 -- 1436}, year = {2019}, abstract = {The paper deals with an asymptotic relative efficiency concept for confidence regions of multidimensional parameters that is based on the expected volumes of the confidence regions. Under standard conditions the asymptotic relative efficiencies of confidence regions are seen to be certain powers of the ratio of the limits of the expected volumes. These limits are explicitly derived for confidence regions associated with certain plugin estimators, likelihood ratio tests and Wald tests. Under regularity conditions, the asymptotic relative efficiency of each of these procedures with respect to each one of its competitors is equal to 1.
The results are applied to multivariate normal distributions and multinomial distributions in a fairly general setting.}, language = {en} } @article{Gaigall2019, author = {Gaigall, Daniel}, title = {On a new approach to the multi-sample goodness-of-fit problem}, series = {Communications in Statistics - Simulation and Computation}, volume = {53}, journal = {Communications in Statistics - Simulation and Computation}, number = {10}, publisher = {Taylor \& Francis}, address = {London}, issn = {1532-4141}, doi = {10.1080/03610918.2019.1618472}, pages = {2971 -- 2989}, year = {2019}, abstract = {Suppose we have k samples X₁,₁,…,X₁,ₙ₁,…,Xₖ,₁,…,Xₖ,ₙₖ with different sample sizes n₁,…,nₖ and unknown underlying distribution functions F₁,…,Fₖ as observations, plus k families of distribution functions {G₁(⋅,ϑ);ϑ∈Θ},…,{Gₖ(⋅,ϑ);ϑ∈Θ}, each indexed by elements ϑ from the same parameter set Θ. We consider the new goodness-of-fit problem of whether or not (F₁,…,Fₖ) belongs to the parametric family {(G₁(⋅,ϑ),…,Gₖ(⋅,ϑ));ϑ∈Θ}. New test statistics are presented and a parametric bootstrap procedure for the approximation of the unknown null distributions is discussed. Under regularity assumptions, it is proved that the approximation works asymptotically, and the limiting distributions of the test statistics in the null hypothesis case are determined. Simulation studies investigate the quality of the new approach for small and moderate sample sizes. Applications to real-data sets illustrate how the idea can be used for verifying model assumptions.}, language = {en} } @article{DitzhausGaigall2018, author = {Ditzhaus, Marc and Gaigall, Daniel}, title = {A consistent goodness-of-fit test for huge dimensional and functional data}, series = {Journal of Nonparametric Statistics}, volume = {30}, journal = {Journal of Nonparametric Statistics}, number = {4}, publisher = {Taylor \& Francis}, address = {Abingdon}, issn = {1029-0311}, doi = {10.1080/10485252.2018.1486402}, pages = {834 -- 859}, year = {2018}, abstract = {A nonparametric goodness-of-fit test for random variables with values in a separable Hilbert space is investigated. To verify the null hypothesis that the data come from a specific distribution, an integral type test based on a Cram{\´e}r-von-Mises statistic is suggested. The convergence in distribution of the test statistic under the null hypothesis is proved and the test's consistency is concluded. Moreover, properties under local alternatives are discussed. Applications are given for data of huge but finite dimension and for functional data in infinite dimensional spaces. A general approach enables the treatment of incomplete data. In simulation studies the test competes with alternative proposals.}, language = {en} } @article{BaringhausGaigallThiele2018, author = {Baringhaus, Ludwig and Gaigall, Daniel and Thiele, Jan Philipp}, title = {Statistical inference for L²-distances to uniformity}, series = {Computational Statistics}, volume = {2018}, journal = {Computational Statistics}, number = {33}, publisher = {Springer}, address = {Berlin}, issn = {1613-9658}, doi = {10.1007/s00180-018-0820-0}, pages = {1863 -- 1896}, year = {2018}, abstract = {The paper deals with the asymptotic behaviour of estimators, statistical tests and confidence intervals for L²-distances to uniformity based on the empirical distribution function, the integrated empirical distribution function and the integrated empirical survival function.
Approximations of power functions, confidence intervals for the L²-distances and statistical neighbourhood-of-uniformity validation tests are obtained as main applications. The finite sample behaviour of the procedures is illustrated by a simulation study.}, language = {en} } @article{BaringhausGaigall2015, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {On an independence test approach to the goodness-of-fit problem}, series = {Journal of Multivariate Analysis}, volume = {2015}, journal = {Journal of Multivariate Analysis}, number = {140}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0047-259X}, doi = {10.1016/j.jmva.2015.05.013}, pages = {193 -- 208}, year = {2015}, abstract = {Let X₁,…,Xₙ be independent and identically distributed random variables with distribution F. Assuming that there are measurable functions f:R²→R and g:R²→R characterizing a family F of distributions on the Borel sets of R in the way that the random variables f(X₁,X₂),g(X₁,X₂) are independent, if and only if F∈F, we propose to treat the testing problem H:F∈F,K:F∉F by applying a consistent nonparametric independence test to the bivariate sample variables (f(Xᵢ,Xⱼ),g(Xᵢ,Xⱼ)),1⩽i,j⩽n,i≠j. A parametric bootstrap procedure needed to get critical values is shown to work. The consistency of the test is discussed. The power performance of the procedure is compared with that of the classical tests of Kolmogorov-Smirnov and Cram{\´e}r-von Mises in the special cases where F is the family of gamma distributions or the family of inverse Gaussian distributions.}, language = {en} } @article{Gaigall2021, author = {Gaigall, Daniel}, title = {Test for Changes in the Modeled Solvency Capital Requirement of an Internal Risk Model}, series = {ASTIN Bulletin}, volume = {51}, journal = {ASTIN Bulletin}, number = {3}, publisher = {Cambridge Univ. Press}, address = {Cambridge}, issn = {1783-1350}, doi = {10.1017/asb.2021.20}, pages = {813 -- 837}, year = {2021}, abstract = {In the context of the Solvency II directive, the operation of an internal risk model is a possible way for risk assessment and for the determination of the solvency capital requirement of an insurance company in the European Union. A Monte Carlo procedure is customary to generate a model output. To be compliant with the directive, validation of the internal risk model is conducted on the basis of the model output. For this purpose, we suggest a new test for checking whether there is a significant change in the modeled solvency capital requirement. Asymptotic properties of the test statistic are investigated and a bootstrap approximation is justified. A simulation study investigates the performance of the test in the finite sample case and confirms the theoretical results. The internal risk model and the application of the test is illustrated in a simplified example. 
The method has more general usage for inference of a broad class of law-invariant and coherent risk measures on the basis of a paired sample.}, language = {en} } @article{Gaigall2020, author = {Gaigall, Daniel}, title = {Hoeffding-Blum-Kiefer-Rosenblatt independence test statistic on partly not identically distributed data}, series = {Communications in Statistics - Theory and Methods}, volume = {51}, journal = {Communications in Statistics - Theory and Methods}, number = {12}, publisher = {Taylor \& Francis}, address = {London}, issn = {1532-415X}, doi = {10.1080/03610926.2020.1805767}, pages = {4006 -- 4028}, year = {2020}, abstract = {The established Hoeffding-Blum-Kiefer-Rosenblatt independence test statistic is investigated for partly not identically distributed data. Surprisingly, it turns out that the statistic has the well-known distribution-free limiting null distribution of the classical criterion under standard regularity conditions. An application is testing goodness-of-fit for the regression function in a nonparametric random effects meta-regression model, where the consistency is obtained as well. Simulations investigate size and power of the approach for small and moderate sample sizes. A real data example based on clinical trials illustrates how the test can be used in applications.}, language = {en} } @article{Gaigall2020a, author = {Gaigall, Daniel}, title = {Testing marginal homogeneity of a continuous bivariate distribution with possibly incomplete paired data}, series = {Metrika}, volume = {2020}, journal = {Metrika}, number = {83}, publisher = {Springer}, issn = {1435-926X}, doi = {10.1007/s00184-019-00742-5}, pages = {437 -- 465}, year = {2020}, abstract = {We discuss the testing problem of homogeneity of the marginal distributions of a continuous bivariate distribution based on a paired sample with possibly missing components (missing completely at random). Applying the well-known two-sample Cram{\´e}r-von-Mises distance to the remaining data, we determine the limiting null distribution of our test statistic in this situation. It is seen that a new resampling approach is appropriate for the approximation of the unknown null distribution. We prove that the resulting test asymptotically reaches the significance level and is consistent. Properties of the test under local alternatives are pointed out as well. Simulations investigate the quality of the approximation and the power of the new approach in the finite sample case. As an illustration we apply the test to real data sets.}, language = {en} } @article{Gaigall2020b, author = {Gaigall, Daniel}, title = {Rothman-Woodroofe symmetry test statistic revisited}, series = {Computational Statistics \& Data Analysis}, volume = {2020}, journal = {Computational Statistics \& Data Analysis}, number = {142}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0167-9473}, doi = {10.1016/j.csda.2019.106837}, pages = {Article 106837}, year = {2020}, abstract = {The Rothman-Woodroofe symmetry test statistic is revisited on the basis of independent but not necessarily identically distributed random variables. The distribution-freeness if the underlying distributions are all symmetric and continuous is obtained. The results are applied for testing symmetry in a meta-analysis random effects model. The consistency of the procedure is discussed in this situation as well. A comparison with an alternative proposal from the literature is conducted via simulations.
Real data are analyzed to demonstrate how the new approach works in practice.}, language = {en} } @article{DitzhausGaigall2022, author = {Ditzhaus, Marc and Gaigall, Daniel}, title = {Testing marginal homogeneity in Hilbert spaces with applications to stock market returns}, series = {Test}, volume = {2022}, journal = {Test}, number = {31}, publisher = {Springer}, issn = {1863-8260}, doi = {10.1007/s11749-022-00802-5}, pages = {749 -- 770}, year = {2022}, abstract = {This paper considers a paired data framework and discusses the question of marginal homogeneity of bivariate high-dimensional or functional data. The related testing problem can be endowed into a more general setting for paired random variables taking values in a general Hilbert space. To address this problem, a Cram{\´e}r-von-Mises type test statistic is applied and a bootstrap procedure is suggested to obtain critical values and finally a consistent test. The desired properties of a bootstrap test can be derived that are asymptotic exactness under the null hypothesis and consistency under alternatives. Simulations show the quality of the test in the finite sample case. A possible application is the comparison of two possibly dependent stock market returns based on functional data. The approach is demonstrated based on historical data for different stock market indices.}, language = {en} } @article{GaigallGerstenbergTrinh2022, author = {Gaigall, Daniel and Gerstenberg, Julian and Trinh, Thi Thu Ha}, title = {Empirical process of concomitants for partly categorial data and applications in statistics}, series = {Bernoulli}, volume = {28}, journal = {Bernoulli}, number = {2}, publisher = {International Statistical Institute}, address = {Den Haag, NL}, issn = {1573-9759}, doi = {10.3150/21-BEJ1367}, pages = {803 -- 829}, year = {2022}, abstract = {On the basis of independent and identically distributed bivariate random vectors, where the components are categorial and continuous variables, respectively, the related concomitants, also called induced order statistic, are considered. The main theoretical result is a functional central limit theorem for the empirical process of the concomitants in a triangular array setting. A natural application is hypothesis testing. An independence test and a two-sample test are investigated in detail. The fairly general setting enables limit results under local alternatives and bootstrap samples. For the comparison with existing tests from the literature simulation studies are conducted. The empirical results obtained confirm the theoretical findings.}, language = {en} } @article{MarinkovićFloresCalvinistiButenweg2020, author = {Marinković, Marko and Flores Calvinisti, Santiago and Butenweg, Christoph}, title = {Numerical analysis of reinforced concrete frame buildings with decoupled infill walls}, series = {Building Materials and Structures}, volume = {63}, journal = {Building Materials and Structures}, number = {4}, publisher = {Society for Materials and Structures Testing of Serbia}, address = {Belgrad}, issn = {2217-8139}, doi = {10.5937/GRMK2004013M}, pages = {13 -- 48}, year = {2020}, abstract = {Reinforced concrete (RC) buildings with masonry infill walls are widely used in many countries all over the world. Although infills are considered as non-structural elements, they significantly change dynamic characteristics of RC frame structures during earthquake excitation. 
Recently, significant effort was spent on studying decoupled infills, which are isolated from the surrounding frame usually by adding a gap between frame and infill. In this case, the frame deformation does not activate infill wall, thus infills are not influencing the behaviour of the frame. This paper presents the results of the investigation of the behaviour of RC frame buildings with the INODIS system that decouples masonry infills from the surrounding frame. Effect of masonry infill decoupling was investigated first on the one-bay onestorey frame. This was used as a base for parametric study on the frames with more bays and storeys, as well as on the building level. Change of stiffness and dynamic characteristics was analysed as well as response under earthquake loading. Comparison with the bare frame and traditionally infilled frame was performed. The results show that behaviour of the decoupled infilled frames is similar to the bare frame, whereas behaviour of frames with traditional infills is significantly different and demands complex numerical models. This means that if adequate decoupling is applied, design of}, language = {mul} } @article{BaringhausGaigall2022, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {A goodness-of-fit test for the compound Poisson exponential model}, series = {Journal of Multivariate Analysis}, volume = {195}, journal = {Journal of Multivariate Analysis}, number = {Article 105154}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0047-259X}, doi = {10.1016/j.jmva.2022.105154}, year = {2022}, abstract = {On the basis of bivariate data, assumed to be observations of independent copies of a random vector (S,N), we consider testing the hypothesis that the distribution of (S,N) belongs to the parametric class of distributions that arise with the compound Poisson exponential model. Typically, this model is used in stochastic hydrology, with N as the number of raindays, and S as total rainfall amount during a certain time period, or in actuarial science, with N as the number of losses, and S as total loss expenditure during a certain time period. The compound Poisson exponential model is characterized in the way that a specific transform associated with the distribution of (S,N) satisfies a certain differential equation. Mimicking the function part of this equation by substituting the empirical counterparts of the transform we obtain an expression the weighted integral of the square of which is used as test statistic. We deal with two variants of the latter, one of which being invariant under scale transformations of the S-part by fixed positive constants. Critical values are obtained by using a parametric bootstrap procedure. The asymptotic behavior of the tests is discussed. A simulation study demonstrates the performance of the tests in the finite sample case. The procedure is applied to rainfall data and to an actuarial dataset. 
A multivariate extension is also discussed.}, language = {en} } @article{TranTrinhDaoetal.2022, author = {Tran, Ngoc Trinh and Trinh, Tu Luc and Dao, Ngoc Tien and Giap, Van Tan and Truong, Manh Khuyen and Dinh, Thuy Ha and Staat, Manfred}, title = {FEM shakedown analysis of structures under random strength with chance constrained programming}, series = {Vietnam Journal of Mechanics}, volume = {44}, journal = {Vietnam Journal of Mechanics}, number = {4}, publisher = {Vietnam Academy of Science and Technology (VAST)}, issn = {0866-7136}, doi = {10.15625/0866-7136/17943}, pages = {459 -- 473}, year = {2022}, abstract = {Direct methods, comprising limit and shakedown analysis, are a branch of computational mechanics. They play a significant role in mechanical and civil engineering design. The concept of direct methods aims to determine the ultimate load carrying capacity of structures beyond the elastic range. In practical problems, the direct methods lead to nonlinear convex optimization problems with a large number of variables and constraints. If strength and loading are random quantities, the shakedown analysis can be formulated as stochastic programming problem. In this paper, a method called chance constrained programming is presented, which is an effective method of stochastic programming to solve shakedown analysis problems under random conditions of strength. In this study, the loading is deterministic, and the strength is a normally or lognormally distributed variable.}, language = {en} } @article{Czarnecki2015, author = {Czarnecki, Christian}, title = {Moderne Telekommunikationsprodukte erfordern standardisierte Gesch{\"a}ftsprozesse}, series = {Wirtschaft und Wissenschaft}, journal = {Wirtschaft und Wissenschaft}, number = {2}, publisher = {Deutsche Telekom AG. Fachhochschule Leipzig}, address = {Leipzig}, pages = {7 -- 7}, year = {2015}, language = {de} } @article{MuellerSeginWeigandetal.2022, author = {Mueller, Tobias and Segin, Alexander and Weigand, Christoph and Schmitt, Robert H.}, title = {Feature selection for measurement models}, series = {International journal of quality \& reliability management}, journal = {International journal of quality \& reliability management}, number = {Vol. ahead-of-print, No. ahead-of-print.}, publisher = {Emerald Group Publishing Limited}, address = {Bingley}, issn = {0265-671X}, doi = {10.1108/IJQRM-07-2021-0245}, year = {2022}, abstract = {Purpose In the determination of the measurement uncertainty, the GUM procedure requires the building of a measurement model that establishes a functional relationship between the measurand and all influencing quantities. Since the effort of modelling as well as quantifying the measurement uncertainties depend on the number of influencing quantities considered, the aim of this study is to determine relevant influencing quantities and to remove irrelevant ones from the dataset. Design/methodology/approach In this work, it was investigated whether the effort of modelling for the determination of measurement uncertainty can be reduced by the use of feature selection (FS) methods. For this purpose, 9 different FS methods were tested on 16 artificial test datasets, whose properties (number of data points, number of features, complexity, features with low influence and redundant features) were varied via a design of experiments. 
Findings Based on a success metric, the stability, universality and complexity of the method, two FS methods could be identified that reliably identify relevant and irrelevant influencing quantities for a measurement model. Originality/value For the first time, FS methods were applied to datasets with properties of classical measurement processes. The simulation-based results serve as a basis for further research in the field of FS for measurement models. The identified algorithms will be applied to real measurement processes in the future.}, language = {en} } @article{RuebbelkeVoegeleGrajewskietal.2022, author = {R{\"u}bbelke, Dirk and V{\"o}gele, Stefan and Grajewski, Matthias and Zobel, Luzy}, title = {Hydrogen-based steel production and global climate protection: An empirical analysis of the potential role of a European cross border adjustment mechanism}, series = {Journal of Cleaner Production}, volume = {380}, journal = {Journal of Cleaner Production}, number = {Part 2, Art. Nr.:135040}, publisher = {Elsevier}, issn = {0959-6526}, doi = {10.1016/j.jclepro.2022.135040}, year = {2022}, abstract = {The European Union's aim to become climate neutral by 2050 necessitates ambitious efforts to reduce carbon emissions. Large reductions can be attained particularly in energy intensive sectors like iron and steel. In order to prevent the relocation of such industries outside the EU in the course of tightening environmental regulations, the establishment of a climate club jointly with other large emitters and alternatively the unilateral implementation of an international cross-border carbon tax mechanism are proposed. This article focuses on the latter option choosing the steel sector as an example. In particular, we investigate the financial conditions under which a European cross border mechanism is capable to protect hydrogen-based steel production routes employed in Europe against more polluting competition from abroad. By using a floor price model, we assess the competitiveness of different steel production routes in selected countries. We evaluate the climate friendliness of steel production on the basis of specific GHG emissions. In addition, we utilize an input-output price model. It enables us to assess impacts of rising cost of steel production on commodities using steel as intermediates. Our results raise concerns that a cross-border tax mechanism will not suffice to bring about competitiveness of hydrogen-based steel production in Europe because the cost tends to remain higher than the cost of steel production in e.g. China. Steel is a classic example for a good used mainly as intermediate for other products. Therefore, a cross-border tax mechanism for steel will increase the price of products produced in the EU that require steel as an input. This can in turn adversely affect competitiveness of these sectors. Hence, the effects of higher steel costs on European exports should be borne in mind and could require the cross-border adjustment mechanism to also subsidize exports.}, language = {en} } @article{CzarneckiHeuserKraemeretal.2006, author = {Czarnecki, Christian and Heuser, Marcus and Kr{\"a}mer, Christian and Stummer, Christoph and Theisinger, Felix and Wettklo, Michael}, title = {How to get more customer satisfaction : Erfolg im Wettbewerb durch kundenzentrierte Prozesse}, series = {Detecon Management Report}, journal = {Detecon Management Report}, number = {3}, publisher = {Detecon Internat. 
GmbH}, address = {Eschborn}, issn = {1867-3147}, pages = {29 -- 33}, year = {2006}, abstract = {Unternehmen sind in der Regel {\"u}berzeugt, dass sie die Bed{\"u}rfnisse ihrer Kunden in den Mittelpunkt stellen. Aber in der direkten Interaktion mit dem Kunden zeigen sie h{\"a}ufig Schw{\"a}chen. Der folgende Beitrag illustriert, wie durch eine konsequente Ausrichtung der Wertsch{\"o}pfungsprozesse auf die zentralen Kundenbed{\"u}rfnisse ein Dreifacheffekt erzielt werden kann: Nachhaltig erh{\"o}hte Kundenzufriedenheit, gesteigerte Effizienz und eine Differenzierung im Wettbewerb.}, language = {de} } @article{CzarneckiGeierPflug2017, author = {Czarnecki, Christian and Geier, Jari and Pflug, Karolin}, title = {Netzvirtualisierung durch SDN und NFV : Unternehmensprozesse auf dem Pr{\"u}fstand}, series = {LANline}, journal = {LANline}, publisher = {AWI Aktuelles Wissen Verl.-Ges.}, address = {Trostberg}, issn = {0942-4172}, year = {2017}, abstract = {Kundenanforderungen an Netzwerke haben sich in den vergangenen Jahren stark ver{\"a}ndert. Mit NFV und SDN sind Unternehmen technisch in der Lage, diesen gerecht zu werden. Die Provider stehen jedoch vor großen Herausforderungen: Insbesondere Produkte und Prozesse m{\"u}ssen angepasst und agiler werden, um die St{\"a}rken von NFV und SDN zum Kundenvorteil auszuspielen.}, language = {de} } @article{BensbergAuthCzarnecki2020, author = {Bensberg, Frank and Auth, Gunnar and Czarnecki, Christian}, title = {Literaturanalyse mit Text Mining}, series = {E-Learning and Education (eleed)}, journal = {E-Learning and Education (eleed)}, number = {13}, editor = {Haake, J{\"o}rg M.}, publisher = {FernUniversit{\"a}t in Hagen}, address = {Hagen}, issn = {1860-7470}, pages = {1 -- 13}, year = {2020}, abstract = {Die Durchf{\"u}hrung einer systematischen Literaturrecherche ist eine zentrale Kompetenz wissenschaftlichen Arbeitens und bildet daher einen festen Ausbildungsbestandteil von Bachelor- und Masterstudieng{\"a}ngen. In entsprechenden Lehrveranstaltungen werden Studierende zwar mit den grundlegenden Hilfsmitteln zur Suche und Verwaltung von Literatur vertraut gemacht, allerdings werden die Potenziale textanalytischer Methoden und Anwendungssysteme (Text Mining, Text Analytics) dabei zumeist nicht abgedeckt. Folglich werden Datenkompetenzen, die zur systemgest{\"u}tzten Analyse und Erschließung von Literaturdaten erforderlich sind, nicht hinreichend ausgepr{\"a}gt. Um diese Kompetenzl{\"u}cke zu adressieren, ist an der Hochschule Osnabr{\"u}ck eine Lehrveranstaltung konzipiert und projektorientiert umgesetzt worden, die sich insbesondere an Studierende wirtschaftswissenschaftlicher Studieng{\"a}nge richtet. Dieser Beitrag dokumentiert die fachliche sowie technische Ausgestaltung dieser Veranstaltung und zeigt Potenziale f{\"u}r die k{\"u}nftige Weiterentwicklung auf.}, language = {de} } @article{HoerenbaumLaumannProkop2016, author = {H{\"o}renbaum, Christoph and Laumann, J{\"o}rg and Prokop, Ines}, title = {Zur Anwendung des Eurocode 3 Teil 1-2 f{\"u}r die Heißbemessung und Anregungen f{\"u}r dessen Novellierung}, series = {Stahlbau}, volume = {85}, journal = {Stahlbau}, number = {6}, publisher = {Ernst \& Sohn GmbH}, address = {Berlin}, issn = {1437-1049}, doi = {10.1002/stab.201610382}, pages = {429 -- 434}, year = {2016}, abstract = {Die Eurocodes werden bis zum Jahr 2020 im Europ{\"a}ischen Komitee f{\"u}r Normung (CEN), Technisches Komitee TC 250, {\"u}berarbeitet. 
In Vorbereitung auf die Eurocode-Novellierung haben engagierte Ingenieure im Rahmen der Initiative PraxisRegeln Bau (PRB) die f{\"u}r die praktische Anwendung h{\"a}ufig genutzten Teile des Eurocode 3 untersucht. Mit dem Ziel, die Praxistauglichkeit des Eurocode 3 f{\"u}r die Heißbemessung zu verbessern, wurden die bestehende Norm EN 1993 Teil 1-2 insbesondere in Bezug auf die Anwenderfreundlichkeit analysiert und Vorschl{\"a}ge f{\"u}r die europ{\"a}ische Novellierung erarbeitet. Die Analysen zeigen, dass durch Umstrukturierungen und durch die Einf{\"u}hrung von Tabellen die Verst{\"a}ndlichkeit und Anwenderfreundlichkeit der Regeln f{\"u}r die Heißbemessung bedeutend erh{\"o}ht werden k{\"o}nnen.}, language = {de} } @article{KindmannKrausLaumannetal.2022, author = {Kindmann, Rolf and Kraus, Matthias and Laumann, J{\"o}rg and Vette, Jan}, title = {Verallgemeinerte Berechnungsmethode f{\"u}r in Beton eingespannte Stahlprofile - Einspanntiefen, Tragf{\"a}higkeitsnachweise und Bemessungshilfen}, series = {Stahlbau}, volume = {93}, journal = {Stahlbau}, number = {Early View}, publisher = {Ernst \& Sohn GmbH}, address = {Berlin}, issn = {1437-1049}, doi = {10.1002/stab.202200024}, pages = {1 -- 25}, year = {2022}, abstract = {St{\"u}tzen und Tr{\"a}ger aus Stahlprofilen k{\"o}nnen in Fundamente oder W{\"a}nde aus Stahlbeton einbetoniert werden. Diese Anschl{\"u}sse wirken in der Regel wie Einspannungen, die eine ausreichende Einspanntiefe erfordern. Im Folgenden wird eine verallgemeinerte Berechnungsmethode f{\"u}r in Stahlbetonkonstruktionen eingespannte Stahlprofile aus gewalzten I-Profilen, geschweißten I-Profilen, runden Hohlprofilen, eckigen Hohlprofilen und einzelligen Kastenquerschnitten vorgestellt. F{\"u}r Beanspruchungen infolge einachsiger Biegung um die starke und schwache Profilachse werden der profilabh{\"a}ngige Ansatz der Betondruckspannungen im Einspannbereich und die Ermittlung der Einspanntiefe behandelt. Unter Ber{\"u}cksichtigung der Normalkraft werden an den maßgebenden Stellen Tragf{\"a}higkeitsnachweise f{\"u}r die Stahlprofile gef{\"u}hrt. Als Erg{\"a}nzung zu den Berechnungsformeln werden Bemessungshilfen zur Verf{\"u}gung gestellt, die die Wahl der mitwirkenden Breiten und der Einspanntiefen erleichtert.}, language = {de} } @article{EmigHebelSchwark2022, author = {Emig, J. and Hebel, Christoph and Schwark, A.}, title = {Einsatzbereiche f{\"u}r Verkehrsnachfragemodelle}, series = {Straßenverkehrstechnik}, volume = {66}, journal = {Straßenverkehrstechnik}, number = {10}, publisher = {Kirschbaum Verlag GmbH}, address = {Bonn}, issn = {0039-2219}, doi = {10.53184/SVT10-2022-2}, pages = {727 -- 736}, year = {2022}, abstract = {In der Praxis bestehen vielf{\"a}ltige Einsatzbereiche f{\"u}r Verkehrsnachfragemodelle. Mit ihnen k{\"o}nnen Kenngr{\"o}ßen des Verkehrsangebots und der Verkehrsnachfrage f{\"u}r den heutigen Zustand wie auch f{\"u}r zuk{\"u}nftige Zust{\"a}nde bereitgestellt werden, um so die Grundlagen f{\"u}r verkehrsplanerische Entscheidungen zu liefern. Die neuen „Empfehlungen zum Einsatz von Verkehrsnachfragemodellen f{\"u}r den Personenverkehr" (EVNM-PV) (FGSV 2022) veranschaulichen anhand von typischen Planungsaufgaben, welche differenzierten Anforderungen daraus f{\"u}r die Modellkonzeption und -erstellung resultieren. 
Vor dem Hintergrund der konkreten Aufgabenstellung sowie deren spezifischer planerischer Anforderungen bildet die abzuleitende Modellspezifikation die verabredete Grundlage zwischen Auftraggeber und Modellersteller f{\"u}r die konkrete inhaltliche, fachliche Ausgestaltung des Verkehrsmodells.}, language = {de} } @article{HahnHebelManz2022, author = {Hahn, W. and Hebel, Christoph and Manz, W.}, title = {Die neuen Empfehlungen f{\"u}r Verkehrsnachfragemodellierung im Personenverkehr}, series = {Straßenverkehrstechnik}, volume = {66}, journal = {Straßenverkehrstechnik}, number = {10}, publisher = {Kirschbaum Verlag GmbH}, address = {Bonn}, issn = {0039-2219}, doi = {10.53184/SVT10-2022-1}, pages = {721 -- 726}, year = {2022}, abstract = {Die neu erschienenen „Empfehlungen zum Einsatz von Verkehrsnachfragemodellen f{\"u}r den Personenverkehr" liefern erstmals als Empfehlungspapier der Forschungsgesellschaft f{\"u}r Straßen- und Verkehrswesen einen umfassenden {\"U}berblick zu den verschiedenen Aspekten der Modellierung und geben dem Fachplaner konkrete Hilfestellung f{\"u}r die Konzeption von Nachfragemodellen. Das Empfehlungspapier zielt unter anderem darauf ab, die Erwartungen und das Anspruchsniveau in Hinblick auf Sachgerechtigkeit der Modelle, die erzielbare Modellqualit{\"a}t und den Detaillierungsgrad der Modellaussagen zu harmonisieren.}, language = {de} } @article{CzarneckiWinkelmannSpiliopoulou2010, author = {Czarnecki, Christian and Winkelmann, Axel and Spiliopoulou, Myra}, title = {Services in electronic telecommunication markets: a framework for planning the virtualization of processes}, series = {Electronic Markets}, volume = {20}, journal = {Electronic Markets}, number = {3-4}, publisher = {Springer}, address = {Berlin}, issn = {1422-8890}, doi = {10.1007/s12525-010-0045-8}, pages = {197 -- 207}, year = {2010}, abstract = {The potential of electronic markets in enabling innovative product bundles through flexible and sustainable partnerships is not yet fully exploited in the telecommunication industry. One reason is that bundling requires seamless de-assembling and re-assembling of business processes, whilst processes in telecommunication companies are often product-dependent and hard to virtualize. We propose a framework for the planning of the virtualization of processes, intended to assist the decision maker in prioritizing the processes to be virtualized: (a) we transfer the virtualization pre-requisites stated by the Process Virtualization Theory in the context of customer-oriented processes in the telecommunication industry and assess their importance in this context, (b) we derive IT-oriented requirements for the removal of virtualization barriers and highlight their demand on changes at different levels of the organization.
We present a first evaluation of our approach in a case study and report on lessons learned and further steps to be performed.}, language = {en} } @article{CzarneckiSpiliopoulou2012, author = {Czarnecki, Christian and Spiliopoulou, Myra}, title = {A holistic framework for the implementation of a next generation network}, series = {International Journal of Business Information Systems}, volume = {9}, journal = {International Journal of Business Information Systems}, number = {4}, publisher = {Inderscience Enterprises}, address = {Olney, Bucks}, issn = {1746-0972}, doi = {10.1504/IJBIS.2012.046291}, pages = {385 -- 401}, year = {2012}, abstract = {As the potential of a next generation network (NGN) is recognised, telecommunication companies consider switching to it. Although the implementation of an NGN seems to be merely a modification of the network infrastructure, it may trigger or require changes in the whole company, because it builds upon the separation between service and transport, a flexible bundling of services to products and the streamlining of the IT infrastructure. We propose a holistic framework, structured into the layers 'strategy', 'processes' and 'information systems' and incorporate into each layer all concepts necessary for the implementation of an NGN, as well as the alignment of these concepts. As a first proof-of-concept for our framework we have performed a case study on the introduction of NGN in a large telecommunication company; we show that our framework captures all topics that are affected by an NGN implementation.}, language = {en} } @article{CzarneckiWinkelmannSpiliopoulou2013, author = {Czarnecki, Christian and Winkelmann, Axel and Spiliopoulou, Myra}, title = {Referenzprozessabl{\"a}ufe f{\"u}r Telekommunikationsunternehmen: Eine Erweiterung des eTOM-Modells}, series = {Wirtschaftsinformatik}, volume = {55}, journal = {Wirtschaftsinformatik}, number = {2}, publisher = {Springer Fachmedien}, address = {Wiesbaden}, issn = {1861-8936}, doi = {10.1007/s11576-013-0351-9}, pages = {83 -- 97}, year = {2013}, abstract = {Der Telekommunikationsmarkt erf{\"a}hrt substanzielle Ver{\"a}nderungen. Neue Gesch{\"a}ftsmodelle, innovative Dienstleistungen und Technologien erfordern Reengineering, Transformation und Prozessstandardisierung. Mit der Enhanced Telecom Operation Map (eTOM) bietet das TM Forum ein international anerkanntes de facto Referenz-Prozess-Framework basierend auf spezifischen Anforderungen und Auspr{\"a}gungen der Telekommunikationsindustrie an. Allerdings enth{\"a}lt dieses Referenz-Framework nur eine hierarchische Sammlung von Prozessen auf unterschiedlichen Abstraktionsebenen. Eine Kontrollsicht verstanden als sequenzielle Anordnung von Aktivit{\"a}ten und daraus resultierend ein realer Prozessablauf fehlt ebenso wie eine Ende-zu-Ende-Sicht auf den Kunden. In diesem Artikel erweitern wir das eTOM-Referenzmodell durch Referenzprozessabl{\"a}ufe, in welchen wir das Wissen {\"u}ber Prozesse in Telekommunikationsunternehmen abstrahieren und generalisieren. Durch die Referenzprozessabl{\"a}ufe werden Unternehmen bei dem strukturierten und transparenten (Re-)Design ihrer Prozesse unterst{\"u}tzt. Wir demonstrieren die Anwendbarkeit und N{\"u}tzlichkeit unserer Referenzprozessabl{\"a}ufe in zwei Fallstudien und evaluieren diese anhand von Kriterien f{\"u}r die Bewertung von Referenzmodellen. Die Referenzprozessabl{\"a}ufe wurden vom TM Forum in den Standard aufgenommen und als Teil von eTOM Version 9 ver{\"o}ffentlicht. 
Dar{\"u}ber hinaus diskutieren wir die Komponenten unseres Ansatzes, die auch außerhalb der Telekommunikationsindustrie angewandt werden k{\"o}nnen.}, language = {de} } @article{BensbergAuthCzarnecki2018, author = {Bensberg, Frank and Auth, Gunnar and Czarnecki, Christian}, title = {Einsatz von Text Analytics zur Unterst{\"u}tzung literaturintensiver Forschungsprozesse: Konzeption, Realisierung und Lessons Learned}, series = {Anwendungen und Konzepte der Wirtschaftsinformatik}, volume = {2018}, journal = {Anwendungen und Konzepte der Wirtschaftsinformatik}, number = {8}, publisher = {AKWI}, address = {Luzern}, issn = {2296-4592}, doi = {10.26034/lu.akwi.2018.3221}, pages = {1 -- 6}, year = {2018}, abstract = {Das anhaltende Wachstum wissenschaftlicher Ver{\"o}ffentlichungen wirft die Fragestellung auf, wie Literaturanalysen im Rahmen von Forschungsprozessen digitalisiert und somit produktiver realisiert werden k{\"o}nnen. Insbesondere in informationstechnischen Fachgebieten ist die Forschungspraxis durch ein rasant wachsendes Publikationsaufkommen gekennzeichnet. Infolgedessen bietet sich der Einsatz von Methoden der Textanalyse (Text Analytics) an, die Textdaten automatisch vorbereiten und verarbeiten k{\"o}nnen. Erkenntnisse entstehen dabei aus Analysen von Wortarten und Subgruppen, Korrelations- sowie Zeitreihenanalysen. Dieser Beitrag stellt die Konzeption und Realisierung eines Prototypen vor, mit dem Anwender bibliographische Daten aus der etablierten Literaturdatenbank EBSCO Discovery Service mithilfe textanalytischer Methoden erschließen k{\"o}nnen. Der Prototyp basiert auf dem Analysesystem IBM Watson Explorer, das Hochschulen lizenzkostenfrei zur Verf{\"u}gung steht. Potenzielle Adressaten des Prototypen sind Forschungseinrichtungen, Beratungsunternehmen sowie Entscheidungstr{\"a}ger in Politik und Unternehmenspraxis.}, language = {de} } @article{CzarneckiBensbergAuth2019, author = {Czarnecki, Christian and Bensberg, Frank and Auth, Gunnar}, title = {Die Rolle von Softwarerobotern f{\"u}r die zuk{\"u}nftige Arbeitswelt}, series = {HMD Praxis der Wirtschaftsinformatik}, volume = {56}, journal = {HMD Praxis der Wirtschaftsinformatik}, number = {4}, publisher = {Springer}, address = {Cham}, issn = {2198-2775}, doi = {10.1365/s40702-019-00548-z}, pages = {795 -- 808}, year = {2019}, abstract = {Im Rahmen der Digitalisierung ist die zunehmende Automatisierung von bisher manuellen Prozessschritten ein Aspekt, der massive Auswirkungen auf die zuk{\"u}nftige Arbeitswelt haben wird. In diesem Kontext werden an den Einsatz von Softwarerobotern zur Prozessautomatisierung hohe Erwartungen gekn{\"u}pft. Bei den Implementierungsans{\"a}tzen wird die Diskussion aktuell insbesondere durch Robotic Process Automation (RPA) und Chatbots gepr{\"a}gt. Beide Ans{\"a}tze verfolgen das gemeinsame Ziel einer 1:1-Automatisierung von menschlichen Handlungen und dadurch ein direktes Ersetzen von Mitarbeitern durch Maschinen. Bei RPA werden Prozesse durch Softwareroboter erlernt und automatisiert ausgef{\"u}hrt. Dabei emulieren RPA-Roboter die Eingaben auf der bestehenden Pr{\"a}sentationsschicht, so dass keine {\"A}nderungen an vorhandenen Anwendungssystemen notwendig sind. Am Markt werden bereits unterschiedliche RPA-L{\"o}sungen als Softwareprodukte angeboten. Durch Chatbots werden Ein- und Ausgaben von Anwendungssystemen {\"u}ber nat{\"u}rliche Sprache realisiert. Dadurch ist die Automatisierung von unternehmensexterner Kommunikation (z. B.
mit Kunden) aber auch von unternehmensinternen Assistenzt{\"a}tigkeiten m{\"o}glich. Der Beitrag diskutiert die Auswirkungen von Softwarerobotern auf die Arbeitswelt anhand von Anwendungsbeispielen und erl{\"a}utert die unternehmensindividuelle Entscheidung {\"u}ber den Einsatz von Softwarerobotern anhand von Effektivit{\"a}ts- und Effizienzzielen.}, language = {de} } @article{CzarneckiDietze2020, author = {Czarnecki, Christian and Dietze, Christian}, title = {Gestaltungsorientierte Forschung am Beispiel der Referenzmodellierung in der Telekommunikationsindustrie}, series = {HMD Praxis der Wirtschaftsinformatik}, volume = {57}, journal = {HMD Praxis der Wirtschaftsinformatik}, number = {2}, publisher = {Springer Nature}, address = {Cham}, issn = {2198-2775}, doi = {10.1365/s40702-020-00594-y}, pages = {310 -- 323}, year = {2020}, abstract = {Am Beispiel der Telekommunikationsindustrie zeigt der Beitrag eine konkrete Ausgestaltung anwendungsorientierter Forschung, die sowohl f{\"u}r die Praxis als auch f{\"u}r die Wissenschaft nutzen- und erkenntnisbringend ist. Forschungsgegenstand sind die Referenzmodelle des Industriegremiums TM Forum, die von vielen Telekommunikationsunternehmen zur Transformation ihrer Strukturen und Systeme genutzt werden. Es wird die langj{\"a}hrige Forschungst{\"a}tigkeit bei der Weiterentwicklung und Anwendung dieser Referenzmodelle beschrieben. Dabei wird ein konsequent gestaltungsorientierter Forschungsansatz verfolgt. Das Zusammenspiel aus kontinuierlicher Weiterentwicklung in Zusammenarbeit mit einem Industriegremium und der Anwendung in vielf{\"a}ltigen Praxisprojekten f{\"u}hrt zu einer erfolgreichen Symbiose aus praktischer Nutzengenerierung sowie wissenschaftlichem Erkenntnisgewinn. Der Beitrag stellt den gew{\"a}hlten Forschungsansatz anhand konkreter Beispiele vor. Darauf basierend werden Empfehlungen und Herausforderungen f{\"u}r eine gestaltungs- und praxisorientierte Forschung diskutiert.}, language = {de} } @article{BurgerRumpfDoetal.2021, author = {Burger, Ren{\´e} and Rumpf, Jessica and Do, Xuan Tung and Monakhova, Yulia and Diehl, Bernd W. K. and Rehahn, Matthias and Schulze, Margit}, title = {Is NMR combined with multivariate regression applicable for the molecular weight determination of randomly cross-linked polymers such as lignin?}, series = {ACS Omega}, volume = {6}, journal = {ACS Omega}, number = {44}, publisher = {ACS Publications}, address = {Washington, DC}, issn = {2470-1343}, doi = {10.1021/acsomega.1c03574}, pages = {29516 -- 29524}, year = {2021}, abstract = {The molecular weight properties of lignins are one of the key elements that need to be analyzed for a successful industrial application of these promising biopolymers. In this study, the use of 1H NMR as well as diffusion-ordered spectroscopy (DOSY NMR), combined with multivariate regression methods, was investigated for the determination of the molecular weight (Mw and Mn) and the polydispersity of organosolv lignins (n = 53, Miscanthus x giganteus, Paulownia tomentosa, and Silphium perfoliatum). The suitability of the models was demonstrated by cross validation (CV) as well as by an independent validation set of samples from different biomass origins (beech wood and wheat straw). CV errors of ca. 7-9 and 14-16\% were achieved for all parameters with the models from the 1H NMR spectra and the DOSY NMR data, respectively. 
The prediction errors for the validation samples were in a similar range for the partial least squares model from the 1H NMR data and for a multiple linear regression using the DOSY NMR data. The results indicate the usefulness of NMR measurements combined with multivariate regression methods as a potential alternative to more time-consuming methods such as gel permeation chromatography.}, language = {en} } @article{MonakhovaDiehl2021, author = {Monakhova, Yulia and Diehl, Bernd W. K.}, title = {Simplification of NMR Workflows by Standardization Using 2H Integral of Deuterated Solvent as Applied to Aloe vera Preparations}, series = {Applied Magnetic Resonance}, volume = {52}, journal = {Applied Magnetic Resonance}, number = {11}, publisher = {Springer}, address = {Cham}, issn = {1613-7507}, doi = {10.1007/s00723-021-01393-4}, pages = {1591 -- 1600}, year = {2021}, abstract = {In this study, a recently proposed NMR standardization approach by 2H integral of deuterated solvent for quantitative multicomponent analysis of complex mixtures is presented. As a proof of principle, the existing NMR routine for the analysis of Aloe vera products was modified. Instead of using absolute integrals of targeted compounds and internal standard (nicotinamide) from 1H-NMR spectra, quantification was performed based on the ratio of a particular 1H-NMR compound integral and 2H-NMR signal of deuterated solvent D2O. Validation characteristics (linearity, repeatability, accuracy) were evaluated and the results showed that the method has the same precision as internal standardization in case of multicomponent screening. Moreover, a dehydration process by freeze drying is not necessary for the new routine. Now, our NMR profiling of A. vera products needs only limited sample preparation and data processing. The new standardization methodology provides an appealing alternative for multicomponent NMR screening. In general, this novel approach, using standardization by 2H integral, benefits from reduced sample preparation steps and uncertainties, and is recommended in different application areas (purity determination, forensics, pharmaceutical analysis, etc.).}, language = {en} } @article{BurmistrovaSobolevaMonakhova2021, author = {Burmistrova, Natalia A. and Soboleva, Polina M. and Monakhova, Yulia}, title = {Is infrared spectroscopy combined with multivariate analysis a promising tool for heparin authentication?}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {194}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 113811}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0731-7085}, doi = {10.1016/j.jpba.2020.113811}, year = {2021}, abstract = {The investigation of the possibility to determine various characteristics of powder heparin (n = 115) was carried out with infrared spectroscopy. The evaluation of heparin samples included several parameters such as purity grade, distributing company, animal source as well as heparin species (i.e. Na-heparin, Ca-heparin, and heparinoids). Multivariate analysis using principal component analysis (PCA), soft independent modelling of class analogy (SIMCA), and partial least squares - discriminant analysis (PLS-DA) was applied for the modelling of spectral data. Different pre-processing methods were applied to IR spectral data; multiplicative scatter correction (MSC) was chosen as the most relevant. Obtained results were confirmed by nuclear magnetic resonance (NMR) spectroscopy.
Good predictive ability of this approach demonstrates the potential of IR spectroscopy and chemometrics for screening of heparin quality. This approach, however, is designed as a screening tool and is not considered as a replacement for either of the methods required by USP and FDA.}, language = {en} } @article{MonakhovaDiehl2021, author = {Monakhova, Yulia and Diehl, Bernd W.K.}, title = {Novel approach of qNMR workflow by standardization using 2H integral: Application to any intrinsic calibration standard}, series = {Talanta}, volume = {222}, journal = {Talanta}, number = {Article number: 121504}, publisher = {Elsevier}, issn = {0039-9140}, doi = {10.1016/j.talanta.2020.121504}, year = {2021}, abstract = {Quantitative nuclear magnetic resonance (qNMR) is routinely performed by the internal or external standardization. The manuscript describes a simple alternative to these common workflows by using NMR signal of another active nucleus of calibration compound. For example, for any arbitrary compound, quantification by NMR can be based on the use of an indirect concentration referencing that relies on a solvent having both 1H and 2H signals. To perform high-quality quantification, the deuteration level of the utilized deuterated solvent has to be estimated. In this contribution the new method was applied to the determination of deuteration levels in different deuterated solvents (MeOD, ACN, CDCl3, acetone, benzene, DMSO-d6). Isopropanol-d6, which contains a defined number of deuterons and protons, was used for standardization. Validation characteristics (precision, accuracy, robustness) were calculated and the results showed that the method can be used in routine practice. Uncertainty budget was also evaluated. In general, this novel approach, using standardization by 2H integral, benefits from reduced sample preparation steps and uncertainties, and can be applied in different application areas (purity determination, forensics, pharmaceutical analysis, etc.).}, language = {en} } @article{MonakhovaSobolevaFedotovaetal.2022, author = {Monakhova, Yulia and Soboleva, Polina M. and Fedotova, Elena S. and Musina, Kristina T. and Burmistrova, Natalia A.}, title = {Quantum chemical calculations of IR spectra of heparin disaccharide subunits}, series = {Computational and Theoretical Chemistry}, volume = {1217}, journal = {Computational and Theoretical Chemistry}, number = {Article number: 113891}, publisher = {Elsevier}, address = {New York, NY}, issn = {2210-271X}, doi = {10.1016/j.comptc.2022.113891}, year = {2022}, abstract = {Heparin is a natural polysaccharide, which plays an essential role in many biological processes. Alterations in building blocks can modify biological roles of commercial heparin products, due to significant changes in the conformation of the polymer chain. The structural variability of heparin leads to difficulty in quality control using different analytical methods, including infrared (IR) spectroscopy. In this paper molecular modelling of heparin disaccharide subunits was performed using quantum chemistry. The structural and spectral parameters of these disaccharides have been calculated using RHF/6-311G. In addition, over-sulphated chondroitin sulphate disaccharide was studied as one of the most widespread contaminants of heparin. Calculated IR spectra were analyzed with respect to specific structure parameters. The IR spectroscopic fingerprint was found to be sensitive to the substitution pattern of disaccharide subunits.
Vibrational assignments of calculated spectra were correlated with experimental IR spectral bands of native heparin. Chemometrics was used to perform multivariate analysis of simulated spectral data.}, language = {en} } @article{BurgerLindnerRumpfetal.2022, author = {Burger, Ren{\´e} and Lindner, Simon and Rumpf, Jessica and Do, Xuan Tung and Diehl, Bernd W.K. and Rehahn, Matthias and Monakhova, Yulia and Schulze, Margit}, title = {Benchtop versus high field NMR: Comparable performance found for the molecular weight determination of lignin}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {212}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 114649}, publisher = {Elsevier}, address = {New York, NY}, issn = {0731-7085}, doi = {10.1016/j.jpba.2022.114649}, year = {2022}, abstract = {Lignin is a promising renewable biopolymer being investigated worldwide as an environmentally benign substitute of fossil-based aromatic compounds, e.g. for the use as an excipient with antioxidant and antimicrobial properties in drug delivery or even as active compound. For its successful implementation into process streams, a quick, easy, and reliable method is needed for its molecular weight determination. Here we present a method using 1H spectra of benchtop as well as conventional NMR systems in combination with multivariate data analysis, to determine lignin's molecular weight (Mw and Mn) and polydispersity index (PDI). A set of 36 organosolv lignin samples (from Miscanthus x giganteus, Paulownia tomentosa and Silphium perfoliatum) was used for the calibration and cross validation, and 17 samples were used as external validation set. Validation errors between 5.6\% and 12.9\% were achieved for all parameters on all NMR devices (43, 60, 500 and 600 MHz). Surprisingly, no significant difference in the performance of the benchtop and high-field devices was found. This facilitates the application of this method for determining lignin's molecular weight in an industrial environment because of the low maintenance expenditure, small footprint, ruggedness, and low cost of permanent magnet benchtop NMR systems.}, language = {en} } @article{MonakhovaDiehl2022, author = {Monakhova, Yulia and Diehl, Bernd W.K.}, title = {Multinuclear NMR screening of pharmaceuticals using standardization by 2H integral of a deuterated solvent}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {209}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 114530}, publisher = {Elsevier}, issn = {0731-7085}, doi = {10.1016/j.jpba.2021.114530}, year = {2022}, abstract = {An NMR standardization approach that uses the 2H integral of deuterated solvent for quantitative multinuclear analysis of pharmaceuticals is described. As a proof of principle, the existing NMR procedure for the analysis of heparin products according to US Pharmacopeia monograph is extended to the determination of Na+ and Cl- content in this matrix. Quantification is performed based on the ratio of a 23Na (35Cl) NMR integral and 2H NMR signal of deuterated solvent, D2O, acquired using the specific spectrometer hardware. As an alternative, the possibility of 133Cs standardization using the addition of Cs2CO3 stock solution is shown. Validation characteristics (linearity, repeatability, sensitivity) are evaluated.
A holistic NMR profiling of heparin products can now also be used for the quantitative determination of inorganic compounds in a single analytical run using a single sample. In general, the new standardization methodology provides an appealing alternative for the NMR screening of inorganic and organic components in pharmaceutical products.}, language = {en} }