@article{BaringhausGaigall2017a, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {On Hotelling's T² test in a special paired sample case}, series = {Communications in Statistics - Theory and Methods}, volume = {48}, journal = {Communications in Statistics - Theory and Methods}, number = {2}, publisher = {Taylor \& Francis}, address = {London}, issn = {1532-415X}, doi = {10.1080/03610926.2017.1408828}, pages = {257 -- 267}, year = {2017}, abstract = {In a special paired sample case, Hotelling's T² test based on the differences of the paired random vectors is the likelihood ratio test for testing the hypothesis that the paired random vectors have the same mean; with respect to a special group of affine linear transformations it is the uniformly most powerful invariant test for the general alternative of a difference in mean. We present an elementary straightforward proof of this result. The likelihood ratio test for testing the hypothesis that the covariance structure is of the assumed special form is derived and discussed. Applications to real data are given.}, language = {en} } @article{BaringhausGaigall2017b, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {Hotelling's T² tests in paired and independent survey samples: An efficiency comparison}, series = {Journal of Multivariate Analysis}, volume = {154}, journal = {Journal of Multivariate Analysis}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0047-259X}, doi = {10.1016/j.jmva.2016.11.004}, pages = {177 -- 198}, year = {2017}, abstract = {Hotelling's T² tests in paired and independent survey samples are compared using the traditional asymptotic efficiency concepts of Hodges-Lehmann, Bahadur and Pitman, as well as through criteria based on the volumes of corresponding confidence regions. Conditions characterizing the superiority of a procedure are given in terms of population canonical correlation type coefficients. Statistical tests for checking these conditions are developed. Test statistics based on the eigenvalues of a symmetrized sample cross-covariance matrix are suggested, as well as test statistics based on sample canonical correlation type coefficients.}, language = {en} } @article{BaringhausGaigall2018, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {Efficiency comparison of the Wilcoxon tests in paired and independent survey samples}, series = {Metrika}, volume = {81}, journal = {Metrika}, publisher = {Springer}, address = {Berlin}, issn = {1435-926X}, doi = {10.1007/s00184-018-0661-4}, pages = {891 -- 930}, year = {2018}, abstract = {The efficiency concepts of Bahadur and Pitman are used to compare the Wilcoxon tests in paired and independent survey samples. A comparison through the length of corresponding confidence intervals is also done. Simple conditions characterizing the dominance of a procedure are derived.
Statistical tests for checking these conditions are suggested and discussed.}, language = {en} } @article{BaringhausGaigall2019, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {On an asymptotic relative efficiency concept based on expected volumes of confidence regions}, series = {Statistics - A Journal of Theoretical and Applied Statistics}, volume = {53}, journal = {Statistics - A Journal of Theoretical and Applied Statistics}, number = {6}, publisher = {Taylor \& Francis}, address = {London}, issn = {1029-4910}, doi = {10.1080/02331888.2019.1683560}, pages = {1396 -- 1436}, year = {2019}, abstract = {The paper deals with an asymptotic relative efficiency concept for confidence regions of multidimensional parameters that is based on the expected volumes of the confidence regions. Under standard conditions the asymptotic relative efficiencies of confidence regions are seen to be certain powers of the ratio of the limits of the expected volumes. These limits are explicitly derived for confidence regions associated with certain plugin estimators, likelihood ratio tests and Wald tests. Under regularity conditions, the asymptotic relative efficiency of each of these procedures with respect to each one of its competitors is equal to 1. The results are applied to multivariate normal distributions and multinomial distributions in a fairly general setting.}, language = {en} } @article{Gaigall2019, author = {Gaigall, Daniel}, title = {On a new approach to the multi-sample goodness-of-fit problem}, series = {Communications in Statistics - Simulation and Computation}, volume = {53}, journal = {Communications in Statistics - Simulation and Computation}, number = {10}, publisher = {Taylor \& Francis}, address = {London}, issn = {1532-4141}, doi = {10.1080/03610918.2019.1618472}, pages = {2971 -- 2989}, year = {2019}, abstract = {Suppose we have k samples X₁,₁,…,X₁,ₙ₁,…,Xₖ,₁,…,Xₖ,ₙₖ with different sample sizes n₁,…,nₖ and unknown underlying distribution functions F₁,…,Fₖ as observations, plus k families of distribution functions {G₁(⋅,ϑ);ϑ∈Θ},…,{Gₖ(⋅,ϑ);ϑ∈Θ}, each indexed by elements ϑ from the same parameter set Θ. We consider the new goodness-of-fit problem of whether or not (F₁,…,Fₖ) belongs to the parametric family {(G₁(⋅,ϑ),…,Gₖ(⋅,ϑ));ϑ∈Θ}. New test statistics are presented and a parametric bootstrap procedure for the approximation of the unknown null distributions is discussed. Under regularity assumptions, it is proved that the approximation works asymptotically, and the limiting distributions of the test statistics in the null hypothesis case are determined. Simulation studies investigate the quality of the new approach for small and moderate sample sizes. Applications to real-data sets illustrate how the idea can be used for verifying model assumptions.}, language = {en} } @article{DitzhausGaigall2018, author = {Ditzhaus, Marc and Gaigall, Daniel}, title = {A consistent goodness-of-fit test for huge dimensional and functional data}, series = {Journal of Nonparametric Statistics}, volume = {30}, journal = {Journal of Nonparametric Statistics}, number = {4}, publisher = {Taylor \& Francis}, address = {Abingdon}, issn = {1029-0311}, doi = {10.1080/10485252.2018.1486402}, pages = {834 -- 859}, year = {2018}, abstract = {A nonparametric goodness-of-fit test for random variables with values in a separable Hilbert space is investigated. To verify the null hypothesis that the data come from a specific distribution, an integral type test based on a Cram{\'e}r-von-Mises statistic is suggested.
The convergence in distribution of the test statistic under the null hypothesis is proved and the test's consistency is concluded. Moreover, properties under local alternatives are discussed. Applications are given for data of huge but finite dimension and for functional data in infinite dimensional spaces. A general approach enables the treatment of incomplete data. In simulation studies the test competes with alternative proposals.}, language = {en} } @article{BaringhausGaigallThiele2018, author = {Baringhaus, Ludwig and Gaigall, Daniel and Thiele, Jan Philipp}, title = {Statistical inference for L²-distances to uniformity}, series = {Computational Statistics}, volume = {33}, journal = {Computational Statistics}, publisher = {Springer}, address = {Berlin}, issn = {1613-9658}, doi = {10.1007/s00180-018-0820-0}, pages = {1863 -- 1896}, year = {2018}, abstract = {The paper deals with the asymptotic behaviour of estimators, statistical tests and confidence intervals for L²-distances to uniformity based on the empirical distribution function, the integrated empirical distribution function and the integrated empirical survival function. Approximations of power functions, confidence intervals for the L²-distances and statistical neighbourhood-of-uniformity validation tests are obtained as main applications. The finite sample behaviour of the procedures is illustrated by a simulation study.}, language = {en} } @article{BaringhausGaigall2015, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {On an independence test approach to the goodness-of-fit problem}, series = {Journal of Multivariate Analysis}, volume = {140}, journal = {Journal of Multivariate Analysis}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0047-259X}, doi = {10.1016/j.jmva.2015.05.013}, pages = {193 -- 208}, year = {2015}, abstract = {Let X₁,…,Xₙ be independent and identically distributed random variables with distribution F. Assuming that there are measurable functions f:R²→R and g:R²→R characterizing a family F of distributions on the Borel sets of R in the way that the random variables f(X₁,X₂),g(X₁,X₂) are independent if and only if F∈F, we propose to treat the testing problem H:F∈F,K:F∉F by applying a consistent nonparametric independence test to the bivariate sample variables (f(Xᵢ,Xⱼ),g(Xᵢ,Xⱼ)),1⩽i,j⩽n,i≠j. A parametric bootstrap procedure needed to get critical values is shown to work. The consistency of the test is discussed. The power performance of the procedure is compared with that of the classical tests of Kolmogorov-Smirnov and Cram{\'e}r-von Mises in the special cases where F is the family of gamma distributions or the family of inverse Gaussian distributions.}, language = {en} } @article{Gaigall2021, author = {Gaigall, Daniel}, title = {Test for Changes in the Modeled Solvency Capital Requirement of an Internal Risk Model}, series = {ASTIN Bulletin}, volume = {51}, journal = {ASTIN Bulletin}, number = {3}, publisher = {Cambridge Univ. Press}, address = {Cambridge}, issn = {1783-1350}, doi = {10.1017/asb.2021.20}, pages = {813 -- 837}, year = {2021}, abstract = {In the context of the Solvency II directive, the operation of an internal risk model is a possible way for risk assessment and for the determination of the solvency capital requirement of an insurance company in the European Union. A Monte Carlo procedure is customary to generate a model output.
To be compliant with the directive, validation of the internal risk model is conducted on the basis of the model output. For this purpose, we suggest a new test for checking whether there is a significant change in the modeled solvency capital requirement. Asymptotic properties of the test statistic are investigated and a bootstrap approximation is justified. A simulation study investigates the performance of the test in the finite sample case and confirms the theoretical results. The internal risk model and the application of the test are illustrated in a simplified example. The method has more general usage for inference of a broad class of law-invariant and coherent risk measures on the basis of a paired sample.}, language = {en} } @article{Gaigall2020a, author = {Gaigall, Daniel}, title = {Hoeffding-Blum-Kiefer-Rosenblatt independence test statistic on partly not identically distributed data}, series = {Communications in Statistics - Theory and Methods}, volume = {51}, journal = {Communications in Statistics - Theory and Methods}, number = {12}, publisher = {Taylor \& Francis}, address = {London}, issn = {1532-415X}, doi = {10.1080/03610926.2020.1805767}, pages = {4006 -- 4028}, year = {2020}, abstract = {The established Hoeffding-Blum-Kiefer-Rosenblatt independence test statistic is investigated for partly not identically distributed data. Surprisingly, it turns out that the statistic has the well-known distribution-free limiting null distribution of the classical criterion under standard regularity conditions. An application is testing goodness-of-fit for the regression function in a nonparametric random effects meta-regression model, where the consistency is obtained as well. Simulations investigate size and power of the approach for small and moderate sample sizes. A real data example based on clinical trials illustrates how the test can be used in applications.}, language = {en} } @article{Gaigall2020b, author = {Gaigall, Daniel}, title = {Testing marginal homogeneity of a continuous bivariate distribution with possibly incomplete paired data}, series = {Metrika}, volume = {83}, journal = {Metrika}, publisher = {Springer}, issn = {1435-926X}, doi = {10.1007/s00184-019-00742-5}, pages = {437 -- 465}, year = {2020}, abstract = {We discuss the testing problem of homogeneity of the marginal distributions of a continuous bivariate distribution based on a paired sample with possibly missing components (missing completely at random). Applying the well-known two-sample Cram{\'e}r-von-Mises distance to the remaining data, we determine the limiting null distribution of our test statistic in this situation. It is seen that a new resampling approach is appropriate for the approximation of the unknown null distribution. We prove that the resulting test asymptotically reaches the significance level and is consistent. Properties of the test under local alternatives are pointed out as well. Simulations investigate the quality of the approximation and the power of the new approach in the finite sample case.
As an illustration, we apply the test to real data sets.}, language = {en} } @article{Gaigall2020c, author = {Gaigall, Daniel}, title = {Rothman-Woodroofe symmetry test statistic revisited}, series = {Computational Statistics \& Data Analysis}, volume = {142}, journal = {Computational Statistics \& Data Analysis}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0167-9473}, doi = {10.1016/j.csda.2019.106837}, pages = {Article 106837}, year = {2020}, abstract = {The Rothman-Woodroofe symmetry test statistic is revisited on the basis of independent but not necessarily identically distributed random variables. Distribution-freeness is obtained if the underlying distributions are all symmetric and continuous. The results are applied for testing symmetry in a meta-analysis random effects model. The consistency of the procedure is discussed in this situation as well. A comparison with an alternative proposal from the literature is conducted via simulations. Real data are analyzed to demonstrate how the new approach works in practice.}, language = {en} } @article{DitzhausGaigall2022, author = {Ditzhaus, Marc and Gaigall, Daniel}, title = {Testing marginal homogeneity in Hilbert spaces with applications to stock market returns}, series = {Test}, volume = {31}, journal = {Test}, publisher = {Springer}, issn = {1863-8260}, doi = {10.1007/s11749-022-00802-5}, pages = {749 -- 770}, year = {2022}, abstract = {This paper considers a paired data framework and discusses the question of marginal homogeneity of bivariate high-dimensional or functional data. The related testing problem can be embedded into a more general setting for paired random variables taking values in a general Hilbert space. To address this problem, a Cram{\'e}r-von-Mises type test statistic is applied and a bootstrap procedure is suggested to obtain critical values and finally a consistent test. The desired properties of a bootstrap test are derived: asymptotic exactness under the null hypothesis and consistency under alternatives. Simulations show the quality of the test in the finite sample case. A possible application is the comparison of two possibly dependent stock market returns based on functional data. The approach is demonstrated based on historical data for different stock market indices.}, language = {en} } @article{GaigallGerstenbergTrinh2022, author = {Gaigall, Daniel and Gerstenberg, Julian and Trinh, Thi Thu Ha}, title = {Empirical process of concomitants for partly categorial data and applications in statistics}, series = {Bernoulli}, volume = {28}, journal = {Bernoulli}, number = {2}, publisher = {International Statistical Institute}, address = {Den Haag, NL}, issn = {1573-9759}, doi = {10.3150/21-BEJ1367}, pages = {803 -- 829}, year = {2022}, abstract = {On the basis of independent and identically distributed bivariate random vectors, where the components are categorial and continuous variables, respectively, the related concomitants, also called induced order statistics, are considered. The main theoretical result is a functional central limit theorem for the empirical process of the concomitants in a triangular array setting. A natural application is hypothesis testing. An independence test and a two-sample test are investigated in detail. The fairly general setting enables limit results under local alternatives and bootstrap samples. For the comparison with existing tests from the literature, simulation studies are conducted.
The empirical results obtained confirm the theoretical findings.}, language = {en} } @article{MarinkovićFloresCalvinistiButenweg2020, author = {Marinković, Marko and Flores Calvinisti, Santiago and Butenweg, Christoph}, title = {Numerical analysis of reinforced concrete frame buildings with decoupled infill walls}, series = {Building Materials and Structures}, volume = {63}, journal = {Building Materials and Structures}, number = {4}, publisher = {Society for Materials and Structures Testing of Serbia}, address = {Belgrad}, issn = {2217-8139}, doi = {10.5937/GRMK2004013M}, pages = {13 -- 48}, year = {2020}, abstract = {Reinforced concrete (RC) buildings with masonry infill walls are widely used in many countries all over the world. Although infills are considered as non-structural elements, they significantly change the dynamic characteristics of RC frame structures during earthquake excitation. Recently, significant effort was spent on studying decoupled infills, which are isolated from the surrounding frame, usually by adding a gap between frame and infill. In this case, the frame deformation does not activate the infill wall, thus infills are not influencing the behaviour of the frame. This paper presents the results of the investigation of the behaviour of RC frame buildings with the INODIS system that decouples masonry infills from the surrounding frame. The effect of masonry infill decoupling was first investigated on a one-bay, one-storey frame. This was used as a base for a parametric study on frames with more bays and storeys, as well as on the building level. Changes in stiffness and dynamic characteristics were analysed, as well as the response under earthquake loading. A comparison with the bare frame and the traditionally infilled frame was performed. The results show that the behaviour of the decoupled infilled frames is similar to that of the bare frame, whereas the behaviour of frames with traditional infills is significantly different and demands complex numerical models. This means that if adequate decoupling is applied, design of}, language = {mul} } @article{BaringhausGaigall2022, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {A goodness-of-fit test for the compound Poisson exponential model}, series = {Journal of Multivariate Analysis}, volume = {195}, journal = {Journal of Multivariate Analysis}, number = {Article 105154}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0047-259X}, doi = {10.1016/j.jmva.2022.105154}, year = {2022}, abstract = {On the basis of bivariate data, assumed to be observations of independent copies of a random vector (S,N), we consider testing the hypothesis that the distribution of (S,N) belongs to the parametric class of distributions that arise with the compound Poisson exponential model. Typically, this model is used in stochastic hydrology, with N as the number of raindays, and S as total rainfall amount during a certain time period, or in actuarial science, with N as the number of losses, and S as total loss expenditure during a certain time period. The compound Poisson exponential model is characterized in the way that a specific transform associated with the distribution of (S,N) satisfies a certain differential equation. Mimicking the function part of this equation by substituting the empirical counterparts of the transform, we obtain an expression, the weighted integral of the square of which is used as test statistic.
We deal with two variants of the latter, one of which is invariant under scale transformations of the S-part by fixed positive constants. Critical values are obtained by using a parametric bootstrap procedure. The asymptotic behavior of the tests is discussed. A simulation study demonstrates the performance of the tests in the finite sample case. The procedure is applied to rainfall data and to an actuarial dataset. A multivariate extension is also discussed.}, language = {en} } @article{TranTrinhDaoetal.2022, author = {Tran, Ngoc Trinh and Trinh, Tu Luc and Dao, Ngoc Tien and Giap, Van Tan and Truong, Manh Khuyen and Dinh, Thuy Ha and Staat, Manfred}, title = {FEM shakedown analysis of structures under random strength with chance constrained programming}, series = {Vietnam Journal of Mechanics}, volume = {44}, journal = {Vietnam Journal of Mechanics}, number = {4}, publisher = {Vietnam Academy of Science and Technology (VAST)}, issn = {0866-7136}, doi = {10.15625/0866-7136/17943}, pages = {459 -- 473}, year = {2022}, abstract = {Direct methods, comprising limit and shakedown analysis, are a branch of computational mechanics. They play a significant role in mechanical and civil engineering design. The concept of direct methods aims to determine the ultimate load carrying capacity of structures beyond the elastic range. In practical problems, the direct methods lead to nonlinear convex optimization problems with a large number of variables and constraints. If strength and loading are random quantities, the shakedown analysis can be formulated as a stochastic programming problem. In this paper, a method called chance constrained programming is presented, which is an effective method of stochastic programming to solve shakedown analysis problems under random conditions of strength. In this study, the loading is deterministic, and the strength is a normally or lognormally distributed variable.}, language = {en} } @article{Czarnecki2015, author = {Czarnecki, Christian}, title = {Moderne Telekommunikationsprodukte erfordern standardisierte Gesch{\"a}ftsprozesse}, series = {Wirtschaft und Wissenschaft}, journal = {Wirtschaft und Wissenschaft}, number = {2}, publisher = {Deutsche Telekom AG. Fachhochschule Leipzig}, address = {Leipzig}, pages = {7 -- 7}, year = {2015}, language = {de} } @article{MuellerSeginWeigandetal.2022, author = {Mueller, Tobias and Segin, Alexander and Weigand, Christoph and Schmitt, Robert H.}, title = {Feature selection for measurement models}, series = {International journal of quality \& reliability management}, journal = {International journal of quality \& reliability management}, number = {Vol. ahead-of-print, No. ahead-of-print.}, publisher = {Emerald Group Publishing Limited}, address = {Bingley}, issn = {0265-671X}, doi = {10.1108/IJQRM-07-2021-0245}, year = {2022}, abstract = {Purpose In the determination of the measurement uncertainty, the GUM procedure requires the building of a measurement model that establishes a functional relationship between the measurand and all influencing quantities. Since the effort of modelling as well as of quantifying the measurement uncertainties depends on the number of influencing quantities considered, the aim of this study is to determine relevant influencing quantities and to remove irrelevant ones from the dataset. Design/methodology/approach In this work, it was investigated whether the effort of modelling for the determination of measurement uncertainty can be reduced by the use of feature selection (FS) methods.
For this purpose, 9 different FS methods were tested on 16 artificial test datasets, whose properties (number of data points, number of features, complexity, features with low influence and redundant features) were varied via a design of experiments. Findings Based on a success metric as well as the stability, universality and complexity of the methods, two FS methods could be identified that reliably identify relevant and irrelevant influencing quantities for a measurement model. Originality/value For the first time, FS methods were applied to datasets with properties of classical measurement processes. The simulation-based results serve as a basis for further research in the field of FS for measurement models. The identified algorithms will be applied to real measurement processes in the future.}, language = {en} } @article{RuebbelkeVoegeleGrajewskietal.2022, author = {R{\"u}bbelke, Dirk and V{\"o}gele, Stefan and Grajewski, Matthias and Zobel, Luzy}, title = {Hydrogen-based steel production and global climate protection: An empirical analysis of the potential role of a European cross border adjustment mechanism}, series = {Journal of Cleaner Production}, volume = {380}, journal = {Journal of Cleaner Production}, number = {Part 2, Art. Nr.:135040}, publisher = {Elsevier}, issn = {0959-6526}, doi = {10.1016/j.jclepro.2022.135040}, year = {2022}, abstract = {The European Union's aim to become climate neutral by 2050 necessitates ambitious efforts to reduce carbon emissions. Large reductions can be attained particularly in energy intensive sectors like iron and steel. In order to prevent the relocation of such industries outside the EU in the course of tightening environmental regulations, the establishment of a climate club jointly with other large emitters and alternatively the unilateral implementation of an international cross-border carbon tax mechanism are proposed. This article focuses on the latter option, choosing the steel sector as an example. In particular, we investigate the financial conditions under which a European cross-border mechanism is capable of protecting hydrogen-based steel production routes employed in Europe against more polluting competition from abroad. By using a floor price model, we assess the competitiveness of different steel production routes in selected countries. We evaluate the climate friendliness of steel production on the basis of specific GHG emissions. In addition, we utilize an input-output price model. It enables us to assess impacts of rising costs of steel production on commodities using steel as intermediates. Our results raise concerns that a cross-border tax mechanism will not suffice to bring about competitiveness of hydrogen-based steel production in Europe because the cost tends to remain higher than the cost of steel production in, e.g., China. Steel is a classic example of a good used mainly as an intermediate for other products. Therefore, a cross-border tax mechanism for steel will increase the price of products produced in the EU that require steel as an input. This can in turn adversely affect competitiveness of these sectors.
Hence, the effects of higher steel costs on European exports should be borne in mind and could require the cross-border adjustment mechanism to also subsidize exports.}, language = {en} } @article{CzarneckiHeuserKraemeretal.2006, author = {Czarnecki, Christian and Heuser, Marcus and Kr{\"a}mer, Christian and Stummer, Christoph and Theisinger, Felix and Wettklo, Michael}, title = {How to get more customer satisfaction : Erfolg im Wettbewerb durch kundenzentrierte Prozesse}, series = {Detecon Management Report}, journal = {Detecon Management Report}, number = {3}, publisher = {Detecon Internat. GmbH}, address = {Eschborn}, issn = {1867-3147}, pages = {29 -- 33}, year = {2006}, abstract = {Unternehmen sind in der Regel {\"u}berzeugt, dass sie die Bed{\"u}rfnisse ihrer Kunden in den Mittelpunkt stellen. Aber in der direkten Interaktion mit dem Kunden zeigen sie h{\"a}ufig Schw{\"a}chen. Der folgende Beitrag illustriert, wie durch eine konsequente Ausrichtung der Wertsch{\"o}pfungsprozesse auf die zentralen Kundenbed{\"u}rfnisse ein Dreifacheffekt erzielt werden kann: Nachhaltig erh{\"o}hte Kundenzufriedenheit, gesteigerte Effizienz und eine Differenzierung im Wettbewerb.}, language = {de} } @article{CzarneckiGeierPflug2017, author = {Czarnecki, Christian and Geier, Jari and Pflug, Karolin}, title = {Netzvirtualisierung durch SDN und NFV : Unternehmensprozesse auf dem Pr{\"u}fstand}, series = {LANline}, journal = {LANline}, publisher = {AWI Aktuelles Wissen Verl.-Ges.}, address = {Trostberg}, issn = {0942-4172}, year = {2017}, abstract = {Kundenanforderungen an Netzwerke haben sich in den vergangenen Jahren stark ver{\"a}ndert. Mit NFV und SDN sind Unternehmen technisch in der Lage, diesen gerecht zu werden. Die Provider stehen jedoch vor großen Herausforderungen: Insbesondere Produkte und Prozesse m{\"u}ssen angepasst und agiler werden, um die St{\"a}rken von NFV und SDN zum Kundenvorteil auszuspielen.}, language = {de} } @article{BensbergAuthCzarnecki2020, author = {Bensberg, Frank and Auth, Gunnar and Czarnecki, Christian}, title = {Literaturanalyse mit Text Mining}, series = {E-Learning and Education (eleed)}, journal = {E-Learning and Education (eleed)}, number = {13}, editor = {Haake, J{\"o}rg M.}, publisher = {FernUniversit{\"a}t in Hagen}, address = {Hagen}, issn = {1860-7470}, pages = {1 -- 13}, year = {2020}, abstract = {Die Durchf{\"u}hrung einer systematischen Literaturrecherche ist eine zentrale Kompetenz wissenschaftlichen Arbeitens und bildet daher einen festen Ausbildungsbestandteil von Bachelor- und Masterstudieng{\"a}ngen. In entsprechenden Lehrveranstaltungen werden Studierende zwar mit den grundlegenden Hilfsmitteln zur Suche und Verwaltung von Literatur vertraut gemacht, allerdings werden die Potenziale textanalytischer Methoden und Anwendungssysteme (Text Mining, Text Analytics) dabei zumeist nicht abgedeckt. Folglich werden Datenkompetenzen, die zur systemgest{\"u}tzten Analyse und Erschließung von Literaturdaten erforderlich sind, nicht hinreichend ausgepr{\"a}gt. Um diese Kompetenzl{\"u}cke zu adressieren, ist an der Hochschule Osnabr{\"u}ck eine Lehrveranstaltung konzipiert und projektorientiert umgesetzt worden, die sich insbesondere an Studierende wirtschaftswissenschaftlicher Studieng{\"a}nge richtet. 
Dieser Beitrag dokumentiert die fachliche sowie technische Ausgestaltung dieser Veranstaltung und zeigt Potenziale f{\"u}r die k{\"u}nftige Weiterentwicklung auf.}, language = {de} } @article{HoerenbaumLaumannProkop2016, author = {H{\"o}renbaum, Christoph and Laumann, J{\"o}rg and Prokop, Ines}, title = {Zur Anwendung des Eurocode 3 Teil 1-2 f{\"u}r die Heißbemessung und Anregungen f{\"u}r dessen Novellierung}, series = {Stahlbau}, volume = {85}, journal = {Stahlbau}, number = {6}, publisher = {Ernst \& Sohn GmbH}, address = {Berlin}, issn = {1437-1049}, doi = {10.1002/stab.201610382}, pages = {429 -- 434}, year = {2016}, abstract = {Die Eurocodes werden bis zum Jahr 2020 im Europ{\"a}ischen Komitee f{\"u}r Normung (CEN), Technisches Komitee TC 250, {\"u}berarbeitet. In Vorbereitung auf die Eurocode-Novellierung haben engagierte Ingenieure im Rahmen der Initiative PraxisRegeln Bau (PRB) die f{\"u}r die praktische Anwendung h{\"a}ufig genutzten Teile des Eurocode 3 untersucht. Mit dem Ziel, die Praxistauglichkeit des Eurocode 3 f{\"u}r die Heißbemessung zu verbessern, wurden die bestehende Norm EN 1993 Teil 1-2 insbesondere in Bezug auf die Anwenderfreundlichkeit analysiert und Vorschl{\"a}ge f{\"u}r die europ{\"a}ische Novellierung erarbeitet. Die Analysen zeigen, dass durch Umstrukturierungen und durch die Einf{\"u}hrung von Tabellen die Verst{\"a}ndlichkeit und Anwenderfreundlichkeit der Regeln f{\"u}r die Heißbemessung bedeutend erh{\"o}ht werden k{\"o}nnen.}, language = {de} } @article{KindmannKrausLaumannetal.2022, author = {Kindmann, Rolf and Kraus, Matthias and Laumann, J{\"o}rg and Vette, Jan}, title = {Verallgemeinerte Berechnungsmethode f{\"u}r in Beton eingespannte Stahlprofile - Einspanntiefen, Tragf{\"a}higkeitsnachweise und Bemessungshilfen}, series = {Stahlbau}, volume = {93}, journal = {Stahlbau}, number = {Early View}, publisher = {Ernst \& Sohn GmbH}, address = {Berlin}, issn = {1437-1049}, doi = {10.1002/stab.202200024}, pages = {1 -- 25}, year = {2022}, abstract = {St{\"u}tzen und Tr{\"a}ger aus Stahlprofilen k{\"o}nnen in Fundamente oder W{\"a}nde aus Stahlbeton einbetoniert werden. Diese Anschl{\"u}sse wirken in der Regel wie Einspannungen, die eine ausreichende Einspanntiefe erfordern. Im Folgenden wird eine verallgemeinerte Berechnungsmethode f{\"u}r in Stahlbetonkonstruktionen eingespannte Stahlprofile aus gewalzten I-Profilen, geschweißten I-Profilen, runden Hohlprofilen, eckigen Hohlprofilen und einzelligen Kastenquerschnitten vorgestellt. F{\"u}r Beanspruchungen infolge einachsiger Biegung um die starke und schwache Profilachse werden der profilabh{\"a}ngige Ansatz der Betondruckspannungen im Einspannbereich und die Ermittlung der Einspanntiefe behandelt. Unter Ber{\"u}cksichtigung der Normalkraft werden an den maßgebenden Stellen Tragf{\"a}higkeitsnachweise f{\"u}r die Stahlprofile gef{\"u}hrt. Als Erg{\"a}nzung zu den Berechnungsformeln werden Bemessungshilfen zur Verf{\"u}gung gestellt, die die Wahl der mitwirkenden Breiten und der Einspanntiefen erleichtern.}, language = {de} } @article{EmigHebelSchwark2022, author = {Emig, J.
and Hebel, Christoph and Schwark, A.}, title = {Einsatzbereiche f{\"u}r Verkehrsnachfragemodelle}, series = {Straßenverkehrstechnik}, volume = {66}, journal = {Straßenverkehrstechnik}, number = {10}, publisher = {Kirschbaum Verlag GmbH}, address = {Bonn}, issn = {0039-2219}, doi = {10.53184/SVT10-2022-2}, pages = {727 -- 736}, year = {2022}, abstract = {In der Praxis bestehen vielf{\"a}ltige Einsatzbereiche f{\"u}r Verkehrsnachfragemodelle. Mit ihnen k{\"o}nnen Kenngr{\"o}ßen des Verkehrsangebots und der Verkehrsnachfrage f{\"u}r den heutigen Zustand wie auch f{\"u}r zuk{\"u}nftige Zust{\"a}nde bereitgestellt werden, um so die Grundlagen f{\"u}r verkehrsplanerische Entscheidungen zu liefern. Die neuen „Empfehlungen zum Einsatz von Verkehrsnachfragemodellen f{\"u}r den Personenverkehr" (EVNM-PV) (FGSV 2022) veranschaulichen anhand von typischen Planungsaufgaben, welche differenzierten Anforderungen daraus f{\"u}r die Modellkonzeption und -erstellung resultieren. Vor dem Hintergrund der konkreten Aufgabenstellung sowie deren spezifischer planerischer Anforderungen bildet die abzuleitende Modellspezifikation die verabredete Grundlage zwischen Auftraggeber und Modellersteller f{\"u}r die konkrete inhaltliche, fachliche Ausgestaltung des Verkehrsmodells.}, language = {de} } @article{HahnHebelManz2022, author = {Hahn, W. and Hebel, Christoph and Manz, W.}, title = {Die neuen Empfehlungen f{\"u}r Verkehrsnachfragemodellierung im Personenverkehr}, series = {Straßenverkehrstechnik}, volume = {66}, journal = {Straßenverkehrstechnik}, number = {10}, publisher = {Kirschbaum Verlag GmbH}, address = {Bonn}, issn = {0039-2219}, doi = {10.53184/SVT10-2022-1}, pages = {721 -- 726}, year = {2022}, abstract = {Die neu erschienenen „Empfehlungen zum Einsatz von Verkehrsnachfragemodellen f{\"u}r den Personenverkehr" liefern erstmals als Empfehlungspapier der Forschungsgesellschaft f{\"u}r Straßen- und Verkehrswesen einen umfassenden {\"U}berblick zu den verschiedenen Aspekten der Modellierung und geben dem Fachplaner konkrete Hilfestellung f{\"u}r die Konzeption von Nachfragemodellen. Das Empfehlungspapier zielt unter anderem darauf ab, die Erwartungen und das Anspruchsniveau in Hinblick auf Sachgerechtigkeit der Modelle, die erzielbare Modellqualit{\"a}t und den Detaillierungsgrad der Modellaussagen zu harmonisieren.}, language = {de} } @article{CzarneckiWinkelmannSpiliopoulou2010, author = {Czarnecki, Christian and Winkelmann, Axel and Spiliopoulou, Myra}, title = {Services in electronic telecommunication markets: a framework for planning the virtualization of processes}, series = {Electronic Markets}, volume = {20}, journal = {Electronic Markets}, number = {3-4}, publisher = {Springer}, address = {Berlin}, issn = {1422-8890}, doi = {10.1007/s12525-010-0045-8}, pages = {197 -- 207}, year = {2010}, abstract = {The potential of electronic markets in enabling innovative product bundles through flexible and sustainable partnerships is not yet fully exploited in the telecommunication industry. One reason is that bundling requires seamless de-assembling and re-assembling of business processes, whilst processes in telecommunication companies are often product-dependent and hard to virtualize.
We propose a framework for the planning of the virtualization of processes, intended to assist the decision maker in prioritizing the processes to be virtualized: (a) we transfer the virtualization pre-requisites stated by the Process Virtualization Theory in the context of customer-oriented processes in the telecommunication industry and assess their importance in this context, (b) we derive IT-oriented requirements for the removal of virtualization barriers and highlight their demand on changes at different levels of the organization. We present a first evaluation of our approach in a case study and report on lessons learned and further steps to be performed.}, language = {en} } @article{CzarneckiSpiliopoulou2012, author = {Czarnecki, Christian and Spiliopoulou, Myra}, title = {A holistic framework for the implementation of a next generation network}, series = {International Journal of Business Information Systems}, volume = {9}, journal = {International Journal of Business Information Systems}, number = {4}, publisher = {Inderscience Enterprises}, address = {Olney, Bucks}, issn = {1746-0972}, doi = {10.1504/IJBIS.2012.046291}, pages = {385 -- 401}, year = {2012}, abstract = {As the potential of a next generation network (NGN) is recognised, telecommunication companies consider switching to it. Although the implementation of an NGN seems to be merely a modification of the network infrastructure, it may trigger or require changes in the whole company, because it builds upon the separation between service and transport, a flexible bundling of services to products and the streamlining of the IT infrastructure. We propose a holistic framework, structured into the layers 'strategy', 'processes' and 'information systems' and incorporate into each layer all concepts necessary for the implementation of an NGN, as well as the alignment of these concepts. As a first proof-of-concept for our framework we have performed a case study on the introduction of NGN in a large telecommunication company; we show that our framework captures all topics that are affected by an NGN implementation.}, language = {en} } @article{CzarneckiWinkelmannSpiliopoulou2013, author = {Czarnecki, Christian and Winkelmann, Axel and Spiliopoulou, Myra}, title = {Referenzprozessabl{\"a}ufe f{\"u}r Telekommunikationsunternehmen: Eine Erweiterung des eTOM-Modells}, series = {Wirtschaftsinformatik}, volume = {55}, journal = {Wirtschaftsinformatik}, number = {2}, publisher = {Springer Fachmedien}, address = {Wiesbaden}, issn = {1861-8936}, doi = {10.1007/s11576-013-0351-9}, pages = {83 -- 97}, year = {2013}, abstract = {Der Telekommunikationsmarkt erf{\"a}hrt substanzielle Ver{\"a}nderungen. Neue Gesch{\"a}ftsmodelle, innovative Dienstleistungen und Technologien erfordern Reengineering, Transformation und Prozessstandardisierung. Mit der Enhanced Telecom Operation Map (eTOM) bietet das TM Forum ein international anerkanntes de facto Referenz-Prozess-Framework basierend auf spezifischen Anforderungen und Auspr{\"a}gungen der Telekommunikationsindustrie an. Allerdings enth{\"a}lt dieses Referenz-Framework nur eine hierarchische Sammlung von Prozessen auf unterschiedlichen Abstraktionsebenen. Eine Kontrollsicht verstanden als sequenzielle Anordnung von Aktivit{\"a}ten und daraus resultierend ein realer Prozessablauf fehlt ebenso wie eine Ende-zu-Ende-Sicht auf den Kunden. 
In diesem Artikel erweitern wir das eTOM-Referenzmodell durch Referenzprozessabl{\"a}ufe, in welchen wir das Wissen {\"u}ber Prozesse in Telekommunikationsunternehmen abstrahieren und generalisieren. Durch die Referenzprozessabl{\"a}ufe werden Unternehmen bei dem strukturierten und transparenten (Re-)Design ihrer Prozesse unterst{\"u}tzt. Wir demonstrieren die Anwendbarkeit und N{\"u}tzlichkeit unserer Referenzprozessabl{\"a}ufe in zwei Fallstudien und evaluieren diese anhand von Kriterien f{\"u}r die Bewertung von Referenzmodellen. Die Referenzprozessabl{\"a}ufe wurden vom TM Forum in den Standard aufgenommen und als Teil von eTOM Version 9 ver{\"o}ffentlicht. Dar{\"u}ber hinaus diskutieren wir die Komponenten unseres Ansatzes, die auch außerhalb der Telekommunikationsindustrie angewandt werden k{\"o}nnen.}, language = {de} } @article{BensbergAuthCzarnecki2018, author = {Bensberg, Frank and Auth, Gunnar and Czarnecki, Christian}, title = {Einsatz von Text Analytics zur Unterst{\"u}tzung literaturintensiver Forschungsprozesse: Konzeption, Realisierung und Lessons Learned}, series = {Anwendungen und Konzepte der Wirtschaftsinformatik}, volume = {2018}, journal = {Anwendungen und Konzepte der Wirtschaftsinformatik}, number = {8}, publisher = {AKWI}, address = {Luzern}, issn = {2296-4592}, doi = {10.26034/lu.akwi.2018.3221}, pages = {1 -- 6}, year = {2018}, abstract = {Das anhaltende Wachstum wissenschaftlicher Ver{\"o}ffentlichungen wirft die Fragestellung auf, wie Literaturanalysen im Rahmen von Forschungsprozessen digitalisiert und somit produktiver realisiert werden k{\"o}nnen. Insbesondere in informationstechnischen Fachgebieten ist die Forschungspraxis durch ein rasant wachsendes Publikationsaufkommen gekennzeichnet. Infolgedessen bietet sich der Einsatz von Methoden der Textanalyse (Text Analytics) an, die Textdaten automatisch vorbereiten und verarbeiten k{\"o}nnen. Erkenntnisse entstehen dabei aus Analysen von Wortarten und Subgruppen, Korrelations- sowie Zeitreihenanalysen. Dieser Beitrag stellt die Konzeption und Realisierung eines Prototyps vor, mit dem Anwender bibliographische Daten aus der etablierten Literaturdatenbank EBSCO Discovery Service mithilfe textanalytischer Methoden erschließen k{\"o}nnen. Der Prototyp basiert auf dem Analysesystem IBM Watson Explorer, das Hochschulen lizenzkostenfrei zur Verf{\"u}gung steht. Potenzielle Adressaten des Prototyps sind Forschungseinrichtungen, Beratungsunternehmen sowie Entscheidungstr{\"a}ger in Politik und Unternehmenspraxis.}, language = {de} } @article{CzarneckiBensbergAuth2019, author = {Czarnecki, Christian and Bensberg, Frank and Auth, Gunnar}, title = {Die Rolle von Softwarerobotern f{\"u}r die zuk{\"u}nftige Arbeitswelt}, series = {HMD Praxis der Wirtschaftsinformatik}, volume = {56}, journal = {HMD Praxis der Wirtschaftsinformatik}, number = {4}, publisher = {Springer}, address = {Cham}, issn = {2198-2775}, doi = {10.1365/s40702-019-00548-z}, pages = {795 -- 808}, year = {2019}, abstract = {Im Rahmen der Digitalisierung ist die zunehmende Automatisierung von bisher manuellen Prozessschritten ein Aspekt, der massive Auswirkungen auf die zuk{\"u}nftige Arbeitswelt haben wird. In diesem Kontext werden an den Einsatz von Softwarerobotern zur Prozessautomatisierung hohe Erwartungen gekn{\"u}pft. Bei den Implementierungsans{\"a}tzen wird die Diskussion aktuell insbesondere durch Robotic Process Automation (RPA) und Chatbots gepr{\"a}gt.
Beide Ans{\"a}tze verfolgen das gemeinsame Ziel einer 1:1-Automatisierung von menschlichen Handlungen und dadurch ein direktes Ersetzen von Mitarbeitern durch Maschinen. Bei RPA werden Prozesse durch Softwareroboter erlernt und automatisiert ausgef{\"u}hrt. Dabei emulieren RPA-Roboter die Eingaben auf der bestehenden Pr{\"a}sentationsschicht, so dass keine {\"A}nderungen an vorhandenen Anwendungssystemen notwendig sind. Am Markt werden bereits unterschiedliche RPA-L{\"o}sungen als Softwareprodukte angeboten. Durch Chatbots werden Ein- und Ausgaben von Anwendungssystemen {\"u}ber nat{\"u}rliche Sprache realisiert. Dadurch ist die Automatisierung von unternehmensexterner Kommunikation (z. B. mit Kunden) aber auch von unternehmensinternen Assistenzt{\"a}tigkeiten m{\"o}glich. Der Beitrag diskutiert die Auswirkungen von Softwarerobotern auf die Arbeitswelt anhand von Anwendungsbeispielen und erl{\"a}utert die unternehmensindividuelle Entscheidung {\"u}ber den Einsatz von Softwarerobotern anhand von Effektivit{\"a}ts- und Effizienzzielen.}, language = {de} } @article{CzarneckiDietze2020, author = {Czarnecki, Christian and Dietze, Christian}, title = {Gestaltungsorientierte Forschung am Beispiel der Referenzmodellierung in der Telekommunikationsindustrie}, series = {HMD Praxis der Wirtschaftsinformatik}, volume = {57}, journal = {HMD Praxis der Wirtschaftsinformatik}, number = {2}, publisher = {Springer Nature}, address = {Cham}, issn = {2198-2775}, doi = {10.1365/s40702-020-00594-y}, pages = {310 -- 323}, year = {2020}, abstract = {Am Beispiel der Telekommunikationsindustrie zeigt der Beitrag eine konkrete Ausgestaltung anwendungsorientierter Forschung, die sowohl f{\"u}r die Praxis als auch f{\"u}r die Wissenschaft nutzen- und erkenntnisbringend ist. Forschungsgegenstand sind die Referenzmodelle des Industriegremiums TM Forum, die von vielen Telekommunikationsunternehmen zur Transformation ihrer Strukturen und Systeme genutzt werden. Es wird die langj{\"a}hrige Forschungst{\"a}tigkeit bei der Weiterentwicklung und Anwendung dieser Referenzmodelle beschrieben. Dabei wird ein konsequent gestaltungsorientierter Forschungsansatz verfolgt. Das Zusammenspiel aus kontinuierlicher Weiterentwicklung in Zusammenarbeit mit einem Industriegremium und der Anwendung in vielf{\"a}ltigen Praxisprojekten f{\"u}hrt zu einer erfolgreichen Symbiose aus praktischer Nutzengenerierung sowie wissenschaftlichem Erkenntnisgewinn. Der Beitrag stellt den gew{\"a}hlten Forschungsansatz anhand konkreter Beispiele vor. Darauf basierend werden Empfehlungen und Herausforderungen f{\"u}r eine gestaltungs- und praxisorientierte Forschung diskutiert.}, language = {de} } @article{BurgerRumpfDoetal.2021, author = {Burger, Ren{\´e} and Rumpf, Jessica and Do, Xuan Tung and Monakhova, Yulia and Diehl, Bernd W. K. and Rehahn, Matthias and Schulze, Margit}, title = {Is NMR combined with multivariate regression applicable for the molecular weight determination of randomly cross-linked polymers such as lignin?}, series = {ACS Omega}, volume = {6}, journal = {ACS Omega}, number = {44}, publisher = {ACS Publications}, address = {Washington, DC}, issn = {2470-1343}, doi = {10.1021/acsomega.1c03574}, pages = {29516 -- 29524}, year = {2021}, abstract = {The molecular weight properties of lignins are one of the key elements that need to be analyzed for a successful industrial application of these promising biopolymers. 
In this study, the use of 1H NMR as well as diffusion-ordered spectroscopy (DOSY NMR), combined with multivariate regression methods, was investigated for the determination of the molecular weight (Mw and Mn) and the polydispersity of organosolv lignins (n = 53, Miscanthus x giganteus, Paulownia tomentosa, and Silphium perfoliatum). The suitability of the models was demonstrated by cross validation (CV) as well as by an independent validation set of samples from different biomass origins (beech wood and wheat straw). CV errors of ca. 7-9 and 14-16\% were achieved for all parameters with the models from the 1H NMR spectra and the DOSY NMR data, respectively. The prediction errors for the validation samples were in a similar range for the partial least squares model from the 1H NMR data and for a multiple linear regression using the DOSY NMR data. The results indicate the usefulness of NMR measurements combined with multivariate regression methods as a potential alternative to more time-consuming methods such as gel permeation chromatography.}, language = {en} } @article{MonakhovaDiehl2021a, author = {Monakhova, Yulia and Diehl, Bernd W. K.}, title = {Simplification of NMR Workflows by Standardization Using 2H Integral of Deuterated Solvent as Applied to Aloe vera Preparations}, series = {Applied Magnetic Resonance}, volume = {52}, journal = {Applied Magnetic Resonance}, number = {11}, publisher = {Springer}, address = {Cham}, issn = {1613-7507}, doi = {10.1007/s00723-021-01393-4}, pages = {1591 -- 1600}, year = {2021}, abstract = {In this study, a recently proposed NMR standardization approach by 2H integral of deuterated solvent for quantitative multicomponent analysis of complex mixtures is presented. As a proof of principle, the existing NMR routine for the analysis of Aloe vera products was modified. Instead of using absolute integrals of targeted compounds and internal standard (nicotinamide) from 1H-NMR spectra, quantification was performed based on the ratio of a particular 1H-NMR compound integral and 2H-NMR signal of deuterated solvent D2O. Validation characteristics (linearity, repeatability, accuracy) were evaluated and the results showed that the method has the same precision as internal standardization in case of multicomponent screening. Moreover, a dehydration process by freeze drying is not necessary for the new routine. Now, our NMR profiling of A. vera products needs only limited sample preparation and data processing. The new standardization methodology provides an appealing alternative for multicomponent NMR screening. In general, this novel approach, using standardization by 2H integral, benefits from reduced sample preparation steps and uncertainties, and is recommended in different application areas (purity determination, forensics, pharmaceutical analysis, etc.).}, language = {en} } @article{BurmistrovaSobolevaMonakhova2021, author = {Burmistrova, Natalia A. and Soboleva, Polina M. and Monakhova, Yulia}, title = {Is infrared spectroscopy combined with multivariate analysis a promising tool for heparin authentication?}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {194}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 113811}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0731-7085}, doi = {10.1016/j.jpba.2020.113811}, year = {2021}, abstract = {The investigation of the possibility to determine various characteristics of powder heparin (n = 115) was carried out with infrared spectroscopy.
The evaluation of heparin samples included several parameters such as purity grade, distributing company, animal source as well as heparin species (i.e. Na-heparin, Ca-heparin, and heparinoids). Multivariate analysis using principal component analysis (PCA), soft independent modelling of class analogy (SIMCA), and partial least squares - discriminant analysis (PLS-DA) was applied for the modelling of spectral data. Different pre-processing methods were applied to IR spectral data; multiplicative scatter correction (MSC) was chosen as the most relevant. Obtained results were confirmed by nuclear magnetic resonance (NMR) spectroscopy. Good predictive ability of this approach demonstrates the potential of IR spectroscopy and chemometrics for screening of heparin quality. This approach, however, is designed as a screening tool and is not considered as a replacement for either of the methods required by USP and FDA.}, language = {en} } @article{MonakhovaDiehl2021b, author = {Monakhova, Yulia and Diehl, Bernd W.K.}, title = {Novel approach of qNMR workflow by standardization using 2H integral: Application to any intrinsic calibration standard}, series = {Talanta}, volume = {222}, journal = {Talanta}, number = {Article number: 121504}, publisher = {Elsevier}, issn = {0039-9140}, doi = {10.1016/j.talanta.2020.121504}, year = {2021}, abstract = {Quantitative nuclear magnetic resonance (qNMR) is routinely performed by internal or external standardization. The manuscript describes a simple alternative to these common workflows by using the NMR signal of another active nucleus of the calibration compound. For example, for any arbitrary compound, quantification by NMR can be based on the use of an indirect concentration referencing that relies on a solvent having both 1H and 2H signals. To perform high-quality quantification, the deuteration level of the utilized deuterated solvent has to be estimated. In this contribution the new method was applied to the determination of deuteration levels in different deuterated solvents (MeOD, ACN, CDCl3, acetone, benzene, DMSO-d6). Isopropanol-d6, which contains a defined number of deuterons and protons, was used for standardization. Validation characteristics (precision, accuracy, robustness) were calculated and the results showed that the method can be used in routine practice. Uncertainty budget was also evaluated. In general, this novel approach, using standardization by 2H integral, benefits from reduced sample preparation steps and uncertainties, and can be applied in different application areas (purity determination, forensics, pharmaceutical analysis, etc.).}, language = {en} } @article{MonakhovaSobolevaFedotovaetal.2022, author = {Monakhova, Yulia and Soboleva, Polina M. and Fedotova, Elena S. and Musina, Kristina T. and Burmistrova, Natalia A.}, title = {Quantum chemical calculations of IR spectra of heparin disaccharide subunits}, series = {Computational and Theoretical Chemistry}, volume = {1217}, journal = {Computational and Theoretical Chemistry}, number = {Article number: 113891}, publisher = {Elsevier}, address = {New York, NY}, issn = {2210-271X}, doi = {10.1016/j.comptc.2022.113891}, year = {2022}, abstract = {Heparin is a natural polysaccharide, which plays an essential role in many biological processes. Alterations in building blocks can modify biological roles of commercial heparin products, due to significant changes in the conformation of the polymer chain.
The structural variability of heparin leads to difficulties in quality control using different analytical methods, including infrared (IR) spectroscopy. In this paper molecular modelling of heparin disaccharide subunits was performed using quantum chemistry. The structural and spectral parameters of these disaccharides have been calculated using RHF/6-311G. In addition, over-sulphated chondroitin sulphate disaccharide was studied as one of the most widespread contaminants of heparin. Calculated IR spectra were analyzed with respect to specific structure parameters. The IR spectroscopic fingerprint was found to be sensitive to the substitution pattern of the disaccharide subunits. Vibrational assignments of calculated spectra were correlated with experimental IR spectral bands of native heparin. Chemometrics was used to perform multivariate analysis of the simulated spectral data.}, language = {en} } @article{BurgerLindnerRumpfetal.2022, author = {Burger, Ren{\'e} and Lindner, Simon and Rumpf, Jessica and Do, Xuan Tung and Diehl, Bernd W.K. and Rehahn, Matthias and Monakhova, Yulia and Schulze, Margit}, title = {Benchtop versus high field NMR: Comparable performance found for the molecular weight determination of lignin}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {212}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 114649}, publisher = {Elsevier}, address = {New York, NY}, issn = {0731-7085}, doi = {10.1016/j.jpba.2022.114649}, year = {2022}, abstract = {Lignin is a promising renewable biopolymer being investigated worldwide as an environmentally benign substitute of fossil-based aromatic compounds, e.g. for the use as an excipient with antioxidant and antimicrobial properties in drug delivery or even as an active compound. For its successful implementation into process streams, a quick, easy, and reliable method is needed for its molecular weight determination. Here we present a method using 1H spectra of benchtop as well as conventional NMR systems in combination with multivariate data analysis to determine lignin's molecular weight (Mw and Mn) and polydispersity index (PDI). A set of 36 organosolv lignin samples (from Miscanthus x giganteus, Paulownia tomentosa and Silphium perfoliatum) was used for the calibration and cross validation, and 17 samples were used as an external validation set. Validation errors between 5.6\% and 12.9\% were achieved for all parameters on all NMR devices (43, 60, 500 and 600 MHz). Surprisingly, no significant difference in the performance of the benchtop and high-field devices was found. This facilitates the application of this method for determining lignin's molecular weight in an industrial environment because of the low maintenance expenditure, small footprint, ruggedness, and low cost of permanent magnet benchtop NMR systems.}, language = {en} } @article{MonakhovaDiehl2022, author = {Monakhova, Yulia and Diehl, Bernd W.K.}, title = {Multinuclear NMR screening of pharmaceuticals using standardization by 2H integral of a deuterated solvent}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {209}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 114530}, publisher = {Elsevier}, issn = {0731-7085}, doi = {10.1016/j.jpba.2021.114530}, year = {2022}, abstract = {An NMR standardization approach that uses the 2H integral of a deuterated solvent for quantitative multinuclear analysis of pharmaceuticals is described.
As a proof of principle, the existing NMR procedure for the analysis of heparin products according to the US Pharmacopeia monograph is extended to the determination of Na+ and Cl- content in this matrix. Quantification is performed based on the ratio of the 23Na (35Cl) NMR integral to the 2H NMR signal of the deuterated solvent, D2O, acquired using specific spectrometer hardware. As an alternative, the possibility of 133Cs standardization using the addition of a Cs2CO3 stock solution is shown. Validation characteristics (linearity, repeatability, sensitivity) are evaluated. Holistic NMR profiling of heparin products can now also be used for the quantitative determination of inorganic compounds in a single analytical run using a single sample. In general, the new standardization methodology provides an appealing alternative for the NMR screening of inorganic and organic components in pharmaceutical products.}, language = {en} } @article{LindnerBurgerRutledgeetal.2022, author = {Lindner, Simon and Burger, Ren{\´e} and Rutledge, Douglas N. and Do, Xuan Tung and Rumpf, Jessica and Diehl, Bernd W. K. and Schulze, Margit and Monakhova, Yulia}, title = {Is the calibration transfer of multivariate calibration models between high- and low-field NMR instruments possible? A case study of lignin molecular weight}, series = {Analytical Chemistry}, volume = {94}, journal = {Analytical Chemistry}, number = {9}, publisher = {ACS Publications}, address = {Washington, DC}, issn = {1520-6882}, doi = {10.1021/acs.analchem.1c05125}, pages = {3997 -- 4004}, year = {2022}, abstract = {Although several successful applications of benchtop nuclear magnetic resonance (NMR) spectroscopy in quantitative mixture analysis exist, the possibility of calibration transfer remains mostly unexplored, especially between high- and low-field NMR. This study investigates for the first time the calibration transfer of partial least squares regressions [weight average molecular weight (Mw) of lignin] between high-field (600 MHz) NMR and benchtop NMR devices (43 and 60 MHz). For the transfer, piecewise direct standardization, calibration transfer based on canonical correlation analysis, and transfer via the extreme learning machine auto-encoder method are employed. Despite the immense resolution difference between high-field and low-field NMR instruments, the results demonstrate that calibration transfer from high- to low-field is feasible in the case of a physical property, namely the molecular weight, achieving validation errors close to the original calibration (down to only 1.2 times higher root mean square errors).
These results introduce new perspectives for applications of benchtop NMR, in which existing calibrations from expensive high-field instruments can be transferred to cheaper benchtop instruments to save costs.}, language = {en} } @article{Maurischat2022, author = {Maurischat, Andreas}, title = {Algebraic independence of the Carlitz period and its hyperderivatives}, series = {Journal of Number Theory}, volume = {240}, journal = {Journal of Number Theory}, publisher = {Elsevier}, address = {Orlando, Fla.}, issn = {0022-314X}, doi = {10.1016/j.jnt.2022.01.006}, pages = {145 -- 162}, year = {2022}, language = {en} } @article{KotliarOrtnerConradietal.2022, author = {Kotliar, Konstantin and Ortner, Marion and Conradi, Anna and Hacker, Patricia and Hauser, Christine and G{\"u}nthner, Roman and Moser, Michaela and Muggenthaler, Claudia and Diehl-Schmid, Janine and Priller, Josef and Schmaderer, Christoph and Grimmer, Timo}, title = {Altered retinal cerebral vessel oscillation frequencies in Alzheimer's disease compatible with impaired amyloid clearance}, series = {Neurobiology of Aging}, volume = {120}, journal = {Neurobiology of Aging}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0197-4580}, doi = {10.1016/j.neurobiolaging.2022.08.012}, pages = {117 -- 127}, year = {2022}, abstract = {Retinal vessels are similar to cerebral vessels in their structure and function. Moderately low oscillation frequencies of around 0.1 Hz have been reported as the driving force for paravascular drainage in gray matter in mice and are known as the frequencies of lymphatic vessels in humans. We aimed to elucidate whether retinal vessel oscillations are altered in Alzheimer's disease (AD) at the stage of dementia or mild cognitive impairment (MCI). Seventeen patients with mild-to-moderate dementia due to AD (ADD), 23 patients with MCI due to AD, and 18 cognitively healthy controls (HC) were examined using the Dynamic Retinal Vessel Analyzer. Oscillatory temporal changes of retinal vessel diameters were evaluated using mathematical signal analysis. Especially at moderately low frequencies around 0.1 Hz, arterial oscillations in ADD and MCI significantly prevailed over HC oscillations and correlated with disease severity. The pronounced retinal arterial vasomotion at moderately low frequencies in the ADD and MCI groups would be compatible with the view of a compensatory upregulation of paravascular drainage in AD and would strengthen the amyloid clearance hypothesis.}, language = {en} } @article{ColomboDriraFrotscheretal.2022, author = {Colombo, Daniele and Drira, Slah and Frotscher, Ralf and Staat, Manfred}, title = {An element-based formulation for ES-FEM and FS-FEM models for implementation in standard solid mechanics finite element codes for 2D and 3D static analysis}, series = {International Journal for Numerical Methods in Engineering}, volume = {124}, journal = {International Journal for Numerical Methods in Engineering}, number = {2}, publisher = {Wiley}, address = {Chichester}, issn = {1097-0207}, doi = {10.1002/nme.7126}, pages = {402 -- 433}, year = {2022}, abstract = {Edge-based and face-based smoothed finite element methods (ES-FEM and FS-FEM, respectively) are modified versions of the finite element method that achieve more accurate results and reduce sensitivity to mesh distortion, at least for linear elements. These properties make the two methods very attractive.
However, their implementation in a standard finite element code is nontrivial because it requires heavy and extensive modifications to the code architecture. In this article, we present an element-based formulation of the ES-FEM and FS-FEM methods that allows the two methods to be implemented in a standard finite element code with no modifications to its architecture. Moreover, the element-based formulation makes it easy to handle any type of element, especially in 3D models, where, to the best of the authors' knowledge, only tetrahedral elements are used in the FS-FEM applications found in the literature. Shape functions for non-simplex 3D elements are proposed in order to apply FS-FEM to any standard finite element.}, language = {en} } @article{BhattaraiMayStaatetal.2022, author = {Bhattarai, Aroj and May, Charlotte Anabell and Staat, Manfred and Kowalczyk, Wojciech and Tran, Thanh Ngoc}, title = {Layer-specific damage modeling of porcine large intestine under biaxial tension}, series = {Bioengineering}, volume = {9}, journal = {Bioengineering}, number = {10, Early Access}, publisher = {MDPI}, address = {Basel}, issn = {2306-5354}, doi = {10.3390/bioengineering9100528}, pages = {1 -- 17}, year = {2022}, abstract = {The mechanical behavior of the large intestine beyond the ultimate stress has never been investigated. Stretching beyond the ultimate stress may drastically impair the tissue microstructure, which consequently weakens its healthy-state functions of absorption, temporary storage, and transportation for defecation. Because of its closely similar microstructure and function to that of humans, biaxial tensile experiments on the porcine large intestine have been performed in this study. In this paper, we report the hyperelastic characterization of the large intestine based on experiments on 102 specimens. We also report the theoretical analysis of the experimental results, including an exponential damage evolution function. The fracture energies and the threshold stresses are set as damage material parameters for the longitudinal muscular, the circumferential muscular, and the submucosal collagenous layers. A biaxial tensile simulation of a linear brick element has been performed to validate the applicability of the estimated material parameters. The model successfully simulates the biomechanical response of the large intestine under physiological and non-physiological loads.}, language = {en} } @article{PhilippEfthimiouPaganoetal.2022, author = {Mohr, Philipp and Efthimiou, Nikos and Pagano, Fiammetta and Kratochwil, Nicolaus and Pizzichemi, Marco and Tsoumpas, Charalampos and Auffray, Etiennette and Ziemons, Karl}, title = {Image reconstruction analysis for positron emission tomography with heterostructured scintillators}, series = {IEEE Transactions on Radiation and Plasma Medical Sciences}, volume = {7}, journal = {IEEE Transactions on Radiation and Plasma Medical Sciences}, number = {1}, publisher = {IEEE}, address = {New York, NY}, issn = {2469-7311}, doi = {10.1109/TRPMS.2022.3208615}, pages = {41 -- 51}, year = {2022}, abstract = {The concept of structure engineering has been proposed for exploring the next generation of radiation detectors with improved performance. A TOF-PET geometry with heterostructured scintillators with a pixel size of 3.0 × 3.1 × 15 mm³ was simulated using Monte Carlo methods. The heterostructures consisted of alternating layers of BGO as a dense material with high stopping power and plastic (EJ232) as a fast light emitter.
The detector time resolution was calculated as a function of the deposited and shared energy in both materials on an event-by-event basis. While the sensitivity was reduced to 32\% for 100 μm thick plastic layers and 52\% for 50 μm, the CTR distribution improved to 204 ± 49 ps and 220 ± 41 ps, respectively, compared with the 276 ps considered for bulk BGO. The complex distribution of timing resolutions was accounted for in the reconstruction. We divided the events into three groups based on their CTR and modeled them with different Gaussian TOF kernels. On a NEMA IQ phantom, the heterostructures had better contrast recovery in early iterations. On the other hand, BGO achieved a better contrast-to-noise ratio (CNR) after the 15th iteration due to its higher sensitivity. The developed simulation and reconstruction methods constitute new tools for evaluating different detector designs with complex time responses.}, language = {en} } @article{EngelmannPourshahidiShalabyetal.2022, author = {Engelmann, Ulrich M. and Pourshahidi, Mohammad Ali and Shalaby, Ahmed and Krause, Hans-Joachim}, title = {Probing particle size dependency of frequency mixing magnetic detection with dynamic relaxation simulation}, series = {Journal of Magnetism and Magnetic Materials}, volume = {563}, journal = {Journal of Magnetism and Magnetic Materials}, number = {In progress, Art. No. 169965}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0304-8853}, doi = {10.1016/j.jmmm.2022.169965}, year = {2022}, abstract = {Biomedical applications of magnetic nanoparticles (MNP) fundamentally rely on the particles' magnetic relaxation as a response to an alternating magnetic field. The magnetic relaxation depends in a complex way on the interplay of the MNP's magnetic and physical properties with the applied field parameters. It is commonly accepted that the particle core size is a major contributor to signal generation in all the above applications; however, most MNP samples comprise a broad distribution of core sizes. Therefore, precise knowledge of the exact contribution of individual core sizes to signal generation is desirable for optimal MNP design in each application. Specifically, we present a magnetic relaxation simulation-driven analysis of experimental frequency mixing magnetic detection (FMMD) for biosensing to quantify the contributions of individual core size fractions towards signal generation. Applying our method to two different experimental MNP systems, we found the most dominant contributions to come from particles of approx. 20 nm core size in both independent MNP systems. An additional comparison between freely suspended and immobilized MNP also reveals insight into the MNP microstructure, allowing FMMD to be used for MNP characterization and its applicability in biosensing to be further fine-tuned.}, language = {en} } @article{PourshahidiEngelmannOffenhaeusseretal.2022, author = {Pourshahidi, Ali Mohammad and Engelmann, Ulrich M. and Offenh{\"a}usser, Andreas and Krause, Hans-Joachim}, title = {Resolving ambiguities in core size determination of magnetic nanoparticles from magnetic frequency mixing data}, series = {Journal of Magnetism and Magnetic Materials}, volume = {563}, journal = {Journal of Magnetism and Magnetic Materials}, number = {In progress, Art. No. 169969}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0304-8853}, doi = {10.1016/j.jmmm.2022.169969}, year = {2022}, abstract = {Frequency mixing magnetic detection (FMMD) has been widely utilized as a measurement technique in magnetic immunoassays.
It can also be used for the characterization and distinction (also known as "colourization") of different types of magnetic nanoparticles (MNPs) based on their core sizes. In a previous work, it was shown that the large particles contribute most of the FMMD signal. This leads to ambiguities in core size determination from fitting, since the contribution of the small-sized particles is almost undetectable among the strong responses from the large ones. In this work, we report on how this ambiguity can be overcome by modelling the signal intensity using the Langevin model in thermodynamic equilibrium, including a lognormal core size distribution fL(dc,d0,σ), fitted to experimentally measured FMMD data of immobilized MNPs. For each given median diameter d0, multiple best-fitting pairs of the parameters distribution width σ and number of particles Np with R² > 0.99 are extracted. By determining the samples' total iron mass, mFe, with inductively coupled plasma optical emission spectrometry (ICP-OES), we are then able to identify the one specific best-fitting pair (σ, Np) uniquely. With this additional externally measured parameter, we resolved the ambiguity in the core size distribution and determined the parameters (d0, σ, Np) directly from FMMD measurements, allowing precise MNP sample characterization.}, language = {en} } @article{SteuerDankertGilmartinMulleretal.2019, author = {Steuer-Dankert, Linda and Gilmartin, Shannon K. and Muller, Carol B. and Dungs, Carolin and Sheppard, Sheri and Leicht-Scholten, Carmen}, title = {Expanding engineering limits: a concept for socially responsible education of engineers}, series = {The International Journal of Engineering Education}, volume = {35}, journal = {The International Journal of Engineering Education}, number = {2}, issn = {0949-149X}, pages = {658 -- 673}, year = {2019}, language = {en} } @article{SteuerDankertSharmaBlecketal.2017, author = {Steuer-Dankert, Linda and Sharma, Mamta Rameshwarlal and Bleck, Wolfgang and Leicht-Scholten, Carmen}, title = {Diversity and innovation management in large research groups}, series = {International Journal of Innovation Management}, volume = {5}, journal = {International Journal of Innovation Management}, number = {2}, issn = {1757-5877}, pages = {49 -- 72}, year = {2017}, abstract = {Contemporary research appreciates a diverse workforce as a potential source of innovation. Researchers explore the fine details of why diversity management is central to generating innovations in heterogeneous research groups and how it could be effectively implemented in organizations. Complex research associations that discuss topics with a high impact on society increasingly address the necessity of establishing a diverse workforce to confront the challenges of tomorrow. Characterized by complex management structures as well as hierarchies, research associations have not been a subject of investigation until now. For this reason, the presented research project aims to develop a diversity and innovation management strategy with the ultimate goal of inducing change in the corporate culture. The proposed approach consisted of six phases; the first two phases investigated the status quo of diversity in the existing organizational structures of the member institutes and the variety of particular working cultures within the research association. The third and the fourth phases utilized qualitative and quantitative studies.
The third phase focused on the connection between the management level and diversity and innovation, the need for diversity and innovation management, and tailor-made methods for implementing them. The first three phases have been accomplished successfully; preliminary results are already available. The fourth phase will mainly focus on exploring the mind-set of the employees. The fifth phase will consolidate the findings of the first four phases into an implementable strategy. The final phase will address the implementation of this strategy in the organization. Phases 4 to 6 have not yet been undertaken.}, language = {en} }