@article{DitzhausGaigall2018, author = {Ditzhaus, Marc and Gaigall, Daniel}, title = {A consistent goodness-of-fit test for huge dimensional and functional data}, series = {Journal of Nonparametric Statistics}, volume = {30}, journal = {Journal of Nonparametric Statistics}, number = {4}, publisher = {Taylor \& Francis}, address = {Abingdon}, issn = {1029-0311}, doi = {10.1080/10485252.2018.1486402}, pages = {834 -- 859}, year = {2018}, abstract = {A nonparametric goodness-of-fit test for random variables with values in a separable Hilbert space is investigated. To verify the null hypothesis that the data come from a specific distribution, an integral type test based on a Cram{\'e}r-von-Mises statistic is suggested. The convergence in distribution of the test statistic under the null hypothesis is proved and the test's consistency is concluded. Moreover, properties under local alternatives are discussed. Applications are given for data of huge but finite dimension and for functional data in infinite dimensional spaces. A general approach enables the treatment of incomplete data. In simulation studies the test competes with alternative proposals.}, language = {en} } @article{BaringhausGaigall2019, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {On an asymptotic relative efficiency concept based on expected volumes of confidence regions}, series = {Statistics - A Journal of Theoretical and Applied Statistics}, volume = {53}, journal = {Statistics - A Journal of Theoretical and Applied Statistics}, number = {6}, publisher = {Taylor \& Francis}, address = {London}, issn = {1029-4910}, doi = {10.1080/02331888.2019.1683560}, pages = {1396 -- 1436}, year = {2019}, abstract = {The paper deals with an asymptotic relative efficiency concept for confidence regions of multidimensional parameters that is based on the expected volumes of the confidence regions. Under standard conditions the asymptotic relative efficiencies of confidence regions are seen to be certain powers of the ratio of the limits of the expected volumes. These limits are explicitly derived for confidence regions associated with certain plugin estimators, likelihood ratio tests and Wald tests. Under regularity conditions, the asymptotic relative efficiency of each of these procedures with respect to each one of its competitors is equal to 1. The results are applied to multivariate normal distributions and multinomial distributions in a fairly general setting.}, language = {en} } @article{BaringhausGaigall2015, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {On an independence test approach to the goodness-of-fit problem}, series = {Journal of Multivariate Analysis}, volume = {2015}, journal = {Journal of Multivariate Analysis}, number = {140}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0047-259X}, doi = {10.1016/j.jmva.2015.05.013}, pages = {193 -- 208}, year = {2015}, abstract = {Let X₁,…,Xₙ be independent and identically distributed random variables with distribution F. Assuming that there are measurable functions f:R²→R and g:R²→R characterizing a family F of distributions on the Borel sets of R in the way that the random variables f(X₁,X₂),g(X₁,X₂) are independent, if and only if F∈F, we propose to treat the testing problem H:F∈F,K:F∉F by applying a consistent nonparametric independence test to the bivariate sample variables (f(Xᵢ,Xⱼ),g(Xᵢ,Xⱼ)),1⩽i,j⩽n,i≠j. A parametric bootstrap procedure needed to get critical values is shown to work. The consistency of the test is discussed. 
The power performance of the procedure is compared with that of the classical tests of Kolmogorov-Smirnov and Cram{\'e}r-von Mises in the special cases where F is the family of gamma distributions or the family of inverse Gaussian distributions.}, language = {en} } @article{Gaigall2020, author = {Gaigall, Daniel}, title = {Rothman-Woodroofe symmetry test statistic revisited}, series = {Computational Statistics \& Data Analysis}, volume = {2020}, journal = {Computational Statistics \& Data Analysis}, number = {142}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0167-9473}, doi = {10.1016/j.csda.2019.106837}, pages = {Article 106837}, year = {2020}, abstract = {The Rothman-Woodroofe symmetry test statistic is revisited on the basis of independent but not necessarily identically distributed random variables. The distribution-freeness if the underlying distributions are all symmetric and continuous is obtained. The results are applied for testing symmetry in a meta-analysis random effects model. The consistency of the procedure is discussed in this situation as well. A comparison with an alternative proposal from the literature is conducted via simulations. Real data are analyzed to demonstrate how the new approach works in practice.}, language = {en} } @article{Gaigall2021, author = {Gaigall, Daniel}, title = {Test for Changes in the Modeled Solvency Capital Requirement of an Internal Risk Model}, series = {ASTIN Bulletin}, volume = {51}, journal = {ASTIN Bulletin}, number = {3}, publisher = {Cambridge Univ. Press}, address = {Cambridge}, issn = {1783-1350}, doi = {10.1017/asb.2021.20}, pages = {813 -- 837}, year = {2021}, abstract = {In the context of the Solvency II directive, the operation of an internal risk model is a possible way for risk assessment and for the determination of the solvency capital requirement of an insurance company in the European Union. A Monte Carlo procedure is customary to generate a model output. To be compliant with the directive, validation of the internal risk model is conducted on the basis of the model output. For this purpose, we suggest a new test for checking whether there is a significant change in the modeled solvency capital requirement. Asymptotic properties of the test statistic are investigated and a bootstrap approximation is justified. A simulation study investigates the performance of the test in the finite sample case and confirms the theoretical results. The internal risk model and the application of the test is illustrated in a simplified example. The method has more general usage for inference of a broad class of law-invariant and coherent risk measures on the basis of a paired sample.}, language = {en} } @article{Gaigall2020a, author = {Gaigall, Daniel}, title = {Testing marginal homogeneity of a continuous bivariate distribution with possibly incomplete paired data}, series = {Metrika}, volume = {2020}, journal = {Metrika}, number = {83}, publisher = {Springer}, issn = {1435-926X}, doi = {10.1007/s00184-019-00742-5}, pages = {437 -- 465}, year = {2020}, abstract = {We discuss the testing problem of homogeneity of the marginal distributions of a continuous bivariate distribution based on a paired sample with possibly missing components (missing completely at random). Applying the well-known two-sample Cram{\'e}r-von-Mises distance to the remaining data, we determine the limiting null distribution of our test statistic in this situation. 
It is seen that a new resampling approach is appropriate for the approximation of the unknown null distribution. We prove that the resulting test asymptotically reaches the significance level and is consistent. Properties of the test under local alternatives are pointed out as well. Simulations investigate the quality of the approximation and the power of the new approach in the finite sample case. As an illustration we apply the test to real data sets.}, language = {en} } @article{Gaigall2020b, author = {Gaigall, Daniel}, title = {Hoeffding-Blum-Kiefer-Rosenblatt independence test statistic on partly not identically distributed data}, series = {Communications in Statistics - Theory and Methods}, volume = {51}, journal = {Communications in Statistics - Theory and Methods}, number = {12}, publisher = {Taylor \& Francis}, address = {London}, issn = {1532-415X}, doi = {10.1080/03610926.2020.1805767}, pages = {4006 -- 4028}, year = {2020}, abstract = {The established Hoeffding-Blum-Kiefer-Rosenblatt independence test statistic is investigated for partly not identically distributed data. Surprisingly, it turns out that the statistic has the well-known distribution-free limiting null distribution of the classical criterion under standard regularity conditions. An application is testing goodness-of-fit for the regression function in a nonparametric random effects meta-regression model, where the consistency is obtained as well. Simulations investigate size and power of the approach for small and moderate sample sizes. A real data example based on clinical trials illustrates how the test can be used in applications.}, language = {en} } @article{GaigallGerstenbergTrinh2022, author = {Gaigall, Daniel and Gerstenberg, Julian and Trinh, Thi Thu Ha}, title = {Empirical process of concomitants for partly categorial data and applications in statistics}, series = {Bernoulli}, volume = {28}, journal = {Bernoulli}, number = {2}, publisher = {International Statistical Institute}, address = {Den Haag, NL}, issn = {1573-9759}, doi = {10.3150/21-BEJ1367}, pages = {803 -- 829}, year = {2022}, abstract = {On the basis of independent and identically distributed bivariate random vectors, where the components are categorial and continuous variables, respectively, the related concomitants, also called induced order statistic, are considered. The main theoretical result is a functional central limit theorem for the empirical process of the concomitants in a triangular array setting. A natural application is hypothesis testing. An independence test and a two-sample test are investigated in detail. The fairly general setting enables limit results under local alternatives and bootstrap samples. For the comparison with existing tests from the literature simulation studies are conducted. The empirical results obtained confirm the theoretical findings.}, language = {en} } @article{DitzhausGaigall2022, author = {Ditzhaus, Marc and Gaigall, Daniel}, title = {Testing marginal homogeneity in Hilbert spaces with applications to stock market returns}, series = {Test}, volume = {2022}, journal = {Test}, number = {31}, publisher = {Springer}, issn = {1863-8260}, doi = {10.1007/s11749-022-00802-5}, pages = {749 -- 770}, year = {2022}, abstract = {This paper considers a paired data framework and discusses the question of marginal homogeneity of bivariate high-dimensional or functional data. The related testing problem can be endowed into a more general setting for paired random variables taking values in a general Hilbert space. 
To address this problem, a Cram{\'e}r-von-Mises type test statistic is applied and a bootstrap procedure is suggested to obtain critical values and finally a consistent test. The desired properties of a bootstrap test can be derived that are asymptotic exactness under the null hypothesis and consistency under alternatives. Simulations show the quality of the test in the finite sample case. A possible application is the comparison of two possibly dependent stock market returns based on functional data. The approach is demonstrated based on historical data for different stock market indices.}, language = {en} } @article{BaringhausGaigall2022, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {A goodness-of-fit test for the compound Poisson exponential model}, series = {Journal of Multivariate Analysis}, volume = {195}, journal = {Journal of Multivariate Analysis}, number = {Article 105154}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0047-259X}, doi = {10.1016/j.jmva.2022.105154}, year = {2022}, abstract = {On the basis of bivariate data, assumed to be observations of independent copies of a random vector (S,N), we consider testing the hypothesis that the distribution of (S,N) belongs to the parametric class of distributions that arise with the compound Poisson exponential model. Typically, this model is used in stochastic hydrology, with N as the number of raindays, and S as total rainfall amount during a certain time period, or in actuarial science, with N as the number of losses, and S as total loss expenditure during a certain time period. The compound Poisson exponential model is characterized in the way that a specific transform associated with the distribution of (S,N) satisfies a certain differential equation. Mimicking the function part of this equation by substituting the empirical counterparts of the transform we obtain an expression the weighted integral of the square of which is used as test statistic. We deal with two variants of the latter, one of which being invariant under scale transformations of the S-part by fixed positive constants. Critical values are obtained by using a parametric bootstrap procedure. The asymptotic behavior of the tests is discussed. A simulation study demonstrates the performance of the tests in the finite sample case. The procedure is applied to rainfall data and to an actuarial dataset. A multivariate extension is also discussed.}, language = {en} } @article{TranTrinhDaoetal.2022, author = {Tran, Ngoc Trinh and Trinh, Tu Luc and Dao, Ngoc Tien and Giap, Van Tan and Truong, Manh Khuyen and Dinh, Thuy Ha and Staat, Manfred}, title = {FEM shakedown analysis of structures under random strength with chance constrained programming}, series = {Vietnam Journal of Mechanics}, volume = {44}, journal = {Vietnam Journal of Mechanics}, number = {4}, publisher = {Vietnam Academy of Science and Technology (VAST)}, issn = {0866-7136}, doi = {10.15625/0866-7136/17943}, pages = {459 -- 473}, year = {2022}, abstract = {Direct methods, comprising limit and shakedown analysis, are a branch of computational mechanics. They play a significant role in mechanical and civil engineering design. The concept of direct methods aims to determine the ultimate load carrying capacity of structures beyond the elastic range. In practical problems, the direct methods lead to nonlinear convex optimization problems with a large number of variables and constraints. 
If strength and loading are random quantities, the shakedown analysis can be formulated as stochastic programming problem. In this paper, a method called chance constrained programming is presented, which is an effective method of stochastic programming to solve shakedown analysis problems under random conditions of strength. In this study, the loading is deterministic, and the strength is a normally or lognormally distributed variable.}, language = {en} } @article{RoethenbacherCesariDoppleretal.2022, author = {R{\"o}thenbacher, Annika and Cesari, Matteo and Doppler, Christopher E.J. and Okkels, Niels and Willemsen, Nele and Sembowski, Nora and Seger, Aline and Lindner, Marie and Brune, Corinna and Stefani, Ambra and H{\"o}gl, Birgit and Bialonski, Stephan and Borghammer, Per and Fink, Gereon R. and Schober, Martin and Sommerauer, Michael}, title = {RBDtector: an open-source software to detect REM sleep without atonia according to visual scoring criteria}, series = {Scientific Reports}, volume = {12}, journal = {Scientific Reports}, number = {Article number: 20886}, publisher = {Springer Nature}, address = {London}, issn = {2045-2322}, doi = {10.1038/s41598-022-25163-9}, pages = {1 -- 14}, year = {2022}, abstract = {REM sleep without atonia (RSWA) is a key feature for the diagnosis of rapid eye movement (REM) sleep behaviour disorder (RBD). We introduce RBDtector, a novel open-source software to score RSWA according to established SINBAR visual scoring criteria. We assessed muscle activity of the mentalis, flexor digitorum superficialis (FDS), and anterior tibialis (AT) muscles. RSWA was scored manually as tonic, phasic, and any activity by human scorers as well as using RBDtector in 20 subjects. Subsequently, 174 subjects (72 without RBD and 102 with RBD) were analysed with RBDtector to show the algorithm's applicability. We additionally compared RBDtector estimates to a previously published dataset. RBDtector showed robust conformity with human scorings. The highest congruency was achieved for phasic and any activity of the FDS. Combining mentalis any and FDS any, RBDtector identified RBD subjects with 100\% specificity and 96\% sensitivity applying a cut-off of 20.6\%. Comparable performance was obtained without manual artefact removal. RBD subjects also showed muscle bouts of higher amplitude and longer duration. RBDtector provides estimates of tonic, phasic, and any activity comparable to human scorings. RBDtector, which is freely available, can help identify RBD subjects and provides reliable RSWA metrics.}, language = {en} } @article{MuellerSeginWeigandetal.2022, author = {Mueller, Tobias and Segin, Alexander and Weigand, Christoph and Schmitt, Robert H.}, title = {Feature selection for measurement models}, series = {International journal of quality \& reliability management}, journal = {International journal of quality \& reliability management}, number = {Vol. ahead-of-print, No. ahead-of-print.}, publisher = {Emerald Group Publishing Limited}, address = {Bingley}, issn = {0265-671X}, doi = {10.1108/IJQRM-07-2021-0245}, year = {2022}, abstract = {Purpose In the determination of the measurement uncertainty, the GUM procedure requires the building of a measurement model that establishes a functional relationship between the measurand and all influencing quantities. 
Since the effort of modelling as well as quantifying the measurement uncertainties depend on the number of influencing quantities considered, the aim of this study is to determine relevant influencing quantities and to remove irrelevant ones from the dataset. Design/methodology/approach In this work, it was investigated whether the effort of modelling for the determination of measurement uncertainty can be reduced by the use of feature selection (FS) methods. For this purpose, 9 different FS methods were tested on 16 artificial test datasets, whose properties (number of data points, number of features, complexity, features with low influence and redundant features) were varied via a design of experiments. Findings Based on a success metric, the stability, universality and complexity of the method, two FS methods could be identified that reliably identify relevant and irrelevant influencing quantities for a measurement model. Originality/value For the first time, FS methods were applied to datasets with properties of classical measurement processes. The simulation-based results serve as a basis for further research in the field of FS for measurement models. The identified algorithms will be applied to real measurement processes in the future.}, language = {en} } @article{RuebbelkeVoegeleGrajewskietal.2022, author = {R{\"u}bbelke, Dirk and V{\"o}gele, Stefan and Grajewski, Matthias and Zobel, Luzy}, title = {Hydrogen-based steel production and global climate protection: An empirical analysis of the potential role of a European cross border adjustment mechanism}, series = {Journal of Cleaner Production}, volume = {380}, journal = {Journal of Cleaner Production}, number = {Part 2, Art. Nr.:135040}, publisher = {Elsevier}, issn = {0959-6526}, doi = {10.1016/j.jclepro.2022.135040}, year = {2022}, abstract = {The European Union's aim to become climate neutral by 2050 necessitates ambitious efforts to reduce carbon emissions. Large reductions can be attained particularly in energy intensive sectors like iron and steel. In order to prevent the relocation of such industries outside the EU in the course of tightening environmental regulations, the establishment of a climate club jointly with other large emitters and alternatively the unilateral implementation of an international cross-border carbon tax mechanism are proposed. This article focuses on the latter option choosing the steel sector as an example. In particular, we investigate the financial conditions under which a European cross border mechanism is capable to protect hydrogen-based steel production routes employed in Europe against more polluting competition from abroad. By using a floor price model, we assess the competitiveness of different steel production routes in selected countries. We evaluate the climate friendliness of steel production on the basis of specific GHG emissions. In addition, we utilize an input-output price model. It enables us to assess impacts of rising cost of steel production on commodities using steel as intermediates. Our results raise concerns that a cross-border tax mechanism will not suffice to bring about competitiveness of hydrogen-based steel production in Europe because the cost tends to remain higher than the cost of steel production in e.g. China. Steel is a classic example for a good used mainly as intermediate for other products. Therefore, a cross-border tax mechanism for steel will increase the price of products produced in the EU that require steel as an input. 
This can in turn adversely affect competitiveness of these sectors. Hence, the effects of higher steel costs on European exports should be borne in mind and could require the cross-border adjustment mechanism to also subsidize exports.}, language = {en} } @article{CzarneckiWinkelmannSpiliopoulou2010, author = {Czarnecki, Christian and Winkelmann, Axel and Spiliopoulou, Myra}, title = {Services in electronic telecommunication markets: a framework for planning the virtualization of processes}, series = {Electronic Markets}, volume = {20}, journal = {Electronic Markets}, number = {3-4}, publisher = {Springer}, address = {Berlin}, issn = {1422-8890}, doi = {10.1007/s12525-010-0045-8}, pages = {197 -- 207}, year = {2010}, abstract = {The potential of electronic markets in enabling innovative product bundles through flexible and sustainable partnerships is not yet fully exploited in the telecommunication industry. One reason is that bundling requires seamless de-assembling and re-assembling of business processes, whilst processes in telecommunication companies are often product-dependent and hard to virtualize. We propose a framework for the planning of the virtualization of processes, intended to assist the decision maker in prioritizing the processes to be virtualized: (a) we transfer the virtualization pre-requisites stated by the Process Virtualization Theory in the context of customer-oriented processes in the telecommunication industry and assess their importance in this context, (b) we derive IT-oriented requirements for the removal of virtualization barriers and highlight their demand on changes at different levels of the organization. We present a first evaluation of our approach in a case study and report on lessons learned and further steps to be performed.}, language = {en} } @article{CzarneckiSpiliopoulou2012, author = {Czarnecki, Christian and Spiliopoulou, Myra}, title = {A holistic framework for the implementation of a next generation network}, series = {International Journal of Business Information Systems}, volume = {9}, journal = {International Journal of Business Information Systems}, number = {4}, publisher = {Inderscience Enterprises}, address = {Olney, Bucks}, issn = {1746-0972}, doi = {10.1504/IJBIS.2012.046291}, pages = {385 -- 401}, year = {2012}, abstract = {As the potential of a next generation network (NGN) is recognised, telecommunication companies consider switching to it. Although the implementation of an NGN seems to be merely a modification of the network infrastructure, it may trigger or require changes in the whole company, because it builds upon the separation between service and transport, a flexible bundling of services to products and the streamlining of the IT infrastructure. We propose a holistic framework, structured into the layers 'strategy', 'processes' and 'information systems' and incorporate into each layer all concepts necessary for the implementation of an NGN, as well as the alignment of these concepts. As a first proof-of-concept for our framework we have performed a case study on the introduction of NGN in a large telecommunication company; we show that our framework captures all topics that are affected by an NGN implementation.}, language = {en} } @article{BurgerRumpfDoetal.2021, author = {Burger, Ren{\'e} and Rumpf, Jessica and Do, Xuan Tung and Monakhova, Yulia and Diehl, Bernd W. K. 
and Rehahn, Matthias and Schulze, Margit}, title = {Is NMR combined with multivariate regression applicable for the molecular weight determination of randomly cross-linked polymers such as lignin?}, series = {ACS Omega}, volume = {6}, journal = {ACS Omega}, number = {44}, publisher = {ACS Publications}, address = {Washington, DC}, issn = {2470-1343}, doi = {10.1021/acsomega.1c03574}, pages = {29516 -- 29524}, year = {2021}, abstract = {The molecular weight properties of lignins are one of the key elements that need to be analyzed for a successful industrial application of these promising biopolymers. In this study, the use of 1H NMR as well as diffusion-ordered spectroscopy (DOSY NMR), combined with multivariate regression methods, was investigated for the determination of the molecular weight (Mw and Mn) and the polydispersity of organosolv lignins (n = 53, Miscanthus x giganteus, Paulownia tomentosa, and Silphium perfoliatum). The suitability of the models was demonstrated by cross validation (CV) as well as by an independent validation set of samples from different biomass origins (beech wood and wheat straw). CV errors of ca. 7-9 and 14-16\% were achieved for all parameters with the models from the 1H NMR spectra and the DOSY NMR data, respectively. The prediction errors for the validation samples were in a similar range for the partial least squares model from the 1H NMR data and for a multiple linear regression using the DOSY NMR data. The results indicate the usefulness of NMR measurements combined with multivariate regression methods as a potential alternative to more time-consuming methods such as gel permeation chromatography.}, language = {en} } @article{MonakhovaDiehl2021, author = {Monakhova, Yulia and Diehl, Bernd W. K.}, title = {Simplification of NMR Workflows by Standardization Using 2H Integral of Deuterated Solvent as Applied to Aloe vera Preparations}, series = {Applied Magnetic Resonance}, volume = {52}, journal = {Applied Magnetic Resonance}, number = {11}, publisher = {Springer}, address = {Cham}, issn = {1613-7507}, doi = {10.1007/s00723-021-01393-4}, pages = {1591 -- 1600}, year = {2021}, abstract = {In this study, a recently proposed NMR standardization approach by 2H integral of deuterated solvent for quantitative multicomponent analysis of complex mixtures is presented. As a proof of principle, the existing NMR routine for the analysis of Aloe vera products was modified. Instead of using absolute integrals of targeted compounds and internal standard (nicotinamide) from 1H-NMR spectra, quantification was performed based on the ratio of a particular 1H-NMR compound integral and 2H-NMR signal of deuterated solvent D2O. Validation characteristics (linearity, repeatability, accuracy) were evaluated and the results showed that the method has the same precision as internal standardization in case of multicomponent screening. Moreover, a dehydration process by freeze drying is not necessary for the new routine. Now, our NMR profiling of A. vera products needs only limited sample preparation and data processing. The new standardization methodology provides an appealing alternative for multicomponent NMR screening. In general, this novel approach, using standardization by 2H integral, benefits from reduced sample preparation steps and uncertainties, and is recommended in different application areas (purity determination, forensics, pharmaceutical analysis, etc.).}, language = {en} } @article{BurmistrovaSobolevaMonakhova2021, author = {Burmistrova, Natalia A. 
and Soboleva, Polina M. and Monakhova, Yulia}, title = {Is infrared spectroscopy combined with multivariate analysis a promising tool for heparin authentication?}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {194}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 113811}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0731-7085}, doi = {10.1016/j.jpba.2020.113811}, year = {2021}, abstract = {The investigation of the possibility to determine various characteristics of powder heparin (n = 115) was carried out with infrared spectroscopy. The evaluation of heparin samples included several parameters such as purity grade, distributing company, animal source as well as heparin species (i.e. Na-heparin, Ca-heparin, and heparinoids). Multivariate analysis using principal component analysis (PCA), soft independent modelling of class analogy (SIMCA), and partial least squares - discriminant analysis (PLS-DA) were applied for the modelling of spectral data. Different pre-processing methods were applied to IR spectral data; multiplicative scatter correction (MSC) was chosen as the most relevant. Obtained results were confirmed by nuclear magnetic resonance (NMR) spectroscopy. Good predictive ability of this approach demonstrates the potential of IR spectroscopy and chemometrics for screening of heparin quality. This approach, however, is designed as a screening tool and is not considered as a replacement for either of the methods required by USP and FDA.}, language = {en} } @article{MonakhovaDiehl2021a, author = {Monakhova, Yulia and Diehl, Bernd W.K.}, title = {Novel approach of qNMR workflow by standardization using 2H integral: Application to any intrinsic calibration standard}, series = {Talanta}, volume = {222}, journal = {Talanta}, number = {Article number: 121504}, publisher = {Elsevier}, issn = {0039-9140}, doi = {10.1016/j.talanta.2020.121504}, year = {2021}, abstract = {Quantitative nuclear magnetic resonance (qNMR) is routinely performed by the internal or external standardization. The manuscript describes a simple alternative to these common workflows by using NMR signal of another active nuclei of calibration compound. For example, for any arbitrary compound quantification by NMR can be based on the use of an indirect concentration referencing that relies on a solvent having both 1H and 2H signals. To perform high-quality quantification, the deuteration level of the utilized deuterated solvent has to be estimated. In this contribution the new method was applied to the determination of deuteration levels in different deuterated solvents (MeOD, ACN, CDCl3, acetone, benzene, DMSO-d6). Isopropanol-d6, which contains a defined number of deuterons and protons, was used for standardization. Validation characteristics (precision, accuracy, robustness) were calculated and the results showed that the method can be used in routine practice. Uncertainty budget was also evaluated. In general, this novel approach, using standardization by 2H integral, benefits from reduced sample preparation steps and uncertainties, and can be applied in different application areas (purity determination, forensics, pharmaceutical analysis, etc.).}, language = {en} } @article{MonakhovaSobolevaFedotovaetal.2022, author = {Monakhova, Yulia and Soboleva, Polina M. and Fedotova, Elena S. and Musina, Kristina T. 
and Burmistrova, Natalia A.}, title = {Quantum chemical calculations of IR spectra of heparin disaccharide subunits}, series = {Computational and Theoretical Chemistry}, volume = {1217}, journal = {Computational and Theoretical Chemistry}, number = {Article number: 113891}, publisher = {Elsevier}, address = {New York, NY}, issn = {2210-271X}, doi = {10.1016/j.comptc.2022.113891}, year = {2022}, abstract = {Heparin is a natural polysaccharide, which plays essential role in many biological processes. Alterations in building blocks can modify biological roles of commercial heparin products, due to significant changes in the conformation of the polymer chain. The variability structure of heparin leads to difficulty in quality control using different analytical methods, including infrared (IR) spectroscopy. In this paper molecular modelling of heparin disaccharide subunits was performed using quantum chemistry. The structural and spectral parameters of these disaccharides have been calculated using RHF/6-311G. In addition, over-sulphated chondroitin sulphate disaccharide was studied as one of the most widespread contaminants of heparin. Calculated IR spectra were analyzed with respect to specific structure parameters. IR spectroscopic fingerprint was found to be sensitive to substitution pattern of disaccharide subunits. Vibrational assignments of calculated spectra were correlated with experimental IR spectral bands of native heparin. Chemometrics was used to perform multivariate analysis of simulated spectral data.}, language = {en} } @article{BurgerLindnerRumpfetal.2022, author = {Burger, Ren{\'e} and Lindner, Simon and Rumpf, Jessica and Do, Xuan Tung and Diehl, Bernd W.K. and Rehahn, Matthias and Monakhova, Yulia and Schulze, Margit}, title = {Benchtop versus high field NMR: Comparable performance found for the molecular weight determination of lignin}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {212}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 114649}, publisher = {Elsevier}, address = {New York, NY}, issn = {0731-7085}, doi = {10.1016/j.jpba.2022.114649}, year = {2022}, abstract = {Lignin is a promising renewable biopolymer being investigated worldwide as an environmentally benign substitute of fossil-based aromatic compounds, e.g. for the use as an excipient with antioxidant and antimicrobial properties in drug delivery or even as active compound. For its successful implementation into process streams, a quick, easy, and reliable method is needed for its molecular weight determination. Here we present a method using 1H spectra of benchtop as well as conventional NMR systems in combination with multivariate data analysis, to determine lignin's molecular weight (Mw and Mn) and polydispersity index (PDI). A set of 36 organosolv lignin samples (from Miscanthus x giganteus, Paulownia tomentosa and Silphium perfoliatum) was used for the calibration and cross validation, and 17 samples were used as external validation set. Validation errors between 5.6\% and 12.9\% were achieved for all parameters on all NMR devices (43, 60, 500 and 600 MHz). Surprisingly, no significant difference in the performance of the benchtop and high-field devices was found. 
This facilitates the application of this method for determining lignin's molecular weight in an industrial environment because of the low maintenance expenditure, small footprint, ruggedness, and low cost of permanent magnet benchtop NMR systems.}, language = {en} } @article{MonakhovaDiehl2022, author = {Monakhova, Yulia and Diehl, Bernd W.K.}, title = {Multinuclear NMR screening of pharmaceuticals using standardization by 2H integral of a deuterated solvent}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {209}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 114530}, publisher = {Elsevier}, issn = {0731-7085}, doi = {10.1016/j.jpba.2021.114530}, year = {2022}, abstract = {NMR standardization approach that uses the 2H integral of deuterated solvent for quantitative multinuclear analysis of pharmaceuticals is described. As a proof of principle, the existing NMR procedure for the analysis of heparin products according to US Pharmacopeia monograph is extended to the determination of Na+ and Cl- content in this matrix. Quantification is performed based on the ratio of a 23Na (35Cl) NMR integral and 2H NMR signal of deuterated solvent, D2O, acquired using the specific spectrometer hardware. As an alternative, the possibility of 133Cs standardization using the addition of Cs2CO3 stock solution is shown. Validation characteristics (linearity, repeatability, sensitivity) are evaluated. A holistic NMR profiling of heparin products can now also be used for the quantitative determination of inorganic compounds in a single analytical run using a single sample. In general, the new standardization methodology provides an appealing alternative for the NMR screening of inorganic and organic components in pharmaceutical products.}, language = {en} } @article{LindnerBurgerRutledgeetal.2022, author = {Lindner, Simon and Burger, Ren{\'e} and Rutledge, Douglas N. and Do, Xuan Tung and Rumpf, Jessica and Diehl, Bernd W. K. and Schulze, Margit and Monakhova, Yulia}, title = {Is the calibration transfer of multivariate calibration models between high- and low-field NMR instruments possible? A case study of lignin molecular weight}, series = {Analytical chemistry}, volume = {94}, journal = {Analytical chemistry}, number = {9}, publisher = {ACS Publications}, address = {Washington, DC}, issn = {1520-6882}, doi = {10.1021/acs.analchem.1c05125}, pages = {3997 -- 4004}, year = {2022}, abstract = {Although several successful applications of benchtop nuclear magnetic resonance (NMR) spectroscopy in quantitative mixture analysis exist, the possibility of calibration transfer remains mostly unexplored, especially between high- and low-field NMR. This study investigates for the first time the calibration transfer of partial least squares regressions [weight average molecular weight (Mw) of lignin] between high-field (600 MHz) NMR and benchtop NMR devices (43 and 60 MHz). For the transfer, piecewise direct standardization, calibration transfer based on canonical correlation analysis, and transfer via the extreme learning machine auto-encoder method are employed. Despite the immense resolution difference between high-field and low-field NMR instruments, the results demonstrate that the calibration transfer from high- to low-field is feasible in the case of a physical property, namely, the molecular weight, achieving validation errors close to the original calibration (down to only 1.2 times higher root mean square errors). 
These results introduce new perspectives for applications of benchtop NMR, in which existing calibrations from expensive high-field instruments can be transferred to cheaper benchtop instruments to economize.}, language = {en} } @article{SchuellerRuhlDinstuehlerSengeretal.2022, author = {Sch{\"u}ller-Ruhl, Aaron and Dinst{\"u}hler, Leonard and Senger, Thorsten and Bergfeld, Stefan and Ingenhag, Christian and Fleischhaker, Robert}, title = {Direct fabrication of arbitrary phase masks in optical glass via ultra-short pulsed laser writing of refractive index modifications}, series = {Applied Physics B}, volume = {128}, journal = {Applied Physics B}, number = {Article number: 208}, editor = {Mackenzie, Jacob}, publisher = {Springer}, address = {Berlin}, issn = {1432-0649 (Online)}, doi = {10.1007/s00340-022-07928-2}, pages = {1 -- 11}, year = {2022}, abstract = {We study the possibility to fabricate an arbitrary phase mask in a one-step laser-writing process inside the volume of an optical glass substrate. We derive the phase mask from a Gerchberg-Saxton-type algorithm as an array and create each individual phase shift using a refractive index modification of variable axial length. We realize the variable axial length by superimposing refractive index modifications induced by an ultra-short pulsed laser at different focusing depth. Each single modification is created by applying 1000 pulses with 15 μJ pulse energy at 100 kHz to a fixed spot of 25 μm diameter and the focus is then shifted axially in steps of 10 μm. With several proof-of-principle examples, we show the feasibility of our method. In particular, we identify the induced refractive index change to about a value of Δn = 1.5⋅10⁻³. We also determine our current limitations by calculating the overlap in the form of a scalar product and we discuss possible future improvements.}, language = {en} } @article{MolinnusJanusFangetal.2022, author = {Molinnus, Denise and Janus, Kevin Alexander and Fang, Anyelina C. and Drinic, Aleksander and Achtsnicht, Stefan and K{\"o}pf, Marius and Keusgen, Michael and Sch{\"o}ning, Michael Josef}, title = {Thick-film carbon electrode deposited onto a biodegradable fibroin substrate for biosensing applications}, series = {Physica status solidi (a)}, volume = {219}, journal = {Physica status solidi (a)}, number = {23}, publisher = {Wiley-VCH}, address = {Weinheim}, issn = {1862-6319}, doi = {10.1002/pssa.202200100}, pages = {1 -- 9}, year = {2022}, abstract = {This study addresses a proof-of-concept experiment with a biocompatible screen-printed carbon electrode deposited onto a biocompatible and biodegradable substrate, which is made of fibroin, a protein derived from silk of the Bombyx mori silkworm. To demonstrate the sensor performance, the carbon electrode is functionalized as a glucose biosensor with the enzyme glucose oxidase and encapsulated with a silicone rubber to ensure biocompatibility of the contact wires. The carbon electrode is fabricated by means of thick-film technology including a curing step to solidify the carbon paste. The influence of the curing temperature and curing time on the electrode morphology is analyzed via scanning electron microscopy. The electrochemical characterization of the glucose biosensor is performed by amperometric/voltammetric measurements of different glucose concentrations in phosphate buffer. 
Herein, systematic studies at applied potentials from 500 to 1200 mV to the carbon working electrode (vs the Ag/AgCl reference electrode) allow to determine the optimal working potential. Additionally, the influence of the curing parameters on the glucose sensitivity is examined over a time period of up to 361 days. The sensor shows a negligible cross-sensitivity toward ascorbic acid, noradrenaline, and adrenaline. The developed biocompatible biosensor is highly promising for future in vivo and epidermal applications.}, language = {en} } @article{Maurischat2022, author = {Maurischat, Andreas}, title = {Algebraic independence of the Carlitz period and its hyperderivatives}, series = {Journal of Number Theory}, volume = {240}, journal = {Journal of Number Theory}, publisher = {Elsevier}, address = {Orlando, Fla.}, issn = {0022-314X}, doi = {10.1016/j.jnt.2022.01.006}, pages = {145 -- 162}, year = {2022}, language = {en} } @article{KotliarOrtnerConradietal.2022, author = {Kotliar, Konstantin and Ortner, Marion and Conradi, Anna and Hacker, Patricia and Hauser, Christine and G{\"u}nthner, Roman and Moser, Michaela and Muggenthaler, Claudia and Diehl-Schmid, Janine and Priller, Josef and Schmaderer, Christoph and Grimmer, Timo}, title = {Altered retinal cerebral vessel oscillation frequencies in Alzheimer's disease compatible with impaired amyloid clearance}, series = {Neurobiology of Aging}, volume = {120}, journal = {Neurobiology of Aging}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0197-4580}, doi = {10.1016/j.neurobiolaging.2022.08.012}, pages = {117 -- 127}, year = {2022}, abstract = {Retinal vessels are similar to cerebral vessels in their structure and function. Moderately low oscillation frequencies of around 0.1 Hz have been reported as the driving force for paravascular drainage in gray matter in mice and are known as the frequencies of lymphatic vessels in humans. We aimed to elucidate whether retinal vessel oscillations are altered in Alzheimer's disease (AD) at the stage of dementia or mild cognitive impairment (MCI). Seventeen patients with mild-to-moderate dementia due to AD (ADD); 23 patients with MCI due to AD, and 18 cognitively healthy controls (HC) were examined using Dynamic Retinal Vessel Analyzer. Oscillatory temporal changes of retinal vessel diameters were evaluated using mathematical signal analysis. Especially at moderately low frequencies around 0.1 Hz, arterial oscillations in ADD and MCI significantly prevailed over HC oscillations and correlated with disease severity. 
The pronounced retinal arterial vasomotion at moderately low frequencies in the ADD and MCI groups would be compatible with the view of a compensatory upregulation of paravascular drainage in AD and strengthen the amyloid clearance hypothesis.}, language = {en} } @article{ColomboDriraFrotscheretal.2022, author = {Colombo, Daniele and Drira, Slah and Frotscher, Ralf and Staat, Manfred}, title = {An element-based formulation for ES-FEM and FS-FEM models for implementation in standard solid mechanics finite element codes for 2D and 3D static analysis}, series = {International Journal for Numerical Methods in Engineering}, volume = {124}, journal = {International Journal for Numerical Methods in Engineering}, number = {2}, publisher = {Wiley}, address = {Chichester}, issn = {1097-0207}, doi = {10.1002/nme.7126}, pages = {402 -- 433}, year = {2022}, abstract = {Edge-based and face-based smoothed finite element methods (ES-FEM and FS-FEM, respectively) are modified versions of the finite element method allowing to achieve more accurate results and to reduce sensitivity to mesh distortion, at least for linear elements. These properties make the two methods very attractive. However, their implementation in a standard finite element code is nontrivial because it requires heavy and extensive modifications to the code architecture. In this article, we present an element-based formulation of ES-FEM and FS-FEM methods allowing to implement the two methods in a standard finite element code with no modifications to its architecture. Moreover, the element-based formulation permits to easily manage any type of element, especially in 3D models where, to the best of the authors' knowledge, only tetrahedral elements are used in FS-FEM applications found in the literature. Shape functions for non-simplex 3D elements are proposed in order to apply FS-FEM to any standard finite element.}, language = {en} } @article{BhattaraiMayStaatetal.2022, author = {Bhattarai, Aroj and May, Charlotte Anabell and Staat, Manfred and Kowalczyk, Wojciech and Tran, Thanh Ngoc}, title = {Layer-specific damage modeling of porcine large intestine under biaxial tension}, series = {Bioengineering}, volume = {9}, journal = {Bioengineering}, number = {10, Early Access}, publisher = {MDPI}, address = {Basel}, issn = {2306-5354}, doi = {10.3390/bioengineering9100528}, pages = {1 -- 17}, year = {2022}, abstract = {The mechanical behavior of the large intestine beyond the ultimate stress has never been investigated. Stretching beyond the ultimate stress may drastically impair the tissue microstructure, which consequently weakens its healthy state functions of absorption, temporary storage, and transportation for defecation. Due to closely similar microstructure and function with humans, biaxial tensile experiments on the porcine large intestine have been performed in this study. In this paper, we report hyperelastic characterization of the large intestine based on experiments in 102 specimens. We also report the theoretical analysis of the experimental results, including an exponential damage evolution function. The fracture energies and the threshold stresses are set as damage material parameters for the longitudinal muscular, the circumferential muscular and the submucosal collagenous layers. A biaxial tensile simulation of a linear brick element has been performed to validate the applicability of the estimated material parameters. 
The model successfully simulates the biomechanical response of the large intestine under physiological and non-physiological loads.}, language = {en} } @article{PhilippEfthimiouPaganoetal.2022, author = {Mohr, Philipp and Efthimiou, Nikos and Pagano, Fiammetta and Kratochwil, Nicolaus and Pizzichemi, Marco and Tsoumpas, Charalampos and Auffray, Etiennette and Ziemons, Karl}, title = {Image reconstruction analysis for positron emission tomography with heterostructured scintillators}, series = {IEEE Transactions on Radiation and Plasma Medical Sciences}, volume = {7}, journal = {IEEE Transactions on Radiation and Plasma Medical Sciences}, number = {1}, publisher = {IEEE}, address = {New York, NY}, issn = {2469-7311}, doi = {10.1109/TRPMS.2022.3208615}, pages = {41 -- 51}, year = {2022}, abstract = {The concept of structure engineering has been proposed for exploring the next generation of radiation detectors with improved performance. A TOF-PET geometry with heterostructured scintillators with a pixel size of 3.0×3.1×15 mm3 was simulated using Monte Carlo. The heterostructures consisted of alternating layers of BGO as a dense material with high stopping power and plastic (EJ232) as a fast light emitter. The detector time resolution was calculated as a function of the deposited and shared energy in both materials on an event-by-event basis. While sensitivity was reduced to 32\% for 100 μm thick plastic layers and 52\% for 50 μm, the CTR distribution improved to 204±49 ps and 220±41 ps respectively, compared to 276 ps that we considered for bulk BGO. The complex distribution of timing resolutions was accounted for in the reconstruction. We divided the events into three groups based on their CTR and modeled them with different Gaussian TOF kernels. On a NEMA IQ phantom, the heterostructures had better contrast recovery in early iterations. On the other hand, BGO achieved a better contrast to noise ratio (CNR) after the 15th iteration due to the higher sensitivity. The developed simulation and reconstruction methods constitute new tools for evaluating different detector designs with complex time responses.}, language = {en} } @article{EngelmannPourshahidiShalabyetal.2022, author = {Engelmann, Ulrich M. and Pourshahidi, Mohammad Ali and Shalaby, Ahmed and Krause, Hans-Joachim}, title = {Probing particle size dependency of frequency mixing magnetic detection with dynamic relaxation simulation}, series = {Journal of Magnetism and Magnetic Materials}, volume = {563}, journal = {Journal of Magnetism and Magnetic Materials}, number = {In progress, Art. No. 169965}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0304-8853}, doi = {10.1016/j.jmmm.2022.169965}, year = {2022}, abstract = {Biomedical applications of magnetic nanoparticles (MNP) fundamentally rely on the particles' magnetic relaxation as a response to an alternating magnetic field. The magnetic relaxation complexly depends on the interplay of MNP magnetic and physical properties with the applied field parameters. It is commonly accepted that particle core size is a major contributor to signal generation in all the above applications, however, most MNP samples comprise broad distribution spanning nm and more. Therefore, precise knowledge of the exact contribution of individual core sizes to signal generation is desired for optimal MNP design generally for each application. 
Specifically, we present a magnetic relaxation simulation-driven analysis of experimental frequency mixing magnetic detection (FMMD) for biosensing to quantify the contributions of individual core size fractions towards signal generation. Applying our method to two different experimental MNP systems, we found the most dominant contributions from approx. 20 nm sized particles in the two independent MNP systems. Additional comparison between freely suspended and immobilized MNP also reveals insight in the MNP microstructure, allowing to use FMMD for MNP characterization, as well as to further fine-tune its applicability in biosensing.}, language = {en} } @article{PourshahidiEngelmannOffenhaeusseretal.2022, author = {Pourshahidi, Ali Mohammad and Engelmann, Ulrich M. and Offenh{\"a}usser, Andreas and Krause, Hans-Joachim}, title = {Resolving ambiguities in core size determination of magnetic nanoparticles from magnetic frequency mixing data}, series = {Journal of Magnetism and Magnetic Materials}, volume = {563}, journal = {Journal of Magnetism and Magnetic Materials}, number = {In progress, Art. No. 169969}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0304-8853}, doi = {10.1016/j.jmmm.2022.169969}, year = {2022}, abstract = {Frequency mixing magnetic detection (FMMD) has been widely utilized as a measurement technique in magnetic immunoassays. It can also be used for the characterization and distinction (also known as "colourization") of different types of magnetic nanoparticles (MNPs) based on their core sizes. In a previous work, it was shown that the large particles contribute most of the FMMD signal. This leads to ambiguities in core size determination from fitting since the contribution of the small-sized particles is almost undetectable among the strong responses from the large ones. In this work, we report on how this ambiguity can be overcome by modelling the signal intensity using the Langevin model in thermodynamic equilibrium including a lognormal core size distribution fL(dc,d0,σ) fitted to experimentally measured FMMD data of immobilized MNPs. For each given median diameter d0, an ambiguous amount of best-fitting pairs of parameters distribution width σ and number of particles Np with R2 > 0.99 are extracted. By determining the samples' total iron mass, mFe, with inductively coupled plasma optical emission spectrometry (ICP-OES), we are then able to identify the one specific best-fitting pair (σ, Np) one uniquely. With this additional externally measured parameter, we resolved the ambiguity in core size distribution and determined the parameters (d0, σ, Np) directly from FMMD measurements, allowing precise MNPs sample characterization.}, language = {en} } @article{SteuerDankertGilmartinMulleretal.2019, author = {Steuer-Dankert, Linda and Gilmartin, Shannon K. and Muller, Carol B. 
and Dungs, Carolin and Sheppard, Sheri and Leicht-Scholten, Carmen}, title = {Expanding engineering limits: a concept for socially responsible education of engineers}, series = {The international journal of engineering education}, volume = {35}, journal = {The international journal of engineering education}, number = {2}, issn = {0949-149X}, pages = {658 -- 673}, year = {2019}, language = {en} } @article{SteuerDankertSharmaBlecketal.2017, author = {Steuer-Dankert, Linda and Sharma, Mamta Rameshwarlal and Bleck, Wolfgang and Leicht-Scholten, Carmen}, title = {Diversity and innovation management in large research groups}, series = {International Journal of Innovation Management}, volume = {5}, journal = {International Journal of Innovation Management}, number = {2}, issn = {1757-5877}, pages = {49 -- 72}, year = {2017}, abstract = {Contemporary research appreciates a diverse workforce as a potential source of innovation. Researchers explore the fine details of why diversity management is central for generating innovations in heterogeneous research groups and how it could be effectively implemented into organizations. Complex research associations that discuss topics with a high impact on society increasingly address the necessity of establishing a diverse workforce to confront the challenges of tomorrow. Characterized by complex management structures as well as hierarchies, research associations have not been a subject of investigation until now. For this reason, the presented research project aims to develop a diversity and innovation management strategy with the ultimate goal of inducing change in the corporate culture. The proposed approach consisted of six phases; the first two phases investigated the status quo of diversity in the existing organizational structures of member institutes and the variety of particular working cultures within the research association. The third and the fourth phases utilized qualitative and quantitative studies. The third phase focused on the connection of management level to diversity and innovation, and the need for diversity and innovation management, and tailor-made methods of implementing them. The first three phases have been accomplished successfully; preliminary results are already available. The fourth phase will mainly focus on exploring the mind-set of the employees. The fifth phase will consolidate the findings in the first four phases into an implementable strategy. The final phase will address the implementation of this strategy into the organization. Phases 4 to 6 have not yet been undertaken.}, language = {en} } @article{FalkenbergRahbaFischeretal.2022, author = {Falkenberg, Fabian and Rahba, Jade and Fischer, David and Bott, Michael and Bongaerts, Johannes and Siegert, Petra}, title = {Biochemical characterization of a novel oxidatively stable, halotolerant, and high-alkaline subtilisin from Alkalihalobacillus okhensis Kh10-101T}, series = {FEBS Open Bio}, volume = {12}, journal = {FEBS Open Bio}, number = {10}, publisher = {Wiley}, address = {Hoboken, NJ}, issn = {2211-5463}, doi = {10.1002/2211-5463.13457}, pages = {1729 -- 1746}, year = {2022}, abstract = {Halophilic and halotolerant microorganisms represent a promising source of salt-tolerant enzymes suitable for various biotechnological applications where high salt concentrations would otherwise limit enzymatic activity. 
Considering the current growing enzyme market and the need for more efficient and new biocatalysts, the present study aimed at the characterization of a high-alkaline subtilisin from Alkalihalobacillus okhensis Kh10-101T. The protease gene was cloned and expressed in Bacillus subtilis DB104. The recombinant protease SPAO with 269 amino acids belongs to the subfamily of high-alkaline subtilisins. The biochemical characteristics of purified SPAO were analyzed in comparison with subtilisin Carlsberg, Savinase, and BPN'. SPAO, a monomer with a molecular mass of 27.1 kDa, was active over a wide pH range of 6.0-12.0 and a temperature range of 20-80 °C, optimally at pH 9.0-9.5 and 55 °C. The protease is highly stable against oxidation by hydrogen peroxide, retaining 58\% residual activity when incubated at 10 °C with 5\% (v/v) H2O2 for 1 h, while being stimulated by 1\% (v/v) H2O2. Furthermore, SPAO was very stable and active at NaCl concentrations up to 5.0 m. This study demonstrates the potential of SPAO for biotechnological applications in the future.}, language = {en} } @article{MonakhovaDiehl2022, author = {Monakhova, Yulia and Diehl, Bernd W.K.}, title = {Nuclear magnetic resonance spectroscopy as an elegant tool for a complete quality control of crude heparin material}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {219}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 114915}, publisher = {Elsevier}, address = {New York, NY}, issn = {0731-7085}, doi = {10.1016/j.jpba.2022.114915}, year = {2022}, abstract = {Nuclear magnetic resonance (NMR) spectrometric methods for the quantitative analysis of pure heparin in crude heparin are proposed. For quantification, a two-step routine was developed using a USP heparin reference sample for calibration and benzoic acid as an internal standard. The method was successfully validated for its accuracy, reproducibility, and precision. The methodology was used to analyze 20 authentic porcine heparinoid samples having heparin content between 4.25 w/w \% and 64.4 w/w \%. The characterization of crude heparin products was further extended to a simultaneous analysis of these common ions: sodium, calcium, acetate and chloride. A significant, linear dependence was found between anticoagulant activity and assayed heparin content for thirteen heparinoid samples, for which reference data were available. A diffusion-ordered NMR experiment (DOSY) can be used for qualitative analysis of specific glycosaminoglycans (GAGs) in heparinoid matrices and, potentially, for quantitative prediction of the molecular weight of GAGs.
NMR spectrometry therefore represents a unique analytical method suitable for the simultaneous quantitative control of the organic and inorganic composition of crude heparin samples (especially heparin content) as well as an estimation of other physical and quality parameters (molecular weight, animal origin and activity).}, language = {en} } @article{CheenakulaHoffstadtKrafftetal.2022, author = {Cheenakula, Dheeraja and Hoffstadt, Kevin and Krafft, Simone and Reinecke, Diana and Klose, Holger and Kuperjans, Isabel and Gr{\"o}mping, Markus}, title = {Anaerobic digestion of algal-bacterial biomass of an Algal Turf Scrubber system}, series = {Biomass Conversion and Biorefinery}, volume = {13}, journal = {Biomass Conversion and Biorefinery}, publisher = {Springer}, address = {Berlin}, issn = {2190-6823}, doi = {10.1007/s13399-022-03236-z}, pages = {15 pages}, year = {2022}, abstract = {This study investigated the anaerobic digestion of an algal-bacterial biofilm grown in artificial wastewater in an Algal Turf Scrubber (ATS). The ATS system was located in a greenhouse (50°54′19ʺN, 6°24′55ʺE, Germany) and was exposed to seasonal conditions during the experimental period. The methane (CH4) potential of untreated algal-bacterial biofilm (UAB) and thermally pretreated biofilm (PAB) using different microbial inocula was determined by anaerobic batch fermentation. Methane productivity of UAB differed significantly between microbial inocula of digested wastepaper, a mixture of manure and maize silage, anaerobic sewage sludge, and percolated green waste. UAB using sewage sludge as inoculum showed the highest methane productivity. The share of methane in biogas was dependent on the inoculum. Using PAB, a strong positive impact on methane productivity was identified for the digested wastepaper (116.4\%) and a mixture of manure and maize silage (107.4\%) inocula. By contrast, the methane yield was significantly reduced for the digested anaerobic sewage sludge (50.6\%) and percolated green waste (43.5\%) inocula. To further evaluate the potential of algal-bacterial biofilm for biogas production in wastewater treatment and biogas plants in a circular bioeconomy, scale-up calculations were conducted. It was found that a 0.116 km2 ATS would be required in an average municipal wastewater treatment plant, which can be viewed as problematic in terms of space consumption. However, a substantial energy surplus (4.7-12.5 MWh a-1) can be gained through the addition of algal-bacterial biomass to the anaerobic digester of a municipal wastewater treatment plant.
Wastewater treatment and subsequent energy production through algae thus show advantages over conventional technologies.}, language = {en} } @article{Kurz2019, author = {Kurz, Melanie}, title = {Images of Sweden in Germany: from Sundborn and Bullerby to Ikea}, series = {nomad: the magazine for new design culture, business affairs and contemporary lifestyle}, journal = {nomad: the magazine for new design culture, business affairs and contemporary lifestyle}, number = {8}, issn = {2513-0714}, pages = {136 -- 147}, year = {2019}, language = {en} } @article{UysalCreutzFiratetal.2022, author = {Uysal, Karya and Creutz, Till and Firat, Ipek Seda and Artmann, Gerhard and Teusch, Nicole and Temiz Artmann, Ayseg{\"u}l}, title = {Bio-functionalized ultra-thin, large-area and waterproof silicone membranes for biomechanical cellular loading and compliance experiments}, series = {Polymers}, volume = {14}, journal = {Polymers}, number = {11}, publisher = {MDPI}, address = {Basel}, issn = {2073-4360}, pages = {2213}, year = {2022}, abstract = {Biocompatibility, flexibility and durability make polydimethylsiloxane (PDMS) membranes top candidates in biomedical applications. CellDrum technology uses large-area, <10 µm thin membranes as mechanical stress sensors of thin cell layers. For this to be successful, the properties (thickness, temperature, dust, wrinkles, etc.) must be precisely controlled. The following parameters of membrane fabrication by means of the Floating-on-Water (FoW) method were investigated: (1) PDMS volume, (2) ambient temperature, (3) membrane deflection and (4) membrane mechanical compliance. Significant differences were found between all PDMS volumes and thicknesses tested (p < 0.01). They also differed from the calculated values. At room temperatures between 22 and 26 °C, significant differences in average thickness values were found, as well as a continuous decrease in thickness within a 4 °C temperature elevation. No correlation was found between the membrane thickness groups (3-4 µm) in terms of deflection and compliance. We successfully present a fabrication method for thin bio-functionalized membranes in conjunction with a four-step quality management system. The results highlight the importance of tight regulation of production parameters through quality control. The use of membranes described here could also become the basis for material testing on thin, viscous layers such as polymers, dyes and adhesives, which goes far beyond biological applications.}, language = {en} } @article{HeuermannEmmrichBongartz2022, author = {Heuermann, Holger and Emmrich, Thomas and Bongartz, Simon}, title = {Microwave spark plug to support ignitions with high compression ratios}, series = {IEEE Transactions on Plasma Science}, journal = {IEEE Transactions on Plasma Science}, number = {Early Access}, publisher = {IEEE}, issn = {1939-9375}, doi = {10.1109/TPS.2022.3183690}, pages = {1 -- 6}, year = {2022}, abstract = {Upcoming gasoline engines should run on a large number of fuels, ranging from petrol through methanol to gas, over a wide range of compression ratios and with a homogeneous charge. In this article, the microwave (MW) spark plug, based on a high-speed frequency hopping system, is introduced as a solution that can support nitrogen compression ratios of up to 1:39 in a chamber, and more. First, an overview of the high-speed frequency hopping MW ignition and operation system as well as the large number of applications is presented.
Both give an understanding of this new base technology for MW plasma generation. The focus of the theoretical part is the explanation of the internal construction of the spark plug, of the achievable high-voltage generation, and of the high efficiency in sustaining the plasma. In detail, the development process starting with circuit simulations and ending with the numerical multiphysics field simulations is described. The concept is evaluated with a reference prototype covering the frequency range between 2.40 and 2.48 GHz and working over a large power range from 20 to 200 W. A large number of different measurements, starting with vector hot-S11 measurements and ending with combined working scenarios of high temperature, high pressure and charge motion, wind up the article. The limits for the successful pressure tests were given by the pressure chamber. Pressures ranged from 1 to 39 bar, charge motion up to 25 m/s, and temperatures from 30° to 125°.}, language = {en} } @article{EmhardtJarodzkaBrandGruweletal.2022, author = {Emhardt, Selina N. and Jarodzka, Halszka and Brand-Gruwel, Saskia and Drumm, Christian and Niehorster, Diederick C. and van Gog, Tamara}, title = {What is my teacher talking about? Effects of displaying the teacher's gaze and mouse cursor cues in video lectures on students' learning}, series = {Journal of Cognitive Psychology}, journal = {Journal of Cognitive Psychology}, publisher = {Routledge, Taylor \& Francis Group}, address = {Abingdon}, issn = {2044-5911}, doi = {10.1080/20445911.2022.2080831}, pages = {1 -- 19}, year = {2022}, abstract = {Eye movement modelling examples (EMME) are instructional videos that display a teacher's eye movements as a "gaze cursor" (e.g. a moving dot) superimposed on the learning task. This study investigated if previous findings on the beneficial effects of EMME would extend to online lecture videos and compared the effects of displaying the teacher's gaze cursor with displaying the more traditional mouse cursor as a tool to guide learners' attention. Novices (N = 124) studied a pre-recorded video lecture on how to model business processes in a 2 (mouse cursor absent/present) × 2 (gaze cursor absent/present) between-subjects design. Unexpectedly, we did not find significant effects of the presence of gaze or mouse cursors on mental effort and learning. However, participants who watched videos with the gaze cursor found it easier to follow the teacher. Overall, participants responded positively to the gaze cursor, especially when the mouse cursor was not displayed in the video.}, language = {en} } @article{Wolf2000, author = {Wolf, Martin R.}, title = {Groupware related task design}, series = {ACM SIGGROUP Bulletin}, volume = {21}, journal = {ACM SIGGROUP Bulletin}, number = {2}, issn = {2372-7403}, doi = {10.1145/605660.605662}, pages = {5 -- 8}, year = {2000}, abstract = {This report summarizes the results of a workshop on Groupware related task design which took place at the International Conference on Supporting Group Work (Group'99), Arizona, from 14th to 17th November 1999. The workshop was addressed to people from different viewpoints, backgrounds, and domains: - Researchers dealing with questions of task analysis and task modeling for Groupware applications from an academic point of view. They may contribute model-based design approaches or theoretically oriented work. - Practitioners with experience in the design and everyday use of groupware systems.
They might refer to the practical side of the topic: "real" tasks, "real" problems, "real" users, etc.}, language = {en} } @article{ChloeMalyaranCraveiroetal.2022, author = {Radermacher, Chlo{\´e} and Malyaran, Hanna and Craveiro, Rogerio Bastos and Peglow, Sarah and Behbahani, Mehdi and Pufe, Thomas and Wolf, Michael and Neuss, Sabine}, title = {Mechanical loading on cementoblasts: a mini review}, series = {Osteologie}, volume = {31}, journal = {Osteologie}, number = {2}, publisher = {Thieme}, address = {Stuttgart}, issn = {1019-1291}, doi = {10.1055/a-1826-0777}, pages = {111 -- 118}, year = {2022}, abstract = {Orthodontic treatments are accompanied by mechanical forces and thereby cause tooth movements. The applied forces are transmitted to the tooth root and the periodontal ligament, which is compressed on one side and tensed on the other side. Indeed, strong forces can lead to tooth root resorption, and the crown-to-tooth ratio is reduced, with the potential for significant clinical impact. The cementum, which covers the tooth root, is a thin mineralized tissue of the periodontium that connects the periodontal ligament with the tooth and is built up by cementoblasts. The impact of tension and compression on these cells is investigated in several in vivo and in vitro studies demonstrating differences in protein expression and signaling pathways. In summary, osteogenic marker changes indicate that cyclic tensile forces support cementogenesis, whereas static tension inhibits it. Furthermore, cementogenesis experiences the same protein expression changes under static compression as under static tension, but cyclic compression leads to the exact opposite of cyclic tension. Consistent with the marker expression changes, the signaling pathways of Wnt/β-catenin and RANKL/OPG show that tissue compression leads to cementum degradation and tension forces to cementogenesis. However, the cementum, and in particular its cementoblasts, remain a research area which should be explored in more detail to understand the underlying mechanisms of bone resorption and remodeling after orthodontic treatments.}, language = {en} } @article{LenzKahmannBehbahanietal.2022, author = {Lenz, Maximilian and Kahmann, Stephanie Lucina and Behbahani, Mehdi and Pennig, Lenhard and Hackl, Michael and Leschinger, Tim and M{\"u}ller, Lars Peter and Wegmann, Kilian}, title = {Influence of rotator cuff preload on fracture configuration in proximal humerus fractures: a proof of concept for fracture simulation}, series = {Archives of Orthopaedic and Trauma Surgery}, journal = {Archives of Orthopaedic and Trauma Surgery}, publisher = {Springer}, address = {Berlin, Heidelberg}, issn = {1434-3916}, doi = {10.1007/s00402-022-04471-9}, year = {2022}, abstract = {Introduction: With regard to surgical training, the reproducible simulation of life-like proximal humerus fractures in human cadaveric specimens is desirable. The aim of the present study was to develop a technique that allows the simulation of realistic proximal humerus fractures and to analyse the influence of rotator cuff preload on the generated lesions with regard to fracture configuration. Materials and methods: Ten cadaveric specimens (6 left, 4 right) were fractured using a custom-made drop-test bench, in two groups. Five specimens were fractured without rotator cuff preload, while the other five were fractured with the tendons of the rotator cuff preloaded with 2 kg each. The humeral shaft and the shortened scapula were potted.
The humerus was positioned at 90° of abduction and 10° of internal rotation to simulate a fall on the elevated arm. In two specimens of each group, the emergence of the fractures was documented with high-speed video imaging. Pre-fracture radiographs were taken to evaluate the deltoid-tuberosity index as a measure of bone density. Post-fracture X-rays and CT scans were performed to define the exact fracture configurations. Neer's classification was used to analyse the fractures. Results: In all ten cadaveric specimens, life-like proximal humerus fractures were achieved. Two III-part and three IV-part fractures resulted in each group. The preloading of the rotator cuff muscles had no further influence on the fracture configuration. High-speed videos of the fracture simulation revealed identical fracture mechanisms for both groups. We observed a two-step fracture mechanism, with initial impaction of the head segment against the glenoid followed by fracturing of the head and the tuberosities, and then with further impaction of the shaft against the acromion, which led to separation of the tuberosities. Conclusion: A high-energy axial impulse can reliably induce realistic proximal humerus fractures in cadaveric specimens. The preload of the rotator cuff muscles had no influence on the initial fracture configuration. Therefore, fracture simulation in the proximal humerus is less elaborate. Using the presented technique, pre-fractured specimens are available for real-life surgical education.}, language = {en} } @article{MalinowskiFournierHorbachetal.2022, author = {Malinowski, Daniel and Fournier, Yvan and Horbach, Andreas and Frick, Michael and Magliani, Mirko and Kalverkamp, Sebastian and Hildinger, Martin and Spillner, Jan and Behbahani, Mehdi and Hima, Flutura}, title = {Computational fluid dynamics analysis of endoluminal aortic perfusion}, series = {Perfusion}, volume = {0}, journal = {Perfusion}, number = {0}, publisher = {Sage}, address = {London}, issn = {1477-111X}, doi = {10.1177/02676591221099809}, pages = {1 -- 8}, year = {2022}, abstract = {Introduction: In peripheral percutaneous venoarterial (VA) extracorporeal membrane oxygenation (ECMO) procedures, the femoral artery perfusion route has inherent disadvantages regarding poor upper body perfusion due to watershed. With the advent of new long, flexible cannulas, an advancement of the tip up to the ascending aorta has become feasible. To investigate the impact of such long endoluminal cannulas on upper body perfusion, a Computational Fluid Dynamics (CFD) study was performed considering different support levels and three cannula positions. Methods: An idealized, literature-based and a real-patient proximal aortic geometry, each including an endoluminal cannula, were constructed. The blood flow was considered continuous. Oxygen saturation was set to 80\% for the blood coming from the heart and to 100\% for the blood leaving the cannula. Venoarterial support levels of 50\% and 90\% of the total blood flow rate of 6 l/min were investigated for three different positions of the cannula in the aortic arch. Results: For both geometries, the placement of the cannula in the ascending aorta led to a superior oxygenation of all aortic blood vessels except for the left coronary artery. Cannula placements at the aortic arch and descending aorta could support supra-aortic arteries, but not the coronary arteries. All positions were able to support all branches with saturated blood at 90\% flow volume.
Conclusions: In accordance with clinical observations, CFD analysis reveals that retrograde advancement of a long endoluminal cannula can considerably improve the oxygenation of the upper body and lead to oxygen saturation distributions similar to those of a central cannulation.}, language = {en} } @article{UlmerBraunChengetal.2022, author = {Ulmer, Jessica and Braun, Sebastian and Cheng, Chi-Tsun and Dowey, Steve and Wollert, J{\"o}rg}, title = {Gamification of virtual reality assembly training: Effects of a combined point and level system on motivation and training results}, series = {International Journal of Human-Computer Studies}, volume = {165}, journal = {International Journal of Human-Computer Studies}, number = {Art. No. 102854}, publisher = {Elsevier}, address = {Amsterdam}, issn = {1071-5819}, doi = {10.1016/j.ijhcs.2022.102854}, year = {2022}, abstract = {Virtual Reality (VR) offers novel possibilities for remote training regardless of the availability of the actual equipment, the presence of specialists, and the training locations. Research shows that training environments that adapt to users' preferences and performance can promote more effective learning. However, the observed results can hardly be traced back to specific adaptive measures but rather to the whole new training approach. This study analyzes the effects of a combined point and level VR-based gamification system on assembly training, targeting specific training outcomes and users' motivations. The Gamified-VR-Group with 26 subjects received the gamified training, and the Non-Gamified-VR-Group with 27 subjects received the alternative without gamified elements. Both groups conducted their VR training at least three times before assembling the actual structure. The study found that a level system that gradually increases the difficulty and error probability in VR can significantly lower real-world error rates, self-corrections, and support usages. According to our study, a high error occurrence at the highest training level reduced the Gamified-VR-Group's feeling of competence compared to the Non-Gamified-VR-Group, but at the same time also led to lower error probabilities in real life. It is concluded that a level system with a variable task difficulty should be combined with carefully balanced positive and negative feedback messages. This way, better learning results and an improved self-evaluation can be achieved without causing significant impacts on the participants' feeling of competence.}, language = {en} } @article{KaulenSchwabedalSchneideretal.2022, author = {Kaulen, Lars and Schwabedal, Justus T. C. and Schneider, Jules and Ritter, Philipp and Bialonski, Stephan}, title = {Advanced sleep spindle identification with neural networks}, series = {Scientific Reports}, volume = {12}, journal = {Scientific Reports}, number = {Article number: 7686}, publisher = {Springer Nature}, address = {London}, issn = {2045-2322}, doi = {10.1038/s41598-022-11210-y}, pages = {1 -- 10}, year = {2022}, abstract = {Sleep spindles are neurophysiological phenomena that appear to be linked to memory formation and other functions of the central nervous system, and that can be observed in electroencephalographic recordings (EEG) during sleep. Manually identified spindle annotations in EEG recordings suffer from substantial intra- and inter-rater variability, even if raters have been highly trained, which reduces the reliability of spindle measures as a research and diagnostic tool.
The Massive Online Data Annotation (MODA) project has recently addressed this problem by forming a consensus from multiple such rating experts, thus providing a corpus of spindle annotations of enhanced quality. Based on this dataset, we present a U-Net-type deep neural network model to automatically detect sleep spindles. Our model's performance exceeds that of the state-of-the-art detector and of most experts in the MODA dataset. We observed improved detection accuracy in subjects of all ages, including older individuals whose spindles are particularly challenging to detect reliably. Our results underline the potential of automated methods to perform repetitive, cumbersome tasks with super-human performance.}, language = {en} } @article{Dachwald2005, author = {Dachwald, Bernd}, title = {Optimization of very-low-thrust trajectories using evolutionary neurocontrol}, series = {Acta Astronautica}, volume = {57}, journal = {Acta Astronautica}, number = {2-8}, publisher = {Elsevier}, address = {Amsterdam [et al.]}, issn = {1879-2030}, pages = {175 -- 185}, year = {2005}, abstract = {Searching optimal interplanetary trajectories for low-thrust spacecraft is usually a difficult and time-consuming task that involves much experience and expert knowledge in astrodynamics and optimal control theory. This is because the convergence behavior of traditional local optimizers, which are based on numerical optimal control methods, depends on an adequate initial guess, which is often hard to find, especially for very-low-thrust trajectories that necessitate many revolutions around the sun. The obtained solutions are typically close to the initial guess, which is rarely close to the (unknown) global optimum. Within this paper, trajectory optimization problems are attacked from the perspective of artificial intelligence and machine learning. Inspired by natural archetypes, a smart global method for low-thrust trajectory optimization is proposed that fuses artificial neural networks and evolutionary algorithms into so-called evolutionary neurocontrollers. This novel method runs without an initial guess and does not require the attendance of an expert in astrodynamics and optimal control theory. This paper details how evolutionary neurocontrol works and how it could be implemented. The performance of the method is assessed for three different interplanetary missions with a thrust-to-mass ratio < 0.15 mN/kg (solar sail and nuclear electric).}, language = {en} } @article{RichterBraunsteinStaeudleetal.2021, author = {Richter, Charlotte and Braunstein, Bjoern and Staeudle, Benjamin and Attias, Julia and Suess, Alexander and Weber, Tobias and Mileva, Katya N. and Rittweger, Joern and Green, David A. and Albracht, Kirsten}, title = {Contractile behavior of the gastrocnemius medialis muscle during running in simulated hypogravity}, series = {npj Microgravity}, volume = {7}, journal = {npj Microgravity}, number = {Article number: 32}, publisher = {Springer Nature}, address = {New York}, issn = {2373-8065}, doi = {10.1038/s41526-021-00155-7}, pages = {7 pages}, year = {2021}, abstract = {Vigorous exercise countermeasures in microgravity can largely attenuate muscular degeneration, although the extent of applied loading is key for the extent of muscle wasting. Running on the International Space Station is usually performed with maximum loads of 70\% body weight (0.7 g).
However, it has not been investigated how the reduced musculoskeletal loading affects muscle and series elastic element dynamics, and thereby force and power generation. Therefore, this study examined the effects of running on the vertical treadmill facility, a ground-based analog, at simulated 0.7 g on gastrocnemius medialis contractile behavior. The results reveal that fascicle-series elastic element behavior differs between simulated hypogravity and 1 g running. Whilst shorter peak series elastic element lengths at simulated 0.7 g appear to be the result of lower muscular and gravitational forces acting on it, increased fascicle lengths and decreased velocities could not be anticipated, but may inform the development of optimized running training in hypogravity. However, whether the alterations in contractile behavior precipitate musculoskeletal degeneration warrants further study.}, language = {en} }