@inproceedings{Gaigall2022, author = {Gaigall, Daniel}, title = {On Consistent Hypothesis Testing In General Hilbert Spaces}, publisher = {Avestia Publishing}, address = {Orl{\'e}ans, Canada}, doi = {10.11159/icsta22.157}, pages = {Paper No. 157}, year = {2022}, abstract = {Inference on the basis of high-dimensional data and inference on the basis of functional data are two topics which are discussed frequently in the current statistical literature. A possibility to include both topics in a single approach is working on a very general space for the underlying observations, such as a separable Hilbert space. We propose a general method for consistent hypothesis testing on the basis of random variables with values in separable Hilbert spaces. We avoid concerns with the curse of dimensionality by means of a projection idea. We apply well-known test statistics from nonparametric inference to the projected data and integrate over all projections from a specific set and with respect to suitable probability measures. In contrast to classical methods, which are applicable for real-valued random variables or random vectors of dimensions lower than the sample size, the tests can be applied to random vectors of dimensions larger than the sample size or even to functional and high-dimensional data. In general, resampling procedures such as bootstrap or permutation are suitable to determine critical values. The idea can be extended to the case of incomplete observations. Moreover, we develop an efficient algorithm for implementing the method. Examples are given for testing goodness-of-fit in a one-sample situation in [1] or for testing marginal homogeneity on the basis of a paired sample in [2]. Here, the test statistics in use can be seen as generalizations of the well-known Cram{\'e}r-von-Mises test statistics in the one-sample and two-sample cases. The treatment of other testing problems is possible as well. By using the theory of U-statistics, for instance, asymptotic null distributions of the test statistics are obtained as the sample size tends to infinity. Standard continuity assumptions ensure the asymptotic exactness of the tests under the null hypothesis and that the tests detect any alternative in the limit. Simulation studies demonstrate the size and power of the tests in the finite sample case, confirm the theoretical findings, and are used for the comparison with competing procedures. A possible application of the general approach is inference for stock market returns, also at high data frequencies. In the field of empirical finance, statistical inference of stock market prices usually takes place on the basis of related log-returns as data. In the classical models for stock prices, i.e., the exponential L{\'e}vy model, Black-Scholes model, and Merton model, properties such as independence and stationarity of the increments ensure an independent and identically distributed structure of the data. Specific trends during certain periods of the stock price processes can cause complications in this regard.
In fact, our approach can compensate for those effects by the treatment of the log-returns as random vectors or even as functional data.}, language = {en} } @article{DitzhausGaigall2022, author = {Ditzhaus, Marc and Gaigall, Daniel}, title = {Testing marginal homogeneity in Hilbert spaces with applications to stock market returns}, series = {Test}, volume = {31}, journal = {Test}, number = {3}, publisher = {Springer}, issn = {1863-8260}, doi = {10.1007/s11749-022-00802-5}, pages = {749 -- 770}, year = {2022}, abstract = {This paper considers a paired data framework and discusses the question of marginal homogeneity of bivariate high-dimensional or functional data. The related testing problem can be embedded in a more general setting for paired random variables taking values in a general Hilbert space. To address this problem, a Cram{\'e}r-von-Mises type test statistic is applied and a bootstrap procedure is suggested to obtain critical values and finally a consistent test. The desired properties of a bootstrap test, namely asymptotic exactness under the null hypothesis and consistency under alternatives, are derived. Simulations show the quality of the test in the finite sample case. A possible application is the comparison of two possibly dependent stock market returns based on functional data. The approach is demonstrated based on historical data for different stock market indices.}, language = {en} } @article{GaigallGerstenbergTrinh2022, author = {Gaigall, Daniel and Gerstenberg, Julian and Trinh, Thi Thu Ha}, title = {Empirical process of concomitants for partly categorial data and applications in statistics}, series = {Bernoulli}, volume = {28}, journal = {Bernoulli}, number = {2}, publisher = {International Statistical Institute}, address = {Den Haag, NL}, issn = {1573-9759}, doi = {10.3150/21-BEJ1367}, pages = {803 -- 829}, year = {2022}, abstract = {On the basis of independent and identically distributed bivariate random vectors, where the components are categorial and continuous variables, respectively, the related concomitants, also called induced order statistics, are considered. The main theoretical result is a functional central limit theorem for the empirical process of the concomitants in a triangular array setting. A natural application is hypothesis testing. An independence test and a two-sample test are investigated in detail. The fairly general setting enables limit results under local alternatives and bootstrap samples. For the comparison with existing tests from the literature, simulation studies are conducted. The empirical results obtained confirm the theoretical findings.}, language = {en} } @inproceedings{StaatTran2022, author = {Staat, Manfred and Tran, Ngoc Trinh}, title = {Strain based brittle failure criteria for rocks}, series = {Proceedings of (NACOME2022) The 11th National Conference on Mechanics, Vol. 1. Solid Mechanics, Rock Mechanics, Artificial Intelligence, Teaching and Training, Hanoi, December 2-3, 2022}, booktitle = {Proceedings of (NACOME2022) The 11th National Conference on Mechanics, Vol. 1. Solid Mechanics, Rock Mechanics, Artificial Intelligence, Teaching and Training, Hanoi, December 2-3, 2022}, publisher = {Nha xuat ban Khoa hoc tu nhien va Cong nghe (Natural Science and Technology Publishing House)}, address = {Hanoi}, isbn = {978-604-357-084-7}, pages = {500 -- 509}, year = {2022}, abstract = {When confining pressure is low or absent, extensional fractures are typical, with fractures occurring on unloaded planes in rock.
These "paradox" fractures can be explained by a phenomenological extension strain failure criterion. In the past, a simple empirical criterion for fracture initiation in brittle rock has been developed. But this criterion makes unrealistic strength predictions in biaxial compression and tension. A new extension strain criterion overcomes this limitation by adding a weighted principal shear component. The weight is chosen, such that the enriched extension strain criterion represents the same failure surface as the Mohr-Coulomb (MC) criterion. Thus, the MC criterion has been derived as an extension strain criterion predicting failure modes, which are unexpected in the understanding of the failure of cohesive-frictional materials. In progressive damage of rock, the most likely fracture direction is orthogonal to the maximum extension strain. The enriched extension strain criterion is proposed as a threshold surface for crack initiation CI and crack damage CD and as a failure surface at peak P. Examples show that the enriched extension strain criterion predicts much lower volumes of damaged rock mass compared to the simple extension strain criterion.}, language = {en} } @inproceedings{ChircuCzarneckiFriedmannetal.2023, author = {Chircu, Alina and Czarnecki, Christian and Friedmann, Daniel and Pomaskow, Johanna and Sultanow, Eldar}, title = {Towards a Digital Twin of Society}, series = {Proceedings of the 56th Hawaii International Conference on System Sciences 2023}, booktitle = {Proceedings of the 56th Hawaii International Conference on System Sciences 2023}, publisher = {University of Hawai'i}, address = {Honolulu}, isbn = {978-0-9981331-6-4}, pages = {6748 -- 6757}, year = {2023}, abstract = {This paper describes the potential for developing a digital twin of society- a dynamic model that can be used to observe, analyze, and predict the evolution of various societal aspects. Such a digital twin can help governmental agencies and policy makers in interpreting trends, understanding challenges, and making decisions regarding investments or policies necessary to support societal development and ensure future prosperity. The paper reviews related work regarding the digital twin paradigm and its applications. The paper presents a motivating case study- an analysis of opportunities and challenges faced by the German federal employment agency, Bundesagentur f¨ur Arbeit (BA), proposes solutions using digital twins, and describes initial proofs of concept for such solutions.}, language = {en} } @inproceedings{Butenweg2022, author = {Butenweg, Christoph}, title = {Seismic design and evaluation of industrial facilities}, series = {Progresses in European Earthquake Engineering and Seismology. Third European Conference on Earthquake Engineering and Seismology - Bucharest, 2022}, booktitle = {Progresses in European Earthquake Engineering and Seismology. Third European Conference on Earthquake Engineering and Seismology - Bucharest, 2022}, editor = {Vacareanu, Radu and Ionescu, Constantin}, publisher = {Springer}, address = {Cham}, isbn = {978-3-031-15103-3}, issn = {2524-342X}, doi = {10.1007/978-3-031-15104-0}, pages = {449 -- 464}, year = {2022}, abstract = {Industrial facilities must be thoroughly designed to withstand seismic actions as they exhibit an increased loss potential due to the possibly wideranging damage consequences and the valuable process engineering equipment. 
Past earthquakes showed the social and political consequences of seismic damage to industrial facilities and sensitized the population and politicians worldwide to the possible hazard emanating from industrial facilities. However, a holistic approach for the seismic design of industrial facilities can presently be found neither in national nor in international standards. The introduction of EN 1998-4 of the new generation of Eurocode 8 will improve the normative situation with specific seismic design rules for silos, tanks and pipelines as well as secondary process components. The article presents essential aspects of the seismic design of industrial facilities based on the new generation of Eurocode 8 using the example of tank structures and secondary process components. The interaction effects of the process components with the primary structure are illustrated by means of the experimental results of a shaking table test of a three-story moment-resisting steel frame with different process components. Finally, an integrated approach of digital plant models based on building information modelling (BIM) and structural health monitoring (SHM) is presented, which provides not only a reliable decision-making basis for operation, maintenance and repair but also an excellent tool for rapid assessment of seismic damage.}, language = {en} } @incollection{GkatzogiasVeljkovicPohorylesetal.2022, author = {Gkatzogias, Konstantinos and Veljkovic, Ana and Pohoryles, Daniel A. and Tsionis, Georgios and Bournas, Dionysios A. and Crowley, Helen and Norl{\'e}n, Hedvig and Butenweg, Christoph and Gervasio, Helena and Manfredi, Vincenzo and Masi, Angelo and Zaharieva, Roumiana}, title = {Policy practice and regional impact assessment for building renovation}, series = {REEBUILD Integrated Techniques for the Seismic Strengthening \& Energy Efficiency of Existing Buildings}, booktitle = {REEBUILD Integrated Techniques for the Seismic Strengthening \& Energy Efficiency of Existing Buildings}, editor = {Gkatzogias, Konstantinos and Tsionis, Georgios}, publisher = {Publications Office of the European Union}, address = {Luxembourg}, isbn = {978-92-76-60454-9}, issn = {1831-9424}, doi = {10.2760/883122}, pages = {1 -- 68}, year = {2022}, abstract = {The work presented in this report provides scientific support to building renovation policies in the EU by promoting a holistic point of view on the topic. Integrated renovation can be seen as a nexus between European policies on disaster resilience, energy efficiency and circularity in the building sector. An overview of policy measures for the seismic and energy upgrading of buildings across EU Member States identified only a few available measures for combined upgrading. A regulatory framework, financial instruments and digital tools similar to those for energy renovation, together with awareness and training, may promote integrated renovation. A framework for regional prioritisation of building renovation was put forward, considering seismic risk, energy efficiency, and socioeconomic vulnerability independently and in an integrated way. Results indicate that prioritisation of building renovation is a multidimensional problem. Depending on priorities, different integrated indicators should be used to inform policies and accomplish the highest relative or most widespread impact across different sectors. The framework was further extended to assess the impact of renovation scenarios across the EU with a focus on priority regions.
Integrated renovation can provide a risk-proofed, sustainable, and inclusive built environment, presenting an economic benefit in the order of magnitude of the highest benefit among the separate interventions. Furthermore, it presents the unique capability of reducing fatalities and energy consumption at the same time and, depending on the scenario, to a greater extent.}, language = {en} } @inproceedings{EvansBraunUlmeretal.2022, author = {Evans, Benjamin and Braun, Sebastian and Ulmer, Jessica and Wollert, J{\"o}rg}, title = {AAS implementations - current problems and solutions}, series = {20th International Conference on Mechatronics - Mechatronika (ME)}, booktitle = {20th International Conference on Mechatronics - Mechatronika (ME)}, publisher = {IEEE}, isbn = {978-1-6654-1040-3}, doi = {10.1109/ME54704.2022.9982933}, pages = {6 pages}, year = {2022}, abstract = {The fourth industrial revolution presents a multitude of challenges for industries, one of which is the increased flexibility required of manufacturing lines as a result of increased consumer demand for individualised products. One solution to tackle this challenge is the digital twin, more specifically the standardised model of a digital twin also known as the asset administration shell. The standardisation of an industry-wide communications tool is a critical step in enabling inter-company operations. This paper discusses the current state of asset administration shells, the frameworks used to host them and the problems that need to be addressed. To tackle these issues, we propose an event-based server capable of drastically reducing response times between assets and asset administration shells and a multi-agent system used for the orchestration and deployment of the shells in the field.}, language = {en} } @article{BaringhausGaigall2022, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {A goodness-of-fit test for the compound Poisson exponential model}, series = {Journal of Multivariate Analysis}, volume = {195}, journal = {Journal of Multivariate Analysis}, number = {Article 105154}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0047-259X}, doi = {10.1016/j.jmva.2022.105154}, year = {2022}, abstract = {On the basis of bivariate data, assumed to be observations of independent copies of a random vector (S,N), we consider testing the hypothesis that the distribution of (S,N) belongs to the parametric class of distributions that arise with the compound Poisson exponential model. Typically, this model is used in stochastic hydrology, with N as the number of raindays, and S as total rainfall amount during a certain time period, or in actuarial science, with N as the number of losses, and S as total loss expenditure during a certain time period. The compound Poisson exponential model is characterized in the way that a specific transform associated with the distribution of (S,N) satisfies a certain differential equation. Mimicking the function part of this equation by substituting the empirical counterparts of the transform, we obtain an expression the weighted integral of the square of which is used as the test statistic. We deal with two variants of the latter, one of which is invariant under scale transformations of the S-part by fixed positive constants. Critical values are obtained by using a parametric bootstrap procedure. The asymptotic behavior of the tests is discussed. A simulation study demonstrates the performance of the tests in the finite sample case.
The procedure is applied to rainfall data and to an actuarial dataset. A multivariate extension is also discussed.}, language = {en} } @article{TranTrinhDaoetal.2022, author = {Tran, Ngoc Trinh and Trinh, Tu Luc and Dao, Ngoc Tien and Giap, Van Tan and Truong, Manh Khuyen and Dinh, Thuy Ha and Staat, Manfred}, title = {FEM shakedown analysis of structures under random strength with chance constrained programming}, series = {Vietnam Journal of Mechanics}, volume = {44}, journal = {Vietnam Journal of Mechanics}, number = {4}, publisher = {Vietnam Academy of Science and Technology (VAST)}, issn = {0866-7136}, doi = {10.15625/0866-7136/17943}, pages = {459 -- 473}, year = {2022}, abstract = {Direct methods, comprising limit and shakedown analysis, are a branch of computational mechanics. They play a significant role in mechanical and civil engineering design. The concept of direct methods aims to determine the ultimate load carrying capacity of structures beyond the elastic range. In practical problems, the direct methods lead to nonlinear convex optimization problems with a large number of variables and constraints. If strength and loading are random quantities, the shakedown analysis can be formulated as a stochastic programming problem. In this paper, a method called chance constrained programming is presented, which is an effective method of stochastic programming to solve shakedown analysis problems under random conditions of strength. In this study, the loading is deterministic, and the strength is a normally or lognormally distributed variable.}, language = {en} } @incollection{Czarnecki2018, author = {Czarnecki, Christian}, title = {Establishment of a central process governance organization combined with operational process improvements : Insights from a BPM Project at a leading telecommunications operator in the Middle East}, series = {Business process management cases : digital innovation and business transformation in practice}, booktitle = {Business process management cases : digital innovation and business transformation in practice}, editor = {vom Brocke, Jan and Mendling, Jan}, publisher = {Springer}, address = {Cham}, isbn = {978-3-319-58306-8}, doi = {10.1007/978-3-319-58307-5}, pages = {57 -- 76}, year = {2018}, abstract = {Because of customer churn, strong competition, and operational inefficiencies, the telecommunications operator ME Telco (fictitious name due to confidentiality) launched a strategic transformation program that included a Business Process Management (BPM) project. Major problems were silo-oriented process management and missing cross-functional transparency. Process improvements were not consistently planned and aligned with corporate targets. Measurable inefficiencies were observed on an operational level, e.g., high lead times and reassignment rates of the incident management process.}, language = {en} } @article{RoethenbacherCesariDoppleretal.2022, author = {R{\"o}thenbacher, Annika and Cesari, Matteo and Doppler, Christopher E.J. and Okkels, Niels and Willemsen, Nele and Sembowski, Nora and Seger, Aline and Lindner, Marie and Brune, Corinna and Stefani, Ambra and H{\"o}gl, Birgit and Bialonski, Stephan and Borghammer, Per and Fink, Gereon R.
and Schober, Martin and Sommerauer, Michael}, title = {RBDtector: an open-source software to detect REM sleep without atonia according to visual scoring criteria}, series = {Scientific Reports}, volume = {12}, journal = {Scientific Reports}, number = {Article number: 20886}, publisher = {Springer Nature}, address = {London}, issn = {2045-2322}, doi = {10.1038/s41598-022-25163-9}, pages = {1 -- 14}, year = {2022}, abstract = {REM sleep without atonia (RSWA) is a key feature for the diagnosis of rapid eye movement (REM) sleep behaviour disorder (RBD). We introduce RBDtector, a novel open-source software to score RSWA according to established SINBAR visual scoring criteria. We assessed muscle activity of the mentalis, flexor digitorum superficialis (FDS), and anterior tibialis (AT) muscles. RSWA was scored manually as tonic, phasic, and any activity by human scorers as well as using RBDtector in 20 subjects. Subsequently, 174 subjects (72 without RBD and 102 with RBD) were analysed with RBDtector to show the algorithm's applicability. We additionally compared RBDtector estimates to a previously published dataset. RBDtector showed robust conformity with human scorings. The highest congruency was achieved for phasic and any activity of the FDS. Combining mentalis any and FDS any, RBDtector identified RBD subjects with 100\% specificity and 96\% sensitivity, applying a cut-off of 20.6\%. Comparable performance was obtained without manual artefact removal. RBD subjects also showed muscle bouts of higher amplitude and longer duration. RBDtector provides estimates of tonic, phasic, and any activity comparable to human scorings. RBDtector, which is freely available, can help identify RBD subjects and provides reliable RSWA metrics.}, language = {en} } @article{MuellerSeginWeigandetal.2022, author = {Mueller, Tobias and Segin, Alexander and Weigand, Christoph and Schmitt, Robert H.}, title = {Feature selection for measurement models}, series = {International journal of quality \& reliability management}, journal = {International journal of quality \& reliability management}, number = {Vol. ahead-of-print, No. ahead-of-print.}, publisher = {Emerald Group Publishing Limited}, address = {Bingley}, issn = {0265-671X}, doi = {10.1108/IJQRM-07-2021-0245}, year = {2022}, abstract = {Purpose: In the determination of the measurement uncertainty, the GUM procedure requires the building of a measurement model that establishes a functional relationship between the measurand and all influencing quantities. Since the effort of modelling as well as quantifying the measurement uncertainties depends on the number of influencing quantities considered, the aim of this study is to determine relevant influencing quantities and to remove irrelevant ones from the dataset. Design/methodology/approach: In this work, it was investigated whether the effort of modelling for the determination of measurement uncertainty can be reduced by the use of feature selection (FS) methods. For this purpose, 9 different FS methods were tested on 16 artificial test datasets, whose properties (number of data points, number of features, complexity, features with low influence and redundant features) were varied via a design of experiments. Findings: Based on a success metric, the stability, universality and complexity of the method, two FS methods could be identified that reliably identify relevant and irrelevant influencing quantities for a measurement model.
Originality/value: For the first time, FS methods were applied to datasets with properties of classical measurement processes. The simulation-based results serve as a basis for further research in the field of FS for measurement models. The identified algorithms will be applied to real measurement processes in the future.}, language = {en} } @misc{SteuerDankertBernhardLangolfetal.2022, author = {Steuer-Dankert, Linda and Bernhard, Sebastian and Langolf, Jessica and Leicht-Scholten, Carmen}, title = {Managing change and acceptance of digitalization strategies - Implementing the vision of "Internet of Production" (IoP) in existing corporate structures}, series = {Textile Impulse f{\"u}r die Zukunft: Aachen-Dresden-Denkendorf International Textile Conference 2022 : 1. - 2. Dezember 2022, Eurogress Aachen}, journal = {Textile Impulse f{\"u}r die Zukunft: Aachen-Dresden-Denkendorf International Textile Conference 2022 : 1. - 2. Dezember 2022, Eurogress Aachen}, pages = {153 -- 153}, year = {2022}, abstract = {The vision of the Internet of Production is to enable a new level of cross-domain collaboration by providing semantically adequate and context-aware data from production, development \& usage in real-time.}, language = {en} } @incollection{SchmitzDietzeCzarnecki2019, author = {Schmitz, Manfred and Dietze, Christian and Czarnecki, Christian}, title = {Enabling digital transformation through robotic process automation at Deutsche Telekom}, series = {Digitalization cases : How organizations rethink their business for the digital age}, booktitle = {Digitalization cases : How organizations rethink their business for the digital age}, editor = {Urbach, Nils and R{\"o}glinger, Maximilian}, publisher = {Springer}, address = {Cham}, isbn = {978-3-319-95272-7}, doi = {10.1007/978-3-319-95273-4_2}, pages = {15 -- 33}, year = {2019}, abstract = {Due to the high number of customer contacts, fault clearances, installations, and product provisioning per year, the automation level of operational processes has a significant impact on financial results, quality, and customer experience. Therefore, the telecommunications operator Deutsche Telekom (DT) has defined a digital strategy with the objectives of zero complexity and zero complaint, one touch, agility in service, and disruptive thinking. In this context, Robotic Process Automation (RPA) was identified as an enabling technology to formulate and realize DT's digital strategy through automation of rule-based, routine, and predictable tasks in combination with structured and stable data.}, language = {en} } @incollection{BensbergBuscherCzarnecki2019, author = {Bensberg, Frank and Buscher, Gandalf and Czarnecki, Christian}, title = {Digital transformation and IT topics in the consulting industry: a labor market perspective}, series = {Advances in consulting research : recent findings and practical cases}, booktitle = {Advances in consulting research : recent findings and practical cases}, editor = {Nissen, Volker}, publisher = {Springer}, address = {Cham}, isbn = {978-3-319-95998-6}, doi = {10.1007/978-3-319-95999-3_16}, pages = {341 -- 357}, year = {2019}, abstract = {Information technologies, such as big data analytics, cloud computing, cyber physical systems, robotic process automation, and the internet of things, provide a sustainable impetus for the structural development of business sectors as well as the digitalization of markets, enterprises, and processes.
Within the consulting industry, the proliferation of these technologies opened up the new segment of digital transformation, which focuses on setting up, controlling, and implementing projects for enterprises from a broad range of sectors. These recent developments raise the question of which requirements evolve for IT consultants as important success factors of those digital transformation projects. Therefore, this empirical contribution provides indications regarding the qualifications and competences necessary for IT consultants in the era of digital transformation from a labor market perspective. On the one hand, this knowledge base is interesting for the academic education of consultants, since it supports a market-oriented design of adequate training measures. On the other hand, insights into the competence requirements for consultants are considered relevant for skill and talent management processes in consulting practice. Assuming that consulting companies pursue a strategic human resource management approach, labor market information may also be useful to discover strategic behavioral patterns.}, language = {en} } @article{RuebbelkeVoegeleGrajewskietal.2022, author = {R{\"u}bbelke, Dirk and V{\"o}gele, Stefan and Grajewski, Matthias and Zobel, Luzy}, title = {Hydrogen-based steel production and global climate protection: An empirical analysis of the potential role of a European cross border adjustment mechanism}, series = {Journal of Cleaner Production}, volume = {380}, journal = {Journal of Cleaner Production}, number = {Part 2, Article number: 135040}, publisher = {Elsevier}, issn = {0959-6526}, doi = {10.1016/j.jclepro.2022.135040}, year = {2022}, abstract = {The European Union's aim to become climate neutral by 2050 necessitates ambitious efforts to reduce carbon emissions. Large reductions can be attained particularly in energy intensive sectors like iron and steel. In order to prevent the relocation of such industries outside the EU in the course of tightening environmental regulations, the establishment of a climate club jointly with other large emitters and, alternatively, the unilateral implementation of an international cross-border carbon tax mechanism are proposed. This article focuses on the latter option, choosing the steel sector as an example. In particular, we investigate the financial conditions under which a European cross-border mechanism is capable of protecting hydrogen-based steel production routes employed in Europe against more polluting competition from abroad. By using a floor price model, we assess the competitiveness of different steel production routes in selected countries. We evaluate the climate friendliness of steel production on the basis of specific GHG emissions. In addition, we utilize an input-output price model. It enables us to assess the impacts of rising costs of steel production on commodities using steel as intermediates. Our results raise concerns that a cross-border tax mechanism will not suffice to bring about competitiveness of hydrogen-based steel production in Europe because the cost tends to remain higher than the cost of steel production in, for example, China. Steel is a classic example of a good used mainly as an intermediate for other products. Therefore, a cross-border tax mechanism for steel will increase the price of products produced in the EU that require steel as an input. This can in turn adversely affect the competitiveness of these sectors.
Hence, the effects of higher steel costs on European exports should be borne in mind and could require the cross-border adjustment mechanism to also subsidize exports.}, language = {en} } @incollection{SchneiderWisselinkNoelleetal.2020, author = {Schneider, Dominik and Wisselink, Frank and N{\"o}lle, Nikolai and Czarnecki, Christian}, title = {Influence of artificial intelligence on commercial interactions in the consumer market}, series = {Automatisierung und Personalisierung von Dienstleistungen : Methoden - Potenziale - Einsatzfelder}, booktitle = {Automatisierung und Personalisierung von Dienstleistungen : Methoden - Potenziale - Einsatzfelder}, editor = {Bruhn, Manfred and Hadwich, Karsten}, publisher = {Springer Gabler}, address = {Wiesbaden}, isbn = {978-3-658-30167-5 (Print)}, doi = {10.1007/978-3-658-30168-2_7}, pages = {183 -- 205}, year = {2020}, abstract = {Recently, novel AI-based services have emerged in the consumer market. AI-based services can affect the way consumers make commercial decisions. Research on the influence of AI on commercial interactions is in its infancy. In this chapter, a framework creating a first overview of the influence of AI on commercial interactions is introduced. This framework summarizes the findings of comparing numerous customer journeys of novel AI-based services with corresponding non-AI equivalents.}, language = {en} } @incollection{CroonCzarnecki2021, author = {Croon, Philipp and Czarnecki, Christian}, title = {Liability for loss or damages caused by RPA}, series = {Robotic process automation : Management, technology, applications}, booktitle = {Robotic process automation : Management, technology, applications}, editor = {Czarnecki, Christian and Fettke, Peter}, publisher = {De Gruyter Oldenbourg}, address = {Berlin}, isbn = {9783110676778}, doi = {10.1515/9783110676693-202}, pages = {135 -- 151}, year = {2021}, abstract = {Intelligent autonomous software robots replacing human activities and performing administrative processes are reality in today's corporate world. This includes, for example, decisions about invoice payments, identification of customers for a marketing campaign, and answering customer complaints. What happens if such a software robot causes damage? Due to the complete absence of human activities, the question is not trivial. It could even happen that no one is liable for damage towards a third party, which could create an incalculable legal risk for business partners. Furthermore, the implementation and operation of those software robots involves various stakeholders, which results in the unsolvable endeavor of identifying the originator of the damage. Overall, it is advisable for all involved parties to carefully consider the legal situation. This chapter discusses the liability of software robots from an interdisciplinary perspective.
Based on different technical scenarios, the legal aspects of liability are discussed.}, language = {en} } @incollection{BensbergAuthCzarnecki2021, author = {Bensberg, Frank and Auth, Gunnar and Czarnecki, Christian}, title = {Finding the perfect RPA match : a criteria-based selection method for RPA solutions}, series = {Robotic process automation : Management, technology, applications}, booktitle = {Robotic process automation : Management, technology, applications}, editor = {Czarnecki, Christian and Fettke, Peter}, publisher = {De Gruyter Oldenbourg}, address = {Berlin}, isbn = {978-3-11-067677-8}, doi = {10.1515/9783110676693-201}, pages = {47 -- 75}, year = {2021}, abstract = {The benefits of robotic process automation (RPA) are highly related to the usage of commercial off-the-shelf (COTS) software products that can be easily implemented and customized by business units. But how to find the best-fitting RPA product for a specific situation that creates the expected benefits? This question is related to the general area of software evaluation and selection. In the face of more than 75 RPA products currently on the market, guidance considering those specifics is required. Therefore, this chapter proposes a criteria-based selection method specifically for RPA. The method includes a quantitative evaluation of costs and benefits as well as a qualitative utility analysis based on functional criteria. By using the visualization of financial implications (VOFI) method, an application-oriented structure is provided that opposes the total cost of ownership to the time savings times salary (TSTS). For the utility analysis, a detailed list of functional criteria for RPA is offered. The whole method is based on a multi-vocal review of scientific and non-scholarly literature including publications by business practitioners, consultants, and vendors. The application of the method is illustrated by a concrete RPA example. The illustrated structures, templates, and criteria can be directly utilized by practitioners in their real-life RPA implementations. In addition, a normative decision process for selecting RPA alternatives is proposed before the chapter closes with a discussion and outlook.}, language = {en} } @incollection{CzarneckiFettke2021, author = {Czarnecki, Christian and Fettke, Peter}, title = {Robotic process automation : Positioning, structuring, and framing the work}, series = {Robotic process automation : Management, technology, applications}, booktitle = {Robotic process automation : Management, technology, applications}, editor = {Czarnecki, Christian and Fettke, Peter}, publisher = {De Gruyter Oldenbourg}, address = {Berlin}, isbn = {978-3-11-067668-6 (Print)}, doi = {10.1515/9783110676693-202}, pages = {3 -- 24}, year = {2021}, abstract = {Robotic process automation (RPA) has attracted increasing attention in research and practice. This chapter positions, structures, and frames the topic as an introduction to this book. RPA is understood as a broad concept that comprises a variety of concrete solutions. From a management perspective RPA offers an innovative approach for realizing automation potentials, whereas from a technical perspective the implementation based on software products and the impact of artificial intelligence (AI) and machine learning (ML) are relevant. RPA is industry-independent and can be used, for example, in finance, telecommunications, and the public sector.
With respect to RPA, this chapter discusses definitions, related approaches, a structuring framework, a research framework, and an inside as well as outside architectural view. Furthermore, it provides an overview of the book combined with short summaries of each chapter.}, language = {en} } @incollection{CzarneckiHongSchmitzetal.2021, author = {Czarnecki, Christian and Hong, Chin-Gi and Schmitz, Manfred and Dietze, Christian}, title = {Enabling digital transformation through cognitive robotic process automation at Deutsche Telekom Services Europe}, series = {Digitalization Cases Vol. 2 : Mastering digital transformation for global business}, booktitle = {Digitalization Cases Vol. 2 : Mastering digital transformation for global business}, editor = {Urbach, Nils and R{\"o}glinger, Maximilian and Kautz, Karlheinz and Alias, Rose Alinda and Saunders, Carol and Wiener, Martin}, publisher = {Springer}, address = {Cham}, isbn = {978-3-030-80002-4 (Print)}, doi = {10.1007/978-3-030-80003-1}, pages = {123 -- 138}, year = {2021}, abstract = {The subject of this case is Deutsche Telekom Services Europe (DTSE), a service center for administrative processes. Due to the high volume of repetitive tasks (e.g., 100k manual uploads of offer documents into SAP per year), automation was identified as an important strategic target with high management attention and commitment. DTSE has to work with various backend application systems without any possibility of changing those systems. Furthermore, the complexity of administrative processes differed. When it comes to the transfer of unstructured data (e.g., offer documents) to structured data (e.g., MS Excel files), further cognitive technologies were needed.}, language = {en} } @book{CzarneckiDietze2017, author = {Czarnecki, Christian and Dietze, Christian}, title = {Reference architecture for the telecommunications industry: Transformation of strategy, organization, processes, data, and applications}, publisher = {Springer}, address = {Cham}, isbn = {978-3-319-46755-9 (Print)}, doi = {10.1007/978-3-319-46757-3}, pages = {XXII, 253 pages}, year = {2017}, abstract = {This book reflects the tremendous changes in the telecommunications industry in the course of the past few decades - shorter innovation cycles, stiffer competition and new communication products. It analyzes the transformation of processes, applications and network technologies that are now expected to take place under enormous time pressure. The International Telecommunication Union (ITU) and the TM Forum have provided reference solutions that are broadly recognized and used throughout the value chain of the telecommunications industry, and which can be considered the de facto standard. The book describes how these reference solutions can be used in a practical context: it presents the latest insights into their development, highlights lessons learned from numerous international projects and combines them with well-founded research results in enterprise architecture management and reference modeling. The complete architectural transformation is explained, from the planning and set-up stage to the implementation.
Featuring a wealth of examples and illustrations, the book offers a valuable resource for telecommunication professionals, enterprise architects and project managers alike.}, language = {en} } @inproceedings{CzarneckiHeuserSpiliopoulou2009, author = {Czarnecki, Christian and Heuser, Marcus and Spiliopoulou, Myra}, title = {How does the implementation of a next generation network influence a telecommunication company?}, series = {European and Mediterranean Conference on Information Systems}, booktitle = {European and Mediterranean Conference on Information Systems}, editor = {Irani, Zahir}, publisher = {Brunel University}, address = {London}, isbn = {9781902316697}, pages = {1 -- 11}, year = {2009}, abstract = {As the potential of a Next Generation Network (NGN) is recognized, telecommunication companies consider switching to it. Although the implementation of an NGN seems to be merely a modification of the network infrastructure, it may trigger or require changes in the whole company and even influence the company strategy. To capture the effects of NGN we propose a framework based on concepts of business engineering and technical recommendations for the introduction of NGN technology. The specific design of solutions for the layers "Strategy", "Processes" and "Information Systems" as well as their interdependencies is an essential characteristic of the developed framework. We have performed a case study on NGN implementation and observed that all layers captured by our framework are influenced by the introduction of an NGN.}, language = {en} } @inproceedings{CzarneckiWinkelmannSpiliopoulou2011, author = {Czarnecki, Christian and Winkelmann, Axel and Spiliopoulou, Myra}, title = {Making business systems in the telecommunication industry more customer-oriented}, series = {Information Systems Development : Business Systems and Services: Modeling and Development}, booktitle = {Information Systems Development : Business Systems and Services: Modeling and Development}, editor = {Pokorny, Jaroslav and Repa, Vaclav and Richta, Karel and Wojtkowski, Wita and Linger, Henry and Barry, Chris and Lang, Michael}, publisher = {Springer}, address = {New York}, isbn = {978-1-4419-9645-9 (Print)}, doi = {10.1007/978-1-4419-9790-6_14}, pages = {169 -- 180}, year = {2011}, abstract = {Market changes have forced telecommunication companies to transform their business. Increased competition, short innovation cycles, changed usage patterns, increased customer expectations and cost reduction are the main drivers. Our objective is to analyze to what extent transformation projects have improved the orientation towards the end-customers. Therefore, we selected 38 real-life case studies that deal with customer orientation. Our analysis is based on a telecommunication-specific framework that aligns strategy, business processes and information systems. The result of our analysis shows the following: transformation projects that aim to improve the customer orientation are combined with clear goals on costs and revenue of the enterprise. These projects are usually directly linked to the customer touch points, but also to the development and provisioning of products. Furthermore, the analysis shows that customer orientation is not the sole trigger for transformation.
There is no one-size-fits-all solution; rather, improved customer orientation needs aligned changes of business processes as well as information systems related to different parts of the company.}, language = {en} } @inproceedings{NursinskiStolbergGangatharanCzarnecki2016, author = {Nursinski-Stolberg, Andr{\'e} and Gangatharan, Kiritharan and Czarnecki, Christian}, title = {Development of a subject-oriented reference process model for the telecommunications industry}, series = {GI Edition Proceedings Band 259 INFORMATIK 2016}, booktitle = {GI Edition Proceedings Band 259 INFORMATIK 2016}, editor = {Mayr, Heinrich C. and Pinzger, Martin}, publisher = {Gesellschaft f{\"u}r Informatik e.V.}, address = {Bonn}, isbn = {9783885796534}, issn = {1617-5468}, pages = {699 -- 712}, year = {2016}, abstract = {Generally, the usage of reference models can be structured top-down or bottom-up. The practical need for agile change and flexible organizational implementation requires a consistent mapping to an operational level. In this context, well-established reference process models are typically structured top-down. Subject-oriented Business Process Management (sBPM) offers a modeling concept that is structured bottom-up and concentrates on the process actors on an operational level. This paper applies sBPM to the enhanced Telecom Operations Map (eTOM), a well-accepted reference process model in the telecommunications industry. The resulting design artifact is a concrete example for a combination of a bottom-up and top-down developed reference model. The results are evaluated and confirmed in a practical context through the involvement of the industry body TM Forum.}, language = {en} } @inproceedings{CzarneckiDietze2017, author = {Czarnecki, Christian and Dietze, Christian}, title = {Domain-Specific reference modeling in the telecommunications industry}, series = {DESRIST 2017: Designing the Digital Transformation}, booktitle = {DESRIST 2017: Designing the Digital Transformation}, editor = {Maedche, Alexander and vom Brocke, Jan and Hevner, Alan}, publisher = {Springer}, address = {Cham}, isbn = {978-3-319-59144-5}, doi = {10.1007/978-3-319-59144-5_19}, pages = {313 -- 329}, year = {2017}, abstract = {The telecommunications industry is currently going through a major transformation. In this context, the enhanced Telecom Operations Map (eTOM) is a domain-specific process reference model that is offered by the industry organization TM Forum. In practice, eTOM is well accepted and confirmed as the de facto standard. It provides process definitions and process flows on different levels of detail. This article discusses the reference modeling of eTOM, i.e., the design, the resulting artifact, and its evaluation based on three project cases. The application of eTOM in three projects illustrates the design approach and concrete models on strategic and operational levels. The article follows the Design Science Research (DSR) paradigm.
It contributes with concrete design artifacts to the transformational needs of the telecommunications industry and offers lessons learned from a general DSR perspective.}, language = {en} } @article{CzarneckiWinkelmannSpiliopoulou2010, author = {Czarnecki, Christian and Winkelmann, Axel and Spiliopoulou, Myra}, title = {Services in electronic telecommunication markets: a framework for planning the virtualization of processes}, series = {Electronic Markets}, volume = {20}, journal = {Electronic Markets}, number = {3-4}, publisher = {Springer}, address = {Berlin}, issn = {1422-8890}, doi = {10.1007/s12525-010-0045-8}, pages = {197 -- 207}, year = {2010}, abstract = {The potential of electronic markets in enabling innovative product bundles through flexible and sustainable partnerships is not yet fully exploited in the telecommunication industry. One reason is that bundling requires seamless de-assembling and re-assembling of business processes, whilst processes in telecommunication companies are often product-dependent and hard to virtualize. We propose a framework for the planning of the virtualization of processes, intended to assist the decision maker in prioritizing the processes to be virtualized: (a) we transfer the virtualization prerequisites stated by the Process Virtualization Theory to the context of customer-oriented processes in the telecommunication industry and assess their importance in this context, (b) we derive IT-oriented requirements for the removal of virtualization barriers and highlight their demand on changes at different levels of the organization. We present a first evaluation of our approach in a case study and report on lessons learned and further steps to be performed.}, language = {en} } @article{CzarneckiSpiliopoulou2012, author = {Czarnecki, Christian and Spiliopoulou, Myra}, title = {A holistic framework for the implementation of a next generation network}, series = {International Journal of Business Information Systems}, volume = {9}, journal = {International Journal of Business Information Systems}, number = {4}, publisher = {Inderscience Enterprises}, address = {Olney, Bucks}, issn = {1746-0972}, doi = {10.1504/IJBIS.2012.046291}, pages = {385 -- 401}, year = {2012}, abstract = {As the potential of a next generation network (NGN) is recognised, telecommunication companies consider switching to it. Although the implementation of an NGN seems to be merely a modification of the network infrastructure, it may trigger or require changes in the whole company, because it builds upon the separation between service and transport, a flexible bundling of services into products and the streamlining of the IT infrastructure. We propose a holistic framework, structured into the layers 'strategy', 'processes' and 'information systems', and incorporate into each layer all concepts necessary for the implementation of an NGN, as well as the alignment of these concepts. As a first proof-of-concept for our framework, we have performed a case study on the introduction of NGN in a large telecommunication company; we show that our framework captures all topics that are affected by an NGN implementation.}, language = {en} } @article{BurgerRumpfDoetal.2021, author = {Burger, Ren{\'e} and Rumpf, Jessica and Do, Xuan Tung and Monakhova, Yulia and Diehl, Bernd W. K.
and Rehahn, Matthias and Schulze, Margit}, title = {Is NMR combined with multivariate regression applicable for the molecular weight determination of randomly cross-linked polymers such as lignin?}, series = {ACS Omega}, volume = {6}, journal = {ACS Omega}, number = {44}, publisher = {ACS Publications}, address = {Washington, DC}, issn = {2470-1343}, doi = {10.1021/acsomega.1c03574}, pages = {29516 -- 29524}, year = {2021}, abstract = {The molecular weight properties of lignins are one of the key elements that need to be analyzed for a successful industrial application of these promising biopolymers. In this study, the use of 1H NMR as well as diffusion-ordered spectroscopy (DOSY NMR), combined with multivariate regression methods, was investigated for the determination of the molecular weight (Mw and Mn) and the polydispersity of organosolv lignins (n = 53, Miscanthus x giganteus, Paulownia tomentosa, and Silphium perfoliatum). The suitability of the models was demonstrated by cross validation (CV) as well as by an independent validation set of samples from different biomass origins (beech wood and wheat straw). CV errors of ca. 7-9 and 14-16\% were achieved for all parameters with the models from the 1H NMR spectra and the DOSY NMR data, respectively. The prediction errors for the validation samples were in a similar range for the partial least squares model from the 1H NMR data and for a multiple linear regression using the DOSY NMR data. The results indicate the usefulness of NMR measurements combined with multivariate regression methods as a potential alternative to more time-consuming methods such as gel permeation chromatography.}, language = {en} } @article{MonakhovaDiehl2021, author = {Monakhova, Yulia and Diehl, Bernd W. K.}, title = {Simplification of NMR Workflows by Standardization Using 2H Integral of Deuterated Solvent as Applied to Aloe vera Preparations}, series = {Applied Magnetic Resonance}, volume = {52}, journal = {Applied Magnetic Resonance}, number = {11}, publisher = {Springer}, address = {Cham}, issn = {1613-7507}, doi = {10.1007/s00723-021-01393-4}, pages = {1591 -- 1600}, year = {2021}, abstract = {In this study, a recently proposed NMR standardization approach by 2H integral of deuterated solvent for quantitative multicomponent analysis of complex mixtures is presented. As a proof of principle, the existing NMR routine for the analysis of Aloe vera products was modified. Instead of using absolute integrals of targeted compounds and internal standard (nicotinamide) from 1H-NMR spectra, quantification was performed based on the ratio of a particular 1H-NMR compound integral and the 2H-NMR signal of the deuterated solvent D2O. Validation characteristics (linearity, repeatability, accuracy) were evaluated and the results showed that the method has the same precision as internal standardization in the case of multicomponent screening. Moreover, a dehydration process by freeze drying is not necessary for the new routine. Now, our NMR profiling of A. vera products needs only limited sample preparation and data processing. The new standardization methodology provides an appealing alternative for multicomponent NMR screening. In general, this novel approach, using standardization by 2H integral, benefits from reduced sample preparation steps and uncertainties, and is recommended in different application areas (purity determination, forensics, pharmaceutical analysis, etc.).}, language = {en} } @article{BurmistrovaSobolevaMonakhova2021, author = {Burmistrova, Natalia A.
and Soboleva, Polina M. and Monakhova, Yulia}, title = {Is infrared spectroscopy combined with multivariate analysis a promising tool for heparin authentication?}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {194}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 113811}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0731-7085}, doi = {10.1016/j.jpba.2020.113811}, year = {2021}, abstract = {The investigation of the possibility of determining various characteristics of powder heparin (n = 115) was carried out with infrared spectroscopy. The evaluation of heparin samples included several parameters such as purity grade, distributing company, animal source as well as heparin species (i.e. Na-heparin, Ca-heparin, and heparinoids). Multivariate analysis using principal component analysis (PCA), soft independent modelling of class analogy (SIMCA), and partial least squares - discriminant analysis (PLS-DA) was applied for the modelling of spectral data. Different pre-processing methods were applied to IR spectral data; multiplicative scatter correction (MSC) was chosen as the most relevant. Obtained results were confirmed by nuclear magnetic resonance (NMR) spectroscopy. The good predictive ability of this approach demonstrates the potential of IR spectroscopy and chemometrics for screening of heparin quality. This approach, however, is designed as a screening tool and is not considered as a replacement for either of the methods required by USP and FDA.}, language = {en} } @article{MonakhovaDiehl2021, author = {Monakhova, Yulia and Diehl, Bernd W.K.}, title = {Novel approach of qNMR workflow by standardization using 2H integral: Application to any intrinsic calibration standard}, series = {Talanta}, volume = {222}, journal = {Talanta}, number = {Article number: 121504}, publisher = {Elsevier}, issn = {0039-9140}, doi = {10.1016/j.talanta.2020.121504}, year = {2021}, abstract = {Quantitative nuclear magnetic resonance (qNMR) is routinely performed by internal or external standardization. The manuscript describes a simple alternative to these common workflows by using the NMR signal of another active nucleus of the calibration compound. For example, for any arbitrary compound, quantification by NMR can be based on the use of an indirect concentration referencing that relies on a solvent having both 1H and 2H signals. To perform high-quality quantification, the deuteration level of the utilized deuterated solvent has to be estimated. In this contribution, the new method was applied to the determination of deuteration levels in different deuterated solvents (MeOD, ACN, CDCl3, acetone, benzene, DMSO-d6). Isopropanol-d6, which contains a defined number of deuterons and protons, was used for standardization. Validation characteristics (precision, accuracy, robustness) were calculated and the results showed that the method can be used in routine practice. Uncertainty budget was also evaluated. In general, this novel approach, using standardization by 2H integral, benefits from reduced sample preparation steps and uncertainties, and can be applied in different application areas (purity determination, forensics, pharmaceutical analysis, etc.).}, language = {en} } @article{MonakhovaSobolevaFedotovaetal.2022, author = {Monakhova, Yulia and Soboleva, Polina M. and Fedotova, Elena S. and Musina, Kristina T.
and Burmistrova, Natalia A.}, title = {Quantum chemical calculations of IR spectra of heparin disaccharide subunits}, series = {Computational and Theoretical Chemistry}, volume = {1217}, journal = {Computational and Theoretical Chemistry}, number = {Article number: 113891}, publisher = {Elsevier}, address = {New York, NY}, issn = {2210-271X}, doi = {10.1016/j.comptc.2022.113891}, year = {2022}, abstract = {Heparin is a natural polysaccharide, which plays an essential role in many biological processes. Alterations in building blocks can modify biological roles of commercial heparin products, due to significant changes in the conformation of the polymer chain. The structural variability of heparin leads to difficulties in quality control using different analytical methods, including infrared (IR) spectroscopy. In this paper, molecular modelling of heparin disaccharide subunits was performed using quantum chemistry. The structural and spectral parameters of these disaccharides have been calculated using RHF/6-311G. In addition, over-sulphated chondroitin sulphate disaccharide was studied as one of the most widespread contaminants of heparin. Calculated IR spectra were analyzed with respect to specific structure parameters. The IR spectroscopic fingerprint was found to be sensitive to the substitution pattern of disaccharide subunits. Vibrational assignments of calculated spectra were correlated with experimental IR spectral bands of native heparin. Chemometrics was used to perform multivariate analysis of simulated spectral data.}, language = {en} } @article{BurgerLindnerRumpfetal.2022, author = {Burger, Ren{\'e} and Lindner, Simon and Rumpf, Jessica and Do, Xuan Tung and Diehl, Bernd W.K. and Rehahn, Matthias and Monakhova, Yulia and Schulze, Margit}, title = {Benchtop versus high field NMR: Comparable performance found for the molecular weight determination of lignin}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {212}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 114649}, publisher = {Elsevier}, address = {New York, NY}, issn = {0731-7085}, doi = {10.1016/j.jpba.2022.114649}, year = {2022}, abstract = {Lignin is a promising renewable biopolymer being investigated worldwide as an environmentally benign substitute for fossil-based aromatic compounds, e.g. for use as an excipient with antioxidant and antimicrobial properties in drug delivery or even as an active compound. For its successful implementation into process streams, a quick, easy, and reliable method is needed for its molecular weight determination. Here we present a method using 1H spectra of benchtop as well as conventional NMR systems in combination with multivariate data analysis to determine lignin's molecular weight (Mw and Mn) and polydispersity index (PDI). A set of 36 organosolv lignin samples (from Miscanthus x giganteus, Paulownia tomentosa and Silphium perfoliatum) was used for the calibration and cross validation, and 17 samples were used as an external validation set. Validation errors between 5.6\% and 12.9\% were achieved for all parameters on all NMR devices (43, 60, 500 and 600 MHz). Surprisingly, no significant difference in the performance of the benchtop and high-field devices was found.
This facilitates the application of this method for determining lignin's molecular weight in an industrial environment because of the low maintenance expenditure, small footprint, ruggedness, and low cost of permanent-magnet benchtop NMR systems.}, language = {en} }
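A minimal sketch of the multivariate workflow in the preceding entry, assuming binned 1H spectra as the feature matrix: a partial least squares regression predicting Mw, scored by cross-validated error. The file names, array shapes, and number of latent variables are hypothetical placeholders, and the study's actual preprocessing is not reproduced (Python/scikit-learn):

    import numpy as np
    from sklearn.cross_decomposition import PLSRegression
    from sklearn.model_selection import cross_val_predict

    # Hypothetical data: rows are samples, columns are binned 1H NMR intensities.
    X = np.load("lignin_nmr_spectra.npy")  # shape (n_samples, n_bins), assumed
    y = np.load("lignin_mw.npy")           # weight-average molecular weight Mw

    # Latent-variable count would be chosen by cross-validation in practice.
    pls = PLSRegression(n_components=8)
    y_cv = cross_val_predict(pls, X, y, cv=10).ravel()

    rmsecv = np.sqrt(np.mean((y - y_cv) ** 2))
    print(f"RMSECV: {rmsecv:.1f} g/mol")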
@article{MonakhovaDiehl2022, author = {Monakhova, Yulia and Diehl, Bernd W.K.}, title = {Multinuclear NMR screening of pharmaceuticals using standardization by 2H integral of a deuterated solvent}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {209}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 114530}, publisher = {Elsevier}, issn = {0731-7085}, doi = {10.1016/j.jpba.2021.114530}, year = {2022}, abstract = {An NMR standardization approach that uses the 2H integral of a deuterated solvent for quantitative multinuclear analysis of pharmaceuticals is described. As a proof of principle, the existing NMR procedure for the analysis of heparin products according to the US Pharmacopeia monograph is extended to the determination of Na+ and Cl- content in this matrix. Quantification is performed based on the ratio of a 23Na (35Cl) NMR integral and the 2H NMR signal of the deuterated solvent, D2O, acquired using specific spectrometer hardware. As an alternative, the possibility of 133Cs standardization using the addition of a Cs2CO3 stock solution is shown. Validation characteristics (linearity, repeatability, sensitivity) are evaluated. A holistic NMR profiling of heparin products can now also be used for the quantitative determination of inorganic compounds in a single analytical run using a single sample. In general, the new standardization methodology provides an appealing alternative for the NMR screening of inorganic and organic components in pharmaceutical products.}, language = {en} } @article{LindnerBurgerRutledgeetal.2022, author = {Lindner, Simon and Burger, Ren{\'e} and Rutledge, Douglas N. and Do, Xuan Tung and Rumpf, Jessica and Diehl, Bernd W. K. and Schulze, Margit and Monakhova, Yulia}, title = {Is the calibration transfer of multivariate calibration models between high- and low-field NMR instruments possible? A case study of lignin molecular weight}, series = {Analytical chemistry}, volume = {94}, journal = {Analytical chemistry}, number = {9}, publisher = {ACS Publications}, address = {Washington, DC}, issn = {1520-6882}, doi = {10.1021/acs.analchem.1c05125}, pages = {3997 -- 4004}, year = {2022}, abstract = {Although several successful applications of benchtop nuclear magnetic resonance (NMR) spectroscopy in quantitative mixture analysis exist, the possibility of calibration transfer remains mostly unexplored, especially between high- and low-field NMR. This study investigates for the first time the calibration transfer of partial least squares regressions [weight average molecular weight (Mw) of lignin] between high-field (600 MHz) NMR and benchtop NMR devices (43 and 60 MHz). For the transfer, piecewise direct standardization, calibration transfer based on canonical correlation analysis, and transfer via the extreme learning machine auto-encoder method are employed. Despite the immense resolution difference between high-field and low-field NMR instruments, the results demonstrate that the calibration transfer from high- to low-field is feasible in the case of a physical property, namely, the molecular weight, achieving validation errors close to the original calibration (down to only 1.2 times higher root mean square errors). These results introduce new perspectives for applications of benchtop NMR, in which existing calibrations from expensive high-field instruments can be transferred to cheaper benchtop instruments to economize.}, language = {en} }
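Among the transfer methods listed in the preceding entry, piecewise direct standardization (PDS) is the most classical. The numpy sketch below shows its core under simplifying assumptions: paired transfer samples measured on both instruments, a fixed symmetric channel window, plain least squares per channel, and no mean-centering. All names are illustrative; this is not the authors' implementation:

    import numpy as np

    def pds_transform(S_high, S_low, window=5):
        """Piecewise direct standardization: learn, per spectral channel,
        a local regression mapping low-field spectra onto high-field ones.
        S_high, S_low: paired transfer spectra, shape (n_samples, n_channels)."""
        n, p = S_low.shape
        F = np.zeros((p, p))  # banded transformation matrix
        for j in range(p):
            lo, hi = max(0, j - window), min(p, j + window + 1)
            X = S_low[:, lo:hi]
            # least-squares fit of high-field channel j from a low-field window
            b, *_ = np.linalg.lstsq(X, S_high[:, j], rcond=None)
            F[lo:hi, j] = b
        return F

    # Usage: map new benchtop spectra into the high-field space, then apply
    # the existing high-field PLS calibration:  S_new_high = S_new_low @ F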
@inproceedings{BrockmannSultanowCzarnecki2018, author = {Brockmann, Carsten and Sultanow, Eldar and Czarnecki, Christian}, title = {Enterprise architectures between agility and traditional methodologies}, series = {GI Edition Proceedings Band 285 Workshops der INFORMATIK 2018}, booktitle = {GI Edition Proceedings Band 285 Workshops der INFORMATIK 2018}, editor = {Czarnecki, Christian and Brockmann, Carsten and Sultanow, Eldar and Koschmider, Agnes and Selzer, Annika and Gesellschaft f{\"u}r Informatik e. V.,}, publisher = {K{\"o}llen}, address = {Bonn}, isbn = {9783885796794}, issn = {1617-5468}, pages = {1 page}, year = {2018}, abstract = {For this year's workshop on Enterprise Architecture in Research and Practice we received eight submissions, of which four passed the rigorous peer review. The acceptance rate of 50\% ensures that only advancements in the field are included in our workshop.}, language = {en} } @inproceedings{BensbergAuthCzarneckietal.2018, author = {Bensberg, Frank and Auth, Gunnar and Czarnecki, Christian and W{\"o}rndle, Christopher}, title = {Transforming literature-intensive research processes through text analytics - design, implementation and lessons learned}, editor = {Kemal İlter, H.}, doi = {10.6084/m9.figshare.7582073.v1}, pages = {9 pages}, year = {2018}, abstract = {The continuing growth of scientific publications raises the question of how research processes can be digitalized and thus realized more productively. Especially in information technology fields, research practice is characterized by a rapidly growing volume of publications. Various information systems exist for the search process. However, the analysis of the published content is still a highly manual task. Therefore, we propose a text analytics system that allows a fully digitalized analysis of literature sources. We have realized a prototype by using EBSCO Discovery Service in combination with IBM Watson Explorer and demonstrated the results in real-life research projects. Potential addressees are research institutions, consulting firms, and decision-makers in politics and business practice.}, language = {en} } @inproceedings{BrockmannSultanowCzarnecki2019, author = {Brockmann, Carsten and Sultanow, Eldar and Czarnecki, Christian}, title = {Is enterprise architecture still relevant in the digital age?}, series = {GI Edition Proceedings Band 295 INFORMATIK 2019, Workshop-Beitr{\"a}ge}, booktitle = {GI Edition Proceedings Band 295 INFORMATIK 2019, Workshop-Beitr{\"a}ge}, editor = {Draude, Claude and Lange, Martin and Sick, Bernhard}, publisher = {K{\"o}llen}, address = {Bonn}, isbn = {9783885796893}, issn = {1617-5468}, doi = {10.18420/inf2019_ws01}, pages = {21 -- 21}, year = {2019}, language = {en} } @inproceedings{AuthCzarneckiBensberg2019, author = {Auth, Gunnar and Czarnecki, Christian and Bensberg, Frank}, title = {Impact of robotic process automation on enterprise architectures}, series = {GI Edition Proceedings Band 295 INFORMATIK 2019, Workshop-Beitr{\"a}ge}, booktitle = {GI Edition Proceedings Band 295 INFORMATIK 2019, Workshop-Beitr{\"a}ge}, editor = {Draude, Claude and Lange, Martin and Sick, Bernhard and Gesellschaft f{\"u}r Informatik e.V. (GI),}, publisher = {K{\"o}llen}, address = {Bonn}, isbn = {9783885796893}, issn = {1617-5468}, doi = {10.18420/inf2019_ws05}, pages = {59 -- 65}, year = {2019}, abstract = {The initial idea of Robotic Process Automation (RPA) is the automation of business processes through the presentation layer of existing application systems. For this simple emulation of user input and output by software robots, no changes to the systems and architecture are required. However, considering strategic aspects of aligning business and technology on an enterprise level as well as the growing capabilities of RPA driven by artificial intelligence, interrelations between RPA and Enterprise Architecture (EA) become visible and pose new questions. In this paper we discuss the relationship between RPA and EA in terms of perspectives and implications. As work in progress, we focus on identifying new questions and research opportunities related to RPA and EA.}, language = {en} } @inproceedings{RitschelStenzelCzarneckietal.2021, author = {Ritschel, Konstantin and Stenzel, Adina and Czarnecki, Christian and Hong, Chin-Gi}, title = {Realizing robotic process automation potentials: an architectural perspective on a real-life implementation case}, series = {GI Edition Proceedings Band 314 "INFORMATIK 2021" Computer Science \& Sustainability}, booktitle = {GI Edition Proceedings Band 314 "INFORMATIK 2021" Computer Science \& Sustainability}, editor = {Gesellschaft f{\"u}r Informatik e.V. (GI),}, publisher = {K{\"o}llen}, address = {Bonn}, isbn = {9783885797081}, issn = {1617-5468}, pages = {1303 -- 1311}, year = {2021}, abstract = {The initial idea of Robotic Process Automation (RPA) is the automation of business processes through a simple emulation of user input and output by software robots. Hence, it can be assumed that no changes to the software systems used and the existing Enterprise Architecture (EA) are required. In this short, practical paper we discuss this assumption based on a real-life implementation project. We show that a successful RPA implementation might require architectural work during analysis, implementation, and migration. As a practical paper, we focus on exemplary lessons learned and new questions related to RPA and EA.}, language = {en} } @inproceedings{AmirBauckhageChircuetal.2022, author = {Amir, Malik and Bauckhage, Christian and Chircu, Alina and Czarnecki, Christian and Knopf, Christian and Piatkowski, Nico and Sultanow, Eldar}, title = {What can we expect from quantum (digital) twins?}, publisher = {AIS Electronic Library (AISeL)}, pages = {1 -- 14}, year = {2022}, abstract = {Digital twins enable the modeling and simulation of real-world entities (objects, processes or systems), resulting in improvements in the associated value chains. The emerging field of quantum computing holds tremendous promise for evolving this virtualization towards Quantum (Digital) Twins (QDT) and ultimately Quantum Twins (QT). The quantum (digital) twin concept is not a contradiction in terms but instead describes a hybrid approach that can be implemented using the technologies available today by combining classical computing and digital twin concepts with quantum processing. This paper presents the status quo of research and practice on quantum (digital) twins.
It also discusses their potential to create competitive advantage through real-time simulation of highly complex, interconnected entities that helps companies better address changes in their environment and differentiate their products and services.}, language = {en} } @article{SchuellerRuhlDinstuehlerSengeretal.2022, author = {Sch{\"u}ller-Ruhl, Aaron and Dinst{\"u}hler, Leonard and Senger, Thorsten and Bergfeld, Stefan and Ingenhag, Christian and Fleischhaker, Robert}, title = {Direct fabrication of arbitrary phase masks in optical glass via ultra-short pulsed laser writing of refractive index modifications}, series = {Applied Physics B}, volume = {128}, journal = {Applied Physics B}, number = {Article number: 208}, editor = {Mackenzie, Jacob}, publisher = {Springer}, address = {Berlin}, issn = {1432-0649 (Online)}, doi = {10.1007/s00340-022-07928-2}, pages = {1 -- 11}, year = {2022}, abstract = {We study the possibility of fabricating an arbitrary phase mask in a one-step laser-writing process inside the volume of an optical glass substrate. We derive the phase mask from a Gerchberg-Saxton-type algorithm as an array and create each individual phase shift using a refractive index modification of variable axial length. We realize the variable axial length by superimposing refractive index modifications induced by an ultra-short pulsed laser at different focusing depths. Each single modification is created by applying 1000 pulses with 15 μJ pulse energy at 100 kHz to a fixed spot of 25 μm diameter, and the focus is then shifted axially in steps of 10 μm. With several proof-of-principle examples, we show the feasibility of our method. In particular, we determine the induced refractive index change to be about Δn = 1.5⋅10^-3. We also determine our current limitations by calculating the overlap in the form of a scalar product, and we discuss possible future improvements.}, language = {en} }
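The Gerchberg-Saxton-type computation mentioned in the preceding entry can be sketched generically: iterate between the mask plane and the far field, enforcing the known amplitude in each plane while keeping the evolving phase. The numpy sketch below assumes uniform illumination and an FFT propagation model; it is a textbook version, not the authors' implementation, and the iteration count is arbitrary:

    import numpy as np

    def gerchberg_saxton(target_amplitude, n_iter=200):
        """Compute a phase mask whose far field approximates target_amplitude.
        Generic sketch: uniform input beam, FFT as the propagation model."""
        source = np.ones_like(target_amplitude)  # uniform illumination
        field = source * np.exp(2j * np.pi * np.random.rand(*target_amplitude.shape))
        for _ in range(n_iter):
            far = np.fft.fft2(field)
            far = target_amplitude * np.exp(1j * np.angle(far))    # enforce target amplitude
            field = np.fft.ifft2(far)
            field = source * np.exp(1j * np.angle(field))          # enforce source amplitude
        return np.angle(field)  # phase mask in radians

    # The axial modification length L realizing a phase step delta_phi follows
    # from delta_phi = 2 * pi * delta_n * L / wavelength, with delta_n ~ 1.5e-3.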
@inproceedings{BlaneckBornheimGriegeretal.2022, author = {Blaneck, Patrick Gustav and Bornheim, Tobias and Grieger, Niklas and Bialonski, Stephan}, title = {Automatic readability assessment of German sentences with transformer ensembles}, series = {Proceedings of the GermEval 2022 Workshop on Text Complexity Assessment of German Text}, booktitle = {Proceedings of the GermEval 2022 Workshop on Text Complexity Assessment of German Text}, publisher = {Association for Computational Linguistics}, address = {Potsdam}, doi = {10.48550/arXiv.2209.04299}, pages = {57 -- 62}, year = {2022}, abstract = {Reliable methods for automatic readability assessment have the potential to impact a variety of fields, ranging from machine translation to self-informed learning. Recently, large language models for the German language (such as GBERT and GPT-2-Wechsel) have become available, allowing the development of deep-learning-based approaches that promise to further improve automatic readability assessment. In this contribution, we studied the ability of ensembles of fine-tuned GBERT and GPT-2-Wechsel models to reliably predict the readability of German sentences. We combined these models with linguistic features and investigated the dependence of prediction performance on ensemble size and composition. Mixed ensembles of GBERT and GPT-2-Wechsel performed better than ensembles of the same size consisting of only GBERT or GPT-2-Wechsel models. Our models were evaluated in the GermEval 2022 Shared Task on Text Complexity Assessment on data of German sentences. On out-of-sample data, our best ensemble achieved a root mean squared error of 0.435.}, language = {en} }
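Prediction ensembling of the kind used in the preceding entry typically reduces to averaging per-model outputs before computing the evaluation metric. The sketch below shows that step with purely hypothetical score arrays standing in for fine-tuned GBERT/GPT-2-Wechsel predictions:

    import numpy as np

    def ensemble_rmse(per_model_predictions, targets):
        """Average predictions of several readability models and report RMSE.
        per_model_predictions: shape (n_models, n_sentences), hypothetical."""
        ensemble = np.mean(per_model_predictions, axis=0)
        return np.sqrt(np.mean((ensemble - targets) ** 2))

    # Hypothetical usage with three models' complexity scores:
    preds = np.array([[2.1, 3.4, 1.0], [2.3, 3.1, 1.2], [1.9, 3.6, 0.9]])
    true = np.array([2.0, 3.3, 1.1])
    print(f"ensemble RMSE: {ensemble_rmse(preds, true):.3f}")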
@inproceedings{SteuerDankertSharmaBlecketal.2017, author = {Steuer-Dankert, Linda and Sharma, Mamta Rameshwarlal and Bleck, Wolfgang and Leicht-Scholten, Carmen}, title = {Innovation through Diversity - Development of a Diversity and Innovation management concept}, series = {International Conference on Innovation and Management : IAM23017S : Date: July 4-7, 2017, Osaka, Japan}, booktitle = {International Conference on Innovation and Management : IAM23017S : Date: July 4-7, 2017, Osaka, Japan}, editor = {Farn, C. K.}, publisher = {Kuang Hui Chiu}, address = {Osaka}, issn = {2218-6387}, pages = {Panel C}, year = {2017}, abstract = {Acknowledging that a diverse workforce could be a potential source of innovation, the current research deals with the fine details of why diversity management is central to achieving innovation in heterogeneous research groups and how this could be effectively realized in an organization. The types of heterogeneities addressed mainly include gender, qualification, academic discipline, and intercultural perspectives. The type of organization dealt with in this work is a complex association of research institutes at a technical university in Germany (RWTH Aachen University), namely a 'Cluster of Excellence', whereby several institutes of the university work collaboratively in different sub-projects. The 'Cluster of Excellence' is part of the 'Excellence Initiative' of the German federal and state governments, the German Research Foundation (DFG), and the German Council of Science and Humanities, with the ultimate aim of promoting cutting-edge research. To support interdisciplinary collaboration and thus the performance of the cluster, a diversity and innovation management concept is being developed; this development is presently in the conceptual phase and is described in this paper. The 3-S-Diversity Model, composed of the three elements skills, structure, and strategy, serves as a basis for the development of the concept. The proposed concept consists of six phases. The first two phases lay the groundwork by developing an understanding of the status quo of the forms of diversity in the Cluster of Excellence, the type of organizational structure of the member institutes, and the varieties of specialist work cultures of the same. The third and fourth phases build on this foundation by means of qualitative and quantitative studies. While the third phase deals with sensitizing the management level to the close connection between diversity and innovation, the need to manage them thereafter, and tailor-made methods of doing so, the fourth phase focuses mainly on the mindset of the employees in this regard. The fifth phase consolidates the learnings and ideas developed in the course of the first four phases into an implementable strategy. The final phase is the implementation of this concept in the Cluster. The first three phases have been accomplished successfully and preliminary results are already available.}, language = {en} } @article{MolinnusJanusFangetal.2022, author = {Molinnus, Denise and Janus, Kevin Alexander and Fang, Anyelina C. and Drinic, Aleksander and Achtsnicht, Stefan and K{\"o}pf, Marius and Keusgen, Michael and Sch{\"o}ning, Michael Josef}, title = {Thick-film carbon electrode deposited onto a biodegradable fibroin substrate for biosensing applications}, series = {Physica status solidi (a)}, volume = {219}, journal = {Physica status solidi (a)}, number = {23}, publisher = {Wiley-VCH}, address = {Weinheim}, issn = {1862-6319}, doi = {10.1002/pssa.202200100}, pages = {1 -- 9}, year = {2022}, abstract = {This study addresses a proof-of-concept experiment with a biocompatible screen-printed carbon electrode deposited onto a biocompatible and biodegradable substrate made of fibroin, a protein derived from the silk of the Bombyx mori silkworm. To demonstrate the sensor performance, the carbon electrode is functionalized as a glucose biosensor with the enzyme glucose oxidase and encapsulated with a silicone rubber to ensure biocompatibility of the contact wires. The carbon electrode is fabricated by means of thick-film technology, including a curing step to solidify the carbon paste. The influence of the curing temperature and curing time on the electrode morphology is analyzed via scanning electron microscopy. The electrochemical characterization of the glucose biosensor is performed by amperometric/voltammetric measurements of different glucose concentrations in phosphate buffer. Herein, systematic studies at potentials from 500 to 1200 mV applied to the carbon working electrode (vs the Ag/AgCl reference electrode) allow the optimal working potential to be determined. Additionally, the influence of the curing parameters on the glucose sensitivity is examined over a period of up to 361 days. The sensor shows negligible cross-sensitivity toward ascorbic acid, noradrenaline, and adrenaline. The developed biocompatible biosensor is highly promising for future in vivo and epidermal applications.}, language = {en} } @article{Maurischat2022, author = {Maurischat, Andreas}, title = {Algebraic independence of the Carlitz period and its hyperderivatives}, series = {Journal of Number Theory}, volume = {240}, journal = {Journal of Number Theory}, publisher = {Elsevier}, address = {Orlando, Fla.}, issn = {0022-314X}, doi = {10.1016/j.jnt.2022.01.006}, pages = {145 -- 162}, year = {2022}, language = {en} } @article{KotliarOrtnerConradietal.2022, author = {Kotliar, Konstantin and Ortner, Marion and Conradi, Anna and Hacker, Patricia and Hauser, Christine and G{\"u}nthner, Roman and Moser, Michaela and Muggenthaler, Claudia and Diehl-Schmid, Janine and Priller, Josef and Schmaderer, Christoph and Grimmer, Timo}, title = {Altered retinal cerebral vessel oscillation frequencies in Alzheimer's disease compatible with impaired amyloid clearance}, series = {Neurobiology of Aging}, volume = {120}, journal = {Neurobiology of Aging}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0197-4580}, doi = {10.1016/j.neurobiolaging.2022.08.012}, pages = {117 -- 127}, year = {2022}, abstract = {Retinal vessels are similar to cerebral vessels in their structure and function. Moderately low oscillation frequencies of around 0.1 Hz have been reported as the driving force for paravascular drainage in gray matter in mice and are known as the frequencies of lymphatic vessels in humans. We aimed to elucidate whether retinal vessel oscillations are altered in Alzheimer's disease (AD) at the stage of dementia or mild cognitive impairment (MCI).
Seventeen patients with mild-to-moderate dementia due to AD (ADD), 23 patients with MCI due to AD, and 18 cognitively healthy controls (HC) were examined using the Dynamic Retinal Vessel Analyzer. Oscillatory temporal changes of retinal vessel diameters were evaluated using mathematical signal analysis. Especially at moderately low frequencies around 0.1 Hz, arterial oscillations in ADD and MCI significantly prevailed over HC oscillations and correlated with disease severity. The pronounced retinal arterial vasomotion at moderately low frequencies in the ADD and MCI groups would be compatible with the view of a compensatory upregulation of paravascular drainage in AD and would strengthen the amyloid clearance hypothesis.}, language = {en} } @article{ColomboDriraFrotscheretal.2022, author = {Colombo, Daniele and Drira, Slah and Frotscher, Ralf and Staat, Manfred}, title = {An element-based formulation for ES-FEM and FS-FEM models for implementation in standard solid mechanics finite element codes for 2D and 3D static analysis}, series = {International Journal for Numerical Methods in Engineering}, volume = {124}, journal = {International Journal for Numerical Methods in Engineering}, number = {2}, publisher = {Wiley}, address = {Chichester}, issn = {1097-0207}, doi = {10.1002/nme.7126}, pages = {402 -- 433}, year = {2022}, abstract = {Edge-based and face-based smoothed finite element methods (ES-FEM and FS-FEM, respectively) are modified versions of the finite element method that achieve more accurate results and reduced sensitivity to mesh distortion, at least for linear elements. These properties make the two methods very attractive. However, their implementation in a standard finite element code is nontrivial because it requires heavy and extensive modifications to the code architecture. In this article, we present an element-based formulation of the ES-FEM and FS-FEM methods that allows the two methods to be implemented in a standard finite element code with no modifications to its architecture. Moreover, the element-based formulation permits easy handling of any type of element, especially in 3D models where, to the best of the authors' knowledge, only tetrahedral elements are used in the FS-FEM applications found in the literature. Shape functions for non-simplex 3D elements are proposed in order to apply FS-FEM to any standard finite element.}, language = {en} }
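As generic background to the smoothing idea named in the last entry: in smoothed FEM variants such as ES-FEM and FS-FEM, the compatible strain field is replaced, within each smoothing domain built around a mesh edge or face, by its average, which the divergence theorem turns into a boundary integral. The notation below is standard S-FEM textbook notation and only a sketch, not the article's own formulation:

\[
\tilde{\varepsilon}_k \;=\; \frac{1}{V_k}\int_{\Omega_k} \varepsilon(\mathbf{u})\,\mathrm{d}\Omega
\;=\; \frac{1}{V_k}\oint_{\Gamma_k} \mathbf{L}_{\mathbf{n}}(\mathbf{x})\,\mathbf{u}(\mathbf{x})\,\mathrm{d}\Gamma ,
\]

where \(\Omega_k\) is the smoothing domain associated with edge (ES-FEM) or face (FS-FEM) \(k\), \(V_k\) its area or volume, \(\Gamma_k\) its boundary, and \(\mathbf{L}_{\mathbf{n}}\) the matrix of outward unit normal components. Only shape function values on \(\Gamma_k\) are needed, which is what makes an element-based implementation in a standard code feasible.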