@inproceedings{CzarneckiWinkelmannSpiliopoulou2011, author = {Czarnecki, Christian and Winkelmann, Axel and Spiliopoulou, Myra}, title = {Making business systems in the telecommunication industry more customer-oriented}, series = {Information Systems Development : Business Systems and Services: Modeling and Development}, booktitle = {Information Systems Development : Business Systems and Services: Modeling and Development}, editor = {Pokorny, Jaroslav and Repa, Vaclav and Richta, Karel and Wojtkowski, Wita and Linger, Henry and Barry, Chris and Lang, Michael}, publisher = {Springer}, address = {New York}, isbn = {978-1-4419-9645-9 (Print)}, doi = {10.1007/978-1-4419-9790-6_14}, pages = {169 -- 180}, year = {2011}, abstract = {Market changes have forced telecommunication companies to transform their business. Increased competition, short innovation cycles, changed usage patterns, increased customer expectations and cost reduction are the main drivers. Our objective is to analyze to what extent transformation projects have improved the orientation towards the end-customers. Therefore, we selected 38 real-life case studies that are dealing with customer orientation. Our analysis is based on a telecommunication-specific framework that aligns strategy, business processes and information systems. The result of our analysis shows the following: transformation projects that aim to improve the customer orientation are combined with clear goals on costs and revenue of the enterprise. These projects are usually directly linked to the customer touch points, but also to the development and provisioning of products. Furthermore, the analysis shows that customer orientation is not the sole trigger for transformation. There is no one-fits-all solution; rather, improved customer orientation needs aligned changes of business processes as well as information systems related to different parts of the company.}, language = {en} } @inproceedings{NursinskiStolbergGangatharanCzarnecki2016, author = {Nursinski-Stolberg, Andr{\'e} and Gangatharan, Kiritharan and Czarnecki, Christian}, title = {Development of a subject-oriented reference process model for the telecommunications industry}, series = {GI Edition Proceedings Band 259 INFORMATIK 2016}, booktitle = {GI Edition Proceedings Band 259 INFORMATIK 2016}, editor = {Mayr, Heinrich C. and Pinzger, Martin}, publisher = {Gesellschaft f{\"u}r Informatik e.V.}, address = {Bonn}, isbn = {9783885796534}, issn = {1617-5468}, pages = {699 -- 712}, year = {2016}, abstract = {Generally the usage of reference models can be structured top-down or bottom-up. The practical need of agile change and flexible organizational implementation requires a consistent mapping to an operational level. In this context, well-established reference process models are typically structured top-down. The subject-oriented Business Process Management (sBPM) offers a modeling concept that is structured bottom-up and concentrates on the process actors on an operational level. This paper applies sBPM to the enhanced Telecom Operations Map (eTOM), a well-accepted reference process model in the telecommunications industry. The resulting design artifact is a concrete example for a combination of a bottom-up and top-down developed reference model. 
The results are evaluated and confirmed in a practical context through the involvement of the industry body TM Forum.}, language = {en} } @inproceedings{CzarneckiDietze2017, author = {Czarnecki, Christian and Dietze, Christian}, title = {Domain-specific reference modeling in the telecommunications industry}, series = {DESRIST 2017: Designing the Digital Transformation}, booktitle = {DESRIST 2017: Designing the Digital Transformation}, editor = {Maedche, Alexander and vom Brocke, Jan and Hevner, Alan}, publisher = {Springer}, address = {Cham}, isbn = {978-3-319-59144-5}, doi = {10.1007/978-3-319-59144-5_19}, pages = {313 -- 329}, year = {2017}, abstract = {The telecommunications industry is currently going through a major transformation. In this context, the enhanced Telecom Operations Map (eTOM) is a domain-specific process reference model that is offered by the industry organization TM Forum. In practice, eTOM is well accepted and confirmed as a de facto standard. It provides process definitions and process flows on different levels of detail. This article discusses the reference modeling of eTOM, i.e., the design, the resulting artifact, and its evaluation based on three project cases. The application of eTOM in three projects illustrates the design approach and concrete models on strategic and operational levels. The article follows the Design Science Research (DSR) paradigm. It contributes with concrete design artifacts to the transformational needs of the telecommunications industry and offers lessons learned from a general DSR perspective.}, language = {en} } @article{CzarneckiWinkelmannSpiliopoulou2010, author = {Czarnecki, Christian and Winkelmann, Axel and Spiliopoulou, Myra}, title = {Services in electronic telecommunication markets: a framework for planning the virtualization of processes}, series = {Electronic Markets}, volume = {20}, journal = {Electronic Markets}, number = {3-4}, publisher = {Springer}, address = {Berlin}, issn = {1422-8890}, doi = {10.1007/s12525-010-0045-8}, pages = {197 -- 207}, year = {2010}, abstract = {The potential of electronic markets in enabling innovative product bundles through flexible and sustainable partnerships is not yet fully exploited in the telecommunication industry. One reason is that bundling requires seamless de-assembling and re-assembling of business processes, whilst processes in telecommunication companies are often product-dependent and hard to virtualize. We propose a framework for the planning of the virtualization of processes, intended to assist the decision maker in prioritizing the processes to be virtualized: (a) we transfer the virtualization pre-requisites stated by the Process Virtualization Theory to the context of customer-oriented processes in the telecommunication industry and assess their importance in this context, (b) we derive IT-oriented requirements for the removal of virtualization barriers and highlight their demand on changes at different levels of the organization. 
We present a first evaluation of our approach in a case study and report on lessons learned and further steps to be performed.}, language = {en} } @article{CzarneckiSpiliopoulou2012, author = {Czarnecki, Christian and Spiliopoulou, Myra}, title = {A holistic framework for the implementation of a next generation network}, series = {International Journal of Business Information Systems}, volume = {9}, journal = {International Journal of Business Information Systems}, number = {4}, publisher = {Inderscience Enterprises}, address = {Olney, Bucks}, issn = {1746-0972}, doi = {10.1504/IJBIS.2012.046291}, pages = {385 -- 401}, year = {2012}, abstract = {As the potential of a next generation network (NGN) is recognised, telecommunication companies consider switching to it. Although the implementation of an NGN seems to be merely a modification of the network infrastructure, it may trigger or require changes in the whole company, because it builds upon the separation between service and transport, a flexible bundling of services to products and the streamlining of the IT infrastructure. We propose a holistic framework, structured into the layers 'strategy', 'processes' and 'information systems' and incorporate into each layer all concepts necessary for the implementation of an NGN, as well as the alignment of these concepts. As a first proof-of-concept for our framework we have performed a case study on the introduction of NGN in a large telecommunication company; we show that our framework captures all topics that are affected by an NGN implementation.}, language = {en} } @article{BurgerRumpfDoetal.2021, author = {Burger, Ren{\'e} and Rumpf, Jessica and Do, Xuan Tung and Monakhova, Yulia and Diehl, Bernd W. K. and Rehahn, Matthias and Schulze, Margit}, title = {Is NMR combined with multivariate regression applicable for the molecular weight determination of randomly cross-linked polymers such as lignin?}, series = {ACS Omega}, volume = {6}, journal = {ACS Omega}, number = {44}, publisher = {ACS Publications}, address = {Washington, DC}, issn = {2470-1343}, doi = {10.1021/acsomega.1c03574}, pages = {29516 -- 29524}, year = {2021}, abstract = {The molecular weight properties of lignins are one of the key elements that need to be analyzed for a successful industrial application of these promising biopolymers. In this study, the use of 1H NMR as well as diffusion-ordered spectroscopy (DOSY NMR), combined with multivariate regression methods, was investigated for the determination of the molecular weight (Mw and Mn) and the polydispersity of organosolv lignins (n = 53, Miscanthus x giganteus, Paulownia tomentosa, and Silphium perfoliatum). The suitability of the models was demonstrated by cross validation (CV) as well as by an independent validation set of samples from different biomass origins (beech wood and wheat straw). CV errors of ca. 7-9 and 14-16\% were achieved for all parameters with the models from the 1H NMR spectra and the DOSY NMR data, respectively. The prediction errors for the validation samples were in a similar range for the partial least squares model from the 1H NMR data and for a multiple linear regression using the DOSY NMR data. The results indicate the usefulness of NMR measurements combined with multivariate regression methods as a potential alternative to more time-consuming methods such as gel permeation chromatography.}, language = {en} } @article{MonakhovaDiehl2021, author = {Monakhova, Yulia and Diehl, Bernd W. 
K.}, title = {Simplification of NMR Workflows by Standardization Using 2H Integral of Deuterated Solvent as Applied to Aloe vera Preparations}, series = {Applied Magnetic Resonance}, volume = {52}, journal = {Applied Magnetic Resonance}, number = {11}, publisher = {Springer}, address = {Cham}, issn = {1613-7507}, doi = {10.1007/s00723-021-01393-4}, pages = {1591 -- 1600}, year = {2021}, abstract = {In this study, a recently proposed NMR standardization approach by 2H integral of deuterated solvent for quantitative multicomponent analysis of complex mixtures is presented. As a proof of principle, the existing NMR routine for the analysis of Aloe vera products was modified. Instead of using absolute integrals of targeted compounds and internal standard (nicotinamide) from 1H-NMR spectra, quantification was performed based on the ratio of a particular 1H-NMR compound integral and 2H-NMR signal of deuterated solvent D2O. Validation characteristics (linearity, repeatability, accuracy) were evaluated and the results showed that the method has the same precision as internal standardization in the case of multicomponent screening. Moreover, a dehydration process by freeze drying is not necessary for the new routine. Now, our NMR profiling of A. vera products needs only limited sample preparation and data processing. The new standardization methodology provides an appealing alternative for multicomponent NMR screening. In general, this novel approach, using standardization by 2H integral, benefits from reduced sample preparation steps and uncertainties, and is recommended in different application areas (purity determination, forensics, pharmaceutical analysis, etc.).}, language = {en} } @article{BurmistrovaSobolevaMonakhova2021, author = {Burmistrova, Natalia A. and Soboleva, Polina M. and Monakhova, Yulia}, title = {Is infrared spectroscopy combined with multivariate analysis a promising tool for heparin authentication?}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {194}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 113811}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0731-7085}, doi = {10.1016/j.jpba.2020.113811}, year = {2021}, abstract = {The investigation of the possibility to determine various characteristics of powder heparin (n = 115) was carried out with infrared spectroscopy. The evaluation of heparin samples included several parameters such as purity grade, distributing company, animal source as well as heparin species (i.e. Na-heparin, Ca-heparin, and heparinoids). Multivariate analysis using principal component analysis (PCA), soft independent modelling of class analogy (SIMCA), and partial least squares - discriminant analysis (PLS-DA) were applied for the modelling of spectral data. Different pre-processing methods were applied to IR spectral data; multiplicative scatter correction (MSC) was chosen as the most relevant. Obtained results were confirmed by nuclear magnetic resonance (NMR) spectroscopy. Good predictive ability of this approach demonstrates the potential of IR spectroscopy and chemometrics for screening of heparin quality. 
This approach, however, is designed as a screening tool and is not considered a replacement for either of the methods required by USP and FDA.}, language = {en} } @article{MonakhovaDiehl2021, author = {Monakhova, Yulia and Diehl, Bernd W.K.}, title = {Novel approach of qNMR workflow by standardization using 2H integral: Application to any intrinsic calibration standard}, series = {Talanta}, volume = {222}, journal = {Talanta}, number = {Article number: 121504}, publisher = {Elsevier}, issn = {0039-9140}, doi = {10.1016/j.talanta.2020.121504}, year = {2021}, abstract = {Quantitative nuclear magnetic resonance (qNMR) is routinely performed by internal or external standardization. The manuscript describes a simple alternative to these common workflows by using the NMR signal of another active nucleus of the calibration compound. For example, for any arbitrary compound, quantification by NMR can be based on the use of an indirect concentration referencing that relies on a solvent having both 1H and 2H signals. To perform high-quality quantification, the deuteration level of the utilized deuterated solvent has to be estimated. In this contribution, the new method was applied to the determination of deuteration levels in different deuterated solvents (MeOD, ACN, CDCl3, acetone, benzene, DMSO-d6). Isopropanol-d6, which contains a defined number of deuterons and protons, was used for standardization. Validation characteristics (precision, accuracy, robustness) were calculated and the results showed that the method can be used in routine practice. The uncertainty budget was also evaluated. In general, this novel approach, using standardization by 2H integral, benefits from reduced sample preparation steps and uncertainties, and can be applied in different application areas (purity determination, forensics, pharmaceutical analysis, etc.).}, language = {en} } @article{MonakhovaSobolevaFedotovaetal.2022, author = {Monakhova, Yulia and Soboleva, Polina M. and Fedotova, Elena S. and Musina, Kristina T. and Burmistrova, Natalia A.}, title = {Quantum chemical calculations of IR spectra of heparin disaccharide subunits}, series = {Computational and Theoretical Chemistry}, volume = {1217}, journal = {Computational and Theoretical Chemistry}, number = {Article number: 113891}, publisher = {Elsevier}, address = {New York, NY}, issn = {2210-271X}, doi = {10.1016/j.comptc.2022.113891}, year = {2022}, abstract = {Heparin is a natural polysaccharide, which plays an essential role in many biological processes. Alterations in building blocks can modify biological roles of commercial heparin products, due to significant changes in the conformation of the polymer chain. The structural variability of heparin leads to difficulty in quality control using different analytical methods, including infrared (IR) spectroscopy. In this paper, molecular modelling of heparin disaccharide subunits was performed using quantum chemistry. The structural and spectral parameters of these disaccharides have been calculated using RHF/6-311G. In addition, over-sulphated chondroitin sulphate disaccharide was studied as one of the most widespread contaminants of heparin. Calculated IR spectra were analyzed with respect to specific structure parameters. The IR spectroscopic fingerprint was found to be sensitive to the substitution pattern of the disaccharide subunits. Vibrational assignments of calculated spectra were correlated with experimental IR spectral bands of native heparin. 
Chemometrics was used to perform multivariate analysis of simulated spectral data.}, language = {en} } @article{BurgerLindnerRumpfetal.2022, author = {Burger, Ren{\'e} and Lindner, Simon and Rumpf, Jessica and Do, Xuan Tung and Diehl, Bernd W.K. and Rehahn, Matthias and Monakhova, Yulia and Schulze, Margit}, title = {Benchtop versus high field NMR: Comparable performance found for the molecular weight determination of lignin}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {212}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 114649}, publisher = {Elsevier}, address = {New York, NY}, issn = {0731-7085}, doi = {10.1016/j.jpba.2022.114649}, year = {2022}, abstract = {Lignin is a promising renewable biopolymer being investigated worldwide as an environmentally benign substitute for fossil-based aromatic compounds, e.g. for use as an excipient with antioxidant and antimicrobial properties in drug delivery or even as an active compound. For its successful implementation into process streams, a quick, easy, and reliable method is needed for its molecular weight determination. Here we present a method using 1H spectra of benchtop as well as conventional NMR systems in combination with multivariate data analysis, to determine lignin's molecular weight (Mw and Mn) and polydispersity index (PDI). A set of 36 organosolv lignin samples (from Miscanthus x giganteus, Paulownia tomentosa and Silphium perfoliatum) was used for the calibration and cross validation, and 17 samples were used as an external validation set. Validation errors between 5.6\% and 12.9\% were achieved for all parameters on all NMR devices (43, 60, 500 and 600 MHz). Surprisingly, no significant difference in the performance of the benchtop and high-field devices was found. This facilitates the application of this method for determining lignin's molecular weight in an industrial environment because of the low maintenance expenditure, small footprint, ruggedness, and low cost of permanent magnet benchtop NMR systems.}, language = {en} } @article{MonakhovaDiehl2022, author = {Monakhova, Yulia and Diehl, Bernd W.K.}, title = {Multinuclear NMR screening of pharmaceuticals using standardization by 2H integral of a deuterated solvent}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {209}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 114530}, publisher = {Elsevier}, issn = {0731-7085}, doi = {10.1016/j.jpba.2021.114530}, year = {2022}, abstract = {An NMR standardization approach that uses the 2H integral of deuterated solvent for quantitative multinuclear analysis of pharmaceuticals is described. As a proof of principle, the existing NMR procedure for the analysis of heparin products according to the US Pharmacopeia monograph is extended to the determination of Na+ and Cl- content in this matrix. Quantification is performed based on the ratio of a 23Na (35Cl) NMR integral and 2H NMR signal of deuterated solvent, D2O, acquired using the specific spectrometer hardware. As an alternative, the possibility of 133Cs standardization using the addition of Cs2CO3 stock solution is shown. Validation characteristics (linearity, repeatability, sensitivity) are evaluated. A holistic NMR profiling of heparin products can now also be used for the quantitative determination of inorganic compounds in a single analytical run using a single sample. 
In general, the new standardization methodology provides an appealing alternative for the NMR screening of inorganic and organic components in pharmaceutical products.}, language = {en} } @article{LindnerBurgerRutledgeetal.2022, author = {Lindner, Simon and Burger, Ren{\'e} and Rutledge, Douglas N. and Do, Xuan Tung and Rumpf, Jessica and Diehl, Bernd W. K. and Schulze, Margit and Monakhova, Yulia}, title = {Is the calibration transfer of multivariate calibration models between high- and low-field NMR instruments possible? A case study of lignin molecular weight}, series = {Analytical Chemistry}, volume = {94}, journal = {Analytical Chemistry}, number = {9}, publisher = {ACS Publications}, address = {Washington, DC}, issn = {1520-6882}, doi = {10.1021/acs.analchem.1c05125}, pages = {3997 -- 4004}, year = {2022}, abstract = {Although several successful applications of benchtop nuclear magnetic resonance (NMR) spectroscopy in quantitative mixture analysis exist, the possibility of calibration transfer remains mostly unexplored, especially between high- and low-field NMR. This study investigates for the first time the calibration transfer of partial least squares regressions [weight average molecular weight (Mw) of lignin] between high-field (600 MHz) NMR and benchtop NMR devices (43 and 60 MHz). For the transfer, piecewise direct standardization, calibration transfer based on canonical correlation analysis, and transfer via the extreme learning machine auto-encoder method are employed. Despite the immense resolution difference between high-field and low-field NMR instruments, the results demonstrate that the calibration transfer from high- to low-field is feasible in the case of a physical property, namely, the molecular weight, achieving validation errors close to the original calibration (down to only 1.2 times higher root mean square errors). These results introduce new perspectives for applications of benchtop NMR, in which existing calibrations from expensive high-field instruments can be transferred to cheaper benchtop instruments to economize.}, language = {en} } @inproceedings{BrockmannSultanowCzarnecki2018, author = {Brockmann, Carsten and Sultanow, Eldar and Czarnecki, Christian}, title = {Enterprise architectures between agility and traditional methodologies}, series = {GI Edition Proceedings Band 285 Workshops der INFORMATIK 2018}, booktitle = {GI Edition Proceedings Band 285 Workshops der INFORMATIK 2018}, editor = {Czarnecki, Christian and Brockmann, Carsten and Sultanow, Eldar and Koschmider, Agnes and Selzer, Annika and Gesellschaft f{\"u}r Informatik e. V.,}, publisher = {K{\"o}llen}, address = {Bonn}, isbn = {9783885796794}, issn = {1617-5468}, pages = {1 page}, year = {2018}, abstract = {For this year's workshop on Enterprise Architecture in Research and Practice we have received eight submissions, of which four have passed the rigorous peer review. 
The acceptance rate of 50\% ensures that only advancements in the field are included in our workshop.}, language = {en} } @inproceedings{BensbergAuthCzarneckietal.2018, author = {Bensberg, Frank and Auth, Gunnar and Czarnecki, Christian and W{\"o}rndle, Christopher}, title = {Transforming literature-intensive research processes through text analytics - design, implementation and lessons learned}, editor = {Kemal İlter, H.}, doi = {10.6084/m9.figshare.7582073.v1}, pages = {9 pages}, year = {2018}, abstract = {The continuing growth of scientific publications raises the question of how research processes can be digitalized and thus realized more productively. Especially in information technology fields, research practice is characterized by a rapidly growing volume of publications. For the search process, various information systems exist. However, the analysis of the published content is still a highly manual task. Therefore, we propose a text analytics system that allows a fully digitalized analysis of literature sources. We have realized a prototype by using EBSCO Discovery Service in combination with IBM Watson Explorer and demonstrated the results in real-life research projects. Potential addressees are research institutions, consulting firms, and decision-makers in politics and business practice.}, language = {en} } @inproceedings{BrockmannSultanowCzarnecki2019, author = {Brockmann, Carsten and Sultanow, Eldar and Czarnecki, Christian}, title = {Is enterprise architecture still relevant in the digital age?}, series = {GI Edition Proceedings Band 295 INFORMATIK 2019, Workshop-Beitr{\"a}ge}, booktitle = {GI Edition Proceedings Band 295 INFORMATIK 2019, Workshop-Beitr{\"a}ge}, editor = {Draude, Claude and Lange, Martin and Sick, Bernhard}, publisher = {K{\"o}llen}, address = {Bonn}, isbn = {9783885796893}, issn = {1617-5468}, doi = {10.18420/inf2019_ws01}, pages = {21 -- 21}, year = {2019}, language = {en} } @inproceedings{AuthCzarneckiBensberg2019, author = {Auth, Gunnar and Czarnecki, Christian and Bensberg, Frank}, title = {Impact of robotic process automation on enterprise architectures}, series = {GI Edition Proceedings Band 295 INFORMATIK 2019, Workshop-Beitr{\"a}ge}, booktitle = {GI Edition Proceedings Band 295 INFORMATIK 2019, Workshop-Beitr{\"a}ge}, editor = {Draude, Claude and Lange, Martin and Sick, Bernhard and Gesellschaft f{\"u}r Informatik e.V. (GI),}, publisher = {K{\"o}llen}, address = {Bonn}, isbn = {9783885796893}, issn = {1617-5468}, doi = {10.18420/inf2019_ws05}, pages = {59 -- 65}, year = {2019}, abstract = {The initial idea of Robotic Process Automation (RPA) is the automation of business processes through the presentation layer of existing application systems. For this simple emulation of user input and output by software robots, no changes to the systems and architecture are required. However, considering strategic aspects of aligning business and technology on an enterprise level as well as the growing capabilities of RPA driven by artificial intelligence, interrelations between RPA and Enterprise Architecture (EA) become visible and pose new questions. In this paper we discuss the relationship between RPA and EA in terms of perspectives and implications. 
As work in progress, we focus on identifying new questions and research opportunities related to RPA and EA.}, language = {en} } @inproceedings{RitschelStenzelCzarneckietal.2021, author = {Ritschel, Konstantin and Stenzel, Adina and Czarnecki, Christian and Hong, Chin-Gi}, title = {Realizing robotic process automation potentials: an architectural perspective on a real-life implementation case}, series = {GI Edition Proceedings Band 314 "INFORMATIK 2021" Computer Science \& Sustainability}, booktitle = {GI Edition Proceedings Band 314 "INFORMATIK 2021" Computer Science \& Sustainability}, editor = {Gesellschaft f{\"u}r Informatik e.V. (GI),}, publisher = {K{\"o}llen}, address = {Bonn}, isbn = {9783885797081}, issn = {1617-5468}, pages = {1303 -- 1311}, year = {2021}, abstract = {The initial idea of Robotic Process Automation (RPA) is the automation of business processes through a simple emulation of user input and output by software robots. Hence, it can be assumed that no changes to the software systems used and the existing Enterprise Architecture (EA) are required. In this short, practical paper we discuss this assumption based on a real-life implementation project. We show that a successful RPA implementation might require architectural work during analysis, implementation, and migration. As a practical paper, we focus on exemplary lessons learned and new questions related to RPA and EA.}, language = {en} } @inproceedings{AmirBauckhageChircuetal.2022, author = {Amir, Malik and Bauckhage, Christian and Chircu, Alina and Czarnecki, Christian and Knopf, Christian and Piatkowski, Nico and Sultanow, Eldar}, title = {What can we expect from quantum (digital) twins?}, publisher = {AIS Electronic Library (AISeL)}, pages = {1 -- 14}, year = {2022}, abstract = {Digital twins enable the modeling and simulation of real-world entities (objects, processes or systems), resulting in improvements in the associated value chains. The emerging field of quantum computing holds tremendous promise for evolving this virtualization towards Quantum (Digital) Twins (QDT) and ultimately Quantum Twins (QT). The quantum (digital) twin concept is not a contradiction in terms - but instead describes a hybrid approach that can be implemented using the technologies available today by combining classical computing and digital twin concepts with quantum processing. This paper presents the status quo of research and practice on quantum (digital) twins. It also discusses their potential to create competitive advantage through real-time simulation of highly complex, interconnected entities that helps companies better address changes in their environment and differentiate their products and services.}, language = {en} } @article{SchuellerRuhlDinstuehlerSengeretal.2022, author = {Sch{\"u}ller-Ruhl, Aaron and Dinst{\"u}hler, Leonard and Senger, Thorsten and Bergfeld, Stefan and Ingenhag, Christian and Fleischhaker, Robert}, title = {Direct fabrication of arbitrary phase masks in optical glass via ultra-short pulsed laser writing of refractive index modifications}, series = {Applied Physics B}, volume = {128}, journal = {Applied Physics B}, number = {Article number: 208}, editor = {Mackenzie, Jacob}, publisher = {Springer}, address = {Berlin}, issn = {1432-0649 (Online)}, doi = {10.1007/s00340-022-07928-2}, pages = {1 -- 11}, year = {2022}, abstract = {We study the possibility to fabricate an arbitrary phase mask in a one-step laser-writing process inside the volume of an optical glass substrate. 
We derive the phase mask from a Gerchberg-Saxton-type algorithm as an array and create each individual phase shift using a refractive index modification of variable axial length. We realize the variable axial length by superimposing refractive index modifications induced by an ultra-short pulsed laser at different focusing depths. Each single modification is created by applying 1000 pulses with 15 μJ pulse energy at 100 kHz to a fixed spot of 25 μm diameter and the focus is then shifted axially in steps of 10 μm. With several proof-of-principle examples, we show the feasibility of our method. In particular, we determine the induced refractive index change to be about Δn = 1.5⋅10⁻³. We also determine our current limitations by calculating the overlap in the form of a scalar product and we discuss possible future improvements.}, language = {en} }