@article{CzarneckiWinkelmannSpiliopoulou2010, author = {Czarnecki, Christian and Winkelmann, Axel and Spiliopoulou, Myra}, title = {Services in electronic telecommunication markets: a framework for planning the virtualization of processes}, series = {Electronic Markets}, volume = {20}, journal = {Electronic Markets}, number = {3-4}, publisher = {Springer}, address = {Berlin}, issn = {1422-8890}, doi = {10.1007/s12525-010-0045-8}, pages = {197 -- 207}, year = {2010}, abstract = {The potential of electronic markets in enabling innovative product bundles through flexible and sustainable partnerships is not yet fully exploited in the telecommunication industry. One reason is that bundling requires seamless de-assembling and re-assembling of business processes, whilst processes in telecommunication companies are often product-dependent and hard to virtualize. We propose a framework for the planning of the virtualization of processes, intended to assist the decision maker in prioritizing the processes to be virtualized: (a) we transfer the virtualization pre-requisites stated by the Process Virtualization Theory in the context of customer-oriented processes in the telecommunication industry and assess their importance in this context, (b) we derive IT-oriented requirements for the removal of virtualization barriers and highlight their demand on changes at different levels of the organization. 
We present a first evaluation of our approach in a case study and report on lessons learned and further steps to be performed.}, language = {en} } @article{CzarneckiSpiliopoulou2012, author = {Czarnecki, Christian and Spiliopoulou, Myra}, title = {A holistic framework for the implementation of a next generation network}, series = {International Journal of Business Information Systems}, volume = {9}, journal = {International Journal of Business Information Systems}, number = {4}, publisher = {Inderscience Enterprises}, address = {Olney, Bucks}, issn = {1746-0972}, doi = {10.1504/IJBIS.2012.046291}, pages = {385 -- 401}, year = {2012}, abstract = {As the potential of a next generation network (NGN) is recognised, telecommunication companies consider switching to it. Although the implementation of an NGN seems to be merely a modification of the network infrastructure, it may trigger or require changes in the whole company, because it builds upon the separation between service and transport, a flexible bundling of services to products and the streamlining of the IT infrastructure. We propose a holistic framework, structured into the layers 'strategy', 'processes' and 'information systems' and incorporate into each layer all concepts necessary for the implementation of an NGN, as well as the alignment of these concepts. 
As a first proof-of-concept for our framework we have performed a case study on the introduction of NGN in a large telecommunication company; we show that our framework captures all topics that are affected by an NGN implementation.}, language = {en} } @article{CzarneckiWinkelmannSpiliopoulou2013, author = {Czarnecki, Christian and Winkelmann, Axel and Spiliopoulou, Myra}, title = {Referenzprozessabl{\"a}ufe f{\"u}r Telekommunikationsunternehmen: Eine Erweiterung des eTOM-Modells}, series = {Wirtschaftsinformatik}, volume = {55}, journal = {Wirtschaftsinformatik}, number = {2}, publisher = {Springer Fachmedien}, address = {Wiesbaden}, issn = {1861-8936}, doi = {10.1007/s11576-013-0351-9}, pages = {83 -- 97}, year = {2013}, abstract = {Der Telekommunikationsmarkt erf{\"a}hrt substanzielle Ver{\"a}nderungen. Neue Gesch{\"a}ftsmodelle, innovative Dienstleistungen und Technologien erfordern Reengineering, Transformation und Prozessstandardisierung. Mit der Enhanced Telecom Operation Map (eTOM) bietet das TM Forum ein international anerkanntes de facto Referenz-Prozess-Framework basierend auf spezifischen Anforderungen und Auspr{\"a}gungen der Telekommunikationsindustrie an. Allerdings enth{\"a}lt dieses Referenz-Framework nur eine hierarchische Sammlung von Prozessen auf unterschiedlichen Abstraktionsebenen. Eine Kontrollsicht verstanden als sequenzielle Anordnung von Aktivit{\"a}ten und daraus resultierend ein realer Prozessablauf fehlt ebenso wie eine Ende-zu-Ende-Sicht auf den Kunden. In diesem Artikel erweitern wir das eTOM-Referenzmodell durch Referenzprozessabl{\"a}ufe, in welchen wir das Wissen {\"u}ber Prozesse in Telekommunikationsunternehmen abstrahieren und generalisieren. Durch die Referenzprozessabl{\"a}ufe werden Unternehmen bei dem strukturierten und transparenten (Re-)Design ihrer Prozesse unterst{\"u}tzt. 
Wir demonstrieren die Anwendbarkeit und N{\"u}tzlichkeit unserer Referenzprozessabl{\"a}ufe in zwei Fallstudien und evaluieren diese anhand von Kriterien f{\"u}r die Bewertung von Referenzmodellen. Die Referenzprozessabl{\"a}ufe wurden vom TM Forum in den Standard aufgenommen und als Teil von eTOM Version 9 ver{\"o}ffentlicht. Dar{\"u}ber hinaus diskutieren wir die Komponenten unseres Ansatzes, die auch außerhalb der Telekommunikationsindustrie angewandt werden k{\"o}nnen.}, language = {de} } @article{BensbergAuthCzarnecki2018, author = {Bensberg, Frank and Auth, Gunnar and Czarnecki, Christian}, title = {Einsatz von Text Analytics zur Unterst{\"u}tzung literaturintensiver Forschungsprozesse: Konzeption, Realisierung und Lessons Learned}, series = {Anwendungen und Konzepte der Wirtschaftsinformatik}, volume = {2018}, journal = {Anwendungen und Konzepte der Wirtschaftsinformatik}, number = {8}, publisher = {AKWI}, address = {Luzern}, issn = {2296-4592}, doi = {10.26034/lu.akwi.2018.3221}, pages = {1 -- 6}, year = {2018}, abstract = {Das anhaltende Wachstum wissenschaftlicher Ver{\"o}ffentlichungen wirft die Fragestellung auf, wie Literaturana-lysen im Rahmen von Forschungsprozessen digitalisiert und somit produktiver realisiert werden k{\"o}nnen. Insbesondere in informationstechnischen Fachgebieten ist die Forschungspraxis durch ein rasant wachsendes Publikationsaufkommen gekennzeichnet. Infolgedessen bietet sich der Einsatz von Methoden der Textanalyse (Text Analytics) an, die Textdaten automatisch vorbereiten und verarbeiten k{\"o}nnen. Erkenntnisse entstehen dabei aus Analysen von Wortarten und Subgruppen, Korrelations- sowie Zeitreihenanalysen. Dieser Beitrag stellt die Konzeption und Realisierung eines Prototypen vor, mit dem Anwender bibliographische Daten aus der etablierten Literaturdatenbank EBSCO Discovery Service mithilfe textanalytischer Methoden erschließen k{\"o}nnen. 
Der Prototyp basiert auf dem Analysesystem IBM Watson Explorer, das Hochschulen lizenzkostenfrei zur Verf{\"u}gung steht. Potenzielle Adressaten des Prototypen sind Forschungseinrichtungen, Beratungsunternehmen sowie Entscheidungstr{\"a}ger in Politik und Unternehmenspraxis.}, language = {de} } @article{CzarneckiBensbergAuth2019, author = {Czarnecki, Christian and Bensberg, Frank and Auth, Gunnar}, title = {Die Rolle von Softwarerobotern f{\"u}r die zuk{\"u}nftige Arbeitswelt}, series = {HMD Praxis der Wirtschaftsinformatik}, volume = {56}, journal = {HMD Praxis der Wirtschaftsinformatik}, number = {4}, publisher = {Springer}, address = {Cham}, issn = {2198-2775}, doi = {10.1365/s40702-019-00548-z}, pages = {795 -- 808}, year = {2019}, abstract = {Im Rahmen der Digitalisierung ist die zunehmende Automatisierung von bisher manuellen Prozessschritten ein Aspekt, der massive Auswirkungen auf die zuk{\"u}nftige Arbeitswelt haben wird. In diesem Kontext werden an den Einsatz von Softwarerobotern zur Prozessautomatisierung hohe Erwartungen gekn{\"u}pft. Bei den Implementierungsans{\"a}tzen wird die Diskussion aktuell insbesondere durch Robotic Process Automation (RPA) und Chatbots gepr{\"a}gt. Beide Ans{\"a}tze verfolgen das gemeinsame Ziel einer 1:1-Automatisierung von menschlichen Handlungen und dadurch ein direktes Ersetzen von Mitarbeitern durch Maschinen. Bei RPA werden Prozesse durch Softwareroboter erlernt und automatisiert ausgef{\"u}hrt. Dabei emulieren RPA-Roboter die Eingaben auf der bestehenden Pr{\"a}sentationsschicht, so dass keine {\"A}nderungen an vorhandenen Anwendungssystemen notwendig sind. Am Markt werden bereits unterschiedliche RPA-L{\"o}sungen als Softwareprodukte angeboten. Durch Chatbots werden Ein- und Ausgaben von Anwendungssystemen {\"u}ber nat{\"u}rliche Sprache realisiert. Dadurch ist die Automatisierung von unternehmensexterner Kommunikation (z. B. mit Kunden) aber auch von unternehmensinternen Assistenzt{\"a}tigkeiten m{\"o}glich. 
Der Beitrag diskutiert die Auswirkungen von Softwarerobotern auf die Arbeitswelt anhand von Anwendungsbeispielen und erl{\"a}utert die unternehmensindividuelle Entscheidung {\"u}ber den Einsatz von Softwarerobotern anhand von Effektivit{\"a}ts- und Effizienzzielen.}, language = {de} } @inproceedings{BensbergAuthCzarnecki2018a, author = {Bensberg, Frank and Auth, Gunnar and Czarnecki, Christian}, title = {Unterst{\"u}tzung von Wissenstransferprozessen durch Erschließung von Literaturdatenbanken - ein gestaltungsorientierter Ansatz auf Basis von Text Analytics}, series = {Wissenstransfer in der Wirtschaftsinformatik Fachgespr{\"a}ch im Rahmen der MKWI 2018}, booktitle = {Wissenstransfer in der Wirtschaftsinformatik Fachgespr{\"a}ch im Rahmen der MKWI 2018}, editor = {Hofmann, Georg Rainer and Alm, Wolfgang}, publisher = {IMI-Verlag ; Hochschule Aschaffenburg, Information Management Institut}, address = {Aschaffenburg}, isbn = {9783981844207}, pages = {6 -- 15}, year = {2018}, abstract = {Angesichts des anhaltenden Wachstums wissenschaftlicher Ver{\"o}ffentlichungen werden Instrumente ben{\"o}tigt, um Literaturanalysen durch Digitalisierung produktiver zu gestalten. Dieser Beitrag stellt einen Ansatz vor, der bibliographische Daten aus der Literaturdatenbank EBSCO Discovery Service mithilfe von Text-Analytics-Methoden erschließt. Die L{\"o}sung basiert auf dem Textanalysesystem IBM Watson Explorer und eignet sich f{\"u}r explorative Literaturanalysen, um beispielsweise den Status quo emergierender Technologiefelder in der Literatur zu reflektieren. 
Die generierten Ergebnisse sind in den Kontext der zunehmenden Werkzeugunterst{\"u}tzung des Literaturrechercheprozesses einzuordnen und k{\"o}nnen f{\"u}r intra- sowie interinstitutionelle Wissenstransferprozesse in Forschungs- und Beratungskontexten genutzt werden.}, language = {de} } @article{CzarneckiDietze2020, author = {Czarnecki, Christian and Dietze, Christian}, title = {Gestaltungsorientierte Forschung am Beispiel der Referenzmodellierung in der Telekommunikationsindustrie}, series = {HMD Praxis der Wirtschaftsinformatik}, volume = {57}, journal = {HMD Praxis der Wirtschaftsinformatik}, number = {2}, publisher = {Springer Nature}, address = {Cham}, issn = {2198-2775}, doi = {10.1365/s40702-020-00594-y}, pages = {310 -- 323}, year = {2020}, abstract = {Am Beispiel der Telekommunikationsindustrie zeigt der Beitrag eine konkrete Ausgestaltung anwendungsorientierter Forschung, die sowohl f{\"u}r die Praxis als auch f{\"u}r die Wissenschaft nutzen- und erkenntnisbringend ist. Forschungsgegenstand sind die Referenzmodelle des Industriegremiums TM Forum, die von vielen Telekommunikationsunternehmen zur Transformation ihrer Strukturen und Systeme genutzt werden. Es wird die langj{\"a}hrige Forschungst{\"a}tigkeit bei der Weiterentwicklung und Anwendung dieser Referenzmodelle beschrieben. Dabei wird ein konsequent gestaltungsorientierter Forschungsansatz verfolgt. Das Zusammenspiel aus kontinuierlicher Weiterentwicklung in Zusammenarbeit mit einem Industriegremium und der Anwendung in vielf{\"a}ltigen Praxisprojekten f{\"u}hrt zu einer erfolgreichen Symbiose aus praktischer Nutzengenerierung sowie wissenschaftlichem Erkenntnisgewinn. Der Beitrag stellt den gew{\"a}hlten Forschungsansatz anhand konkreter Beispiele vor. 
Darauf basierend werden Empfehlungen und Herausforderungen f{\"u}r eine gestaltungs- und praxisorientierte Forschung diskutiert.}, language = {de} } @article{BurgerRumpfDoetal.2021, author = {Burger, Ren{\'e} and Rumpf, Jessica and Do, Xuan Tung and Monakhova, Yulia and Diehl, Bernd W. K. and Rehahn, Matthias and Schulze, Margit}, title = {Is NMR combined with multivariate regression applicable for the molecular weight determination of randomly cross-linked polymers such as lignin?}, series = {ACS Omega}, volume = {6}, journal = {ACS Omega}, number = {44}, publisher = {ACS Publications}, address = {Washington, DC}, issn = {2470-1343}, doi = {10.1021/acsomega.1c03574}, pages = {29516 -- 29524}, year = {2021}, abstract = {The molecular weight properties of lignins are one of the key elements that need to be analyzed for a successful industrial application of these promising biopolymers. In this study, the use of 1H NMR as well as diffusion-ordered spectroscopy (DOSY NMR), combined with multivariate regression methods, was investigated for the determination of the molecular weight (Mw and Mn) and the polydispersity of organosolv lignins (n = 53, Miscanthus x giganteus, Paulownia tomentosa, and Silphium perfoliatum). The suitability of the models was demonstrated by cross validation (CV) as well as by an independent validation set of samples from different biomass origins (beech wood and wheat straw). CV errors of ca. 7-9 and 14-16\% were achieved for all parameters with the models from the 1H NMR spectra and the DOSY NMR data, respectively. The prediction errors for the validation samples were in a similar range for the partial least squares model from the 1H NMR data and for a multiple linear regression using the DOSY NMR data. 
The results indicate the usefulness of NMR measurements combined with multivariate regression methods as a potential alternative to more time-consuming methods such as gel permeation chromatography.}, language = {en} } @article{MonakhovaDiehl2021, author = {Monakhova, Yulia and Diehl, Bernd W. K.}, title = {Simplification of NMR Workflows by Standardization Using 2H Integral of Deuterated Solvent as Applied to Aloe vera Preparations}, series = {Applied Magnetic Resonance}, volume = {52}, journal = {Applied Magnetic Resonance}, number = {11}, publisher = {Springer}, address = {Cham}, issn = {1613-7507}, doi = {10.1007/s00723-021-01393-4}, pages = {1591 -- 1600}, year = {2021}, abstract = {In this study, a recently proposed NMR standardization approach by 2H integral of deuterated solvent for quantitative multicomponent analysis of complex mixtures is presented. As a proof of principle, the existing NMR routine for the analysis of Aloe vera products was modified. Instead of using absolute integrals of targeted compounds and internal standard (nicotinamide) from 1H-NMR spectra, quantification was performed based on the ratio of a particular 1H-NMR compound integral and 2H-NMR signal of deuterated solvent D2O. Validation characteristics (linearity, repeatability, accuracy) were evaluated and the results showed that the method has the same precision as internal standardization in case of multicomponent screening. Moreover, a dehydration process by freeze drying is not necessary for the new routine. Now, our NMR profiling of A. vera products needs only limited sample preparation and data processing. The new standardization methodology provides an appealing alternative for multicomponent NMR screening. 
In general, this novel approach, using standardization by 2H integral, benefits from reduced sample preparation steps and uncertainties, and is recommended in different application areas (purity determination, forensics, pharmaceutical analysis, etc.).}, language = {en} } @article{BurmistrovaSobolevaMonakhova2021, author = {Burmistrova, Natalia A. and Soboleva, Polina M. and Monakhova, Yulia}, title = {Is infrared spectroscopy combined with multivariate analysis a promising tool for heparin authentication?}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {194}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 113811}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0731-7085}, doi = {10.1016/j.jpba.2020.113811}, year = {2021}, abstract = {The investigation of the possibility to determine various characteristics of powder heparin (n = 115) was carried out with infrared spectroscopy. The evaluation of heparin samples included several parameters such as purity grade, distributing company, animal source as well as heparin species (i.e. Na-heparin, Ca-heparin, and heparinoids). Multivariate analysis using principal component analysis (PCA), soft independent modelling of class analogy (SIMCA), and partial least squares - discriminant analysis (PLS-DA) were applied for the modelling of spectral data. Different pre-processing methods were applied to IR spectral data; multiplicative scatter correction (MSC) was chosen as the most relevant. Obtained results were confirmed by nuclear magnetic resonance (NMR) spectroscopy. Good predictive ability of this approach demonstrates the potential of IR spectroscopy and chemometrics for screening of heparin quality. 
This approach, however, is designed as a screening tool and is not considered as a replacement for either of the methods required by USP and FDA.}, language = {en} } @article{MonakhovaDiehl2021a, author = {Monakhova, Yulia and Diehl, Bernd W.K.}, title = {Novel approach of qNMR workflow by standardization using 2H integral: Application to any intrinsic calibration standard}, series = {Talanta}, volume = {222}, journal = {Talanta}, number = {Article number: 121504}, publisher = {Elsevier}, issn = {0039-9140}, doi = {10.1016/j.talanta.2020.121504}, year = {2021}, abstract = {Quantitative nuclear magnetic resonance (qNMR) is routinely performed by the internal or external standardization. The manuscript describes a simple alternative to these common workflows by using NMR signal of another active nuclei of calibration compound. For example, for any arbitrary compound quantification by NMR can be based on the use of an indirect concentration referencing that relies on a solvent having both 1H and 2H signals. To perform high-quality quantification, the deuteration level of the utilized deuterated solvent has to be estimated. In this contribution the new method was applied to the determination of deuteration levels in different deuterated solvents (MeOD, ACN, CDCl3, acetone, benzene, DMSO-d6). Isopropanol-d6, which contains a defined number of deuterons and protons, was used for standardization. Validation characteristics (precision, accuracy, robustness) were calculated and the results showed that the method can be used in routine practice. Uncertainty budget was also evaluated. In general, this novel approach, using standardization by 2H integral, benefits from reduced sample preparation steps and uncertainties, and can be applied in different application areas (purity determination, forensics, pharmaceutical analysis, etc.).}, language = {en} } @article{MonakhovaSobolevaFedotovaetal.2022, author = {Monakhova, Yulia and Soboleva, Polina M. and Fedotova, Elena S. 
and Musina, Kristina T. and Burmistrova, Natalia A.}, title = {Quantum chemical calculations of IR spectra of heparin disaccharide subunits}, series = {Computational and Theoretical Chemistry}, volume = {1217}, journal = {Computational and Theoretical Chemistry}, number = {Article number: 113891}, publisher = {Elsevier}, address = {New York, NY}, issn = {2210-271X}, doi = {10.1016/j.comptc.2022.113891}, year = {2022}, abstract = {Heparin is a natural polysaccharide, which plays essential role in many biological processes. Alterations in building blocks can modify biological roles of commercial heparin products, due to significant changes in the conformation of the polymer chain. The variability structure of heparin leads to difficulty in quality control using different analytical methods, including infrared (IR) spectroscopy. In this paper molecular modelling of heparin disaccharide subunits was performed using quantum chemistry. The structural and spectral parameters of these disaccharides have been calculated using RHF/6-311G. In addition, over-sulphated chondroitin sulphate disaccharide was studied as one of the most widespread contaminants of heparin. Calculated IR spectra were analyzed with respect to specific structure parameters. IR spectroscopic fingerprint was found to be sensitive to substitution pattern of disaccharide subunits. Vibrational assignments of calculated spectra were correlated with experimental IR spectral bands of native heparin. Chemometrics was used to perform multivariate analysis of simulated spectral data.}, language = {en} } @article{BurgerLindnerRumpfetal.2022, author = {Burger, Ren{\'e} and Lindner, Simon and Rumpf, Jessica and Do, Xuan Tung and Diehl, Bernd W.K. 
and Rehahn, Matthias and Monakhova, Yulia and Schulze, Margit}, title = {Benchtop versus high field NMR: Comparable performance found for the molecular weight determination of lignin}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {212}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 114649}, publisher = {Elsevier}, address = {New York, NY}, issn = {0731-7085}, doi = {10.1016/j.jpba.2022.114649}, year = {2022}, abstract = {Lignin is a promising renewable biopolymer being investigated worldwide as an environmentally benign substitute of fossil-based aromatic compounds, e.g. for the use as an excipient with antioxidant and antimicrobial properties in drug delivery or even as active compound. For its successful implementation into process streams, a quick, easy, and reliable method is needed for its molecular weight determination. Here we present a method using 1H spectra of benchtop as well as conventional NMR systems in combination with multivariate data analysis, to determine lignin's molecular weight (Mw and Mn) and polydispersity index (PDI). A set of 36 organosolv lignin samples (from Miscanthus x giganteus, Paulownia tomentosa and Silphium perfoliatum) was used for the calibration and cross validation, and 17 samples were used as external validation set. Validation errors between 5.6\% and 12.9\% were achieved for all parameters on all NMR devices (43, 60, 500 and 600 MHz). Surprisingly, no significant difference in the performance of the benchtop and high-field devices was found. 
This facilitates the application of this method for determining lignin's molecular weight in an industrial environment because of the low maintenance expenditure, small footprint, ruggedness, and low cost of permanent magnet benchtop NMR systems.}, language = {en} } @article{MonakhovaDiehl2022, author = {Monakhova, Yulia and Diehl, Bernd W.K.}, title = {Multinuclear NMR screening of pharmaceuticals using standardization by 2H integral of a deuterated solvent}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {209}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 114530}, publisher = {Elsevier}, issn = {0731-7085}, doi = {10.1016/j.jpba.2021.114530}, year = {2022}, abstract = {NMR standardization approach that uses the 2H integral of deuterated solvent for quantitative multinuclear analysis of pharmaceuticals is described. As a proof of principle, the existing NMR procedure for the analysis of heparin products according to US Pharmacopeia monograph is extended to the determination of Na+ and Cl- content in this matrix. Quantification is performed based on the ratio of a 23Na (35Cl) NMR integral and 2H NMR signal of deuterated solvent, D2O, acquired using the specific spectrometer hardware. As an alternative, the possibility of 133Cs standardization using the addition of Cs2CO3 stock solution is shown. Validation characteristics (linearity, repeatability, sensitivity) are evaluated. A holistic NMR profiling of heparin products can now also be used for the quantitative determination of inorganic compounds in a single analytical run using a single sample. In general, the new standardization methodology provides an appealing alternative for the NMR screening of inorganic and organic components in pharmaceutical products.}, language = {en} } @article{LindnerBurgerRutledgeetal.2022, author = {Lindner, Simon and Burger, Ren{\'e} and Rutledge, Douglas N. and Do, Xuan Tung and Rumpf, Jessica and Diehl, Bernd W. 
K. and Schulze, Margit and Monakhova, Yulia}, title = {Is the calibration transfer of multivariate calibration models between high- and low-field NMR instruments possible? A case study of lignin molecular weight}, series = {Analytical chemistry}, volume = {94}, journal = {Analytical chemistry}, number = {9}, publisher = {ACS Publications}, address = {Washington, DC}, issn = {1520-6882}, doi = {10.1021/acs.analchem.1c05125}, pages = {3997 -- 4004}, year = {2022}, abstract = {Although several successful applications of benchtop nuclear magnetic resonance (NMR) spectroscopy in quantitative mixture analysis exist, the possibility of calibration transfer remains mostly unexplored, especially between high- and low-field NMR. This study investigates for the first time the calibration transfer of partial least squares regressions [weight average molecular weight (Mw) of lignin] between high-field (600 MHz) NMR and benchtop NMR devices (43 and 60 MHz). For the transfer, piecewise direct standardization, calibration transfer based on canonical correlation analysis, and transfer via the extreme learning machine auto-encoder method are employed. Despite the immense resolution difference between high-field and low-field NMR instruments, the results demonstrate that the calibration transfer from high- to low-field is feasible in the case of a physical property, namely, the molecular weight, achieving validation errors close to the original calibration (down to only 1.2 times higher root mean square errors). 
These results introduce new perspectives for applications of benchtop NMR, in which existing calibrations from expensive high-field instruments can be transferred to cheaper benchtop instruments to economize.}, language = {en} } @inproceedings{BrockmannSultanowCzarnecki2018, author = {Brockmann, Carsten and Sultanow, Eldar and Czarnecki, Christian}, title = {Enterprise architectures between agility and traditional methodologies}, series = {GI Edition Proceedings Band 285 Workshops der INFORMATIK 2018}, booktitle = {GI Edition Proceedings Band 285 Workshops der INFORMATIK 2018}, editor = {Czarnecki, Christian and Brockmann, Carsten and Sultanow, Eldar and Koschmider, Agnes and Selzer, Annika and {Gesellschaft f{\"u}r Informatik e. V.}}, publisher = {K{\"o}llen}, address = {Bonn}, isbn = {9783885796794}, issn = {1617-5468}, pages = {1 Seite}, year = {2018}, abstract = {For this year's workshop on Enterprise Architecture in Research and Practice we have received eight submissions from which four have passed the rigorous peer-review. The acceptance quote of 50\% assures that only advancements in the field are included in our workshop.}, language = {en} } @inproceedings{BensbergAuthCzarneckietal.2018, author = {Bensberg, Frank and Auth, Gunnar and Czarnecki, Christian and W{\"o}rndle, Christopher}, title = {Transforming literature-intensive research processes through text analytics - design, implementation and lessons learned}, editor = {Kemal {\.I}lter, H.}, doi = {10.6084/m9.figshare.7582073.v1}, pages = {9 Seiten}, year = {2018}, abstract = {The continuing growth of scientific publications raises the question how research processes can be digitalized and thus realized more productively. Especially in information technology fields, research practice is characterized by a rapidly growing volume of publications. For the search process various information systems exist. However, the analysis of the published content is still a highly manual task. 
Therefore, we propose a text analytics system that allows a fully digitalized analysis of literature sources. We have realized a prototype by using EBSCO Discovery Service in combination with IBM Watson Explorer and demonstrated the results in real-life research projects. Potential addressees are research institutions, consulting firms, and decision-makers in politics and business practice.}, language = {en} } @inproceedings{BrockmannSultanowCzarnecki2019, author = {Brockmann, Carsten and Sultanow, Eldar and Czarnecki, Christian}, title = {Is enterprise architecture still relevant in the digital age?}, series = {GI Edition Proceedings Band 295 INFORMATIK 2019, Workshop-Beitr{\"a}ge}, booktitle = {GI Edition Proceedings Band 295 INFORMATIK 2019, Workshop-Beitr{\"a}ge}, editor = {Draude, Claude and Lange, Martin and Sick, Bernhard}, publisher = {K{\"o}llen}, address = {Bonn}, isbn = {9783885796893}, issn = {1617-5468}, doi = {10.18420/inf2019_ws01}, pages = {21 -- 21}, year = {2019}, language = {en} } @inproceedings{AuthCzarneckiBensberg2019, author = {Auth, Gunnar and Czarnecki, Christian and Bensberg, Frank}, title = {Impact of robotic process automation on enterprise architectures}, series = {GI Edition Proceedings Band 295 INFORMATIK 2019, Workshop-Beitr{\"a}ge}, booktitle = {GI Edition Proceedings Band 295 INFORMATIK 2019, Workshop-Beitr{\"a}ge}, editor = {Draude, Claude and Lange, Martin and Sick, Bernhard and {Gesellschaft f{\"u}r Informatik e.V. (GI)}}, publisher = {K{\"o}llen}, address = {Bonn}, isbn = {9783885796893}, issn = {1617-5468}, doi = {10.18420/inf2019_ws05}, pages = {59 -- 65}, year = {2019}, abstract = {The initial idea of Robotic Process Automation (RPA) is the automation of business processes through the presentation layer of existing application systems. For this simple emulation of user input and output by software robots, no changes of the systems and architecture is required. 
However, considering strategic aspects of aligning business and technology on an enterprise level as well as the growing capabilities of RPA driven by artificial intelligence, interrelations between RPA and Enterprise Architecture (EA) become visible and pose new questions. In this paper we discuss the relationship between RPA and EA in terms of perspectives and implications. As work-in-progress we focus on identifying new questions and research opportunities related to RPA and EA.}, language = {en} } @inproceedings{RitschelStenzelCzarneckietal.2021, author = {Ritschel, Konstantin and Stenzel, Adina and Czarnecki, Christian and Hong, Chin-Gi}, title = {Realizing robotic process automation potentials: an architectural perspective on a real-life implementation case}, series = {GI Edition Proceedings Band 314 "INFORMATIK 2021" Computer Science \& Sustainability}, booktitle = {GI Edition Proceedings Band 314 "INFORMATIK 2021" Computer Science \& Sustainability}, editor = {{Gesellschaft f{\"u}r Informatik e.V. (GI)}}, publisher = {K{\"o}llen}, address = {Bonn}, isbn = {9783885797081}, issn = {1617-5468}, doi = {10.18420/informatik2021-108}, pages = {1303 -- 1311}, year = {2021}, abstract = {The initial idea of Robotic Process Automation (RPA) is the automation of business processes through a simple emulation of user input and output by software robots. Hence, it can be assumed that no changes of the used software systems and existing Enterprise Architecture (EA) is required. In this short, practical paper we discuss this assumption based on a real-life implementation project. We show that a successful RPA implementation might require architectural work during analysis, implementation, and migration. As practical paper we focus on exemplary lessons-learned and new questions related to RPA and EA.}, language = {en} }