@article{KarschuckKaulenPoghossianetal.2021, author = {Karschuck, Tobias and Kaulen, Corinna and Poghossian, Arshak and Wagner, Patrick H. and Sch{\"o}ning, Michael Josef}, title = {Gold nanoparticle-modified capacitive field-effect sensors: Studying the surface density of nanoparticles and coupling of charged polyelectrolyte macromolecules}, series = {Electrochemical Science Advances}, volume = {2}, journal = {Electrochemical Science Advances}, number = {5}, publisher = {Wiley-VCH}, address = {Weinheim}, issn = {0938-5193}, doi = {10.1002/elsa.202100179}, pages = {10 Seiten}, year = {2021}, abstract = {The coupling of ligand-stabilized gold nanoparticles with field-effect devices offers new possibilities for label-free biosensing. In this work, we study the immobilization of aminooctanethiol-stabilized gold nanoparticles (AuAOTs) on the silicon dioxide surface of a capacitive field-effect sensor. The terminal amino group of the AuAOT is well suited for the functionalization with biomolecules. The attachment of the positively-charged AuAOTs on a capacitive field-effect sensor was detected by direct electrical readout using capacitance-voltage and constant capacitance measurements. With a higher particle density on the sensor surface, the measured signal change was correspondingly more pronounced. The results demonstrate the ability of capacitive field-effect sensors for the non-destructive quantitative validation of nanoparticle immobilization. In addition, the electrostatic binding of the polyanion polystyrene sulfonate to the AuAOT-modified sensor surface was studied as a model system for the label-free detection of charged macromolecules. 
Most likely, this approach can be transferred to the label-free detection of other charged molecules such as enzymes or antibodies.}, language = {en} } @article{ElDeibButenwegKlinkel2021, author = {El-Deib, Khaled and Butenweg, Christoph and Klinkel, Sven}, title = {Erdbebennachweis von Mauerwerksbauten mit realistischen Modellen und erh{\"o}hten Verhaltensbeiwerten}, series = {Mauerwerk}, volume = {2021}, journal = {Mauerwerk}, number = {3}, editor = {Jesse, Dirk}, publisher = {Wiley}, address = {Weinheim}, issn = {1437-1022}, doi = {10.1002/dama.202110014}, pages = {110 -- 119}, year = {2021}, abstract = {Die Anwendung des linearen Nachweiskonzepts auf Mauerwerksbauten f{\"u}hrt dazu, dass bereits heute Standsicherheitsnachweise f{\"u}r Geb{\"a}ude mit {\"u}blichen Grundrissen in Gebieten mit moderaten Erdbebeneinwirkungen nicht mehr gef{\"u}hrt werden k{\"o}nnen. Diese Problematik wird sich in Deutschland mit der Einf{\"u}hrung kontinuierlicher probabilistischer Erdbebenkarten weiter versch{\"a}rfen. Aufgrund der Erh{\"o}hung der seismischen Einwirkungen, die sich vielerorts ergibt, ist es erforderlich, die vorhandenen, bislang nicht ber{\"u}cksichtigten Tragf{\"a}higkeitsreserven in nachvollziehbaren Nachweiskonzepten in der Baupraxis verf{\"u}gbar zu machen. Der vorliegende Beitrag stellt ein Konzept f{\"u}r die geb{\"a}udespezifische Ermittlung von erh{\"o}hten Verhaltensbeiwerten vor. Die Verhaltensbeiwerte setzen sich aus drei Anteilen zusammen, mit denen die Lastumverteilung im Grundriss, die Verformungsf{\"a}higkeit und Energiedissipation sowie die {\"U}berfestigkeiten ber{\"u}cksichtigt werden. F{\"u}r die rechnerische Ermittlung dieser drei Anteile wird ein nichtlineares Nachweiskonzept auf Grundlage von Pushover-Analysen vorgeschlagen, in denen die Interaktionen von W{\"a}nden und Geschossdecken durch einen Einspanngrad beschrieben werden. 
F{\"u}r die Bestimmung der Einspanngrade wird ein nichtlinearer Modellierungsansatz eingef{\"u}hrt, mit dem die Interaktion von W{\"a}nden und Decken abgebildet werden kann. Die Anwendung des Konzepts mit erh{\"o}hten geb{\"a}udespezifischen Verhaltensbeiwerten wird am Beispiel eines Mehrfamilienhauses aus Kalksandsteinen demonstriert. Die Ergebnisse der linearen Nachweise mit erh{\"o}hten Verhaltensbeiwerten f{\"u}r dieses Geb{\"a}ude liegen deutlich n{\"a}her an den Ergebnissen nichtlinearer Nachweise und somit bleiben {\"u}bliche Grundrisse in Erdbebengebieten mit den traditionellen linearen Rechenans{\"a}tzen nachweisbar.}, language = {de} } @article{ButenwegKubalskiElDeibetal.2021, author = {Butenweg, Christoph and Kubalski, Thomas and El-Deib, Khaled and Gellert, Christoph}, title = {Erdbebennachweis von Mauerwerksbauten nach DIN EN 1998-1/NA-2021}, series = {Bautechnik : Zeitschrift f{\"u}r den gesamten Ingenieurbau}, volume = {98}, journal = {Bautechnik : Zeitschrift f{\"u}r den gesamten Ingenieurbau}, number = {11}, editor = {Jesse, Dirk}, publisher = {Ernst \& Sohn}, address = {Berlin}, issn = {1437-0999}, doi = {10.1002/bate.202100064}, pages = {852 -- 863}, year = {2021}, abstract = {Mauerwerksbauten in Deutschland sind mit Einf{\"u}hrung des nationalen Anwendungsdokuments DIN EN 1998-1/NA auf Grundlage einer neuen probabilistischen Erdbebenkarte nachzuweisen. F{\"u}r erfolgreiche Erdbebennachweise {\"u}blicher Grundrissformen von Mauerwerksbauten stehen in dem zuk{\"u}nftigen Anwendungsdokument neue rechnerische Nachweism{\"o}glichkeiten zur Verf{\"u}gung, mit denen die Tragf{\"a}higkeitsreserven von Mauerwerksbauten in der Baupraxis mit einem {\"u}berschaubaren Aufwand besser in Ansatz gebracht werden k{\"o}nnen. Das Standardrechenverfahren ist weiterhin der kraftbasierte Nachweis, der nun mit h{\"o}heren Verhaltensbeiwerten im Vergleich zur DIN 4149 durchgef{\"u}hrt werden kann. 
Die h{\"o}heren Verhaltensbeiwerte basieren auf der besseren Ausnutzung der geb{\"a}udespezifischen Verformungsf{\"a}higkeit und Energiedissipation sowie der Lastumverteilung der Schubkr{\"a}fte im Grundriss mit Ansatz von Rahmentragwirkung durch Wand-Deckeninteraktionen. Alternativ dazu kann ein nichtlinearer Nachweis auf Grundlage von Pushover-Analysen zur Anwendung kommen. Vervollst{\"a}ndigt werden die Regelungen f{\"u}r Mauerwerksbauten durch neue Regelungen f{\"u}r nichttragende Innenw{\"a}nde und Außenmauerschalen. Der vorliegende Beitrag stellt die Grundlagen und Hintergr{\"u}nde der neuen rechnerischen Nachweise in DIN EN 1998-1/NA vor und demonstriert deren Anwendung an einem Beispiel aus der Praxis.}, language = {de} } @article{ButenwegBursiPaolaccietal.2021, author = {Butenweg, Christoph and Bursi, Oreste S. and Paolacci, Fabrizio and Marinković, Marko and Lanese, Igor and Nardin, Chiara and Quinci, Gianluca}, title = {Seismic performance of an industrial multi-storey frame structure with process equipment subjected to shake table testing}, series = {Engineering Structures}, volume = {243}, journal = {Engineering Structures}, number = {15}, editor = {Yang, J.}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0141-0296}, doi = {10.1016/j.engstruct.2021.112681}, year = {2021}, abstract = {Past earthquakes demonstrated the high vulnerability of industrial facilities equipped with complex process technologies leading to serious damage of process equipment and multiple and simultaneous release of hazardous substances. Nonetheless, current standards for seismic design of industrial facilities are considered inadequate to guarantee proper safety conditions against exceptional events entailing loss of containment and related consequences. On these premises, the SPIF project -Seismic Performance of Multi-Component Systems in Special Risk Industrial Facilities- was proposed within the framework of the European H2020 SERA funding scheme. 
In detail, the objective of the SPIF project is the investigation of the seismic behaviour of a representative industrial multi-storey frame structure equipped with complex process components by means of shaking table tests. Along this main vein and in a performance-based design perspective, the issues investigated in depth are the interaction between a primary moment resisting frame (MRF) steel structure and secondary process components that influence the performance of the whole system; and a proper check of floor spectra predictions. The evaluation of experimental data clearly shows a favourable performance of the MRF structure, some weaknesses of local details due to the interaction between floor crossbeams and process components and, finally, the overconservatism of current design standards w.r.t. floor spectra predictions.}, language = {en} } @article{Gaigall2021, author = {Gaigall, Daniel}, title = {Test for Changes in the Modeled Solvency Capital Requirement of an Internal Risk Model}, series = {ASTIN Bulletin}, volume = {51}, journal = {ASTIN Bulletin}, number = {3}, publisher = {Cambridge Univ. Press}, address = {Cambridge}, issn = {1783-1350}, doi = {10.1017/asb.2021.20}, pages = {813 -- 837}, year = {2021}, abstract = {In the context of the Solvency II directive, the operation of an internal risk model is a possible way for risk assessment and for the determination of the solvency capital requirement of an insurance company in the European Union. A Monte Carlo procedure is customary to generate a model output. To be compliant with the directive, validation of the internal risk model is conducted on the basis of the model output. For this purpose, we suggest a new test for checking whether there is a significant change in the modeled solvency capital requirement. Asymptotic properties of the test statistic are investigated and a bootstrap approximation is justified. 
A simulation study investigates the performance of the test in the finite sample case and confirms the theoretical results. The internal risk model and the application of the test is illustrated in a simplified example. The method has more general usage for inference of a broad class of law-invariant and coherent risk measures on the basis of a paired sample.}, language = {en} } @inproceedings{FiedlerGottschlichMuellerMelcher2021, author = {Fiedler, Gerda and Gottschlich-M{\"u}ller, Birgit and Melcher, Karin}, title = {Online-Pr{\"u}fungen mit STACK Aufgaben}, series = {Tagungsband ASIM Workshop STS/GMMS/EDU 2021}, booktitle = {Tagungsband ASIM Workshop STS/GMMS/EDU 2021}, editor = {Liu-Henke, Xiaobo and Durak, Umut}, publisher = {ARGESIM Verlag}, address = {Wien}, isbn = {978-3-901608-69-8}, doi = {10.11128/arep.45}, pages = {6 Seiten}, year = {2021}, abstract = {Wir stellen hier exemplarisch STACK Aufgaben vor, die frei von der Problematik sind, welche sich durch diverse Kommunikationswege und (webbasierte) Computer Algebra Systeme (CAS) ergibt. 
Daher sind sie insbesondere f{\"u}r eine Open-Book Online Pr{\"u}fung geeignet, da eine faire Pr{\"u}fungssituation gew{\"a}hrleistet werden kann.}, language = {de} } @incollection{SchneiderWisselinkNoelleetal.2021, author = {Schneider, Dominik and Wisselink, Frank and N{\"o}lle, Nikolai and Czarnecki, Christian}, title = {Einfluss von K{\"u}nstlicher Intelligenz auf Customer Journeys am Beispiel von intelligentem Parken}, series = {K{\"u}nstliche Intelligenz in der Anwendung : Rechtliche Aspekte, Anwendungspotenziale und Einsatzszenarien}, booktitle = {K{\"u}nstliche Intelligenz in der Anwendung : Rechtliche Aspekte, Anwendungspotenziale und Einsatzszenarien}, editor = {Barton, Thomas and M{\"u}ller, Christian}, publisher = {Springer Vieweg}, address = {Wiesbaden}, isbn = {978-3-658-30935-0 (Print)}, doi = {10.1007/978-3-658-30936-7_7}, pages = {99 -- 122}, year = {2021}, abstract = {Im Konsumentenmarkt entstehen vermehrt neue Anwendungen von K{\"u}nstlicher Intelligenz (KI). Zunehmend dr{\"a}ngen auch Ger{\"a}te und Dienste in den Markt, die eigenst{\"a}ndig {\"u}ber das Internet kommunizieren. Dadurch k{\"o}nnen diese Ger{\"a}te und Dienste mit neuartigen KI-basierten Diensten verbessert werden. Solche Dienste k{\"o}nnen die Art und Weise beeinflussen, wie Kunden kommerzielle Entscheidungen treffen und somit das Kundenerlebnis maßgeblich ver{\"a}ndern. Der Einfluss von KI auf kommerzielle Interaktionen wurde bisher noch nicht umfassend untersucht. Basierend auf einem Framework, welches einen ersten {\"U}berblick {\"u}ber die Effekte von KI auf kommerzielle Interaktionen gibt, wird in diesem Kapitel der Einfluss von KI auf Customer Journeys am konkreten Anwendungsfall des intelligenten Parkens analysiert. 
Die daraus gewonnenen Erkenntnisse k{\"o}nnen in der Praxis als Grundlage genutzt werden, um das Potenzial von KI zu verstehen und bei der Gestaltung eigener Customer Journeys umzusetzen.}, language = {de} } @incollection{CroonCzarnecki2021, author = {Croon, Philipp and Czarnecki, Christian}, title = {Liability for loss or damages caused by RPA}, series = {Robotic process automation : Management, technology, applications}, booktitle = {Robotic process automation : Management, technology, applications}, editor = {Czarnecki, Christian and Fettke, Peter}, publisher = {De Gruyter}, address = {Oldenbourg}, isbn = {9783110676778}, doi = {10.1515/9783110676693-202}, pages = {135 -- 151}, year = {2021}, abstract = {Intelligent autonomous software robots replacing human activities and performing administrative processes are reality in today's corporate world. This includes, for example, decisions about invoice payments, identification of customers for a marketing campaign, and answering customer complaints. What happens if such a software robot causes a damage? Due to the complete absence of human activities, the question is not trivial. It could even happen that no one is liable for a damage towards a third party, which could create an uncalculatable legal risk for business partners. Furthermore, the implementation and operation of those software robots involves various stakeholders, which result in the unsolvable endeavor of identifying the originator of a damage. Overall it is advisable to all involved parties to carefully consider the legal situation. This chapter discusses the liability of software robots from an interdisciplinary perspective. 
Based on different technical scenarios the legal aspects of liability are discussed.}, language = {en} } @incollection{BensbergAuthCzarnecki2021, author = {Bensberg, Frank and Auth, Gunnar and Czarnecki, Christian}, title = {Finding the perfect RPA match : a criteria-based selection method for RPA solutions}, series = {Robotic process automation : Management, technology, applications}, booktitle = {Robotic process automation : Management, technology, applications}, editor = {Czarnecki, Christian and Fettke, Peter}, publisher = {De Gruyter}, address = {Oldenbourg}, isbn = {978-3-11-067677-8}, doi = {10.1515/9783110676693-201}, pages = {47 -- 75}, year = {2021}, abstract = {The benefits of robotic process automation (RPA) are highly related to the usage of commercial off-the-shelf (COTS) software products that can be easily implemented and customized by business units. But, how to find the best fitting RPA product for a specific situation that creates the expected benefits? This question is related to the general area of software evaluation and selection. In the face of more than 75 RPA products currently on the market, guidance considering those specifics is required. Therefore, this chapter proposes a criteria-based selection method specifically for RPA. The method includes a quantitative evaluation of costs and benefits as well as a qualitative utility analysis based on functional criteria. By using the visualization of financial implications (VOFI) method, an application-oriented structure is provided that opposes the total cost of ownership to the time savings times salary (TSTS). For the utility analysis a detailed list of functional criteria for RPA is offered. The whole method is based on a multi-vocal review of scientific and non-scholarly literature including publications by business practitioners, consultants, and vendors. The application of the method is illustrated by a concrete RPA example. 
The illustrated structures, templates, and criteria can be directly utilized by practitioners in their real-life RPA implementations. In addition, a normative decision process for selecting RPA alternatives is proposed before the chapter closes with a discussion and outlook.}, language = {en} } @incollection{CzarneckiFettke2021, author = {Czarnecki, Christian and Fettke, Peter}, title = {Robotic process automation : Positioning, structuring, and framing the work}, series = {Robotic process automation : Management, technology, applications}, booktitle = {Robotic process automation : Management, technology, applications}, editor = {Czarnecki, Christian and Fettke, Peter}, publisher = {De Gruyter}, address = {Oldenbourg}, isbn = {978-3-11-067668-6 (Print)}, doi = {10.1515/9783110676693-202}, pages = {3 -- 24}, year = {2021}, abstract = {Robotic process automation (RPA) has attracted increasing attention in research and practice. This chapter positions, structures, and frames the topic as an introduction to this book. RPA is understood as a broad concept that comprises a variety of concrete solutions. From a management perspective RPA offers an innovative approach for realizing automation potentials, whereas from a technical perspective the implementation based on software products and the impact of artificial intelligence (AI) and machine learning (ML) are relevant. RPA is industry-independent and can be used, for example, in finance, telecommunications, and the public sector. With respect to RPA this chapter discusses definitions, related approaches, a structuring framework, a research framework, and an inside as well as outside architectural view. 
Furthermore, it provides an overview of the book combined with short summaries of each chapter.}, language = {en} } @incollection{CzarneckiHongSchmitzetal.2021, author = {Czarnecki, Christian and Hong, Chin-Gi and Schmitz, Manfred and Dietze, Christian}, title = {Enabling digital transformation through cognitive robotic process automation at Deutsche Telekom Services Europe}, series = {Digitalization Cases Vol. 2 : Mastering digital transformation for global business}, booktitle = {Digitalization Cases Vol. 2 : Mastering digital transformation for global business}, editor = {Urbach, Nils and R{\"o}glinger, Maximilian and Kautz, Karlheinz and Alias, Rose Alinda and Saunders, Carol and Wiener, Martin}, publisher = {Springer}, address = {Cham}, isbn = {978-3-030-80002-4 (Print)}, doi = {10.1007/978-3-030-80003-1}, pages = {123 -- 138}, year = {2021}, abstract = {Subject of this case is Deutsche Telekom Services Europe (DTSE), a service center for administrative processes. Due to the high volume of repetitive tasks (e.g., 100k manual uploads of offer documents into SAP per year), automation was identified as an important strategic target with a high management attention and commitment. DTSE has to work with various backend application systems without any possibility to change those systems. Furthermore, the complexity of administrative processes differed. When it comes to the transfer of unstructured data (e.g., offer documents) to structured data (e.g., MS Excel files), further cognitive technologies were needed.}, language = {en} } @article{BurgerRumpfDoetal.2021, author = {Burger, Ren{\'e} and Rumpf, Jessica and Do, Xuan Tung and Monakhova, Yulia and Diehl, Bernd W. K. 
and Rehahn, Matthias and Schulze, Margit}, title = {Is NMR combined with multivariate regression applicable for the molecular weight determination of randomly cross-linked polymers such as lignin?}, series = {ACS Omega}, volume = {6}, journal = {ACS Omega}, number = {44}, publisher = {ACS Publications}, address = {Washington, DC}, issn = {2470-1343}, doi = {10.1021/acsomega.1c03574}, pages = {29516 -- 29524}, year = {2021}, abstract = {The molecular weight properties of lignins are one of the key elements that need to be analyzed for a successful industrial application of these promising biopolymers. In this study, the use of 1H NMR as well as diffusion-ordered spectroscopy (DOSY NMR), combined with multivariate regression methods, was investigated for the determination of the molecular weight (Mw and Mn) and the polydispersity of organosolv lignins (n = 53, Miscanthus x giganteus, Paulownia tomentosa, and Silphium perfoliatum). The suitability of the models was demonstrated by cross validation (CV) as well as by an independent validation set of samples from different biomass origins (beech wood and wheat straw). CV errors of ca. 7-9 and 14-16\% were achieved for all parameters with the models from the 1H NMR spectra and the DOSY NMR data, respectively. The prediction errors for the validation samples were in a similar range for the partial least squares model from the 1H NMR data and for a multiple linear regression using the DOSY NMR data. The results indicate the usefulness of NMR measurements combined with multivariate regression methods as a potential alternative to more time-consuming methods such as gel permeation chromatography.}, language = {en} } @article{MonakhovaDiehl2021, author = {Monakhova, Yulia and Diehl, Bernd W. 
K.}, title = {Simplification of NMR Workflows by Standardization Using 2H Integral of Deuterated Solvent as Applied to Aloe vera Preparations}, series = {Applied Magnetic Resonance}, volume = {52}, journal = {Applied Magnetic Resonance}, number = {11}, publisher = {Springer}, address = {Cham}, issn = {1613-7507}, doi = {10.1007/s00723-021-01393-4}, pages = {1591 -- 1600}, year = {2021}, abstract = {In this study, a recently proposed NMR standardization approach by 2H integral of deuterated solvent for quantitative multicomponent analysis of complex mixtures is presented. As a proof of principle, the existing NMR routine for the analysis of Aloe vera products was modified. Instead of using absolute integrals of targeted compounds and internal standard (nicotinamide) from 1H-NMR spectra, quantification was performed based on the ratio of a particular 1H-NMR compound integral and 2H-NMR signal of deuterated solvent D2O. Validation characteristics (linearity, repeatability, accuracy) were evaluated and the results showed that the method has the same precision as internal standardization in case of multicomponent screening. Moreover, a dehydration process by freeze drying is not necessary for the new routine. Now, our NMR profiling of A. vera products needs only limited sample preparation and data processing. The new standardization methodology provides an appealing alternative for multicomponent NMR screening. In general, this novel approach, using standardization by 2H integral, benefits from reduced sample preparation steps and uncertainties, and is recommended in different application areas (purity determination, forensics, pharmaceutical analysis, etc.).}, language = {en} } @article{BurmistrovaSobolevaMonakhova2021, author = {Burmistrova, Natalia A. and Soboleva, Polina M. 
and Monakhova, Yulia}, title = {Is infrared spectroscopy combined with multivariate analysis a promising tool for heparin authentication?}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {194}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 113811}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0731-7085}, doi = {10.1016/j.jpba.2020.113811}, year = {2021}, abstract = {The investigation of the possibility to determine various characteristics of powder heparin (n = 115) was carried out with infrared spectroscopy. The evaluation of heparin samples included several parameters such as purity grade, distributing company, animal source as well as heparin species (i.e. Na-heparin, Ca-heparin, and heparinoids). Multivariate analysis using principal component analysis (PCA), soft independent modelling of class analogy (SIMCA), and partial least squares - discriminant analysis (PLS-DA) were applied for the modelling of spectral data. Different pre-processing methods were applied to IR spectral data; multiplicative scatter correction (MSC) was chosen as the most relevant. Obtained results were confirmed by nuclear magnetic resonance (NMR) spectroscopy. Good predictive ability of this approach demonstrates the potential of IR spectroscopy and chemometrics for screening of heparin quality. 
This approach, however, is designed as a screening tool and is not considered as a replacement for either of the methods required by USP and FDA.}, language = {en} } @article{MonakhovaDiehl2021a, author = {Monakhova, Yulia and Diehl, Bernd W. K.}, title = {Novel approach of qNMR workflow by standardization using 2H integral: Application to any intrinsic calibration standard}, series = {Talanta}, volume = {222}, journal = {Talanta}, number = {Article number: 121504}, publisher = {Elsevier}, issn = {0039-9140}, doi = {10.1016/j.talanta.2020.121504}, year = {2021}, abstract = {Quantitative nuclear magnetic resonance (qNMR) is routinely performed by the internal or external standardization. The manuscript describes a simple alternative to these common workflows by using NMR signal of another active nuclei of calibration compound. For example, for any arbitrary compound quantification by NMR can be based on the use of an indirect concentration referencing that relies on a solvent having both 1H and 2H signals. To perform high-quality quantification, the deuteration level of the utilized deuterated solvent has to be estimated. In this contribution the new method was applied to the determination of deuteration levels in different deuterated solvents (MeOD, ACN, CDCl3, acetone, benzene, DMSO-d6). Isopropanol-d6, which contains a defined number of deuterons and protons, was used for standardization. Validation characteristics (precision, accuracy, robustness) were calculated and the results showed that the method can be used in routine practice. Uncertainty budget was also evaluated. 
In general, this novel approach, using standardization by 2H integral, benefits from reduced sample preparation steps and uncertainties, and can be applied in different application areas (purity determination, forensics, pharmaceutical analysis, etc.).}, language = {en} } @inproceedings{RitschelStenzelCzarneckietal.2021, author = {Ritschel, Konstantin and Stenzel, Adina and Czarnecki, Christian and Hong, Chin-Gi}, title = {Realizing robotic process automation potentials: an architectural perspective on a real-life implementation case}, series = {GI Edition Proceedings Band 314 "INFORMATIK 2021" Computer Science \& Sustainability}, booktitle = {GI Edition Proceedings Band 314 "INFORMATIK 2021" Computer Science \& Sustainability}, editor = {{Gesellschaft f{\"u}r Informatik e.V. (GI)}}, publisher = {K{\"o}llen}, address = {Bonn}, isbn = {9783885797081}, issn = {1617-5468}, pages = {1303 -- 1311}, year = {2021}, abstract = {The initial idea of Robotic Process Automation (RPA) is the automation of business processes through a simple emulation of user input and output by software robots. Hence, it can be assumed that no changes of the used software systems and existing Enterprise Architecture (EA) is required. In this short, practical paper we discuss this assumption based on a real-life implementation project. We show that a successful RPA implementation might require architectural work during analysis, implementation, and migration. 
As practical paper we focus on exemplary lessons-learned and new questions related to RPA and EA.}, language = {en} } @inproceedings{MertensPuetzBrauneretal.2021, author = {Mertens, Alexander and P{\"u}tz, Sebastian and Brauner, Philipp and Brillowski, Florian Sascha and Buczak, Nadine and Dammers, Hannah and van Dyck, Marc and Kong, Iris and K{\"o}nigs, Peter and Kortomeikel, Frauke Carole and Rodemann, Niklas and Schaar, Anne Kathrin and Steuer-Dankert, Linda and Wlecke, Shari and Gries, Thomas and Leicht-Scholten, Carmen and Nagel, Saskia K. and Piller, Frank Thomas and Schuh, G{\"u}nther and Ziefle, Martina and Nitsch, Verena}, title = {Human digital shadow: Data-based modeling of users and usage in the internet of production}, series = {14th International Conference on Human System Interaction : 8-10 July 2021. Gdańsk, Poland}, booktitle = {14th International Conference on Human System Interaction : 8-10 July 2021. Gdańsk, Poland}, publisher = {IEEE}, doi = {10.1109/HSI52170.2021.9538729}, pages = {1 -- 8}, year = {2021}, abstract = {Digital Shadows as the aggregation, linkage and abstraction of data relating to physical objects are a central vision for the future of production. However, the majority of current research takes a technocentric approach, in which the human actors in production play a minor role. Here, the authors present an alternative anthropocentric perspective that highlights the potential and main challenges of extending the concept of Digital Shadows to humans. Following future research methodology, three prospections that illustrate use cases for Human Digital Shadows across organizational and hierarchical levels are developed: human-robot collaboration for manual work, decision support and work organization, as well as human resource management. 
Potentials and challenges are identified using separate SWOT analyses for the three prospections and common themes are emphasized in a concluding discussion.}, language = {en} } @incollection{Kurz2021, author = {Kurz, Melanie}, title = {Zur Multikausalit{\"a}t von Designentscheidungen - eine Beispielsammlung}, series = {Designentscheidungen: {\"u}ber Begr{\"u}ndungen im Entwurfsprozess}, booktitle = {Designentscheidungen: {\"u}ber Begr{\"u}ndungen im Entwurfsprozess}, publisher = {avedition}, address = {Stuttgart}, isbn = {978-3-89986-353-6}, pages = {22 -- 43}, year = {2021}, language = {de} } @book{KurzSchwer2021, author = {Kurz, Melanie and Schwer, Thilo}, title = {Designentscheidungen : {\"u}ber Begr{\"u}ndungen im Entwurfsprozess / herausgegeben von Melanie Kurz und Thilo Schwer}, series = {Schriften / Gesellschaft f{\"u}r Designgeschichte}, journal = {Schriften / Gesellschaft f{\"u}r Designgeschichte}, publisher = {avedition}, address = {Stuttgart}, isbn = {978-3-89986-353-6}, pages = {143 Seiten : Illustrationen}, year = {2021}, language = {de} } @inproceedings{GrundmannBorellaCeriottietal.2021, author = {Grundmann, Jan Thimo and Borella, Laura and Ceriotti, Matteo and Chand, Suditi and Cordero, Federico and Dachwald, Bernd and Fexer, Sebastian and Grimm, Christian D. and Hendrikse, Jeffrey and Herč{\'\i}k, David and Herique, Alain and Hillebrandt, Martin and Ho, Tra-Mi and Kesseler, Lars and Laabs, Martin and Lange, Caroline and Lange, Michael and Lichtenheldt, Roy and McInnes, Colin R. and Moore, Iain and Peloni, Alessandro and Plettenmeier, Dirk and Quantius, Dominik and Seefeldt, Patric and Venditti, Flaviane C. F. and Vergaaij, Merel and Viavattene, Giulia and Virkki, Anne K. 
and Zander, Martin}, title = {More bucks for the bang: new space solutions, impact tourism and one unique science \& engineering opportunity at T-6 months and counting}, series = {7th IAA Planetary Defense Conference}, booktitle = {7th IAA Planetary Defense Conference}, year = {2021}, abstract = {For now, the Planetary Defense Conference Exercise 2021's incoming fictitious(!), asteroid, 2021 PDC, seems headed for impact on October 20th, 2021, exactly 6 months after its discovery. Today (April 26th, 2021), the impact probability is 5\%, in a steep rise from 1 in 2500 upon discovery six days ago. We all know how these things end. Or do we? Unless somebody kicked off another headline-grabbing media scare or wants to keep civil defense very idle very soon, chances are that it will hit (note: this is an exercise!). Taking stock, it is barely 6 months to impact, a steadily rising likelihood that it will actually happen, and a huge uncertainty of possible impact energies: First estimates range from 1.2 MtTNT to 13 GtTNT, and this is not even the worst-worst case: a 700 m diameter massive NiFe asteroid (covered by a thin veneer of Ryugu-black rubble to match size and brightness), would come in at 70 GtTNT. In down to Earth terms, this could be all between smashing fireworks over some remote area of the globe and a 7.5 km crater downtown somewhere. Considering the deliberate and sedate ways of development of interplanetary missions it seems we can only stand and stare until we know well enough where to tell people to pack up all that can be moved at all and save themselves. But then, it could just as well be a smaller bright rock. The best estimate is 120 m diameter from optical observation alone, by 13\% standard albedo. NASA's upcoming DART mission to binary asteroid (65803) Didymos is designed to hit such a small target, its moonlet Dimorphos. 
The Deep Impact mission's impactor in 2005 successfully guided itself to the brightest spot on comet 9P/Tempel 1, a relatively small feature on the 6 km nucleus. And 'space' has changed: By the end of this decade, one satellite communication network plans to have launched over 11000 satellites at a pace of 60 per launch every other week. This level of series production is comparable in numbers to the most prolific commercial airliners. Launch vehicle production has not simply increased correspondingly - they can be reused, although in a trade for performance. Optical and radio astronomy as well as planetary radar have made great strides in the past decade, and so has the design and production capability for everyday 'high-tech' products. 60 years ago, spaceflight was invented from scratch within two years, and there are recent examples of fast-paced space projects as well as a drive towards 'responsive space'. It seems it is not quite yet time to abandon all hope. We present what could be done and what is too close to call once thinking is shoved out of the box by a clear and present danger, to show where a little more preparedness or routine would come in handy - or become decisive. And if we fail, let's stand and stare safely and well instrumented anywhere on Earth together in the greatest adventure of science.}, language = {en} }