@inproceedings{MilijašŠakićMarinkovićetal.2021, author = {Milijaš, Aleksa and Šakić, Bogdan and Marinković, Marko and Butenweg, Christoph}, title = {Experimental investigation of behaviour of masonry infilled RC frames under out-of-plane loading}, series = {8th ECCOMAS Thematic Conference on Computational Methods in Structural Dynamics and Earthquake Engineering}, booktitle = {8th ECCOMAS Thematic Conference on Computational Methods in Structural Dynamics and Earthquake Engineering}, editor = {Papadrakakis, Manolis and Fragiadakis, Michalis}, publisher = {National Technical University of Athens}, address = {Athen}, isbn = {978-618-85072-5-8}, issn = {2623-3347}, doi = {10.7712/120121.8528.18914}, pages = {829 -- 846}, year = {2021}, abstract = {Masonry infills are commonly used as exterior or interior walls in reinforced concrete (RC) frame structures and they can be encountered all over the world, including earthquake prone regions. Since the middle of the 20th century the behaviour of these non-structural elements under seismic loading has been studied in numerous experimental campaigns. However, most of the studies were carried out by means of in-plane tests, while there is a lack of out-of-plane experimental investigations. In this paper, the out-of-plane tests carried out on full scale masonry infilled frames are described. The results of the out-of-plane tests are presented in terms of force-displacement curves and measured out-of-plane displacements. 
Finally, the reliability of existing analytical approaches developed to estimate the out-of-plane strength of masonry infills is examined on presented experimental results.}, language = {en} } @inproceedings{Butenweg2021, author = {Butenweg, Christoph}, title = {Integrated approach for monitoring and management of buildings with digital building models and modern sensor technologies}, series = {Civil Engineering 2021 - Achievements and Visions: Proceedings of the International Conference celebrating 175th Anniversary of the Faculty of Civil Engineering, University of Belgrade, October 25 - 26, 2021 Belgrade, Serbia}, booktitle = {Civil Engineering 2021 - Achievements and Visions: Proceedings of the International Conference celebrating 175th Anniversary of the Faculty of Civil Engineering, University of Belgrade, October 25 - 26, 2021 Belgrade, Serbia}, editor = {Kuzmanović, Vladan and Ignjatović, Ivan}, publisher = {University of Belgrade}, address = {Belgrade}, year = {2021}, language = {en} } @inproceedings{MilkovaButenwegDumovaJovanoska2021, author = {Milkova, Kristina and Butenweg, Christoph and Dumova-Jovanoska, Elena}, title = {Region-sensitive comprehensive procedure for determination of seismic fragility curves}, series = {1st Croatian Conference on Earthquake Engineering 1CroCEE 22-24 March 2021 Zagreb, Croatia}, booktitle = {1st Croatian Conference on Earthquake Engineering 1CroCEE 22-24 March 2021 Zagreb, Croatia}, publisher = {University of Zagreb}, address = {Zagreb}, doi = {10.5592/CO/1CroCEE.2021.158}, pages = {121 -- 128}, year = {2021}, abstract = {Seismic vulnerability estimation of existing structures is an unquestionably interesting topic of high priority, particularly after earthquake events. Having in mind the vast number of old masonry buildings in North Macedonia serving as public institutions, it is evident that the structural assessment of these buildings is an issue of great importance. 
In this paper, a comprehensive methodology for the development of seismic fragility curves of existing masonry buildings is presented. A scenario-based method that incorporates the knowledge of the tectonic style of the considered region, the active fault characterization, the earth crust model and the historical seismicity (determined via the Neo Deterministic approach) is used for calculation of the necessary response spectra. The capacity of the investigated masonry buildings has been determined by using nonlinear static analysis. MINEA software (SDA Engineering) is used for verification of the structural safety of the structures. Performance point, obtained from the intersection of the capacity of the building and the spectra used, is selected as a response parameter. The thresholds of the spectral displacement are obtained by splitting the capacity curve into five parts, utilizing empirical formulas which are represented as a function of yield displacement and ultimate displacement. As a result, four levels of damage limit states are determined. A maximum likelihood estimation procedure for the process of fragility curves determination is noted as a final step in the proposed procedure. 
As a result, region specific series of vulnerability curves for structures are defined.}, language = {en} } @inproceedings{BalaskasHoffmeisterButenwegetal.2021, author = {Balaskas, Georgios and Hoffmeister, Benno and Butenweg, Christoph and Pilz, Marco and Bauer, Anna}, title = {Earthquake early warning and response system based on intelligent seismic and monitoring sensors embedded in a communication platform and coupled with BIM models}, series = {8th ECCOMAS Thematic Conference on Computational Methods in Structural Dynamics and Earthquake Engineering}, booktitle = {8th ECCOMAS Thematic Conference on Computational Methods in Structural Dynamics and Earthquake Engineering}, editor = {Papadrakakis, Manolis and Fragiadakis, Michalis}, publisher = {National Technical University of Athens}, address = {Athen}, isbn = {978-618-85072-5-8}, issn = {2623-3347}, doi = {10.7712/120121.8539.18855}, pages = {987 -- 998}, year = {2021}, abstract = {This paper describes the concept of an innovative, interdisciplinary, user-oriented earthquake warning and rapid response system coupled with a structural health monitoring system (SHM), capable to detect structural damages in real time. The novel system is based on interconnected decentralized seismic and structural health monitoring sensors. It is developed and will be exemplarily applied on critical infrastructures in Lower Rhine Region, in particular on a road bridge and within a chemical industrial facility. A communication network is responsible to exchange information between sensors and forward warnings and status reports about infrastructures'health condition to the concerned recipients (e.g., facility operators, local authorities). Safety measures such as emergency shutdowns are activated to mitigate structural damages and damage propagation. Local monitoring systems of the infrastructures are integrated in BIM models. 
The visualization of sensor data and the graphic representation of the detected damages provide spatial content to sensors data and serve as a useful and effective tool for the decision-making processes after an earthquake in the region under consideration.}, language = {en} } @inproceedings{ButenwegBursiNardinetal.2021, author = {Butenweg, Christoph and Bursi, Oreste S. and Nardin, Chiara and Lanese, Igor and Pavese, Alberto and Marinković, Marko and Paolacci, Fabrizio and Quinci, Gianluca}, title = {Experimental investigation on the seismic performance of a multi-component system for major-hazard industrial facilities}, series = {Pressure Vessels \& Piping Virtual Conference July 13-15, 2021}, booktitle = {Pressure Vessels \& Piping Virtual Conference July 13-15, 2021}, publisher = {American Society of Mechanical Engineers (ASME)}, address = {New York}, isbn = {9780791885352}, doi = {10.1115/PVP2021-61696}, pages = {8 Seiten}, year = {2021}, abstract = {Past earthquakes demonstrated the high vulnerability of industrial facilities equipped with complex process technologies leading to serious damage of the process equipment and multiple and simultaneous release of hazardous substances in industrial facilities. Nevertheless, the design of industrial plants is inadequately described in recent codes and guidelines, as they do not consider the dynamic interaction between the structure and the installations and thus the effect of seismic response of the installations on the response of the structure and vice versa. The current code-based approach for the seismic design of industrial facilities is considered not enough for ensure proper safety conditions against exceptional event entailing loss of content and related consequences. 
Accordingly, SPIF project (Seismic Performance of Multi-Component Systems in Special Risk Industrial Facilities) was proposed within the framework of the European H2020 - SERA funding scheme (Seismology and Earthquake Engineering Research Infrastructure Alliance for Europe). The objective of the SPIF project is the investigation of the seismic behaviour of a representative industrial structure equipped with complex process technology by means of shaking table tests. The test structure is a three-story moment resisting steel frame with vertical and horizontal vessels and cabinets, arranged on the three levels and connected by pipes. The dynamic behaviour of the test structure and of its relative several installations is investigated. Furthermore, both process components and primary structure interactions are considered and analyzed. Several PGA-scaled artificial ground motions are applied to study the seismic response at different levels. After each test, dynamic identification measurements are carried out to characterize the system condition. The contribution presents the experimental setup of the investigated structure and installations, selected measurement data and describes the obtained damage. 
Furthermore, important findings for the definition of performance limits, the effectiveness of floor response spectra in industrial facilities will be presented and discussed.}, language = {en} } @inproceedings{MilkovaButenwegDumovaJovanoska2020, author = {Milkova, Kristina and Butenweg, Christoph and Dumova-Jovanoska, Elena}, title = {Methodology for development of seismic vulnerability curve for existing unreinforced Masonry buildings}, series = {17th World Conference on Earthquake Engineering, Sendai, Japan, September 27 to October 2, 2021.}, booktitle = {17th World Conference on Earthquake Engineering, Sendai, Japan, September 27 to October 2, 2021.}, pages = {13 Seiten}, year = {2020}, abstract = {Seismic behavior of an existing unreinforced masonry building built pre-modern code, located in the City of Ohrid, Republic of North Macedonia has been investigated in this paper. The analyzed school building is selected as an archetype in an ongoing project named "Seismic vulnerability assessment of existing masonry structures in Republic of North Macedonia (SeismoWall)". Two independent segments were included in this research: Seismic hazard assessment by creating a site-specific response spectra and Seismic vulnerability definition by creating a region-specific series of vulnerability curves for the chosen building topology. A reliable Seismic Hazard Assessment for a selected region is a crucial point for performing a seismic risk analysis of a characteristic building class. In that manner, a scenario-based method that incorporates together the knowledge of tectonic style of the considered region, the active fault characterization, the earth crust model and the historical seismicity named Neo Deterministic approach is used for calculation of the response spectra for the location of the building. 
Variations of the rupturing process are taken into account in the nucleation point of the rupture, in the rupture velocity pattern and in the distribution of the slip on the fault. The results obtained from the multiple scenarios are obtained as an envelope of the response spectra computed for the site using the procedure Maximum Credible Seismic Input (MCSI). Capacity of the selected building has been determined by using nonlinear static analysis. MINEA software (SDA Engineering) was used for verification of the structural safety of the chosen unreinforced masonry structure. In the process of optimization of the number of samples, computational cost required in a Monte Carlo simulation is significantly reduced since the simulation is performed on a polynomial response surface function for prediction of the structural response. Performance point, found as the intersection of the capacity of the building and the spectra used, is chosen as a response parameter. Five levels of damage limit states based on the capacity curve of the building are defined in dependency on the yield displacement and the maximum displacement. Maximum likelihood estimation procedure is utilized in the process of vulnerability curves determination. As a result, region specific series of vulnerability curves for the chosen type of masonry structures are defined. 
The obtained probabilities of exceedance of specific damage states resulting from the vulnerability curves are compared with the observed damages that happened after the earthquake in July 2017 in the City of Ohrid, North Macedonia.}, language = {en} } @inproceedings{MarinkovićButenweg2020, author = {Marinković, Marko and Butenweg, Christoph}, title = {Out-of-plane behavior of decoupled masonry infills under seismic loading}, series = {17th World Conference on Earthquake Engineering, Sendai, Japan, September 27 to October 2, 2021.}, booktitle = {17th World Conference on Earthquake Engineering, Sendai, Japan, September 27 to October 2, 2021.}, pages = {13 Seiten}, year = {2020}, abstract = {Masonry is used in many buildings not only for load-bearing walls, but also for non-load-bearing enclosure elements in the form of infill walls. Many studies confirmed that infill walls interact with the surrounding reinforced concrete frame, thus changing dynamic characteristics of the structure. Consequently, masonry infills cannot be neglected in the design process. However, although the relevant standards contain requirements for infill walls, they do not describe how these requirements are to be met concretely. This leads in practice to the fact that the infill walls are neither dimensioned nor constructed correctly. The evidence of this fact is confirmed by the recent earthquakes, which have led to enormous damages, sometimes followed by the total collapse of buildings and loss of human lives. Recently, the increasing effort has been dedicated to the approach of decoupling of masonry infills from the frame elements by introducing the gap in between. This helps in removing the interaction between infills and frame, but raises the question of out-of-plane stability of the panel. 
This paper presents the results of the experimental campaign showing the out-of-plane behavior of masonry infills decoupled with the system called INODIS (Innovative decoupled infill system), developed within the European project INSYSME (Innovative Systems for Earthquake Resistant Masonry Enclosures in Reinforced Concrete Buildings). Full scale specimens were subjected to the different loading conditions and combinations of in-plane and out-of-plane loading. Out-of-plane capacity of the masonry infills with the INODIS system is compared with traditionally constructed infills, showing that INODIS system provides reliable out-of-plane connection under various loading conditions. In contrast, traditional infills performed very poor in the case of combined and simultaneously applied in-plane and out-of-plane loading, experiencing brittle behavior under small in-plane drifts followed by high out-of-plane displacements. Decoupled infills with the INODIS system have remained stable under out-of-plane loads, even after reaching high in-plane drifts and being damaged.}, language = {en} } @inproceedings{MarinkovićButenweg2019, author = {Marinković, Marko and Butenweg, Christoph}, title = {Experimental and numerical analysis of RC frames with decoupled masonry infills}, series = {7th ECCOMAS Thematic Conference on Computational Methods in Structural Dynamics and Earthquake Engineering}, booktitle = {7th ECCOMAS Thematic Conference on Computational Methods in Structural Dynamics and Earthquake Engineering}, editor = {Papadrakakis, Manolis and Fragiadakis, Michalis}, publisher = {National Technical University of Athens}, address = {Athen}, isbn = {978-618-82844-5-6}, issn = {2623-3347}, doi = {10.7712/120119.7088.18845}, pages = {2464 -- 2479}, year = {2019}, abstract = {Masonry infill walls are commonly used in reinforced concrete (RC) frame structures, also in seismically active areas, although they often experience serious damage during earthquakes. 
One of the main reasons for their poor behaviour is the connection to the frame, which is usually constructed using mortar. This paper describes the novel solution for infill/frame connection based on application of elastomeric material between them. The system called INODIS (Innovative Decoupled Infill System) has the aim to postpone the activation of infill in in-plane direction and at the same time to provide sufficient out-of-plane support. First, experimental tests on infilled frame specimens are presented and the comparison of the results between traditionally infilled frames and infilled frames with the INODIS system are given. The results are then used for calibration and validation of numerical model, which can be further employed for investigating the influence of some material parameters on the behaviour of infilled frames with the INODIS system.}, language = {en} } @misc{TopcuMadabhushiStaat2022, author = {Topcu, Murat and Madabhushi, Gopal Santana Phani and Staat, Manfred}, title = {Datasets from FEM Simulations done with COMSOL Multiphysics and Code_Aster}, doi = {10.6084/m9.figshare.19333295.v2}, year = {2022}, abstract = {Datasets from FEM Simulations done with COMSOL Multiphysics and Code_Aster for an elastic stress transfer between matrix and fibres having a variable radius.}, language = {en} } @article{VahidpourGuthmanArreolaetal.2022, author = {Vahidpour, Farnoosh and Guthman, Eric and Arreola, Julia and Alghazali, Yousef H. M. 
and Wagner, Torsten and Sch{\"o}ning, Michael Josef}, title = {Assessment of Various Process Parameters for Optimized Sterilization Conditions Using a Multi-Sensing Platform}, series = {Foods}, volume = {11}, journal = {Foods}, number = {5}, publisher = {MDPI}, address = {Basel}, issn = {2304-8158}, doi = {10.3390/foods11050660}, pages = {Artikel 660}, year = {2022}, abstract = {In this study, an online multi-sensing platform was engineered to simultaneously evaluate various process parameters of food package sterilization using gaseous hydrogen peroxide (H₂O₂). The platform enabled the validation of critical aseptic parameters. In parallel, one series of microbiological count reduction tests was performed using highly resistant spores of B. atrophaeus DSM 675 to act as the reference method for sterility validation. By means of the multi-sensing platform together with microbiological tests, we examined sterilization process parameters to define the most effective conditions with regards to the highest spore kill rate necessary for aseptic packaging. As these parameters are mutually associated, a correlation between different factors was elaborated. The resulting correlation indicated the need for specific conditions regarding the applied H₂O₂ gas temperature, the gas flow and concentration, the relative humidity and the exposure time. 
Finally, the novel multi-sensing platform together with the mobile electronic readout setup allowed for the online and on-site monitoring of the sterilization process, selecting the best conditions for sterility and, at the same time, reducing the use of the time-consuming and costly microbiological tests that are currently used in the food package industry.}, language = {en} } @inproceedings{RajanKubalskiAltayetal.2017, author = {Rajan, Sreelakshmy and Kubalski, Thomas and Altay, Okyay and Dalguer, Luis A and Butenweg, Christoph}, title = {Multi-dimensional fragility analysis of a RC building with components using response surface method}, series = {24th International Conference on Structural Mechanics in Reactor Technology, Busan, Korea, 20-25 August, 2017}, booktitle = {24th International Conference on Structural Mechanics in Reactor Technology, Busan, Korea, 20-25 August, 2017}, publisher = {International Assn for Structural Mechanics in Reactor Technology (IASMiRT)}, address = {Raleigh, USA}, isbn = {9781510856776}, pages = {3126 -- 3135}, year = {2017}, abstract = {Conventional fragility curves describe the vulnerability of the main structure under external hazards. However, in complex structures such as nuclear power plants, the safety or the risk depends also on the components associated with a system. The classical fault tree analysis gives an overall view of the failure and contains several subsystems to the main event, however, the interactions in the subsystems are not well represented. In order to represent the interaction of the components, a method suggested by Cimellaro et al. (2006) using multidimensional performance limit state functions to obtain the system fragility curves is adopted. This approach gives the possibility of deriving the cumulative fragility taking into account the interaction of the response of different components. 
In this paper, this approach is used to evaluate the seismic risk of a representative electrical building infrastructure, including the component, of a nuclear power plant. A simplified model of the structure, with nonlinear material behavior is employed for the analysis in Abaqus©. The input variables considered are the material parameters, boundary conditions and the seismic input. The variability of the seismic input is obtained from selected ground motion time histories of spectrum compatible synthetic accelerograms. Unlike the usual Monte Carlo methods used for the probabilistic analysis of the structure, a computationally effective response surface method is used. This method reduces the computational effort of the calculations by reducing the required number of samples.}, language = {en} } @article{HeuermannHarzheimCronenbroeck2021, author = {Heuermann, Holger and Harzheim, Thomas and Cronenbroeck, Tobias}, title = {First SIMO harmonic radar based on the SFCW concept and the HR transfer function}, series = {Remote sensing}, volume = {13}, journal = {Remote sensing}, number = {24}, publisher = {MDPI}, address = {Basel}, issn = {2072-4292}, doi = {10.3390/rs13245088}, pages = {23 Seiten}, year = {2021}, abstract = {This paper presents a new SIMO radar system based on a harmonic radar (HR) stepped frequency continuous wave (SFCW) architecture. Simple tags that can be electronically individually activated and deactivated via a DC control voltage were developed and combined to form an MO array field. This HR operates in the entire 2.45 GHz ISM band for transmitting the illumination signal and receives at twice the stimulus frequency and bandwidth centered around 4.9 GHz. This paper presents the development, the basic theory of a HR system for the characterization of objects placed into the propagation path in-between the radar and the reflectors (similar to a free-space measurement with a network analyzer) as well as first measurements performed by the system. 
Further detailed measurement series will be made available later on to other researchers to develop AI and machine learning based signal processing routines or synthetic aperture radar algorithms for imaging, object recognition, and feature extraction. For this purpose, the necessary information is published in this paper. It is explained in detail why this SIMO-HR can be an attractive solution augmenting or replacing existing systems for radar measurements in production technology for material under test measurements and as a simplified MIMO system. The novel HR transfer function, which is a basis for researchers and developers for material characterization or imaging algorithms, is introduced and metrologically verified in a well traceable coaxial setup.}, language = {en} } @article{GorzalkaSchmiedtSchorn2021, author = {Gorzalka, Philip and Schmiedt, Jacob Estevam and Schorn, Christian}, title = {Automated Generation of an Energy Simulation Model for an Existing Building from UAV Imagery}, series = {Buildings}, volume = {11}, journal = {Buildings}, number = {9}, publisher = {MDPI}, address = {Basel}, issn = {2075-5309}, doi = {10.3390/buildings11090380}, pages = {15 Seiten}, year = {2021}, abstract = {An approach to automatically generate a dynamic energy simulation model in Modelica for a single existing building is presented. It aims at collecting data about the status quo in the preparation of energy retrofits with low effort and costs. The proposed method starts from a polygon model of the outer building envelope obtained from photogrammetrically generated point clouds. The open-source tools TEASER and AixLib are used for data enrichment and model generation. A case study was conducted on a single-family house. The resulting model can accurately reproduce the internal air temperatures during synthetical heating up and cooling down. Modelled and measured whole building heat transfer coefficients (HTC) agree within a 12\% range. 
A sensitivity analysis emphasises the importance of accurate window characterisations and justifies the use of a very simplified interior geometry. Uncertainties arising from the use of archetype U-values are estimated by comparing different typologies, with best- and worst-case estimates showing differences in pre-retrofit heat demand of about ±20\% to the average; however, as the assumptions made are permitted by some national standards, the method is already close to practical applicability and opens up a path to quickly estimate possible financial and energy savings after refurbishment.}, language = {en} } @article{PourshahidiAchtsnichtNambipareecheeetal.2021, author = {Pourshahidi, Ali Mohammad and Achtsnicht, Stefan and Nambipareechee, Mrinal Murali and Offenh{\"a}usser, Andreas and Krause, Hans-Joachim}, title = {Multiplex detection of magnetic beads using offset field dependent frequency mixing magnetic detection}, series = {Sensors}, volume = {21}, journal = {Sensors}, number = {17}, publisher = {MDPI}, address = {Basel}, issn = {1424-8220}, doi = {10.3390/s21175859}, pages = {16 Seiten}, year = {2021}, abstract = {Magnetic immunoassays employing Frequency Mixing Magnetic Detection (FMMD) have recently become increasingly popular for quantitative detection of various analytes. Simultaneous analysis of a sample for two or more targets is desirable in order to reduce the sample amount, save consumables, and save time. We show that different types of magnetic beads can be distinguished according to their frequency mixing response to a two-frequency magnetic excitation at different static magnetic offset fields. We recorded the offset field dependent FMMD response of two different particle types at frequencies ƒ₁ + n⋅ƒ₂, n = 1, 2, 3, 4 with ƒ₁ = 30.8 kHz and ƒ₂ = 63 Hz. Their signals were clearly distinguishable by the locations of the extremes and zeros of their responses. Binary mixtures of the two particle types were prepared with different mixing ratios. 
The mixture samples were analyzed by determining the best linear combination of the two pure constituents that best resembled the measured signals of the mixtures. Using a quadratic programming algorithm, the mixing ratios could be determined with an accuracy of greater than 14\%. If each particle type is functionalized with a different antibody, multiplex detection of two different analytes becomes feasible.}, language = {en} } @article{PoghossianWeldenBuniatyanetal.2021, author = {Poghossian, Arshak and Welden, Rene and Buniatyan, Vahe V. and Sch{\"o}ning, Michael Josef}, title = {An Array of On-Chip Integrated, Individually Addressable Capacitive Field-Effect Sensors with Control Gate: Design and Modelling}, series = {Sensors}, volume = {21}, journal = {Sensors}, number = {18}, publisher = {MDPI}, address = {Basel}, issn = {1424-8220}, doi = {10.3390/s21186161}, pages = {17}, year = {2021}, abstract = {The on-chip integration of multiple biochemical sensors based on field-effect electrolyte-insulator-semiconductor capacitors (EISCAP) is challenging due to technological difficulties in realization of electrically isolated EISCAPs on the same Si chip. In this work, we present a new simple design for an array of on-chip integrated, individually electrically addressable EISCAPs with an additional control gate (CG-EISCAP). The existence of the CG enables an addressable activation or deactivation of on-chip integrated individual CG-EISCAPs by simple electrical switching the CG of each sensor in various setups, and makes the new design capable for multianalyte detection without cross-talk effects between the sensors in the array. The new designed CG-EISCAP chip was modelled in so-called floating/short-circuited and floating/capacitively-coupled setups, and the corresponding electrical equivalent circuits were developed. In addition, the capacitance-voltage curves of the CG-EISCAP chip in different setups were simulated and compared with that of a single EISCAP sensor. 
Moreover, the sensitivity of the CG-EISCAP chip to surface potential changes induced by biochemical reactions was simulated and an impact of different parameters, such as gate voltage, insulator thickness and doping concentration in Si, on the sensitivity has been discussed.}, language = {en} } @article{AkimbekovDigelTastambeketal.2021, author = {Akimbekov, Nuraly S. and Digel, Ilya and Tastambek, Kuanysh T. and Sherelkhan, Dinara K. and Jussupova, Dariya B. and Altynbay, Nazym P.}, title = {Low-rank coal as a source of humic substances for soil amendment and fertility management}, series = {Agriculture}, volume = {11}, journal = {Agriculture}, number = {12}, publisher = {MDPI}, address = {Basel}, issn = {2077-0472}, doi = {10.3390/agriculture11121261}, pages = {25 Seiten}, year = {2021}, abstract = {Humic substances (HS), as important environmental components, are essential to soil health and agricultural sustainability. The usage of low-rank coal (LRC) for energy generation has declined considerably due to the growing popularity of renewable energy sources and gas. However, their potential as soil amendment aimed to maintain soil quality and productivity deserves more recognition. LRC, a highly heterogeneous material in nature, contains large quantities of HS and may effectively help to restore the physicochemical, biological, and ecological functionality of soil. Multiple emerging studies support the view that LRC and its derivatives can positively impact the soil microclimate, nutrient status, and organic matter turnover. Moreover, the phytotoxic effects of some pollutants can be reduced by subsequent LRC application. Broad geographical availability, relatively low cost, and good technical applicability of LRC offer the advantage of easy fulfilling soil amendment and conditioner requirements worldwide. This review analyzes and emphasizes the potential of LRC and its numerous forms/combinations for soil amelioration and crop production. 
A great benefit would be a systematic investment strategy implicating safe utilization and long-term application of LRC for sustainable agricultural production.}, language = {en} } @article{FiedlerOrzadaFloeseretal.2022, author = {Fiedler, Thomas M. and Orzada, Stephan and Fl{\"o}ser, Martina and Rietsch, Stefan H. G. and Schmidt, Simon and Stelter, Jonathan K. and Wittrich, Marco and Quick, Harald H. and Bitz, Andreas and Ladd, Mark E.}, title = {Performance and safety assessment of an integrated transmitarray for body imaging at 7 T under consideration of specific absorption rate, tissue temperature, and thermal dose}, series = {NMR in Biomedicine}, volume = {35}, journal = {NMR in Biomedicine}, number = {5}, publisher = {Wiley}, issn = {0952-3480}, doi = {10.1002/nbm.4656}, pages = {1 -- 17}, year = {2022}, abstract = {In this study, the performance of an integrated body-imaging array for 7 T with 32 radiofrequency (RF) channels under consideration of local specific absorption rate (SAR), tissue temperature, and thermal dose limits was evaluated and the imaging performance was compared with a clinical 3 T body coil. Thirty-two transmit elements were placed in three rings between the bore liner and RF shield of the gradient coil. Slice-selective RF pulse optimizations for B1 shimming and spokes were performed for differently oriented slices in the body under consideration of realistic constraints for power and local SAR. To improve the B1+ homogeneity, safety assessments based on temperature and thermal dose were performed to possibly allow for higher input power for the pulse optimization than permissible with SAR limits. The results showed that using two spokes, the 7 T array outperformed the 3 T birdcage in all the considered regions of interest. However, a significantly higher SAR or lower duty cycle at 7 T is necessary in some cases to achieve similar B1+ homogeneity as at 3 T. 
The homogeneity in up to 50 cm-long coronal slices can particularly benefit from the high RF shim performance provided by the 32 RF channels. The thermal dose approach increases the allowable input power and the corresponding local SAR, in one example up to 100 W/kg, without limiting the exposure time necessary for an MR examination. In conclusion, the integrated antenna array at 7 T enables a clinical workflow for body imaging and comparable imaging performance to a conventional 3 T clinical body coil.}, language = {en} } @article{AngermannGuenthnerHanssenetal.2022, author = {Angermann, Susanne and G{\"u}nthner, Roman and Hanssen, Henner and Lorenz, Georg and Braunisch, Matthias C. and Steubl, Dominik and Matschkal, Julia and Kemmner, Stephan and Hausinger, Renate and Block, Zenonas and Haller, Bernhard and Heemann, Uwe and Kotliar, Konstantin and Grimmer, Timo and Schmaderer, Christoph}, title = {Cognitive impairment and microvascular function in end-stage renal disease}, series = {International Journal of Methods in Psychiatric Research (MPR)}, volume = {31}, journal = {International Journal of Methods in Psychiatric Research (MPR)}, number = {2}, publisher = {Wiley}, issn = {1049-8931 (Print)}, doi = {10.1002/mpr.1909}, pages = {1 -- 10}, year = {2022}, abstract = {Objective Hemodialysis patients show an approximately threefold higher prevalence of cognitive impairment compared to the age-matched general population. Impaired microcirculatory function is one of the assumed causes. Dynamic retinal vessel analysis is a quantitative method for measuring neurovascular coupling and microvascular endothelial function. We hypothesize that cognitive impairment is associated with altered microcirculation of retinal vessels. Methods 152 chronic hemodialysis patients underwent cognitive testing using the Montreal Cognitive Assessment. 
Retinal microcirculation was assessed by Dynamic Retinal Vessel Analysis, which carries out an examination recording retinal vessels' reaction to a flicker light stimulus under standardized conditions. Results In unadjusted as well as in adjusted linear regression analyses a significant association between the visuospatial executive function domain score of the Montreal Cognitive Assessment and the maximum arteriolar dilation as response of retinal arterioles to the flicker light stimulation was obtained. Conclusion This is the first study determining retinal microvascular function as surrogate for cerebral microvascular function and cognition in hemodialysis patients. The relationship between impairment in executive function and reduced arteriolar reaction to flicker light stimulation supports the involvement of cerebral small vessel disease as contributing factor for the development of cognitive impairment in this patient population and might be a target for noninvasive disease monitoring and therapeutic intervention.}, language = {en} } @article{KarschuckKaulenPoghossianetal.2021, author = {Karschuck, Tobias and Kaulen, Corinna and Poghossian, Arshak and Wagner, Patrick H. and Sch{\"o}ning, Michael Josef}, title = {Gold nanoparticle-modified capacitive field-effect sensors: Studying the surface density of nanoparticles and coupling of charged polyelectrolyte macromolecules}, series = {Electrochemical Science Advances}, volume = {2}, journal = {Electrochemical Science Advances}, number = {5}, publisher = {Wiley-VCH}, address = {Weinheim}, issn = {0938-5193}, doi = {10.1002/elsa.202100179}, pages = {10 Seiten}, year = {2021}, abstract = {The coupling of ligand-stabilized gold nanoparticles with field-effect devices offers new possibilities for label-free biosensing. In this work, we study the immobilization of aminooctanethiol-stabilized gold nanoparticles (AuAOTs) on the silicon dioxide surface of a capacitive field-effect sensor. 
The terminal amino group of the AuAOT is well suited for the functionalization with biomolecules. The attachment of the positively-charged AuAOTs on a capacitive field-effect sensor was detected by direct electrical readout using capacitance-voltage and constant capacitance measurements. With a higher particle density on the sensor surface, the measured signal change was correspondingly more pronounced. The results demonstrate the ability of capacitive field-effect sensors for the non-destructive quantitative validation of nanoparticle immobilization. In addition, the electrostatic binding of the polyanion polystyrene sulfonate to the AuAOT-modified sensor surface was studied as a model system for the label-free detection of charged macromolecules. Most likely, this approach can be transferred to the label-free detection of other charged molecules such as enzymes or antibodies.}, language = {en} } @article{HoffmannRohrbachUhletal.2022, author = {Hoffmann, Andreas and Rohrbach, Felix and Uhl, Matthias and Ceblin, Maximilian and Bauer, Thomas and Mallah, Marcel and Jacob, Timo and Heuermann, Holger and Kuehne, Alexander J. C.}, title = {Atmospheric pressure plasma-jet treatment of polyacrylonitrile-nonwovens—Stabilization and roll-to-roll processing}, series = {Journal of Applied Polymer Science}, volume = {139}, journal = {Journal of Applied Polymer Science}, number = {37}, publisher = {Wiley}, issn = {0021-8995 (Print)}, doi = {10.1002/app.52887}, pages = {1 -- 9}, year = {2022}, abstract = {Carbon nanofiber nonwovens represent a powerful class of materials with prospective application in filtration technology or as electrodes with high surface area in batteries, fuel cells, and supercapacitors. While new precursor-to-carbon conversion processes have been explored to overcome productivity restrictions for carbon fiber tows, alternatives for the two-step thermal conversion of polyacrylonitrile precursors into carbon fiber nonwovens are absent. 
In this work, we develop a continuous roll-to-roll stabilization process using an atmospheric pressure microwave plasma jet. We explore the influence of various plasma-jet parameters on the morphology of the nonwoven and compare the stabilized nonwoven to thermally stabilized samples using scanning electron microscopy, differential scanning calorimetry, and infrared spectroscopy. We show that stabilization with a non-equilibrium plasma-jet can be twice as productive as the conventional thermal stabilization in a convection furnace, while producing electrodes of comparable electrochemical performance.}, language = {en} } @article{PourshahidiAchtsnichtOffenhaeusseretal.2022, author = {Pourshahidi, Ali Mohammad and Achtsnicht, Stefan and Offenh{\"a}usser, Andreas and Krause, Hans-Joachim}, title = {Frequency Mixing Magnetic Detection Setup Employing Permanent Ring Magnets as a Static Offset Field Source}, series = {Sensors}, volume = {22}, journal = {Sensors}, number = {22}, editor = {Offenh{\"a}usser, Andreas}, publisher = {MDPI}, address = {Basel}, issn = {1424-8220}, doi = {10.3390/s22228776}, pages = {12 Seiten}, year = {2022}, abstract = {Frequency mixing magnetic detection (FMMD) has been explored for its applications in fields of magnetic biosensing, multiplex detection of magnetic nanoparticles (MNP) and the determination of core size distribution of MNP samples. Such applications rely on the application of a static offset magnetic field, which is generated traditionally with an electromagnet. Such a setup requires a current source, as well as passive or active cooling strategies, which directly sets a limitation based on the portability aspect that is desired for point of care (POC) monitoring applications. In this work, a measurement head is introduced that involves the utilization of two ring-shaped permanent magnets to generate a static offset magnetic field. A steel cylinder in the ring bores homogenizes the field. 
By variation of the distance between the ring magnets and of the thickness of the steel cylinder, the magnitude of the magnetic field at the sample position can be adjusted. Furthermore, the measurement setup is compared to the electromagnet offset module based on measured signals and temperature behavior.}, language = {en} } @article{PoghossianKarschuckWagneretal.2022, author = {Poghossian, Arshak and Karschuck, Tobias and Wagner, Patrick and Sch{\"o}ning, Michael Josef}, title = {Field-Effect Capacitors Decorated with Ligand-Stabilized Gold Nanoparticles: Modeling and Experiments}, series = {Biosensors}, volume = {12}, journal = {Biosensors}, number = {5}, publisher = {MDPI}, address = {Basel}, issn = {2079-6374}, doi = {10.3390/bios12050334}, pages = {Artikel 334}, year = {2022}, abstract = {Nanoparticles are recognized as highly attractive tunable materials for designing field-effect biosensors with enhanced performance. In this work, we present a theoretical model for electrolyte-insulator-semiconductor capacitors (EISCAP) decorated with ligand-stabilized charged gold nanoparticles. The charged AuNPs are taken into account as additional, nanometer-sized local gates. The capacitance-voltage (C-V) curves and constant-capacitance (ConCap) signals of the AuNP-decorated EISCAPs have been simulated. The impact of the AuNP coverage on the shift of the C-V curves and the ConCap signals was also studied experimentally on Al-p-Si-SiO₂ EISCAPs decorated with positively charged aminooctanethiol-capped AuNPs. 
In addition, the surface of the EISCAPs, modified with AuNPs, was characterized by scanning electron microscopy for different immobilization times of the nanoparticles.}, language = {en} } @article{VahidpourAlghazaliAkcaetal.2022, author = {Vahidpour, Farnoosh and Alghazali, Yousef and Akca, Sevilay and Hommes, Gregor and Sch{\"o}ning, Michael Josef}, title = {An Enzyme-Based Interdigitated Electrode-Type Biosensor for Detecting Low Concentrations of H₂O₂ Vapor/Aerosol}, series = {Chemosensors}, volume = {10}, journal = {Chemosensors}, number = {6}, publisher = {MDPI}, address = {Basel}, issn = {2227-9040}, doi = {10.3390/chemosensors10060202}, pages = {Artikel 202}, year = {2022}, abstract = {This work introduces a novel method for the detection of H₂O₂ vapor/aerosol of low concentrations, which is mainly applied in the sterilization of equipment in medical industry. Interdigitated electrode (IDE) structures have been fabricated by means of microfabrication techniques. A differential setup of IDEs was prepared, containing an active sensor element (active IDE) and a passive sensor element (passive IDE), where the former was immobilized with an enzymatic membrane of horseradish peroxidase that is selective towards H₂O₂. Changes in the IDEs' capacitance values (active sensor element versus passive sensor element) under H₂O₂ vapor/aerosol atmosphere proved the detection in the concentration range up to 630 ppm with a fast response time (<60 s). The influence of relative humidity was also tested with regard to the sensor signal, showing no cross-sensitivity. The repeatability assessment of the IDE biosensors confirmed their stable capacitive signal in eight subsequent cycles of exposure to H₂O₂ vapor/aerosol. 
Room-temperature detection of H₂O₂ vapor/aerosol with such miniaturized biosensors will allow a future three-dimensional, flexible mapping of aseptic chambers and help to evaluate sterilization assurance in medical industry.}, language = {en} } @article{HoffmannUhlCeblinetal.2022, author = {Hoffmann, Andreas and Uhl, Matthias and Ceblin, Maximilian and Rohrbach, Felix and Bansmann, Joachim and Mallah, Marcel and Heuermann, Holger and Jacob, Timo and Kuehne, Alexander J.C.}, title = {Atmospheric pressure plasma-jet treatment of PAN-nonwovens—carbonization of nanofiber electrodes}, series = {C - Journal of Carbon Research}, volume = {8}, journal = {C - Journal of Carbon Research}, number = {3}, publisher = {MDPI}, address = {Basel}, issn = {2311-5629}, doi = {10.3390/c8030033}, pages = {8 Seiten}, year = {2022}, abstract = {Carbon nanofibers are produced from dielectric polymer precursors such as polyacrylonitrile (PAN). Carbonized nanofiber nonwovens show high surface area and good electrical conductivity, rendering these fiber materials interesting for application as electrodes in batteries, fuel cells, and supercapacitors. However, thermal processing is slow and costly, which is why new processing techniques have been explored for carbon fiber tows. Alternatives for the conversion of PAN-precursors into carbon fiber nonwovens are scarce. Here, we utilize an atmospheric pressure plasma jet to conduct carbonization of stabilized PAN nanofiber nonwovens. We explore the influence of various processing parameters on the conductivity and degree of carbonization of the converted nanofiber material. The precursor fibers are converted by plasma-jet treatment to carbon fiber nonwovens within seconds, by which they develop a rough surface making subsequent surface activation processes obsolete. The resulting carbon nanofiber nonwovens are applied as supercapacitor electrodes and examined by cyclic voltammetry and impedance spectroscopy. 
Nonwovens that are carbonized within 60 s show capacitances of up to 5 F g⁻¹.}, language = {en} } @inproceedings{ButenwegMeyerFehling2014, author = {Butenweg, Christoph and Meyer, Udo and Fehling, Ekkehard}, title = {INSYSME: first activities of the German partners}, series = {9th International Masonry Conference 2014 in Guimaraes, Portugal, 2014}, booktitle = {9th International Masonry Conference 2014 in Guimaraes, Portugal, 2014}, year = {2014}, language = {en} } @article{AkimbekovDigelTastambeketal.2022, author = {Akimbekov, Nuraly S. and Digel, Ilya and Tastambek, Kuanysh T. and Marat, Adel K. and Turaliyeva, Moldir A. and Kaiyrmanova, Gulzhan K.}, title = {Biotechnology of Microorganisms from Coal Environments: From Environmental Remediation to Energy Production}, series = {Biology}, volume = {11}, journal = {Biology}, number = {9}, publisher = {MDPI}, address = {Basel}, issn = {2079-7737}, doi = {10.3390/biology11091306}, pages = {47 Seiten}, year = {2022}, abstract = {It was generally believed that coal sources are not favorable as live-in habitats for microorganisms due to their recalcitrant chemical nature and negligible decomposition. However, accumulating evidence has revealed the presence of diverse microbial groups in coal environments and their significant metabolic role in coal biogeochemical dynamics and ecosystem functioning. The high oxygen content, organic fractions, and lignin-like structures of lower-rank coals may provide effective means for microbial attack, still representing a greatly unexplored frontier in microbiology. Coal degradation/conversion technology by native bacterial and fungal species has great potential in agricultural development, chemical industry production, and environmental rehabilitation. Furthermore, native microalgal species can offer a sustainable energy source and an excellent bioremediation strategy applicable to coal spill/seam waters. 
Additionally, the measures of the fate of the microbial community would serve as an indicator of restoration progress on post-coal-mining sites. This review puts forward a comprehensive vision of coal biodegradation and bioprocessing by microorganisms native to coal environments for determining their biotechnological potential and possible applications.}, language = {en} } @article{SchulteTiggesFoersterNikolovskietal.2022, author = {Schulte-Tigges, Joschua and F{\"o}rster, Marco and Nikolovski, Gjorgji and Reke, Michael and Ferrein, Alexander and Kaszner, Daniel and Matheis, Dominik and Walter, Thomas}, title = {Benchmarking of various LiDAR sensors for use in self-driving vehicles in real-world environments}, series = {Sensors}, volume = {22}, journal = {Sensors}, number = {19}, publisher = {MDPI}, address = {Basel}, issn = {1424-8220}, doi = {10.3390/s22197146}, pages = {20 Seiten}, year = {2022}, abstract = {In this paper, we report on our benchmark results of the LiDAR sensors Livox Horizon, Robosense M1, Blickfeld Cube, Blickfeld Cube Range, Velodyne Velarray H800, and Innoviz Pro. The idea was to test the sensors in different typical scenarios that were defined with real-world use cases in mind, in order to find a sensor that meet the requirements of self-driving vehicles. For this, we defined static and dynamic benchmark scenarios. In the static scenarios, both LiDAR and the detection target do not move during the measurement. In dynamic scenarios, the LiDAR sensor was mounted on the vehicle which was driving toward the detection target. 
We tested all mentioned LiDAR sensors in both scenarios, show the results regarding the detection accuracy of the targets, and discuss their usefulness for deployment in self-driving cars.}, language = {en} } @inproceedings{ButenwegRajan2014, author = {Butenweg, Christoph and Rajan, Sreelakshmy}, title = {Design and construction techniques of AAC masonry buildings in earthquakes regions}, series = {10 years Xella research in Building Materials : Symposium on the 4th and 5th of September, Potsdam 2014}, booktitle = {10 years Xella research in Building Materials : Symposium on the 4th and 5th of September, Potsdam 2014}, year = {2014}, language = {en} } @article{HaegerGrankinWagner2023, author = {Haeger, Gerrit and Grankin, Alina and Wagner, Michaela}, title = {Construction of an Aspergillus oryzae triple amylase deletion mutant as a chassis to evaluate industrially relevant amylases using multiplex CRISPR/Cas9 editing technology}, series = {Applied Research}, journal = {Applied Research}, number = {Early View}, publisher = {Wiley-VCH}, issn = {2702-4288}, doi = {10.1002/appl.202200106}, pages = {1 -- 15}, year = {2023}, abstract = {Aspergillus oryzae is an industrially relevant organism for the secretory production of heterologous enzymes, especially amylases. The activities of potential heterologous amylases, however, cannot be quantified directly from the supernatant due to the high background activity of native α-amylase. This activity is caused by the gene products of amyA, amyB, and amyC. In this study, an in vitro CRISPR/Cas9 system was established in A. oryzae to delete these genes simultaneously. First, pyrG of A. oryzae NSAR1 was mutated by exploiting NHEJ to generate a counter-selection marker. Next, all amylase genes were deleted simultaneously by co-transforming a repair template carrying pyrG of Aspergillus nidulans and flanking sequences of amylase gene loci. The rate of obtained triple knock-outs was 47\%. 
We showed that triple knockouts do not retain any amylase activity in the supernatant. The established in vitro CRISPR/Cas9 system was used to achieve sequence-specific knock-in of target genes. The system was intended to incorporate a single copy of the gene of interest into the desired host for the development of screening methods. Therefore, an integration cassette for the heterologous Fpi amylase was designed to specifically target the amyB locus. The site-specific integration rate of the plasmid was 78\%, with exceptional additional integrations. Integration frequency was assessed via qPCR and directly correlated with heterologous amylase activity. Hence, we could compare the efficiency between two different signal peptides. In summary, we present a strategy to exploit CRISPR/Cas9 for gene mutation, multiplex knock-out, and the targeted knock-in of an expression cassette in A. oryzae. Our system provides straightforward strain engineering and paves the way for development of fungal screening systems.}, language = {en} } @article{RuebbelkeVoegeleGrajewskietal.2023, author = {R{\"u}bbelke, Dirk and V{\"o}gele, Stefan and Grajewski, Matthias and Zobel, Luzy}, title = {Cross border adjustment mechanism: Initial data for the assessment of hydrogen-based steel production}, series = {Data in Brief}, volume = {47}, journal = {Data in Brief}, number = {Article 108907}, publisher = {Elsevier}, address = {Amsterdam}, issn = {2352-3409}, doi = {10.1016/j.dib.2023.108907}, pages = {1 -- 5}, year = {2023}, abstract = {Ambitious climate targets affect the competitiveness of industries in the international market. To prevent such industries from moving to other countries in the wake of increased climate protection efforts, cost adjustments may become necessary. Their design requires knowledge of country-specific production costs. Here, we present country-specific cost figures for different production routes of steel, paying particular attention to transportation costs. 
The data can be used in floor price models aiming to assess the competitiveness of different steel production routes in different countries (R{\"u}bbelke, 2022).}, language = {en} } @article{PennerUsherovichNiedermeieretal.2022, author = {Penner, Crystal and Usherovich, Samuel and Niedermeier, Jana and B{\'e}langer-Champagne, Camille and Trinczek, Michael and Paulßen, Elisabeth and Hoehr, Cornelia}, title = {Organic Scintillator-Fibre Sensors for Proton Therapy Dosimetry: SCSF-3HF and EJ-260}, series = {electronics}, volume = {12}, journal = {electronics}, number = {1}, publisher = {MDPI}, address = {Basel}, issn = {2079-9292}, doi = {10.3390/electronics12010011}, pages = {12 Seiten}, year = {2022}, abstract = {In proton therapy, the dose from secondary neutrons to the patient can contribute to side effects and the creation of secondary cancer. A simple and fast detection system to distinguish between dose from protons and neutrons both in pretreatment verification as well as potentially in vivo monitoring is needed to minimize dose from secondary neutrons. Two 3 mm long, 1 mm diameter organic scintillators were tested for candidacy to be used in a proton-neutron discrimination detector. The SCSF-3HF (1500) scintillating fibre (Kuraray Co. Chiyoda-ku, Tokyo, Japan) and EJ-260 plastic scintillator (Eljen Technology, Sweetwater, TX, USA) were irradiated at the TRIUMF Neutron Facility and the Proton Therapy Research Centre. In the proton beam, we compared the raw Bragg peak and spread-out Bragg peak response to the industry standard Markus chamber detector. Both scintillator sensors exhibited quenching at high LET in the Bragg peak, presenting a peak-to-entrance ratio of 2.59 for the EJ-260 and 2.63 for the SCSF-3HF fibre, compared to 3.70 for the Markus chamber. The SCSF-3HF sensor demonstrated 1.3 times the sensitivity to protons and 3 times the sensitivity to neutrons as compared to the EJ-260 sensor. 
Combined with our equations relating neutron and proton contributions to dose during proton irradiations, and the application of Birks' quenching correction, these fibres provide valid candidates for inexpensive and replicable proton-neutron discrimination detectors.}, language = {en} } @article{NiedermeierPennerUsherovichetal.2023, author = {Niedermeier, Jana and Penner, Crystal and Usherovich, Samuel and B{\'e}langer-Champagne, Camille and Paulßen, Elisabeth and Hoehr, Cornelia}, title = {Optical Fibers as Dosimeter Detectors for Mixed Proton/Neutron Fields - A Biological Dosimeter}, series = {electronics}, volume = {12}, journal = {electronics}, number = {2}, publisher = {MDPI}, address = {Basel}, issn = {2079-9292}, doi = {10.3390/electronics12020324}, pages = {11 Seiten}, year = {2023}, abstract = {In recent years, proton therapy has gained importance as a cancer treatment modality due to its conformality with the tumor and the sparing of healthy tissue. However, in the interaction of the protons with the beam line elements and patient tissues, potentially harmful secondary neutrons are always generated. To ensure that this neutron dose is as low as possible, treatment plans could be created to also account for and minimize the neutron dose. To monitor such a treatment plan, a compact, easy to use, and inexpensive dosimeter must be developed that not only measures the physical dose, but which can also distinguish between proton and neutron contributions. To that end, plastic optical fibers with scintillation materials (Gd₂O₂S:Tb, Gd₂O₂S:Eu, and YVO₄:Eu) were irradiated with protons and neutrons. It was confirmed that sensors with different scintillation materials have different sensitivities to protons and neutrons. 
A combination of these three scintillators can be used to build a detector array to create a biological dosimeter.}, language = {en} } @inproceedings{RosinKubalskiButenweg2013, author = {Rosin, Julia and Kubalski, Thomas and Butenweg, Christoph}, title = {Seismic isolation of cylindrical liquid storage tanks}, series = {Seismic design of industrial facilities}, booktitle = {Seismic design of industrial facilities}, editor = {Klinkel, Sven and Butenweg, Christoph and Lin, Gao and Holtschoppen, Britta}, publisher = {Springer Vieweg}, address = {Wiesbaden}, isbn = {978-3-658-02810-7}, doi = {10.1007/978-3-658-02810-7_36}, pages = {429 -- 440}, year = {2013}, abstract = {Seismic excited liquid filled tanks are subjected to extreme loading due to hydrodynamic pressures, which can lead to nonlinear stability failure of the thinwalled cylindrical tanks, as it is known from past earthquakes. A significant reduction of the seismically induced loads can be obtained by the application of base isolation systems, which have to be designed carefully with respect to the modified hydrodynamic behaviour of the tank in interaction with the liquid. For this reason a highly sophisticated fluid-structure interaction model has to be applied for a realistic simulation of the overall dynamic system. In the following, such a model is presented and compared with the results of simplified mathematical models for rigidly supported tanks. 
Finally, it is examined to what extent a simple mechanical model can represent the behaviour of a base isolated tank in case of seismic excitation}, language = {en} } @misc{ButenwegGellertReindletal.2009, author = {Butenweg, Christoph and Gellert, Christoph and Reindl, Lukas and Meskouris, Konstantin}, title = {A nonlinear method for the seismic safety verification of masonry buildings}, publisher = {National Technical University of Athens}, address = {Athen}, year = {2009}, abstract = {In order for traditional masonry to stay a competitive building material in seismically active regions there is an urgent demand for modern, deformation-based verification procedures which exploit the nonlinear load bearing reserves. The Capacity Spectrum Method (CSM) is a widely accepted design approach in the field of reinforced concrete and steel construction. It compares the seismic action with the load-bearing capacity of the building considering nonlinear material behavior with its post-peak capacity. The bearing capacity of the building is calculated iteratively using single wall capacity curves. This paper presents a new approach for the bilinear approximation of single wall capacity curves in the style of EC6/EC8 respectively FEMA 306/FEMA 356 based on recent shear wall test results of the European Collective-Research Project "ESECMaSE". 
The application of the CSM to masonry structures by using bilinear approximations of capacity curves as input is demonstrated on the example of a typical German residential home.}, language = {en} } @article{ŠakićMarinkovićButenwegetal.2023, author = {Šakić, Bogdan and Marinković, Marko and Butenweg, Christoph and Klinkel, Sven}, title = {Influence of slab deflection on the out-of-plane capacity of unreinforced masonry partition walls}, series = {Engineering Structures}, volume = {276}, journal = {Engineering Structures}, editor = {Yang, J.}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0141-0296}, doi = {10.1016/j.engstruct.2022.115342}, year = {2023}, abstract = {Severe damage of non-structural elements is noticed in previous earthquakes, causing high economic losses and posing a life threat for the people. Masonry partition walls are one of the most commonly used non-structural elements. Therefore, their behaviour under earthquake loading in out-of-plane (OOP) direction is investigated by several researches in the past years. However, none of the existing experimental campaigns or analytical approaches consider the influence of prior slab deflection on OOP response of partition walls. Moreover, none of the existing construction techniques for the connection of partition walls with surrounding reinforced concrete (RC) is investigated for the combined slab deflection and OOP loading. However, the inevitable time-dependent behaviour of RC slabs leads to high values of final slab deflections which can further influence boundary conditions of partition walls. Therefore, a comprehensive study on the influence of slab deflection on the OOP capacity of masonry partitions is conducted. In the first step, experimental tests are carried out. Results of experimental tests are further used for the calibration of the numerical model employed for a parametric study. 
Based on the results, behaviour under combined loading for different construction techniques is explained. The results show that slab deflection leads either to severe damage or to a high reduction of OOP capacity. Existing practical solutions do not account for these effects. In this contribution, recommendations to overcome the problems of combined slab deflection and OOP loading on masonry partition walls are given. Possible interaction of in-plane (IP) loading, with the combined slab deflection and OOP loading on partition walls, is not investigated in this study.}, language = {en} } @article{MorandiButenwegBreisetal.2022, author = {Morandi, Paolo and Butenweg, Christoph and Breis, Khaled and Beyer, Katrin and Magenes, Guido}, title = {Latest findings on the behaviour factor q for the seismic design of URM buildings}, series = {Bulletin of Earthquake Engineering}, volume = {20}, journal = {Bulletin of Earthquake Engineering}, number = {11}, editor = {Ansal, Atilla}, publisher = {Springer Nature}, address = {Cham}, issn = {1573-1456}, doi = {10.1007/s10518-022-01419-7}, pages = {5797 -- 5848}, year = {2022}, abstract = {Recent earthquakes as the 2012 Emilia earthquake sequence showed that recently built unreinforced masonry (URM) buildings behaved much better than expected and sustained, despite the maximum PGA values ranged between 0.20-0.30 g, either minor damage or structural damage that is deemed repairable. Especially low-rise residential and commercial masonry buildings with a code-conforming seismic design and detailing behaved in general very well without substantial damages. The low damage grades of modern masonry buildings that was observed during this earthquake series highlighted again that codified design procedures based on linear analysis can be rather conservative. 
Although advances in simulation tools make nonlinear calculation methods more readily accessible to designers, linear analyses will still be the standard design method for years to come. The present paper aims to improve the linear seismic design method by providing a proper definition of the q-factor of URM buildings. These q-factors are derived for low-rise URM buildings with rigid diaphragms which represent recent construction practise in low to moderate seismic areas of Italy and Germany. The behaviour factor components for deformation and energy dissipation capacity and for overstrength due to the redistribution of forces are derived by means of pushover analyses. Furthermore, considerations on the behaviour factor component due to other sources of overstrength in masonry buildings are presented. As a result of the investigations, rationally based values of the behaviour factor q to be used in linear analyses in the range of 2.0-3.0 are proposed.}, language = {en} } @article{MarinkovićButenweg2022, author = {Marinković, Marko and Butenweg, Christoph}, title = {Experimental testing of decoupled masonry infills with steel anchors for out-of-plane support under combined in-plane and out-of-plane seismic loading}, series = {Construction and Building Materials}, volume = {318}, journal = {Construction and Building Materials}, number = {1}, editor = {Ford, Michael C.}, publisher = {Elsevier}, address = {Amsterdam}, issn = {1879-0526}, doi = {10.1016/j.conbuildmat.2021.126041}, year = {2022}, abstract = {Because of simple construction process, high energy efficiency, significant fire resistance and excellent sound isolation, masonry infilled reinforced concrete (RC) frame structures are very popular in most of the countries in the world, as well as in seismic active areas. 
However, many RC frame structures with masonry infills were seriously damaged during earthquake events, as the traditional infills are generally constructed with direct contact to the RC frame which brings undesirable infill/frame interaction. This interaction leads to the activation of the equivalent diagonal strut in the infill panel, due to the RC frame deformation, and combined with seismically induced loads perpendicular to the infill panel often causes total collapses of the masonry infills and heavy damages to the RC frames. This fact was the motivation for developing different approaches for improving the behaviour of masonry infills, where infill isolation (decoupling) from the frame has been more intensively studied in the last decade. In-plane isolation of the infill wall reduces infill activation, but causes the need for additional measures to restrain out-of-plane movements. This can be provided by installing steel anchors, as proposed by some researchers. Within the framework of European research project INSYSME (Innovative Systems for Earthquake Resistant Masonry Enclosures in Reinforced Concrete Buildings) the system based on a use of elastomers for in-plane decoupling and steel anchors for out-of-plane restraint was tested. This constructive solution was tested and deeply investigated during the experimental campaign where traditional and decoupled masonry infilled RC frames with anchors were subjected to separate and combined in-plane and out-of-plane loading. Based on a detailed evaluation and comparison of the test results, the performance and effectiveness of the developed system are illustrated.}, language = {en} } @article{RossiWinandsButenweg2022, author = {Rossi, Leonardo and Winands, Mark H. M. 
and Butenweg, Christoph}, title = {Monte Carlo Tree Search as an intelligent search tool in structural design problems}, series = {Engineering with Computers : An International Journal for Simulation-Based Engineering}, volume = {38}, journal = {Engineering with Computers : An International Journal for Simulation-Based Engineering}, number = {4}, editor = {Zhang, Jessica}, publisher = {Springer Nature}, address = {Cham}, issn = {1435-5663}, doi = {10.1007/s00366-021-01338-2}, pages = {3219 -- 3236}, year = {2022}, abstract = {Monte Carlo Tree Search (MCTS) is a search technique that in the last decade emerged as a major breakthrough for Artificial Intelligence applications regarding board- and video-games. In 2016, AlphaGo, an MCTS-based software agent, outperformed the human world champion of the board game Go. This game was for long considered almost infeasible for machines, due to its immense search space and the need for a long-term strategy. Since this historical success, MCTS is considered as an effective new approach for many other scientific and technical problems. Interestingly, civil structural engineering, as a discipline, offers many tasks whose solution may benefit from intelligent search and in particular from adopting MCTS as a search tool. In this work, we show how MCTS can be adapted to search for suitable solutions of a structural engineering design problem. The problem consists of choosing the load-bearing elements in a reference reinforced concrete structure, so to achieve a set of specific dynamic characteristics. In the paper, we report the results obtained by applying both a plain and a hybrid version of single-agent MCTS. The hybrid approach consists of an integration of both MCTS and classic Genetic Algorithm (GA), the latter also serving as a term of comparison for the results. 
The study's outcomes may open new perspectives for the adoption of MCTS as a design tool for civil engineers.}, language = {en} } @article{ButenwegBursiPaolaccietal.2021, author = {Butenweg, Christoph and Bursi, Oreste S. and Paolacci, Fabrizio and Marinković, Marko and Lanese, Igor and Nardin, Chiara and Quinci, Gianluca}, title = {Seismic performance of an industrial multi-storey frame structure with process equipment subjected to shake table testing}, series = {Engineering Structures}, volume = {243}, journal = {Engineering Structures}, number = {15}, editor = {Yang, J.}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0141-0296}, doi = {10.1016/j.engstruct.2021.112681}, year = {2021}, abstract = {Past earthquakes demonstrated the high vulnerability of industrial facilities equipped with complex process technologies leading to serious damage of process equipment and multiple and simultaneous release of hazardous substances. Nonetheless, current standards for seismic design of industrial facilities are considered inadequate to guarantee proper safety conditions against exceptional events entailing loss of containment and related consequences. On these premises, the SPIF project -Seismic Performance of Multi-Component Systems in Special Risk Industrial Facilities- was proposed within the framework of the European H2020 SERA funding scheme. In detail, the objective of the SPIF project is the investigation of the seismic behaviour of a representative industrial multi-storey frame structure equipped with complex process components by means of shaking table tests. Along this main vein and in a performance-based design perspective, the issues investigated in depth are the interaction between a primary moment resisting frame (MRF) steel structure and secondary process components that influence the performance of the whole system; and a proper check of floor spectra predictions. 
The evaluation of experimental data clearly shows a favourable performance of the MRF structure, some weaknesses of local details due to the interaction between floor crossbeams and process components and, finally, the overconservatism of current design standards w.r.t. floor spectra predictions.}, language = {en} } @article{BaringhausGaigall2017, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {On Hotelling's T² test in a special paired sample case}, series = {Communications in Statistics - Theory and Methods}, volume = {48}, journal = {Communications in Statistics - Theory and Methods}, number = {2}, publisher = {Taylor \& Francis}, address = {London}, issn = {1532-415X}, doi = {10.1080/03610926.2017.1408828}, pages = {257 -- 267}, year = {2017}, abstract = {In a special paired sample case, Hotelling's T² test based on the differences of the paired random vectors is the likelihood ratio test for testing the hypothesis that the paired random vectors have the same mean; with respect to a special group of affine linear transformations it is the uniformly most powerful invariant test for the general alternative of a difference in mean. We present an elementary straightforward proof of this result. The likelihood ratio test for testing the hypothesis that the covariance structure is of the assumed special form is derived and discussed. 
Applications to real data are given.}, language = {en} } @article{BaringhausGaigall2017a, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {Hotelling's T² tests in paired and independent survey samples: An efficiency comparison}, series = {Journal of Multivariate Analysis}, volume = {2017}, journal = {Journal of Multivariate Analysis}, number = {154}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0047-259X}, doi = {10.1016/j.jmva.2016.11.004}, pages = {177 -- 198}, year = {2017}, abstract = {Hotelling's T² tests in paired and independent survey samples are compared using the traditional asymptotic efficiency concepts of Hodges-Lehmann, Bahadur and Pitman, as well as through criteria based on the volumes of corresponding confidence regions. Conditions characterizing the superiority of a procedure are given in terms of population canonical correlation type coefficients. Statistical tests for checking these conditions are developed. Test statistics based on the eigenvalues of a symmetrized sample cross-covariance matrix are suggested, as well as test statistics based on sample canonical correlation type coefficients.}, language = {en} } @article{BaringhausGaigall2019, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {On an asymptotic relative efficiency concept based on expected volumes of confidence regions}, series = {Statistics - A Journal of Theoretical and Applied Statistics}, volume = {53}, journal = {Statistics - A Journal of Theoretical and Applied Statistics}, number = {6}, publisher = {Taylor \& Francis}, address = {London}, issn = {1029-4910}, doi = {10.1080/02331888.2019.1683560}, pages = {1396 -- 1436}, year = {2019}, abstract = {The paper deals with an asymptotic relative efficiency concept for confidence regions of multidimensional parameters that is based on the expected volumes of the confidence regions. 
Under standard conditions the asymptotic relative efficiencies of confidence regions are seen to be certain powers of the ratio of the limits of the expected volumes. These limits are explicitly derived for confidence regions associated with certain plugin estimators, likelihood ratio tests and Wald tests. Under regularity conditions, the asymptotic relative efficiency of each of these procedures with respect to each one of its competitors is equal to 1. The results are applied to multivariate normal distributions and multinomial distributions in a fairly general setting.}, language = {en} } @article{Gaigall2019, author = {Gaigall, Daniel}, title = {On a new approach to the multi-sample goodness-of-fit problem}, series = {Communications in Statistics - Simulation and Computation}, volume = {53}, journal = {Communications in Statistics - Simulation and Computation}, number = {10}, publisher = {Taylor \& Francis}, address = {London}, issn = {1532-4141}, doi = {10.1080/03610918.2019.1618472}, pages = {2971 -- 2989}, year = {2019}, abstract = {Suppose we have k samples X₁,₁,…,X₁,ₙ₁,…,Xₖ,₁,…,Xₖ,ₙₖ with different sample sizes ₙ₁,…,ₙₖ and unknown underlying distribution functions F₁,…,Fₖ as observations plus k families of distribution functions {G₁(⋅,ϑ);ϑ∈Θ},…,{Gₖ(⋅,ϑ);ϑ∈Θ}, each indexed by elements ϑ from the same parameter set Θ, we consider the new goodness-of-fit problem whether or not (F₁,…,Fₖ) belongs to the parametric family {(G₁(⋅,ϑ),…,Gₖ(⋅,ϑ));ϑ∈Θ}. New test statistics are presented and a parametric bootstrap procedure for the approximation of the unknown null distributions is discussed. Under regularity assumptions, it is proved that the approximation works asymptotically, and the limiting distributions of the test statistics in the null hypothesis case are determined. Simulation studies investigate the quality of the new approach for small and moderate sample sizes. 
Applications to real-data sets illustrate how the idea can be used for verifying model assumptions.}, language = {en} } @article{DitzhausGaigall2018, author = {Ditzhaus, Marc and Gaigall, Daniel}, title = {A consistent goodness-of-fit test for huge dimensional and functional data}, series = {Journal of Nonparametric Statistics}, volume = {30}, journal = {Journal of Nonparametric Statistics}, number = {4}, publisher = {Taylor \& Francis}, address = {Abingdon}, issn = {1029-0311}, doi = {10.1080/10485252.2018.1486402}, pages = {834 -- 859}, year = {2018}, abstract = {A nonparametric goodness-of-fit test for random variables with values in a separable Hilbert space is investigated. To verify the null hypothesis that the data come from a specific distribution, an integral type test based on a Cram{\´e}r-von-Mises statistic is suggested. The convergence in distribution of the test statistic under the null hypothesis is proved and the test's consistency is concluded. Moreover, properties under local alternatives are discussed. Applications are given for data of huge but finite dimension and for functional data in infinite dimensional spaces. A general approach enables the treatment of incomplete data. 
In simulation studies the test competes with alternative proposals.}, language = {en} } @article{BaringhausGaigallThiele2018, author = {Baringhaus, Ludwig and Gaigall, Daniel and Thiele, Jan Philipp}, title = {Statistical inference for L²-distances to uniformity}, series = {Computational Statistics}, volume = {2018}, journal = {Computational Statistics}, number = {33}, publisher = {Springer}, address = {Berlin}, issn = {1613-9658}, doi = {10.1007/s00180-018-0820-0}, pages = {1863 -- 1896}, year = {2018}, abstract = {The paper deals with the asymptotic behaviour of estimators, statistical tests and confidence intervals for L²-distances to uniformity based on the empirical distribution function, the integrated empirical distribution function and the integrated empirical survival function. Approximations of power functions, confidence intervals for the L²-distances and statistical neighbourhood-of-uniformity validation tests are obtained as main applications. The finite sample behaviour of the procedures is illustrated by a simulation study.}, language = {en} } @article{BaringhausGaigall2015, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {On an independence test approach to the goodness-of-fit problem}, series = {Journal of Multivariate Analysis}, volume = {2015}, journal = {Journal of Multivariate Analysis}, number = {140}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0047-259X}, doi = {10.1016/j.jmva.2015.05.013}, pages = {193 -- 208}, year = {2015}, abstract = {Let X₁,…,Xₙ be independent and identically distributed random variables with distribution F. Assuming that there are measurable functions f:R²→R and g:R²→R characterizing a family F of distributions on the Borel sets of R in the way that the random variables f(X₁,X₂),g(X₁,X₂) are independent, if and only if F∈F, we propose to treat the testing problem H:F∈F,K:F∉F by applying a consistent nonparametric independence test to the bivariate sample variables (f(Xᵢ,Xⱼ),g(Xᵢ,Xⱼ)),1⩽i,j⩽n,i≠j. 
A parametric bootstrap procedure needed to get critical values is shown to work. The consistency of the test is discussed. The power performance of the procedure is compared with that of the classical tests of Kolmogorov-Smirnov and Cram{\´e}r-von Mises in the special cases where F is the family of gamma distributions or the family of inverse Gaussian distributions.}, language = {en} } @article{Gaigall2021, author = {Gaigall, Daniel}, title = {Test for Changes in the Modeled Solvency Capital Requirement of an Internal Risk Model}, series = {ASTIN Bulletin}, volume = {51}, journal = {ASTIN Bulletin}, number = {3}, publisher = {Cambridge Univ. Press}, address = {Cambridge}, issn = {1783-1350}, doi = {10.1017/asb.2021.20}, pages = {813 -- 837}, year = {2021}, abstract = {In the context of the Solvency II directive, the operation of an internal risk model is a possible way for risk assessment and for the determination of the solvency capital requirement of an insurance company in the European Union. A Monte Carlo procedure is customary to generate a model output. To be compliant with the directive, validation of the internal risk model is conducted on the basis of the model output. For this purpose, we suggest a new test for checking whether there is a significant change in the modeled solvency capital requirement. Asymptotic properties of the test statistic are investigated and a bootstrap approximation is justified. A simulation study investigates the performance of the test in the finite sample case and confirms the theoretical results. The internal risk model and the application of the test is illustrated in a simplified example. 
The method has more general usage for inference of a broad class of law-invariant and coherent risk measures on the basis of a paired sample.}, language = {en} } @article{Gaigall2020, author = {Gaigall, Daniel}, title = {Hoeffding-Blum-Kiefer-Rosenblatt independence test statistic on partly not identically distributed data}, series = {Communications in Statistics - Theory and Methods}, volume = {51}, journal = {Communications in Statistics - Theory and Methods}, number = {12}, publisher = {Taylor \& Francis}, address = {London}, issn = {1532-415X}, doi = {10.1080/03610926.2020.1805767}, pages = {4006 -- 4028}, year = {2020}, abstract = {The established Hoeffding-Blum-Kiefer-Rosenblatt independence test statistic is investigated for partly not identically distributed data. Surprisingly, it turns out that the statistic has the well-known distribution-free limiting null distribution of the classical criterion under standard regularity conditions. An application is testing goodness-of-fit for the regression function in a nonparametric random effects meta-regression model, where the consistency is obtained as well. Simulations investigate size and power of the approach for small and moderate sample sizes. A real data example based on clinical trials illustrates how the test can be used in applications.}, language = {en} } @article{Gaigall2020a, author = {Gaigall, Daniel}, title = {Testing marginal homogeneity of a continuous bivariate distribution with possibly incomplete paired data}, series = {Metrika}, volume = {2020}, journal = {Metrika}, number = {83}, publisher = {Springer}, issn = {1435-926X}, doi = {10.1007/s00184-019-00742-5}, pages = {437 -- 465}, year = {2020}, abstract = {We discuss the testing problem of homogeneity of the marginal distributions of a continuous bivariate distribution based on a paired sample with possibly missing components (missing completely at random). 
Applying the well-known two-sample Cram{\´e}r-von-Mises distance to the remaining data, we determine the limiting null distribution of our test statistic in this situation. It is seen that a new resampling approach is appropriate for the approximation of the unknown null distribution. We prove that the resulting test asymptotically reaches the significance level and is consistent. Properties of the test under local alternatives are pointed out as well. Simulations investigate the quality of the approximation and the power of the new approach in the finite sample case. As an illustration we apply the test to real data sets.}, language = {en} } @article{Gaigall2020b, author = {Gaigall, Daniel}, title = {Rothman-Woodroofe symmetry test statistic revisited}, series = {Computational Statistics \& Data Analysis}, volume = {2020}, journal = {Computational Statistics \& Data Analysis}, number = {142}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0167-9473}, doi = {10.1016/j.csda.2019.106837}, pages = {Artikel 106837}, year = {2020}, abstract = {The Rothman-Woodroofe symmetry test statistic is revisited on the basis of independent but not necessarily identically distributed random variables. The distribution-freeness if the underlying distributions are all symmetric and continuous is obtained. The results are applied for testing symmetry in a meta-analysis random effects model. The consistency of the procedure is discussed in this situation as well. A comparison with an alternative proposal from the literature is conducted via simulations. Real data are analyzed to demonstrate how the new approach works in practice.}, language = {en} } @inproceedings{Gaigall2022, author = {Gaigall, Daniel}, title = {On Consistent Hypothesis Testing In General Hilbert Spaces}, publisher = {Avestia Publishing}, address = {Orl{\´e}ans, Kanada}, doi = {10.11159/icsta22.157}, pages = {Paper No. 
157}, year = {2022}, abstract = {Inference on the basis of high-dimensional and functional data are two topics which are discussed frequently in the current statistical literature. A possibility to include both topics in a single approach is working on a very general space for the underlying observations, such as a separable Hilbert space. We propose a general method for consistently hypothesis testing on the basis of random variables with values in separable Hilbert spaces. We avoid concerns with the curse of dimensionality due to a projection idea. We apply well-known test statistics from nonparametric inference to the projected data and integrate over all projections from a specific set and with respect to suitable probability measures. In contrast to classical methods, which are applicable for real-valued random variables or random vectors of dimensions lower than the sample size, the tests can be applied to random vectors of dimensions larger than the sample size or even to functional and high-dimensional data. In general, resampling procedures such as bootstrap or permutation are suitable to determine critical values. The idea can be extended to the case of incomplete observations. Moreover, we develop an efficient algorithm for implementing the method. Examples are given for testing goodness-of-fit in a one-sample situation in [1] or for testing marginal homogeneity on the basis of a paired sample in [2]. Here, the test statistics in use can be seen as generalizations of the well-known Cram{\´e}r-von-Mises test statistics in the one-sample and two-samples case. The treatment of other testing problems is possible as well. By using the theory of U-statistics, for instance, asymptotic null distributions of the test statistics are obtained as the sample size tends to infinity. Standard continuity assumptions ensure the asymptotic exactness of the tests under the null hypothesis and that the tests detect any alternative in the limit. 
Simulation studies demonstrate size and power of the tests in the finite sample case, confirm the theoretical findings, and are used for the comparison with concurring procedures. A possible application of the general approach is inference for stock market returns, also in high data frequencies. In the field of empirical finance, statistical inference of stock market prices usually takes place on the basis of related log-returns as data. In the classical models for stock prices, i.e., the exponential L{\´e}vy model, Black-Scholes model, and Merton model, properties such as independence and stationarity of the increments ensure an independent and identically structure of the data. Specific trends during certain periods of the stock price processes can cause complications in this regard. In fact, our approach can compensate those effects by the treatment of the log-returns as random vectors or even as functional data.}, language = {en} } @article{DitzhausGaigall2022, author = {Ditzhaus, Marc and Gaigall, Daniel}, title = {Testing marginal homogeneity in Hilbert spaces with applications to stock market returns}, series = {Test}, volume = {2022}, journal = {Test}, number = {31}, publisher = {Springer}, issn = {1863-8260}, doi = {10.1007/s11749-022-00802-5}, pages = {749 -- 770}, year = {2022}, abstract = {This paper considers a paired data framework and discusses the question of marginal homogeneity of bivariate high-dimensional or functional data. The related testing problem can be endowed into a more general setting for paired random variables taking values in a general Hilbert space. To address this problem, a Cram{\´e}r-von-Mises type test statistic is applied and a bootstrap procedure is suggested to obtain critical values and finally a consistent test. The desired properties of a bootstrap test can be derived that are asymptotic exactness under the null hypothesis and consistency under alternatives. 
Simulations show the quality of the test in the finite sample case. A possible application is the comparison of two possibly dependent stock market returns based on functional data. The approach is demonstrated based on historical data for different stock market indices.}, language = {en} } @article{GaigallGerstenbergTrinh2022, author = {Gaigall, Daniel and Gerstenberg, Julian and Trinh, Thi Thu Ha}, title = {Empirical process of concomitants for partly categorial data and applications in statistics}, series = {Bernoulli}, volume = {28}, journal = {Bernoulli}, number = {2}, publisher = {International Statistical Institute}, address = {Den Haag, NL}, issn = {1573-9759}, doi = {10.3150/21-BEJ1367}, pages = {803 -- 829}, year = {2022}, abstract = {On the basis of independent and identically distributed bivariate random vectors, where the components are categorial and continuous variables, respectively, the related concomitants, also called induced order statistic, are considered. The main theoretical result is a functional central limit theorem for the empirical process of the concomitants in a triangular array setting. A natural application is hypothesis testing. An independence test and a two-sample test are investigated in detail. The fairly general setting enables limit results under local alternatives and bootstrap samples. For the comparison with existing tests from the literature simulation studies are conducted. The empirical results obtained confirm the theoretical findings.}, language = {en} } @inproceedings{StaatTran2022, author = {Staat, Manfred and Tran, Ngoc Trinh}, title = {Strain based brittle failure criteria for rocks}, series = {Proceedings of (NACOME2022) The 11th National Conference on Mechanics, Vol. 1. Solid Mechanics, Rock Mechanics, Artificial Intelligence, Teaching and Training, Hanoi, December 2-3, 2022}, booktitle = {Proceedings of (NACOME2022) The 11th National Conference on Mechanics, Vol. 1. 
Solid Mechanics, Rock Mechanics, Artificial Intelligence, Teaching and Training, Hanoi, December 2-3, 2022}, publisher = {Nha xuat ban Khoa hoc tu nhien va Cong nghe (Verlag Naturwissenschaft und Technik)}, address = {Hanoi}, isbn = {978-604-357-084-7}, pages = {500 -- 509}, year = {2022}, abstract = {When confining pressure is low or absent, extensional fractures are typical, with fractures occurring on unloaded planes in rock. These "paradox" fractures can be explained by a phenomenological extension strain failure criterion. In the past, a simple empirical criterion for fracture initiation in brittle rock has been developed. But this criterion makes unrealistic strength predictions in biaxial compression and tension. A new extension strain criterion overcomes this limitation by adding a weighted principal shear component. The weight is chosen, such that the enriched extension strain criterion represents the same failure surface as the Mohr-Coulomb (MC) criterion. Thus, the MC criterion has been derived as an extension strain criterion predicting failure modes, which are unexpected in the understanding of the failure of cohesive-frictional materials. In progressive damage of rock, the most likely fracture direction is orthogonal to the maximum extension strain. The enriched extension strain criterion is proposed as a threshold surface for crack initiation CI and crack damage CD and as a failure surface at peak P. 
Examples show that the enriched extension strain criterion predicts much lower volumes of damaged rock mass compared to the simple extension strain criterion.}, language = {en} } @inproceedings{ChircuCzarneckiFriedmannetal.2023, author = {Chircu, Alina and Czarnecki, Christian and Friedmann, Daniel and Pomaskow, Johanna and Sultanow, Eldar}, title = {Towards a Digital Twin of Society}, series = {Proceedings of the 56th Hawaii International Conference on System Sciences 2023}, booktitle = {Proceedings of the 56th Hawaii International Conference on System Sciences 2023}, publisher = {University of Hawai'i}, address = {Honolulu}, isbn = {978-0-9981331-6-4}, pages = {6748 -- 6757}, year = {2023}, abstract = {This paper describes the potential for developing a digital twin of society -- a dynamic model that can be used to observe, analyze, and predict the evolution of various societal aspects. Such a digital twin can help governmental agencies and policy makers in interpreting trends, understanding challenges, and making decisions regarding investments or policies necessary to support societal development and ensure future prosperity. The paper reviews related work regarding the digital twin paradigm and its applications. The paper presents a motivating case study -- an analysis of opportunities and challenges faced by the German federal employment agency, Bundesagentur f{\"u}r Arbeit (BA), proposes solutions using digital twins, and describes initial proofs of concept for such solutions.}, language = {en} } @inproceedings{Butenweg2022, author = {Butenweg, Christoph}, title = {Seismic design and evaluation of industrial facilities}, series = {Progresses in European Earthquake Engineering and Seismology. Third European Conference on Earthquake Engineering and Seismology - Bucharest, 2022}, booktitle = {Progresses in European Earthquake Engineering and Seismology. 
Third European Conference on Earthquake Engineering and Seismology - Bucharest, 2022}, editor = {Vacareanu, Radu and Ionescu, Constantin}, publisher = {Springer}, address = {Cham}, isbn = {978-3-031-15103-3}, issn = {2524-342X}, doi = {10.1007/978-3-031-15104-0}, pages = {449 -- 464}, year = {2022}, abstract = {Industrial facilities must be thoroughly designed to withstand seismic actions as they exhibit an increased loss potential due to the possibly wideranging damage consequences and the valuable process engineering equipment. Past earthquakes showed the social and political consequences of seismic damage to industrial facilities and sensitized the population and politicians worldwide for the possible hazard emanating from industrial facilities. However, a holistic approach for the seismic design of industrial facilities can presently neither be found in national nor in international standards. The introduction of EN 1998-4 of the new generation of Eurocode 8 will improve the normative situation with specific seismic design rules for silos, tanks and pipelines and secondary process components. The article presents essential aspects of the seismic design of industrial facilities based on the new generation of Eurocode 8 using the example of tank structures and secondary process components. The interaction effects of the process components with the primary structure are illustrated by means of the experimental results of a shaking table test of a three story moment resisting steel frame with different process components. 
Finally, an integrated approach of digital plant models based on building information modelling (BIM) and structural health monitoring (SHM) is presented, which provides not only a reliable decision-making basis for operation, maintenance and repair but also an excellent tool for rapid assessment of seismic damage.}, language = {en} } @incollection{GkatzogiasVeljkovivPohorylesetal.2022, author = {Gkatzogias, Konstantinos and Veljkoviv, Ana and Pohoryles, Daniel A. and Tsionis, Georgios and Bournas, Dionysios A. and Crowley, Helen and Norl{\´e}n, Hedvig and Butenweg, Christoph and Gervasio, Helena and Manfredi, Vincenzo and Masi, Angelo and Zaharieva, Roumiana}, title = {Policy practice and regional impact assessment for building renovation}, series = {REEBUILD Integrated Techniques for the Seismic Strengthening \& Energy Efficiency of Existing Buildings}, booktitle = {REEBUILD Integrated Techniques for the Seismic Strengthening \& Energy Efficiency of Existing Buildings}, editor = {Gkatzogias, Konstantinos and Tsionis, Georgios}, publisher = {Publications Office of the European Union}, address = {Luxembourg}, isbn = {978-92-76-60454-9}, issn = {1831-9424}, doi = {10.2760/883122}, pages = {1 -- 68}, year = {2022}, abstract = {The work presented in this report provides scientific support to building renovation policies in the EU by promoting a holistic point of view on the topic. Integrated renovation can be seen as a nexus between European policies on disaster resilience, energy efficiency and circularity in the building sector. An overview of policy measures for the seismic and energy upgrading of buildings across EU Member States identified only a few available measures for combined upgrading. Regulatory framework, financial instruments and digital tools similar to those for energy renovation, together with awareness and training may promote integrated renovation. 
A framework for regional prioritisation of building renovation was put forward, considering seismic risk, energy efficiency, and socioeconomic vulnerability independently and in an integrated way. Results indicate that prioritisation of building renovation is a multidimensional problem. Depending on priorities, different integrated indicators should be used to inform policies and accomplish the highest relative or most spread impact across different sectors. The framework was further extended to assess the impact of renovation scenarios across the EU with a focus on priority regions. Integrated renovation can provide a risk-proofed, sustainable, and inclusive built environment, presenting an economic benefit in the order of magnitude of the highest benefit among the separate interventions. Furthermore, it presents the unique capability of reducing fatalities and energy consumption at the same time and, depending on the scenario, to a greater extent.}, language = {en} } @inproceedings{EvansBraunUlmeretal.2022, author = {Evans, Benjamin and Braun, Sebastian and Ulmer, Jessica and Wollert, J{\"o}rg}, title = {AAS implementations - current problems and solutions}, series = {20th International Conference on Mechatronics - Mechatronika (ME)}, booktitle = {20th International Conference on Mechatronics - Mechatronika (ME)}, publisher = {IEEE}, isbn = {978-1-6654-1040-3}, doi = {10.1109/ME54704.2022.9982933}, pages = {6 Seiten}, year = {2022}, abstract = {The fourth industrial revolution presents a multitude of challenges for industries, one of which being the increased flexibility required of manufacturing lines as a result of increased consumer demand for individualised products. One solution to tackle this challenge is the digital twin, more specifically the standardised model of a digital twin also known as the asset administration shell. The standardisation of an industry wide communications tool is a critical step in enabling inter-company operations. 
This paper discusses the current state of asset administration shells, the frameworks used to host them and their problems that need to be addressed. To tackle these issues, we propose an event-based server capable of drastically reducing response times between assets and asset administration shells and a multi-agent system used for the orchestration and deployment of the shells in the field.}, language = {en} } @article{BaringhausGaigall2022, author = {Baringhaus, Ludwig and Gaigall, Daniel}, title = {A goodness-of-fit test for the compound Poisson exponential model}, series = {Journal of Multivariate Analysis}, volume = {195}, journal = {Journal of Multivariate Analysis}, number = {Article 105154}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0047-259X}, doi = {10.1016/j.jmva.2022.105154}, year = {2022}, abstract = {On the basis of bivariate data, assumed to be observations of independent copies of a random vector (S,N), we consider testing the hypothesis that the distribution of (S,N) belongs to the parametric class of distributions that arise with the compound Poisson exponential model. Typically, this model is used in stochastic hydrology, with N as the number of raindays, and S as total rainfall amount during a certain time period, or in actuarial science, with N as the number of losses, and S as total loss expenditure during a certain time period. The compound Poisson exponential model is characterized in the way that a specific transform associated with the distribution of (S,N) satisfies a certain differential equation. Mimicking the function part of this equation by substituting the empirical counterparts of the transform we obtain an expression the weighted integral of the square of which is used as test statistic. We deal with two variants of the latter, one of which being invariant under scale transformations of the S-part by fixed positive constants. Critical values are obtained by using a parametric bootstrap procedure. 
The asymptotic behavior of the tests is discussed. A simulation study demonstrates the performance of the tests in the finite sample case. The procedure is applied to rainfall data and to an actuarial dataset. A multivariate extension is also discussed.}, language = {en} } @article{TranTrinhDaoetal.2022, author = {Tran, Ngoc Trinh and Trinh, Tu Luc and Dao, Ngoc Tien and Giap, Van Tan and Truong, Manh Khuyen and Dinh, Thuy Ha and Staat, Manfred}, title = {FEM shakedown analysis of structures under random strength with chance constrained programming}, series = {Vietnam Journal of Mechanics}, volume = {44}, journal = {Vietnam Journal of Mechanics}, number = {4}, publisher = {Vietnam Academy of Science and Technology (VAST)}, issn = {0866-7136}, doi = {10.15625/0866-7136/17943}, pages = {459 -- 473}, year = {2022}, abstract = {Direct methods, comprising limit and shakedown analysis, are a branch of computational mechanics. They play a significant role in mechanical and civil engineering design. The concept of direct methods aims to determine the ultimate load carrying capacity of structures beyond the elastic range. In practical problems, the direct methods lead to nonlinear convex optimization problems with a large number of variables and constraints. If strength and loading are random quantities, the shakedown analysis can be formulated as stochastic programming problem. In this paper, a method called chance constrained programming is presented, which is an effective method of stochastic programming to solve shakedown analysis problems under random conditions of strength. 
In this study, the loading is deterministic, and the strength is a normally or lognormally distributed variable.}, language = {en} } @incollection{Czarnecki2018, author = {Czarnecki, Christian}, title = {Establishment of a central process governance organization combined with operational process improvements : Insights from a BPM Project at a leading telecommunications operator in the Middle East}, series = {Business process management cases : digital innovation and business transformation in practice}, booktitle = {Business process management cases : digital innovation and business transformation in practice}, editor = {vom Brocke, Jan and Mendling, Jan}, publisher = {Springer}, address = {Cham}, isbn = {978-3-319-58306-8}, doi = {10.1007/978-3-319-58307-5}, pages = {57 -- 76}, year = {2018}, abstract = {Because of customer churn, strong competition, and operational inefficiencies, the telecommunications operator ME Telco (fictitious name due to confidentiality) launched a strategic transformation program that included a Business Process Management (BPM) project. Major problems were silo-oriented process management and missing cross-functional transparency. Process improvements were not consistently planned and aligned with corporate targets. Measurable inefficiencies were observed on an operational level, e.g., high lead times and reassignment rates of the incident management process.}, language = {en} } @article{MuellerSeginWeigandetal.2022, author = {Mueller, Tobias and Segin, Alexander and Weigand, Christoph and Schmitt, Robert H.}, title = {Feature selection for measurement models}, series = {International journal of quality \& reliability management}, journal = {International journal of quality \& reliability management}, number = {Vol. ahead-of-print, No. 
ahead-of-print.}, publisher = {Emerald Group Publishing Limited}, address = {Bingley}, issn = {0265-671X}, doi = {10.1108/IJQRM-07-2021-0245}, year = {2022}, abstract = {Purpose In the determination of the measurement uncertainty, the GUM procedure requires the building of a measurement model that establishes a functional relationship between the measurand and all influencing quantities. Since the effort of modelling as well as quantifying the measurement uncertainties depend on the number of influencing quantities considered, the aim of this study is to determine relevant influencing quantities and to remove irrelevant ones from the dataset. Design/methodology/approach In this work, it was investigated whether the effort of modelling for the determination of measurement uncertainty can be reduced by the use of feature selection (FS) methods. For this purpose, 9 different FS methods were tested on 16 artificial test datasets, whose properties (number of data points, number of features, complexity, features with low influence and redundant features) were varied via a design of experiments. Findings Based on a success metric, the stability, universality and complexity of the method, two FS methods could be identified that reliably identify relevant and irrelevant influencing quantities for a measurement model. Originality/value For the first time, FS methods were applied to datasets with properties of classical measurement processes. The simulation-based results serve as a basis for further research in the field of FS for measurement models. 
The identified algorithms will be applied to real measurement processes in the future.}, language = {en} } @incollection{SchmitzDietzeCzarnecki2019, author = {Schmitz, Manfred and Dietze, Christian and Czarnecki, Christian}, title = {Enabling digital transformation through robotic process automation at Deutsche Telekom}, series = {Digitalization Cases : How Organizations Rethink Their Business for the Digital Age}, booktitle = {Digitalization Cases : How Organizations Rethink Their Business for the Digital Age}, editor = {Urbach, Nils and R{\"o}glinger, Maximilian}, publisher = {Springer}, address = {Cham}, isbn = {978-3-319-95272-7}, doi = {10.1007/978-3-319-95273-4_2}, pages = {15 -- 33}, year = {2019}, abstract = {Due to the high number of customer contacts, fault clearances, installations, and product provisioning per year, the automation level of operational processes has a significant impact on financial results, quality, and customer experience. Therefore, the telecommunications operator Deutsche Telekom (DT) has defined a digital strategy with the objectives of zero complexity and zero complaint, one touch, agility in service, and disruptive thinking. 
In this context, Robotic Process Automation (RPA) was identified as an enabling technology to formulate and realize DT's digital strategy through automation of rule-based, routine, and predictable tasks in combination with structured and stable data.}, language = {en} } @incollection{BensbergBuscherCzarnecki2019, author = {Bensberg, Frank and Buscher, Gandalf and Czarnecki, Christian}, title = {Digital transformation and IT topics in the consulting industry: a labor market perspective}, series = {Advances in consulting research : recent findings and practical cases}, booktitle = {Advances in consulting research : recent findings and practical cases}, editor = {Nissen, Volker}, publisher = {Springer}, address = {Cham}, isbn = {978-3-319-95998-6}, doi = {10.1007/978-3-319-95999-3_16}, pages = {341 -- 357}, year = {2019}, abstract = {Information technologies, such as big data analytics, cloud computing, cyber physical systems, robotic process automation, and the internet of things, provide a sustainable impetus for the structural development of business sectors as well as the digitalization of markets, enterprises, and processes. Within the consulting industry, the proliferation of these technologies opened up the new segment of digital transformation, which focuses on setting up, controlling, and implementing projects for enterprises from a broad range of sectors. These recent developments raise the question, which requirements evolve for IT consultants as important success factors of those digital transformation projects. Therefore, this empirical contribution provides indications regarding the qualifications and competences necessary for IT consultants in the era of digital transformation from a labor market perspective. On the one hand, this knowledge base is interesting for the academic education of consultants, since it supports a market-oriented design of adequate training measures. 
On the other hand, insights into the competence requirements for consultants are considered relevant for skill and talent management processes in consulting practice. Assuming that consulting companies pursue a strategic human resource management approach, labor market information may also be useful to discover strategic behavioral patterns.}, language = {en} } @article{RuebbelkeVoegeleGrajewskietal.2022, author = {R{\"u}bbelke, Dirk and V{\"o}gele, Stefan and Grajewski, Matthias and Zobel, Luzy}, title = {Hydrogen-based steel production and global climate protection: An empirical analysis of the potential role of a European cross border adjustment mechanism}, series = {Journal of Cleaner Production}, volume = {380}, journal = {Journal of Cleaner Production}, number = {Part 2, Art. Nr.:135040}, publisher = {Elsevier}, issn = {0959-6526}, doi = {10.1016/j.jclepro.2022.135040}, year = {2022}, abstract = {The European Union's aim to become climate neutral by 2050 necessitates ambitious efforts to reduce carbon emissions. Large reductions can be attained particularly in energy intensive sectors like iron and steel. In order to prevent the relocation of such industries outside the EU in the course of tightening environmental regulations, the establishment of a climate club jointly with other large emitters and alternatively the unilateral implementation of an international cross-border carbon tax mechanism are proposed. This article focuses on the latter option choosing the steel sector as an example. In particular, we investigate the financial conditions under which a European cross border mechanism is capable to protect hydrogen-based steel production routes employed in Europe against more polluting competition from abroad. By using a floor price model, we assess the competitiveness of different steel production routes in selected countries. We evaluate the climate friendliness of steel production on the basis of specific GHG emissions. 
In addition, we utilize an input-output price model. It enables us to assess impacts of rising cost of steel production on commodities using steel as intermediates. Our results raise concerns that a cross-border tax mechanism will not suffice to bring about competitiveness of hydrogen-based steel production in Europe because the cost tends to remain higher than the cost of steel production in e.g. China. Steel is a classic example for a good used mainly as intermediate for other products. Therefore, a cross-border tax mechanism for steel will increase the price of products produced in the EU that require steel as an input. This can in turn adversely affect competitiveness of these sectors. Hence, the effects of higher steel costs on European exports should be borne in mind and could require the cross-border adjustment mechanism to also subsidize exports.}, language = {en} } @incollection{SchneiderWisselinkNoelleetal.2020, author = {Schneider, Dominik and Wisselink, Frank and N{\"o}lle, Nikolai and Czarnecki, Christian}, title = {Influence of artificial intelligence on commercial interactions in the consumer market}, series = {Automatisierung und Personalisierung von Dienstleistungen : Methoden - Potenziale - Einsatzfelder}, booktitle = {Automatisierung und Personalisierung von Dienstleistungen : Methoden - Potenziale - Einsatzfelder}, editor = {Bruhn, Manfred and Hadwich, Karsten}, publisher = {Springer Gabler}, address = {Wiesbaden}, isbn = {978-3-658-30167-5 (Print)}, doi = {10.1007/978-3-658-30168-2_7}, pages = {183 -- 205}, year = {2020}, abstract = {Recently, novel AI-based services have emerged in the consumer market. AI-based services can affect the way consumers take commercial decisions. Research on the influence of AI on commercial interactions is in its infancy. In this chapter, a framework creating a first overview of the influence of AI on commercial interactions is introduced. 
This framework summarizes the findings of comparing numerous customer journeys of novel AI-based services with corresponding non-AI equivalents.}, language = {en} } @incollection{CroonCzarnecki2021, author = {Croon, Philipp and Czarnecki, Christian}, title = {Liability for loss or damages caused by RPA}, series = {Robotic process automation : Management, technology, applications}, booktitle = {Robotic process automation : Management, technology, applications}, editor = {Czarnecki, Christian and Fettke, Peter}, publisher = {De Gruyter}, address = {Oldenbourg}, isbn = {9783110676778}, doi = {10.1515/9783110676693-202}, pages = {135 -- 151}, year = {2021}, abstract = {Intelligent autonomous software robots replacing human activities and performing administrative processes are reality in today's corporate world. This includes, for example, decisions about invoice payments, identification of customers for a marketing campaign, and answering customer complaints. What happens if such a software robot causes a damage? Due to the complete absence of human activities, the question is not trivial. It could even happen that no one is liable for a damage towards a third party, which could create an uncalculatable legal risk for business partners. Furthermore, the implementation and operation of those software robots involves various stakeholders, which result in the unsolvable endeavor of identifying the originator of a damage. Overall it is advisable to all involved parties to carefully consider the legal situation. This chapter discusses the liability of software robots from an interdisciplinary perspective. 
Based on different technical scenarios the legal aspects of liability are discussed.}, language = {en} } @incollection{BensbergAuthCzarnecki2021, author = {Bensberg, Frank and Auth, Gunnar and Czarnecki, Christian}, title = {Finding the perfect RPA match : a criteria-based selection method for RPA solutions}, series = {Robotic process automation : Management, technology, applications}, booktitle = {Robotic process automation : Management, technology, applications}, editor = {Czarnecki, Christian and Fettke, Peter}, publisher = {De Gruyter}, address = {Oldenbourg}, isbn = {978-3-11-067677-8}, doi = {10.1515/9783110676693-201}, pages = {47 -- 75}, year = {2021}, abstract = {The benefits of robotic process automation (RPA) are highly related to the usage of commercial off-the-shelf (COTS) software products that can be easily implemented and customized by business units. But, how to find the best fitting RPA product for a specific situation that creates the expected benefits? This question is related to the general area of software evaluation and selection. In the face of more than 75 RPA products currently on the market, guidance considering those specifics is required. Therefore, this chapter proposes a criteria-based selection method specifically for RPA. The method includes a quantitative evaluation of costs and benefits as well as a qualitative utility analysis based on functional criteria. By using the visualization of financial implications (VOFI) method, an application-oriented structure is provided that opposes the total cost of ownership to the time savings times salary (TSTS). For the utility analysis a detailed list of functional criteria for RPA is offered. The whole method is based on a multi-vocal review of scientific and non-scholarly literature including publications by business practitioners, consultants, and vendors. The application of the method is illustrated by a concrete RPA example. 
The illustrated structures, templates, and criteria can be directly utilized by practitioners in their real-life RPA implementations. In addition, a normative decision process for selecting RPA alternatives is proposed before the chapter closes with a discussion and outlook.}, language = {en} } @incollection{CzarneckiFettke2021, author = {Czarnecki, Christian and Fettke, Peter}, title = {Robotic process automation : Positioning, structuring, and framing the work}, series = {Robotic process automation : Management, technology, applications}, booktitle = {Robotic process automation : Management, technology, applications}, editor = {Czarnecki, Christian and Fettke, Peter}, publisher = {De Gruyter}, address = {Oldenbourg}, isbn = {978-3-11-067668-6 (Print)}, doi = {10.1515/9783110676693-202}, pages = {3 -- 24}, year = {2021}, abstract = {Robotic process automation (RPA) has attracted increasing attention in research and practice. This chapter positions, structures, and frames the topic as an introduction to this book. RPA is understood as a broad concept that comprises a variety of concrete solutions. From a management perspective RPA offers an innovative approach for realizing automation potentials, whereas from a technical perspective the implementation based on software products and the impact of artificial intelligence (AI) and machine learning (ML) are relevant. RPA is industry-independent and can be used, for example, in finance, telecommunications, and the public sector. With respect to RPA this chapter discusses definitions, related approaches, a structuring framework, a research framework, and an inside as well as outside architectural view. 
Furthermore, it provides an overview of the book combined with short summaries of each chapter.}, language = {en} } @incollection{CzarneckiHongSchmitzetal.2021, author = {Czarnecki, Christian and Hong, Chin-Gi and Schmitz, Manfred and Dietze, Christian}, title = {Enabling digital transformation through cognitive robotic process automation at Deutsche Telekom Services Europe}, series = {Digitalization Cases Vol. 2 : Mastering digital transformation for global business}, booktitle = {Digitalization Cases Vol. 2 : Mastering digital transformation for global business}, editor = {Urbach, Nils and R{\"o}glinger, Maximilian and Kautz, Karlheinz and Alias, Rose Alinda and Saunders, Carol and Wiener, Martin}, publisher = {Springer}, address = {Cham}, isbn = {978-3-030-80002-4 (Print)}, doi = {10.1007/978-3-030-80003-1}, pages = {123 -- 138}, year = {2021}, abstract = {Subject of this case is Deutsche Telekom Services Europe (DTSE), a service center for administrative processes. Due to the high volume of repetitive tasks (e.g., 100k manual uploads of offer documents into SAP per year), automation was identified as an important strategic target with a high management attention and commitment. DTSE has to work with various backend application systems without any possibility to change those systems. Furthermore, the complexity of administrative processes differed. 
When it comes to the transfer of unstructured data (e.g., offer documents) to structured data (e.g., MS Excel files), further cognitive technologies were needed.}, language = {en} } @book{CzarneckiDietze2017, author = {Czarnecki, Christian and Dietze, Christian}, title = {Reference architecture for the telecommunications industry: Transformation of strategy, organization, processes, data, and applications}, publisher = {Springer}, address = {Cham}, isbn = {978-3-319-46755-9 (Print)}, doi = {10.1007/978-3-319-46757-3}, pages = {XXII, 253 Seiten}, year = {2017}, abstract = {This book reflects the tremendous changes in the telecommunications industry in the course of the past few decades - shorter innovation cycles, stiffer competition and new communication products. It analyzes the transformation of processes, applications and network technologies that are now expected to take place under enormous time pressure. The International Telecommunication Union (ITU) and the TM Forum have provided reference solutions that are broadly recognized and used throughout the value chain of the telecommunications industry, and which can be considered the de facto standard. The book describes how these reference solutions can be used in a practical context: it presents the latest insights into their development, highlights lessons learned from numerous international projects and combines them with well-founded research results in enterprise architecture management and reference modeling. The complete architectural transformation is explained, from the planning and set-up stage to the implementation. 
Featuring a wealth of examples and illustrations, the book offers a valuable resource for telecommunication professionals, enterprise architects and project managers alike.}, language = {en} } @inproceedings{CzarneckiHeuserSpiliopoulou2009, author = {Czarnecki, Christian and Heuser, Marcus and Spiliopoulou, Myra}, title = {How does the implementation of a next generation network influence a telecommunication company?}, series = {European and Mediterranean Conference on Information Systems}, booktitle = {European and Mediterranean Conference on Information Systems}, editor = {Irani, Zahir}, publisher = {Brunel University}, address = {London}, isbn = {9781902316697}, pages = {1 -- 11}, year = {2009}, abstract = {As the potential of a Next Generation Network (NGN) is recognized, telecommunication companies consider switching to it. Although the implementation of an NGN seems to be merely a modification of the network infrastructure, it may trigger or require changes in the whole company and even influence the company strategy. To capture the effects of NGN we propose a framework based on concepts of business engineering and technical recommendations for the introduction of NGN technology. The specific design of solutions for the layers "Strategy", "Processes" and "Information Systems" as well as their interdependencies are an essential characteristic of the developed framework. 
We have performed a case study on NGN implementation and observed that all layers captured by our framework are influenced by the introduction of an NGN.}, language = {en} } @inproceedings{CzarneckiWinkelmannSpiliopoulou2011, author = {Czarnecki, Christian and Winkelmann, Axel and Spiliopoulou, Myra}, title = {Making business systems in the telecommunication industry more customer-oriented}, series = {Information Systems Development : Business Systems and Services: Modeling and Development}, booktitle = {Information Systems Development : Business Systems and Services: Modeling and Development}, editor = {Pokorny, Jaroslav and Repa, Vaclav and Richta, Karel and Wojtkowski, Wita and Linger, Henry and Barry, Chris and Lang, Michael}, publisher = {Springer}, address = {New York}, isbn = {978-1-4419-9645-9 (Print)}, doi = {10.1007/978-1-4419-9790-6_14}, pages = {169 -- 180}, year = {2011}, abstract = {Market changes have forced telecommunication companies to transform their business. Increased competition, short innovation cycles, changed usage patterns, increased customer expectations and cost reduction are the main drivers. Our objective is to analyze to what extend transformation projects have improved the orientation towards the end-customers. Therefore, we selected 38 real-life case studies that are dealing with customer orientation. Our analysis is based on a telecommunication-specific framework that aligns strategy, business processes and information systems. The result of our analysis shows the following: transformation projects that aim to improve the customer orientation are combined with clear goals on costs and revenue of the enterprise. These projects are usually directly linked to the customer touch points, but also to the development and provisioning of products. Furthermore, the analysis shows that customer orientation is not the sole trigger for transformation. 
There is no one-fits-all solution; rather, improved customer orientation needs aligned changes of business processes as well as information systems related to different parts of the company.}, language = {en} } @inproceedings{NursinskiStolbergGangatharanCzarnecki2016, author = {Nursinski-Stolberg, Andr{\'e} and Gangatharan, Kiritharan and Czarnecki, Christian}, title = {Development of a subject-oriented reference process model for the telecommunications industry}, series = {GI Edition Proceedings Band 259 INFORMATIK 2016}, booktitle = {GI Edition Proceedings Band 259 INFORMATIK 2016}, editor = {Mayr, Heinrich C. and Pinzger, Martin}, publisher = {Gesellschaft f{\"u}r Informatik e.V.}, address = {Bonn}, isbn = {9783885796534}, issn = {1617-5468}, pages = {699 -- 712}, year = {2016}, abstract = {Generally the usage of reference models can be structured top-down or bottom-up. The practical need of agile change and flexible organizational implementation requires a consistent mapping to an operational level. In this context, well-established reference process models are typically structured top-down. The subject-oriented Business Process Management (sBPM) offers a modeling concept that is structured bottom-up and concentrates on the process actors on an operational level. This paper applies sBPM to the enhanced Telecom Operations Map (eTOM), a well-accepted reference process model in the telecommunications industry. The resulting design artifact is a concrete example for a combination of a bottom-up and top-down developed reference model. 
The results are evaluated and confirmed in practical context through the involvement of the industry body TMForum.}, language = {en} } @inproceedings{CzarneckiDietze2017, author = {Czarnecki, Christian and Dietze, Christian}, title = {Domain-Specific reference modeling in the telecommunications industry}, series = {DESRIST 2017: Designing the Digital Transformation}, booktitle = {DESRIST 2017: Designing the Digital Transformation}, editor = {Maedche, Alexander and vom Brocke, Jan and Hevner, Alan}, publisher = {Springer}, address = {Cham}, isbn = {978-3-319-59144-5}, doi = {10.1007/978-3-319-59144-5_19}, pages = {313 -- 329}, year = {2017}, abstract = {The telecommunications industry is currently going through a major transformation. In this context, the enhanced Telecom Operations Map (eTOM) is a domain-specific process reference model that is offered by the industry organization TM Forum. In practice, eTOM is well accepted and confirmed as de facto standard. It provides process definitions and process flows on different levels of detail. This article discusses the reference modeling of eTOM, i.e., the design, the resulting artifact, and its evaluation based on three project cases. The application of eTOM in three projects illustrates the design approach and concrete models on strategic and operational levels. The article follows the Design Science Research (DSR) paradigm. 
It contributes with concrete design artifacts to the transformational needs of the telecommunications industry and offers lessons-learned from a general DSR perspective.}, language = {en} } @article{CzarneckiWinkelmannSpiliopoulou2010, author = {Czarnecki, Christian and Winkelmann, Axel and Spiliopoulou, Myra}, title = {Services in electronic telecommunication markets: a framework for planning the virtualization of processes}, series = {Electronic Markets}, volume = {20}, journal = {Electronic Markets}, number = {3-4}, publisher = {Springer}, address = {Berlin}, issn = {1422-8890}, doi = {10.1007/s12525-010-0045-8}, pages = {197 -- 207}, year = {2010}, abstract = {The potential of electronic markets in enabling innovative product bundles through flexible and sustainable partnerships is not yet fully exploited in the telecommunication industry. One reason is that bundling requires seamless de-assembling and re-assembling of business processes, whilst processes in telecommunication companies are often product-dependent and hard to virtualize. We propose a framework for the planning of the virtualization of processes, intended to assist the decision maker in prioritizing the processes to be virtualized: (a) we transfer the virtualization pre-requisites stated by the Process Virtualization Theory in the context of customer-oriented processes in the telecommunication industry and assess their importance in this context, (b) we derive IT-oriented requirements for the removal of virtualization barriers and highlight their demand on changes at different levels of the organization. 
We present a first evaluation of our approach in a case study and report on lessons learned and further steps to be performed.}, language = {en} } @article{CzarneckiSpiliopoulou2012, author = {Czarnecki, Christian and Spiliopoulou, Myra}, title = {A holistic framework for the implementation of a next generation network}, series = {International Journal of Business Information Systems}, volume = {9}, journal = {International Journal of Business Information Systems}, number = {4}, publisher = {Inderscience Enterprises}, address = {Olney, Bucks}, issn = {1746-0972}, doi = {10.1504/IJBIS.2012.046291}, pages = {385 -- 401}, year = {2012}, abstract = {As the potential of a next generation network (NGN) is recognised, telecommunication companies consider switching to it. Although the implementation of an NGN seems to be merely a modification of the network infrastructure, it may trigger or require changes in the whole company, because it builds upon the separation between service and transport, a flexible bundling of services to products and the streamlining of the IT infrastructure. We propose a holistic framework, structured into the layers 'strategy', 'processes' and 'information systems' and incorporate into each layer all concepts necessary for the implementation of an NGN, as well as the alignment of these concepts. As a first proof-of-concept for our framework we have performed a case study on the introduction of NGN in a large telecommunication company; we show that our framework captures all topics that are affected by an NGN implementation.}, language = {en} } @article{BurgerRumpfDoetal.2021, author = {Burger, Ren{\'e} and Rumpf, Jessica and Do, Xuan Tung and Monakhova, Yulia and Diehl, Bernd W. K. 
and Rehahn, Matthias and Schulze, Margit}, title = {Is NMR combined with multivariate regression applicable for the molecular weight determination of randomly cross-linked polymers such as lignin?}, series = {ACS Omega}, volume = {6}, journal = {ACS Omega}, number = {44}, publisher = {ACS Publications}, address = {Washington, DC}, issn = {2470-1343}, doi = {10.1021/acsomega.1c03574}, pages = {29516 -- 29524}, year = {2021}, abstract = {The molecular weight properties of lignins are one of the key elements that need to be analyzed for a successful industrial application of these promising biopolymers. In this study, the use of 1H NMR as well as diffusion-ordered spectroscopy (DOSY NMR), combined with multivariate regression methods, was investigated for the determination of the molecular weight (Mw and Mn) and the polydispersity of organosolv lignins (n = 53, Miscanthus x giganteus, Paulownia tomentosa, and Silphium perfoliatum). The suitability of the models was demonstrated by cross validation (CV) as well as by an independent validation set of samples from different biomass origins (beech wood and wheat straw). CV errors of ca. 7-9 and 14-16\% were achieved for all parameters with the models from the 1H NMR spectra and the DOSY NMR data, respectively. The prediction errors for the validation samples were in a similar range for the partial least squares model from the 1H NMR data and for a multiple linear regression using the DOSY NMR data. The results indicate the usefulness of NMR measurements combined with multivariate regression methods as a potential alternative to more time-consuming methods such as gel permeation chromatography.}, language = {en} } @article{MonakhovaDiehl2021, author = {Monakhova, Yulia and Diehl, Bernd W. 
K.}, title = {Simplification of NMR Workflows by Standardization Using 2H Integral of Deuterated Solvent as Applied to Aloe vera Preparations}, series = {Applied Magnetic Resonance}, volume = {52}, journal = {Applied Magnetic Resonance}, number = {11}, publisher = {Springer}, address = {Cham}, issn = {1613-7507}, doi = {10.1007/s00723-021-01393-4}, pages = {1591 -- 1600}, year = {2021}, abstract = {In this study, a recently proposed NMR standardization approach by 2H integral of deuterated solvent for quantitative multicomponent analysis of complex mixtures is presented. As a proof of principle, the existing NMR routine for the analysis of Aloe vera products was modified. Instead of using absolute integrals of targeted compounds and internal standard (nicotinamide) from 1H-NMR spectra, quantification was performed based on the ratio of a particular 1H-NMR compound integral and 2H-NMR signal of deuterated solvent D2O. Validation characteristics (linearity, repeatability, accuracy) were evaluated and the results showed that the method has the same precision as internal standardization in case of multicomponent screening. Moreover, a dehydration process by freeze drying is not necessary for the new routine. Now, our NMR profiling of A. vera products needs only limited sample preparation and data processing. The new standardization methodology provides an appealing alternative for multicomponent NMR screening. In general, this novel approach, using standardization by 2H integral, benefits from reduced sample preparation steps and uncertainties, and is recommended in different application areas (purity determination, forensics, pharmaceutical analysis, etc.).}, language = {en} } @article{BurmistrovaSobolevaMonakhova2021, author = {Burmistrova, Natalia A. and Soboleva, Polina M. 
and Monakhova, Yulia}, title = {Is infrared spectroscopy combined with multivariate analysis a promising tool for heparin authentication?}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {194}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 113811}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0731-7085}, doi = {10.1016/j.jpba.2020.113811}, year = {2021}, abstract = {The investigation of the possibility to determine various characteristics of powder heparin (n = 115) was carried out with infrared spectroscopy. The evaluation of heparin samples included several parameters such as purity grade, distributing company, animal source as well as heparin species (i.e. Na-heparin, Ca-heparin, and heparinoids). Multivariate analysis using principal component analysis (PCA), soft independent modelling of class analogy (SIMCA), and partial least squares - discriminant analysis (PLS-DA) were applied for the modelling of spectral data. Different pre-processing methods were applied to IR spectral data; multiplicative scatter correction (MSC) was chosen as the most relevant. Obtained results were confirmed by nuclear magnetic resonance (NMR) spectroscopy. Good predictive ability of this approach demonstrates the potential of IR spectroscopy and chemometrics for screening of heparin quality. 
This approach, however, is designed as a screening tool and is not considered as a replacement for either of the methods required by USP and FDA.}, language = {en} } @article{MonakhovaDiehl2021a, author = {Monakhova, Yulia and Diehl, Bernd W.K.}, title = {Novel approach of qNMR workflow by standardization using 2H integral: Application to any intrinsic calibration standard}, series = {Talanta}, volume = {222}, journal = {Talanta}, number = {Article number: 121504}, publisher = {Elsevier}, issn = {0039-9140}, doi = {10.1016/j.talanta.2020.121504}, year = {2021}, abstract = {Quantitative nuclear magnetic resonance (qNMR) is routinely performed by the internal or external standardization. The manuscript describes a simple alternative to these common workflows by using NMR signal of another active nuclei of calibration compound. For example, for any arbitrary compound quantification by NMR can be based on the use of an indirect concentration referencing that relies on a solvent having both 1H and 2H signals. To perform high-quality quantification, the deuteration level of the utilized deuterated solvent has to be estimated. In this contribution the new method was applied to the determination of deuteration levels in different deuterated solvents (MeOD, ACN, CDCl3, acetone, benzene, DMSO-d6). Isopropanol-d6, which contains a defined number of deuterons and protons, was used for standardization. Validation characteristics (precision, accuracy, robustness) were calculated and the results showed that the method can be used in routine practice. Uncertainty budget was also evaluated. In general, this novel approach, using standardization by 2H integral, benefits from reduced sample preparation steps and uncertainties, and can be applied in different application areas (purity determination, forensics, pharmaceutical analysis, etc.).}, language = {en} } @article{MonakhovaSobolevaFedotovaetal.2022, author = {Monakhova, Yulia and Soboleva, Polina M. and Fedotova, Elena S. 
and Musina, Kristina T. and Burmistrova, Natalia A.}, title = {Quantum chemical calculations of IR spectra of heparin disaccharide subunits}, series = {Computational and Theoretical Chemistry}, volume = {1217}, journal = {Computational and Theoretical Chemistry}, number = {Article number: 113891}, publisher = {Elsevier}, address = {New York, NY}, issn = {2210-271X}, doi = {10.1016/j.comptc.2022.113891}, year = {2022}, abstract = {Heparin is a natural polysaccharide, which plays essential role in many biological processes. Alterations in building blocks can modify biological roles of commercial heparin products, due to significant changes in the conformation of the polymer chain. The variability structure of heparin leads to difficulty in quality control using different analytical methods, including infrared (IR) spectroscopy. In this paper molecular modelling of heparin disaccharide subunits was performed using quantum chemistry. The structural and spectral parameters of these disaccharides have been calculated using RHF/6-311G. In addition, over-sulphated chondroitin sulphate disaccharide was studied as one of the most widespread contaminants of heparin. Calculated IR spectra were analyzed with respect to specific structure parameters. IR spectroscopic fingerprint was found to be sensitive to substitution pattern of disaccharide subunits. Vibrational assignments of calculated spectra were correlated with experimental IR spectral bands of native heparin. Chemometrics was used to perform multivariate analysis of simulated spectral data.}, language = {en} } @article{BurgerLindnerRumpfetal.2022, author = {Burger, Ren{\'e} and Lindner, Simon and Rumpf, Jessica and Do, Xuan Tung and Diehl, Bernd W.K. 
and Rehahn, Matthias and Monakhova, Yulia and Schulze, Margit}, title = {Benchtop versus high field NMR: Comparable performance found for the molecular weight determination of lignin}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {212}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 114649}, publisher = {Elsevier}, address = {New York, NY}, issn = {0731-7085}, doi = {10.1016/j.jpba.2022.114649}, year = {2022}, abstract = {Lignin is a promising renewable biopolymer being investigated worldwide as an environmentally benign substitute of fossil-based aromatic compounds, e.g. for the use as an excipient with antioxidant and antimicrobial properties in drug delivery or even as active compound. For its successful implementation into process streams, a quick, easy, and reliable method is needed for its molecular weight determination. Here we present a method using 1H spectra of benchtop as well as conventional NMR systems in combination with multivariate data analysis, to determine lignin's molecular weight (Mw and Mn) and polydispersity index (PDI). A set of 36 organosolv lignin samples (from Miscanthus x giganteus, Paulownia tomentosa and Silphium perfoliatum) was used for the calibration and cross validation, and 17 samples were used as external validation set. Validation errors between 5.6\% and 12.9\% were achieved for all parameters on all NMR devices (43, 60, 500 and 600 MHz). Surprisingly, no significant difference in the performance of the benchtop and high-field devices was found. 
This facilitates the application of this method for determining lignin's molecular weight in an industrial environment because of the low maintenance expenditure, small footprint, ruggedness, and low cost of permanent magnet benchtop NMR systems.}, language = {en} } @article{MonakhovaDiehl2022, author = {Monakhova, Yulia and Diehl, Bernd W.K.}, title = {Multinuclear NMR screening of pharmaceuticals using standardization by 2H integral of a deuterated solvent}, series = {Journal of Pharmaceutical and Biomedical Analysis}, volume = {209}, journal = {Journal of Pharmaceutical and Biomedical Analysis}, number = {Article number: 114530}, publisher = {Elsevier}, issn = {0731-7085}, doi = {10.1016/j.jpba.2021.114530}, year = {2022}, abstract = {NMR standardization approach that uses the 2H integral of deuterated solvent for quantitative multinuclear analysis of pharmaceuticals is described. As a proof of principle, the existing NMR procedure for the analysis of heparin products according to US Pharmacopeia monograph is extended to the determination of Na+ and Cl- content in this matrix. Quantification is performed based on the ratio of a 23Na (35Cl) NMR integral and 2H NMR signal of deuterated solvent, D2O, acquired using the specific spectrometer hardware. As an alternative, the possibility of 133Cs standardization using the addition of Cs2CO3 stock solution is shown. Validation characteristics (linearity, repeatability, sensitivity) are evaluated. A holistic NMR profiling of heparin products can now also be used for the quantitative determination of inorganic compounds in a single analytical run using a single sample. In general, the new standardization methodology provides an appealing alternative for the NMR screening of inorganic and organic components in pharmaceutical products.}, language = {en} } @article{LindnerBurgerRutledgeetal.2022, author = {Lindner, Simon and Burger, Ren{\'e} and Rutledge, Douglas N. and Do, Xuan Tung and Rumpf, Jessica and Diehl, Bernd W. 
K. and Schulze, Margit and Monakhova, Yulia}, title = {Is the calibration transfer of multivariate calibration models between high- and low-field NMR instruments possible? A case study of lignin molecular weight}, series = {Analytical Chemistry}, volume = {94}, journal = {Analytical Chemistry}, number = {9}, publisher = {ACS Publications}, address = {Washington, DC}, issn = {1520-6882}, doi = {10.1021/acs.analchem.1c05125}, pages = {3997 -- 4004}, year = {2022}, abstract = {Although several successful applications of benchtop nuclear magnetic resonance (NMR) spectroscopy in quantitative mixture analysis exist, the possibility of calibration transfer remains mostly unexplored, especially between high- and low-field NMR. This study investigates for the first time the calibration transfer of partial least squares regressions [weight average molecular weight (Mw) of lignin] between high-field (600 MHz) NMR and benchtop NMR devices (43 and 60 MHz). For the transfer, piecewise direct standardization, calibration transfer based on canonical correlation analysis, and transfer via the extreme learning machine auto-encoder method are employed. Despite the immense resolution difference between high-field and low-field NMR instruments, the results demonstrate that the calibration transfer from high- to low-field is feasible in the case of a physical property, namely, the molecular weight, achieving validation errors close to the original calibration (down to only 1.2 times higher root mean square errors). 
These results introduce new perspectives for applications of benchtop NMR, in which existing calibrations from expensive high-field instruments can be transferred to cheaper benchtop instruments to economize.}, language = {en} } @inproceedings{BrockmannSultanowCzarnecki2018, author = {Brockmann, Carsten and Sultanow, Eldar and Czarnecki, Christian}, title = {Enterprise architectures between agility and traditional methodologies}, series = {GI Edition Proceedings Band 285 Workshops der INFORMATIK 2018}, booktitle = {GI Edition Proceedings Band 285 Workshops der INFORMATIK 2018}, editor = {Czarnecki, Christian and Brockmann, Carsten and Sultanow, Eldar and Koschmider, Agnes and Selzer, Annika and Gesellschaft f{\"u}r Informatik e. V.,}, publisher = {K{\"o}llen}, address = {Bonn}, isbn = {9783885796794}, issn = {1617-5468}, pages = {1 Seite}, year = {2018}, abstract = {For this year's workshop on Enterprise Architecture in Research and Practice we have received eight submissions from which four have passed the rigorous peer-review. The acceptance quote of 50\% assures that only advancements in the field are included in our workshop.}, language = {en} } @inproceedings{BensbergAuthCzarneckietal.2018, author = {Bensberg, Frank and Auth, Gunnar and Czarnecki, Christian and W{\"o}rndle, Christopher}, title = {Transforming literature-intensive research processes through text analytics - design, implementation and lessons learned}, editor = {{\.I}lter, H. Kemal}, doi = {10.6084/m9.figshare.7582073.v1}, pages = {9 Seiten}, year = {2018}, abstract = {The continuing growth of scientific publications raises the question how research processes can be digitalized and thus realized more productively. Especially in information technology fields, research practice is characterized by a rapidly growing volume of publications. For the search process various information systems exist. However, the analysis of the published content is still a highly manual task. 
Therefore, we propose a text analytics system that allows a fully digitalized analysis of literature sources. We have realized a prototype by using EBSCO Discovery Service in combination with IBM Watson Explorer and demonstrated the results in real-life research projects. Potential addressees are research institutions, consulting firms, and decision-makers in politics and business practice.}, language = {en} } @inproceedings{BrockmannSultanowCzarnecki2019, author = {Brockmann, Carsten and Sultanow, Eldar and Czarnecki, Christian}, title = {Is enterprise architecture still relevant in the digital age?}, series = {GI Edition Proceedings Band 295 INFORMATIK 2019, Workshop-Beitr{\"a}ge}, booktitle = {GI Edition Proceedings Band 295 INFORMATIK 2019, Workshop-Beitr{\"a}ge}, editor = {Draude, Claude and Lange, Martin and Sick, Bernhard}, publisher = {K{\"o}llen}, address = {Bonn}, isbn = {9783885796893}, issn = {1617-5468}, doi = {10.18420/inf2019_ws01}, pages = {21 -- 21}, year = {2019}, language = {en} } @inproceedings{AuthCzarneckiBensberg2019, author = {Auth, Gunnar and Czarnecki, Christian and Bensberg, Frank}, title = {Impact of robotic process automation on enterprise architectures}, series = {GI Edition Proceedings Band 295 INFORMATIK 2019, Workshop-Beitr{\"a}ge}, booktitle = {GI Edition Proceedings Band 295 INFORMATIK 2019, Workshop-Beitr{\"a}ge}, editor = {Draude, Claude and Lange, Martin and Sick, Bernhard and Gesellschaft f{\"u}r Informatik e.V. (GI),}, publisher = {K{\"o}llen}, address = {Bonn}, isbn = {9783885796893}, issn = {1617-5468}, doi = {10.18420/inf2019_ws05}, pages = {59 -- 65}, year = {2019}, abstract = {The initial idea of Robotic Process Automation (RPA) is the automation of business processes through the presentation layer of existing application systems. For this simple emulation of user input and output by software robots, no changes of the systems and architecture is required. 
However, considering strategic aspects of aligning business and technology on an enterprise level as well as the growing capabilities of RPA driven by artificial intelligence, interrelations between RPA and Enterprise Architecture (EA) become visible and pose new questions. In this paper we discuss the relationship between RPA and EA in terms of perspectives and implications. As work-in-progress we focus on identifying new questions and research opportunities related to RPA and EA.}, language = {en} } @inproceedings{RitschelStenzelCzarneckietal.2021, author = {Ritschel, Konstantin and Stenzel, Adina and Czarnecki, Christian and Hong, Chin-Gi}, title = {Realizing robotic process automation potentials: an architectural perspective on a real-life implementation case}, series = {GI Edition Proceedings Band 314 "INFORMATIK 2021" Computer Science \& Sustainability}, booktitle = {GI Edition Proceedings Band 314 "INFORMATIK 2021" Computer Science \& Sustainability}, editor = {Gesellschaft f{\"u}r Informatik e.V. (GI),}, publisher = {K{\"o}llen}, address = {Bonn}, isbn = {9783885797081}, issn = {1617-5468}, pages = {1303 -- 1311}, year = {2021}, abstract = {The initial idea of Robotic Process Automation (RPA) is the automation of business processes through a simple emulation of user input and output by software robots. Hence, it can be assumed that no changes of the used software systems and existing Enterprise Architecture (EA) is required. In this short, practical paper we discuss this assumption based on a real-life implementation project. We show that a successful RPA implementation might require architectural work during analysis, implementation, and migration. 
As practical paper we focus on exemplary lessons-learned and new questions related to RPA and EA.}, language = {en} } @inproceedings{AmirBauckhageChircuetal.2022, author = {Amir, Malik and Bauckhage, Christian and Chircu, Alina and Czarnecki, Christian and Knopf, Christian and Piatkowski, Nico and Sultanow, Eldar}, title = {What can we expect from quantum (digital) twins?}, publisher = {AIS Electronic Library (AISeL)}, pages = {1 -- 14}, year = {2022}, abstract = {Digital twins enable the modeling and simulation of real-world entities (objects, processes or systems), resulting in improvements in the associated value chains. The emerging field of quantum computing holds tremendous promise for evolving this virtualization towards Quantum (Digital) Twins (QDT) and ultimately Quantum Twins (QT). The quantum (digital) twin concept is not a contradiction in terms - but instead describes a hybrid approach that can be implemented using the technologies available today by combining classical computing and digital twin concepts with quantum processing. This paper presents the status quo of research and practice on quantum (digital) twins. 
It also discusses their potential to create competitive advantage through real-time simulation of highly complex, interconnected entities that helps companies better address changes in their environment and differentiate their products and services.}, language = {en} } @inproceedings{BlaneckBornheimGriegeretal.2022, author = {Blaneck, Patrick Gustav and Bornheim, Tobias and Grieger, Niklas and Bialonski, Stephan}, title = {Automatic readability assessment of {German} sentences with transformer ensembles}, series = {Proceedings of the GermEval 2022 Workshop on Text Complexity Assessment of German Text}, booktitle = {Proceedings of the GermEval 2022 Workshop on Text Complexity Assessment of German Text}, publisher = {Association for Computational Linguistics}, address = {Potsdam}, doi = {10.48550/arXiv.2209.04299}, pages = {57 -- 62}, year = {2022}, abstract = {Reliable methods for automatic readability assessment have the potential to impact a variety of fields, ranging from machine translation to self-informed learning. Recently, large language models for the German language (such as GBERT and GPT-2-Wechsel) have become available, allowing to develop Deep Learning based approaches that promise to further improve automatic readability assessment. In this contribution, we studied the ability of ensembles of fine-tuned GBERT and GPT-2-Wechsel models to reliably predict the readability of German sentences. We combined these models with linguistic features and investigated the dependence of prediction performance on ensemble size and composition. Mixed ensembles of GBERT and GPT-2-Wechsel performed better than ensembles of the same size consisting of only GBERT or GPT-2-Wechsel models. Our models were evaluated in the GermEval 2022 Shared Task on Text Complexity Assessment on data of German sentences. 
On out-of-sample data, our best ensemble achieved a root mean squared error of 0.435.}, language = {en} } @inproceedings{SteuerDankertSharmaBlecketal.2017, author = {Steuer-Dankert, Linda and Sharma, Mamta Rameshwarlal and Bleck, Wolfgang and Leicht-Scholten, Carmen}, title = {Innovation through Diversity - Development of a Diversity and Innovation management concept}, series = {International Conference on Innovation and Management : IAM23017S : Date: July 4-7, 2017, Osaka, Japan}, booktitle = {International Conference on Innovation and Management : IAM23017S : Date: July 4-7, 2017, Osaka, Japan}, editor = {Farn, C. K.}, publisher = {Kuang Hui Chiu}, address = {Osaka}, issn = {2218-6387}, pages = {Panel C}, year = {2017}, abstract = {Acknowledging that a diverse workforce could be a potential source of innovation, the current research deals with the fine details of why diversity management is central to achieving innovation in heterogeneous research groups and how this could be effectively realized in an organization. The types of heterogeneities addressed mainly include gender, qualification, academic discipline and intercultural perspectives. The type of organization being dealt with in this work is a complex association of research institutes at a technical university in Germany (RWTH Aachen University), namely a 'Cluster of Excellence', whereby several institutes of the university work collaboratively in different sub-projects. The 'Cluster of Excellence' is a part of the 'Excellence Initiative' of the German federal and state governments German Research Foundation (DFG) and German Council of Science and Humanities, with the ultimate aim of promoting cutting-edge research. To support interdisciplinary collaboration and thus the performance of the cluster, the development of a diversity and innovation management concept is presently in the conceptual phase and will be described in the frame of this paper. 
The 3-S-Diversity Model, composed of the three elements: skills, structure and strategy, serves as a basis for the development of the concept. The proposed concept consists of six phases; the first two phases lay the ground work by developing an understanding of the status quo on the forms of diversity in the Cluster of Excellence, the type of organizational structure of the member institutes and the varieties of specialist work cultures of the same. The third and the fourth phases build up on this foundation by means of qualitative and quantitative studies. While the third phase deals with the sensitization of the management level to the close connection between diversity and innovation; the need to manage them thereafter and find tailor-made methods of doing so, the fourth phase shall mainly focus on the mindset of the employees in this regard. The fifth phase shall consolidate the learnings and the ideas developed in the course of the first four phases into an implementable strategy. The ultimate phase shall be the implementation of this concept in the Cluster. 
The first three phases have been accomplished successfully and the preliminary results are already available.}, language = {en} } @article{Maurischat2022, author = {Maurischat, Andreas}, title = {Algebraic independence of the Carlitz period and its hyperderivatives}, series = {Journal of Number Theory}, volume = {240}, journal = {Journal of Number Theory}, publisher = {Elsevier}, address = {Orlando, Fla.}, issn = {0022-314X}, doi = {10.1016/j.jnt.2022.01.006}, pages = {145 -- 162}, year = {2022}, language = {en} } @article{KotliarOrtnerConradietal.2022, author = {Kotliar, Konstantin and Ortner, Marion and Conradi, Anna and Hacker, Patricia and Hauser, Christine and G{\"u}nthner, Roman and Moser, Michaela and Muggenthaler, Claudia and Diehl-Schmid, Janine and Priller, Josef and Schmaderer, Christoph and Grimmer, Timo}, title = {Altered retinal cerebral vessel oscillation frequencies in Alzheimer's disease compatible with impaired amyloid clearance}, series = {Neurobiology of Aging}, volume = {120}, journal = {Neurobiology of Aging}, publisher = {Elsevier}, address = {Amsterdam}, issn = {0197-4580}, doi = {10.1016/j.neurobiolaging.2022.08.012}, pages = {117 -- 127}, year = {2022}, abstract = {Retinal vessels are similar to cerebral vessels in their structure and function. Moderately low oscillation frequencies of around 0.1 Hz have been reported as the driving force for paravascular drainage in gray matter in mice and are known as the frequencies of lymphatic vessels in humans. We aimed to elucidate whether retinal vessel oscillations are altered in Alzheimer's disease (AD) at the stage of dementia or mild cognitive impairment (MCI). Seventeen patients with mild-to-moderate dementia due to AD (ADD); 23 patients with MCI due to AD, and 18 cognitively healthy controls (HC) were examined using Dynamic Retinal Vessel Analyzer. Oscillatory temporal changes of retinal vessel diameters were evaluated using mathematical signal analysis. 
Especially at moderately low frequencies around 0.1 Hz, arterial oscillations in ADD and MCI significantly prevailed over HC oscillations and correlated with disease severity. The pronounced retinal arterial vasomotion at moderately low frequencies in the ADD and MCI groups would be compatible with the view of a compensatory upregulation of paravascular drainage in AD and strengthen the amyloid clearance hypothesis.}, language = {en} } @article{ColomboDriraFrotscheretal.2022, author = {Colombo, Daniele and Drira, Slah and Frotscher, Ralf and Staat, Manfred}, title = {An element-based formulation for ES-FEM and FS-FEM models for implementation in standard solid mechanics finite element codes for 2D and 3D static analysis}, series = {International Journal for Numerical Methods in Engineering}, volume = {124}, journal = {International Journal for Numerical Methods in Engineering}, number = {2}, publisher = {Wiley}, address = {Chichester}, issn = {1097-0207}, doi = {10.1002/nme.7126}, pages = {402 -- 433}, year = {2022}, abstract = {Edge-based and face-based smoothed finite element methods (ES-FEM and FS-FEM, respectively) are modified versions of the finite element method allowing to achieve more accurate results and to reduce sensitivity to mesh distortion, at least for linear elements. These properties make the two methods very attractive. However, their implementation in a standard finite element code is nontrivial because it requires heavy and extensive modifications to the code architecture. In this article, we present an element-based formulation of ES-FEM and FS-FEM methods allowing to implement the two methods in a standard finite element code with no modifications to its architecture. Moreover, the element-based formulation permits to easily manage any type of element, especially in 3D models where, to the best of the authors' knowledge, only tetrahedral elements are used in FS-FEM applications found in the literature. 
Shape functions for non-simplex 3D elements are proposed in order to apply FS-FEM to any standard finite element.}, language = {en} } @article{BhattaraiMayStaatetal.2022, author = {Bhattarai, Aroj and May, Charlotte Anabell and Staat, Manfred and Kowalczyk, Wojciech and Tran, Thanh Ngoc}, title = {Layer-specific damage modeling of porcine large intestine under biaxial tension}, series = {Bioengineering}, volume = {9}, journal = {Bioengineering}, number = {10, Early Access}, publisher = {MDPI}, address = {Basel}, issn = {2306-5354}, doi = {10.3390/bioengineering9100528}, pages = {1 -- 17}, year = {2022}, abstract = {The mechanical behavior of the large intestine beyond the ultimate stress has never been investigated. Stretching beyond the ultimate stress may drastically impair the tissue microstructure, which consequently weakens its healthy state functions of absorption, temporary storage, and transportation for defecation. Due to closely similar microstructure and function with humans, biaxial tensile experiments on the porcine large intestine have been performed in this study. In this paper, we report hyperelastic characterization of the large intestine based on experiments in 102 specimens. We also report the theoretical analysis of the experimental results, including an exponential damage evolution function. The fracture energies and the threshold stresses are set as damage material parameters for the longitudinal muscular, the circumferential muscular and the submucosal collagenous layers. A biaxial tensile simulation of a linear brick element has been performed to validate the applicability of the estimated material parameters. 
The model successfully simulates the biomechanical response of the large intestine under physiological and non-physiological loads.}, language = {en} } @incollection{StriebingMuellerSchraudneretal.2022, author = {Striebing, Clemens and M{\"u}ller, J{\"o}rg and Schraudner, Martina and Gewinner, Irina Valerie and Guerrero Morales, Patricia and Hochfeld, Katharina and Hoffman, Shekinah and Kmec, Julie A. and Nguyen, Huu Minh and Schneider, Jannick and Sheridan, Jennifer and Steuer-Dankert, Linda and Trimble O'Connor, Lindsey and Vandevelde-Rougale, Agn{\`e}s}, title = {Promoting diversity and combatting discrimination in research organizations: a practitioner's guide}, series = {Diversity and discrimination in research organizations}, booktitle = {Diversity and discrimination in research organizations}, publisher = {Emerald Publishing Limited}, address = {Bingley}, isbn = {978-1-80117-959-1 (Print)}, doi = {10.1108/978-1-80117-956-020221012}, pages = {421 -- 442}, year = {2022}, abstract = {The essay is addressed to practitioners in research management and from academic leadership. It describes which measures can contribute to creating an inclusive climate for research teams and preventing and effectively dealing with discrimination. The practical recommendations consider the policy and organizational levels, as well as the individual perspective of research managers. 
Following a series of basic recommendations, six lessons learned are formulated, derived from the contributions to the edited collection on "Diversity and Discrimination in Research Organizations."}, language = {en} } @incollection{SteuerDankertLeichtScholten2022, author = {Steuer-Dankert, Linda and Leicht-Scholten, Carmen}, title = {Perceiving diversity : an explorative approach in a complex research organization.}, series = {Diversity and discrimination in research organizations}, booktitle = {Diversity and discrimination in research organizations}, publisher = {Emerald Publishing Limited}, address = {Bingley}, isbn = {978-1-80117-959-1 (Print)}, doi = {10.1108/978-1-80117-956-020221010}, pages = {365 -- 392}, year = {2022}, abstract = {Diversity management is seen as a decisive factor for ensuring the development of socially responsible innovations (Beacham and Shambaugh, 2011; Sonntag, 2014; L{\´o}pez, 2015; Uebernickel et al., 2015). However, many diversity management approaches fail due to a one-sided consideration of diversity (Thomas and Ely, 2019) and a lacking linkage between the prevailing organizational culture and the perception of diversity in the respective organization. Reflecting the importance of diverse perspectives, research institutions have a special responsibility to actively deal with diversity, as they are publicly funded institutions that drive socially relevant development and educate future generations of developers, leaders and decision-makers. Nevertheless, only a few studies have so far dealt with the influence of the special framework conditions of the science system on diversity management. 
Focusing on the interdependency of the organizational culture and diversity management especially in a university research environment, this chapter aims in a first step to provide a theoretical perspective on the framework conditions of a complex research organization in Germany in order to understand the system-specific factors influencing diversity management. In a second step, an exploratory cluster analysis is presented, investigating the perception of diversity and possible influencing factors moderating this perception in a scientific organization. Combining both steps, the results show specific mechanisms and structures of the university research environment that have an impact on diversity management and rigidify structural barriers preventing an increase of diversity. The quantitative study also points out that the management level takes on a special role model function in the scientific system and thus has an influence on the perception of diversity. Consequently, when developing diversity management approaches in research organizations, it is necessary to consider the top-down direction of action, the special nature of organizational structures in the university research environment as well as the special role of the professorial level as role model for the scientific staff.}, language = {en} } @incollection{HinkeVervierBrauneretal.2022, author = {Hinke, Christian and Vervier, Luisa and Brauner, Philipp and Schneider, Sebastian and Steuer-Dankert, Linda and Ziefle, Martina and Leicht-Scholten, Carmen}, title = {Capability configuration in next generation manufacturing}, series = {Forecasting next generation manufacturing : digital shadows, human-machine collaboration, and data-driven business models}, booktitle = {Forecasting next generation manufacturing : digital shadows, human-machine collaboration, and data-driven business models}, publisher = {Springer}, address = {Cham}, isbn = {978-3-031-07733-3}, doi = {10.1007/978-3-031-07734-0_6}, pages = {95 
-- 106}, year = {2022}, abstract = {Industrial production systems are facing radical change in multiple dimensions. This change is caused by technological developments and the digital transformation of production, as well as the call for political and social change to facilitate a transformation toward sustainability. These changes affect both the capabilities of production systems and companies and the design of higher education and educational programs. Given the high uncertainty in the likelihood of occurrence and the technical, economic, and societal impacts of these concepts, we conducted a technology foresight study, in the form of a real-time Delphi analysis, to derive reliable future scenarios featuring the next generation of manufacturing systems. This chapter presents the capabilities dimension and describes each projection in detail, offering current case study examples and discussing related research, as well as implications for policy makers and firms. Specifically, we discuss the benefits of capturing expert knowledge and making it accessible to newcomers, especially in highly specialized industries. The experts argue that in order to cope with the challenges and circumstances of today's world, students must already during their education at university learn how to work with AI and other technologies. This means that study programs must change and that universities must adapt their structural aspects to meet the needs of the students.}, language = {en} }