@inproceedings{Gaigall2022, author = {Gaigall, Daniel}, title = {On Consistent Hypothesis Testing In General Hilbert Spaces}, series = {Proceedings of the 4th International Conference on Statistics: Theory and Applications (ICSTA'22)}, booktitle = {Proceedings of the 4th International Conference on Statistics: Theory and Applications (ICSTA'22)}, publisher = {Avestia Publishing}, address = {Orl{\'e}ans, Kanada}, doi = {10.11159/icsta22.157}, pages = {Paper No. 157}, year = {2022}, abstract = {Inference on the basis of high-dimensional and functional data are two topics which are discussed frequently in the current statistical literature. A possibility to include both topics in a single approach is working on a very general space for the underlying observations, such as a separable Hilbert space. We propose a general method for consistently hypothesis testing on the basis of random variables with values in separable Hilbert spaces. We avoid concerns with the curse of dimensionality due to a projection idea. We apply well-known test statistics from nonparametric inference to the projected data and integrate over all projections from a specific set and with respect to suitable probability measures. In contrast to classical methods, which are applicable for real-valued random variables or random vectors of dimensions lower than the sample size, the tests can be applied to random vectors of dimensions larger than the sample size or even to functional and high-dimensional data. In general, resampling procedures such as bootstrap or permutation are suitable to determine critical values. The idea can be extended to the case of incomplete observations. Moreover, we develop an efficient algorithm for implementing the method. Examples are given for testing goodness-of-fit in a one-sample situation in [1] or for testing marginal homogeneity on the basis of a paired sample in [2]. 
Here, the test statistics in use can be seen as generalizations of the well-known Cram{\'e}r-von-Mises test statistics in the one-sample and two-samples case. The treatment of other testing problems is possible as well. By using the theory of U-statistics, for instance, asymptotic null distributions of the test statistics are obtained as the sample size tends to infinity. Standard continuity assumptions ensure the asymptotic exactness of the tests under the null hypothesis and that the tests detect any alternative in the limit. Simulation studies demonstrate size and power of the tests in the finite sample case, confirm the theoretical findings, and are used for the comparison with concurring procedures. A possible application of the general approach is inference for stock market returns, also in high data frequencies. In the field of empirical finance, statistical inference of stock market prices usually takes place on the basis of related log-returns as data. In the classical models for stock prices, i.e., the exponential L{\'e}vy model, Black-Scholes model, and Merton model, properties such as independence and stationarity of the increments ensure an independent and identically distributed structure of the data. Specific trends during certain periods of the stock price processes can cause complications in this regard. In fact, our approach can compensate those effects by the treatment of the log-returns as random vectors or even as functional data.}, language = {en} } @inproceedings{TranTrinhDaoetal.2022, author = {Tran, Ngoc Trinh and Trinh, Tu Luc and Dao, Ngoc Tien and Giap, Van Tan and Truong, Manh Khuyen and Dinh, Thuy Ha and Staat, Manfred}, title = {Limit and shakedown analysis of structures under random strength}, series = {Proceedings of (NACOME2022) The 11th National Conference on Mechanics, Vol. 1. 
Solid Mechanics, Rock Mechanics, Artificial Intelligence, Teaching and Training}, booktitle = {Proceedings of (NACOME2022) The 11th National Conference on Mechanics, Vol. 1. Solid Mechanics, Rock Mechanics, Artificial Intelligence, Teaching and Training}, publisher = {Nha xuat ban Khoa hoc tu nhien va Cong nghe (Verlag Naturwissenschaft und Technik)}, address = {Hanoi}, isbn = {978-604-357-084-7}, pages = {510 -- 518}, year = {2022}, abstract = {Direct methods comprising limit and shakedown analysis is a branch of computational mechanics. It plays a significant role in mechanical and civil engineering design. The concept of direct method aims to determinate the ultimate load bearing capacity of structures beyond the elastic range. For practical problems, the direct methods lead to nonlinear convex optimization problems with a large number of variables and constraints. If strength and loading are random quantities, the problem of shakedown analysis is considered as stochastic programming. This paper presents a method so called chance constrained programming, an effective method of stochastic programming, to solve shakedown analysis problem under random condition of strength. In this our investigation, the loading is deterministic, the strength is distributed as normal or lognormal variables.}, language = {en} } @inproceedings{Maurer2022, author = {Maurer, Florian}, title = {Framework to provide a simulative comparison of different energy market designs}, series = {Energy Informatics}, volume = {5}, booktitle = {Energy Informatics}, number = {2, Article number: 12}, publisher = {Springer Nature}, issn = {2520-8942}, doi = {10.1186/s42162-022-00215-6}, pages = {18 -- 20}, year = {2022}, abstract = {Useful market simulations are key to the evaluation of different market designs existing of multiple market mechanisms or rules. Yet a simulation framework which has a comparison of different market mechanisms in mind was not found. 
The need to create an objective view on different sets of market rules while investigating meaningful agent strategies concludes that such a simulation framework is needed to advance the research on this subject. An overview of different existing market simulation models is given which also shows the research gap and the missing capabilities of those systems. Finally, a methodology is outlined how a novel market simulation which can answer the research questions can be developed.}, language = {en} } @inproceedings{BurgethKleefeldZhangetal.2022, author = {Burgeth, Bernhard and Kleefeld, Andreas and Zhang, Eugene and Zhang, Yue}, title = {Towards Topological Analysis of Non-symmetric Tensor Fields via Complexification}, series = {Discrete Geometry and Mathematical Morphology}, booktitle = {Discrete Geometry and Mathematical Morphology}, editor = {Baudrier, {\'E}tienne and Naegel, Beno{\^i}t and Kr{\"a}henb{\"u}hl, Adrien and Tajine, Mohamed}, publisher = {Springer}, address = {Cham}, isbn = {978-3-031-19897-7}, doi = {10.1007/978-3-031-19897-7_5}, pages = {48 -- 59}, year = {2022}, abstract = {Fields of asymmetric tensors play an important role in many applications such as medical imaging (diffusion tensor magnetic resonance imaging), physics, and civil engineering (for example Cauchy-Green-deformation tensor, strain tensor with local rotations, etc.). However, such asymmetric tensors are usually symmetrized and then further processed. Using this procedure results in a loss of information. A new method for the processing of asymmetric tensor fields is proposed restricting our attention to tensors of second-order given by a 2x2 array or matrix with real entries. This is achieved by a transformation resulting in Hermitian matrices that have an eigendecomposition similar to symmetric matrices. With this new idea numerical results for real-world data arising from a deformation of an object by external forces are given. 
It is shown that the asymmetric part indeed contains valuable information.}, language = {en} } @inproceedings{TranStaat2021, author = {Tran, Ngoc Trinh and Staat, Manfred}, title = {FEM shakedown analysis of Kirchhoff-Love plates under uncertainty of strength}, series = {Proceedings of UNCECOMP 2021}, booktitle = {Proceedings of UNCECOMP 2021}, isbn = {978-618-85072-6-5}, doi = {10.7712/120221.8041.19047}, pages = {323 -- 338}, year = {2021}, abstract = {A new formulation to calculate the shakedown limit load of Kirchhoff plates under stochastic conditions of strength is developed. Direct structural reliability design by chance constrained programming is based on the prescribed failure probabilities, which is an effective approach of stochastic programming if it can be formulated as an equivalent deterministic optimization problem. We restrict uncertainty to strength, the loading is still deterministic. A new formulation is derived in case of random strength with lognormal distribution. Upper bound and lower bound shakedown load factors are calculated simultaneously by a dual algorithm.}, language = {en} } @inproceedings{OlderogMohrBegingetal.2021, author = {Olderog, M. and Mohr, P. and Beging, Stefan and Tsoumpas, C. 
and Ziemons, Karl}, title = {Simulation study on the role of tissue-scattered events in improving sensitivity for a compact time of flight compton positron emission tomograph}, series = {2020 IEEE Nuclear Science Symposium and Medical Imaging Conference (NSS/MIC)}, booktitle = {2020 IEEE Nuclear Science Symposium and Medical Imaging Conference (NSS/MIC)}, publisher = {IEEE}, address = {New York, NY}, isbn = {978-1-7281-7693-2}, doi = {10.1109/NSS/MIC42677.2020.9507901}, pages = {4 Seiten}, year = {2021}, abstract = {In positron emission tomography improving time, energy and spatial detector resolutions and using Compton kinematics introduces the possibility to reconstruct a radioactivity distribution image from scatter coincidences, thereby enhancing image quality. The number of single scattered coincidences alone is in the same order of magnitude as true coincidences. In this work, a compact Compton camera module based on monolithic scintillation material is investigated as a detector ring module. The detector interactions are simulated with Monte Carlo package GATE. The scattering angle inside the tissue is derived from the energy of the scattered photon, which results in a set of possible scattering trajectories or broken line of response. The Compton kinematics collimation reduces the number of solutions. Additionally, the time of flight information helps localize the position of the annihilation. One of the questions of this investigation is related to how the energy, spatial and temporal resolutions help confine the possible annihilation volume. A comparison of currently technically feasible detector resolutions (under laboratory conditions) demonstrates the influence on this annihilation volume and shows that energy and coincidence time resolution have a significant impact. 
An enhancement of the latter from 400 ps to 100 ps leads to a smaller annihilation volume of around 50\%, while a change of the energy resolution in the absorber layer from 12\% to 4.5\% results in a reduction of 60\%. The inclusion of single tissue-scattered data has the potential to increase the sensitivity of a scanner by a factor of 2 to 3 times. The concept can be further optimized and extended for multiple scatter coincidences and subsequently validated by a reconstruction algorithm.}, language = {en} } @inproceedings{MandekarJentschLutzetal.2021, author = {Mandekar, Swati and Jentsch, Lina and Lutz, Kai and Behbahani, Mehdi and Melnykowycz, Mark}, title = {Earable design analysis for sleep EEG measurements}, series = {UbiComp '21}, booktitle = {UbiComp '21}, doi = {10.1145/3460418.3479328}, pages = {171 -- 175}, year = {2021}, abstract = {Conventional EEG devices cannot be used in everyday life and hence, past decade research has been focused on Ear-EEG for mobile, at-home monitoring for various applications ranging from emotion detection to sleep monitoring. As the area available for electrode contact in the ear is limited, the electrode size and location play a vital role for an Ear-EEG system. In this investigation, we present a quantitative study of ear-electrodes with two electrode sizes at different locations in a wet and dry configuration. Electrode impedance scales inversely with size and ranges from 450 kΩ to 1.29 MΩ for dry and from 22 kΩ to 42 kΩ for wet contact at 10 Hz. For any size, the location in the ear canal with the lowest impedance is ELE (Left Ear Superior), presumably due to increased contact pressure caused by the outer-ear anatomy. The results can be used to optimize signal pickup and SNR for specific applications. 
We demonstrate this by recording sleep spindles during sleep onset with high quality (5.27 μVrms).}, language = {en} } @inproceedings{KloeserKohlKraftetal.2021, author = {Kl{\"o}ser, Lars and Kohl, Philipp and Kraft, Bodo and Z{\"u}ndorf, Albert}, title = {Multi-attribute relation extraction (MARE): simplifying the application of relation extraction}, series = {Proceedings of the 2nd International Conference on Deep Learning Theory and Applications DeLTA - Volume 1}, booktitle = {Proceedings of the 2nd International Conference on Deep Learning Theory and Applications DeLTA - Volume 1}, publisher = {SciTePress}, address = {Set{\'u}bal}, isbn = {978-989-758-526-5}, doi = {10.5220/0010559201480156}, pages = {148 -- 156}, year = {2021}, abstract = {Natural language understanding's relation extraction makes innovative and encouraging novel business concepts possible and facilitates new digitalized decision-making processes. Current approaches allow the extraction of relations with a fixed number of entities as attributes. Extracting relations with an arbitrary amount of attributes requires complex systems and costly relation-trigger annotations to assist these systems. We introduce multi-attribute relation extraction (MARE) as an assumption-less problem formulation with two approaches, facilitating an explicit mapping from business use cases to the data annotations. Avoiding elaborated annotation constraints simplifies the application of relation extraction approaches. The evaluation compares our models to current state-of-the-art event extraction and binary relation extraction methods. 
Our approaches show improvement compared to these on the extraction of general multi-attribute relations.}, language = {en} } @inproceedings{KohlSchmidtsKloeseretal.2021, author = {Kohl, Philipp and Schmidts, Oliver and Kl{\"o}ser, Lars and Werth, Henri and Kraft, Bodo and Z{\"u}ndorf, Albert}, title = {STAMP 4 NLP - an agile framework for rapid quality-driven NLP applications development}, series = {Quality of Information and Communications Technology. QUATIC 2021}, booktitle = {Quality of Information and Communications Technology. QUATIC 2021}, publisher = {Springer}, address = {Cham}, isbn = {978-3-030-85346-4}, doi = {10.1007/978-3-030-85347-1_12}, pages = {156 -- 166}, year = {2021}, abstract = {The progress in natural language processing (NLP) research over the last years, offers novel business opportunities for companies, as automated user interaction or improved data analysis. Building sophisticated NLP applications requires dealing with modern machine learning (ML) technologies, which impedes enterprises from establishing successful NLP projects. Our experience in applied NLP research projects shows that the continuous integration of research prototypes in production-like environments with quality assurance builds trust in the software and shows convenience and usefulness regarding the business goal. We introduce STAMP 4 NLP as an iterative and incremental process model for developing NLP applications. With STAMP 4 NLP, we merge software engineering principles with best practices from data science. Instantiating our process model allows efficiently creating prototypes by utilizing templates, conventions, and implementations, enabling developers and data scientists to focus on the business goals. 
Due to our iterative-incremental approach, businesses can deploy an enhanced version of the prototype to their software environment after every iteration, maximizing potential business value and trust early and avoiding the cost of successful yet never deployed experiments.}, language = {en} } @inproceedings{SchmidtsKraftWinkensetal.2021, author = {Schmidts, Oliver and Kraft, Bodo and Winkens, Marvin and Z{\"u}ndorf, Albert}, title = {Catalog integration of heterogeneous and volatile product data}, series = {DATA 2020: Data Management Technologies and Applications}, booktitle = {DATA 2020: Data Management Technologies and Applications}, publisher = {Springer}, address = {Cham}, isbn = {978-3-030-83013-7}, doi = {10.1007/978-3-030-83014-4_7}, pages = {134 -- 153}, year = {2021}, abstract = {The integration of frequently changing, volatile product data from different manufacturers into a single catalog is a significant challenge for small and medium-sized e-commerce companies. They rely on timely integrating product data to present them aggregated in an online shop without knowing format specifications, concept understanding of manufacturers, and data quality. Furthermore, format, concepts, and data quality may change at any time. Consequently, integrating product catalogs into a single standardized catalog is often a laborious manual task. Current strategies to streamline or automate catalog integration use techniques based on machine learning, word vectorization, or semantic similarity. However, most approaches struggle with low-quality or real-world data. We propose Attribute Label Ranking (ALR) as a recommendation engine to simplify the integration process of previously unknown, proprietary tabular format into a standardized catalog for practitioners. We evaluate ALR by focusing on the impact of different neural network architectures, language features, and semantic similarity. 
Additionally, we consider metrics for industrial application and present the impact of ALR in production and its limitations.}, language = {en} } @inproceedings{BornheimGriegerBialonski2021, author = {Bornheim, Tobias and Grieger, Niklas and Bialonski, Stephan}, title = {FHAC at GermEval 2021: Identifying German toxic, engaging, and fact-claiming comments with ensemble learning}, series = {Proceedings of the GermEval 2021 Workshop on the Identification of Toxic, Engaging, and Fact-Claiming Comments : 17th Conference on Natural Language Processing KONVENS 2021}, booktitle = {Proceedings of the GermEval 2021 Workshop on the Identification of Toxic, Engaging, and Fact-Claiming Comments : 17th Conference on Natural Language Processing KONVENS 2021}, publisher = {Heinrich Heine University}, address = {D{\"u}sseldorf}, doi = {10.48415/2021/fhw5-x128}, pages = {105 -- 111}, year = {2021}, language = {en} } @inproceedings{SildatkeKarwanniKraftetal.2020, author = {Sildatke, Michael and Karwanni, Hendrik and Kraft, Bodo and Schmidts, Oliver and Z{\"u}ndorf, Albert}, title = {Automated Software Quality Monitoring in Research Collaboration Projects}, series = {ICSEW'20: Proceedings of the IEEE/ACM 42nd International Conference on Software Engineering Workshops}, booktitle = {ICSEW'20: Proceedings of the IEEE/ACM 42nd International Conference on Software Engineering Workshops}, publisher = {IEEE}, address = {New York, NY}, doi = {10.1145/3387940.3391478}, pages = {603 -- 610}, year = {2020}, abstract = {In collaborative research projects, both researchers and practitioners work together solving business-critical challenges. These projects often deal with ETL processes, in which humans extract information from non-machine-readable documents by hand. AI-based machine learning models can help to solve this problem. Since machine learning approaches are not deterministic, their quality of output may decrease over time. 
This fact leads to an overall quality loss of the application which embeds machine learning models. Hence, the software qualities in development and production may differ. Machine learning models are black boxes. That makes practitioners skeptical and increases the inhibition threshold for early productive use of research prototypes. Continuous monitoring of software quality in production offers an early response capability on quality loss and encourages the use of machine learning approaches. Furthermore, experts have to ensure that they integrate possible new inputs into the model training as quickly as possible. In this paper, we introduce an architecture pattern with a reference implementation that extends the concept of Metrics Driven Research Collaboration with an automated software quality monitoring in productive use and a possibility to auto-generate new test data coming from processed documents in production. Through automated monitoring of the software quality and auto-generated test data, this approach ensures that the software quality meets and keeps requested thresholds in productive use, even during further continuous deployment and changing input data.}, language = {en} } @inproceedings{PohleFroehlichDalitzRichteretal.2020, author = {Pohle-Fr{\"o}hlich, Regina and Dalitz, Christoph and Richter, Charlotte and Hahnen, Tobias and St{\"a}udle, Benjamin and Albracht, Kirsten}, title = {Estimation of muscle fascicle orientation in ultrasonic images}, series = {Proceedings of the 15th International Joint Conference on Computer Vision, Imaging and Computer Graphics Theory and Applications - Volume 5}, booktitle = {Proceedings of the 15th International Joint Conference on Computer Vision, Imaging and Computer Graphics Theory and Applications - Volume 5}, publisher = {SciTePress}, address = {Set{\'u}bal, Portugal}, isbn = {978-989-758-402-2}, doi = {10.5220/0008933900790086}, pages = {79 -- 86}, year = {2020}, abstract = {We compare four different algorithms 
for automatically estimating the muscle fascicle angle from ultrasonic images: the vesselness filter, the Radon transform, the projection profile method and the gray level co-occurrence matrix (GLCM). The algorithm results are compared to ground truth data generated by three different experts on 425 image frames from two videos recorded during different types of motion. The best agreement with the ground truth data was achieved by a combination of pre-processing with a vesselness filter and measuring the angle with the projection profile method. The robustness of the estimation is increased by applying the algorithms to subregions with high gradients and performing a LOESS fit through these estimates.}, language = {en} } @inproceedings{SchmidtsKraftWinkensetal.2020, author = {Schmidts, Oliver and Kraft, Bodo and Winkens, Marvin and Z{\"u}ndorf, Albert}, title = {Catalog integration of low-quality product data by attribute label ranking}, series = {Proceedings of the 9th International Conference on Data Science, Technology and Applications DATA - Volume 1}, booktitle = {Proceedings of the 9th International Conference on Data Science, Technology and Applications DATA - Volume 1}, publisher = {SciTePress}, address = {Set{\'u}bal, Portugal}, isbn = {978-989-758-440-4}, doi = {10.5220/0009831000900101}, pages = {90 -- 101}, year = {2020}, abstract = {The integration of product data from heterogeneous sources and manufacturers into a single catalog is often still a laborious, manual task. Especially small- and medium-sized enterprises face the challenge of timely integrating the data their business relies on to have an up-to-date product catalog, due to format specifications, low quality of data and the requirement of expert knowledge. Additionally, modern approaches to simplify catalog integration demand experience in machine learning, word vectorization, or semantic similarity that such enterprises do not have. Furthermore, most approaches struggle with low-quality data. 
We propose Attribute Label Ranking (ALR), an easy to understand and simple to adapt learning approach. ALR leverages a model trained on real-world integration data to identify the best possible schema mapping of previously unknown, proprietary, tabular format into a standardized catalog schema. Our approach predicts multiple labels for every attribute of an input column. The whole column is taken into consideration to rank among these labels. We evaluate ALR regarding the correctness of predictions and compare the results on real-world data to state-of-the-art approaches. Additionally, we report findings during experiments and limitations of our approach.}, language = {en} } @inproceedings{IomdinaKiselevaKotliaretal.2020, author = {Iomdina, Elena N. and Kiseleva, Anna A. and Kotliar, Konstantin and Luzhnov, Petr V.}, title = {Quantification of Choroidal Blood Flow Using the OCT-A System Based on Voxel Scan Processing}, series = {Proceedings of the International Conference on Biomedical Innovations and Applications- BIA 2020}, booktitle = {Proceedings of the International Conference on Biomedical Innovations and Applications- BIA 2020}, publisher = {IEEE}, address = {New York, NY}, isbn = {978-1-7281-7073-2}, doi = {10.1109/BIA50171.2020.9244511}, pages = {41 -- 44}, year = {2020}, abstract = {The paper presents a method for the quantitative assessment of choroidal blood flow using an OCT-A system. The developed technique for processing of OCT-A scans is divided into two stages. At the first stage, the identification of the boundaries in the selected portion was performed. At the second stage, each pixel mark on the selected layer was represented as a volume unit, a voxel, which characterizes the region of moving blood. Three geometric shapes were considered to represent the voxel. On the example of one OCT-A scan, this work presents a quantitative assessment of the blood flow index. 
A possible modification of two-stage algorithm based on voxel scan processing is presented.}, language = {en} } @inproceedings{SchmidtsKraftSiebigterothetal.2019, author = {Schmidts, Oliver and Kraft, Bodo and Siebigteroth, Ines and Z{\"u}ndorf, Albert}, title = {Schema Matching with Frequent Changes on Semi-Structured Input Files: A Machine Learning Approach on Biological Product Data}, series = {Proceedings of the 21st International Conference on Enterprise Information Systems - Volume 1: ICEIS}, booktitle = {Proceedings of the 21st International Conference on Enterprise Information Systems - Volume 1: ICEIS}, isbn = {978-989-758-372-8}, doi = {10.5220/0007723602080215}, pages = {208 -- 215}, year = {2019}, language = {en} } @inproceedings{EschlerWozniakRichteretal.2019, author = {Eschler, Eric and Wozniak, Felix and Richter, Christoph and Drechsler, Klaus}, title = {Materialanalyse an lokal verst{\"a}rkten Triaxialgeflechten}, series = {Leichtbau in Forschung und industrieller Anwendung von der Nano- bis zur Makroebene, LLC, Landshuter Leichtbau-Colloquium, 9}, booktitle = {Leichtbau in Forschung und industrieller Anwendung von der Nano- bis zur Makroebene, LLC, Landshuter Leichtbau-Colloquium, 9}, publisher = {Leichtbau Cluster}, address = {Landshut}, isbn = {978-3-9818439-2-7}, pages = {120 -- 131}, year = {2019}, language = {de} } @inproceedings{HingleyDikta2019, author = {Hingley, Peter and Dikta, Gerhard}, title = {Finding a well performing {Box-Jenkins} forecasting model for annualised patent filings counts}, series = {International Symposium on Forecasting, Thessaloniki, Greece, June 2019}, booktitle = {International Symposium on Forecasting, Thessaloniki, Greece, June 2019}, pages = {24 Folien}, year = {2019}, language = {en} } @inproceedings{HunkerJungGossmannetal.2019, author = {Hunker, Jan and Jung, Alexander and Goßmann, Matthias and Linder, Peter and Staat, Manfred}, title = {Development of a tool to analyze the conduction speed in microelectrode 
array measurements of cardiac tissue}, series = {3rd YRA MedTech Symposium 2019 : May 24 / 2019 / FH Aachen}, booktitle = {3rd YRA MedTech Symposium 2019 : May 24 / 2019 / FH Aachen}, editor = {Staat, Manfred and Erni, Daniel}, publisher = {Universit{\"a}t Duisburg-Essen}, address = {Duisburg}, organization = {MedTech Symposium}, isbn = {978-3-940402-22-6}, doi = {10.17185/duepublico/48750}, pages = {7 -- 8}, year = {2019}, abstract = {The discovery of human induced pluripotent stem cells reprogrammed from somatic cells [1] and their ability to differentiate into cardiomyocytes (hiPSC-CMs) has provided a robust platform for drug screening [2]. Drug screenings are essential in the development of new components, particularly for evaluating the potential of drugs to induce life-threatening pro-arrhythmias. Between 1988 and 2009, 14 drugs have been removed from the market for this reason [3]. The microelectrode array (MEA) technique is a robust tool for drug screening as it detects the field potentials (FPs) for the entire cell culture. Furthermore, the propagation of the field potential can be examined on an electrode basis. 
To analyze MEA measurements in detail, we have developed an open-source tool.}, language = {en} } @inproceedings{RamanJungHorvathetal.2019, author = {Raman, Aravind Hariharan and Jung, Alexander and Horv{\'a}th, Andr{\'a}s and Becker, Nadine and Staat, Manfred}, title = {Modification of a computer model of human stem cell-derived cardiomyocyte electrophysiology based on Patch-Clamp measurements}, series = {3rd YRA MedTech Symposium 2019 : May 24 / 2019 / FH Aachen}, booktitle = {3rd YRA MedTech Symposium 2019 : May 24 / 2019 / FH Aachen}, editor = {Staat, Manfred and Erni, Daniel}, publisher = {Universit{\"a}t Duisburg-Essen}, address = {Duisburg}, organization = {MedTech Symposium}, isbn = {978-3-940402-22-6}, doi = {10.17185/duepublico/48750}, pages = {10 -- 11}, year = {2019}, abstract = {Human induced pluripotent stem cells (hiPSCs) have shown to be promising in disease studies and drug screenings [1]. Cardiomyocytes derived from hiPSCs have been extensively investigated using patch-clamping and optical methods to compare their electromechanical behaviour relative to fully matured adult cells. Mathematical models can be used for translating findings on hiPSC-CMs to adult cells [2] or to better understand the mechanisms of various ion channels when a drug is applied [3,4]. Paci et al. (2013) [3] developed the first model of hiPSC-CMs, which they later refined based on new data [3]. The model is based on iCells® (Fujifilm Cellular Dynamics, Inc. (FCDI), Madison WI, USA) but major differences among several cell lines and even within a single cell line have been found and motivate an approach for creating sample-specific models. We have developed an optimisation algorithm that parameterises the conductances (in S/F=Siemens/Farad) of the latest Paci et al. model (2018) [5] using current-voltage data obtained in individual patch-clamp experiments derived from an automated patch clamp system (Patchliner, Nanion Technologies GmbH, Munich).}, language = {en} }