@inproceedings{MaurerMiskiwAcostaetal.2023, author = {Maurer, Florian and Miskiw, Kim K. and Acosta, Rebeca Ramirez and Harder, Nick and Sander, Volker and Lehnhoff, Sebastian}, title = {Market abstraction of energy markets and policies - application in an agent-based modeling toolbox}, series = {EI.A 2023: Energy Informatics}, booktitle = {EI.A 2023: Energy Informatics}, editor = {Jorgensen, Bo Norregaard and Pereira da Silva, Luiz Carlos and Ma, Zheng}, publisher = {Springer}, address = {Cham}, isbn = {978-3-031-48651-7 (Print)}, doi = {10.1007/978-3-031-48652-4_10}, pages = {139 -- 157}, year = {2023}, abstract = {In light of emerging challenges in energy systems, markets are prone to changing dynamics and market design. Simulation models are commonly used to understand the changing dynamics of future electricity markets. However, existing market models were often created with specific use cases in mind, which limits their flexibility and usability. This can impose challenges for using a single model to compare different market designs. This paper introduces a new method of defining market designs for energy market simulations. The proposed concept makes it easy to incorporate different market designs into electricity market models by using relevant parameters derived from analyzing existing simulation tools, morphological categorization and ontologies. These parameters are then used to derive a market abstraction and integrate it into an agent-based simulation framework, allowing for a unified analysis of diverse market designs. Furthermore, we showcase the usability of integrating new types of long-term contracts and over-the-counter trading. To validate this approach, two case studies are demonstrated: a pay-as-clear market and a pay-as-bid long-term market. 
These examples demonstrate the capabilities of the proposed framework.}, language = {en} } @inproceedings{MaurerNitschKochemsetal.2024, author = {Maurer, Florian and Nitsch, Felix and Kochems, Johannes and Schimeczek, Christoph and Sander, Volker and Lehnhoff, Sebastian}, title = {Know your tools - a comparison of two open agent-based energy market models}, series = {2024 20th International Conference on the European Energy Market (EEM)}, booktitle = {2024 20th International Conference on the European Energy Market (EEM)}, publisher = {IEEE}, address = {New York, NY}, doi = {10.1109/EEM60825.2024.10609021}, pages = {8 Seiten}, year = {2024}, abstract = {Due to the transition to renewable energies, electricity markets need to be made fit for purpose. To enable the comparison of different energy market designs, modeling tools covering market actors and their heterogeneous behavior are needed. Agent-based models are ideally suited for this task. Such models can be used to simulate and analyze changes to market design or market mechanisms and their impact on market dynamics. In this paper, we conduct an evaluation and comparison of two actively developed open-source energy market simulation models. The two models, namely AMIRIS and ASSUME, are both designed to simulate future energy markets using an agent-based approach. The assessment encompasses modelling features and techniques, model performance, as well as a comparison of model results, which can serve as a blueprint for future comparative studies of simulation models. The main comparison dataset includes data of Germany in 2019 and simulates the Day-Ahead market and participating actors as individual agents. 
Both models are comparably close to the benchmark dataset with a MAE between 5.6 and 6.4 €/MWh while also modeling the actual dispatch realistically.}, language = {en} } @inproceedings{MaurerSejdijaSander2024, author = {Maurer, Florian and Sejdija, Jonathan and Sander, Volker}, title = {Decentralized energy data storages through an Open Energy Database Server}, doi = {10.5281/zenodo.10607895}, pages = {5 Seiten}, year = {2024}, abstract = {In the research domain of energy informatics, the importance of open data is rising rapidly. This can be seen as various new public datasets are created and published. Unfortunately, in many cases, the data is not available under a permissive license corresponding to the FAIR principles, often lacking accessibility or reusability. Furthermore, the source format often differs from the desired data format or does not meet the demands to be queried in an efficient way. To solve this on a small scale a toolbox for ETL-processes is provided to create a local energy data server with open access data from different valuable sources in a structured format. So while the sources itself do not fully comply with the FAIR principles, the provided unique toolbox allows for an efficient processing of the data as if the FAIR principles would be met. 
The energy data server currently includes information of power systems, weather data, network frequency data, European energy and gas data for demand and generation and more. However, a solution to the core problem - missing alignment to the FAIR principles - is still needed for the National Research Data Infrastructure.}, language = {en} } @inproceedings{GerhardsBelloumBerretzetal.2010, author = {Gerhards, Michael and Belloum, Adam and Berretz, Frank and Sander, Volker and Skorupa, Sascha}, title = {A history-tracing XML-based provenance framework for workflows}, series = {The 5th Workshop on Workflows in Support of Large-Scale Science}, booktitle = {The 5th Workshop on Workflows in Support of Large-Scale Science}, publisher = {IEEE}, address = {New York}, isbn = {978-1-4244-8989-3}, doi = {10.1109/WORKS.2010.5671873}, pages = {10 Seiten}, year = {2010}, abstract = {The importance of validating and reproducing the outcome of computational processes is fundamental to many application domains. Assuring the provenance of workflows will likely become even more important with respect to the incorporation of human tasks to standard workflows by emerging standards such as WS-HumanTask. This paper addresses this trend by an actor-based workflow approach that actively support provenance. It proposes a framework to track and store provenance information automatically that applies for various workflow management systems. In particular, the introduced provenance framework supports the documentation of workflows in a legally binding way. The authors therefore use the concept of layered XML documents, i.e. history-tracing XML. 
Furthermore, the proposed provenance framework enables the executors (actors) of a particular workflow task to attest their operations and the associated results by integrating digital XML signatures.}, language = {en} } @inproceedings{BerretzSkorupaSanderetal.2010, author = {Berretz, Frank and Skorupa, Sascha and Sander, Volker and Belloum, Adam}, title = {Towards an actor-driven workflow management system for grids}, series = {Proceedings of 2010 International Symposium on Collaborative Technologies and Systems}, booktitle = {Proceedings of 2010 International Symposium on Collaborative Technologies and Systems}, publisher = {IEEE}, address = {Piscataway, NJ}, isbn = {978-1-4244-6619-1}, doi = {10.1109/CTS.2010.5478458}, pages = {611 -- 616}, year = {2010}, abstract = {Currently, most workflow management systems in Grid environments provide push-oriented job distribution strategies, where jobs are explicitly delegated to resources. In those scenarios the dedicated resources execute submitted jobs according to the request of a workflow engine or Grid wide scheduler. This approach has various limitations, particularly if human interactions should be integrated in workflow execution. To support human interactions with the benefit of enabling inter organizational computation and community approaches, this poster paper proposes the idea of a pull-based task distribution strategy. Here, heterogeneous resources, including human interaction, should actively select tasks for execution from a central repository. This leads to special demands regarding security issues like access control. In the established push-based job execution the resources are responsible for granting access to workflows and job initiators. In general this is done by access control lists, where users are explicitly mapped to local accounts according to their policies. In the pull-based approach the resources actively apply for job executions by sending requests to a central task repository. 
This means that every resource has to be able to authenticate against the repository to be authorized for task execution. In other words the authorization is relocated from the resources to the repository. The poster paper introduces current work regarding to the mentioned security aspects in the pull-based approach within the scope of the project "HiX4AGWS".}, language = {en} } @inproceedings{KirchnerSpelthahnSchoeningetal.2010, author = {Kirchner, Patrick and Spelthahn, Heiko and Sch{\"o}ning, Michael Josef and Henkel, Hartmut and Schneider, Andreas and Friedrich, Peter and Kolstad, Jens and Berger, J{\"o}rg}, title = {Realisierung eines Polyimid-basierten kalorimetrischen Gassensors zur Inline-{\"U}berwachung der H2O2-Konzentration in aseptischen Abf{\"u}llsystemen}, series = {Tagungsband: Sensoren und Messsysteme 2010}, booktitle = {Tagungsband: Sensoren und Messsysteme 2010}, publisher = {VDE Verlag}, address = {Berlin}, isbn = {978-3-8007-3260-9}, pages = {607 -- 612}, year = {2010}, abstract = {In aseptischen Abf{\"u}llsystemen wird Wasserstoffperoxid in der Gasphase aufgrund der stark oxidativen Wirkung zur Packstoffentkeimung eingesetzt. Dabei wird die Effizienz der Entkeimung im Wesentlichen von der vorliegenden H2O2-Konzentration im Packstoff bestimmt. Zur Inline-{\"U}berwachung der H2O2-Konzentration wurde ein kalorimetrischer Gassensor auf Basis einer flexiblen Polyimidfolie aus temperatursensitiven D{\"u}nnschicht-Widerst{\"a}nden und Mangan(IV)-oxid als katalytische Transducerschicht realisiert. Der Sensor weist ein lineares Ansprechverhalten mit einer Sensitivit{\"a}t von 7,15 °C/Vol.-\% in einem H2O2-Konzentrationsbereich von 0 bis 8 Vol.-\% auf. Weiterhin wurde zur Auslesung des Sensorsignals eine RFID-Elektronik, bestehend aus einem Sensor-Tag und einer Sende-/Empfangseinheit ausgelegt, sowie eine Abfolge des Messzyklus aufgestellt. 
Im weiteren Verlauf soll der kalorimetrische Gassensor mit der RFID-Elektronik gekoppelt und in eine Testverpackung zur Inline-{\"U}berwachung der H2O2-Konzentration in aseptischen Abf{\"u}llsystemen implementiert werden.}, language = {de} } @inproceedings{WernerSpelthahnSchoeningetal.2010, author = {Werner, Frederik and Spelthahn, Heiko and Sch{\"o}ning, Michael Josef and Krumbe, Christoph and Wagner, Torsten and Yoshinobu, Tatsuo and Keusgen, Michael}, title = {Neue Ansteuerungselektronik f{\"u}r LAPS-basierte Biosensoren zur gleichzeitig ortsaufgel{\"o}sten Messung der pH-Konzentration}, series = {Tagungsband: Sensoren und Messsysteme 2010}, booktitle = {Tagungsband: Sensoren und Messsysteme 2010}, publisher = {VDE Verlag}, address = {Berlin}, isbn = {978-3-8007-3260-9}, pages = {109 -- 114}, year = {2010}, abstract = {Ein lichtadressierbarer potentiometrischer Sensor (LAPS) kann die Konzentration eines oder mehrerer Analyten ortsaufgel{\"o}st auf der Sensoroberfl{\"a}che nachweisen. Dazu wird mit einer modulierten Lichtquelle die Halbleiterstruktur des zu untersuchenden Bereiches angeregt und ein entsprechender Photostrom ausgelesen. Durch gleichzeitige Anregung mehrere Bereiche durch Lichtquellen mit unterschiedlichen Modulationsfrequenzen k{\"o}nnen diese auch zeitgleich ausgelesen werden. Mit der neuen, hier vorgestellten Ansteuerungselektronik integriert in einem "Field Programmable Gate Array" (FPGA) ist es m{\"o}glich, mehrere Leuchtquellen gleichzeitig mit unterschiedlichen, w{\"a}hrend der Laufzeit festlegbaren Frequenzen, Phasen und Lichtintensit{\"a}ten zu betreiben. 
Somit kann das Frequenzverhalten des Sensors untersucht und die Konzentration des Analyten {\"u}ber das Oberfl{\"a}chenpotential mit Hilfe von Strom/Spannungs-Kurven und Phase/Spannungs-Kurven bestimmt werden.}, language = {de} } @inproceedings{PoghossianWagnerSchoening2010, author = {Poghossian, Arshak and Wagner, Holger and Sch{\"o}ning, Michael Josef}, title = {Automatisiertes „wafer level"-Testsystem zur Charakterisierung von siliziumbasierten Chemo- und Biosensoren}, series = {Tagungsband: Sensoren und Messsysteme 2010}, booktitle = {Tagungsband: Sensoren und Messsysteme 2010}, publisher = {VDE Verlag}, address = {Berlin}, isbn = {978-3-8007-3260-9}, pages = {89 -- 92}, year = {2010}, abstract = {Es wurde ein automatisiertes, computerunterst{\"u}tztes Testsystem f{\"u}r die Funktionspr{\"u}fung und Charakterisierung von (bio-)chemischen Sensoren auf Waferebene entwickelt und in einen konventionellen Spitzenmessplatz integriert. Das System erm{\"o}glicht die Charakterisierung und Identifizierung „funktionstauglicher" Sensoren bereits auf Waferebene zwischen den einzelnen Herstellungsschritten, wodurch weitere, bisher {\"u}bliche Verarbeitungsschritte wie das Fixieren, Bonden und Verkapseln f{\"u}r die defekten oder nicht funktionstauglichen Sensorstrukturen entf{\"a}llt. Außerdem bietet eine speziell entworfene miniaturisierte Durchflussmesszelle die M{\"o}glichkeit, bereits auf Waferlevel die Sensitivit{\"a}t, Drift, Hysterese und Ansprechzeit der (bio-)chemischen Sensoren zu charakterisieren. 
Das System wurde exemplarisch mit kapazitiven, pH-sensitiven EIS- (Elektrolyt-Isolator-Silizium) Strukturen und ISFET- (ionensensitiver Feldeffekttransistor) Strukturen mit verschiedenen Geometrien und Gate-Layouts getestet.}, language = {de} } @inproceedings{BaeckerPoghossianSchoeningetal.2010, author = {B{\"a}cker, Matthias and Poghossian, Arshak and Sch{\"o}ning, Michael Josef and Schnitzler, Thomas and Biselli, Manfred and Zang, Werner and Wagner, Patrick}, title = {Entwicklung eines modularen festk{\"o}rperbasierten Sensorsystems f{\"u}r die {\"U}berwachung von Zellkulturfermentationen}, series = {Tagungsband: Sensoren und Messsysteme 2010}, booktitle = {Tagungsband: Sensoren und Messsysteme 2010}, publisher = {VDE Verlag}, address = {Berlin}, isbn = {978-3-8007-3260-9}, pages = {688 -- 698}, year = {2010}, abstract = {In diesem Beitrag werden Ergebnisse der Entwicklung eines modularen festk{\"o}rperbasierten Sensorsystems f{\"u}r die {\"U}berwachung von Zellkulturfermentationen pr{\"a}sentiert. Zur Messung der Elektrolytleitf{\"a}higkeit wurde das Layout von Interdigitalelektroden angepasst, um in vergleichsweise gut leitenden Elektrolyten zu messen. Durch Quervernetzung von Glucose-Oxidase mit Glutaraldehyd und Immobilisierung auf einer Platinelektrode wurde ein amperometrischer Glucosesensor mit einem linearen Messbereich von bis zu 2 mM und einer Sensitivit{\"a}t von 168 nA/mM realisiert.}, language = {de} } @inproceedings{PhamVuTranetal.2010, author = {Pham, Phu Tinh and Vu, Khoi Duc and Tran, Thanh Ngoc and Staat, Manfred}, title = {A primal-dual algorithm for shakedown analysis of elastic-plastic bounded linearly kinematic hardening bodies}, year = {2010}, language = {en} } @inproceedings{NguyenRaatschenStaat2010, author = {Nguyen, N.-H. 
and Raatschen, Hans-J{\"u}rgen and Staat, Manfred}, title = {A hyperelastic model of biological tissue materials in tubular organs}, year = {2010}, language = {en} } @inproceedings{TranStaat2010, author = {Tran, Thanh Ngoc and Staat, Manfred}, title = {Shakedown analysis of two dimensional structures by an edge-based smoothed finite element method}, year = {2010}, language = {en} } @inproceedings{Gaigall2022, author = {Gaigall, Daniel}, title = {On Consistent Hypothesis Testing In General Hilbert Spaces}, series = {Proceedings of the 4th International Conference on Statistics: Theory and Applications (ICSTA'22)}, booktitle = {Proceedings of the 4th International Conference on Statistics: Theory and Applications (ICSTA'22)}, publisher = {Avestia Publishing}, address = {Orl{\'e}ans, Kanada}, doi = {10.11159/icsta22.157}, pages = {Paper No. 157}, year = {2022}, abstract = {Inference on the basis of high-dimensional and functional data are two topics which are discussed frequently in the current statistical literature. A possibility to include both topics in a single approach is working on a very general space for the underlying observations, such as a separable Hilbert space. We propose a general method for consistently hypothesis testing on the basis of random variables with values in separable Hilbert spaces. We avoid concerns with the curse of dimensionality due to a projection idea. We apply well-known test statistics from nonparametric inference to the projected data and integrate over all projections from a specific set and with respect to suitable probability measures. In contrast to classical methods, which are applicable for real-valued random variables or random vectors of dimensions lower than the sample size, the tests can be applied to random vectors of dimensions larger than the sample size or even to functional and high-dimensional data. In general, resampling procedures such as bootstrap or permutation are suitable to determine critical values. 
The idea can be extended to the case of incomplete observations. Moreover, we develop an efficient algorithm for implementing the method. Examples are given for testing goodness-of-fit in a one-sample situation in [1] or for testing marginal homogeneity on the basis of a paired sample in [2]. Here, the test statistics in use can be seen as generalizations of the well-known Cram{\'e}r-von-Mises test statistics in the one-sample and two-samples case. The treatment of other testing problems is possible as well. By using the theory of U-statistics, for instance, asymptotic null distributions of the test statistics are obtained as the sample size tends to infinity. Standard continuity assumptions ensure the asymptotic exactness of the tests under the null hypothesis and that the tests detect any alternative in the limit. Simulation studies demonstrate size and power of the tests in the finite sample case, confirm the theoretical findings, and are used for the comparison with concurring procedures. A possible application of the general approach is inference for stock market returns, also in high data frequencies. In the field of empirical finance, statistical inference of stock market prices usually takes place on the basis of related log-returns as data. In the classical models for stock prices, i.e., the exponential L{\'e}vy model, Black-Scholes model, and Merton model, properties such as independence and stationarity of the increments ensure an independent and identically structure of the data. Specific trends during certain periods of the stock price processes can cause complications in this regard. 
In fact, our approach can compensate those effects by the treatment of the log-returns as random vectors or even as functional data.}, language = {en} } @inproceedings{TranTrinhDaoetal.2022, author = {Tran, Ngoc Trinh and Trinh, Tu Luc and Dao, Ngoc Tien and Giap, Van Tan and Truong, Manh Khuyen and Dinh, Thuy Ha and Staat, Manfred}, title = {Limit and shakedown analysis of structures under random strength}, series = {Proceedings of (NACOME2022) The 11th National Conference on Mechanics, Vol. 1. Solid Mechanics, Rock Mechanics, Artificial Intelligence, Teaching and Training}, booktitle = {Proceedings of (NACOME2022) The 11th National Conference on Mechanics, Vol. 1. Solid Mechanics, Rock Mechanics, Artificial Intelligence, Teaching and Training}, publisher = {Nha xuat ban Khoa hoc tu nhien va Cong nghe (Verlag Naturwissenschaft und Technik)}, address = {Hanoi}, isbn = {978-604-357-084-7}, pages = {510 -- 518}, year = {2022}, abstract = {Direct methods comprising limit and shakedown analysis is a branch of computational mechanics. It plays a significant role in mechanical and civil engineering design. The concept of direct method aims to determinate the ultimate load bearing capacity of structures beyond the elastic range. For practical problems, the direct methods lead to nonlinear convex optimization problems with a large number of variables and constraints. If strength and loading are random quantities, the problem of shakedown analysis is considered as stochastic programming. This paper presents a method so called chance constrained programming, an effective method of stochastic programming, to solve shakedown analysis problem under random condition of strength. 
In this our investigation, the loading is deterministic, the strength is distributed as normal or lognormal variables.}, language = {en} } @inproceedings{StaatTran2022, author = {Staat, Manfred and Tran, Ngoc Trinh}, title = {Strain based brittle failure criteria for rocks}, series = {Proceedings of (NACOME2022) The 11th National Conference on Mechanics, Vol. 1. Solid Mechanics, Rock Mechanics, Artificial Intelligence, Teaching and Training}, booktitle = {Proceedings of (NACOME2022) The 11th National Conference on Mechanics, Vol. 1. Solid Mechanics, Rock Mechanics, Artificial Intelligence, Teaching and Training}, publisher = {Nha xuat ban Khoa hoc tu nhien va Cong nghe (Verlag Naturwissenschaft und Technik)}, address = {Hanoi}, isbn = {978-604-357-084-7}, pages = {500 -- 509}, year = {2022}, abstract = {When confining pressure is low or absent, extensional fractures are typical, with fractures occurring on unloaded planes in rock. These "paradox" fractures can be explained by a phenomenological extension strain failure criterion. In the past, a simple empirical criterion for fracture initiation in brittle rock has been developed. But this criterion makes unrealistic strength predictions in biaxial compression and tension. A new extension strain criterion overcomes this limitation by adding a weighted principal shear component. The weight is chosen, such that the enriched extension strain criterion represents the same failure surface as the Mohr-Coulomb (MC) criterion. Thus, the MC criterion has been derived as an extension strain criterion predicting failure modes, which are unexpected in the understanding of the failure of cohesive-frictional materials. In progressive damage of rock, the most likely fracture direction is orthogonal to the maximum extension strain. The enriched extension strain criterion is proposed as a threshold surface for crack initiation CI and crack damage CD and as a failure surface at peak P. 
Examples show that the enriched extension strain criterion predicts much lower volumes of damaged rock mass compared to the simple extension strain criterion.}, language = {en} } @inproceedings{KahraBreussKleefeldetal.2024, author = {Kahra, Marvin and Breuß, Michael and Kleefeld, Andreas and Welk, Martin}, title = {An Approach to Colour Morphological Supremum Formation Using the LogSumExp Approximation}, series = {Discrete Geometry and Mathematical Morphology}, booktitle = {Discrete Geometry and Mathematical Morphology}, editor = {Brunetti, Sara and Frosini, Andrea and Rinaldi, Simone}, publisher = {Springer}, address = {Cham}, isbn = {978-3-031-57793-2}, doi = {10.1007/978-3-031-57793-2_25}, pages = {325 -- 337}, year = {2024}, abstract = {Mathematical morphology is a part of image processing that has proven to be fruitful for numerous applications. Two main operations in mathematical morphology are dilation and erosion. These are based on the construction of a supremum or infimum with respect to an order over the tonal range in a certain section of the image. The tonal ordering can easily be realised in grey-scale morphology, and some morphological methods have been proposed for colour morphology. However, all of these have certain limitations. In this paper we present a novel approach to colour morphology extending upon previous work in the field based on the Loewner order. We propose to consider an approximation of the supremum by means of a log-sum exponentiation introduced by Maslov. We apply this to the embedding of an RGB image in a field of symmetric 2x2 matrices. In this way we obtain nearly isotropic matrices representing colours and the structural advantage of transitivity. 
In numerical experiments we highlight some remarkable properties of the proposed approach.}, language = {en} } @inproceedings{SimsekKrauseEngelmann2024, author = {Simsek, Beril and Krause, Hans-Joachim and Engelmann, Ulrich M.}, title = {Magnetic biosensing with magnetic nanoparticles: Simulative approach to predict signal intensity in frequency mixing magnetic detection}, series = {YRA MedTech Symposium (2024)}, booktitle = {YRA MedTech Symposium (2024)}, editor = {Digel, Ilya and Staat, Manfred and Trzewik, J{\"u}rgen and Sielemann, Stefanie and Erni, Daniel and Zylka, Waldemar}, publisher = {Universit{\"a}t Duisburg-Essen}, address = {Duisburg}, organization = {MedTech Symposium}, isbn = {978-3-940402-65-3}, doi = {10.17185/duepublico/81475}, pages = {27 -- 28}, year = {2024}, abstract = {Magnetic nanoparticles (MNP) are investigated with great interest for biomedical applications in diagnostics (e.g. imaging: magnetic particle imaging (MPI)), therapeutics (e.g. hyperthermia: magnetic fluid hyperthermia (MFH)) and multi-purpose biosensing (e.g. magnetic immunoassays (MIA)). What all of these applications have in common is that they are based on the unique magnetic relaxation mechanisms of MNP in an alternating magnetic field (AMF). While MFH and MPI are currently the most prominent examples of biomedical applications, here we present results on the relatively new biosensing application of frequency mixing magnetic detection (FMMD) from a simulation perspective. In general, we ask how the key parameters of MNP (core size and magnetic anisotropy) affect the FMMD signal: by varying the core size, we investigate the effect of the magnetic volume per MNP; and by changing the effective magnetic anisotropy, we study the MNPs' flexibility to leave its preferred magnetization direction. 
From this, we predict the most effective combination of MNP core size and magnetic anisotropy for maximum signal generation.}, language = {en} } @inproceedings{KirchnerHenkelNaetheretal.2008, author = {Kirchner, Patrick and Henkel, H. and N{\"a}ther, Niko and Spelthahn, H. and Schneider, A. and Berger, J{\"o}rg and Kolstad, J. and Friedrich, P. and Sch{\"o}ning, Michael Josef}, title = {RFID-basiertes Sensorsystem zur Realisierung intelligenter Verpackungen f{\"u}r die Nahrungsmittelindustrie}, series = {KMU - innovativ: IKT 2008. CD-ROM : BMBF-Statustagung KMU - innovativ: IKT, Darmstadt, 17. - 18. Nov. 2008}, booktitle = {KMU - innovativ: IKT 2008. CD-ROM : BMBF-Statustagung KMU - innovativ: IKT, Darmstadt, 17. - 18. Nov. 2008}, number = {CD-ROM-Ausg.}, publisher = {BMBF}, address = {Berlin}, pages = {9 S.}, year = {2008}, language = {de} } @inproceedings{KolditzAlbrachtFasseetal.2015, author = {Kolditz, Melanie and Albracht, Kirsten and Fasse, Alessandro and Albin, Thivaharan and Br{\"u}ggemann, Gert-Peter and Abel, Dirk}, title = {Evaluation of an industrial robot as a leg press training device}, series = {XV International Symposium on Computer Simulation in Biomechanics July 9th - 11th 2015, Edinburgh, UK}, booktitle = {XV International Symposium on Computer Simulation in Biomechanics July 9th - 11th 2015, Edinburgh, UK}, pages = {41 -- 42}, year = {2015}, language = {en} } @inproceedings{KolditzAlbinFasseetal.2015, author = {Kolditz, Melanie and Albin, Thivaharan and Fasse, Alessandro and Br{\"u}ggemann, Gert-Peter and Abel, Dirk and Albracht, Kirsten}, title = {Simulative Analysis of Joint Loading During Leg Press Exercise for Control Applications}, series = {IFAC-PapersOnLine}, volume = {48}, booktitle = {IFAC-PapersOnLine}, number = {20}, doi = {10.1016/j.ifacol.2015.10.179}, pages = {435 -- 440}, year = {2015}, language = {en} }