@inproceedings{KraftSchneider2005, author = {Kraft, Bodo and Schneider, Gerd}, title = {Semantic Roomobjects for Conceptual Design Support : A Knowledge-based Approach}, isbn = {978-1-4020-3460-2}, year = {2005}, abstract = {In: Computer Aided Architectural Design Futures 2005, Part 4, 207-216, DOI: http://dx.doi.org/10.1007/1-4020-3698-1_19 The conceptual design at the beginning of the building construction process is essential for the success of a building project. Even if some CAD tools allow elaborating conceptual sketches, they focus on the shape of the building elements rather than on their functionality. We introduce semantic roomobjects and roomlinks, using the CAD tool ArchiCAD as an example. These extensions provide a basis for specifying the organisation and functionality of a building and free architects from being forced to directly produce detailed constructive sketches. Furthermore, we introduce consistency analyses of the conceptual sketch, based on an ontology containing conceptually relevant knowledge specific to one class of buildings.}, subject = {CAD}, language = {en} }
@inproceedings{KraftRetkowitz2005, author = {Kraft, Bodo and Retkowitz, Daniel}, title = {Operationale Semantikdefinition f{\"u}r konzeptuelles Regelwissen}, year = {2005}, abstract = {In: Forum Bauinformatik 2005 : junge Wissenschaftler forschen / [Lehrstuhl Bauinformatik, Brandenburgische Technische Universit{\"a}t Cottbus. Frank Schley ... (Hrsg.)]. - Cottbus : Techn. Universit{\"a}t 2005. S. 1-10 ISBN 3-934934-11-0 Using an operational approach to semantics definition, a rule system is formalized, taking the conceptual design of buildings as an example. To this end, two parts, the rule knowledge on the one hand and a conceptual design plan on the other, are first introduced informally and then described formally. Building on this, the basis for a consistency check of the conceptual design against the rule knowledge is specified formally.}, subject = {CAD}, language = {de} }
@inproceedings{KraftRetkowitz2006, author = {Kraft, Bodo and Retkowitz, Daniel}, title = {Graph Transformations for Dynamic Knowledge Processing}, year = {2006}, abstract = {In: Proceedings of the 39th Annual Hawaii International Conference on System Sciences, 2006. HICSS '06 http://dx.doi.org/10.1109/HICSS.2006.200 The conceptual design phase at the beginning of the building construction process is not adequately supported by any CAD tool. Conceptual design support has to address two aspects: first, the architect must be able to develop conceptual sketches that provide abstraction from constructive details. Second, conceptually relevant knowledge should be available to check these conceptual sketches. The paper deals with the formalization of knowledge for conceptual design. To enable domain experts to formalize knowledge, a graph-based specification is presented that allows the development of a domain ontology and design rules specific to one class of buildings at runtime. The provided tool support illustrates the introduced concepts and demonstrates the consistency analysis between knowledge and conceptual design.}, subject = {CAD}, language = {en} }
@inproceedings{KraftRetkowitz2006a, author = {Kraft, Bodo and Retkowitz, Daniel}, title = {Rule-Dependencies for Visual Knowledge Specification in Conceptual Design}, year = {2006}, abstract = {In: Proc. of the 11th Intl. Conf. on Computing in Civil and Building Engineering (ICCCBE-XI) ed.
Hugues Rivard, Montreal, Canada, pages 1-12, ASCE (CD-ROM), 2006 Currently, the conceptual design phase is not adequately supported by any CAD tool. Neither support while elaborating conceptual sketches nor an automatic proof of correctness with respect to the effective restrictions is provided by any commercial tool. To enable domain experts to store the common as well as their personal domain knowledge, we develop a visual language for knowledge formalization. In this paper, a major extension to the already existing concepts is introduced. The possibility of defining rule dependencies extends the expressiveness of the knowledge definition language and contributes to the usability of our approach.}, subject = {CAD}, language = {en} }
@inproceedings{KraftNagl2003, author = {Kraft, Bodo and Nagl, Manfred}, title = {Support of Conceptual Design in Civil Engineering by Graph-based Tools}, year = {2003}, abstract = {WS GTaD-2003 - The 1st Workshop on Graph Transformations and Design, ed. Grabska, E., pages 6-7, Jagiellonian University Krakow. 2 pages}, subject = {CAD}, language = {en} }
@inproceedings{KraftNagl2004, author = {Kraft, Bodo and Nagl, Manfred}, title = {Parameterized specification of conceptual design tools in civil engineering}, year = {2004}, abstract = {Applications of Graph Transformations with Industrial Relevance, Lecture Notes in Computer Science, 2004, Volume 3062/2004, 90-105, DOI: 10.1007/978-3-540-25959-6_7 In this paper we discuss how tools for conceptual design in civil engineering can be developed using graph transformation specifications. These tools consist of three parts: (a) for elaborating specific conceptual knowledge (knowledge engineer), (b) for working out conceptual design results (architect), and (c) automatic consistency analyses which guarantee that design results are consistent with the underlying specific conceptual knowledge. For the realization of such tools we use a machinery based on graph transformations. In a traditional PROGRES tool specification the conceptual knowledge for a class of buildings is hard-wired within the specification. This is not appropriate for the experimentation platform approach we present in this paper, as objects and relations for conceptual knowledge are subject to many changes, implied by evaluation of their use and corresponding improvements. Therefore, we introduce a parametric specification method with the following characteristics: (1) The underlying specific knowledge for a class of buildings is not fixed. Instead, it is built up as a database by using the knowledge tools. (2) The specification for the architect tools also does not incorporate specific conceptual knowledge. (3) An incremental checker verifies whether a design result is consistent with the current state of the underlying conceptual knowledge (database).}, subject = {CAD}, language = {en} }
@inproceedings{KraftNagl2003a, author = {Kraft, Bodo and Nagl, Manfred}, title = {Semantic tool support for conceptual design}, year = {2003}, abstract = {ITCE-2003 - 4th Joint Symposium on Information Technology in Civil Engineering, ed. Flood, I., pages 1-12, ASCE (CD-ROM), Nashville, USA In this paper we discuss graph-based tools to support architects during the conceptual design phase. Conceptual design takes place before constructive design; the concepts used are more abstract. We develop two graph-based approaches: a top-down approach using the graph rewriting system PROGRES and a more industrially oriented approach in which we extend the CAD system ArchiCAD.
In both approaches, knowledge can be defined by a knowledge engineer, in the top-down approach in the domain model graph, in the bottom-up approach in an XML file. The defined knowledge is used to incrementally check the sketch and to inform the architect about violations of the defined knowledge. Our goal is to discover design errors as soon as possible and to support the architect in designing buildings that take conceptual knowledge into account.}, subject = {CAD}, language = {en} }
@inproceedings{KraftMeyerNagl2002, author = {Kraft, Bodo and Meyer, Oliver and Nagl, Manfred}, title = {Graph technology support for conceptual design in civil engineering}, isbn = {3-18-318004-9}, year = {2002}, abstract = {In: Advances in intelligent computing in engineering : proceedings of the 9th International EG-ICE Workshop ; Darmstadt, (01 - 03 August) 2002 / Martina Schnellenbach-Held ... (eds.) . - D{\"u}sseldorf: VDI-Verl., 2002 .- Fortschritt-Berichte VDI, Reihe 4, Bauingenieurwesen ; 180 ; S. 1-35 The paper describes a novel way to support conceptual design in civil engineering. The designer uses semantic tools that guarantee not only certain internal structures of the design result but also the fulfillment of various constraints. Two different approaches and corresponding tools are discussed: (a) Visually specified tools with automatic code generation to determine a design structure as well as to fix various constraints a design has to obey. These tools are also valuable for design knowledge specialists. (b) Extensions of existing CAD tools to provide semantic knowledge to be used by an architect. It is sketched how these different tools can be combined in the future. The main part of the paper discusses the concepts and realization of two prototypes following the two above approaches. The paper especially discusses how specific graphs and the specification of their structure are useful for both tool realization projects.}, subject = {CAD}, language = {en} }
@inproceedings{Kraft2004, author = {Kraft, Bodo}, title = {Conceptual design tools for civil engineering}, year = {2004}, abstract = {Applications of Graph Transformations with Industrial Relevance, Lecture Notes in Computer Science, 2004, Volume 3062/2004, 434-439, DOI: http://dx.doi.org/10.1007/978-3-540-25959-6_33 This paper gives a brief overview of the tools we have developed to support conceptual design in civil engineering. Based on the UPGRADE framework, two applications, one for the knowledge engineer and another for architects, allow domain-specific knowledge to be stored and used during conceptual design. Consistency analyses check the design against the defined knowledge and inform the architect if rules are violated.}, subject = {CAD}, language = {en} }
@inproceedings{KoplinSiemonsOcenValentinetal.2006, author = {Koplin, Tobias J. and Siemons, Maike and Oc{\´e}n-Val{\´e}ntin, C{\´e}sar and Sanders, Daniel and Simon, Ulrich}, title = {Workflow for high throughput screening of gas sensing materials}, url = {http://nbn-resolving.de/urn:nbn:de:hbz:a96-opus-1407}, year = {2006}, abstract = {The workflow of a high throughput screening setup for the rapid identification of new and improved sensor materials is presented. The polyol method was applied to prepare nanoparticular metal oxides as base materials, which were functionalised by surface doping. Using multi-electrode substrates and high throughput impedance spectroscopy (HT-IS), a wide range of materials could be screened in a short time.
Applying HT-IS in the search for new selective gas sensing materials, a NO2-tolerant NO sensing material with reduced sensitivities towards other test gases was identified, based on iridium-doped zinc oxide. Analogous behaviour was observed for iridium-doped indium oxide.}, subject = {Biosensor}, language = {en} }
@inproceedings{KolditzAlbrachtFasseetal.2015, author = {Kolditz, Melanie and Albracht, Kirsten and Fasse, Alessandro and Albin, Thivaharan and Br{\"u}ggemann, Gert-Peter and Abel, Dirk}, title = {Evaluation of an industrial robot as a leg press training device}, series = {XV International Symposium on Computer Simulation in Biomechanics July 9th - 11th 2015, Edinburgh, UK}, booktitle = {XV International Symposium on Computer Simulation in Biomechanics July 9th - 11th 2015, Edinburgh, UK}, pages = {41 -- 42}, year = {2015}, language = {en} }
@inproceedings{KolditzAlbinFasseetal.2015, author = {Kolditz, Melanie and Albin, Thivaharan and Fasse, Alessandro and Br{\"u}ggemann, Gert-Peter and Abel, Dirk and Albracht, Kirsten}, title = {Simulative Analysis of Joint Loading During Leg Press Exercise for Control Applications}, series = {IFAC-PapersOnLine}, volume = {48}, booktitle = {IFAC-PapersOnLine}, number = {20}, doi = {10.1016/j.ifacol.2015.10.179}, pages = {435 -- 440}, year = {2015}, language = {en} }
@inproceedings{KolditzAlbinAlbrachtetal.2016, author = {Kolditz, Melanie and Albin, Thivaharan and Albracht, Kirsten and Br{\"u}ggemann, Gert-Peter and Abel, Dirk}, title = {Isokinematic leg extension training with an industrial robot}, series = {6th IEEE RAS/EMBS International Conference on Biomedical Robotics and Biomechatronics (BioRob) June 26-29, 2016. UTown, Singapore}, booktitle = {6th IEEE RAS/EMBS International Conference on Biomedical Robotics and Biomechatronics (BioRob) June 26-29, 2016. UTown, Singapore}, doi = {10.1109/BIOROB.2016.7523750}, pages = {950 -- 955}, year = {2016}, language = {en} }
@inproceedings{KohlSchmidtsKloeseretal.2021, author = {Kohl, Philipp and Schmidts, Oliver and Kl{\"o}ser, Lars and Werth, Henri and Kraft, Bodo and Z{\"u}ndorf, Albert}, title = {STAMP 4 NLP - an agile framework for rapid quality-driven NLP applications development}, series = {Quality of Information and Communications Technology. QUATIC 2021}, booktitle = {Quality of Information and Communications Technology. QUATIC 2021}, publisher = {Springer}, address = {Cham}, isbn = {978-3-030-85346-4}, doi = {10.1007/978-3-030-85347-1_12}, pages = {156 -- 166}, year = {2021}, abstract = {The progress in natural language processing (NLP) research over the last years offers novel business opportunities for companies, such as automated user interaction or improved data analysis. Building sophisticated NLP applications requires dealing with modern machine learning (ML) technologies, which hinders enterprises from establishing successful NLP projects. Our experience in applied NLP research projects shows that the continuous integration of research prototypes in production-like environments with quality assurance builds trust in the software and demonstrates its convenience and usefulness regarding the business goal. We introduce STAMP 4 NLP as an iterative and incremental process model for developing NLP applications. With STAMP 4 NLP, we merge software engineering principles with best practices from data science. Instantiating our process model allows prototypes to be created efficiently by utilizing templates, conventions, and implementations, enabling developers and data scientists to focus on the business goals.
Due to our iterative-incremental approach, businesses can deploy an enhanced version of the prototype to their software environment after every iteration, maximizing potential business value and trust early and avoiding the cost of successful yet never deployed experiments.}, language = {en} }
@inproceedings{KohlFreyerKraemeretal.2023, author = {Kohl, Philipp and Freyer, Nils and Kr{\"a}mer, Yoka and Werth, Henri and Wolf, Steffen and Kraft, Bodo and Meinecke, Matthias and Z{\"u}ndorf, Albert}, title = {ALE: a simulation-based active learning evaluation framework for the parameter-driven comparison of query strategies for NLP}, series = {Deep Learning Theory and Applications}, booktitle = {Deep Learning Theory and Applications}, editor = {Conte, Donatello and Fred, Ana and Gusikhin, Oleg and Sansone, Carlo}, publisher = {Springer}, address = {Cham}, isbn = {978-3-031-39058-6 (Print)}, doi = {10.1007/978-3-031-39059-3_16}, pages = {235 -- 253}, year = {2023}, abstract = {Supervised machine learning and deep learning require a large amount of labeled data, which data scientists obtain in a manual and time-consuming annotation process. To mitigate this challenge, Active Learning (AL) proposes promising data points for annotators to annotate next instead of a sequential or random sample. This method is supposed to save annotation effort while maintaining model performance. However, practitioners face many AL strategies for different tasks and need an empirical basis to choose between them. Surveys categorize AL strategies into taxonomies without performance indications. Presentations of novel AL strategies compare their performance to a small subset of strategies. Our contribution addresses the empirical basis by introducing a reproducible active learning evaluation (ALE) framework for the comparative evaluation of AL strategies in NLP. The framework allows the implementation of AL strategies with low effort and a fair data-driven comparison through defining and tracking experiment parameters (e.g., initial dataset size, number of data points per query step, and the budget). ALE helps practitioners to make more informed decisions, and researchers can focus on developing new, effective AL strategies and deriving best practices for specific use cases. With best practices, practitioners can lower their annotation costs. We present a case study to illustrate how to use the framework.}, language = {en} }
@inproceedings{KloeserKohlKraftetal.2021, author = {Kl{\"o}ser, Lars and Kohl, Philipp and Kraft, Bodo and Z{\"u}ndorf, Albert}, title = {Multi-attribute relation extraction (MARE): simplifying the application of relation extraction}, series = {Proceedings of the 2nd International Conference on Deep Learning Theory and Applications DeLTA - Volume 1}, booktitle = {Proceedings of the 2nd International Conference on Deep Learning Theory and Applications DeLTA - Volume 1}, publisher = {SciTePress}, address = {Set{\´u}bal}, isbn = {978-989-758-526-5}, doi = {10.5220/0010559201480156}, pages = {148 -- 156}, year = {2021}, abstract = {Relation extraction in natural language understanding makes innovative and encouraging novel business concepts possible and facilitates new digitized decision-making processes. Current approaches allow the extraction of relations with a fixed number of entities as attributes. Extracting relations with an arbitrary number of attributes requires complex systems and costly relation-trigger annotations to assist these systems.
We introduce multi-attribute relation extraction (MARE) as an assumption-less problem formulation with two approaches, facilitating an explicit mapping from business use cases to the data annotations. Avoiding elaborate annotation constraints simplifies the application of relation extraction approaches. The evaluation compares our models to current state-of-the-art event extraction and binary relation extraction methods. Our approaches show improvements over these methods on the extraction of general multi-attribute relations.}, language = {en} }
@inproceedings{KloeserBuesgenKohletal.2023, author = {Kl{\"o}ser, Lars and B{\"u}sgen, Andr{\´e} and Kohl, Philipp and Kraft, Bodo and Z{\"u}ndorf, Albert}, title = {Explaining relation classification models with semantic extents}, series = {Deep Learning Theory and Applications}, booktitle = {Deep Learning Theory and Applications}, editor = {Conte, Donatello and Fred, Ana and Gusikhin, Oleg and Sansone, Carlo}, publisher = {Springer}, address = {Cham}, isbn = {978-3-031-39058-6 (Print)}, doi = {10.1007/978-3-031-39059-3_13}, pages = {189 -- 208}, year = {2023}, abstract = {In recent years, the development of large pretrained language models, such as BERT and GPT, has significantly improved information extraction systems on various tasks, including relation classification. State-of-the-art systems are highly accurate on scientific benchmarks. A lack of explainability is currently a complicating factor in many real-world applications. Comprehensible systems are necessary to prevent biased, counterintuitive, or harmful decisions. We introduce semantic extents, a concept to analyze decision patterns for the relation classification task. Semantic extents are the most influential parts of texts concerning classification decisions. Our definition allows similar procedures to determine semantic extents for humans and models. We provide an annotation tool and a software framework to determine semantic extents for humans and models conveniently and reproducibly. Comparing both reveals that models tend to learn shortcut patterns from data. These patterns are hard to detect with current interpretability methods, such as input reductions. Our approach can help detect and eliminate spurious decision patterns during model development. Semantic extents can increase the reliability and security of natural language processing systems. Semantic extents are an essential step in enabling applications in critical areas like healthcare or finance. Moreover, our work opens new research directions for developing methods to explain deep learning models.}, language = {en} }
@inproceedings{KloockSchoening2007, author = {Kloock, Joachim P. and Sch{\"o}ning, Michael Josef}, title = {Heavy metal detection with semiconductor devices based on PLD-prepared chalcogenide glass thin films}, series = {Armenian Journal of Physics}, booktitle = {Armenian Journal of Physics}, issn = {1829-1171}, pages = {95 -- 98}, year = {2007}, language = {en} }
@inproceedings{KloockSchubertErmelenkoetal.2006, author = {Kloock, Joachim P. and Schubert, J. and Ermelenko, Y. and Vlasov, Y. G. and Bratov, A. and Sch{\"o}ning, Michael Josef}, title = {Thin-film sensors with chalcogenide glass materials - a general survey}, series = {Biochemical sensing utilisation of micro- and nanotechnologies : Warsaw, [23rd - 26th] November 2005 / ed. by M. Mascini ...}, booktitle = {Biochemical sensing utilisation of micro- and nanotechnologies : Warsaw, [23rd - 26th] November 2005 / ed. by M.
Mascini ...}, address = {Warsaw}, pages = {92 -- 97}, year = {2006}, language = {en} } @inproceedings{KloockMorenoHuachupomaetal.2005, author = {Kloock, Joachim P. and Moreno, Lia and Huachupoma, S. and Xu, J. and Wagner, Torsten and Bratov, A. and Doll, T. and Vlasov, Y. and Sch{\"o}ning, Michael Josef}, title = {Halbleiterbasierte Schwermetallsensorik auf der Basis von Chalkogenidgl{\"a}sern f{\"u}r zuk{\"u}nftige „Lab on Chip"-Anwendungen}, series = {7. Dresdner Sensor-Symposium - Neue Herausforderungen und Anwendungen in der Sensortechnik}, booktitle = {7. Dresdner Sensor-Symposium - Neue Herausforderungen und Anwendungen in der Sensortechnik}, editor = {Gerlach, Gerald}, publisher = {TUDpress, Verl. der Wissenschaften}, address = {Dresden}, isbn = {3-938863-29-3}, pages = {221 -- 224}, year = {2005}, language = {de} }