@article{KraftNagl2007, author = {Kraft, Bodo and Nagl, Manfred}, title = {Visual Knowledge Specification for Conceptual Design: Definition and Tool Support}, year = {2007}, abstract = {In: Advanced Engineering Informatics. Vol 21, Issue 1, 2007, Pages 67-83 http://dx.doi.org/10.1016/j.aei.2006.10.001 eds. J.C. Kunz, I.F.C. Smith and T. Tomiyama, Elsevier, Seite 1-22 Current CAD tools are not able to support the conceptual design phase, and none of them provides a consistency analysis for sketches produced by architects. This phase is fundamental and crucial for the whole design and construction process of a building. To give architects a better support, we developed a CAD tool for conceptual design and a knowledge specification tool. The knowledge is specific to one class of buildings and it can be reused. Based on a dynamic and domain-specific knowledge ontology, different types of design rules formalize this knowledge in a graph-based form. An expressive visual language provides a user-friendly, human readable representation. Finally, a consistency analysis tool enables conceptual designs to be checked against this formal conceptual knowledge. In this article, we concentrate on the knowledge specification part. For that, we introduce the concepts and usage of a novel visual language and describe its semantics. To demonstrate the usability of our approach, two graph-based visual tools for knowledge specification and conceptual design are explained.}, subject = {CAD}, language = {en} } @inproceedings{HeerRedkowitzKraft2008, author = {Heer, Thomas and Redkowitz, Daniel and Kraft, Bodo}, title = {Tool Support for the Integration of Light-Weight Ontologies}, isbn = {978-3-642-00670-8}, year = {2008}, abstract = {Abstract of the authors: In many areas of computer science ontologies become more and more important. The use of ontologies for domain modeling often brings up the issue of ontology integration. 
The task of merging several ontologies, covering specific subdomains, into one united ontology has to be solved. Many approaches for ontology integration aim at automating the process of ontology alignment. However, a complete automation is not feasible, and user interaction is always required. Nevertheless, most ontology integration tools offer only very limited support for the interactive part of the integration process. In this paper, we present a novel approach for the interactive integration of ontologies. The result of the ontology integration is incrementally updated after each definition of a correspondence between ontology elements. The user is guided through the ontologies to be integrated. By restricting the possible user actions, the integrity of all defined correspondences is ensured by the tool we developed. We evaluated our tool by integrating different regulations concerning building design.}, subject = {Ontologie }, language = {de} } @inproceedings{KraftMeyerNagl2002, author = {Kraft, Bodo and Meyer, Oliver and Nagl, Manfred}, title = {Graph technology support for conceptual design in civil engineering}, isbn = {3-18-318004-9}, year = {2002}, abstract = {In: Advances in intelligent computing in engineering : proceedings of the 9.International EG-ICE Workshop ; Darmstadt, (01 - 03 August) 2002 / Martina Schnellenbach-Held ... (eds.) . - D{\"u}sseldorf: VDI-Verl., 2002 .- Fortschritt-Berichte VDI, Reihe 4, Bauingenieurwesen ; 180 ; S. 1-35 The paper describes a novel way to support conceptual design in civil engineering. The designer uses semantical tools guaranteeing certain internal structures of the design result but also the fulfillment of various constraints. Two different approaches and corresponding tools are discussed: (a) Visually specified tools with automatic code generation to determine a design structure as well as fixing various constraints a design has to obey. These tools are also valuable for design knowledge specialist. 
(b) Extensions of existing CAD tools to provide semantical knowledge to be used by an architect. It is sketched how these different tools can be combined in the future. The main part of the paper discusses the concepts and realization of two prototypes following the two above approaches. The paper especially discusses that specific graphs and the specification of their structure are useful for both tool realization projects.}, subject = {CAD}, language = {en} } @inproceedings{KraftNagl2003, author = {Kraft, Bodo and Nagl, Manfred}, title = {Support of Conceptual Design in Civil Engineering by Graph-based Tools}, year = {2003}, abstract = {WS GTaD-2003 - The 1st Workshop on Graph Transformations and Design ed Grabska, E., Seite 6-7, Jagiellonian University Krakow. 2 pages}, subject = {CAD}, language = {de} } @inproceedings{KraftNagl2004, author = {Kraft, Bodo and Nagl, Manfred}, title = {Parameterized specification of conceptual design tools in civil engineering}, year = {2004}, abstract = {Applications of Graph Transformations with Industrial Relevance Lecture Notes in Computer Science, 2004, Volume 3062/2004, 90-105, DOI: 10.1007/978-3-540-25959-6_7 In this paper we discuss how tools for conceptual design in civil engineering can be developed using graph transformation specifications. These tools consist of three parts: (a) for elaborating specific conceptual knowledge (knowledge engineer), (b) for working out conceptual design results (architect), and (c) automatic consistency analyses which guarantee that design results are consistent with the underlying specific conceptual knowledge. For the realization of such tools we use a machinery based on graph transformations. In a traditional PROGRES tool specification the conceptual knowledge for a class of buildings is hard-wired within the specification. 
This is not appropriate for the experimentation platform approach we present in this paper, as objects and relations for conceptual knowledge are due to many changes, implied by evaluation of their use and corresponding improvements. Therefore, we introduce a parametric specification method with the following characteristics: (1) The underlying specific knowledge for a class of buildings is not fixed. Instead, it is built up as a data base by using the knowledge tools. (2) The specification for the architect tools also does not incorporate specific conceptual knowledge. (3) An incremental checker guarantees whether a design result is consistent with the current state of the underlying conceptual knowledge (data base).}, subject = {CAD}, language = {de} } @inproceedings{Kraft2004, author = {Kraft, Bodo}, title = {Conceptual design tools for civil engineering}, year = {2004}, abstract = {Applications of Graph Transformations with Industrial Relevance Lecture Notes in Computer Science, 2004, Volume 3062/2004, 434-439, DOI: http://dx.doi.org/10.1007/978-3-540-25959-6_33 This paper gives a brief overview of the tools we have developed to support conceptual design in civil engineering. Based on the UPGRADE framework, two applications, one for the knowledge engineer and another for architects allow to store domain specific knowledge and to use this knowledge during conceptual design. 
Consistency analyses check the design against the defined knowledge and inform the architect if rules are violated.}, subject = {CAD}, language = {en} } @article{Kraft2003, author = {Kraft, Bodo}, title = {Conceptual design mit ArchiCAD 8 : Forschungsprojekt an der RWTH Aachen}, year = {2003}, abstract = {Projektbericht in GraphisoftNews - Architektur und Bauen in einer vernetzten Welt 3/2003 4 Seiten}, subject = {CAD}, language = {de} } @misc{Kraft2003a, author = {Kraft, Bodo}, title = {LexiCAD Step by Step : B{\"u}rogeb{\"a}ude : Erstellen eines Grundrisses mit RoomObjects und LexiCAD}, year = {2003}, abstract = {11 Seiten, 22 Abbildungen 1. Konstruktion des Au{\ss}enumrisses 2. Festlegung der inneren R{\"a}ume 3. Einf{\"u}gen der RoomLinks 4. Wallgenerator}, subject = {CAD}, language = {de} } @inproceedings{KraftNagl2003a, author = {Kraft, Bodo and Nagl, Manfred}, title = {Semantic tool support for conceptual design}, year = {2003}, abstract = {ITCE-2003 - 4th Joint Symposium on Information Technology in Civil Engineering ed Flood, I., Seite 1-12, ASCE (CD-ROM), Nashville, USA In this paper we discussed graph based tools to support architects during the conceptual design phase. Conceptual Design is defined before constructive design; the used concepts are more abstract. We develop two graph based approaches, a top-down using the graph rewriting system PROGRES and a more industrially oriented approach, where we extend the CAD system ArchiCAD. In both approaches, knowledge can be defined by a knowledge engineer, in the top-down approach in the domain model graph, in the bottom-up approach in an XML file. The defined knowledge is used to incrementally check the sketch and to inform the architect about violations of the defined knowledge. 
Our goal is to discover design error as soon as possible and to support the architect to design buildings with consideration of conceptual knowledge.}, subject = {CAD}, language = {en} } @inproceedings{KraftWilhelms2004, author = {Kraft, Bodo and Wilhelms, N.}, title = {Interactive distributed knowledge support for conceptual building design}, isbn = {3-86068-213-X}, year = {2004}, abstract = {In: Net-distributed Co-operation : Xth International Conference on Computing in Civil and Building Engineering, Weimar, June 02 - 04, 2004 ; proceedings / [ed. by Karl Beuke ...] . - Weimar: Bauhaus-Univ. Weimar 2004. - 1. Aufl. . Seite 1-14 ISBN 3-86068-213-X International Conference on Computing in Civil and Building Engineering <10, 2004, Weimar> Summary In our project, we develop new tools for the conceptual design phase. During conceptual design, the coarse functionality and organization of a building is more important than a detailed worked out construction. We identify two roles, first the knowledge engineer who is responsible for knowledge definition and maintenance; second the architect who elaborates the conceptual de-sign. The tool for the knowledge engineer is based on graph technology, it is specified using PROGRES and the UPGRADE framework. The tools for the architect are integrated to the in-dustrial CAD tool ArchiCAD. Consistency between knowledge and conceptual design is en-sured by the constraint checker, another extension to ArchiCAD.}, subject = {CAD}, language = {en} } @inproceedings{KirchhofKraft2004, author = {Kirchhof, M. and Kraft, Bodo}, title = {UML-based modeling of architectural knowledge and design}, year = {2004}, abstract = {IASSE-2004 - 13th International Conference on Intelligent and Adaptive Systems and Software Engineering eds. W. Dosch, N. Debnath, pp. 245-250, ISCA, Cary, NC, 1-3 July 2004, Nice, France We introduce a UML-based model for conceptual design support in civil engineering. 
Therefore, we identify required extensions to standard UML. Class diagrams are used for elaborating building typespecific knowledge: Object diagrams, implicitly contained in the architect's sketch, are validated against the defined knowledge. To enable the use of industrial, domain-specific tools, we provide an integrated conceptual design extension. The developed tool support is based on graph rewriting. With our approach architects are enabled to deal with semantic objects during early design phase, assisted by incremental consistency checks.}, subject = {UML}, language = {en} } @techreport{NaglKraft2004, author = {Nagl, Manfred and Kraft, Bodo}, title = {Graphbasierte Werkzeuge zur Unterst{\"u}tzung des konzeptuellen Geb{\"a}ude-Entwurfs : Bericht {\"u}ber den 2. F{\"o}rderzeitraum des Schwerpunktprogramms : DFG-Schwerpunktprogramm 1103 : Vernetzt-kooperative Planungsprozesse im Konstruktiven Ingenieurbau. - Auch unter dem Titel: Neue Software-Werkzeuge zur Unterst{\"u}tzung des konzeptuellen Geb{\"a}udeentwurfs}, year = {2004}, abstract = {Der konstruktive Entwurf wird in derzeitigen CAD-Systemen gut unterst{\"u}tzt, nicht aber der konzeptuelle Geb{\"a}ude-Entwurf. Dieser abstrahiert von konstruktiven Elementen wie Linie, Wand oder Decke, um auf die Konzepte, d.h. die eigentlichen Funktionen, heraus zu arbeiten. Diese abstraktere, funktionale Sichtweise auf ein Geb{\"a}ude ist w{\"a}hrend der fr{\"u}hen Entwurfsphase essentiell, um Struktur und Organisation des gesamten Geb{\"a}udes zu erfassen. Bereits in dieser Phase muss Fachwissen (z. B. rechtliche, {\"o}konomische und technische Bestimmungen) ber{\"u}cksichtigt werden. Im Rahmen des vorliegenden Projekts werden Software-Werkzeuge integriert in industrielle CAD-Systeme entwickelt, die den konzeptuellen Geb{\"a}ude-Entwurf erm{\"o}glichen und diesen gegen Fachwissen pr{\"u}fen. Das Projekt ist in zwei Teile gegliedert. 
Im Top-Down-Ansatz werden Datenstrukturen und Methoden zur Strukturierung, Repr{\"a}sentation und Evaluation von geb{\"a}udespezifischem Fachwissen erarbeitet. Dieser Teil baut auf den graphbasierten Werkzeugen PROGRES und UPGRADE des Lehrstuhls auf. Der Bottom-Up-Ansatz ist industriell orientiert und hat zum Ziel, das kommerzielle CAD-System ArchiCAD zu erweitern. Hierbei soll der fr{\"u}he, konzeptuelle Geb{\"a}ude-Entwurf in einem CAD-System erm{\"o}glicht werden. Der Entwurf kann dar{\"u}ber hinaus gegen das definierte Fachwissen gepr{\"u}ft werden. Im Rahmen des graphbasierten Top-Down-Ansatzes wurde zun{\"a}chst eine neue Spezifikationsmethode f{\"u}r die Sprache PROGRES entwickelt. Das PROGRES-System erlaubt die Spezifikation von Werkzeugen in deklarativer Form. {\"U}blicherweise wird dom{\"a}nenspezifisches Fachwissen in der PROGRES-Spezifikation codiert, das daraus generierte visuelle Werkzeug stellt dann die entsprechende Funktionalit{\"a}t zur Verf{\"u}gung. Mit dieser Methode sind am Lehrstuhl f{\"u}r Informatik III Werkzeuge f{\"u}r verschie-dene Anwendungsdom{\"a}nen entstanden. In unserem Fall versetzen wir einen Dom{\"a}nen-Experten, z. B. einen erfahrenen Architekten, in die Lage, Fachwissen zur Laufzeit einzugeben, dieses zu evaluieren, abzu{\"a}ndern oder zu erg{\"a}nzen. Im Rahmen der bisherigen Arbeit wurde dazu eine parametrisierte PROGRES-Spezifikation und zwei darauf aufbauende Werkzeuge entwickelt, welche die dynamische Eingabe von geb{\"a}ude-technisch relevantem Fachwissen erlauben und einen graphbasierten, konzeptuellen Geb{\"a}ude-Entwurf erm{\"o}glichen. In diesem konzeptuellen Geb{\"a}ude-Entwurf wird von Raumgr{\"o}ßen und Positionen abstrahiert, um die funktionale Struktur eines Geb{\"a}udes zu beschreiben. Das Fachwissen kann von einem Architekten visuell definiert werden. Es k{\"o}nnen semantische Einheiten, im einfachsten Fall R{\"a}ume, nach verschiedenen Kriterien kategorisiert und klassifiziert werden. 
Mit Hilfe von Attributen und Relationen k{\"o}nnen die semantischen Einheiten pr{\"a}ziser beschrieben und in Beziehung zueinander gesetzt werden. Die in PROGRES spezifizierten Konsistenz-Analysen erlauben die Pr{\"u}fung eines graphbasierten konzeptuellen Geb{\"a}ude-Entwurfs gegen das dynamisch eingef{\"u}gte Fachwissen. Im zweiten Teil des Forschungsprojekts, dem Bottom-Up-Ansatz, wird das CAD-System ArchiCAD erweitert, um den integrierten konzeptuellen Geb{\"a}ude-Entwurf zu erm{\"o}glichen. Der Architekt erh{\"a}lt dazu neue Entwurfselemente, die Raumobjekte, welche die relevanten semantischen Einheiten w{\"a}hrend der fr{\"u}hen Entwurfsphase repr{\"a}sentieren. Mit Hilfe der Raumobjekte kann der Architekt in ArchiCAD den Grundriss und das Raumprogramm eines Geb{\"a}udes entwerfen, ohne von konstruktiven Details in seiner Kreativit{\"a}t eingeschr{\"a}nkt zu werden. Die Arbeitsweise mit Raumobjekten entspricht dem informellen konzeptuellen Entwurf auf einer Papierskizze und ist daher f{\"u}r den Architekten intuitiv und einfach zu verwenden. Durch die Integration in ArchiCAD ergibt sich eine weitere Unterst{\"u}tzung: Das im Top-Down-Ansatz spezifizierte Fach-wissen wird verwendet, um den konzeptuellen Geb{\"a}ude-Entwurf des Architekten auf Regelverletzungen zu {\"u}berpr{\"u}fen. Entwurfsfehler werden angezeigt. Zum Abschluss des konzeptuellen Geb{\"a}ude-Entwurfs mit Raumobjekten wird durch ein weiteres neu entwickeltes Werkzeug eine initiale Wandstruktur automatisch erzeugt, die als Grundlage f{\"u}r die folgenden konstruktiven Entwurfsphasen dient. 
Alle beschriebenen Erwei-terungen sind in ArchiCAD integriert, sie sind f{\"u}r den Architekten daher leicht zu erlernen und einfach zu bedienen.}, subject = {CAD}, language = {de} } @inproceedings{KraftSchneider2005, author = {Kraft, Bodo and Schneider, Gerd}, title = {Semantic Roomobjects for Conceptual Design Support : A Knowledge-based Approach}, isbn = {978-1-4020-3460-2}, year = {2005}, abstract = {In: Computer Aided Architectural Design Futures 2005 2005, Part 4, 207-216, DOI: http://dx.doi.org/10.1007/1-4020-3698-1_19 The conceptual design at the beginning of the building construction process is essential for the success of a building project. Even if some CAD tools allow elaborating conceptual sketches, they rather focus on the shape of the building elements and not on their functionality. We introduce semantic roomobjects and roomlinks, by way of example to the CAD tool ArchiCAD. These extensions provide a basis for specifying the organisation and functionality of a building and free architects being forced to directly produce detailed constructive sketches. Furthermore, we introduce consistency analyses of the conceptual sketch, based on an ontology containing conceptual relevant knowledge, specific to one class of buildings.}, subject = {CAD}, language = {en} } @inproceedings{KraftWilhelms2005, author = {Kraft, Bodo and Wilhelms, Nils}, title = {Visual Knowledge Specification for Conceptual Design}, year = {2005}, abstract = {Proc. of the 2005 ASCE Intl. Conf. on Computing in Civil Engineering (ICCC 2005) eds. L. Soibelman und F. Pena-Mora, Seite 1-14, ASCE (CD-ROM), Cancun, Mexico, 2005 Current CAD tools are not able to support the fundamental conceptual design phase, and none of them provides consistency analyses of sketches produced by architects. 
To give architects a greater support at the conceptual design phase, we develop a CAD tool for conceptual design and a knowledge specification tool allowing the definition of conceptually relevant knowledge. The knowledge is specific to one class of buildings and can be reused. Based on a dynamic knowledge model, different types of design rules formalize the knowledge in a graph-based realization. An expressive visual language provides a user-friendly, human readable representation. Finally, consistency analyses enable conceptual designs to be checked against this defined knowledge. In this paper we concentrate on the knowledge specification part of our project.}, subject = {CAD}, language = {en} } @inproceedings{KraftRetkowitz2005, author = {Kraft, Bodo and Retkowitz, Daniel}, title = {Operationale Semantikdefinition f{\"u}r konzeptuelles Regelwissen}, year = {2005}, abstract = {In: Forum Bauinformatik 2005 : junge Wissenschaftler forschen / [Lehrstuhl Bauinformatik, Brandenburgische Technische Universit{\"a}t Cottbus. Frank Schley ... (Hrsg.)]. - Cottbus : Techn. Universit{\"a}t 2005. S. 1-10 ISBN 3-934934-11-0 Mittels eines operationalen Ansatzes zur Semantikdefinition wird am Bei-spiel des konzeptuellen Geb{\"a}udeentwurfs ein Regelsystem formalisiert. Dazu werdenzwei Teile, zum einen das Regelwissen, zum anderen ein konzeptueller Entwurfsplan zun{\"a}chst informell eingef{\"u}hrt und dann formal beschrieben. Darauf aufbauend wird die Grundlage f{\"u}r eine Konsistenzpr{\"u}fung des konzeptuellen Entwurfs gegen das Regel-wissen formal angeben}, subject = {CAD}, language = {de} } @inproceedings{KraftRetkowitz2006, author = {Kraft, Bodo and Retkowitz, Daniel}, title = {Graph Transformations for Dynamic Knowledge Processing}, year = {2006}, abstract = {In: Proceedings of the 39th Annual Hawaii International Conference on System Sciences, 2006. 
HICSS '06 http://dx.doi.org/10.1109/HICSS.2006.200 The conceptual design phase at the beginning of the building construction process is not adequately supported by any CAD-tool. Conceptual design support needs regarding two aspects: first, the architect must be able to develop conceptual sketches that provide abstraction from constructive details. Second, conceptually relevant knowledge should be available to check these conceptual sketches. The paper deals with knowledge to formalize for conceptual design. To enable domain experts formalizing knowledge, a graph-based specification is presented that allows the development of a domain ontology and design rules specific for one class of buildings at runtime. The provided tool support illustrates the introduced concepts and demonstrates the consistency analysis between knowledge and conceptual design.}, subject = {CAD}, language = {de} } @inproceedings{KraftRetkowitz2006a, author = {Kraft, Bodo and Retkowitz, Daniel}, title = {Rule-Dependencies for Visual Knowledge Specification in Conceptual Design}, year = {2006}, abstract = {In: Proc. of the 11th Intl. Conf. on Computing in Civil and Building Engineering (ICCCBE-XI) ed. Hugues Rivard, Montreal, Canada, Seite 1-12, ACSE (CD-ROM), 2006 Currently, the conceptual design phase is not adequately supported by any CAD tool. Neither the support while elaborating conceptual sketches, nor the automatic proof of correctness with respect to effective restrictions is currently provided by any commercial tool. To enable domain experts to store the common as well as their personal domain knowledge, we develop a visual language for knowledge formalization. In this paper, a major extension to the already existing concepts is introduced. 
The possibility to define rule dependencies extends the expressiveness of the knowledge definition language and contributes to the usability of our approach.}, subject = {CAD}, language = {en} } @book{Kraft2007, author = {Kraft, Bodo}, title = {Semantische Unterst{\"u}tzung des konzeptuellen Geb{\"a}udeentwurfs}, publisher = {Shaker}, address = {Aachen}, isbn = {978-3-8322-6045-3}, pages = {VIII, 381 S. : Ill., graph. Darst.}, year = {2007}, language = {de} } @article{KraftNagl2007a, author = {Kraft, Bodo and Nagl, Manfred}, title = {Graphbasierte Werkzeuge zur Unterst{\"u}tzung des konzeptuellen Geb{\"a}udeentwurfs}, series = {Vernetzt-kooperative Planungsprozesse im Konstruktiven Ingenieurbau : Grundlagen, Methoden, Anwendung und Perspektiven zur vernetzten Ingenieurkooperation / Uwe R{\"u}ppel (Hrsg.)}, journal = {Vernetzt-kooperative Planungsprozesse im Konstruktiven Ingenieurbau : Grundlagen, Methoden, Anwendung und Perspektiven zur vernetzten Ingenieurkooperation / Uwe R{\"u}ppel (Hrsg.)}, publisher = {Springer}, address = {Berlin}, isbn = {978-3-540-68102-1}, pages = {155 -- 175}, year = {2007}, language = {de} } @article{KraftNobisrathSalumaaetal.2004, author = {Kraft, Bodo and Nobisrath, Ulrich and Salumaa, Priit and Schultchen, Erhard}, title = {Fujaba based Tool Development for eHome Systems / Nobisrath, Ulrich ; Salumaa, Priit ; Schultchen, Erhard ; Kraft, Bodo}, series = {Electronic Notes in Theoretical Computer Science. 127 (2004), H. 1}, journal = {Electronic Notes in Theoretical Computer Science. 127 (2004), H. 
1}, issn = {1571-0661}, pages = {89 -- 99}, year = {2004}, language = {en} } @article{KraftHeerRetkowitz2008, author = {Kraft, Bodo and Heer, Thomas and Retkowitz, Daniel}, title = {Algorithm and Tool for Ontology Integration Based on Graph Rewriting / Heer, Thomas ; Retkowitz, Daniel ; Kraft, Bodo}, series = {Applications of Graph Transformations with Industrial Relevance / Third International Symposium, AGTIVE 2007, Kassel, Germany, October 10-12, 2007, Revised Selected and Invited Papers}, journal = {Applications of Graph Transformations with Industrial Relevance / Third International Symposium, AGTIVE 2007, Kassel, Germany, October 10-12, 2007, Revised Selected and Invited Papers}, isbn = {978-3-540-89019-5}, pages = {577 -- 582}, year = {2008}, language = {en} } @article{KraftHeerRetkowitz2008a, author = {Kraft, Bodo and Heer, Thomas and Retkowitz, Daniel}, title = {Incremental Ontology Integration / Heer, Thomas ; Retkowitz, Daniel ; Kraft, Bodo}, series = {Proceedings of the 10th International Conference on Enterprise Information Systems : Barcelona, Spain, June 12 - 16, 2008 / organized by INSTICC, Institute for Systems and Technologies of Information, Control and Communication ... [Ed. by Jos{\'e} Cordeiro ...]}, journal = {Proceedings of the 10th International Conference on Enterprise Information Systems : Barcelona, Spain, June 12 - 16, 2008 / organized by INSTICC, Institute for Systems and Technologies of Information, Control and Communication ... [Ed. 
by Jos{\'e} Cordeiro ...]}, publisher = {INSTICC}, address = {Setubal}, pages = {13 -- 28}, year = {2008}, language = {en} } @article{HackerKraftZoell2011, author = {Hacker, Tobias and Kraft, Bodo and Z{\"o}ll, Axel}, title = {Projektzuschnitt f{\"u}r die inkrementelle Systementwicklung im Konzernverbund}, isbn = {978-3-8322-9990-3}, pages = {78 -- 83}, year = {2011}, language = {de} } @inproceedings{SchreiberHirtbachKraftetal.2013, author = {Schreiber, Marc and Hirtbach, Stefan and Kraft, Bodo and Steinmetzler, Andreas}, title = {Software in the city: visual guidance through large scale software projects}, series = {Software Engineering 2013 : Fachtagung des GI-Fachbereichs Softwaretechnik, 26. Februar-1. M{\"a}rz 2013 in Aachen. (GI-Edition ; 213)}, booktitle = {Software Engineering 2013 : Fachtagung des GI-Fachbereichs Softwaretechnik, 26. Februar-1. M{\"a}rz 2013 in Aachen. (GI-Edition ; 213)}, editor = {Kowalewski, Stefan}, publisher = {Ges. f{\"u}r Informatik}, address = {Bonn}, isbn = {978-3-88579-607-7 ; 978-3-88579-609-1}, pages = {213 -- 224}, year = {2013}, language = {en} } @inproceedings{SchreiberBarkschatKraft2014, author = {Schreiber, Marc and Barkschat, Kai and Kraft, Bodo}, title = {Using Continuous Integration to organize and monitor the annotation process of domain specific corpora}, series = {5th International Conference on Information and Communication Systems (ICICS) : 1-3 April 2014, Irbid, Jordanien}, booktitle = {5th International Conference on Information and Communication Systems (ICICS) : 1-3 April 2014, Irbid, Jordanien}, organization = {International Conference on Information and Communication Systems <5, 2014, Irbid, Jordanien>}, isbn = {978-1-4799-3022-7}, doi = {10.1109/IACS.2014.6841958}, pages = {1 -- 6}, year = {2014}, language = {de} } @inproceedings{KraftZoell2014, author = {Kraft, Bodo and Z{\"o}ll, Axel}, title = {Von der Langstrecke zum Sprint - Agile Methoden in traditionellen Unternehmen}, series = {Projektmanagement und 
Vorgehensmodelle 2014 : soziale Aspekte und Standardisierung}, booktitle = {Projektmanagement und Vorgehensmodelle 2014 : soziale Aspekte und Standardisierung}, editor = {Engstler, Martin}, publisher = {Gesellschaft f{\"u}r Informatik}, address = {Bonn}, organization = {FH Aachen, University of Applied Sciences}, isbn = {978-3-88579-630-5}, pages = {35 -- 46}, year = {2014}, language = {de} } @article{KirchhofKraft2012, author = {Kirchhof, Michael and Kraft, Bodo}, title = {Hybrides Vorgehensmodell : Agile und klassische Methoden im Projekt passend kombinieren}, series = {ProjektMagazin}, journal = {ProjektMagazin}, number = {11}, publisher = {Berleb Media}, address = {Taufkirchen}, pages = {11 S.}, year = {2012}, abstract = {Agil ist im Trend und immer mehr Unternehmen, die ihre Projekte bisher nach klassischen Prinzipien durchf{\"u}hrten, denken {\"u}ber den Einsatz agiler Methoden nach. Doch selbst wenn die Organisation bereits beide Philosophien unterst{\"u}tzt, gilt f{\"u}r ein Projekt meist die klare Vorgabe: agil oder klassisch. Es gibt aber noch einen anderen Ansatz, mit diesen "unterschiedlichen Welten" umzugehen: Und zwar die beiden Philosophien innerhalb eines Projekts zu kombinieren. Wie dies in der Praxis aussehen und gelingen kann, zeigen Dr. Michael Kirchhof und Prof. Dr. 
Bodo Kraft in diesem Beitrag.}, language = {de} } @inproceedings{KirchhofKraft2011, author = {Kirchhof, Michael and Kraft, Bodo}, title = {Dogmatisches „Entweder agil oder klassisch" im Projektmanagement hat ausgedient - die richtige Mischung macht's}, series = {Projekt-Sternstunden : strahlende Erfolge durch Kompetenz}, booktitle = {Projekt-Sternstunden : strahlende Erfolge durch Kompetenz}, publisher = {GPM}, address = {N{\"u}rnberg}, isbn = {978-3-924841-60-7}, pages = {414 -- 425}, year = {2011}, language = {de} } @article{SchreiberBarkschatKraftetal.2015, author = {Schreiber, Marc and Barkschat, Kai and Kraft, Bodo and Z{\"u}ndorf, Albert}, title = {Quick Pad Tagger : An Efficient Graphical User Interface for Building Annotated Corpora with Multiple Annotation Layers}, series = {Computer Science \& Information Technology (CS \& IT)}, volume = {5}, journal = {Computer Science \& Information Technology (CS \& IT)}, number = {4}, publisher = {Academy \& Industry Research Collaboration Center (AIRCC)}, isbn = {978-1-921987-32-8}, issn = {2231-5403}, doi = {10.5121/csit.2015.50413}, pages = {131 -- 143}, year = {2015}, language = {en} } @inproceedings{SchreiberKraftZuendorf2016, author = {Schreiber, Marc and Kraft, Bodo and Z{\"u}ndorf, Albert}, title = {Cost-efficient quality assurance of natural language processing tools through continuous monitoring with continuous integration}, series = {3rd International Workshop on Software Engineering Research and Industrial Practice}, booktitle = {3rd International Workshop on Software Engineering Research and Industrial Practice}, doi = {10.1145/2897022.2897029}, pages = {46 -- 52}, year = {2016}, language = {en} } @inproceedings{SchreiberKraftZuendorf2017, author = {Schreiber, Marc and Kraft, Bodo and Z{\"u}ndorf, Albert}, title = {Metrics Driven Research Collaboration: Focusing on Common Project Goals Continuously}, series = {39th International Conference on Software Engineering, May 20-28, 2017 - Buenos Aires, 
Argentina}, booktitle = {39th International Conference on Software Engineering, May 20-28, 2017 - Buenos Aires, Argentina}, pages = {8 Seiten}, year = {2017}, abstract = {Research collaborations provide opportunities for both practitioners and researchers: practitioners need solutions for difficult business challenges and researchers are looking for hard problems to solve and publish. Nevertheless, research collaborations carry the risk that practitioners focus on quick solutions too much and that researchers tackle theoretical problems, resulting in products which do not fulfill the project requirements. In this paper we introduce an approach extending the ideas of agile and lean software development. It helps practitioners and researchers keep track of their common research collaboration goal: a scientifically enriched software product which fulfills the needs of the practitioner's business model. This approach gives first-class status to application-oriented metrics that measure progress and success of a research collaboration continuously. Those metrics are derived from the collaboration requirements and help to focus on a commonly defined goal. An appropriate tool set evaluates and visualizes those metrics with minimal effort, and all participants will be pushed to focus on their tasks with appropriate effort. 
Thus project status, challenges and progress are transparent to all research collaboration members at any time.}, language = {en} } @inproceedings{SchreiberKraftZuendorf2017a, author = {Schreiber, Marc and Kraft, Bodo and Z{\"u}ndorf, Albert}, title = {Metrics driven research collaboration: focusing on common project goals continuously}, series = {Proceedings : 2017 IEEE/ACM 4th International Workshop on Software Engineering Research and Industrial Practice : SER\&IP 2017 : 21 May 2017 Buenos Aires, Argentina}, booktitle = {Proceedings : 2017 IEEE/ACM 4th International Workshop on Software Engineering Research and Industrial Practice : SER\&IP 2017 : 21 May 2017 Buenos Aires, Argentina}, editor = {Bilof, Randall}, publisher = {IEEE Press}, address = {Piscataway, NJ}, isbn = {978-1-5386-2797-6}, doi = {10.1109/SER-IP.2017.6}, pages = {41 -- 47}, year = {2017}, language = {en} } @misc{NobisrathZuendorfGeorgeetal.2017, author = {Nobisrath, Ulrich and Z{\"u}ndorf, Albert and George, Tobias and Jubeh, Ruben and Kraft, Bodo}, title = {Software Stories Guide}, pages = {21}, year = {2017}, abstract = {Software Stories are a simple graphical notation for requirements analysis and design in agile software projects. Software Stories are based on example scenarios. 
Example scenarios facilitate the communication between lay people or domain experts and software experts.}, language = {en} } @inproceedings{SchmidtsBoltesKraftetal.2017, author = {Schmidts, Oliver and Boltes, Maik and Kraft, Bodo and Schreiber, Marc}, title = {Multi-pedestrian tracking by moving Bluetooth-LE beacons and stationary receivers}, series = {2017 International Conference on Indoor Positioning and Indoor Navigation (IPIN), 18-21 September 2017, Sapporo, Japan}, booktitle = {2017 International Conference on Indoor Positioning and Indoor Navigation (IPIN), 18-21 September 2017, Sapporo, Japan}, pages = {1 -- 4}, year = {2017}, language = {en} } @inproceedings{SchreiberKraftZuendorf2018, author = {Schreiber, Marc and Kraft, Bodo and Z{\"u}ndorf, Albert}, title = {NLP Lean Programming Framework: Developing NLP Applications More Effectively}, series = {Proceedings of NAACL-HLT 2018: Demonstrations, New Orleans, Louisiana, June 2 - 4, 2018}, booktitle = {Proceedings of NAACL-HLT 2018: Demonstrations, New Orleans, Louisiana, June 2 - 4, 2018}, doi = {10.18653/v1/N18-5001}, pages = {5 Seiten}, year = {2018}, abstract = {This paper presents NLP Lean Programming framework (NLPf), a new framework for creating custom natural language processing (NLP) models and pipelines by utilizing common software development build systems. This approach allows developers to train and integrate domain-specific NLP pipelines into their applications seamlessly. Additionally, NLPf provides an annotation tool which improves the annotation process significantly by providing a well-designed GUI and sophisticated way of using input devices. Due to NLPf's properties developers and domain experts are able to build domain-specific NLP applications more efficiently. 
NLPf is Opensource software and available at https://gitlab.com/schrieveslaach/NLPf.}, language = {en} } @inproceedings{SchmidtsKraftSchreiberetal.2018, author = {Schmidts, Oliver and Kraft, Bodo and Schreiber, Marc and Z{\"u}ndorf, Albert}, title = {Continuously evaluated research projects in collaborative decoupled environments}, series = {2018 ACM/IEEE 5th International Workshop on Software Engineering Research and Industrial Practice, May 29, 2018, Gothenburg, Sweden : SER\&IP' 18}, booktitle = {2018 ACM/IEEE 5th International Workshop on Software Engineering Research and Industrial Practice, May 29, 2018, Gothenburg, Sweden : SER\&IP' 18}, publisher = {ACM}, address = {New York, NY}, pages = {1 -- 9}, year = {2018}, abstract = {Often, research results from collaboration projects are not transferred into productive environments even though approaches are proven to work in demonstration prototypes. These demonstration prototypes are usually too fragile and error-prone to be transferred easily into productive environments. A lot of additional work is required. Inspired by the idea of an incremental delivery process, we introduce an architecture pattern, which combines the approach of Metrics Driven Research Collaboration with microservices for the ease of integration. It enables keeping track of project goals over the course of the collaboration while every party may focus on their expert skills: researchers may focus on complex algorithms, practitioners may focus on their business goals. Through the simplified integration (intermediate) research results can be introduced into a productive environment which enables getting an early user feedback and allows for the early evaluation of different approaches. 
The practitioners' business model benefits throughout the full project duration.}, language = {en} } @inproceedings{SchmidtsKraftSiebigterothetal.2019, author = {Schmidts, Oliver and Kraft, Bodo and Siebigteroth, Ines and Z{\"u}ndorf, Albert}, title = {Schema Matching with Frequent Changes on Semi-Structured Input Files: A Machine Learning Approach on Biological Product Data}, series = {Proceedings of the 21st International Conference on Enterprise Information Systems - Volume 1: ICEIS}, booktitle = {Proceedings of the 21st International Conference on Enterprise Information Systems - Volume 1: ICEIS}, isbn = {978-989-758-372-8}, doi = {10.5220/0007723602080215}, pages = {208 -- 215}, year = {2019}, language = {en} } @inproceedings{SiebigterothKraftSchmidtsetal.2019, author = {Siebigteroth, Ines and Kraft, Bodo and Schmidts, Oliver and Z{\"u}ndorf, Albert}, title = {A Study on Improving Corpus Creation by Pair Annotation}, series = {Proceedings of the Poster Session of the 2nd Conference on Language, Data and Knowledge (LDK-PS 2019)}, booktitle = {Proceedings of the Poster Session of the 2nd Conference on Language, Data and Knowledge (LDK-PS 2019)}, issn = {1613-0073}, pages = {40 -- 44}, year = {2019}, language = {en} } @inproceedings{SchmidtsKraftWinkensetal.2020, author = {Schmidts, Oliver and Kraft, Bodo and Winkens, Marvin and Z{\"u}ndorf, Albert}, title = {Catalog integration of low-quality product data by attribute label ranking}, series = {Proceedings of the 9th International Conference on Data Science, Technology and Applications - Volume 1: DATA}, booktitle = {Proceedings of the 9th International Conference on Data Science, Technology and Applications - Volume 1: DATA}, isbn = {978-989-758-440-4}, doi = {10.5220/0009831000900101}, pages = {90 -- 101}, year = {2020}, language = {en} } @inproceedings{KloeserKohlKraftetal.2021, author = {Kl{\"o}ser, Lars and Kohl, Philipp and Kraft, Bodo and Z{\"u}ndorf, Albert}, title = {Multi-attribute relation extraction 
(MARE): simplifying the application of relation extraction}, series = {Proceedings of the 2nd International Conference on Deep Learning Theory and Applications - DeLTA}, booktitle = {Proceedings of the 2nd International Conference on Deep Learning Theory and Applications - DeLTA}, isbn = {978-989-758-526-5}, doi = {10.5220/0010559201480156}, pages = {148 -- 156}, year = {2021}, abstract = {Natural language understanding's relation extraction makes innovative and encouraging novel business concepts possible and facilitates new digitilized decision-making processes. Current approaches allow the extraction of relations with a fixed number of entities as attributes. Extracting relations with an arbitrary amount of attributes requires complex systems and costly relation-trigger annotations to assist these systems. We introduce multi-attribute relation extraction (MARE) as an assumption-less problem formulation with two approaches, facilitating an explicit mapping from business use cases to the data annotations. Avoiding elaborated annotation constraints simplifies the application of relation extraction approaches. The evaluation compares our models to current state-of-the-art event extraction and binary relation extraction methods. Our approaches show improvement compared to these on the extraction of general multi-attribute relations.}, language = {en} } @inproceedings{KohlSchmidtsKloeseretal.2021, author = {Kohl, Philipp and Schmidts, Oliver and Kl{\"o}ser, Lars and Werth, Henri and Kraft, Bodo and Z{\"u}ndorf, Albert}, title = {STAMP 4 NLP - an agile framework for rapid quality-driven NLP applications development}, series = {Quality of Information and Communications Technology. QUATIC 2021}, booktitle = {Quality of Information and Communications Technology. 
QUATIC 2021}, publisher = {Springer}, address = {Cham}, isbn = {978-3-030-85346-4}, doi = {10.1007/978-3-030-85347-1_12}, pages = {156 -- 166}, year = {2021}, abstract = {The progress in natural language processing (NLP) research over the last years, offers novel business opportunities for companies, as automated user interaction or improved data analysis. Building sophisticated NLP applications requires dealing with modern machine learning (ML) technologies, which impedes enterprises from establishing successful NLP projects. Our experience in applied NLP research projects shows that the continuous integration of research prototypes in production-like environments with quality assurance builds trust in the software and shows convenience and usefulness regarding the business goal. We introduce STAMP 4 NLP as an iterative and incremental process model for developing NLP applications. With STAMP 4 NLP, we merge software engineering principles with best practices from data science. Instantiating our process model allows efficiently creating prototypes by utilizing templates, conventions, and implementations, enabling developers and data scientists to focus on the business goals. 
Due to our iterative-incremental approach, businesses can deploy an enhanced version of the prototype to their software environment after every iteration, maximizing potential business value and trust early and avoiding the cost of successful yet never deployed experiments.}, language = {en} } @inproceedings{SchmidtsKraftWinkensetal.2021, author = {Schmidts, Oliver and Kraft, Bodo and Winkens, Marvin and Z{\"u}ndorf, Albert}, title = {Catalog integration of heterogeneous and volatile product data}, series = {DATA 2020: Data Management Technologies and Applications}, booktitle = {DATA 2020: Data Management Technologies and Applications}, publisher = {Springer}, address = {Cham}, isbn = {978-3-030-83013-7}, doi = {10.1007/978-3-030-83014-4_7}, pages = {134 -- 153}, year = {2021}, abstract = {The integration of frequently changing, volatile product data from different manufacturers into a single catalog is a significant challenge for small and medium-sized e-commerce companies. They rely on timely integrating product data to present them aggregated in an online shop without knowing format specifications, concept understanding of manufacturers, and data quality. Furthermore, format, concepts, and data quality may change at any time. Consequently, integrating product catalogs into a single standardized catalog is often a laborious manual task. Current strategies to streamline or automate catalog integration use techniques based on machine learning, word vectorization, or semantic similarity. However, most approaches struggle with low-quality or real-world data. We propose Attribute Label Ranking (ALR) as a recommendation engine to simplify the integration process of previously unknown, proprietary tabular format into a standardized catalog for practitioners. We evaluate ALR by focusing on the impact of different neural network architectures, language features, and semantic similarity. 
Additionally, we consider metrics for industrial application and present the impact of ALR in production and its limitations.}, language = {en} } @inproceedings{BuesgenKloeserKohletal.2022, author = {B{\"u}sgen, Andr{\'e} and Kl{\"o}ser, Lars and Kohl, Philipp and Schmidts, Oliver and Kraft, Bodo and Z{\"u}ndorf, Albert}, title = {Exploratory analysis of chat-based black market profiles with natural language processing}, series = {Proceedings of the 11th International Conference on Data Science, Technology and Applications}, booktitle = {Proceedings of the 11th International Conference on Data Science, Technology and Applications}, isbn = {978-989-758-583-8}, issn = {2184-285X}, doi = {10.5220/0011271400003269}, pages = {83 -- 94}, year = {2022}, abstract = {Messenger apps like WhatsApp or Telegram are an integral part of daily communication. Besides the various positive effects, those services extend the operating range of criminals. Open trading groups with many thousand participants emerged on Telegram. Law enforcement agencies monitor suspicious users in such chat rooms. This research shows that text analysis, based on natural language processing, facilitates this through a meaningful domain overview and detailed investigations. We crawled a corpus from such self-proclaimed black markets and annotated five attribute types products, money, payment methods, user names, and locations. Based on each message a user sends, we extract and group these attributes to build profiles. Then, we build features to cluster the profiles. Pretrained word vectors yield better unsupervised clustering results than current state-of-the-art transformer models. The result is a semantically meaningful high-level overview of the user landscape of black market chatrooms. 
Additionally, the extracted structured information serves as a foundation for further data exploration, for example, the most active users or preferred payment methods.}, language = {en} } @inproceedings{SildatkeKarwanniKraftetal.2020, author = {Sildatke, Michael and Karwanni, Hendrik and Kraft, Bodo and Schmidts, Oliver and Z{\"u}ndorf, Albert}, title = {Automated Software Quality Monitoring in Research Collaboration Projects}, series = {ICSEW'20: Proceedings of the IEEE/ACM 42nd International Conference on Software Engineering Workshops}, booktitle = {ICSEW'20: Proceedings of the IEEE/ACM 42nd International Conference on Software Engineering Workshops}, doi = {10.1145/3387940.3391478}, pages = {603 -- 610}, year = {2020}, language = {en} } @incollection{KraftKohlMeinecke2024, author = {Kraft, Bodo and Kohl, Philipp and Meinecke, Matthias}, title = {Analyse und Nachverfolgung von Projektzielen durch Einsatz von Natural Language Processing}, series = {KI in der Projektwirtschaft : was ver{\"a}ndert sich durch KI im Projektmanagement?}, booktitle = {KI in der Projektwirtschaft : was ver{\"a}ndert sich durch KI im Projektmanagement?}, editor = {Bernert, Christian and Scheurer, Steffen and Wehnes, Harald}, publisher = {UVK Verlag}, isbn = {978-3-381-11132-9 (Online)}, doi = {10.24053/9783381111329}, pages = {157 -- 167}, year = {2024}, language = {de} } @inproceedings{BuesgenKloeserKohletal.2023, author = {B{\"u}sgen, Andr{\'e} and Kl{\"o}ser, Lars and Kohl, Philipp and Schmidts, Oliver and Kraft, Bodo and Z{\"u}ndorf, Albert}, title = {From cracked accounts to fake IDs: user profiling on German telegram black market channels}, series = {Data Management Technologies and Applications}, booktitle = {Data Management Technologies and Applications}, editor = {Cuzzocrea, Alfredo and Gusikhin, Oleg and Hammoudi, Slimane and Quix, Christoph}, publisher = {Springer}, address = {Cham}, isbn = {978-3-031-37889-8 (Print)}, doi = {10.1007/978-3-031-37890-4_9}, pages = {176 -- 
202}, year = {2023}, abstract = {Messenger apps like WhatsApp and Telegram are frequently used for everyday communication, but they can also be utilized as a platform for illegal activity. Telegram allows public groups with up to 200.000 participants. Criminals use these public groups for trading illegal commodities and services, which becomes a concern for law enforcement agencies, who manually monitor suspicious activity in these chat rooms. This research demonstrates how natural language processing (NLP) can assist in analyzing these chat rooms, providing an explorative overview of the domain and facilitating purposeful analyses of user behavior. We provide a publicly available corpus of annotated text messages with entities and relations from four self-proclaimed black market chat rooms. Our pipeline approach aggregates the extracted product attributes from user messages to profiles and uses these with their sold products as features for clustering. The extracted structured information is the foundation for further data exploration, such as identifying the top vendors or fine-granular price analyses. Our evaluation shows that pretrained word vectors perform better for unsupervised clustering than state-of-the-art transformer models, while the latter is still superior for sequence labeling.}, language = {en} } @inproceedings{KohlFreyerKraemeretal.2023, author = {Kohl, Philipp and Freyer, Nils and Kr{\"a}mer, Yoka and Werth, Henri and Wolf, Steffen and Kraft, Bodo and Meinecke, Matthias and Z{\"u}ndorf, Albert}, title = {ALE: a simulation-based active learning evaluation framework for the parameter-driven comparison of query strategies for NLP}, series = {Deep Learning Theory and Applications. DeLTA 2023. Communications in Computer and Information Science}, booktitle = {Deep Learning Theory and Applications. DeLTA 2023. 
Communications in Computer and Information Science}, editor = {Conte, Donatello and Fred, Ana and Gusikhin, Oleg and Sansone, Carlo}, publisher = {Springer}, address = {Cham}, isbn = {978-3-031-39058-6 (Print)}, doi = {10.1007/978-3-031-39059-3}, pages = {235 -- 253}, year = {2023}, abstract = {Supervised machine learning and deep learning require a large amount of labeled data, which data scientists obtain in a manual, and time-consuming annotation process. To mitigate this challenge, Active Learning (AL) proposes promising data points to annotators they annotate next instead of a subsequent or random sample. This method is supposed to save annotation effort while maintaining model performance. However, practitioners face many AL strategies for different tasks and need an empirical basis to choose between them. Surveys categorize AL strategies into taxonomies without performance indications. Presentations of novel AL strategies compare the performance to a small subset of strategies. Our contribution addresses the empirical basis by introducing a reproducible active learning evaluation (ALE) framework for the comparative evaluation of AL strategies in NLP. The framework allows the implementation of AL strategies with low effort and a fair data-driven comparison through defining and tracking experiment parameters (e.g., initial dataset size, number of data points per query step, and the budget). ALE helps practitioners to make more informed decisions, and researchers can focus on developing new, effective AL strategies and deriving best practices for specific use cases. With best practices, practitioners can lower their annotation costs. 
We present a case study to illustrate how to use the framework.}, language = {en} } @inproceedings{KloeserBuesgenKohletal.2023, author = {Kl{\"o}ser, Lars and B{\"u}sgen, Andr{\´e} and Kohl, Philipp and Kraft, Bodo and Z{\"u}ndorf, Albert}, title = {Explaining relation classification models with semantic extents}, series = {DeLTA 2023: Deep Learning Theory and Applications}, booktitle = {DeLTA 2023: Deep Learning Theory and Applications}, editor = {Conte, Donatello and Fred, Ana and Gusikhin, Oleg and Sansone, Carlo}, publisher = {Springer}, address = {Cham}, isbn = {978-3-031-39058-6 (Print)}, doi = {10.1007/978-3-031-39059-3_13}, pages = {189 -- 208}, year = {2023}, abstract = {In recent years, the development of large pretrained language models, such as BERT and GPT, significantly improved information extraction systems on various tasks, including relation classification. State-of-the-art systems are highly accurate on scientific benchmarks. A lack of explainability is currently a complicating factor in many real-world applications. Comprehensible systems are necessary to prevent biased, counterintuitive, or harmful decisions. We introduce semantic extents, a concept to analyze decision patterns for the relation classification task. Semantic extents are the most influential parts of texts concerning classification decisions. Our definition allows similar procedures to determine semantic extents for humans and models. We provide an annotation tool and a software framework to determine semantic extents for humans and models conveniently and reproducibly. Comparing both reveals that models tend to learn shortcut patterns from data. These patterns are hard to detect with current interpretability methods, such as input reductions. Our approach can help detect and eliminate spurious decision patterns during model development. Semantic extents can increase the reliability and security of natural language processing systems. 
Semantic extents are an essential step in enabling applications in critical areas like healthcare or finance. Moreover, our work opens new research directions for developing methods to explain deep learning models.}, language = {en} } @article{SildatkeKarwanniKraftetal.2023, author = {Sildatke, Michael and Karwanni, Hendrik and Kraft, Bodo and Z{\"u}ndorf, Albert}, title = {A distributed microservice architecture pattern for the automated generation of information extraction pipelines}, series = {SN Computer Science}, journal = {SN Computer Science}, number = {4, Article number: 833}, publisher = {Springer Singapore}, address = {Singapore}, issn = {2661-8907}, doi = {10.1007/s42979-023-02256-4}, pages = {19 Seiten}, year = {2023}, abstract = {Companies often build their businesses based on product information and therefore try to automate the process of information extraction (IE). Since the information source is usually heterogeneous and non-standardized, classic extract, transform, load techniques reach their limits. Hence, companies must implement the newest findings from research to tackle the challenges of process automation. They require a flexible and robust system that is extendable and ensures the optimal processing of the different document types. This paper provides a distributed microservice architecture pattern that enables the automated generation of IE pipelines. Since their optimal design is individual for each input document, the system ensures the ad-hoc generation of pipelines depending on specific document characteristics at runtime. Furthermore, it introduces the automated quality determination of each available pipeline and controls the integration of new microservices based on their impact on the business value. The introduced system enables fast prototyping of the newest approaches from research and supports companies in automating their IE processes. 
Based on the automated quality determination, it ensures that the generated pipelines always meet defined business requirements when they come into productive use.}, language = {en} }