@article{SanchezCespedesLeasureTejedorGaravitoetal.2023, author = {Sanchez-Cespedes, Lina Maria and Leasure, Douglas Ryan and Tejedor-Garavito, Natalia and Amaya Cruz, Glenn Harry and Garcia Velez, Gustavo Adolfo and Mendoza Beltr{\'a}n, Andryu Enrique and Mar{\'\i}n-Salazar, Yenny Andrea and Esch, Thomas and Tatem, Andrew J. and Ospina Boh{\'o}rquez, Mariana Francisca}, title = {Social cartography and satellite-derived building coverage for post-census population estimates in difficult-to-access regions of Colombia}, series = {Population studies : a Journal of Demography}, volume = {78}, journal = {Population studies : a Journal of Demography}, number = {1}, publisher = {Taylor \& Francis}, address = {London}, issn = {1477-4747}, doi = {10.1080/00324728.2023.2190151}, pages = {3 -- 20}, year = {2023}, abstract = {Effective government services rely on accurate population numbers to allocate resources. In Colombia and globally, census enumeration is challenging in remote regions and where armed conflict is occurring. During census preparations, the Colombian National Administrative Department of Statistics conducted social cartography workshops, where community representatives estimated numbers of dwellings and people throughout their regions. We repurposed this information, combining it with remotely sensed buildings data and other geospatial data. To estimate building counts and population sizes, we developed hierarchical Bayesian models, trained using nearby full-coverage census enumerations and assessed using 10-fold cross-validation. We compared models to assess the relative contributions of community knowledge, remotely sensed buildings, and their combination to model fit. The Community model was unbiased but imprecise; the Satellite model was more precise but biased; and the Combination model was best for overall accuracy. 
Results reaffirmed the power of remotely sensed buildings data for population estimation and highlighted the value of incorporating local knowledge.}, language = {en} } @article{VarrialeHengsbachGuoetal.2024, author = {Varriale, Ludovica and Hengsbach, Jan-Niklas and Guo, Tianyi and Kuka, Katrin and Tippk{\"o}tter, Nils and Ulber, Roland}, title = {Sustainable production of lactic acid using a perennial ryegrass as feedstock—a comparative study of fermentation at the bench- and reactor-scale, and ensiling}, series = {Sustainability}, volume = {16}, journal = {Sustainability}, number = {18}, publisher = {MDPI}, address = {Basel}, issn = {2071-1050}, doi = {10.3390/su16188054}, year = {2024}, abstract = {Perennial ryegrass (Lolium perenne) is an underutilized lignocellulosic biomass that has several benefits such as high availability, renewability, and biomass yield. The grass press-juice obtained from the mechanical pretreatment can be used for the bio-based production of chemicals. Lactic acid is a platform chemical that has attracted consideration due to its broad area of applications. For this reason, the more sustainable production of lactic acid is expected to increase. In this work, lactic acid was produced using complex medium at the bench- and reactor scale, and the results were compared to those obtained using an optimized press-juice medium. Bench-scale fermentations were carried out in a pH-control system and lactic acid production reached approximately 21.84 ± 0.95 g/L in complex medium, and 26.61 ± 1.2 g/L in press-juice medium. In the bioreactor, the production yield was 0.91 ± 0.07 g/g, corresponding to a 1.4-fold increase with respect to the complex medium with fructose. As a comparison to the traditional ensiling process, the ensiling of whole grass fractions of different varieties harvested in summer and autumn was performed. 
Ensiling showed variations in lactic acid yields, with a yield up to 15.2\% dry mass for the late-harvested samples, surpassing typical silage yields of 6-10\% dry mass.}, language = {en} } @article{OrtnerHauserSchmadereretal.2019, author = {Ortner, Marion and Hauser, Christine and Schmaderer, Christoph and Muggenthaler, Claudia and Hapfelmeier, Alexander and Sorg, Christian and Diehl-Schmid, Janine and Kurz, Alexander and F{\"o}rstl, Hans and Ikenberg, Benno and Kotliar, Konstantin and Poppert, Holger and Grimmer, Timo}, title = {Decreased vascular pulsatility in Alzheimer's disease dementia measured by transcranial color-coded duplex sonography}, series = {Neuropsychiatric disease and treatment}, journal = {Neuropsychiatric disease and treatment}, number = {15}, publisher = {Dove Medical Press}, address = {Albany, Auckland}, issn = {1178-2021}, doi = {10.2147/NDT.S225754}, pages = {3487 -- 3499}, year = {2019}, abstract = {Purpose: Impaired paravascular drainage of β-Amyloid (Aβ) has been proposed as a contributing cause for sporadic Alzheimer's disease (AD), as decreased cerebral blood vessel pulsatility and subsequently reduced propulsion in this pathway could lead to the accumulation and deposition of Aβ in the brain. Therefore, we hypothesized that there is an increased impairment in pulsatility across AD spectrum. Patients and Methods: Using transcranial color-coded duplex sonography (TCCS) the resistance and pulsatility index (RI; PI) of the middle cerebral artery (MCA) in healthy controls (HC, n=14) and patients with AD dementia (ADD, n=12) were measured. In a second step, we extended the sample by adding patients with mild cognitive impairment (MCI) stratified by the presence (MCI-AD, n=8) or absence of biomarkers (MCI-nonAD, n=8) indicative for underlying AD pathology, and compared RI and PI across the groups. To control for atherosclerosis as a confounder, we measured the arteriolar-venular-ratio of retinal vessels. 
Results: Left and right RI (p=0.020; p=0.027) and left PI (p=0.034) differed between HC and ADD controlled for atherosclerosis with AUCs of 0.776, 0.763, and 0.718, respectively. The RI and PI of MCI-AD tended towards ADD, of MCI-nonAD towards HC, respectively. RIs and PIs were associated with disease severity (p=0.010, p=0.023). Conclusion: Our results strengthen the hypothesis that impaired pulsatility could cause impaired amyloid clearance from the brain and thereby might contribute to the development of AD. However, further studies considering other factors possibly influencing amyloid clearance as well as larger sample sizes are needed.}, language = {en} } @article{BandlitzNakhoulKotliar2022, author = {Bandlitz, Stefan and Nakhoul, Makram and Kotliar, Konstantin}, title = {Daily variations of corneal white-to-white diameter measured with different methods}, series = {Clinical and experimental optometry}, journal = {Clinical and experimental optometry}, number = {14}, publisher = {Taylor \& Francis}, address = {London}, issn = {0816-4622}, doi = {10.2147/OPTO.S360651}, pages = {173 -- 181}, year = {2022}, abstract = {Purpose: A precise determination of the corneal diameter is essential for the diagnosis of various ocular diseases, cataract and refractive surgery as well as for the selection and fitting of contact lenses. The aim of this study was to investigate the agreement between two automatic and one manual method for corneal diameter determination and to evaluate possible diurnal variations in corneal diameter. Patients and Methods: Horizontal white-to-white corneal diameter of 20 volunteers was measured at three different fixed times of a day with three methods: Scheimpflug method (Pentacam HR, Oculus), placido based topography (Keratograph 5M, Oculus) and manual method using an image analysis software at a slitlamp (BQ900, Haag-Streit). 
Results: The two-factorial analysis of variance could not show a significant effect of the different instruments (p = 0.117), the different time points (p = 0.506) and the interaction between instrument and time point (p = 0.182). Very good repeatability (intraclass correlation coefficient ICC, quartile coefficient of dispersion QCD) was found for all three devices. However, manual slitlamp measurements showed a higher QCD than the automatic measurements with the Keratograph 5M and the Pentacam HR at all measurement times. Conclusion: The manual and automated methods used in the study to determine corneal diameter showed good agreement and repeatability. No significant diurnal variations of corneal diameter were observed during the period of time studied.}, language = {en} } @article{KnoedlerRuehlEmontsetal.2019, author = {Kn{\"o}dler, Matthias and R{\"u}hl, Clemens and Emonts, Jessica and Buyel, Johannes Felix}, title = {Seasonal weather changes affect the yield and quality of recombinant proteins produced in transgenic tobacco plants in a greenhouse setting}, series = {Frontiers in Plant Science}, journal = {Frontiers in Plant Science}, number = {10}, publisher = {Frontiers Media}, address = {Lausanne}, issn = {1664-462X (online-ressource)}, doi = {10.3389/fpls.2019.01245}, pages = {13 Seiten}, year = {2019}, abstract = {Transgenic plants have the potential to produce recombinant proteins on an agricultural scale, with yields of several tons per year. The cost-effectiveness of transgenic plants increases if simple cultivation facilities such as greenhouses can be used for production. In such a setting, we expressed a novel affinity ligand based on the fluorescent protein DsRed, which we used as a carrier for the linear epitope ELDKWA from the HIV-neutralizing antibody 2F5. 
The DsRed-2F5-epitope (DFE) fusion protein was produced in 12 consecutive batches of transgenic tobacco (Nicotiana tabacum) plants over the course of 2 years and was purified using a combination of blanching and immobilized metal-ion affinity chromatography (IMAC). The average purity after IMAC was 57 ± 26\% (n = 24) in terms of total soluble protein, but the average yield of pure DFE (12 mg kg-1) showed substantial variation (± 97 mg kg-1, n = 24) which correlated with seasonal changes. Specifically, we found that temperature peaks (>28°C) and intense illuminance (>45 klx h-1) were associated with lower DFE yields after purification, reflecting the loss of the epitope-containing C-terminus in up to 90\% of the product. Whereas the weather factors were of limited use to predict product yields of individual harvests conducted for each batch (spaced by 1 week), the average batch yields were well approximated by simple linear regression models using two independent variables for prediction (illuminance and plant age). Interestingly, accumulation levels determined by fluorescence analysis were not affected by weather conditions but positively correlated with plant age, suggesting that the product was still expressed at high levels, but the extreme conditions affected its stability, albeit still preserving the fluorophore function. The efficient production of intact recombinant proteins in plants may therefore require adequate climate control and shading in greenhouses or even cultivation in fully controlled indoor farms.}, language = {en} } @article{BernauKnoedlerEmontsetal.2022, author = {Bernau, C. R. 
and Kn{\"o}dler, Matthias and Emonts, Jessica and J{\"a}pel, Ronald Colin and Buyel, Johannes Felix}, title = {The use of predictive models to develop chromatography-based purification processes}, series = {Frontiers in Bioengineering and Biotechnology}, journal = {Frontiers in Bioengineering and Biotechnology}, number = {10}, publisher = {Frontiers Media}, address = {Lausanne}, issn = {2296-4185 (online-ressource)}, doi = {10.3389/fbioe.2022.1009102}, pages = {25 Seiten}, year = {2022}, abstract = {Chromatography is the workhorse of biopharmaceutical downstream processing because it can selectively enrich a target product while removing impurities from complex feed streams. This is achieved by exploiting differences in molecular properties, such as size, charge and hydrophobicity (alone or in different combinations). Accordingly, many parameters must be tested during process development in order to maximize product purity and recovery, including resin and ligand types, conductivity, pH, gradient profiles, and the sequence of separation operations. The number of possible experimental conditions quickly becomes unmanageable. Although the range of suitable conditions can be narrowed based on experience, the time and cost of the work remain high even when using high-throughput laboratory automation. In contrast, chromatography modeling using inexpensive, parallelized computer hardware can provide expert knowledge, predicting conditions that achieve high purity and efficient recovery. The prediction of suitable conditions in silico reduces the number of empirical tests required and provides in-depth process understanding, which is recommended by regulatory authorities. In this article, we discuss the benefits and specific challenges of chromatography modeling. We describe the experimental characterization of chromatography devices and settings prior to modeling, such as the determination of column porosity. 
We also consider the challenges that must be overcome when models are set up and calibrated, including the cross-validation and verification of data-driven and hybrid (combined data-driven and mechanistic) models. This review will therefore support researchers intending to establish a chromatography modeling workflow in their laboratory.}, language = {en} } @article{EmontsBuyel2023, author = {Emonts, Jessica and Buyel, Johannes Felix}, title = {An overview of descriptors to capture protein properties - Tools and perspectives in the context of QSAR modeling}, series = {Computational and Structural Biotechnology Journal}, journal = {Computational and Structural Biotechnology Journal}, number = {21}, publisher = {Research Network of Computational and Structural Biotechnology}, address = {Gotenburg}, issn = {2001-0370 (online-ressource)}, doi = {10.1016/j.csbj.2023.05.022}, pages = {3234 -- 3247}, year = {2023}, abstract = {Proteins are important ingredients in food and feed, they are the active components of many pharmaceutical products, and they are necessary, in the form of enzymes, for the success of many technical processes. However, production can be challenging, especially when using heterologous host cells such as bacteria to express and assemble recombinant mammalian proteins. The manufacturability of proteins can be hindered by low solubility, a tendency to aggregate, or inefficient purification. Tools such as in silico protein engineering and models that predict separation criteria can overcome these issues but usually require the complex shape and surface properties of proteins to be represented by a small number of quantitative numeric values known as descriptors, as similarly used to capture the features of small molecules. Here, we review the current status of protein descriptors, especially for application in quantitative structure activity relationship (QSAR) models. 
First, we describe the complexity of proteins and the properties that descriptors must accommodate. Then we introduce descriptors of shape and surface properties that quantify the global and local features of proteins. Finally, we highlight the current limitations of protein descriptors and propose strategies for the derivation of novel protein descriptors that are more informative.}, language = {en} } @article{RieplPettrakFaulstichetal.2010, author = {Riepl, Herbert Matthias and Pettrak, J{\"u}rgen and Faulstich, Martin and Herrmann, Wolfgang Anton}, title = {Self metathesis of fatty alcohols and amines to provide monomers for polyester and polyamide products}, series = {Macromolecular Symposia}, volume = {293}, journal = {Macromolecular Symposia}, number = {1}, publisher = {Wiley-VCH}, address = {Weinheim}, issn = {1521-3900 (eISSN)}, doi = {10.1002/masy.200900041}, pages = {39 -- 42}, year = {2010}, abstract = {Self metathesis of oleochemicals offers a variety of bifunctional compounds, that can be used as monomer for polymer production. Many precursors are in huge scales available, like oleic acid ester (biodiesel), oleyl alcohol (tensides), oleyl amines (tensides, lubricants). 
We show several ways to produce and separate and purify C18-α,ω-bifunctional compounds, using Grubbs 2nd Generation catalysts, starting from technical grade educts.}, language = {en} } @article{GrosshauserPettrak2023, author = {Gro{\ss}hauser, Christian and Pettrak, J{\"u}rgen}, title = {Die Rolle des Wasserstoffs in der Abwasserbehandlung}, series = {Wasser und Abfall}, journal = {Wasser und Abfall}, number = {7/8}, publisher = {Springer Fachmedien}, address = {Wiesbaden}, issn = {1436-9095}, doi = {10.1007/s35152-023-1444-4}, year = {2023}, abstract = {Die Bereitstellung von nachhaltig erzeugtem Wasserstoff als Energietr{\"a}ger und Rohstoff ist eine wichtige Schl{\"u}sseltechnologie sowohl als Ersatz f{\"u}r fossile Energietr{\"a}ger, aber auch als Produkt im Zusammenhang mit Kreislaufprozessen. In der Abwasserbehandlung bestehen verschiedene M{\"o}glichkeiten Wasserstoff herzustellen. Mehrere Wege, m{\"o}gliche Synergien, aber auch deren Nachteile werden vorgestellt.}, language = {de} } @article{RakeEnningKurthetal.1994, author = {Rake, Heinrich and Enning, Manfred and Kurth, Johannes and Schr{\"o}der, Walter}, title = {Automatic uncoupler completes automation at the hump}, series = {RGI - Railway Gazette International}, volume = {150}, journal = {RGI - Railway Gazette International}, number = {6}, publisher = {Reed Business}, address = {Sutton}, issn = {0373-5346}, pages = {371 -- 374}, year = {1994}, language = {en} } @article{RakeSchwanhaeusserFrederichetal.1993, author = {Rake, Heinrich and Schwanh{\"a}u{\ss}er, Wulf and Frederich, Fritz and Enning, Manfred}, title = {Automatisierung von Ablaufanlagen mit dem Kupplungsroboter}, series = {ETR - Eisenbahntechnische Rundschau}, journal = {ETR - Eisenbahntechnische Rundschau}, number = {4}, publisher = {DVV Media Group}, address = {Hamburg}, issn = {0013-2845}, pages = {249 -- 254}, year = {1993}, language = {de} } @article{ManfredMaySutter2020, author = {Enning, Manfred and May, J{\"o}rg and Sutter, Stefan}, title = 
{Innovation am G{\"u}terwagen am Beispiel der Automatischen Bremsprobe}, series = {ETR - Eisenbahntechnische Rundschau}, journal = {ETR - Eisenbahntechnische Rundschau}, number = {12}, publisher = {DVV Media Group}, address = {Hamburg}, issn = {0013-2845}, pages = {14 -- 19}, year = {2020}, abstract = {Mit der Digitalen Automatischen Kupplung beginnt ein neues Kapitel des Schieneng{\"u}terverkehrs, in dem zusammengestellte Wagen sich automatisch in wenigen Minuten abfahrbereit machen, ohne dass der Mensch eingreifen muss. Eines der gr{\"o}ßten Hemmnisse der umweltfreundlichen Schiene wird dann entfallen. Notwendig ist jetzt eine Diskussion {\"u}ber den Umfang und die Systemgrenzen der Automatischen Bremsprobe.}, language = {de} } @article{EnningSchmidtWilbring2019, author = {Enning, Manfred and Schmidt, Bernd and Wilbring, Daniela}, title = {Auf dem Weg zur autonomen Anschlussbedienung}, series = {Privatbahn-Magazin}, journal = {Privatbahn-Magazin}, number = {3}, publisher = {Bahn Media}, address = {Suhlendorf}, issn = {1865-0163}, pages = {40 -- 43}, year = {2019}, language = {de} } @article{EnningPfaff2017, author = {Enning, Manfred and Pfaff, Raphael}, title = {G{\"u}terwagen 4.0 - Mehr als nur technischer Fortschritt}, series = {Privatbahn-Magazin}, journal = {Privatbahn-Magazin}, number = {2}, publisher = {Bahn Media}, address = {Suhlendorf}, issn = {1865-0163}, pages = {21 -- 25}, year = {2017}, abstract = {Lokomotiven sind dank modernster Konzepte der Antriebstechnik heute energiesparend und umweltfreundlich. Eine Ausr{\"u}stung mit Telematik und Assistenzfunktionen ist Standard. Auf der Strecke zeigt sich moderne Technik in Form elektronischer Stellwerke und Zugsicherungssysteme und in Rangier- und Abstellanlagen als EOW-Technik. Am G{\"u}terwagen hingegen ist der technische Fortschritt komplett vorbeigegangen. Auch beim modernsten Wagen (Abb. 
1) ist die einzige „Automatik"-Funktion die zentral {\"u}ber die Hauptluftleitung (HL) versorgte und bet{\"a}tigte Luftbremse.}, language = {de} } @article{EnningPfaff2016, author = {Enning, Manfred and Pfaff, Raphael}, title = {Digitalisierung bringt mehr G{\"u}ter auf die Schiene}, series = {Sonderprojekte ATZ/MTZ}, volume = {21}, journal = {Sonderprojekte ATZ/MTZ}, number = {6 (suppl.)}, publisher = {Springer Fachmedien}, address = {Wiesbaden}, issn = {2509-4610}, doi = {10.1007/s41491-016-0570-7}, pages = {34 -- 37}, year = {2016}, language = {de} } @article{AsarStapenhorst2020, author = {Asar, Hande and Stapenhorst, Carolin}, title = {Zvi Hecker: Drawing on drawing}, series = {Archives of Design Research}, volume = {33}, journal = {Archives of Design Research}, number = {3}, publisher = {Korean Society of Design Science}, address = {Seongnam}, issn = {1226-8046}, doi = {10.15187/adr.2020.08.33.3.45}, pages = {45 -- 53}, year = {2020}, abstract = {Background: Architectural representation, nurtured by the interaction between design thinking and design action, is inherently multi-layered. However, the representation object cannot always reflect these layers. Therefore, it is claimed that these reflections and layerings can gain visibility through 'performativity in personal knowledge', which basically has a performative character. The specific layers of representation produced during the performativity in personal knowledge permit insights about the 'personal way of designing' [1]. Therefore, the question, 'how can these layered drawings be decomposed to understand the personal way of designing', can be defined as the beginning of the study. On the other hand, performativity in personal knowledge in architectural design is handled through the relationship between explicit and tacit knowledge and representational and non-representational theory. 
To discuss the practical dimension of these theoretical relations, Zvi Hecker's drawing of the Heinz-Galinski-School is examined as an example. The study aims to understand the relationships between the layers by decomposing a layered drawing analytically in order to exemplify personal ways of designing. Methods: The study is based on qualitative research methodologies. First, a model has been formed through theoretical readings to discuss the performativity in personal knowledge. This model is used to understand the layered representations and to research the personal way of designing. Thus, one drawing of Hecker's Heinz-Galinski-School project is chosen. Second, its layers are decomposed to detect and analyze diverse objects, which hint to different types of design tools and their application. Third, Zvi Hecker's statements of the design process are explained through the interview data [2] and other sources. The obtained data are compared with each other. Results: By decomposing the drawing, eleven layers are defined. These layers are used to understand the relation between the design idea and its representation. They can also be thought of as a reading system. In other words, a method to discuss Hecker's performativity in personal knowledge is developed. Furthermore, the layers and their interconnections are described in relation to Zvi Hecker's personal way of designing. 
Conclusions: It can be said that layered representations, which are associated with the multilayered structure of performativity in personal knowledge, form the personal way of designing.}, language = {en} } @article{StapenhorstZabekHildebrand2018, author = {Stapenhorst, Carolin and Zabek, Magdalena and Hildebrand, Linda}, title = {Communication process and information flow in the architectural planning context}, series = {Creativity game : theory and practice of spatial planning}, journal = {Creativity game : theory and practice of spatial planning}, number = {6}, publisher = {University of Ljubljana}, address = {Ljubljana}, doi = {10.15292/IU-CG.2018.06.066-073}, pages = {66 -- 73}, year = {2018}, abstract = {Against the background of growing data in everyday life, data processing tools become more powerful to deal with the increasing complexity in building design. The architectural planning process is offered a variety of new instruments to design, plan and communicate planning decisions. Ideally the access to information serves to secure and document the quality of the building and in the worst case, the increased data absorbs time by collection and processing without any benefit for the building and its user. Process models can illustrate the impact of information on the design- and planning process so that architect and planner can steer the process. This paper provides historic and contemporary models to visualize the architectural planning process and introduces means to describe today's situation consisting of stakeholders, events and instruments. It explains conceptions during Renaissance in contrast to models used in the second half of the 20th century. 
Contemporary models are discussed regarding their value against the background of increasing computation in the building process.}, language = {en} } @article{StapenhorstMotta2018, author = {Stapenhorst, Carolin and Motta, Luciano}, title = {Citt{\`a} Olivettiana in Ivrea, Italien}, series = {Bauwelt}, volume = {109}, journal = {Bauwelt}, number = {22}, publisher = {Bauverlag BV}, address = {G{\"u}tersloh}, issn = {0005-6855}, pages = {20 -- 31}, year = {2018}, language = {de} } @article{StapenhorstDutto2016, author = {Stapenhorst, Carolin and Dutto, Andrea Alberto}, title = {Notes on conceptual learning in architecture}, series = {Cartha - The Form of Form}, journal = {Cartha - The Form of Form}, address = {Basel}, year = {2016}, language = {en} } @article{KohlKraemerFohryetal.2024, author = {Kohl, Philipp and Kr{\"a}mer, Yoka and Fohry, Claudia and Kraft, Bodo}, title = {Scoping review of active learning strategies and their evaluation environments for entity recognition tasks}, series = {Deep learning theory and applications}, journal = {Deep learning theory and applications}, editor = {Fred, Ana and Hadjali, Allel and Gusikhin, Oleg and Sansone, Carlo}, publisher = {Springer}, address = {Cham}, isbn = {978-3-031-66694-0 (online ISBN)}, doi = {10.1007/978-3-031-66694-0_6}, pages = {84 -- 106}, year = {2024}, abstract = {We conducted a scoping review for active learning in the domain of natural language processing (NLP), which we summarize in accordance with the PRISMA-ScR guidelines as follows: Objective: Identify active learning strategies that were proposed for entity recognition and their evaluation environments (datasets, metrics, hardware, execution time). Design: We used Scopus and ACM as our search engines. We compared the results with two literature surveys to assess the search quality. We included peer-reviewed English publications introducing or comparing active learning strategies for entity recognition. 
Results: We analyzed 62 relevant papers and identified 106 active learning strategies. We grouped them into three categories: exploitation-based (60x), exploration-based (14x), and hybrid strategies (32x). We found that all studies used the F1-score as an evaluation metric. Information about hardware (6x) and execution time (13x) was only occasionally included. The 62 papers used 57 different datasets to evaluate their respective strategies. Most datasets contained newspaper articles or biomedical/medical data. Our analysis revealed that 26 out of 57 datasets are publicly accessible. Conclusion: Numerous active learning strategies have been identified, along with significant open questions that still need to be addressed. Researchers and practitioners face difficulties when making data-driven decisions about which active learning strategy to adopt. Conducting comprehensive empirical comparisons using the evaluation environment proposed in this study could help establish best practices in the domain.}, language = {en} }