@article{MonakhovaDiehl2021,
  author    = {Monakhova, Yulia and Diehl, Bernd W. K.},
  title     = {A step towards optimization of the {qNMR} workflow: proficiency testing exercise at an {GxP}-accredited laboratory},
  series    = {Applied Magnetic Resonance},
  volume    = {52},
  journal   = {Applied Magnetic Resonance},
  publisher = {Springer Nature},
  address   = {Wien},
  issn      = {1613-7507},
  doi       = {10.1007/s00723-021-01324-3},
  pages     = {581--593},
  year      = {2021},
  abstract  = {Quantitative nuclear magnetic resonance (qNMR) is considered as a powerful tool for multicomponent mixture analysis as well as for the purity determination of single compounds. Special attention is currently paid to the training of operators and study directors involved in qNMR testing. To assure that only qualified personnel are used for sample preparation at our GxP-accredited laboratory, weighing test was proposed. Sixteen participants performed six-fold weighing of the binary mixture of dibutylated hydroxytoluene (BHT) and 1,2,4,5-tetrachloro-3-nitrobenzene (TCNB). To evaluate the quality of data analysis, all spectra were evaluated manually by a qNMR expert and using in-house developed automated routine. The results revealed that mean values are comparable and both evaluation approaches are free of systematic error. However, automated evaluation resulted in an approximately 20\% increase in precision. The same findings were revealed for qNMR analysis of 32 compounds used in pharmaceutical industry. Weighing test by six-fold determination in binary mixtures and automated qNMR methodology can be recommended as efficient tools for evaluating staff proficiency. The automated qNMR method significantly increases throughput and precision of qNMR for routine measurements and extends application scope of qNMR.},
  language  = {en}
}

@article{MolinnusJanusFangetal.2022,
  author    = {Molinnus, Denise and Janus, Kevin Alexander and Fang, Anyelina C. and Drinic, Aleksander and Achtsnicht, Stefan and K{\"o}pf, Marius and Keusgen, Michael and Sch{\"o}ning, Michael Josef},
  title     = {Thick-film carbon electrode deposited onto a biodegradable fibroin substrate for biosensing applications},
  series    = {Physica status solidi (a)},
  volume    = {219},
  journal   = {Physica status solidi (a)},
  number    = {23},
  publisher = {Wiley-VCH},
  address   = {Weinheim},
  issn      = {1862-6319},
  doi       = {10.1002/pssa.202200100},
  pages     = {1--9},
  year      = {2022},
  abstract  = {This study addresses a proof-of-concept experiment with a biocompatible screen-printed carbon electrode deposited onto a biocompatible and biodegradable substrate, which is made of fibroin, a protein derived from silk of the Bombyx mori silkworm. To demonstrate the sensor performance, the carbon electrode is functionalized as a glucose biosensor with the enzyme glucose oxidase and encapsulated with a silicone rubber to ensure biocompatibility of the contact wires. The carbon electrode is fabricated by means of thick-film technology including a curing step to solidify the carbon paste. The influence of the curing temperature and curing time on the electrode morphology is analyzed via scanning electron microscopy. The electrochemical characterization of the glucose biosensor is performed by amperometric/voltammetric measurements of different glucose concentrations in phosphate buffer. Herein, systematic studies at applied potentials from 500 to 1200 mV to the carbon working electrode (vs the Ag/AgCl reference electrode) allow to determine the optimal working potential. Additionally, the influence of the curing parameters on the glucose sensitivity is examined over a time period of up to 361 days. The sensor shows a negligible cross-sensitivity toward ascorbic acid, noradrenaline, and adrenaline. The developed biocompatible biosensor is highly promising for future in vivo and epidermal applications.},
  language  = {en}
}

@article{MolinnusIkenJohnenetal.2022,
  author    = {Molinnus, Denise and Iken, Heiko and Johnen, Anna Lynn and Richstein, Benjamin and Hellmich, Lena and Poghossian, Arshak and Knoch, Joachim and Sch{\"o}ning, Michael Josef},
  title     = {Miniaturized {pH}-Sensitive Field-Effect Capacitors with Ultrathin {Ta$_2$O$_5$} Films Prepared by Atomic Layer Deposition},
  series    = {physica status solidi (a) applications and materials science},
  volume    = {219},
  journal   = {physica status solidi (a) applications and materials science},
  number    = {8},
  publisher = {Wiley-VCH},
  address   = {Weinheim},
  issn      = {1862-6319},
  doi       = {10.1002/pssa.202100660},
  pages     = {7 Seiten},
  year      = {2022},
  abstract  = {Miniaturized electrolyte-insulator-semiconductor capacitors (EISCAPs) with ultrathin gate insulators have been studied in terms of their pH-sensitive sensor characteristics: three different EISCAP systems consisting of Al-p-Si-Ta2O5(5 nm), Al-p-Si-Si3N4(1 or 2 nm)-Ta2O5 (5 nm), and Al-p-Si-SiO2(3.6 nm)-Ta2O5(5 nm) layer structures are characterized in buffer solution with different pH values by means of capacitance-voltage and constant capacitance method. The SiO2 and Si3N4 gate insulators are deposited by rapid thermal oxidation and rapid thermal nitridation, respectively, whereas the Ta2O5 film is prepared by atomic layer deposition. All EISCAP systems have a clear pH response, favoring the stacked gate insulators SiO2-Ta2O5 when considering the overall sensor characteristics, while the Si3N4(1 nm)-Ta2O5 stack delivers the largest accumulation capacitance (due to the lower equivalent oxide thickness) and a higher steepness in the slope of the capacitance-voltage curve among the studied stacked gate insulator systems.},
  language  = {en}
}

@article{MaurerRiekeSchemmetal.2023,
  author    = {Maurer, Florian and Rieke, Christian and Schemm, Ralf and Stollenwerk, Dominik},
  title     = {Analysis of an urban grid with high photovoltaic and e-mobility penetration},
  series    = {Energies},
  volume    = {16},
  journal   = {Energies},
  number    = {8},
  publisher = {MDPI},
  address   = {Basel},
  issn      = {1996-1073},
  doi       = {10.3390/en16083380},
  pages     = {18 Seiten},
  year      = {2023},
  abstract  = {This study analyses the expected utilization of an urban distribution grid under high penetration of photovoltaic and e-mobility with charging infrastructure on a residential level. The grid utilization and the corresponding power flow are evaluated, while varying the control strategies and photovoltaic installed capacity in different scenarios. Four scenarios are used to analyze the impact of e-mobility. The individual mobility demand is modelled based on the largest German studies on mobility "Mobilit{\"a}t in Deutschland", which is carried out every 5 years. To estimate the ramp-up of photovoltaic generation, a potential analysis of the roof surfaces in the supply area is carried out via an evaluation of an open solar potential study. The photovoltaic feed-in time series is derived individually for each installed system in a resolution of 15 min. The residential consumption is estimated using historical smart meter data, which are collected in London between 2012 and 2014. For a realistic charging demand, each residential household decides daily on the state of charge if their vehicle requires to be charged. The resulting charging time series depends on the underlying behavior scenario. Market prices and mobility demand are therefore used as scenario input parameters for a utility function based on the current state of charge to model individual behavior. The aggregated electricity demand is the starting point of the power flow calculation. The evaluation is carried out for an urban region with approximately 3100 residents. The analysis shows that increased penetration of photovoltaics combined with a flexible and adaptive charging strategy can maximize PV usage and reduce the need for congestion-related intervention by the grid operator by reducing the amount of kWh charged from the grid by 30\% which reduces the average price of a charged kWh by 35\% to 14 ct/kWh from 21.8 ct/kWh without PV optimization. The resulting grid congestions are managed by implementing an intelligent price or control signal. The analysis took place using data from a real German grid with 10 subgrids. The entire software can be adapted for the analysis of different distribution grids and is publicly available as an open-source software library on GitHub.},
  language  = {en}
}

@article{MatheisRoethWagner2005,
  author   = {Matheis, Anton and R{\"o}th, Thilo and Wagner, Manfred},
  title    = {Studentenprojekt "Capro" - eine virtuelle Sportwagenstudie "Vision 2015"},
  year     = {2005},
  abstract = {Design- und Karosseriebaustudenten der FH Aachen entwickeln gemeinsam mit externen Fachleuten unter Einsatz virtueller Entwicklungswerkzeuge ein Konzept f{\"u}r einen Sportwagen},
  subject  = {Karosseriebau},
  language = {de}
}

@article{LuftBremenLuft2023,
  author    = {Luft, Angela and Bremen, Sebastian and Luft, Nils},
  title     = {A cost/benefit and flexibility evaluation framework for additive technologies in strategic factory planning},
  series    = {Processes},
  volume    = {11},
  journal   = {Processes},
  number    = {7},
  publisher = {MDPI},
  address   = {Basel},
  issn      = {2227-9717},
  doi       = {10.3390/pr11071968},
  pages     = {Artikel 1968},
  year      = {2023},
  abstract  = {There is a growing demand for more flexibility in manufacturing to counter the volatility and unpredictability of the markets and provide more individualization for customers. However, the design and implementation of flexibility within manufacturing systems are costly and only economically viable if applicable to actual demand fluctuations. To this end, companies are considering additive manufacturing (AM) to make production more flexible. This paper develops a conceptual model for the impact quantification of AM on volume and mix flexibility within production systems in the early stages of the factory-planning process. Together with the model, an application guideline is presented to help planners with the flexibility quantification and the factory design process. Following the development of the model and guideline, a case study is presented to indicate the potential impact additive technologies can have on manufacturing flexibility. Within the case study, various scenarios with different production system configurations and production programs are analyzed, and the impact of the additive technologies on volume and mix flexibility is calculated. This work will allow factory planners to determine the potential impacts of AM on manufacturing flexibility in an early planning stage and design their production systems accordingly.},
  language  = {en}
}

@article{Lohr1996,
  author   = {Lohr, J{\"u}rgen},
  title    = {Server f{\"u}r den interaktiven Einsatz},
  year     = {1996},
  abstract = {J{\"u}rgen Lohr, Jahrgang 1962, besch{\"a}ftigt mit Softwareentwicklung im Projekt "Interaktive Multimedia" bei Telekom AG, Entwicklungszentrum Berlin. Zuerst erschienen in: Telekom-Praxis Ausgabe 1996. Inhaltsverzeichnis: 1. Einleitung 1.1 Einf{\"u}hrung 1.2 Neue Dienste und Anwendungen 2 Modell zur Verteilung und Architektur 3 Technologien 3.1 Netzwerk 3.2 Computertechniken 3.3. Aufgaben der Server 4 Geplanter Einsatz der Pilotprojekte 4.1 Pilote der Telekom 4.2 Show-Case Berlin 5 Verwendete Server-Architektur 5.1 Berlin - SEL/Alcatel 5.2 Hamburg - Philips 5.3. K{\"o}ln/Bonn - Digital, FUBA und Nokia 5.4 N{\"u}rnberg - Oracle, nCube und Sequent 5.5 Stuttgart - SEL/Alcatel, Hewlett Packard und Bosch 6 Zuk{\"u}nftige Aspekte 6.1 DVB 6.2 DAVIC 6.3 weitere Aspekte 7 Zusammenfassung 8 Schrifttum 9 verwendete Abk{\"u}rzungen},
  subject  = {Multimediamarkt},
  language = {de}
}

@article{Lohr1997,
  author   = {Lohr, J{\"u}rgen},
  title    = {{XAPI} - eine universelle Kommunikationsplattform},
  year     = {1997},
  abstract = {zuerst erschienen in Telekom-Praxis Ausgabe 1997. Von J{\"u}rgen Lohr, Jahrgang 1962, besch{\"a}ftigt mit Softwareentwicklung im Projekt "Interaktive Multimedia" bei der Deutschen Telekom AG, Entwicklungszentrum Berlin. 26 S. Der Beitrag befaßt sich mit dem Thema der universellen Kommunikationsplattform f{\"u}r neue, interaktive, multimediale Dienste und Anwendungen. Ausgehend von den Diensten wird ein Referenzmodell f{\"u}r offene Kommunikation und die Kommunikationsplattform kurz vorgestellt. Desweiteren wird die XAPI mit den Grundbegriffen, den Phasen der Kommunikation und dem Status Modell dargelegt. Ebenfalls werden die realisierten Service Provider erl{\"a}utert. Abschließend werden zuk{\"u}nftige Vorhaben aus den Standardisierungsprojekten ITU und DAVIC sowie weitere Realisierungen aufgezeigt.},
  subject  = {Multimediamarkt},
  language = {de}
}

@article{Lohr2000,
  author   = {Lohr, J{\"u}rgen},
  title    = {{MPEG}-Standards f{\"u}r Multimedia-Dienste (Video-Standards f{\"u}r Multimedia)},
  year     = {2000},
  abstract = {Zuerst erschienen in Telekom-Praxis Ausgabe 2000. 24 S. Innovative multimediale Dienste werden durch die Globalisierung und Konvergenz der M{\"a}rkte, als auch durch Provider-Strategien ausgerichtet. Grundlegende Innovationsfelder sind: Globaler Zugang, Navigation und Intelligenter Inhalt. Die MPEG-Standards - im besonderen MPEG-4 und MPEG-7 - helfen, die oben genannten Forderungen zu erf{\"u}llen. Weiterhin erm{\"o}glichen sie auch f{\"u}r die Provider und den Kunden eine Zukunftssicherheit zu geben und einen zeitlichen Bestand f{\"u}r innovative Produkte zu sichern. Die Aufw{\"a}rtkompabilit{\"a}t der MPEG-Standards erm{\"o}glicht die Vermeidung von {\"U}berschneidung und die Erschließung neuer Dimensionen.},
  subject  = {Multimediamarkt},
  language = {de}
}

@comment{The following entry originally reused the key "Lohr2000" (a duplicate-key
error for BibTeX/Biber); it has been disambiguated as "Lohr2000a". Update any
\cite{Lohr2000} references that intended this second article.}

@article{Lohr2000a,
  author   = {Lohr, J{\"u}rgen},
  title    = {Die Standards {MPEG-4} und {MPEG-7} in den Multimedia-Diensten},
  year     = {2000},
  abstract = {In: Unterrichtsbl{\"a}tter / Deutsche Telekom AG. 53. 2000. 7. S. 326-340. (15 S. ) Die Multimedia-Dienste erhalten durch die Datenreduktion bei der Kompressionstechnologie eine Wirtschaftlichkeit, die den breiteren Einsatz von breitbandigen Diensten erlaubt. Die Dienste ben{\"o}tigen f{\"u}r die verschiedenen Medien nicht mehr so große {\"U}bertragungs- und Speicherleistungen. Bei den entwickelten Verfahren, den so genannten MPEG-(Motion Picture Experts Group-)Standards, werden die Video- und Tonsignale in die digitale Ebene {\"u}berf{\"u}hrt und anschließend unrelevante Signalanteile entfernt. Der daraus resultierende Datenstrom ben{\"o}tigt weniger Bandbreite bei der {\"U}bertragung zum Endkunden. Die MPEG-Organisation wurde bereits im Jahre 1988 ins Leben gerufen und ist ein gemeinsames Gremium der beiden Organisationen ISO (International Standard Organization) und IEC (International Electrotechnical Commission), welches sich mit der Standardisierung von Kodier- und Kompressionsverfahren f{\"u}r die digitalen Bild-, Video und Audioformate befasst. Mittlerweile sind vier wichtige Standards mit MPEG-1, MPEG-2 und MPEG-4 verabschiedet worden sowie mit MPEG-7 in Vorbereitung. Da die Grundlagen zu MPEG-1, -2 und -Audio bereits in anderen Beitr{\"a}gen behandelt wurden, werden hier ausschließlich die neuen bzw. aktuellen MPEG-Standards vorgestellt.},
  subject  = {MPEG 4},
  language = {de}
}