% Bibliography database (cleaned from an OPUS repository export).
% Conventions applied:
%   - citation keys left untouched so existing \cite commands keep working;
%   - `series` fields that merely duplicated `journal` were removed;
%   - article numbers moved from `number` to `pages`; page ranges use `--`;
%   - journal ISSNs previously mislabelled `isbn` corrected to `issn`;
%   - acronyms in titles brace-protected against style recasing.
% NOTE(review): abstracts kept verbatim; they contain raw Unicode (μ, ×, ±, σ)
% from the export — fine with biblatex/Biber, may need escaping for classic BibTeX.

@article{PhilippEfthimiouPaganoetal.2022,
  author    = {Mohr, Philipp and Efthimiou, Nikos and Pagano, Fiammetta and Kratochwil, Nicolaus and Pizzichemi, Marco and Tsoumpas, Charalampos and Auffray, Etiennette and Ziemons, Karl},
  title     = {Image reconstruction analysis for positron emission tomography with heterostructured scintillators},
  journal   = {IEEE Transactions on Radiation and Plasma Medical Sciences},
  volume    = {7},
  number    = {1},
  publisher = {IEEE},
  address   = {New York, NY},
  issn      = {2469-7311},
  doi       = {10.1109/TRPMS.2022.3208615},
  pages     = {41--51},
  year      = {2022},
  abstract  = {The concept of structure engineering has been proposed for exploring the next generation of radiation detectors with improved performance. A TOF-PET geometry with heterostructured scintillators with a pixel size of 3.0×3.1×15 mm3 was simulated using Monte Carlo. The heterostructures consisted of alternating layers of BGO as a dense material with high stopping power and plastic (EJ232) as a fast light emitter. The detector time resolution was calculated as a function of the deposited and shared energy in both materials on an event-by-event basis. While sensitivity was reduced to 32\% for 100 μm thick plastic layers and 52\% for 50 μm, the CTR distribution improved to 204±49 ps and 220±41 ps respectively, compared to 276 ps that we considered for bulk BGO. The complex distribution of timing resolutions was accounted for in the reconstruction. We divided the events into three groups based on their CTR and modeled them with different Gaussian TOF kernels. On a NEMA IQ phantom, the heterostructures had better contrast recovery in early iterations. On the other hand, BGO achieved a better contrast to noise ratio (CNR) after the 15th iteration due to the higher sensitivity. The developed simulation and reconstruction methods constitute new tools for evaluating different detector designs with complex time responses.},
  language  = {en},
}

@article{PourshahidiEngelmannOffenhaeusseretal.2022,
  author    = {Pourshahidi, Ali Mohammad and Engelmann, Ulrich M. and Offenh{\"a}usser, Andreas and Krause, Hans-Joachim},
  title     = {Resolving ambiguities in core size determination of magnetic nanoparticles from magnetic frequency mixing data},
  journal   = {Journal of Magnetism and Magnetic Materials},
  volume    = {563},
  publisher = {Elsevier},
  address   = {Amsterdam},
  issn      = {0304-8853},
  doi       = {10.1016/j.jmmm.2022.169969},
  pages     = {169969},
  year      = {2022},
  abstract  = {Frequency mixing magnetic detection (FMMD) has been widely utilized as a measurement technique in magnetic immunoassays. It can also be used for the characterization and distinction (also known as "colourization") of different types of magnetic nanoparticles (MNPs) based on their core sizes. In a previous work, it was shown that the large particles contribute most of the FMMD signal. This leads to ambiguities in core size determination from fitting since the contribution of the small-sized particles is almost undetectable among the strong responses from the large ones. In this work, we report on how this ambiguity can be overcome by modelling the signal intensity using the Langevin model in thermodynamic equilibrium including a lognormal core size distribution fL(dc,d0,σ) fitted to experimentally measured FMMD data of immobilized MNPs. For each given median diameter d0, an ambiguous amount of best-fitting pairs of parameters distribution width σ and number of particles Np with R2 > 0.99 are extracted. By determining the samples' total iron mass, mFe, with inductively coupled plasma optical emission spectrometry (ICP-OES), we are then able to identify the one specific best-fitting pair (σ, Np) one uniquely. With this additional externally measured parameter, we resolved the ambiguity in core size distribution and determined the parameters (d0, σ, Np) directly from FMMD measurements, allowing precise MNPs sample characterization.},
  language  = {en},
}

% NOTE(review): second author normalized from "Pourshahidi, Mohammad Ali" to
% "Pourshahidi, Ali Mohammad" for consistency with the entry above — confirm.
@article{EngelmannPourshahidiShalabyetal.2022,
  author    = {Engelmann, Ulrich M. and Pourshahidi, Ali Mohammad and Shalaby, Ahmed and Krause, Hans-Joachim},
  title     = {Probing particle size dependency of frequency mixing magnetic detection with dynamic relaxation simulation},
  journal   = {Journal of Magnetism and Magnetic Materials},
  volume    = {563},
  publisher = {Elsevier},
  address   = {Amsterdam},
  issn      = {0304-8853},
  doi       = {10.1016/j.jmmm.2022.169965},
  pages     = {169965},
  year      = {2022},
  abstract  = {Biomedical applications of magnetic nanoparticles (MNP) fundamentally rely on the particles' magnetic relaxation as a response to an alternating magnetic field. The magnetic relaxation complexly depends on the interplay of MNP magnetic and physical properties with the applied field parameters. It is commonly accepted that particle core size is a major contributor to signal generation in all the above applications, however, most MNP samples comprise broad distribution spanning nm and more. Therefore, precise knowledge of the exact contribution of individual core sizes to signal generation is desired for optimal MNP design generally for each application. Specifically, we present a magnetic relaxation simulation-driven analysis of experimental frequency mixing magnetic detection (FMMD) for biosensing to quantify the contributions of individual core size fractions towards signal generation. Applying our method to two different experimental MNP systems, we found the most dominant contributions from approx. 20 nm sized particles in the two independent MNP systems. Additional comparison between freely suspended and immobilized MNP also reveals insight in the MNP microstructure, allowing to use FMMD for MNP characterization, as well as to further fine-tune its applicability in biosensing.},
  language  = {en},
}

@article{LindnerBurgerRutledgeetal.2022,
  author    = {Lindner, Simon and Burger, Ren{\'e} and Rutledge, Douglas N. and Do, Xuan Tung and Rumpf, Jessica and Diehl, Bernd W. K. and Schulze, Margit and Monakhova, Yulia},
  title     = {Is the calibration transfer of multivariate calibration models between high- and low-field {NMR} instruments possible? A case study of lignin molecular weight},
  journal   = {Analytical chemistry},
  volume    = {94},
  number    = {9},
  publisher = {ACS Publications},
  address   = {Washington, DC},
  issn      = {1520-6882},
  doi       = {10.1021/acs.analchem.1c05125},
  pages     = {3997--4004},
  year      = {2022},
  abstract  = {Although several successful applications of benchtop nuclear magnetic resonance (NMR) spectroscopy in quantitative mixture analysis exist, the possibility of calibration transfer remains mostly unexplored, especially between high- and low-field NMR. This study investigates for the first time the calibration transfer of partial least squares regressions [weight average molecular weight (Mw) of lignin] between high-field (600 MHz) NMR and benchtop NMR devices (43 and 60 MHz). For the transfer, piecewise direct standardization, calibration transfer based on canonical correlation analysis, and transfer via the extreme learning machine auto-encoder method are employed. Despite the immense resolution difference between high-field and low-field NMR instruments, the results demonstrate that the calibration transfer from high- to low-field is feasible in the case of a physical property, namely, the molecular weight, achieving validation errors close to the original calibration (down to only 1.2 times higher root mean square errors). These results introduce new perspectives for applications of benchtop NMR, in which existing calibrations from expensive high-field instruments can be transferred to cheaper benchtop instruments to economize.},
  language  = {en},
}

@article{MonakhovaDiehl2022,
  author    = {Monakhova, Yulia and Diehl, Bernd W. K.},
  title     = {Multinuclear {NMR} screening of pharmaceuticals using standardization by 2H integral of a deuterated solvent},
  journal   = {Journal of Pharmaceutical and Biomedical Analysis},
  volume    = {209},
  publisher = {Elsevier},
  issn      = {0731-7085},
  doi       = {10.1016/j.jpba.2021.114530},
  pages     = {114530},
  year      = {2022},
  abstract  = {NMR standardization approach that uses the 2H integral of deuterated solvent for quantitative multinuclear analysis of pharmaceuticals is described. As a proof of principle, the existing NMR procedure for the analysis of heparin products according to US Pharmacopeia monograph is extended to the determination of Na+ and Cl- content in this matrix. Quantification is performed based on the ratio of a 23Na (35Cl) NMR integral and 2H NMR signal of deuterated solvent, D2O, acquired using the specific spectrometer hardware. As an alternative, the possibility of 133Cs standardization using the addition of Cs2CO3 stock solution is shown. Validation characteristics (linearity, repeatability, sensitivity) are evaluated. A holistic NMR profiling of heparin products can now also be used for the quantitative determination of inorganic compounds in a single analytical run using a single sample. In general, the new standardization methodology provides an appealing alternative for the NMR screening of inorganic and organic components in pharmaceutical products.},
  language  = {en},
}

@article{BurgerLindnerRumpfetal.2022,
  author    = {Burger, Ren{\'e} and Lindner, Simon and Rumpf, Jessica and Do, Xuan Tung and Diehl, Bernd W. K. and Rehahn, Matthias and Monakhova, Yulia and Schulze, Margit},
  title     = {Benchtop versus high field {NMR}: Comparable performance found for the molecular weight determination of lignin},
  journal   = {Journal of Pharmaceutical and Biomedical Analysis},
  volume    = {212},
  publisher = {Elsevier},
  address   = {New York, NY},
  issn      = {0731-7085},
  doi       = {10.1016/j.jpba.2022.114649},
  pages     = {114649},
  year      = {2022},
  abstract  = {Lignin is a promising renewable biopolymer being investigated worldwide as an environmentally benign substitute of fossil-based aromatic compounds, e.g. for the use as an excipient with antioxidant and antimicrobial properties in drug delivery or even as active compound. For its successful implementation into process streams, a quick, easy, and reliable method is needed for its molecular weight determination. Here we present a method using 1H spectra of benchtop as well as conventional NMR systems in combination with multivariate data analysis, to determine lignin's molecular weight (Mw and Mn) and polydispersity index (PDI). A set of 36 organosolv lignin samples (from Miscanthus x giganteus, Paulownia tomentosa and Silphium perfoliatum) was used for the calibration and cross validation, and 17 samples were used as external validation set. Validation errors between 5.6\% and 12.9\% were achieved for all parameters on all NMR devices (43, 60, 500 and 600 MHz). Surprisingly, no significant difference in the performance of the benchtop and high-field devices was found. This facilitates the application of this method for determining lignin's molecular weight in an industrial environment because of the low maintenance expenditure, small footprint, ruggedness, and low cost of permanent magnet benchtop NMR systems.},
  language  = {en},
}

@article{MonakhovaSobolevaFedotovaetal.2022,
  author    = {Monakhova, Yulia and Soboleva, Polina M. and Fedotova, Elena S. and Musina, Kristina T. and Burmistrova, Natalia A.},
  title     = {Quantum chemical calculations of {IR} spectra of heparin disaccharide subunits},
  journal   = {Computational and Theoretical Chemistry},
  volume    = {1217},
  publisher = {Elsevier},
  address   = {New York, NY},
  issn      = {2210-271X},
  doi       = {10.1016/j.comptc.2022.113891},
  pages     = {113891},
  year      = {2022},
  abstract  = {Heparin is a natural polysaccharide, which plays essential role in many biological processes. Alterations in building blocks can modify biological roles of commercial heparin products, due to significant changes in the conformation of the polymer chain. The variability structure of heparin leads to difficulty in quality control using different analytical methods, including infrared (IR) spectroscopy. In this paper molecular modelling of heparin disaccharide subunits was performed using quantum chemistry. The structural and spectral parameters of these disaccharides have been calculated using RHF/6-311G. In addition, over-sulphated chondroitin sulphate disaccharide was studied as one of the most widespread contaminants of heparin. Calculated IR spectra were analyzed with respect to specific structure parameters. IR spectroscopic fingerprint was found to be sensitive to substitution pattern of disaccharide subunits. Vibrational assignments of calculated spectra were correlated with experimental IR spectral bands of native heparin. Chemometrics was used to perform multivariate analysis of simulated spectral data.},
  language  = {en},
}

@article{MuellerSeginWeigandetal.2022,
  author    = {Mueller, Tobias and Segin, Alexander and Weigand, Christoph and Schmitt, Robert H.},
  title     = {Feature selection for measurement models},
  journal   = {International journal of quality \& reliability management},
  publisher = {Emerald Group Publishing Limited},
  address   = {Bingley},
  issn      = {0265-671X},
  doi       = {10.1108/IJQRM-07-2021-0245},
  year      = {2022},
  note      = {Ahead of print},
  abstract  = {Purpose In the determination of the measurement uncertainty, the GUM procedure requires the building of a measurement model that establishes a functional relationship between the measurand and all influencing quantities. Since the effort of modelling as well as quantifying the measurement uncertainties depend on the number of influencing quantities considered, the aim of this study is to determine relevant influencing quantities and to remove irrelevant ones from the dataset. Design/methodology/approach In this work, it was investigated whether the effort of modelling for the determination of measurement uncertainty can be reduced by the use of feature selection (FS) methods. For this purpose, 9 different FS methods were tested on 16 artificial test datasets, whose properties (number of data points, number of features, complexity, features with low influence and redundant features) were varied via a design of experiments. Findings Based on a success metric, the stability, universality and complexity of the method, two FS methods could be identified that reliably identify relevant and irrelevant influencing quantities for a measurement model. Originality/value For the first time, FS methods were applied to datasets with properties of classical measurement processes. The simulation-based results serve as a basis for further research in the field of FS for measurement models. The identified algorithms will be applied to real measurement processes in the future.},
  language  = {en},
}

@article{RuebbelkeVoegeleGrajewskietal.2022,
  author    = {R{\"u}bbelke, Dirk and V{\"o}gele, Stefan and Grajewski, Matthias and Zobel, Luzy},
  title     = {Hydrogen-based steel production and global climate protection: An empirical analysis of the potential role of a {European} cross border adjustment mechanism},
  journal   = {Journal of Cleaner Production},
  volume    = {380},
  number    = {Part 2},
  publisher = {Elsevier},
  issn      = {0959-6526},
  doi       = {10.1016/j.jclepro.2022.135040},
  pages     = {135040},
  year      = {2022},
  abstract  = {The European Union's aim to become climate neutral by 2050 necessitates ambitious efforts to reduce carbon emissions. Large reductions can be attained particularly in energy intensive sectors like iron and steel. In order to prevent the relocation of such industries outside the EU in the course of tightening environmental regulations, the establishment of a climate club jointly with other large emitters and alternatively the unilateral implementation of an international cross-border carbon tax mechanism are proposed. This article focuses on the latter option choosing the steel sector as an example. In particular, we investigate the financial conditions under which a European cross border mechanism is capable to protect hydrogen-based steel production routes employed in Europe against more polluting competition from abroad. By using a floor price model, we assess the competitiveness of different steel production routes in selected countries. We evaluate the climate friendliness of steel production on the basis of specific GHG emissions. In addition, we utilize an input-output price model. It enables us to assess impacts of rising cost of steel production on commodities using steel as intermediates. Our results raise concerns that a cross-border tax mechanism will not suffice to bring about competitiveness of hydrogen-based steel production in Europe because the cost tends to remain higher than the cost of steel production in e.g. China. Steel is a classic example for a good used mainly as intermediate for other products. Therefore, a cross-border tax mechanism for steel will increase the price of products produced in the EU that require steel as an input. This can in turn adversely affect competitiveness of these sectors. Hence, the effects of higher steel costs on European exports should be borne in mind and could require the cross-border adjustment mechanism to also subsidize exports.},
  language  = {en},
}

@article{TranTrinhDaoetal.2022,
  author    = {Tran, Ngoc Trinh and Trinh, Tu Luc and Dao, Ngoc Tien and Giap, Van Tan and Truong, Manh Khuyen and Dinh, Thuy Ha and Staat, Manfred},
  title     = {{FEM} shakedown analysis of structures under random strength with chance constrained programming},
  journal   = {Vietnam Journal of Mechanics},
  volume    = {44},
  number    = {4},
  publisher = {Vietnam Academy of Science and Technology (VAST)},
  issn      = {0866-7136},
  doi       = {10.15625/0866-7136/17943},
  pages     = {459--473},
  year      = {2022},
  abstract  = {Direct methods, comprising limit and shakedown analysis, are a branch of computational mechanics. They play a significant role in mechanical and civil engineering design. The concept of direct methods aims to determine the ultimate load carrying capacity of structures beyond the elastic range. In practical problems, the direct methods lead to nonlinear convex optimization problems with a large number of variables and constraints. If strength and loading are random quantities, the shakedown analysis can be formulated as stochastic programming problem. In this paper, a method called chance constrained programming is presented, which is an effective method of stochastic programming to solve shakedown analysis problems under random conditions of strength. In this study, the loading is deterministic, and the strength is a normally or lognormally distributed variable.},
  language  = {en},
}