%
% This file was created by the TYPO3 extension
% publications
% --- Timezone: CEST
% Creation date: 2025-05-04
% Creation time: 12:14:50
% --- Number of references
% 18
%
@Article{coleman2022assessment,
  author = {Coleman, Jesse and Ginsburg, Amy Sarah and Macharia, William M and Ochieng, Roseline and Chomba, Dorothy and Zhou, Guohai and Dunsmuir, Dustin and Karlen, Walter and Ansermino, J Mark},
  title = {Assessment of neonatal respiratory rate variability},
  abstract = {Accurate measurement of respiratory rate (RR) in neonates is challenging due to high neonatal RR variability (RRV). There is growing evidence that RRV measurement could inform and guide neonatal care. We sought to quantify neonatal RRV during a clinical study in which we compared multiparameter continuous physiological monitoring (MCPM) devices. Measurements of capnography-recorded exhaled carbon dioxide across 60-s epochs were collected from neonates admitted to the neonatal unit at Aga Khan University-Nairobi hospital. Breaths were manually counted from capnograms and using an automated signal detection algorithm which also calculated mean and median RR for each epoch. Outcome measures were between- and within-neonate RRV, between- and within-epoch RRV, and 95{\%} limits of agreement, bias, and root-mean-square deviation. Twenty-seven neonates were included, with 130 epochs analysed. Mean manual breath count (MBC) was 48 breaths per minute. Median RRV ranged from 11.5{\%} (interquartile range (IQR) 6.8-18.9{\%}) to 28.1{\%} (IQR 23.5-36.7{\%}). Bias and limits of agreement for MBC vs algorithm-derived breath count, MBC vs algorithm-derived median breath rate, and MBC vs algorithm-derived mean breath rate were -0.5 (-2.7, 1.66), -3.16 (-12.12, 5.8), and -3.99 (-11.3, 3.32), respectively. The marked RRV highlights the challenge of performing accurate RR measurements in neonates. More research is required to optimize the use of RRV to improve care. When evaluating MCPM devices, accuracy thresholds should be less stringent in newborns due to increased RRV. Lastly, median RR, which discounts the impact of extreme outliers, may be more reflective of the underlying physiological control of breathing.},
  year = {2022},
  DOI = {10.1007/s10877-022-00840-2},
  journal = {Journal of Clinical Monitoring and Computing},
  publisher = {Springer},
  pages = {ahead of print},
  tags = {signalprocessing quality LMIC},
  web_url = {https://link.springer.com/article/10.1007/s10877-022-00840-2}
}

@Article{SCEBBA2022100884,
  author = {Scebba, Gaetano and Zhang, Jia and Catanzaro, Sabrina and Mihai, Carina and Distler, Oliver and Berli, Martin and Karlen, Walter},
  title = {Detect-and-segment: A deep learning approach to automate wound image segmentation},
  abstract = {Chronic wounds significantly impact quality of life. They can rapidly deteriorate and require close monitoring of healing progress. Image-based wound analysis is a way of objectively assessing the wound status by quantifying important features that are related to healing. However, high heterogeneity of the wound types and imaging conditions challenge the robust segmentation of wound images. We present Detect-and-Segment (DS), a deep learning approach to produce wound segmentation maps with high generalization capabilities. In our approach, dedicated deep neural networks detected the wound position, isolated the wound from the perturbing background, and computed a wound segmentation map.
  We tested this approach on a diabetic foot ulcers data set and compared it to a segmentation method based on the full image. To evaluate its generalizability on out-of-distribution data, we measured the performance of the DS approach on 4 additional independent data sets, with larger variety of wound types from different body locations. The Matthews’ correlation coefficient (MCC) improved from 0.29 (full image) to 0.85 (DS) on the diabetic foot ulcer data set. When the DS was tested on the independent data sets, the mean MCC increased from 0.17 to 0.85. Furthermore, the DS enabled the training of segmentation models with up to 90{\%} less training data without impacting the segmentation performance. The proposed DS approach is a step towards automating wound analysis and reducing efforts to manage chronic wounds.},
  year = {2022},
  issn = {2352-9148},
  DOI = {10.1016/j.imu.2022.100884},
  journal = {Informatics in Medicine Unlocked},
  volume = {29},
  pages = {100884},
  keywords = {Chronic wounds, Semantic segmentation, Machine learning, Generalizability, Smartphone},
  tags = {signalprocessing camera machine learning},
  file_url = {https://www.sciencedirect.com/science/article/pii/S2352914822000375}
}

@Inproceedings{Zhang,
  author = {Zhang, Jia and Karlen, Walter},
  title = {A novel quality indicator for displaying and comparing the missingness of the PPG derived respiratory rate},
  year = {2021},
  DOI = {10.3929/ethz-b-000458643},
  booktitle = {Society for Technology in Anaesthesia Annual Meeting 2021 (STA 2021)},
  pages = {67},
  tags = {signalprocessing quality}
}

@Article{Muroi2019,
  author = {Muroi, Carl and Meier, Sando and De Luca, Valeria and Mack, David J. and Str\"{a}ssle, Christian and Schwab, Patrick and Karlen, Walter and Keller, Emanuela},
  title = {Automated False Alarm Reduction in a Real-Life Intensive Care Setting Using Motion Detection},
  abstract = {Background: Contemporary monitoring systems are sensitive to motion artifacts and cause an excess of false alarms. This results in alarm fatigue and hazardous alarm desensitization. To reduce the number of false alarms, we developed and validated a novel algorithm to classify alarms, based on automatic motion detection in videos. Methods: We considered alarms generated by the following continuously measured parameters: arterial oxygen saturation, systolic blood pressure, mean blood pressure, heart rate, and mean intracranial pressure. The movements of the patient and in his/her surroundings were monitored by a camera situated at the ceiling. Using the algorithm, alarms were classified into RED (true), ORANGE (possibly false), and GREEN alarms (false, i.e., artifact). Alarms were reclassified by blinded clinicians. The performance was evaluated using confusion matrices. Results: A total of 2349 alarms from 45 patients were reclassified. For RED alarms, sensitivity was high (87.0{\%}) and specificity was low (29.6{\%}) for all parameters. As the sensitivities and specificities for RED and GREEN alarms are interrelated, the opposite was observed for GREEN alarms, i.e., low sensitivity (30.2{\%}) and high specificity (87.2{\%}). As RED alarms should not be missed, even at the expense of false positives, the performance was acceptable. The low sensitivity for GREEN alarms is acceptable, as it is not harmful to tag a GREEN alarm as RED/ORANGE. It still contributes to alarm reduction. However, a 12.8{\%} false-positive rate for GREEN alarms is critical.
  Conclusions: The proposed system is a step forward toward alarm reduction; however, implementation of additional layers, such as signal curve analysis, multiple parameter correlation analysis, and/or more sophisticated video-based analytics, is needed for improvement.},
  year = {2020},
  issn = {1541-6933},
  DOI = {10.1007/s12028-019-00711-w},
  journal = {Neurocritical Care},
  volume = {32},
  pages = {419--426},
  number = {2},
  keywords = {Alarm fatigue, Alarm reduction, False alarms, ICU, Motion sensor, Smart alarms},
  tags = {quality signalprocessing ml},
  file_url = {http://link.springer.com/10.1007/s12028-019-00711-w}
}

@Inproceedings{Zhang2020a,
  author = {Zhang, Jia and Scebba, Gaetano and Karlen, Walter},
  title = {Covariance intersection to improve the robustness of the photoplethysmogram derived respiratory rate},
  year = {2020},
  isbn = {978-1-7281-1990-8},
  DOI = {10.1109/EMBC44109.2020.9175943},
  booktitle = {42nd Annual International Conference of the IEEE Engineering in Medicine \& Biology Society (EMBC)},
  publisher = {IEEE},
  address = {Montreal, CA},
  pages = {5939--42},
  tags = {signalprocessing},
  file_url = {https://arxiv.org/abs/2004.09934 https://ieeexplore.ieee.org/document/9175943/}
}

@Inproceedings{Karlen2019,
  author = {Karlen, Walter},
  title = {Automated point-of-care processing and interpretation of pulse oximetry for global health applications},
  year = {2019},
  DOI = {10.3929/ethz-b-000359389},
  booktitle = {41st International Engineering in Medicine and Biology Conference: Biomedical Engineering Ranging From Wellness To Intensive Care (EMBC 2019)},
  pages = {WeC04.4},
  keywords = {Optical and photonic sensors and systems, Physiological monitoring - Modeling and analysis, Portable miniaturized systems},
  tags = {signalprocessing}
}

@Inproceedings{Ferster2019a,
  author = {Ferster, Maria Laura and Lustenberger, Caroline and Karlen, Walter},
  title = {Hitting the phase: How filtering parameters affect the EEG slow wave phase analysis},
  year = {2019},
  booktitle = {International Conference on Advanced Sleep Modulation Technologies},
  address = {Monte Verita, Ascona, Switzerland},
  tags = {signalprocessing}
}

@Inproceedings{Scebba2018,
  author = {Scebba, Gaetano and T\"{u}shaus, Laura and Karlen, Walter},
  title = {Multispectral camera fusion increases robustness of ROI detection for biosignal estimation with nearables in real-world scenarios},
  abstract = {Thermal cameras enable non-contact estimation of the respiratory rate (RR). Accurate estimation of RR is highly dependent on the reliable detection of the region of interest (ROI), especially when using cameras with low pixel resolution. We present a novel approach for the automatic detection of the human nose ROI, based on facial landmark detection from an RGB camera that is fused with the thermal image after tracking. We evaluated the detection rate and spatial accuracy of the novel algorithm on recordings obtained from 16 subjects under challenging detection scenarios. Results show a high detection rate (median: 100{\%}, 5th-95th percentile: 92{\%}-100{\%}) and very good spatial accuracy with an average root mean square error of 2 pixels in the detected ROI center when compared to manual labeling.
  Therefore, the implementation of a multispectral camera fusion algorithm is a valid strategy to improve the reliability of non-contact RR estimation with nearable devices featuring thermal cameras.},
  year = {2018},
  month = {7},
  isbn = {978-1-5386-3646-6},
  DOI = {10.1109/EMBC.2018.8513501},
  booktitle = {Annual International Conference of the IEEE Engineering in Medicine and Biology Society (EMBC)},
  publisher = {IEEE},
  address = {Honolulu, HI, USA},
  pages = {5672--5},
  tags = {camera signalprocessing rr nearable},
  file_url = {https://ieeexplore.ieee.org/document/8513501/}
}

@Article{Schwab2017,
  author = {Schwab, Patrick and Scebba, Gaetano Claudio and Zhang, Jia and Delai, Marco and Karlen, Walter},
  title = {Beat by Beat: Classifying Cardiac Arrhythmias with Recurrent Neural Networks},
  abstract = {INTRODUCTION: Previous work on detecting arrhythmias in electrocardiogram (ECG) records has predominantly focused on identifying atrial fibrillation (AF) in data obtained from clinical settings or Holter devices, where long-term recordings with multiple leads are the norm. However, the advent of mobile cardiac event recorders increased the importance of being able to differentiate between multiple types of rhythms in noisy short-term recordings with just a single lead. We propose a machine-learning architecture to learn the temporal and morphological patterns of various types of rhythms in order to perform multiclass classification under these more challenging conditions. METHODS: We segment the input ECG signal with a QRS detector into individual heartbeats. From each heartbeat, we extract - among others - morphological features with the encoding side of a stacked denoising autoencoder that was trained in an unsupervised manner. The extracted features are passed in original heartbeat order as input sequences to an ensemble of recurrent neural networks (RNNs). The RNNs were trained on different features, random overlapping subsets of the training data and in various one-versus-all setups in order to increase the model diversity within the ensemble. We blend the individual RNNs' predictions into a final classification solution using a multilayer perceptron (MLP) that was trained on held-out data. RESULTS: Our best ensemble at time of writing achieves an average F1-score over all classes of 0.78 (F1,normal=0.88, F1,af=0.75, F1,other=0.72, F1,noisy=0.78) on an out-of-sample test set (342 samples) and an average F1-score over all classes of 0.65 (F1,normal=0.82, F1,af=0.77, F1,other=0.64, F1,noisy=0.36) on the private test set for phase 1 of the PhysioNet 2017 challenge.
  CONCLUSION: Deep recurrent models enable our ensemble to differentiate between multiple types of heart rhythms by identifying temporal and morphological patterns in segmented ECG recordings of any length.},
  year = {2017},
  month = {9},
  DOI = {10.22489/CinC.2017.363-223},
  journal = {Computing in Cardiology (CinC)},
  volume = {44},
  address = {Rennes, F},
  pages = {1--4},
  tags = {signalprocessing ml},
  file_url = {http://www.cinc.org/archives/2017/pdf/363-223.pdf}
}

@Inproceedings{Ding2016,
  author = {Ding, Xiaorong and Zhang, Yuan-Ting and Tsang, Hon Ki and Karlen, Walter},
  title = {A pulse transit time based fusion method for the noninvasive and continuous monitoring of respiratory rate},
  year = {2016},
  isbn = {978-1-4577-0220-4},
  DOI = {10.1109/EMBC.2016.7591663},
  booktitle = {Annual International Conference of the IEEE Engineering in Medicine and Biology Society (EMBC)},
  publisher = {IEEE},
  pages = {4240--3},
  tags = {ppg signalprocessing},
  file_url = {http://ieeexplore.ieee.org/document/7591663/}
}

@Inproceedings{deluca16_temporal_prediction_cerebral,
  author = {De Luca, Valeria and Jaggi, Martin and Karlen, Walter and Keller, Emanuela},
  title = {Temporal prediction of cerebral hypoxia in neurointensive care patients: a feasibility study},
  year = {2016},
  booktitle = {International Symposium on Intracranial Pressure and Neuromonitoring},
  pages = {86--87},
  tags = {signalprocessing ml}
}

@Article{Karlen2015c,
  author = {Karlen, Walter and Petersen, Christian L and Dumont, Guy A and Ansermino, J Mark},
  title = {Variability in estimating shunt from single pulse oximetry measurement},
  year = {2015},
  issn = {0967-3334},
  DOI = {10.1088/0967-3334/36/5/967},
  journal = {Physiological Measurement},
  volume = {36},
  pages = {967--981},
  number = {5},
  keywords = {the impact of pulse oximeter accuracy on estimations of arterial oxygen saturation, corresponding author},
  tags = {signalprocessing},
  file_url = {http://stacks.iop.org/0967-3334/36/i=5/a=967?key=crossref.8c2a59105fefc7e1db14fdff4a81fb49}
}

@Inproceedings{Garde2013a,
  author = {Garde, Ainara and Karlen, Walter and Dehkordi, Parastoo and Ansermino, J Mark and Dumont, Guy A},
  title = {Empirical mode decomposition for respiratory and heart rate estimation from the photoplethysmogram},
  year = {2013},
  booktitle = {Computing in Cardiology (CinC)},
  volume = {40},
  publisher = {IEEE},
  address = {Zaragoza},
  editor = {Murray, Alan},
  pages = {799--802},
  tags = {rr signalprocessing},
  file_url = {http://ieeexplore.ieee.org/xpl/articleDetails.jsp?tp=&arnumber=6713498 http://ieeexplore.ieee.org/xpls/abs_all.jsp?arnumber=6713498}
}

@Article{Brouse2013a,
  author = {Brouse, Chris J. and Karlen, Walter and Dumont, Guy A and Myers, Dorothy and Cooke, Erin and Stinson, Jonathan and Lim, Joanne and Ansermino, J Mark},
  title = {Monitoring nociception during general anesthesia with cardiorespiratory coherence},
  abstract = {A novel wavelet transform cardiorespiratory coherence (WTCRC) algorithm has been developed to measure the autonomic state. WTCRC may be used as a nociception index, ranging from 0 (no nociception, strong coherence) to 100 (strong nociception, low coherence). The aim of this study is to estimate the sensitivity of the algorithm to nociception (dental dam insertions) and antinociception (bolus doses of anesthetic drugs). WTCRC's sensitivity is compared to mean heart rate (HRmean) and mean non-invasive blood pressure (NIBPmean), which are commonly used clinical signs.
  Data were collected from 48 children receiving general anesthesia during dental surgery. The times of dental dam insertion and anesthetic bolus events were noted in real-time during surgeries. 42 dental dam insertion and 57 anesthetic bolus events were analyzed. The change in average WTCRC, HRmean, and NIBPmean was calculated between a baseline period before each event and a response period after. A Wilcoxon rank-sum test was used to compare changes. Dental dam insertion changed the WTCRC nociception index by an average of 14 (82{\%}) [95{\%} CI from 7.4 to 19], HRmean by 7.3 beats/min (8.1{\%}) [5.6-9.6], and NIBPmean by 8.3 mmHg (12{\%}) [4.9-13]. A bolus dose of anesthetics changed the WTCRC by -15 (-50{\%}) [-21 to -9.3], HRmean by -4.8 beats/min (4.6{\%}) [-6.6 to -2.9], and NIBPmean by -2.6 mmHg (3.4{\%}) [-4.7 to -0.50]. A nociception index based on cardiorespiratory coherence is more sensitive to nociception and antinociception than are HRmean or NIBPmean. The WTCRC algorithm shows promise for noninvasively monitoring nociception during general anesthesia.},
  year = {2013},
  issn = {1573-2614},
  DOI = {10.1007/s10877-013-9463-4},
  journal = {Journal of Clinical Monitoring and Computing},
  volume = {27},
  pages = {551--60},
  number = {5},
  keywords = {cardiorespiratory coherence, heart rate variability, respiratory sinus arrhythmia, nociception, antinociception, analgesia},
  tags = {anesthesia signalprocessing},
  file_url = {http://www.ncbi.nlm.nih.gov/pubmed/23568315}
}

@Article{Karlen2013a,
  author = {Karlen, Walter and Raman, Srinivas and Ansermino, J Mark and Dumont, Guy A},
  title = {Multiparameter respiratory rate estimation from the photoplethysmogram},
  abstract = {We present a novel method for estimating respiratory rate in real time from the photoplethysmogram (PPG) obtained from pulse oximetry. Three respiratory-induced variations (frequency, intensity, and amplitude) are extracted from the PPG using the Incremental-Merge Segmentation algorithm. Frequency content of each respiratory-induced variation is analyzed using fast Fourier transforms. The proposed Smart Fusion method then combines the results of the three respiratory-induced variations using a transparent mean calculation. It automatically eliminates estimations considered to be unreliable because of detected presence of artifacts in the PPG or disagreement between the different individual respiratory rate estimations. The algorithm has been tested on data obtained from 29 children and 13 adults. Results show that it is important to combine the three respiratory-induced variations for robust estimation of respiratory rate. The Smart Fusion showed trends of improved estimation (mean root mean square error 3.0 breaths/min) compared to the individual estimation methods (5.8, 6.2, and 3.9 breaths/min).
  The Smart Fusion algorithm is being implemented in a mobile phone pulse oximeter device to facilitate the diagnosis of severe childhood pneumonia in remote areas.},
  year = {2013},
  issn = {1558-2531},
  DOI = {10.1109/TBME.2013.2246160},
  journal = {IEEE Transactions on Biomedical Engineering},
  volume = {60},
  pages = {1946--53},
  number = {7},
  keywords = {Adolescent, Adult, Aged, Algorithms, Automated, Automated: methods, Child, Computer Systems, Computer-Assisted, Computer-Assisted: methods, Data Interpretation, Diagnosis, Fourier Analysis, Humans, Infant, Middle Aged, Pattern Recognition, Photoplethysmography, Photoplethysmography: methods, Preschool, Reproducibility of Results, Respiratory Rate, Respiratory Rate: physiology, Sensitivity and Specificity, Statistical, Young Adult, photoplethysmogram, pulse oximeter, respiratory rate},
  tags = {rr signalprocessing ppg},
  file_url = {http://www.ncbi.nlm.nih.gov/pubmed/23399950 https://www.researchgate.net/publication/235521997_Multiparameter_Respiratory_Rate_Estimation_From_the_Photoplethysmogram}
}

@Inproceedings{Brouse2012c,
  author = {Brouse, Christopher J and Karlen, Walter and Myers, Dorothy and Cooke, Erin and Stinson, Jonathan and Lim, Joanne and Ansermino, J Mark},
  title = {Measuring Adequacy of Analgesia with Cardiorespiratory Coherence},
  year = {2012},
  issn = {1526-7598},
  DOI = {10.1213/01.ane.0000418552.16222.39},
  booktitle = {Abstracts of the 2012 Annual Meeting of the Society for Technology in Anesthesia (STA)},
  volume = {115},
  publisher = {Anesthesia and Analgesia},
  address = {West Palm Beach},
  pages = {S8},
  number = {2 Suppl},
  keywords = {Anesthesia, Anesthesiology, Humans, Medical Laboratory Science},
  tags = {anesthesia signalprocessing},
  file_url = {http://www.ncbi.nlm.nih.gov/pubmed/22826529}
}

@Inproceedings{Brouse2011,
  author = {Brouse, Christopher J and Karlen, Walter and Myers, Dorothy and Cooke, Erin and Stinson, Jonathan and Lim, Joanne and Dumont, Guy A and Ansermino, J Mark},
  title = {Wavelet transform cardiorespiratory coherence detects patient movement during general anesthesia},
  abstract = {Heart rate variability (HRV) may provide anesthesiologists with a noninvasive tool for monitoring nociception during general anesthesia. A novel wavelet transform cardiorespiratory coherence (WTCRC) algorithm has been developed to calculate estimates of the linear coupling between heart rate and respiration. WTCRC values range from 1 (high coherence, no nociception) to 0 (low coherence, strong nociception). We have assessed the algorithm's ability to detect movement events (indicative of patient response to nociception) in 39 pediatric patients receiving general anesthesia. Sixty movement events were recorded during the 39 surgical procedures. Minimum and average WTCRC were calculated in a 30 second window surrounding each movement event. We used a 95{\%} significance level as the threshold for detecting nociception during patient movement. The 95{\%} significance level was calculated relative to a red noise background, using Monte Carlo simulations. It was calculated to be 0.7. Values below this threshold were treated as successful detection. The algorithm was found to detect movement with sensitivity ranging from 95{\%} (minimum WTCRC) to 65{\%} (average WTCRC).
  The WTCRC algorithm thus shows promise for noninvasively monitoring nociception during general anesthesia, using only heart rate and respiration.},
  year = {2011},
  month = {8},
  issn = {1557-170X},
  DOI = {10.1109/IEMBS.2011.6091510},
  booktitle = {Annual International Conference of the IEEE Engineering in Medicine and Biology Society (EMBC)},
  volume = {2011},
  pages = {6114--7},
  tags = {signalprocessing},
  file_url = {http://www.ncbi.nlm.nih.gov/pubmed/22255734}
}

@Inproceedings{Karlen2011,
  author = {Karlen, Walter and Petersen, Chris and Gow, Jennifer and Ansermino, J Mark and Dumont, Guy A},
  title = {An Adaptive Single Frequency Phase Vocoder For Low-power Heart Rate Detection},
  abstract = {Mobile phones can be used as a platform for clinical decision making in resource-poor and remote areas. Their limited battery and computational resources, however, demand efficient and low-power algorithms. We present a new algorithm for the fast and economical estimation of heart rate (HR) from the photoplethysmogram (PPG) recorded with a pulse oximeter connected to a mobile phone. The new method estimates the HR frequency by adaptively modeling the PPG wave with a sine function using a modified phase vocoder. The obtained wave is also used as an envelope for the detection of peaks in the PPG signal. HR is computed using the vocoder center frequency and using the peak intervals in a histogram. Experiments on a mobile device show comparable speed performance with other time domain algorithms. Preliminary tests show that the HR computed from the vocoder center frequency is robust to noisy PPG. The instantaneous HR calculated with the vocoder peak detection method was more sensitive to short-term HR variations. These results point to further developments using a combination of both HR estimation methods that will enable the robust implementation of adaptive phase vocoders into mobile phone applications.},
  year = {2011},
  booktitle = {BIODEVICES 2011 - Proceedings of the International Conference on Biomedical Electronics and Devices, Rome, Italy, January 26-29, 2011},
  publisher = {INSTICC Press},
  pages = {30--35},
  keywords = {embedded systems, heart rate estimation, mobile phones, photoplethysmography, pulse detection},
  tags = {ppg signalprocessing}
}