Kim, Andrew S.; Yeung, Chris; Szabo, Robert; Sunderland, Kyle; Hisey, Rebecca; Morton, David; Kikinis, Ron; Diao, Babacar; Mousavi, Parvin; Ungi, Tamas; Fichtinger, Gabor
Percutaneous nephrostomy needle guidance using real-time 3D anatomical visualization with live ultrasound segmentation Proceedings
SPIE, 2024.
@inproceedings{Kim2024,
  title     = {Percutaneous nephrostomy needle guidance using real-time 3D anatomical visualization with live ultrasound segmentation},
  author    = {Andrew S. Kim and Chris Yeung and Robert Szabo and Kyle Sunderland and Rebecca Hisey and David Morton and Ron Kikinis and Babacar Diao and Parvin Mousavi and Tamas Ungi and Gabor Fichtinger},
  editor    = {Maryam E. Rettmann and Jeffrey H. Siewerdsen},
  booktitle = {Medical Imaging 2024: Image-Guided Procedures, Robotic Interventions, and Modeling},
  doi       = {10.1117/12.3006533},
  year      = {2024},
  date      = {2024-03-29},
  urldate   = {2024-03-29},
  publisher = {SPIE},
  abstract  = {
PURPOSE: Percutaneous nephrostomy is a commonly performed procedure to drain urine to provide relief in patients with hydronephrosis. Conventional percutaneous nephrostomy needle guidance methods can be difficult, expensive, or not portable. We propose an open-source real-time 3D anatomical visualization aid for needle guidance with live ultrasound segmentation and 3D volume reconstruction using free, open-source software. METHODS: Basic hydronephrotic kidney phantoms were created, and recordings of these models were manually segmented and used to train a deep learning model that makes live segmentation predictions to perform live 3D volume reconstruction of the fluid-filled cavity. Participants performed 5 needle insertions with the visualization aid and 5 insertions with ultrasound needle guidance on a kidney phantom in randomized order, and these were recorded. Recordings of the trials were analyzed for needle tip distance to the center of the target calyx, needle insertion time, and success rate. Participants also completed a survey on their experience. RESULTS: Using the visualization aid showed significantly higher accuracy, while needle insertion time and success rate were not statistically significant at our sample size. Participants mostly responded positively to the visualization aid, and 80% found it easier to use than ultrasound needle guidance. CONCLUSION: We found that our visualization aid produced increased accuracy and an overall positive experience. We demonstrated that our system is functional and stable and believe that the workflow with this system can be applied to other procedures. This visualization aid system is effective on phantoms and is ready for translation with clinical data.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
PURPOSE: Percutaneous nephrostomy is a commonly performed procedure to drain urine to provide relief in patients with hydronephrosis. Conventional percutaneous nephrostomy needle guidance methods can be difficult, expensive, or not portable. We propose an open-source real-time 3D anatomical visualization aid for needle guidance with live ultrasound segmentation and 3D volume reconstruction using free, open-source software. METHODS: Basic hydronephrotic kidney phantoms were created, and recordings of these models were manually segmented and used to train a deep learning model that makes live segmentation predictions to perform live 3D volume reconstruction of the fluid-filled cavity. Participants performed 5 needle insertions with the visualization aid and 5 insertions with ultrasound needle guidance on a kidney phantom in randomized order, and these were recorded. Recordings of the trials were analyzed for needle tip distance to the center of the target calyx, needle insertion time, and success rate. Participants also completed a survey on their experience. RESULTS: Using the visualization aid showed significantly higher accuracy, while needle insertion time and success rate were not statistically significant at our sample size. Participants mostly responded positively to the visualization aid, and 80% found it easier to use than ultrasound needle guidance. CONCLUSION: We found that our visualization aid produced increased accuracy and an overall positive experience. We demonstrated that our system is functional and stable and believe that the workflow with this system can be applied to other procedures. This visualization aid system is effective on phantoms and is ready for translation with clinical data.
Connolly, Laura; Fooladgar, Fahimeh; Jamzad, Amoon; Kaufmann, Martin; Syeda, Ayesha; Ren, Kevin; Abolmaesumi, Purang; Rudan, John F; McKay, Doug; Fichtinger, Gabor; et al.
ImSpect: Image-driven self-supervised learning for surgical margin evaluation with mass spectrometry Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, pp. 1–8, 2024.
@article{connolly2024imspect,
  title     = {ImSpect: Image-driven self-supervised learning for surgical margin evaluation with mass spectrometry},
  author    = {Laura Connolly and Fahimeh Fooladgar and Amoon Jamzad and Martin Kaufmann and Ayesha Syeda and Kevin Ren and Purang Abolmaesumi and John F Rudan and Doug McKay and Gabor Fichtinger and others},
  doi       = {10.1007/s11548-024-03106-1},
  year      = {2024},
  date      = {2024-01-01},
  urldate   = {2024-01-01},
  journal   = {International Journal of Computer Assisted Radiology and Surgery},
  pages     = {1--8},
  publisher = {Springer},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Radcliffe, Olivia; Connolly, Laura; Ungi, Tamas; Yeo, Caitlin; Rudan, John F.; Fichtinger, Gabor; Mousavi, Parvin
Navigated surgical resection cavity inspection for breast conserving surgery Proceedings
2023.
@proceedings{nokey,
  title     = {Navigated surgical resection cavity inspection for breast conserving surgery},
  author    = {Olivia Radcliffe and Laura Connolly and Tamas Ungi and Caitlin Yeo and John F. Rudan and Gabor Fichtinger and Parvin Mousavi},
  doi       = {10.1117/12.2654015},
  year      = {2023},
  date      = {2023-04-03},
  publisher = {SPIE},
  abstract  = {Up to 40% of Breast Conserving Surgery (BCS) patients must undergo repeat surgery because cancer is left behind in the resection cavity. The mobility of the breast resection cavity makes it difficult to localize residual cancer and, therefore, cavity shaving is a common technique for cancer removal. Cavity shaving involves removing an additional layer of tissue from the entire resection cavity, often resulting in unnecessary healthy tissue loss. In this study, we demonstrated a navigation system and open-source software module that facilitates visualization of the breast resection cavity for targeted localization of residual cancer.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {proceedings}
}
Cernelev, Pavel-Dumitru; Moga, Kristof; Groves, Leah; Haidegger, Tamás; Fichtinger, Gabor; Ungi, Tamas
Determining boundaries of accurate tracking for electromagnetic sensors Conference
SPIE, 2023.
@conference{Cernelev2023,
  author    = {Pavel-Dumitru Cernelev and Kristof Moga and Leah Groves and Tamás Haidegger and Gabor Fichtinger and Tamas Ungi},
  title     = {Determining boundaries of accurate tracking for electromagnetic sensors},
  editor    = {Cristian A. Linte and Jeffrey H. Siewerdsen},
  publisher = {SPIE},
  doi       = {10.1117/12.2654428},
  year      = {2023},
  date      = {2023-04-03},
  urldate   = {2023-04-03},
  pubstate  = {published},
  tppubtype = {conference},
  keywords  = {}
}
Orosz, Gábor; Szabó, Róbert Zsolt; Ungi, Tamás; Barr, Colton; Yeung, Chris; Fichtinger, Gábor; Gál, János; Haidegger, Tamás
Lung Ultrasound Imaging and Image Processing with Artificial Intelligence Methods for Bedside Diagnostic Examinations Journal Article
In: Acta Polytechnica Hungarica, vol. 20, iss. 8, 2023.
@article{fichtinger2023d,
  author    = {Gábor Orosz and Róbert Zsolt Szabó and Tamás Ungi and Colton Barr and Chris Yeung and Gábor Fichtinger and János Gál and Tamás Haidegger},
  title     = {Lung Ultrasound Imaging and Image Processing with Artificial Intelligence Methods for Bedside Diagnostic Examinations},
  journal   = {Acta Polytechnica Hungarica},
  volume    = {20},
  issue     = {8},
  year      = {2023},
  date      = {2023-01-01},
  url       = {https://acta.uni-obuda.hu/Orosz_Szabo_Ungi_Barr_Yeung_Fichtinger_Gal_Haidegger_137.pdf},
  abstract  = {Artificial Intelligence-assisted radiology has shown to offer significant benefits in clinical care. Physicians often face challenges in identifying the underlying causes of acute respiratory failure. One method employed by experts is the utilization of bedside lung ultrasound, although it has a significant learning curve. In our study, we explore the potential of a Machine Learning-based automated decision-support system to assist inexperienced practitioners in interpreting lung ultrasound scans. This system incorporates medical ultrasound, advanced data processing techniques, and a neural network implementation to achieve its objective. The article provides a comprehensive overview of the steps involved in data preparation and the implementation of the neural network. The accuracy and error rate of the most effective model are presented, accompanied by illustrative examples of their predictions. Furthermore, the paper concludes with an evaluation of the results, identification of limitations, and recommendations for future enhancements.},
  pubstate  = {published},
  tppubtype = {article},
  keywords  = {}
}
Nam, Hannah H; Flynn, Maura; Lasso, Andras; Herz, Christian; Sabin, Patricia; Wang, Yan; Cianciulli, Alana; Vigil, Chad; Huang, Jing; Vicory, Jared; Paniagua, Beatriz; Allemang, David; Goldberg, David J; Nuri, Mohammed; Cohen, Meryl S; Fichtinger, Gabor; Jolley, Matthew A
Modeling of the tricuspid valve and right ventricle in hypoplastic left heart syndrome with a Fontan circulation Journal Article
In: Circulation: Cardiovascular Imaging, vol. 16, iss. 3, pp. e014671, 2023.
@article{fichtinger2023c,
  title     = {Modeling of the tricuspid valve and right ventricle in hypoplastic left heart syndrome with a Fontan circulation},
  author    = {Hannah H Nam and Maura Flynn and Andras Lasso and Christian Herz and Patricia Sabin and Yan Wang and Alana Cianciulli and Chad Vigil and Jing Huang and Jared Vicory and Beatriz Paniagua and David Allemang and David J Goldberg and Mohammed Nuri and Meryl S Cohen and Gabor Fichtinger and Matthew A Jolley},
  url       = {https://www.ahajournals.org/doi/abs/10.1161/CIRCIMAGING.122.014671},
  doi       = {10.1161/CIRCIMAGING.122.014671},
  year      = {2023},
  date      = {2023-01-01},
  journal   = {Circulation: Cardiovascular Imaging},
  volume    = {16},
  issue     = {3},
  pages     = {e014671},
  publisher = {Lippincott Williams \& Wilkins},
  abstract  = {Background
In hypoplastic left heart syndrome, tricuspid regurgitation (TR) is associated with circulatory failure and death. We hypothesized that the tricuspid valve (TV) structure of patients with hypoplastic left heart syndrome with a Fontan circulation and moderate or greater TR differs from those with mild or less TR, and that right ventricle volume is associated with TV structure and dysfunction.
Methods
TV of 100 patients with hypoplastic left heart syndrome and a Fontan circulation were modeled using transthoracic 3-dimensional echocardiograms and custom software in SlicerHeart. Associations of TV structure to TR grade and right ventricle function and volume were investigated. Shape parameterization and analysis was used to calculate the mean shape of the TV leaflets, their principal modes of variation, and to characterize associations of TV leaflet shape to TR.
Results
In univariate modeling, patients with …},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
In hypoplastic left heart syndrome, tricuspid regurgitation (TR) is associated with circulatory failure and death. We hypothesized that the tricuspid valve (TV) structure of patients with hypoplastic left heart syndrome with a Fontan circulation and moderate or greater TR differs from those with mild or less TR, and that right ventricle volume is associated with TV structure and dysfunction.
Methods
TV of 100 patients with hypoplastic left heart syndrome and a Fontan circulation were modeled using transthoracic 3-dimensional echocardiograms and custom software in SlicerHeart. Associations of TV structure to TR grade and right ventricle function and volume were investigated. Shape parameterization and analysis was used to calculate the mean shape of the TV leaflets, their principal modes of variation, and to characterize associations of TV leaflet shape to TR.
Results
In univariate modeling, patients with …
Kitner, Nicole; Rodgers, Jessica R; Ungi, Tamas; Korzeniowski, Martin; Olding, Timothy; Mousavi, Parvin; Fichtinger, Gabor
Multi-catheter modelling in reconstructed 3D transrectal ultrasound images from prostate brachytherapy Journal Article
In: vol. 12466, pp. 126-135, 2023.
@article{fichtinger2023b,
  title     = {Multi-catheter modelling in reconstructed 3D transrectal ultrasound images from prostate brachytherapy},
  author    = {Nicole Kitner and Jessica R Rodgers and Tamas Ungi and Martin Korzeniowski and Timothy Olding and Parvin Mousavi and Gabor Fichtinger},
  url       = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/12466/124660I/Multi-catheter-modelling-in-reconstructed-3D-transrectal-ultrasound-images-from/10.1117/12.2654019.short},
  doi       = {10.1117/12.2654019},
  year      = {2023},
  date      = {2023-01-01},
  journal   = {Medical Imaging 2023: Image-Guided Procedures, Robotic Interventions, and Modeling (Proceedings of SPIE)},
  volume    = {12466},
  pages     = {126-135},
  publisher = {SPIE},
  abstract  = {High-dose-rate brachytherapy is an accepted standard-of-care treatment for prostate cancer. In this procedure, catheters are inserted using three-dimensional (3D) transrectal ultrasound image-guidance. Their positions are manually segmented for treatment planning and delivery. The transverse ultrasound sweep, which is subject to tip and depth error for catheter localization, is a commonly used ultrasound imaging option available for image acquisition. We propose a two-step pipeline that uses a deep-learning network and curve fitting to automatically localize and model catheters in transversely reconstructed 3D ultrasound images. In the first step, a 3D U-Net was trained to automatically segment all catheters in a 3D ultrasound image. Following this step, curve fitting was implemented to detect the shapes of individual catheters using polynomial fitting. Of the 343 catheters (from 20 patients) in the testing data, the …},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Kaufmann, Martin; Iaboni, Natasha; Jamzad, Amoon; Hurlbut, David; Ren, Kevin Yi Mi; Rudan, John F; Mousavi, Parvin; Fichtinger, Gabor; Varma, Sonal; Caycedo-Marulanda, Antonio; Nicol, Christopher JB
Metabolically active zones involving fatty acid elongation delineated by DESI-MSI correlate with pathological and prognostic features of colorectal cancer Journal Article
In: Metabolites, vol. 13, iss. 4, pp. 508, 2023.
@article{fichtinger2023,
  author    = {Martin Kaufmann and Natasha Iaboni and Amoon Jamzad and David Hurlbut and Kevin Yi Mi Ren and John F Rudan and Parvin Mousavi and Gabor Fichtinger and Sonal Varma and Antonio Caycedo-Marulanda and Christopher JB Nicol},
  title     = {Metabolically active zones involving fatty acid elongation delineated by DESI-MSI correlate with pathological and prognostic features of colorectal cancer},
  journal   = {Metabolites},
  volume    = {13},
  issue     = {4},
  pages     = {508},
  publisher = {MDPI},
  year      = {2023},
  date      = {2023-01-01},
  url       = {https://www.mdpi.com/2218-1989/13/4/508},
  abstract  = {Colorectal cancer (CRC) is the second leading cause of cancer deaths. Despite recent advances, five-year survival rates remain largely unchanged. Desorption electrospray ionization mass spectrometry imaging (DESI) is an emerging nondestructive metabolomics-based method that retains the spatial orientation of small-molecule profiles on tissue sections, which may be validated by ‘gold standard’ histopathology. In this study, CRC samples were analyzed by DESI from 10 patients undergoing surgery at Kingston Health Sciences Center. The spatial correlation of the mass spectral profiles was compared with histopathological annotations and prognostic biomarkers. Fresh frozen sections of representative colorectal cross sections and simulated endoscopic biopsy samples containing tumour and non-neoplastic mucosa for each patient were generated and analyzed by DESI in a blinded fashion. Sections were then hematoxylin and eosin (H and E) stained, annotated by two independent pathologists, and analyzed. Using PCA/LDA-based models, DESI profiles of the cross sections and biopsies achieved 97% and 75% accuracies in identifying the presence of adenocarcinoma, using leave-one-patient-out cross validation. Among the m/z ratios exhibiting the greatest differential abundance in adenocarcinoma were a series of eight long-chain or very-long-chain fatty acids, consistent with molecular and targeted metabolomics indicators of de novo lipogenesis in CRC tissue. Sample stratification based on the presence of lympovascular invasion (LVI), a poor CRC prognostic indicator, revealed the abundance of oxidized phospholipids, suggestive …},
  pubstate  = {published},
  tppubtype = {article},
  keywords  = {}
}
Fooladgar, Fahimeh; Jamzad, Amoon; Connolly, Laura; Santilli, Alice; Kaufmann, Martin; Ren, Kevin; Abolmaesumi, Purang; Rudan, John; McKay, Doug; Fichtinger, Gabor; Mousavi, Parvin
Uncertainty estimation for margin detection in cancer surgery using mass spectrometry Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, 2022.
@article{Fooladgar2022,
  title     = {Uncertainty estimation for margin detection in cancer surgery using mass spectrometry},
  author    = {Fahimeh Fooladgar and Amoon Jamzad and Laura Connolly and Alice Santilli and Martin Kaufmann and Kevin Ren and Purang Abolmaesumi and John Rudan and Doug McKay and Gabor Fichtinger and Parvin Mousavi},
  doi       = {10.1007/s11548-022-02764-3},
  year      = {2022},
  date      = {2022-09-01},
  journal   = {International Journal of Computer Assisted Radiology and Surgery},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Kitner, Nicole; Rodgers, Jessica R.; Ungi, Tamas; Olding, Timothy; Joshi, C. P.; Mousavi, Parvin; Fichtinger, Gabor; Korzeniowski, Martin
Automated catheter localization in ultrasound images from High-dose-rate prostate brachytherapy using deep learning and feature extraction Conference
Canadian Association for Radiation Oncologists (CARO) Annual Scientific Meeting, Canadian Association for Radiation Oncologists, 2022.
@conference{Kitner2022ab,
  author    = {Nicole Kitner and Jessica R. Rodgers and Tamas Ungi and Timothy Olding and C. P. Joshi and Parvin Mousavi and Gabor Fichtinger and Martin Korzeniowski},
  title     = {Automated catheter localization in ultrasound images from High-dose-rate prostate brachytherapy using deep learning and feature extraction},
  booktitle = {Canadian Association for Radiation Oncologists (CARO) Annual Scientific Meeting},
  publisher = {Canadian Association for Radiation Oncologists},
  year      = {2022},
  date      = {2022-09-01},
  urldate   = {2022-09-01},
  pubstate  = {published},
  tppubtype = {conference},
  keywords  = {}
}
Connolly, Laura; Degeut, Anton; Leonard, Simon; Tokuda, Junichi; Ungi, Tamas; Krieger, Axel; Kazanzides, Peter; Mousavi, Parvin; Fichtinger, Gabor; Taylor, Russell H.
Bridging 3D Slicer and ROS2 for Image-Guided Robotic Interventions Journal Article
In: Sensors, vol. 22, 2022.
@article{Connolly2022c,
  title     = {Bridging 3D Slicer and ROS2 for Image-Guided Robotic Interventions},
  author    = {Laura Connolly and Anton Deguet and Simon Leonard and Junichi Tokuda and Tamas Ungi and Axel Krieger and Peter Kazanzides and Parvin Mousavi and Gabor Fichtinger and Russell H. Taylor},
  doi       = {10.3390/s22145336},
  year      = {2022},
  date      = {2022-07-01},
  journal   = {Sensors},
  volume    = {22},
  number    = {14},
  pages     = {5336},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Kitner, Nicole; Rodgers, Jessica R.; Ungi, Tamas; Korzeniowski, Martin; Olding, Timothy; Joshi, C. P.; Mousavi, Parvin; Fichtinger, Gabor
Automatic catheter modelling in 3D transrectal ultrasound images from high-dose-rate prostate brachytherapy using a deep learning and feature extraction pipeline Conference
Canadian Organization of Medical Physicists (COMP) Annual Scientific Meeting, Canadian Organization of Medical Physicists, 2022.
@conference{Kitner2022a,
  title     = {Automatic catheter modelling in 3D transrectal ultrasound images from high-dose-rate prostate brachytherapy using a deep learning and feature extraction pipeline},
  author    = {Nicole Kitner and Jessica R. Rodgers and Tamas Ungi and Martin Korzeniowski and Timothy Olding and C. P. Joshi and Parvin Mousavi and Gabor Fichtinger},
  year      = {2022},
  date      = {2022-06-01},
  urldate   = {2022-06-01},
  booktitle = {Canadian Organization of Medical Physicists (COMP) Annual Scientific Meeting},
  publisher = {Canadian Organization of Medical Physicists},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Hu, Zoe; Fauerbach, Paola V. Nasute; Yeung, Chris; Ungi, Tamas; Rudan, John; Engel, C. Jay; Mousavi, Parvin; Fichtinger, Gabor; Jabs, Doris
Real-time automatic tumor segmentation for ultrasound-guided breast-conserving surgery navigation Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, vol. 17, no. 9, pp. 1663–1672, 2022.
@article{Hu2022,
  title     = {Real-time automatic tumor segmentation for ultrasound-guided breast-conserving surgery navigation},
  author    = {Zoe Hu and Paola V. Nasute Fauerbach and Chris Yeung and Tamas Ungi and John Rudan and C. Jay Engel and Parvin Mousavi and Gabor Fichtinger and Doris Jabs},
  doi       = {10.1007/s11548-022-02658-4},
  year      = {2022},
  date      = {2022-05-01},
  urldate   = {2022-05-01},
  journal   = {International Journal of Computer Assisted Radiology and Surgery},
  volume    = {17},
  number    = {9},
  pages     = {1663--1672},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Connolly, Laura; Jamzad, Amoon; Nikniazi, Arash; Poushimin, Rana; Nunzi, Jean Michel; Rudan, John; Fichtinger, Gabor; Mousavi, Parvin
Feasibility of combined optical and acoustic imaging for surgical cavity scanning Conference
SPIE Medical Imaging 2022: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 12034, San Diego (online), 2022.
@conference{Connolly2022,
  title     = {Feasibility of combined optical and acoustic imaging for surgical cavity scanning},
  author    = {Laura Connolly and Amoon Jamzad and Arash Nikniazi and Rana Poushimin and Jean Michel Nunzi and John Rudan and Gabor Fichtinger and Parvin Mousavi},
  doi       = {10.1117/12.2611964},
  year      = {2022},
  date      = {2022-04-01},
  booktitle = {SPIE Medical Imaging 2022: Image-Guided Procedures, Robotic Interventions, and Modeling},
  volume    = {12034},
  address   = {San Diego (online)},
  abstract  = {PURPOSE: Over 30% of breast conserving surgery patients must undergo repeat surgery to address incomplete tumor resection. We hypothesize that the addition of a robotic cavity scanning system can improve the success rates of these procedures by performing additional, intraoperative imaging to detect left-over cancer cells. In this study, we assess the feasibility of a combined optical and acoustic imaging approach for this cavity scanning system. METHODS: Dual-layer tissue phantoms are imaged with both throughput broadband spectroscopy and an endocavity ultrasound probe. The absorbance and transmittance of the incident light from the broadband source is used to characterize each tissue sample optically. Additionally, a temporally enhanced ultrasound approach is used to distinguish the heterogeneity of the tissue sample by classifying individual pixels in the ultrasound image with a support vector machine. The goal of this combined approach is to use optical characterization to classify the tissue surface, and acoustic characterization to classify the sample heterogeneity. RESULTS: Both optical and acoustic characterization demonstrated promising preliminary results. The class of each tissue sample is distinctly separable based on the transmittance and absorption of the broadband light. Additionally, an SVM trained on the temporally enhance ultrasound signals for each tissue type, showed 82% linear separability of labelled temporally enhanced ultrasound sequences in our test set. CONCLUSIONS: By combining broadband and ultrasound imaging, we demonstrate a potential non-destructive imaging approach for this robotic cavity scanning system. With this approach, our system can detect both surface level tissue characteristics and depth information. Applying this to breast conserving surgery can help inform the surgeon about the tissue composition of the resection cavity after initial tumor resection.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Klosa, Elizabeth; Hisey, R.; Nazari, Tahmina; Wiggers, Theo; Zevin, Boris; Ungi, Tamas; Fichtinger, Gabor
Tissue segmentation for workflow recognition in open inguinal hernia repair training Conference
SPIE Medical Imaging, SPIE Medical Imaging SPIE Medical Imaging, San Diego, 2022.
@conference{Klosa2022a,
  author       = {Elizabeth Klosa and R. Hisey and Tahmina Nazari and Theo Wiggers and Boris Zevin and Tamas Ungi and Gabor Fichtinger},
  title        = {Tissue segmentation for workflow recognition in open inguinal hernia repair training},
  booktitle    = {SPIE Medical Imaging},
  publisher    = {SPIE Medical Imaging},
  organization = {SPIE Medical Imaging},
  address      = {San Diego},
  year         = {2022},
  date         = {2022-02-01},
  urldate      = {2022-02-01},
  url          = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Klosa2022a.pdf},
  abstract     = {PURPOSE: As medical education adopts a competency-based training method, experts are spending substantial amounts of time instructing and assessing trainees’ competence. In this study, we look to develop a computer-assisted training platform that can provide instruction and assessment of open inguinal hernia repairs without needing an expert observer. We recognize workflow tasks based on the tool-tissue interactions, suggesting that we first need a method to identify tissues. This study aims to train a neural network in identifying tissues in a low-cost phantom as we work towards identifying the tool-tissue interactions needed for task recognition. METHODS: Eight simulated tissues were segmented throughout five videos from experienced surgeons who performed open inguinal hernia repairs on phantoms. A U-Net was trained using leave-one-user-out cross validation. The average F-score, false positive rate and false negative rate were calculated for each tissue to evaluate the U-Net’s performance. RESULTS: Higher F-scores and lower false negative and positive rates were recorded for the skin, hernia sac, spermatic cord, and nerves, while slightly lower metrics were recorded for the subcutaneous tissue, Scarpa’s fascia, external oblique aponeurosis and superficial epigastric vessels. CONCLUSION: The U-Net performed better in recognizing tissues that were relatively larger in size and more prevalent, while struggling to recognize smaller tissues only briefly visible. Since workflow recognition does not require perfect segmentation, we believe our U-Net is sufficient in recognizing the tissues of an inguinal hernia repair phantom. Future studies will explore combining our segmentation U-Net with tool detection as we work towards workflow recognition.},
  pubstate     = {published},
  tppubtype    = {conference},
  keywords     = {}
}
Klosa, Elizabeth; Hisey, R.; Nazari, Tahmina; Wiggers, Theo; Zevin, Boris; Ungi, Tamas; Fichtinger, Gabor
Identifying tissues for task recognition in training of open inguinal hernia repairs Conference
Imaging Network of Ontario Symposium, 2022.
@conference{Klosa2022b,
  author    = {Elizabeth Klosa and R. Hisey and Tahmina Nazari and Theo Wiggers and Boris Zevin and Tamas Ungi and Gabor Fichtinger},
  title     = {Identifying tissues for task recognition in training of open inguinal hernia repairs},
  booktitle = {Imaging Network of Ontario Symposium},
  year      = {2022},
  date      = {2022-02-01},
  urldate   = {2022-02-01},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/03/Klosa2022b.pdf},
  pubstate  = {published},
  tppubtype = {conference},
  keywords  = {}
}
O’Driscoll, Olivia; Hisey, R.; Holden, M.; Camire, Daenis; Erb, Jason; Howes, Daniel; Ungi, Tamas; Fichtinger, Gabor
Feasibility of using object detection for performance assessment in central venous catherization Conference
Imaging Network of Ontario Symposium, 2022.
@conference{ODriscoll2022b,
  author    = {Olivia O’Driscoll and R. Hisey and M. Holden and Daenis Camire and Jason Erb and Daniel Howes and Tamas Ungi and Gabor Fichtinger},
  title     = {Feasibility of using object detection for performance assessment in central venous catherization},
  booktitle = {Imaging Network of Ontario Symposium},
  year      = {2022},
  date      = {2022-02-01},
  urldate   = {2022-02-01},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/ODriscoll2021b.pdf},
  pubstate  = {published},
  tppubtype = {conference},
  keywords  = {}
}
O’Driscoll, Olivia; Hisey, R.; Holden, M.; Camire, Daenis; Erb, Jason; Howes, Daniel; Ungi, Tamas; Fichtinger, Gabor
Feasibility of object detection for skill assessment in central venous catheterization Conference
SPIE Medical Imaging, SPIE Medical Imaging SPIE Medical Imaging, San Diego, 2022.
@conference{ODriscoll2022a,
  title        = {Feasibility of object detection for skill assessment in central venous catheterization},
  author       = {Olivia O’Driscoll and R. Hisey and M. Holden and Daenis Camire and Jason Erb and Daniel Howes and Tamas Ungi and Gabor Fichtinger},
  url          = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/ODriscoll2022a.pdf},
  year         = {2022},
  date         = {2022-02-01},
  urldate      = {2022-02-01},
  booktitle    = {SPIE Medical Imaging},
  publisher    = {SPIE Medical Imaging},
  address      = {San Diego},
  organization = {SPIE Medical Imaging},
  abstract     = {Purpose: Computer-assisted surgical skill assessment methods have traditionally relied on tracking tool motion with physical sensors. These tracking systems can be expensive, bulky, and impede tool function. Recent advances in object detection networks have made it possible to quantify tool motion using only a camera. These advances open the door for a low-cost alternative to current physical tracking systems for surgical skill assessment. This study determines the feasibility of using metrics computed with object detection by comparing them to widely accepted metrics computed using traditional tracking methods in central venous catheterization. Methods: Both video and tracking data were recorded from participants performing central venous catheterization on a venous access phantom. A Faster Region-Based Convolutional Neural Network was trained to recognize the ultrasound probe and syringe on the video data. Tracking-based metrics were computed using the Perk Tutor extension of 3D Slicer. The path length and usage time for each tool were then computed using both the video and tracking data. The metrics from object detection and tracking were compared using Spearman rank correlation. Results: The path lengths had a rank correlation coefficient of 0.22 for the syringe ($p<0.03$) and 0.35 ($p<0.001$) for the ultrasound probe. For the usage times, the correlation coefficient was 0.37 ($p<0.001$) for the syringe and 0.34 ($p<0.001$) for the ultrasound probe. Conclusions: The video-based metrics correlated significantly with the tracked metrics, suggesting that object detection could be a feasible skill assessment method for central venous catheterization.},
  keywords     = {},
  pubstate     = {published},
  tppubtype    = {conference}
}
Kitner, Nicole; Rodgers, Jessica R.; Ungi, Tamas; Korzeniowski, Martin; Olding, Timothy; Joshi, C. P.; Mousavi, Parvin; Fichtinger, Gabor
Automated Catheter Segmentation in 3D Ultrasound Images from High-Dose-Rate Prostate Brachytherapy Conference
Imaging Network Ontario (IMNO) 2022 Symposium, Imaging Network of Ontario, Online, 2022.
@conference{Kitner2022ac,
  author    = {Nicole Kitner and Jessica R. Rodgers and Tamas Ungi and Martin Korzeniowski and Timothy Olding and C. P. Joshi and Parvin Mousavi and Gabor Fichtinger},
  title     = {Automated Catheter Segmentation in 3D Ultrasound Images from High-Dose-Rate Prostate Brachytherapy},
  booktitle = {Imaging Network Ontario (IMNO) 2022 Symposium},
  publisher = {Imaging Network of Ontario},
  address   = {Online},
  year      = {2022},
  date      = {2022-02-01},
  urldate   = {2022-02-01},
  pubstate  = {published},
  tppubtype = {conference},
  keywords  = {}
}
Ehrlich, Josh; Jamzad, Amoon; Asselin, Mark; Rodgers, Jessica Robin; Kaufmann, Martin; Haidegger, Tamas; Rudan, John; Mousavi, Parvin; Fichtinger, Gabor; Ungi, Tamas
Sensor-Based Automated Detection of Electrosurgical Cautery States Journal Article
In: Sensors, vol. 22, iss. 15, pp. 5808, 2022.
@article{fichtinger2022i,
  author    = {Josh Ehrlich and Amoon Jamzad and Mark Asselin and Jessica Robin Rodgers and Martin Kaufmann and Tamas Haidegger and John Rudan and Parvin Mousavi and Gabor Fichtinger and Tamas Ungi},
  title     = {Sensor-Based Automated Detection of Electrosurgical Cautery States},
  journal   = {Sensors},
  volume    = {22},
  issue     = {15},
  pages     = {5808},
  publisher = {MDPI},
  year      = {2022},
  date      = {2022-01-01},
  url       = {https://www.mdpi.com/1424-8220/22/15/5808},
  abstract  = {In computer-assisted surgery, it is typically required to detect when the tool comes into contact with the patient. In activated electrosurgery, this is known as the energy event. By continuously tracking the electrosurgical tools’ location using a navigation system, energy events can help determine locations of sensor-classified tissues. Our objective was to detect the energy event and determine the settings of electrosurgical cautery—robustly and automatically based on sensor data. This study aims to demonstrate the feasibility of using the cautery state to detect surgical incisions, without disrupting the surgical workflow. We detected current changes in the wires of the cautery device and grounding pad using non-invasive current sensors and an oscilloscope. An open-source software was implemented to apply machine learning on sensor data to detect energy events and cautery settings. Our methods classified each cautery state at an average accuracy of 95.56% across different tissue types and energy level parameters altered by surgeons during an operation. Our results demonstrate the feasibility of automatically identifying energy events during surgical incisions, which could be an important safety feature in robotic and computer-integrated surgery. This study provides a key step towards locating tissue classifications during breast cancer operations and reducing the rate of positive margins.},
  pubstate  = {published},
  tppubtype = {article},
  keywords  = {}
}