Kim, Andrew S.; Yeung, Chris; Szabo, Robert; Sunderland, Kyle; Hisey, Rebecca; Morton, David; Kikinis, Ron; Diao, Babacar; Mousavi, Parvin; Ungi, Tamas; Fichtinger, Gabor
Percutaneous nephrostomy needle guidance using real-time 3D anatomical visualization with live ultrasound segmentation Proceedings
SPIE, 2024.
@proceedings{Kim2024,
title = {Percutaneous nephrostomy needle guidance using real-time {3D} anatomical visualization with live ultrasound segmentation},
author = {Andrew S. Kim and Chris Yeung and Robert Szabo and Kyle Sunderland and Rebecca Hisey and David Morton and Ron Kikinis and Babacar Diao and Parvin Mousavi and Tamas Ungi and Gabor Fichtinger},
editor = {Maryam E. Rettmann and Jeffrey H. Siewerdsen},
doi = {10.1117/12.3006533},
year = {2024},
date = {2024-03-29},
urldate = {2024-03-29},
publisher = {SPIE},
abstract = {
PURPOSE: Percutaneous nephrostomy is a commonly performed procedure to drain urine to provide relief in patients with hydronephrosis. Conventional percutaneous nephrostomy needle guidance methods can be difficult, expensive, or not portable. We propose an open-source real-time 3D anatomical visualization aid for needle guidance with live ultrasound segmentation and 3D volume reconstruction using free, open-source software. METHODS: Basic hydronephrotic kidney phantoms were created, and recordings of these models were manually segmented and used to train a deep learning model that makes live segmentation predictions to perform live 3D volume reconstruction of the fluid-filled cavity. Participants performed 5 needle insertions with the visualization aid and 5 insertions with ultrasound needle guidance on a kidney phantom in randomized order, and these were recorded. Recordings of the trials were analyzed for needle tip distance to the center of the target calyx, needle insertion time, and success rate. Participants also completed a survey on their experience. RESULTS: Using the visualization aid showed significantly higher accuracy, while needle insertion time and success rate were not statistically significant at our sample size. Participants mostly responded positively to the visualization aid, and 80% found it easier to use than ultrasound needle guidance. CONCLUSION: We found that our visualization aid produced increased accuracy and an overall positive experience. We demonstrated that our system is functional and stable and believe that the workflow with this system can be applied to other procedures. This visualization aid system is effective on phantoms and is ready for translation with clinical data.},
keywords = {},
pubstate = {published},
tppubtype = {proceedings}
}
PURPOSE: Percutaneous nephrostomy is a commonly performed procedure to drain urine to provide relief in patients with hydronephrosis. Conventional percutaneous nephrostomy needle guidance methods can be difficult, expensive, or not portable. We propose an open-source real-time 3D anatomical visualization aid for needle guidance with live ultrasound segmentation and 3D volume reconstruction using free, open-source software. METHODS: Basic hydronephrotic kidney phantoms were created, and recordings of these models were manually segmented and used to train a deep learning model that makes live segmentation predictions to perform live 3D volume reconstruction of the fluid-filled cavity. Participants performed 5 needle insertions with the visualization aid and 5 insertions with ultrasound needle guidance on a kidney phantom in randomized order, and these were recorded. Recordings of the trials were analyzed for needle tip distance to the center of the target calyx, needle insertion time, and success rate. Participants also completed a survey on their experience. RESULTS: Using the visualization aid showed significantly higher accuracy, while needle insertion time and success rate were not statistically significant at our sample size. Participants mostly responded positively to the visualization aid, and 80% found it easier to use than ultrasound needle guidance. CONCLUSION: We found that our visualization aid produced increased accuracy and an overall positive experience. We demonstrated that our system is functional and stable and believe that the workflow with this system can be applied to other procedures. This visualization aid system is effective on phantoms and is ready for translation with clinical data.
Connolly, Laura; Fooladgar, Fahimeh; Jamzad, Amoon; Kaufmann, Martin; Syeda, Ayesha; Ren, Kevin; Abolmaesumi, Purang; Rudan, John F; McKay, Doug; Fichtinger, Gabor; others,
ImSpect: Image-driven self-supervised learning for surgical margin evaluation with mass spectrometry Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, pp. 1–8, 2024.
@article{connolly2024imspect,
title = {{ImSpect}: Image-driven self-supervised learning for surgical margin evaluation with mass spectrometry},
author = {Laura Connolly and Fahimeh Fooladgar and Amoon Jamzad and Martin Kaufmann and Ayesha Syeda and Kevin Ren and Purang Abolmaesumi and John F Rudan and Doug McKay and Gabor Fichtinger and others},
doi = {10.1007/s11548-024-03106-1},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
pages = {1--8},
publisher = {Springer},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Radcliffe, Olivia; Connolly, Laura; Ungi, Tamas; Yeo, Caitlin; Rudan, John F.; Fichtinger, Gabor; Mousavi, Parvin
Navigated surgical resection cavity inspection for breast conserving surgery Proceedings
2023.
@proceedings{Radcliffe2023,
title = {Navigated surgical resection cavity inspection for breast conserving surgery},
author = {Olivia Radcliffe and Laura Connolly and Tamas Ungi and Caitlin Yeo and John F. Rudan and Gabor Fichtinger and Parvin Mousavi},
doi = {10.1117/12.2654015},
year = {2023},
date = {2023-04-03},
abstract = {Up to 40% of Breast Conserving Surgery (BCS) patients must undergo repeat surgery because cancer is left behind in the resection cavity. The mobility of the breast resection cavity makes it difficult to localize residual cancer and, therefore, cavity shaving is a common technique for cancer removal. Cavity shaving involves removing an additional layer of tissue from the entire resection cavity, often resulting in unnecessary healthy tissue loss. In this study, we demonstrated a navigation system and open-source software module that facilitates visualization of the breast resection cavity for targeted localization of residual cancer.},
keywords = {},
pubstate = {published},
tppubtype = {proceedings}
}
Cernelev, Pavel-Dumitru; Moga, Kristof; Groves, Leah; Haidegger, Tamás; Fichtinger, Gabor; Ungi, Tamas
Determining boundaries of accurate tracking for electromagnetic sensors Conference
SPIE, 2023.
@conference{Cernelev2023,
  title     = {Determining boundaries of accurate tracking for electromagnetic sensors},
  author    = {Pavel-Dumitru Cernelev and Kristof Moga and Leah Groves and Tamás Haidegger and Gabor Fichtinger and Tamas Ungi},
  editor    = {Cristian A. Linte and Jeffrey H. Siewerdsen},
  publisher = {SPIE},
  doi       = {10.1117/12.2654428},
  year      = {2023},
  date      = {2023-04-03},
  urldate   = {2023-04-03},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Fooladgar, Fahimeh; Jamzad, Amoon; Connolly, Laura; Santilli, Alice; Kaufmann, Martin; Ren, Kevin; Abolmaesumi, Purang; Rudan, John; McKay, Doug; Fichtinger, Gabor; Mousavi, Parvin
Uncertainty estimation for margin detection in cancer surgery using mass spectrometry Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, 2022.
@article{Fooladgar2022,
title = {Uncertainty estimation for margin detection in cancer surgery using mass spectrometry},
author = {Fahimeh Fooladgar and Amoon Jamzad and Laura Connolly and Alice Santilli and Martin Kaufmann and Kevin Ren and Purang Abolmaesumi and John Rudan and Doug McKay and Gabor Fichtinger and Parvin Mousavi},
doi = {https://doi.org/10.1007/s11548-022-02764-3},
year = {2022},
date = {2022-09-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Kitner, Nicole; Rodgers, Jessica R.; Ungi, Tamas; Olding, Timothy; Joshi, C. P.; Mousavi, Parvin; Fichtinger, Gabor; Korzeniowski, Martin
Automated catheter localization in ultrasound images from High-dose-rate prostate brachytherapy using deep learning and feature extraction Conference
Canadian Association for Radiation Oncologists (CARO) Annual Scientific Meeting, Canadian Association for Radiation Oncologists, 2022.
@conference{Kitner2022ab,
  title     = {Automated catheter localization in ultrasound images from High-dose-rate prostate brachytherapy using deep learning and feature extraction},
  author    = {Nicole Kitner and Jessica R. Rodgers and Tamas Ungi and Timothy Olding and C. P. Joshi and Parvin Mousavi and Gabor Fichtinger and Martin Korzeniowski},
  booktitle = {Canadian Association for Radiation Oncologists (CARO) Annual Scientific Meeting},
  publisher = {Canadian Association for Radiation Oncologists},
  year      = {2022},
  date      = {2022-09-01},
  urldate   = {2022-09-01},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Connolly, Laura; Degeut, Anton; Leonard, Simon; Tokuda, Junichi; Ungi, Tamas; Krieger, Axel; Kazanzides, Peter; Mousavi, Parvin; Fichtinger, Gabor; Taylor, Russell H.
Bridging 3D Slicer and ROS2 for Image-Guided Robotic Interventions Journal Article
In: Sensors, vol. 22, 2022.
@article{Connolly2022c,
title = {Bridging {3D Slicer} and {ROS2} for Image-Guided Robotic Interventions},
author = {Laura Connolly and Anton Degeut and Simon Leonard and Junichi Tokuda and Tamas Ungi and Axel Krieger and Peter Kazanzides and Parvin Mousavi and Gabor Fichtinger and Russell H. Taylor},
doi = {10.3390/s22145336},
year = {2022},
date = {2022-07-01},
journal = {Sensors},
volume = {22},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Kitner, Nicole; Rodgers, Jessica R.; Ungi, Tamas; Korzeniowski, Martin; Olding, Timothy; Joshi, C. P.; Mousavi, Parvin; Fichtinger, Gabor
Automatic catheter modelling in 3D transrectal ultrasound images from high-dose-rate prostate brachytherapy using a deep learning and feature extraction pipeline Conference
Canadian Organization of Medical Physicists (COMP) Annual Scientific Meeting, Canadian Organization of Medical Physicists, 2022.
@conference{Kitner2022a,
title = {Automatic catheter modelling in {3D} transrectal ultrasound images from high-dose-rate prostate brachytherapy using a deep learning and feature extraction pipeline},
author = {Nicole Kitner and Jessica R. Rodgers and Tamas Ungi and Martin Korzeniowski and Timothy Olding and C. P. Joshi and Parvin Mousavi and Gabor Fichtinger},
year = {2022},
date = {2022-06-01},
urldate = {2022-06-01},
booktitle = {Canadian Organization of Medical Physicists (COMP) Annual Scientific Meeting},
publisher = {Canadian Organization of Medical Physicists},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Hu, Zoe; Fauerbach, Paola V. Nasute; Yeung, Chris; Ungi, Tamas; Rudan, John; Engel, C. Jay; Mousavi, Parvin; Fichtinger, Gabor; Jabs, Doris
Real-time automatic tumor segmentation for ultrasound-guided breast-conserving surgery navigation Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, vol. 17, no. 9, pp. 1663–1672, 2022.
@article{Hu2022,
title = {Real-time automatic tumor segmentation for ultrasound-guided breast-conserving surgery navigation},
author = {Zoe Hu and Paola V. Nasute Fauerbach and Chris Yeung and Tamas Ungi and John Rudan and C. Jay Engel and Parvin Mousavi and Gabor Fichtinger and Doris Jabs},
doi = {10.1007/s11548-022-02658-4},
year = {2022},
date = {2022-05-01},
urldate = {2022-05-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
volume = {17},
number = {9},
pages = {1663--1672},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Connolly, Laura; Jamzad, Amoon; Nikniazi, Arash; Poushimin, Rana; Nunzi, Jean Michel; Rudan, John; Fichtinger, Gabor; Mousavi, Parvin
Feasibility of combined optical and acoustic imaging for surgical cavity scanning Conference
SPIE Medical Imaging 2022: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 12034, San Diego (online), 2022.
@conference{Connolly2022,
title = {Feasibility of combined optical and acoustic imaging for surgical cavity scanning},
author = {Laura Connolly and Amoon Jamzad and Arash Nikniazi and Rana Poushimin and Jean Michel Nunzi and John Rudan and Gabor Fichtinger and Parvin Mousavi},
doi = {10.1117/12.2611964},
year = {2022},
date = {2022-04-01},
booktitle = {SPIE Medical Imaging 2022: Image-Guided Procedures, Robotic Interventions, and Modeling},
volume = {12034},
address = {San Diego (online)},
abstract = {PURPOSE: Over 30% of breast conserving surgery patients must undergo repeat surgery to address incomplete tumor resection. We hypothesize that the addition of a robotic cavity scanning system can improve the success rates of these procedures by performing additional, intraoperative imaging to detect left-over cancer cells. In this study, we assess the feasibility of a combined optical and acoustic imaging approach for this cavity scanning system. METHODS: Dual-layer tissue phantoms are imaged with both throughput broadband spectroscopy and an endocavity ultrasound probe. The absorbance and transmittance of the incident light from the broadband source is used to characterize each tissue sample optically. Additionally, a temporally enhanced ultrasound approach is used to distinguish the heterogeneity of the tissue sample by classifying individual pixels in the ultrasound image with a support vector machine. The goal of this combined approach is to use optical characterization to classify the tissue surface, and acoustic characterization to classify the sample heterogeneity. RESULTS: Both optical and acoustic characterization demonstrated promising preliminary results. The class of each tissue sample is distinctly separable based on the transmittance and absorption of the broadband light. Additionally, an SVM trained on the temporally enhance ultrasound signals for each tissue type, showed 82% linear separability of labelled temporally enhanced ultrasound sequences in our test set. CONCLUSIONS: By combining broadband and ultrasound imaging, we demonstrate a potential non-destructive imaging approach for this robotic cavity scanning system. With this approach, our system can detect both surface level tissue characteristics and depth information. Applying this to breast conserving surgery can help inform the surgeon about the tissue composition of the resection cavity after initial tumor resection.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Klosa, Elizabeth; Hisey, R.; Nazari, Tahmina; Wiggers, Theo; Zevin, Boris; Ungi, Tamas; Fichtinger, Gabor
Tissue segmentation for workflow recognition in open inguinal hernia repair training Conference
SPIE Medical Imaging, SPIE Medical Imaging SPIE Medical Imaging, San Diego, 2022.
@conference{Klosa2022a,
  title        = {Tissue segmentation for workflow recognition in open inguinal hernia repair training},
  author       = {Elizabeth Klosa and R. Hisey and Tahmina Nazari and Theo Wiggers and Boris Zevin and Tamas Ungi and Gabor Fichtinger},
  booktitle    = {SPIE Medical Imaging},
  publisher    = {SPIE Medical Imaging},
  organization = {SPIE Medical Imaging},
  address      = {San Diego},
  url          = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Klosa2022a.pdf},
  year         = {2022},
  date         = {2022-02-01},
  urldate      = {2022-02-01},
  abstract     = {PURPOSE: As medical education adopts a competency-based training method, experts are spending substantial amounts of time instructing and assessing trainees’ competence. In this study, we look to develop a computer-assisted training platform that can provide instruction and assessment of open inguinal hernia repairs without needing an expert observer. We recognize workflow tasks based on the tool-tissue interactions, suggesting that we first need a method to identify tissues. This study aims to train a neural network in identifying tissues in a low-cost phantom as we work towards identifying the tool-tissue interactions needed for task recognition. METHODS: Eight simulated tissues were segmented throughout five videos from experienced surgeons who performed open inguinal hernia repairs on phantoms. A U-Net was trained using leave-one-user-out cross validation. The average F-score, false positive rate and false negative rate were calculated for each tissue to evaluate the U-Net’s performance. RESULTS: Higher F-scores and lower false negative and positive rates were recorded for the skin, hernia sac, spermatic cord, and nerves, while slightly lower metrics were recorded for the subcutaneous tissue, Scarpa’s fascia, external oblique aponeurosis and superficial epigastric vessels. CONCLUSION: The U-Net performed better in recognizing tissues that were relatively larger in size and more prevalent, while struggling to recognize smaller tissues only briefly visible. Since workflow recognition does not require perfect segmentation, we believe our U-Net is sufficient in recognizing the tissues of an inguinal hernia repair phantom. Future studies will explore combining our segmentation U-Net with tool detection as we work towards workflow recognition.},
  keywords     = {},
  pubstate     = {published},
  tppubtype    = {conference}
}
Klosa, Elizabeth; Hisey, R.; Nazari, Tahmina; Wiggers, Theo; Zevin, Boris; Ungi, Tamas; Fichtinger, Gabor
Identifying tissues for task recognition in training of open inguinal hernia repairs Conference
Imaging Network of Ontario Symposium, 2022.
@conference{Klosa2022b,
  title     = {Identifying tissues for task recognition in training of open inguinal hernia repairs},
  author    = {Elizabeth Klosa and R. Hisey and Tahmina Nazari and Theo Wiggers and Boris Zevin and Tamas Ungi and Gabor Fichtinger},
  booktitle = {Imaging Network of Ontario Symposium},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/03/Klosa2022b.pdf},
  year      = {2022},
  date      = {2022-02-01},
  urldate   = {2022-02-01},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
O’Driscoll, Olivia; Hisey, R.; Holden, M.; Camire, Daenis; Erb, Jason; Howes, Daniel; Ungi, Tamas; Fichtinger, Gabor
Feasibility of using object detection for performance assessment in central venous catheterization Conference
Imaging Network of Ontario Symposium, 2022.
@conference{ODriscoll2022b,
title = {Feasibility of using object detection for performance assessment in central venous catheterization},
author = {Olivia O’Driscoll and R. Hisey and M. Holden and Daenis Camire and Jason Erb and Daniel Howes and Tamas Ungi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/ODriscoll2021b.pdf},
year = {2022},
date = {2022-02-01},
urldate = {2022-02-01},
booktitle = {Imaging Network of Ontario Symposium},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
O’Driscoll, Olivia; Hisey, R.; Holden, M.; Camire, Daenis; Erb, Jason; Howes, Daniel; Ungi, Tamas; Fichtinger, Gabor
Feasibility of object detection for skill assessment in central venous catheterization Conference
SPIE Medical Imaging, SPIE Medical Imaging SPIE Medical Imaging, San Diego, 2022.
@conference{ODriscoll2022a,
  title        = {Feasibility of object detection for skill assessment in central venous catheterization},
  author       = {Olivia O’Driscoll and R. Hisey and M. Holden and Daenis Camire and Jason Erb and Daniel Howes and Tamas Ungi and Gabor Fichtinger},
  booktitle    = {SPIE Medical Imaging},
  publisher    = {SPIE Medical Imaging},
  organization = {SPIE Medical Imaging},
  address      = {San Diego},
  url          = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/ODriscoll2022a.pdf},
  year         = {2022},
  date         = {2022-02-01},
  urldate      = {2022-02-01},
  abstract     = {<p><strong>Purpose: </strong>Computer-assisted surgical skill assessment methods have traditionally relied on tracking tool motion with physical sensors. These tracking systems can be expensive, bulky, and impede tool function. Recent advances in object detection networks have made it possible to quantify tool motion using only a camera. These advances open the door for a low-cost alternative to current physical tracking systems for surgical skill assessment. This study determines the feasibility of using metrics computed with object detection by comparing them to widely accepted metrics computed using traditional tracking methods in central venous catheterization. <strong>Methods:</strong> Both video and tracking data were recorded from participants performing central venous catheterization on a venous access phantom. A Faster Region-Based Convolutional Neural Network was trained to recognize the ultrasound probe and syringe on the video data. Tracking-based metrics were computed using the Perk Tutor extension of 3D Slicer. The path length and usage time for each tool were then computed using both the video and tracking data. The metrics from object detection and tracking were compared using Spearman rank correlation. <strong>Results: </strong>The path lengths had a rank correlation coefficient of 0.22 for the syringe (p<0.03) and 0.35 (p<0.001) for the ultrasound probe. For the usage times, the correlation coefficient was 0.37 (p<0.001) for the syringe and 0.34 (p<0.001) for the ultrasound probe. <strong>Conclusions</strong>: The video-based metrics correlated significantly with the tracked metrics, suggesting that object detection could be a feasible skill assessment method for central venous catheterization.</p>},
  keywords     = {},
  pubstate     = {published},
  tppubtype    = {conference}
}
Kitner, Nicole; Rodgers, Jessica R.; Ungi, Tamas; Korzeniowski, Martin; Olding, Timothy; Joshi, C. P.; Mousavi, Parvin; Fichtinger, Gabor
Automated Catheter Segmentation in 3D Ultrasound Images from High-Dose-Rate Prostate Brachytherapy Conference
Imaging Network Ontario (IMNO) 2022 Symposium, Imaging Network of Ontario, Online, 2022.
@conference{Kitner2022ac,
title = {Automated Catheter Segmentation in {3D} Ultrasound Images from High-Dose-Rate Prostate Brachytherapy},
author = {Nicole Kitner and Jessica R. Rodgers and Tamas Ungi and Martin Korzeniowski and Timothy Olding and C. P. Joshi and Parvin Mousavi and Gabor Fichtinger},
year = {2022},
date = {2022-02-01},
urldate = {2022-02-01},
booktitle = {Imaging Network Ontario (IMNO) 2022 Symposium},
publisher = {Imaging Network of Ontario},
address = {Online},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Lasso, Andras; Herz, Christian; Nam, Hannah; Cianciulli, Alana; Pieper, Steve; Drouin, Simon; Pinter, Csaba; St-Onge, Samuelle; Vigil, Chad; Ching, Stephen; Sunderland, Kyle; Fichtinger, Gabor; Kikinis, Ron; Jolley, Matthew A
SlicerHeart: An open-source computing platform for cardiac image analysis and modeling Journal Article
In: Frontiers in Cardiovascular Medicine, vol. 9, pp. 886549, 2022.
@article{fichtinger2022c,
title = {{SlicerHeart}: An open-source computing platform for cardiac image analysis and modeling},
author = {Andras Lasso and Christian Herz and Hannah Nam and Alana Cianciulli and Steve Pieper and Simon Drouin and Csaba Pinter and Samuelle St-Onge and Chad Vigil and Stephen Ching and Kyle Sunderland and Gabor Fichtinger and Ron Kikinis and Matthew A Jolley},
url = {https://www.frontiersin.org/articles/10.3389/fcvm.2022.886549/full},
year = {2022},
date = {2022-01-01},
journal = {Frontiers in Cardiovascular Medicine},
volume = {9},
pages = {886549},
publisher = {Frontiers},
abstract = {Cardiovascular disease is a significant cause of morbidity and mortality in the developed world. 3D imaging of the heart’s structure is critical to the understanding and treatment of cardiovascular disease. However, open-source tools for image analysis of cardiac images, particularly 3D echocardiographic (3DE) data, are limited. We describe the rationale, development, implementation, and application of SlicerHeart, a cardiac-focused toolkit for image analysis built upon 3D Slicer, an open-source image computing platform. We designed and implemented multiple Python scripted modules within 3D Slicer to import, register, and view 3DE data, including new code to volume render and crop 3DE. In addition, we developed dedicated workflows for the modeling and quantitative analysis of multi-modality image-derived heart models, including heart valves. Finally, we created and integrated new functionality to facilitate the planning of cardiac interventions and surgery. We demonstrate application of SlicerHeart to a diverse range of cardiovascular modeling and simulation including volume rendering of 3DE images, mitral valve modeling, transcatheter device modeling, and planning of complex surgical intervention such as cardiac baffle creation. SlicerHeart is an evolving open-source image processing platform based on 3D Slicer initiated to support the investigation and treatment of congenital heart disease. The technology in SlicerHeart provides a robust foundation for 3D image-based investigation in cardiovascular medicine.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Fichtinger, Gabor; Troccaz, Jocelyne; Haidegger, Tamas
Image-guided interventional robotics: Lost in translation? Journal Article
In: Proceedings of the IEEE, vol. 110, iss. 7, pp. 932-950, 2022.
@article{fichtinger2022b,
  title     = {Image-guided interventional robotics: Lost in translation?},
  author    = {Gabor Fichtinger and Jocelyne Troccaz and Tamas Haidegger},
  journal   = {Proceedings of the IEEE},
  volume    = {110},
  issue     = {7},
  pages     = {932-950},
  publisher = {IEEE},
  url       = {https://ieeexplore.ieee.org/abstract/document/9777771/},
  year      = {2022},
  date      = {2022-01-01},
  abstract  = {Interventional robotic systems have been deployed with all existing imaging modalities in an expansive portfolio of therapies and surgeries. Over the years, literature reviews have painted a comprehensive portrait of the translation of the underlying technology from research to practice. While many of these robots performed promisingly in preclinical settings, only a handful of them managed to evolve further, break through the commercialization boundary, and even fewer reached a wide-scale adoption. Despite the undeniable success of service robotics in general and particularly in some sophisticated medical applications, image-guided robotics’ impact remained modest compared to other surgical areas, especially laparoscopic minimally invasive surgery. This article aims to embrace the state of the art on the one hand, provide a comprehensive narrative of the situation described, support future system developers …},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Maier-Hein, Lena; Eisenmann, Matthias; Sarikaya, Duygu; März, Keno; Collins, Toby; Malpani, Anand; Fallert, Johannes; Feussner, Hubertus; Giannarou, Stamatia; Mascagni, Pietro; Nakawala, Hirenkumar; Park, Adrian; Pugh, Carla; Stoyanov, Danail; Vedula, Swaroop S; Cleary, Kevin; Fichtinger, Gabor; Forestier, Germain; Gibaud, Bernard; Grantcharov, Teodor; Hashizume, Makoto; Heckmann-Nötzel, Doreen; Kenngott, Hannes G; Kikinis, Ron; Mündermann, Lars; Navab, Nassir; Onogur, Sinan; Ross, Tobias; Sznitman, Raphael; Taylor, Russell H; Tizabi, Minu D; Wagner, Martin; Hager, Gregory D; Neumuth, Thomas; Padoy, Nicolas; Collins, Justin; Gockel, Ines; Goedeke, Jan; Hashimoto, Daniel A; Joyeux, Luc; Lam, Kyle; Leff, Daniel R; Madani, Amin; Marcus, Hani J; Meireles, Ozanan; Seitel, Alexander; Teber, Dogu; Ueckert, Frank; Müller-Stich, Beat P; Jannin, Pierre; Speidel, Stefanie
Surgical data science–from concepts toward clinical translation Journal Article
In: Medical Image Analysis, vol. 76, pp. 102306, 2022.
@article{fichtinger2022,
title = {Surgical data science -- from concepts toward clinical translation},
author = {Lena Maier-Hein and Matthias Eisenmann and Duygu Sarikaya and Keno März and Toby Collins and Anand Malpani and Johannes Fallert and Hubertus Feussner and Stamatia Giannarou and Pietro Mascagni and Hirenkumar Nakawala and Adrian Park and Carla Pugh and Danail Stoyanov and Swaroop S Vedula and Kevin Cleary and Gabor Fichtinger and Germain Forestier and Bernard Gibaud and Teodor Grantcharov and Makoto Hashizume and Doreen Heckmann-Nötzel and Hannes G Kenngott and Ron Kikinis and Lars Mündermann and Nassir Navab and Sinan Onogur and Tobias Ross and Raphael Sznitman and Russell H Taylor and Minu D Tizabi and Martin Wagner and Gregory D Hager and Thomas Neumuth and Nicolas Padoy and Justin Collins and Ines Gockel and Jan Goedeke and Daniel A Hashimoto and Luc Joyeux and Kyle Lam and Daniel R Leff and Amin Madani and Hani J Marcus and Ozanan Meireles and Alexander Seitel and Dogu Teber and Frank Ueckert and Beat P Müller-Stich and Pierre Jannin and Stefanie Speidel},
url = {https://www.sciencedirect.com/science/article/pii/S1361841521003510},
year = {2022},
date = {2022-01-01},
journal = {Medical Image Analysis},
volume = {76},
pages = {102306},
publisher = {Elsevier},
abstract = {Recent developments in data science in general and machine learning in particular have transformed the way experts envision the future of surgery. Surgical Data Science (SDS) is a new research field that aims to improve the quality of interventional healthcare through the capture, organization, analysis and modeling of data. While an increasing number of data-driven approaches and clinical applications have been studied in the fields of radiological and clinical data science, translational success stories are still lacking in surgery. In this publication, we shed light on the underlying reasons and provide a roadmap for future advances in the field. Based on an international workshop involving leading researchers in the field of SDS, we review current practice, key achievements and initiatives as well as available standards and tools for a number of topics relevant to the field, namely (1) infrastructure for data …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Connolly, Laura; Jamzad, Amoon; Nikniazi, Arash; Poushimin, Rana; Lasso, Andras; Sunderland, Kyle R.; Ungi, Tamas; Nunzi, Jean Michel; Rudan, John; Fichtinger, Gabor; Mousavi, Parvin
An open-source testbed for developing image-guided robotic tumor-bed inspection Conference
Imaging Network of Ontario (ImNO) Symposium, 2022.
@conference{connolly2022b,
  title     = {An open-source testbed for developing image-guided robotic tumor-bed inspection},
  author    = {Laura Connolly and Amoon Jamzad and Arash Nikniazi and Rana Poushimin and Andras Lasso and Kyle R. Sunderland and Tamas Ungi and Jean Michel Nunzi and John Rudan and Gabor Fichtinger and Parvin Mousavi},
  booktitle = {Imaging Network of Ontario (ImNO) Symposium},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/01/Connolly2022b.pdf},
  year      = {2022},
  date      = {2022-01-01},
  urldate   = {2022-01-01},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Kaufmann, Martin; Vaysse, Pierre-Maxence; Savage, Adele; Amgheib, Ala; Marton, András; Manoli, Eftychios; Fichtinger, Gabor; Pringle, Steven D.; Rudan, John; Heeren, Ron M. A.; Takáts, Zoltán; Balog, Júlia; Siegel, Tiffany Porta
Harmonization of Rapid Evaporative Ionization Mass Spectrometry Workflows across Four Sites and Testing Using Reference Material and Local Food-Grade Meats Journal Article
In: Metabolites, 2022.
@article{Kaufmann2022b,
  title     = {Harmonization of Rapid Evaporative Ionization Mass Spectrometry Workflows across Four Sites and Testing Using Reference Material and Local Food-Grade Meats},
  author    = {Martin Kaufmann and Pierre-Maxence Vaysse and Adele Savage and Ala Amgheib and András Marton and Eftychios Manoli and Gabor Fichtinger and Steven D. Pringle and John Rudan and Ron M. A. Heeren and Zoltán Takáts and Júlia Balog and Tiffany Porta Siegel},
  journal   = {Metabolites},
  year      = {2022},
  date      = {2022-01-01},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}