Image Guidance

References

  • R. T. Azuma, “A survey of augmented reality,” Presence: Teleoperators and Virtual Environments, vol. 6, iss. 4, pp. 355-385, 1997.
    [Bibtex]
    @ARTICLE{Azuma1997,
      author = {Azuma, R. T.},
      title = {A survey of augmented reality},
      journal = {Presence: Teleoperators and Virtual Environments},
      year = {1997},
      volume = {6},
      pages = {355 - 385},
      number = {4},
      file = {Azuma1997.pdf:Azuma1997.pdf:PDF},
      issn = {1054-7460},
      keywords = {REV, AUR},
      owner = {thomaskroes},
      publisher = {MIT Press},
      timestamp = {2011.01.06}
    }
  • W. Birkfellner, J. Hummel, E. Wilson, and K. Cleary, “Tracking Devices,” in Image-Guided Interventions, T. Peters and K. Cleary, Eds., Springer US, 2008, pp. 23-44.
    [Bibtex]
    @INCOLLECTION{Birkfellner2008,
      author = {Birkfellner, Wolfgang and Hummel, Johann and Wilson, Emmanuel and
      Cleary, Kevin},
      title = {Tracking Devices},
      booktitle = {Image-Guided Interventions},
      publisher = {Springer US},
      year = {2008},
      editor = {Peters, Terry and Cleary, Kevin},
      pages = {23 - 44},
      note = {Chapter 2},
      abstract = {Tracking devices are an essential component of an image-guided surgery
      system. These devices are used to track the position of instruments
      relative to the patient anatomy. Although early tracking systems
      were essentially mechanical digitizers, the field quickly adopted
      optical tracking systems because of their high accuracy and relatively
      large workspace. However, optical tracking systems require that a
      line-of-sight be maintained between the tracking device and the instrument
      to be tracked, which is not always convenient and precludes tracking
      of flexible instruments inside the body. Therefore, electromagnetic
      tracking systems were developed that had no line-of-sight requirement
      and could track instruments such as catheters and the tips of needles
      inside the body. The choice of tracking system is highly application
      dependent and requires an understanding of the desired working volume
      and accuracy requirements. To meet these needs, a variety of tracking
      devices and techniques have been introduced as described in this
      chapter.},
      affiliation = {Medical University Vienna Austria},
      file = {Birkfellner2008.pdf:Birkfellner2008.pdf:PDF},
      isbn = {978-0-387-73858-1},
      keyword = {Engineering},
      keywords = {REV},
      owner = {Thomas},
      timestamp = {2011.02.24}
    }
  • M. Blackwell, C. Nikou, A. M. DiGioia, and T. Kanade, “An image overlay system for medical data visualization,” Medical Image Analysis, vol. 4, iss. 1, pp. 67-72, 2000.
    [Bibtex]
    @ARTICLE{Blackwell2000,
      author = {Blackwell, M. and Nikou, C. and DiGioia, A.M. and Kanade, T.},
      title = {An image overlay system for medical data visualization},
      journal = {Medical Image Analysis},
      year = {2000},
      volume = {4},
      pages = {67 - 72},
      number = {1},
      abstract = {Image Overlay is a computer display technique which superimposes computer
      images over the user’s direct view of the real world. The images
      are transformed in real-time so they appear to the user to be an
      integral part of the surrounding environment. By using Image Overlay
      with three-dimensional medical images such as CT reconstructions,
      a surgeon can visualize the data ‘in-vivo’, exactly positioned within
      the patient’s anatomy, and potentially enhance the surgeon’s ability
      to perform a complex procedure. This paper describes prototype Image
      Overlay systems and initial experimental results from those systems.},
      file = {Blackwell2000.pdf:Blackwell2000.pdf:PDF},
      issn = {1361-8415},
      keywords = {TEC, AUR, STV, SUR, GUI},
      owner = {thomaskroes},
      publisher = {Elsevier},
      timestamp = {2011.01.04}
    }
  • G. J. Bootsma, J. H. Siewerdsen, M. J. Daly, and D. A. Jaffray, “Initial investigation of an automatic registration algorithm for surgical navigation,” in Engineering in Medicine and Biology Society, 2008. EMBS 2008. 30th Annual International Conference of the IEEE, 2008, pp. 3638-3642.
    [Bibtex]
    @INPROCEEDINGS{Bootsma2008,
      author = {Bootsma, Gregory J. and Siewerdsen, Jeffrey H. and Daly, Michael
      J. and Jaffray, David A.},
      title = {Initial investigation of an automatic registration algorithm for
      surgical navigation},
      booktitle = {Engineering in Medicine and Biology Society, 2008. EMBS 2008. 30th
      Annual International Conference of the IEEE},
      year = {2008},
      pages = {3638 - 3642},
      month = {August},
      abstract = {The procedure required for registering a surgical navigation system
      prior to use in a surgical procedure is conventionally a time-consuming
      manual process that is prone to human errors and must be repeated
      as necessary through the course of a procedure. The conventional
      procedure becomes even more time consuming when intra-operative 3D
      imaging such as the C-arm cone-beam CT (CBCT) is introduced, as each
      updated volume set requires a new registration. To improve the speed
      and accuracy of registering image and world reference frames in image-guided
      surgery, a novel automatic registration algorithm was developed and
      investigated. The surgical navigation system consists of either Polaris
      (Northern Digital Inc., Waterloo, ON) or MicronTracker (Claron Technology
      Inc., Toronto, ON) tracking camera(s), custom software (Cogito running
      on a PC), and a prototype CBCT imaging system based on a mobile isocentric
      C-arm (Siemens, Erlangen, Germany). Experiments were conducted to
      test the accuracy of automatic registration methods for both the
      MicronTracker and Polaris tracking cameras. Results indicate the
      automated registration performs as well as the manual registration
      procedure using either the Claron or Polaris camera. The average
      root-mean-squared (rms) observed target registration error (TRE)
      for the manual procedure was 2.58 ± 0.42 mm and 1.76 ± 0.49 mm
      for the Polaris and MicronTracker, respectively. The mean observed
      TRE for the automatic algorithm was 2.11 ± 0.13 and 2.03 ± 0.3 mm
      for the Polaris and MicronTracker, respectively. Implementation and
      optimization of the automatic registration technique in C-arm CBCT
      guidance of surgical procedures is underway.},
      file = {:Bootsma2008.pdf:PDF},
      issn = {1557-170X},
      keywords = {Algorithms;Automatic Data Processing;Equipment Design;Humans;Models,
      Statistical;Pattern Recognition, Automated;Photography;Reproducibility
      of Results;Signal Processing, Computer-Assisted;Software;Surgery,
      Computer-Assisted;Tomography Scanners, X-Ray Computed;Tomography,
      X-Ray Computed;, TEC, GUI, SUR, SLR},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • L. H. C. Cevidanes, S. Tucker, M. Styner, H. Kim, J. Chapuis, M. Reyes, W. Proffit, T. Turvey, and M. Jaskolka, “Three-dimensional surgical simulation,” American journal of orthodontics and dentofacial orthopedics : official publication of the American Association of Orthodontists, its constituent societies, and the American Board of Orthodontics, vol. 138, iss. 3, pp. 361-371, 2010.
    [Bibtex]
    @ARTICLE{Cevidanes2010,
      author = {Cevidanes, Lucia H C and Tucker, Scott and Styner, Martin and Kim,
      Hyungmin and Chapuis, Jonas and Reyes, Mauricio and Proffit, William
      and Turvey, Timothy and Jaskolka, Michael},
      title = {Three-dimensional surgical simulation.},
      journal = {American journal of orthodontics and dentofacial orthopedics : official
      publication of the American Association of Orthodontists, its constituent
      societies, and the American Board of Orthodontics},
      year = {2010},
      volume = {138},
      pages = {361-371},
      number = {3},
      month = {September},
      abstract = {In this article, we discuss the development of methods for computer-aided
      jaw surgery, which allows us to incorporate the high level of precision
      necessary for transferring virtual plans into the operating room.
      We also present a complete computer-aided surgery system developed
      in close collaboration with surgeons. Surgery planning and simulation
      include construction of 3-dimensional surface models from cone-beam
      computed tomography, dynamic cephalometry, semiautomatic mirroring,
      interactive cutting of bone, and bony segment repositioning. A virtual
      setup can be used to manufacture positioning splints for intraoperative
      guidance. The system provides further intraoperative assistance with
      a computer display showing jaw positions and 3-dimensional positioning
      guides updated in real time during the surgical procedure. The computer-aided
      surgery system aids in dealing with complex cases with benefits for
      the patient, with surgical practice, and for orthodontic finishing.
      Advanced software tools for diagnosis and treatment planning allow
      preparation of detailed operative plans, osteotomy repositioning,
      bone reconstructions, surgical resident training, and assessing the
      difficulties of the surgical procedures before the surgery. Computer-aided
      surgery can make the elaboration of the surgical plan a more flexible
      process, increase the level of detail and accuracy of the plan, yield
      higher operative precision and control, and enhance documentation
      of cases.},
      file = {Cevidanes2010.pdf:Cevidanes2010.pdf:PDF},
      issn = {1097-6752},
      keywords = {Cephalometry,Cephalometry: methods,Computer Simulation,Cone-Beam Computed
      Tomography,Data Display,Dental Models,Finite Element Analysis,Humans,Image
      Processing, Computer-Assisted,Image Processing, Computer-Assisted:
      methods,Imaging, Three-Dimensional,Imaging, Three-Dimensional: methods,Information
      Systems,Intraoperative Care,Orthognathic Surgical Procedures,Orthognathic
      Surgical Procedures: methods,Osteotomy,Osteotomy: methods,Patient
      Care Planning,Reconstructive Surgical Procedures,Reconstructive Surgical
      Procedures: methods,Software,Surgery, Computer-Assisted,Surgery,
      Computer-Assisted: methods,User-Computer Interface, APP, CMS, OCS,
      TEC, GUI, PLA, SUR},
      owner = {thomaskroes},
      pmid = {20816308},
      publisher = {American Association of Orthodontists},
      timestamp = {2010.10.22}
    }
  • A. C. F. Colchester, J. Zhao, K. S. Holton-Tainter, C. J. Henri, N. Maitland, P. T. E. Roberts, C. G. Harris, and R. J. Evans, “Development and preliminary evaluation of VISLAN, a surgical planning and guidance system using intra-operative video imaging,” Medical Image Analysis, vol. 1, iss. 1, pp. 73-90, 1996.
    [Bibtex]
    @ARTICLE{Colchester1996,
      author = {Alan C.F. Colchester and Jason Zhao and Kerrie S. Holton-Tainter
      and Christopher J. Henri and Neil Maitland and Patricia T.E. Roberts
      and Christopher G. Harris and Richard J. Evans},
      title = {Development and preliminary evaluation of VISLAN, a surgical planning
      and guidance system using intra-operative video imaging},
      journal = {Medical Image Analysis},
      year = {1996},
      volume = {1},
      pages = {73 - 90},
      number = {1},
      abstract = {VISLAN is an integrated neurosurgical planning and guidance system.
      New segmentation and rendering techniques have been incorporated.
      A stereo video system is used intra-operatively and fulfils four
      roles. First, the video display is overlaid with graphical outlines
      showing the position of the planned craniotomy or the target (enhanced
      reality displays). Second, a skin surface patch is reconstructed
      from the stereo video images using patterned light (mean errors of
      surface point location are <0.15 mm). Third, a freely mobile, hand-held
      localizer is tracked in real time (position errors are <0.5 mm and
      with improved calibration <0.2 mm), with its position superimposed
      on the pre-operative patient representation to assist surgical guidance.
      Fourth, markers fixed to the skull bone next to the cranial opening
      are used to detect intra-operative movement and to update registration.
      Initial results from phantom experiments show an overall system accuracy
      of better than 0.9 mm for intra-operative localization of features
      defined in pre-operative images. The prototype system has been tested
      during six neurosurgical operations with very good results.},
      file = {:Colchester1996.pdf:PDF},
      issn = {1361-8415},
      keywords = {enhanced reality, APP, PLA, GUI, NES, STV, SUR},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • A. DiGioia III, B. Jaramaz, C. Nikou, R. Labarca, J. Moody, and B. Colgan, “Surgical navigation for total hip replacement with the use of HipNav,” Operative Techniques in Orthopaedics, vol. 10, iss. 1, pp. 3-8, 2000.
    [Bibtex]
    @ARTICLE{Digioiaiii2000,
      author = {DiGioia III, A. and Jaramaz, B and Nikou, C and Labarca, R and Moody,
      J and Colgan, B},
      title = {Surgical navigation for total hip replacement with the use of HipNav},
      journal = {Operative Techniques in Orthopaedics},
      year = {2000},
      volume = {10},
      pages = {3-8},
      number = {1},
      month = {January},
      abstract = {HipNav, an image-guided surgical navigation system, is presented.
      The system was developed to measure and guide the placement of prosthetic
      components in total hip replacement surgery (THR). It incorporates
      a 3-dimensional preoperative planner with a simulator and an intraoperative
      surgical navigator. Coupling optimized preoperative planning with
      accurate surgical navigation will assist the surgeon in properly
      orienting the components, minimizing the risk of impingement and
      dislocation. Intraoperatively, the system uses image-guided tools
      to assist in accurate placement of the acetabular cup. The acetabular
      implant is placed in the planned position with the aid of a simple
      "aim-and-shoot" interface. The actual measurements of version and
      abduction are also provided. The use of this new class of operative
      sensors has been incorporated into a regular surgical routine. There
      are few additional steps necessary, therefore, for the image-guided
      procedure, which does not add significantly to the total time of
      surgery. We expect that these tools will lead to less invasive and
      more accurate THR surgery and directly relate patient outcomes to
      measured surgical practice.},
      file = {Digioiaiii2000.pdf:Digioiaiii2000.pdf:PDF},
      issn = {10486666},
      keywords = {3-dimensional planner,4 the leading,a significant clinical problem,after
      total hip replacement,dislocation continues to be,mechanisms of dislocation
      are,orientation,prosthetic impingement,simulation,surgery,surgical
      navigation,thr,total hip replacement, APP, PLA, GUI, OCS},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • P. Dumpuri, L. W. Clements, B. M. Dawant, and M. I. Miga, “Model-updated image-guided liver surgery: Preliminary results using surface characterization,” Progress in biophysics and molecular biology, iss. September, pp. 1-11, 2010.
    [Bibtex]
    @ARTICLE{Dumpuri2010,
      author = {Dumpuri, Prashanth and Clements, Logan W and Dawant, Benoit M and
      Miga, Michael I},
      title = {Model-updated image-guided liver surgery: Preliminary results using
      surface characterization.},
      journal = {Progress in biophysics and molecular biology},
      year = {2010},
      pages = {1-11},
      number = {September},
      month = {September},
      abstract = {The current protocol for image guidance in open abdominal liver tumor
      removal surgeries involves a rigid registration between the patient's
      operating room space and the pre-operative diagnostic image-space.
      Systematic studies have shown that the liver can deform up to 2cm
      during surgeries in a non-rigid fashion thereby compromising the
      accuracy of these surgical navigation systems. Compensating for intra-operative
      deformations using mathematical models has shown promising results.
      In this work, we follow up the initial rigid registration with a
      computational approach that is geared towards minimizing the residual
      closest point distances between the un-deformed pre-operative surface
      and the rigidly registered intra-operative surface. We also use a
      surface Laplacian equation based filter that generates a realistic
      deformation field. Preliminary validation of the proposed computational
      framework was performed using phantom experiments and clinical trials.
      The proposed framework improved the rigid registration errors for
      the phantom experiments on average by 43\%, and 74\% using partial
      and full surface data, respectively. With respect to clinical data,
      it improved the closest point residual error associated with rigid
      registration by 68\% on average for the clinical cases. These results
      are highly encouraging and suggest that computational models can
      be used to increase the accuracy of image-guided open abdominal liver
      tumor removal surgeries.},
      file = {Dumpuri2010.pdf:Dumpuri2010.pdf:PDF},
      issn = {1873-1732},
      keywords = {finite element analysis,image-guided liver surgeries,linear elastic
      model,methods, APP, HES, GUI},
      owner = {thomaskroes},
      pmid = {20869385},
      publisher = {Elsevier Ltd},
      timestamp = {2010.10.22}
    }
  • J. Fischer, M. Neff, D. Freudenstein, and D. Bartz, “Medical augmented reality based on commercial image guided surgery,” in Eurographics Symposium on Virtual Environments (EGVE), 2004, pp. 83-86.
    [Bibtex]
    @CONFERENCE{Fischer2004,
      author = {Fischer, J. and Neff, M. and Freudenstein, D. and Bartz, D.},
      title = {Medical augmented reality based on commercial image guided surgery},
      booktitle = {Eurographics Symposium on Virtual Environments (EGVE)},
      year = {2004},
      pages = {83 - 86},
      organization = {Citeseer},
      abstract = {Utilizing augmented reality for applications in medicine has been
      a topic of intense research for several years. A number of challenging
      tasks need to be addressed when designing a medical AR system. These
      include the import and management of medical datasets and preoperatively
      created planning data, the registration of the patient with respect
      to a global coordinate system, and accurate tracking of the camera
      used in the AR setup as well as the respective surgical instruments.
      Most research systems rely on specialized hardware or algorithms
      for realizing augmented reality in medicine. Such base technologies can be expensive
      or very time-consuming to implement. In this paper, we propose an
      alternative approach of building a surgical AR system by harnessing
      existing, commercially available equipment for image guided surgery
      (IGS). We describe the prototype of an augmented reality application,
      which receives all necessary information from a device for intraoperative
      navigation.},
      file = {Fischer2004.pdf:Fischer2004.pdf:PDF},
      keywords = {TEC, AUR},
      owner = {thomaskroes},
      timestamp = {2011.01.03}
    }
  • H. Fuchs, M. A. Livingston, R. Raskar, D. Colucci, A. State, J. R. Crawford, P. Rademacher, S. H. Drake, and A. A. Meyer, “Augmented Reality Visualization for Laparoscopic Surgery,” in Medical Image Computing and Computer-Assisted Intervention – MICCAI’98, 1998, pp. 934-943.
    [Bibtex]
    @INPROCEEDINGS{Fuchs1998,
      author = {Fuchs, Henry and Livingston, Mark A and Raskar, Ramesh and Colucci,
      D and State, Andrei and Crawford, Jessica R and Rademacher, Paul
      and Drake, Samuel H and Meyer, Anthony A},
      title = {Augmented Reality Visualization for Laparoscopic Surgery},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention - MICCAI'98},
      year = {1998},
      pages = {934-943},
      abstract = {We present the design and a prototype implementation of a three-dimensional
      visualization system to assist with laparoscopic surgical procedures.
      The system uses 3D visualization, depth extraction from laparoscopic
      images, and six degree-of-freedom head and laparoscope tracking
      to display a merged real and synthetic image in the surgeon’s video-see-through
      head-mounted display. We also introduce a custom design for this
      display. A digital light projector, a camera, and a conventional
      laparoscope create a prototype 3D laparoscope that can extract depth
      and video imagery. Such a system can restore the physician’s natural
      point of view and head motion parallax that are used to understand
      the 3D structure during open surgery. These cues are not available
      in conventional laparoscopic surgery due to the displacement of the
      laparoscopic camera from the physician’s viewpoint. The system can
      also display multiple laparoscopic range imaging data sets to widen
      the effective field of view of the device. These data sets can be
      displayed in true 3D and registered to the exterior anatomy of the
      patient. Much work remains to realize a clinically useful system,
      notably in the acquisition speed, reconstruction, and registration
      of the 3D imagery.},
      annote = {Interesting paper, uses structured light to generate realtime 3D
      augmented images during surgery},
      file = {Fuchs1998.pdf:Fuchs1998.pdf:PDF},
      keywords = {TEC, AUR},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • R. Galloway and T. Peters, “Overview and History of Image-Guided Interventions,” in Image-Guided Interventions, T. Peters and K. Cleary, Eds., Springer US, 2008, pp. 1-21.
    [Bibtex]
    @INCOLLECTION{Galloway2008,
      author = {Galloway, R. and Peters, T.},
      title = {Overview and History of Image-Guided Interventions},
      booktitle = {Image-Guided Interventions},
      publisher = {Springer US},
      year = {2008},
      editor = {Peters, Terry and Cleary, Kevin},
      pages = {1 - 21},
      note = {Chapter 1},
      file = {Galloway2008.pdf:Galloway2008.pdf:PDF},
      keywords = {REV},
      owner = {Thomas},
      timestamp = {2011.02.28}
    }
  • K. Gary, L. Ibanez, S. Aylward, D. Gobbi, M. B. Blake, and K. Cleary, “IGSTK: an open source software toolkit for image-guided surgery,” Computer, vol. 39, iss. 4, pp. 46-53, 2006.
    [Bibtex]
    @ARTICLE{Gary2006,
      author = {Gary, K. and Ibanez, L. and Aylward, S. and Gobbi, D. and Blake,
      M.B. and Cleary, K.},
      title = {IGSTK: an open source software toolkit for image-guided surgery},
      journal = {Computer},
      year = {2006},
      volume = {39},
      pages = {46 - 53},
      number = {4},
      month = {April},
      file = {:Gary2006.pdf:PDF},
      issn = {0018-9162},
      keywords = {IGSTK; Image-Guided Software Toolkit; agile software engineering principles;
      component-based software engineering principles; image-guided surgical
      application; minimally invasive procedures; open source development;
      open source software toolkit; reliable software; reusable software
      infrastructure; medical image processing; object-oriented programming;
      public domain software; software reliability; software reusability;
      surgery;},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • W. Grimson, R. Kikinis, F. Jolesz, and P. Black, “Image-guided surgery,” Scientific American, vol. 280, iss. 6, pp. 54-61, 1999.
    [Bibtex]
    @ARTICLE{Grimson1999,
      author = {Grimson, WEL and Kikinis, R. and Jolesz, FA and Black, PM},
      title = {Image-guided surgery},
      journal = {Scientific American},
      year = {1999},
      volume = {280},
      pages = {54 - 61},
      number = {6},
      file = {Grimson1999.pdf:Grimson1999.pdf:PDF},
      issn = {0036-8733},
      keywords = {TEC, IMP},
      owner = {Thomas},
      timestamp = {2011.03.09}
    }
  • W. L. Grimson, G. J. Ettinger, S. J. White, T. Lozano-Perez, W. M. Wells, and R. Kikinis, “An automatic registration method for frameless stereotaxy, image guided surgery, and enhanced reality visualization,” IEEE transactions on medical imaging, vol. 15, iss. 2, pp. 129-140, 1996.
    [Bibtex]
    @ARTICLE{Grimson1996,
      author = {Grimson, W L and Ettinger, G J and White, S J and Lozano-Perez, T
      and Wells, W M and Kikinis, R},
      title = {An automatic registration method for frameless stereotaxy, image
      guided surgery, and enhanced reality visualization.},
      journal = {IEEE transactions on medical imaging},
      year = {1996},
      volume = {15},
      pages = {129-140},
      number = {2},
      month = {January},
      abstract = {There is a need for frameless guidance systems to help surgeons plan
      the exact location for incisions, to define the margins of tumors,
      and to precisely identify locations of neighboring critical structures.
      The authors have developed an automatic technique for registering
      clinical data, such as segmented magnetic resonance imaging (MRI)
      or computed tomography (CT) reconstructions, with any view of the
      patient on the operating table. The authors demonstrate on the specific
      example of neurosurgery. The method enables a visual mix of live
      video of the patient and the segmented three-dimensional (3-D) MRI
      or CT model. This supports enhanced reality techniques for planning
      and guiding neurosurgical procedures and allows us to interactively
      view extracranial or intracranial structures nonintrusively. Extensions
      of the method include image guided biopsies, focused therapeutic
      procedures, and clinical studies involving change detection over
      time sequences of images.},
      file = {Grimson1996.pdf:Grimson1996.pdf:PDF},
      issn = {0278-0062},
      owner = {thomaskroes},
      pmid = {18215896},
      timestamp = {2010.10.22}
    }
  • C. Hansen, F. Ritter, J. Wieferich, H. Hahn, and H. -O. Peitgen, “Illustration of Vascular Structures for Augmented Reality in Liver Surgery,” in World Congress on Medical Physics and Biomedical Engineering, September 7 – 12, 2009, Munich, Germany, R. Magjarevic, O. Dössel, and W. C. Schlegel, Eds., Springer Berlin Heidelberg, 2010, vol. 25 / 4, pp. 2113-2116.
    [Bibtex]
    @INCOLLECTION{Hansen2010c,
      author = {Hansen, C. and Ritter, F. and Wieferich, J. and Hahn, H. and Peitgen,
      H. -O.},
      title = {Illustration of Vascular Structures for Augmented Reality in Liver
      Surgery},
      booktitle = {World Congress on Medical Physics and Biomedical Engineering, September
      7 - 12, 2009, Munich, Germany},
      publisher = {Springer Berlin Heidelberg},
      year = {2010},
      editor = {Magjarevic, Ratko and Dössel, Olaf and Schlegel, Wolfgang C.},
      volume = {25 / 4},
      series = {IFMBE Proceedings},
      pages = {2113 - 2116},
      abstract = {We present methods for intraoperative visualization of vascular structures
      in liver surgery. The underlying concept combines conventional augmented
      reality approaches with illustrative rendering techniques. Our methods
      reduce the visual complexity of vascular structures, and accentuate
      spatial relations. The proposed visualization techniques are embedded
      in a clinical prototype application that has already been used in
      the operating room for preliminary evaluations. To verify the expressiveness
      of our illustration methods, we performed a user study with controlled
      lab conditions. The study revealed a clear advantage in distance
      assessment for the proposed illustrative approach in comparison to
      conventional rendering techniques.},
      affiliation = {Institute for Medical Image Computing, Fraunhofer MEVIS, Bremen, Germany},
      file = {Hansen2010c.pdf:Hansen2010c.pdf:PDF},
      isbn = {978-3-642-03882-2},
      keyword = {Engineering},
      keywords = {TEC, HES},
      owner = {thomaskroes},
      timestamp = {2011.01.26}
    }
  • C. Hansen, J. Wieferich, F. Ritter, C. Rieder, and H. Peitgen, “Illustrative visualization of 3D planning models for augmented reality in liver surgery,” International journal of computer assisted radiology and surgery, vol. 5, iss. 2, pp. 133-141, 2010.
    [Bibtex]
    @ARTICLE{Hansen2010a,
      author = {Hansen, Christian and Wieferich, Jan and Ritter, Felix and Rieder,
      Christian and Peitgen, Heinz-Otto},
      title = {Illustrative visualization of 3D planning models for augmented reality
      in liver surgery.},
      journal = {International journal of computer assisted radiology and surgery},
      year = {2010},
      volume = {5},
      pages = {133-141},
      number = {2},
      month = {March},
      abstract = {PURPOSE: Augmented reality (AR) obtains increasing acceptance in the
      operating room. However, a meaningful augmentation of the surgical
      view with a 3D visualization of planning data which allows reliable
      comparisons of distances and spatial relations is still an open request.
      METHODS: We introduce methods for intraoperative visualization of
      3D planning models which extend illustrative rendering and AR techniques.
      We aim to reduce visual complexity of 3D planning models and accentuate
      spatial relations between relevant objects. The main contribution
      of our work is an advanced silhouette algorithm for 3D planning models
      (distance-encoding silhouettes) combined with procedural textures
      (distance-encoding surfaces). In addition, we present a method for
      illustrative visualization of resection surfaces. RESULTS: The developed
      algorithms have been embedded into a clinical prototype that has
      been evaluated in the operating room. To verify the expressiveness
      of our illustration methods, we performed a user study under controlled
      conditions. The study revealed a clear advantage in distance assessment
      with the proposed illustrative approach in comparison to classical
      rendering techniques. CONCLUSION: The presented illustration methods
      are beneficial for distance assessment in surgical AR. To increase
      the safety of interventions with the proposed approach, the reduction
      of inaccuracies in tracking and registration is a subject of our
      current research.},
      file = {Hansen2010a.pdf:Hansen2010a.pdf:PDF},
      issn = {1861-6429},
      keywords = {Humans,Imaging, Three-Dimensional,Intraoperative Period,Liver Diseases,Liver
      Diseases: surgery,Software,Space Perception,Surgery, Computer-Assisted,Surgery,
      Computer-Assisted: methods,Surgery, Computer-Assisted: standards,
      TRM, AUR, APP, HES, SUR},
      owner = {thomaskroes},
      pmid = {20033519},
      timestamp = {2010.10.22}
    }
  • P. Jannin and W. Korb, “Assessment of Image-Guided Interventions,” in Image-Guided Interventions, T. Peters and K. Cleary, Eds., Springer US, 2008, pp. 531-549.
    [Bibtex]
    @INCOLLECTION{Jannin2008,
      author = {Jannin, Pierre and Korb, Werner},
      title = {Assessment of Image-Guided Interventions},
      booktitle = {Image-Guided Interventions},
      publisher = {Springer US},
      year = {2008},
      editor = {Peters, Terry and Cleary, Kevin},
      pages = {531 - 549},
      note = {Chapter 18},
      abstract = {Assessment of systems and procedures in image-guided interventions
      (IGI) is crucial but complex, and addresses diverse aspects. This
      chapter introduces a framework for dealing with this complexity and
      diversity, and is based on some of the major related concepts in
      health care. Six assessment levels are distinguished in IGI. The
      main phases and components of assessment methodology are described
      with an emphasis on the specification and the reporting phases, and
      on the clear initial formulation of the assessment objective. The
      methodology is presented in a systematic order to allow interinstitutional
      comparison. Finally, we outline the need for standardization in IGI
      assessment to improve the quality of systems, their acceptance by
      surgeons, and facilitate their transfer from research to clinical
      practice.},
      affiliation = {INSERM, Faculté de Médecine CS Rennes France},
      file = {Jannin2008.pdf:Jannin2008.pdf:PDF},
      isbn = {978-0-387-73858-1},
      keyword = {Engineering},
      keywords = {REV},
      owner = {Thomas},
      timestamp = {2011.02.24}
    }
  • R. Khadem, C. C. Yeh, M. Sadeghi-Tehrani, M. R. Bax, J. A. Johnson, J. N. Welch, E. P. Wilkinson, and R. Shahidi, “Comparative tracking error analysis of five different optical tracking systems,” Computer Aided Surgery, vol. 5, iss. 2, pp. 98-107, 2000.
    [Bibtex]
    @ARTICLE{Khadem2000,
      author = {Khadem, Rasool and Yeh, Clement C. and Sadeghi-Tehrani, Mohammad
      and Bax, Michael R. and Johnson, Jeremy A. and Welch, Jacqueline
      Nerney and Wilkinson, Eric P. and Shahidi, Ramin},
      title = {Comparative tracking error analysis of five different optical tracking
      systems},
      journal = {Computer Aided Surgery},
      year = {2000},
      volume = {5},
      pages = {98 - 107},
      number = {2},
      abstract = {Objective: Effective utilization of an optical tracking system
      for image-based surgical guidance requires optimal placement of the
      dynamic reference frame (DRF) with respect to the tracking camera.
      Unlike other studies that measure the overall accuracy of a particular
      navigation system, this study investigates the precision of one component
      of the navigation system: the optical tracking system (OTS). The
      precision of OTS measurements is quantified as jitter. By measuring
      jitter, one can better understand how system inaccuracies depend
      on the position of the DRF with respect to the camera. Materials and
      Methods: Both FlashPoint™ (Image Guided Technologies, Inc., Boulder,
      Colorado) and Polaris™ (Northern Digital Inc., Ontario, Canada)
      optical tracking systems were tested in five different camera and
      DRF configurations. A linear testing apparatus with a software interface
      was designed to facilitate data collection. Jitter measurements were
      collected over a single quadrant within the camera viewing volume,
      as symmetry was assumed about the horizontal and vertical axes. Results:
      Excluding the highest 5% of jitter, the FlashPoint cameras had an
      RMS jitter range of 0.028 ± 0.012 mm for the 300 mm model, 0.051
      ± 0.038 mm for the 580 mm model, and 0.059 ± 0.047 mm for the 1
      m model. The Polaris camera had an RMS jitter range of 0.058 ± 0.037
      mm with an active DRF and 0.115 ± 0.075 mm with a passive DRF. Conclusion:
      Both FlashPoint and Polaris have jitter less than 0.11 mm, although
      the error distributions differ significantly. Total jitter for all
      systems is dominated by the component measured in the axis directed
      away from the camera. Comp Aid Surg 5:98–107 (2000). © 2000 Wiley-Liss,
      Inc.},
      file = {Khadem2000.pdf:Khadem2000.pdf:PDF},
      issn = {1097-0150},
      keywords = {optical tracking system, tracking accuracy, image-guided surgery,
      stereotactic surgery},
      owner = {Thomas},
      publisher = {John Wiley \& Sons, Inc.},
      timestamp = {2011.02.17}
    }
  • M. Lee, “Hands-on Practice and Implementations on a Sound-Guided 3D Navigation System for Orthopedic Surgical Applications,” In Vitro, pp. 641-646, 2005.
    [Bibtex]
    @ARTICLE{Lee2005,
      author = {Lee, Ming-yih},
      title = {Hands-on Practice and Implementations on a Sound-Guided 3D Navigation
      System for Orthopedic Surgical Applications},
      journal = {In Vitro},
      year = {2005},
      pages = {641-646},
      abstract = {Computer assisted surgical navigation becomes crucial as the demand
      for accuracy and minimal invasiveness increases. During the treatment
      of tibial fracture with interlocking nail, the most uncomfortable
      procedure for an orthopedic surgeon is to find the location for distal
      locking screws. In this study, hands-on practice and implementation
      of a sound-guided 3D navigation system was discussed for the fixation
      of distal locking screws in the tibial intramedullary nailing. This
      system consists of a 3D digitizer arm, 3D coordinate registration
      / transformation module and sound-guided navigation module. In addition,
      the proposed sound-guided navigation module was designed with an
      audio guiding mechanism through which a sound with different tones
      and intermittence frequencies will be produced for surgical manipulation.
      In vitro assessment was performed with a donor bone successfully,
      and a clinical case of a young male with tibial fracture was also
      carried out at the operation theater with satisfactory results. No preoperative
      computed tomography or intraoperative fluoroscopy was required.},
      file = {Lee2005.pdf:Lee2005.pdf:PDF},
      keywords = {navigation system,orthopedic surgeries,sound-guided},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • S. Lee, A. Chung, M. Lerotic, M. A. Hawkins, D. Tait, and G. Yang, “Dynamic shape instantiation for intra-operative guidance,” Medical image computing and computer-assisted intervention : MICCAI … International Conference on Medical Image Computing and Computer-Assisted Intervention, vol. 13, iss. Pt 1, pp. 69-76, 2010.
    [Bibtex]
    @ARTICLE{Lee2010a,
      author = {Lee, Su-Lin and Chung, Adrian and Lerotic, Mirna and Hawkins, Maria
      A and Tait, Diana and Yang, Guang-Zhong},
      title = {Dynamic shape instantiation for intra-operative guidance.},
      journal = {Medical image computing and computer-assisted intervention : MICCAI
      ... International Conference on Medical Image Computing and Computer-Assisted
      Intervention},
      year = {2010},
      volume = {13},
      pages = {69-76},
      number = {Pt 1},
      month = {January},
      abstract = {Primary liver cancer and oligometastatic liver disease are one of
      the major causes of mortality worldwide and its treatment ranges
      from surgery to more minimally invasive ablative procedures. With
      the increasing availability of minimally invasive hepatic approaches,
      a real-time method of determining the 3D structure of the liver and
      its location during the respiratory cycle is clinically important.
      However, during treatment, it is difficult to acquire images spanning
      the entire 3D volume rapidly. In this paper, a dynamic 3D shape instantiation
      scheme is developed for providing subject-specific optimal scan planning.
      Using only limited planar information, it is possible to instantiate
      the entire 3D geometry of the organ of interest. The efficacy of
      the proposed method is demonstrated with both detailed numerical
      simulation and a liver phantom with known ground-truth data. Preliminary
      clinical application of the technique is evaluated on a patient group
      with metastatic liver tumours.},
      file = {Lee2010a.pdf:Lee2010a.pdf:PDF},
      keywords = {intra-operative guidance,patient-specific deformation analysis,regression
      analysis,shape instantiation,shape modeling, TEC},
      owner = {thomaskroes},
      pmid = {20879216},
      timestamp = {2010.10.22}
    }
  • S. Lee, M. Lerotic, V. Vitiello, S. Giannarou, K. Kwok, M. Visentini-Scarzanella, and G. Yang, “From medical images to minimally invasive intervention: Computer assistance for robotic surgery,” Computerized medical imaging and graphics : the official journal of the Computerized Medical Imaging Society, vol. 34, iss. 1, pp. 33-45, 2010.
    [Bibtex]
    @ARTICLE{Lee2010,
      author = {Lee, Su-Lin and Lerotic, Mirna and Vitiello, Valentina and Giannarou,
      Stamatia and Kwok, Ka-Wai and Visentini-Scarzanella, Marco and Yang,
      Guang-Zhong},
      title = {From medical images to minimally invasive intervention: Computer
      assistance for robotic surgery.},
      journal = {Computerized medical imaging and graphics : the official journal
      of the Computerized Medical Imaging Society},
      year = {2010},
      volume = {34},
      pages = {33-45},
      number = {1},
      month = {January},
      abstract = {Minimally invasive surgery has been established as an important way
      forward in surgery for reducing patient trauma and hospitalization
      with improved prognosis. The introduction of robotic assistance enhances
      the manual dexterity and accuracy of instrument manipulation. Further
      development of the field in using pre- and intra-operative imaging
      guidance requires the integration of the general anatomy of the patient
      with clear pathologic indications and geometrical information for
      preoperative planning and intra-operative manipulation. It also requires
      effective visualization and the recreation of haptic and tactile
      sensing with dynamic active constraints to improve consistency and
      safety of the surgical procedures. This paper describes key technical
      considerations of tissue deformation tracking, 3D reconstruction,
      subject-specific modeling, image guidance and augmented reality for
      robotic assisted minimally invasive surgery. It highlights the importance
      of adapting preoperative surgical planning according to intra-operative
      data and illustrates how dynamic information such as tissue deformation
      can be incorporated into the surgical navigation framework. Some
      of the recent trends are discussed in terms of instrument design
      and the usage of dynamic active constraints and human-robot perceptual
      docking for robotic assisted minimally invasive surgery.},
      file = {Lee2010.pdf:Lee2010.pdf:PDF},
      issn = {1879-0771},
      keywords = {Computer Simulation,Elasticity Imaging Techniques,Elasticity Imaging
      Techniques: methods,Humans,Imaging, Three-Dimensional,Imaging, Three-Dimensional:
      methods,Models, Biological,Robotics,Robotics: methods,Surgery, Computer-Assisted,Surgery,
      Computer-Assisted: methods,Surgical Procedures, Minimally Invasive,Surgical
      Procedures, Minimally Invasive: methods,User-Computer Interface},
      owner = {thomaskroes},
      pmid = {19699056},
      timestamp = {2010.10.22}
    }
  • H. Liao, H. Ishihara, H. H. Tran, K. Masamune, I. Sakuma, and T. Dohi, “Precision-guided surgical navigation system using laser guidance and 3D autostereoscopic image overlay,” Computerized medical imaging and graphics : the official journal of the Computerized Medical Imaging Society, vol. 34, iss. 1, pp. 46-54, 2010.
    [Bibtex]
    @ARTICLE{Liao2010a,
      author = {Liao, Hongen and Ishihara, Hirotaka and Tran, Huy Hoang and Masamune,
      Ken and Sakuma, Ichiro and Dohi, Takeyoshi},
      title = {Precision-guided surgical navigation system using laser guidance
      and 3D autostereoscopic image overlay.},
      journal = {Computerized medical imaging and graphics : the official journal
      of the Computerized Medical Imaging Society},
      year = {2010},
      volume = {34},
      pages = {46-54},
      number = {1},
      month = {January},
      abstract = {This paper describes a precision-guided surgical navigation system
      for minimally invasive surgery. The system combines a laser guidance
      technique with a three-dimensional (3D) autostereoscopic image overlay
      technique. Images of surgical anatomic structures superimposed onto
      the patient are created by employing an animated imaging method called
      integral videography (IV), which can display geometrically accurate
      3D autostereoscopic images and reproduce motion parallax without
      the need for special viewing or tracking devices. To improve the
      placement accuracy of surgical instruments, we integrated an image
      overlay system with a laser guidance system for alignment of the
      surgical instrument and better visualization of patient's internal
      structure. We fabricated a laser guidance device and mounted it on
      an IV image overlay device. Experimental evaluations showed that
      the system could guide a linear surgical instrument toward a target
      with an average error of 2.48 mm and standard deviation of 1.76 mm.
      Further improvement to the design of the laser guidance device and
      the patient-image registration procedure of the IV image overlay
      will make this system practical; its use would increase surgical
      accuracy and reduce invasiveness.},
      file = {Liao2010a.pdf:Liao2010a.pdf:PDF},
      issn = {1879-0771},
      keywords = {Equipment Design,Equipment Failure Analysis,Humans,Imaging, Three-Dimensional,Imaging,
      Three-Dimensional: instrumentation,Robotics,Robotics: instrumentation,Sensitivity
      and Specificity,Subtraction Technique,Subtraction Technique: instrumentation,Surgery,
      Computer-Assisted,Surgery, Computer-Assisted: instrumentation,Surgical
      Procedures, Minimally Invasive,Surgical Procedures, Minimally Invasive:
      instrumen,User-Computer Interface, STV, TEC},
      owner = {thomaskroes},
      pmid = {19674871},
      timestamp = {2010.10.22}
    }
  • H. Liao, S. Nakajima, M. Iwahara, E. Kobayashi, I. Sakuma, N. Yahagi, and T. Dohi, “Intra-operative real-time 3-D information display system based on integral videography,” in Medical Image Computing and Computer-Assisted Intervention – MICCAI 2001, 2001, pp. 392-400.
    [Bibtex]
    @CONFERENCE{Liao2010,
      author = {Liao, H. and Nakajima, S. and Iwahara, M. and Kobayashi, E. and Sakuma,
      I. and Yahagi, N. and Dohi, T.},
      title = {Intra-operative real-time 3-D information display system based on
      integral videography},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention - MICCAI
      2001},
      year = {2001},
      pages = {392 - 400},
      organization = {Springer},
      file = {Liao2010.pdf:Liao2010.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2010.10.26}
    }
  • M. Lievin, “Stereoscopic augmented reality system for computer-assisted surgery,” International Congress Series, vol. 1230, pp. 107-111, 2001.
    [Bibtex]
    @ARTICLE{Lievin2001,
      author = {Lievin, M},
      title = {Stereoscopic augmented reality system for computer-assisted surgery},
      journal = {International Congress Series},
      year = {2001},
      volume = {1230},
      pages = {107-111},
      month = {June},
      abstract = {A first architecture for an augmented reality system in computer-assisted
      surgery is presented in this paper. Like in “X-ray vision”
      systems, a stereoscopic overlay is visually superimposed on the patient.
      The main purpose of our approach is user-friendliness for the surgeon:
      no additive wearing equipment is required. Registration, rigid body
      location and 3D volume computation are proven to respect real-time
      processing, thanks to an optical navigation system and our integrated
      software framework. Studies are undertaken to replace our actual
      monitor display by an upcoming holographic screen.},
      file = {Lievin2001.pdf:Lievin2001.pdf:PDF},
      issn = {05315131},
      keywords = {augmented reality system,computer-assisted surgery,stereoscopic overlay,
      TEC, AUR, STV},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • B. Mansoux, L. Nigay, and J. Troccaz, “Output Multimodal Interaction: The Case of Augmented Surgery,” in People and Computers XX — Engage, N. Bryan-Kinns, A. Blanford, P. Curzon, and L. Nigay, Eds., Springer London, 2007, pp. 177-192.
    [Bibtex]
    @INCOLLECTION{Mansoux2007,
      author = {Mansoux, Benoît and Nigay, Laurence and Troccaz, Jocelyne},
      title = {Output Multimodal Interaction: The Case of Augmented Surgery},
      booktitle = {People and Computers XX — Engage},
      publisher = {Springer London},
      year = {2007},
      editor = {Bryan-Kinns, Nick and Blanford, Ann and Curzon, Paul and Nigay, Laurence},
      pages = {177 - 192},
      abstract = {Output multimodal interaction involves choice and combination of relevant
      interaction modalities to present information to the user. In this
      paper, we present a framework based on reusable software components
      for rapidly developing output multimodal interfaces by choosing and
      combining interaction modalities. Such an approach enables us to
      quickly explore several design alternatives as part of an iterative
      design process. Our approach is illustrated by examples from a computer-assisted
      surgery system that runs in a specific environment (i.e. an operating
      room) and so needs adapted multimodal interaction. Our approach supports
      the exploration of several output multimodal interaction design alternatives
      with the surgeons.},
      affiliation = {CLIPS-IMAG / équipe IIHM 385 rue de la Bibliothèque 38041 Grenoble
      cedex 9 France},
      file = {Mansoux2007.pdf:Mansoux2007.pdf:PDF},
      isbn = {978-1-84628-664-3},
      keyword = {Computer Science},
      keywords = {TEC, AUR},
      owner = {thomaskroes},
      timestamp = {2011.01.06}
    }
  • R. Marmulla, M. Hilbert, and H. Niederdellmann, “Inherent precision of mechanical, infrared and laser-guided navigation systems for computer-assisted surgery,” Journal of Cranio-Maxillofacial Surgery, vol. 25, iss. 4, pp. 192-197, 1997.
    [Bibtex]
    @ARTICLE{Marmulla1997,
      author = {Marmulla, R. and Hilbert, M. and Niederdellmann, H.},
      title = {Inherent precision of mechanical, infrared and laser-guided navigation
      systems for computer-assisted surgery},
      journal = {Journal of Cranio-Maxillofacial Surgery},
      year = {1997},
      volume = {25},
      pages = {192 - 197},
      number = {4},
      file = {Marmulla1997.pdf:Marmulla1997.pdf:PDF},
      issn = {1010-5182},
      owner = {Thomas},
      publisher = {Elsevier},
      timestamp = {2011.02.04}
    }
  • R. Marmulla, T. Luth, J. Muhling, and S. Hassfeld, “Automated laser registration in image-guided surgery: evaluation of the correlation between laser scan resolution and navigation accuracy,” International journal of oral and maxillofacial surgery, vol. 33, iss. 7, pp. 642-648, 2004.
    [Bibtex]
    @ARTICLE{Marmulla2004,
      author = {Marmulla, R and Luth, T and Muhling, J and Hassfeld, S},
      title = {Automated laser registration in image-guided surgery: evaluation
      of the correlation between laser scan resolution and navigation accuracy.},
      journal = {International journal of oral and maxillofacial surgery},
      year = {2004},
      volume = {33},
      pages = {642-648},
      number = {7},
      month = {October},
      abstract = {Markerless patient registration based on the facial skin surface makes
      logistics prior to image-guided surgery much easier, as it is not
      necessary to place and measure registration markers. A laser scan
      registration of the surgical site takes the place of conventional
      marker-based registration. In a clinical study, the stability and
      accuracy of markerless patient registration was evaluated in 12 patients.
      Intraoral titanium markers served as targets for the infrared-pointer
      of the navigation system in order to check the accuracy of the markerless
      registration process. The correlation between laser scan resolution
      and navigation accuracy was checked using seven different laser scan
      resolutions (a cloud of 300,000 laser scan points down to 3750 laser
      scan points of the surgical site). The markerless patient registration
      was successful as long as high laser scan resolution was used (30,000
      laser scan points and more): the titanium markers were detected with
      a mean deviation of 1.1 +/- 0.2 mm. Low resolution laser scans (6000
      laser scan points of the surgical site and less) revealed inaccuracies
      up to 6 mm.},
      file = {Marmulla2004.pdf:Marmulla2004.pdf:PDF},
      issn = {0901-5027},
      keywords = {Humans,Imaging, Three-Dimensional,Imaging, Three-Dimensional: methods,Lasers,Lasers:
      diagnostic use,Prospective Studies,Reproducibility of Results,Skull
      Neoplasms,Skull Neoplasms: diagnosis,Skull Neoplasms: radiography,Surgery,
      Computer-Assisted,Tomography, X-Ray Computed},
      owner = {thomaskroes},
      pmid = {15337176},
      timestamp = {2010.10.22}
    }
  • D. Maupu, M. H. Van Horn, S. Weeks, and E. Bullitt, “3D stereo interactive medical visualization,” Computer Graphics and Applications, IEEE, vol. 25, iss. 5, pp. 67-71, 2005.
    [Bibtex]
    @ARTICLE{Maupu2005,
      author = {Maupu, D. and Van Horn, M.H. and Weeks, S. and Bullitt, E.},
      title = {3D stereo interactive medical visualization},
      journal = {Computer Graphics and Applications, IEEE},
      year = {2005},
      volume = {25},
      pages = {67 - 71},
      number = {5},
      month = {September - October},
      abstract = {Our interactive, 3D stereo display helps guide clinicians during endovascular
      procedures, such as intraoperative needle insertion and stent placement
      relative to the target organs. We describe a new method of guiding
      endovascular procedures using interactive 3D stereo visualizations.
      We use as an example the transjugular intrahepatic portosystemic
      shunt (TIPS) procedure. Our goal is to increase the speed and safety
      of endovascular procedures by providing the interventionalist with
      3D information as the operation proceeds. Our goal is to provide
      3D image guidance of the TIPS procedure so that the interventionalist
      can readily adjust the needle position and trajectory to reach the
      target on the first pass. We propose a 3D stereo display of the interventionalist's
      needle and target vessels. We also add interactivity via head tracking
      so that the interventionalist gains a better 3D sense of the relationship
      between the target vessels and the needle during needle advancement.},
      file = {:Maupu2005.pdf:PDF},
      issn = {0272-1716},
      keywords = {3D image guidance;3D information;3D stereo display;endovascular procedures;interactive
      medical visualization;intraoperative needle insertion;stent placement;transjugular
      intrahepatic portosystemic shunt procedure;blood vessels;computerised
      tomography;data visualisation;image registration;image segmentation;interactive
      systems;medical image processing;solid modelling;stereo image processing;three-dimensional
      displays;Imaging, Three-Dimensional;Photogrammetry;Portacaval Shunt,
      Surgical;Radiographic Image Interpretation, Computer-Assisted;Surgery,
      Computer-Assisted;User-Computer Interface;Vascular Surgical Procedures;,
      TEC, STV},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • R. A. Mischkowski, M. Zinser, A. Kübler, U. Seifert, and J. E. Zöller, “The Hollowman – a virtual reality tool in cranio-maxillofacial surgery,” International Congress Series, vol. 1268, pp. 658-661, 2004.
    [Bibtex]
    @ARTICLE{Mischkowski2004,
      author = {R. A. Mischkowski and M. Zinser and A. Kübler and U. Seifert and
      J. E. Zöller},
      title = {The Hollowman - a virtual reality tool in cranio-maxillofacial surgery},
      journal = {International Congress Series},
      year = {2004},
      volume = {1268},
      pages = {658 - 661},
      abstract = {A virtual reality tool for computer-assisted surgery named “The Hollowman”
      is presented. This allows for visual tracking of real anatomical
      structures in superposition with volume rendered CT or MRI scans
      and thus can be used for navigated translocation of bony segments.
      For an evaluation study “The Hollowman” was used in orthognathic
      surgery to control the translocation of the maxilla after Le Fort
      I osteotomy within a bimaxillary procedure. Up to now, four patients
      have been included. The tool has proven very valuable especially
      in complex nonlinear translocations of the maxilla as the surgeon
      could directly visualise the position of the mobilised bone in relation
      to the preoperatively planned situation. The application to other
      types of interventions in cranio-maxillofacial surgery associated
      with movement of bony segments as Le Fort III osteotomy, fronto-orbital
      advancement and cranial vault reshaping or reconstruction seems to
      be considerable as well.},
      file = {Mischkowski2004.pdf:Mischkowski2004.pdf:PDF},
      issn = {0531-5131},
      keywords = {The Hollowman, VOR, APP, PLA, CMS},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • R. A. Mischkowski, M. Zinser, A. Kübler, U. Seifert, and J. E. Zöller, “Clinical and experimental evaluation of an augmented reality system in cranio-maxillofacial surgery,” International Congress Series, vol. 1281, pp. 565-570, 2005.
    [Bibtex]
    @ARTICLE{Mischkowski2005,
      author = {R. A. Mischkowski and M. Zinser and A. Kübler and U. Seifert and
      J. E. Zöller},
      title = {Clinical and experimental evaluation of an augmented reality system
      in cranio-maxillofacial surgery},
      journal = {International Congress Series},
      year = {2005},
      volume = {1281},
      pages = {565 - 570},
      abstract = {An augmented reality tool for computer-assisted surgery named X-Scope
      allows for visual tracking of real anatomical structures in superposition
      with volume rendered CT or MRI scans and thus can be used for navigated
      translocation of bony segments. In a feasibility study X-Scope was
      used in orthognathic surgery to control the translocation of the maxilla
      after Le Fort I osteotomy within a bimaxillary procedure. The achieved
      situation was compared with the computer-based preoperative planning
      by means of cephalometric analysis on lateral and frontal cephalograms.
      In addition to the clinical feasibility study, an experimental evaluation
      of system accuracy was performed. The technique could be successfully
      applied in 5 patients. The maxillary positioning using X-Scope was
      accomplished with accuracy within a range of 1 mm. The tool was used
      in all cases in addition to the usual intra-operative splints. A
      stand-alone application without a conventional control mechanism does
      not yet seem reasonable. The final analysis of data obtained from
      the accuracy study is not yet complete. The preliminary results
      indicate a deviation of the X-Scope system not significantly greater
      than the deviation of the navigation system itself with a given registration
      method. Augmented reality tools like X-Scope may be helpful for control
      of maxillary translocation in orthognathic surgery. The application
      to other types of interventions in cranio-maxillofacial surgery associated
      with movement of bony segments, such as Le Fort III osteotomy, fronto-orbital
      advancement, and cranial vault reshaping or reconstruction may be
      considered as well.},
      file = {Mischkowski2005.pdf:Mischkowski2005.pdf:PDF},
      issn = {0531-5131},
      keywords = {X-Scope, APP, CMS, AUR},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • R. Mischkowski, M. Zinser, L. Ritter, J. Neugebauer, E. Keeve, and J. Zöller, “Intraoperative navigation in the maxillofacial area based on 3D imaging obtained by a cone-beam device,” International Journal of Oral and Maxillofacial Surgery, vol. 36, iss. 8, pp. 687-694, 2007.
    [Bibtex]
    @ARTICLE{Mischkowski2007,
      author = {Mischkowski, R. A. and Zinser, M. J. and Ritter, L. and Neugebauer, J.
      and Keeve, E. and Z{\"o}ller, J. E.},
      title = {Intraoperative navigation in the maxillofacial area based on 3D imaging
      obtained by a cone-beam device},
      journal = {International Journal of Oral and Maxillofacial Surgery},
      year = {2007},
      volume = {36},
      pages = {687 - 694},
      number = {8},
      abstract = {The aim of this study was to evaluate intraoperative navigation in
      the maxillofacial area based on three-dimensional imaging obtained
      by a cone-beam device. Digital volume tomograms (DVT) were obtained
      by the prototype of GALILEOS (Sirona Dental Systems Inc., Bensheim,
      Germany), a newly developed, compact size, cone-beam machine with
      a scan volume of 15 cm × 15 cm × 15 cm. Intraoperative navigation
      was performed in 12 patients in three selected indications. Target
      detection error expressing the accuracy of DVT navigation and registration
      performance of specially developed methods for image-to-patient registration
      was estimated. Target detection error was maximally 2 mm and depended
      on the registration method chosen. The automatic detection rate of
      the fiducial markers ranged between 0.64 and 0.32. The preoperatively
      defined treatment plan was fully accomplished in 11 out of 12 cases.
      A favourable surgical outcome was achievable in all cases. Intraoperative
      complications were not observed. Intraoperative navigation based
      on DVT imaging can be considered as a valuable alternative to CT-based
      procedures. Special characteristics of the cone-beam technique, in
      terms of contrast resolution and the limited field-of-view size of
      the devices, restrict the indication spectrum and create a demand
      for modifications of the usual registration methods.},
      file = {Mischkowski2007.pdf:Mischkowski2007.pdf:PDF},
      issn = {0901-5027},
      owner = {thomaskroes},
      publisher = {Elsevier},
      timestamp = {2010.11.09}
    }
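
The "target detection error" reported in the entry above is, in effect, a target registration error: the residual distance between a target mapped through the image-to-patient registration and its physically measured position. A minimal sketch (the 4x4 transform name is a hypothetical placeholder):

    import numpy as np

    def target_error_mm(T_patient_from_image, target_image, target_patient):
        """Distance (in the input units, e.g. mm) between the registered
        target and its measured position in patient space."""
        mapped = T_patient_from_image @ np.append(target_image, 1.0)  # homogeneous
        return float(np.linalg.norm(mapped[:3] - target_patient))
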
  • P. M. Novotny, J. A. Stoll, N. V. Vasilyev, P. J. del Nido, P. E. Dupont, T. E. Zickler, and R. D. Howe, “GPU based real-time instrument tracking with three-dimensional ultrasound,” Medical Image Analysis, vol. 11, iss. 5, pp. 458-464, 2007.
    [Bibtex]
    @ARTICLE{Novotny2007,
      author = {Novotny, Paul M and Stoll, Jeff A and Vasilyev, Nikolay V and del
      Nido, Pedro J and Dupont, Pierre E and Zickler, Todd E and Howe,
      Robert D},
      title = {GPU based real-time instrument tracking with three-dimensional ultrasound},
      journal = {Medical Image Analysis},
      year = {2007},
      volume = {11},
      pages = {458 - 464},
      number = {5},
      month = {October},
      abstract = {Real-time three-dimensional ultrasound enables new intracardiac surgical
      procedures, but the distorted appearance of instruments in ultrasound
      poses a challenge to surgeons. This paper presents a detection technique
      that identifies the position of the instrument within the ultrasound
      volume. The algorithm uses a form of the generalized Radon transform
      to search for long straight objects in the ultrasound image, a feature
      characteristic of instruments and not found in cardiac tissue. When
      combined with passive markers placed on the instrument shaft, the
      full position and orientation of the instrument is found in 3D space.
      This detection technique is amenable to rapid execution on the current
      generation of personal computer graphics processor units (GPU). Our
      GPU implementation detected a surgical instrument in 31 ms, sufficient
      for real-time tracking at the 25 volumes per second rate of the ultrasound
      machine. A water tank experiment found instrument orientation errors
      of 1.1 degrees and tip position errors of less than 1.8mm. Finally,
      an in vivo study demonstrated successful instrument tracking inside
      a beating porcine heart.},
      file = {Novotny2007.pdf:Novotny2007.pdf:PDF},
      issn = {1361-8415},
      keywords = {Animals,Cardiovascular Surgical Procedures,Cardiovascular Surgical
      Procedures: instrumentatio,Cardiovascular Surgical Procedures: methods,Computer
      Systems,Echocardiography, Three-Dimensional,Echocardiography, Three-Dimensional:
      instrumentati,Echocardiography, Three-Dimensional: methods,Equipment
      Design,Equipment Failure Analysis,Phantoms, Imaging,Reproducibility
      of Results,Sensitivity and Specificity,Signal Processing, Computer-Assisted,Signal
      Processing, Computer-Assisted: instrumentat,Surgery, Computer-Assisted,Surgery,
      Computer-Assisted: instrumentation,Surgery, Computer-Assisted: methods,Surgical
      Instruments,Swine,Ultrasonography, Interventional,Ultrasonography,
      Interventional: instrumentation,Ultrasonography, Interventional:
      methods},
      owner = {thomaskroes},
      pmid = {17681483},
      timestamp = {2010.10.22}
    }
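
The detection idea in the entry above — a long straight instrument shows up as a sharp peak of line integrals — can be imitated in 2D with the ordinary Radon transform from scikit-image. The paper uses a generalized Radon transform on 3D ultrasound volumes with a GPU implementation, so this is only a toy analogue:

    import numpy as np
    from skimage.transform import radon

    def dominant_line_angle(image):
        """Orientation (degrees) of the strongest straight feature in `image`."""
        thetas = np.arange(0.0, 180.0)
        # Each sinogram column holds line integrals for one candidate angle;
        # a bright straight object produces a sharp peak.
        sinogram = radon(image, theta=thetas)
        _, angle_idx = np.unravel_index(np.argmax(sinogram), sinogram.shape)
        return thetas[angle_idx]

    # Toy usage: a synthetic "instrument" drawn as a bright diagonal line.
    img = np.zeros((128, 128))
    idx = np.arange(24, 104)
    img[idx, idx] = 1.0
    print(dominant_line_angle(img))   # ~45 or ~135, depending on angle convention
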
  • A. Pandya and G. Auner, “Simultaneous augmented and virtual reality for surgical navigation,” in Annual Meeting of the North American Fuzzy Information Processing Society (NAFIPS 2005), 2005, pp. 429-435.
    [Bibtex]
    @INPROCEEDINGS{Pandya2005,
      author = {Pandya, A. and Auner, G.},
      title = {Simultaneous augmented and virtual reality for surgical navigation},
      booktitle = {Annual Meeting of the North American Fuzzy Information Processing
      Society (NAFIPS 2005)},
      year = {2005},
      pages = {429 - 435},
      month = {June},
      abstract = {We use a passive articulated arm to track a calibrated end-effector
      mounted video camera. In real time, we can superimpose the live video
      view with the synchronized graphical view of CT-derived segmented
      object(s) of interest within a phantom skull (augmented reality (AR))
      and provide the trajectory of the end-effector (translated to the
      focal point) in orthogonal image data scans and 3D models (VR). Augmented
      reality generation is a natural extension for the surgeon because
      it does both the 2D to 3D transformation and projects the views directly
      onto the patient view. However, there are distinct advantages to
      also having a VR (image guided surgery) view of the tool's trajectory.
      Both AR and VR visualization have advantages and disadvantages depending
      on the stage of the surgery and surgeons should have the option to
      select. In this paper, we provide the software design and the network
      communication details of a multi-user, on-demand, near real-time
      simultaneous AR/VR system for surgical guidance.},
      file = {:Pandya2005.pdf:PDF},
      keywords = {CT-derived segmented object; augmented reality; end-effector mounted
      video camera; image guided surgery view; live video view; orthogonal
      image data scan; passive articulated arm; software design; surgical
      guidance; surgical navigation; synchronized graphical view; tools
      trajectory; virtual reality; augmented reality; image segmentation;
      medical image processing; medical robotics; surgery; systems analysis;,
      TEC, AUR},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
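
The AR overlay described in the entry above amounts to projecting CT-derived 3D structures into the calibrated, tracked camera's image. A minimal pinhole-camera sketch (matrix names are assumptions; the paper's arm calibration and network layer are omitted):

    import numpy as np

    def project_to_image(K, T_cam_from_ct, pts_ct):
        """Project (N, 3) CT-space points to (N, 2) pixel coordinates.

        K             -- 3x3 camera intrinsics from calibration
        T_cam_from_ct -- 4x4 rigid transform, CT frame -> camera frame
                         (tracked arm pose composed with the registration)
        """
        pts_h = np.c_[pts_ct, np.ones(len(pts_ct))]   # homogeneous coordinates
        pts_cam = (T_cam_from_ct @ pts_h.T)[:3]       # into the camera frame
        uvw = K @ pts_cam                             # pinhole projection
        return (uvw[:2] / uvw[2]).T                   # divide by depth
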
  • T. M. Peters, “Image-guidance for surgical procedures,” Physics in Medicine and Biology, vol. 51, iss. 14, pp. R505-R540, 2006.
    [Bibtex]
    @ARTICLE{Peters2006,
      author = {Peters, Terry M},
      title = {Image-guidance for surgical procedures},
      journal = {Physics in Medicine and Biology},
      year = {2006},
      volume = {51},
      pages = {R505 - R540},
      number = {14},
      month = {July},
      abstract = {Contemporary imaging modalities can now provide the surgeon with high
      quality three- and four-dimensional images depicting not only normal
      anatomy and pathology, but also vascularity and function. A key component
      of image-guided surgery (IGS) is the ability to register multi-modal
      pre-operative images to each other and to the patient. The other
      important component of IGS is the ability to track instruments in
      real time during the procedure and to display them as part of a realistic
      model of the operative volume. Stereoscopic, virtual- and augmented-reality
      techniques have been implemented to enhance the visualization and
      guidance process. For the most part, IGS relies on the assumption
      that the pre-operatively acquired images used to guide the surgery
      accurately represent the morphology of the tissue during the procedure.
      This assumption may not necessarily be valid, and so intra-operative
      real-time imaging using interventional MRI, ultrasound, video and
      electrophysiological recordings are often employed to ameliorate
      this situation. Although IGS is now in extensive routine clinical
      use in neurosurgery and is gaining ground in other surgical disciplines,
      there remain many drawbacks that must be overcome before it can be
      employed in more general minimally-invasive procedures. This review
      overviews the roots of IGS in neurosurgery, provides examples of
      its use outside the brain, discusses the infrastructure required
      for successful implementation of IGS approaches and outlines the
      challenges that must be overcome for IGS to advance further.},
      file = {Peters2006.pdf:Peters2006.pdf:PDF},
      issn = {0031-9155},
      keywords = {Algorithms,Brain,Brain Neoplasms,Brain Neoplasms: radiography,Brain
      Neoplasms: surgery,Brain: radiography,Brain: surgery,Electrophysiology,Humans,Image
      Processing, Computer-Assisted,Magnetic Resonance Imaging,Neurosurgical
      Procedures,Radiosurgery,Radiosurgery: methods,Stereotaxic Techniques,Surgery,
      Computer-Assisted,Surgery, Computer-Assisted: methods, REV},
      owner = {thomaskroes},
      pmid = {16825730},
      timestamp = {2010.10.22}
    }
  • B. Reitinger, P. Werlberger, A. Bornik, R. Beichel, and D. Schmalstieg, “Spatial Measurements for Medical Augmented Reality,” in Proceedings of the 4th IEEE/ACM International Symposium on Mixed and Augmented Reality, Washington, DC, USA, 2005, pp. 208-209.
    [Bibtex]
    @INPROCEEDINGS{Reitinger2005,
      author = {Reitinger, Bernhard and Werlberger, Pascal and Bornik, Alexander
      and Beichel, Reinhard and Schmalstieg, Dieter},
      title = {Spatial Measurements for Medical Augmented Reality},
      booktitle = {Proceedings of the 4th IEEE/ACM International Symposium on Mixed
      and Augmented Reality},
      year = {2005},
      series = {ISMAR '05},
      pages = {208 - 209},
      address = {Washington, DC, USA},
      publisher = {IEEE Computer Society},
      acmid = {1105215},
      file = {Reitinger2005.pdf:Reitinger2005.pdf:PDF},
      isbn = {0-7695-2459-1},
      keywords = {TEC},
      numpages = {2},
      owner = {Thomas},
      timestamp = {2011.02.01}
    }
  • J. Rexilius, S. Warfield, C. Guttmann, X. Wei, R. Benson, L. Wolfson, M. Shenton, H. Handels, and R. Kikinis, “A Novel Nonrigid Registration Algorithm and Applications,” in Medical Image Computing and Computer-Assisted Intervention – MICCAI 2001, W. Niessen and M. Viergever, Eds., Springer Berlin / Heidelberg, 2001, vol. 2208, pp. 923-931.
    [Bibtex]
    @INCOLLECTION{Rexilius2001,
      author = {Rexilius, J. and Warfield, S. and Guttmann, C. and Wei, X. and Benson,
      R. and Wolfson, L. and Shenton, M. and Handels, H. and Kikinis, R.},
      title = {A Novel Nonrigid Registration Algorithm and Applications},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention – MICCAI
      2001},
      publisher = {Springer Berlin / Heidelberg},
      year = {2001},
      editor = {Niessen, Wiro and Viergever, Max},
      volume = {2208},
      series = {Lecture Notes in Computer Science},
      pages = {923 - 931},
      abstract = {In this paper we describe a new algorithm for nonrigid registration
      of brain images based on an elastically deformable model. The use
      of registration methods has become an important tool for computer-assisted
      diagnosis and surgery. Our goal was to improve analysis in various
      applications of neurology and neurosurgery by improving nonrigid
      registration. A local gray level similarity measure is used to make
      an initial sparse displacement field estimate. The field is initially
      estimated at locations determined by local features, and then a linear
      elastic model is used to infer the volumetric deformation across
      the image. The associated partial differential equation is solved
      by a finite element approach. A model of empirically observed variability
      of the brain was created from a dataset of 154 young adults. Both
      homogeneous and inhomogeneous elasticity models were compared. The
      algorithm has been applied to medical applications including intraoperative
      images of neurosurgery showing brain shift and a study of gait and
      balance disorder.},
      affiliation = {Surgical Planning Laboratory, Harvard Medical School \& Brigham
      and Women’s Hospital, 75 Francis St., Boston, MA 02115, USA},
      file = {Rexilius2001.pdf:Rexilius2001.pdf:PDF},
      keywords = {TEC},
      owner = {thomaskroes},
      timestamp = {2011.01.11}
    }
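
The pipeline in the abstract above — sparse displacements estimated at feature locations, then a dense field inferred across the image — can be caricatured in 2D as follows. The paper infers the dense field with a linear-elastic finite element solve; the RBF interpolation below is a deliberately simple stand-in for that step, and all names are illustrative:

    import numpy as np
    from scipy.interpolate import RBFInterpolator

    def sparse_displacements(fixed, moving, points, half=8, search=4):
        """Block matching: best SSD displacement per (row, col) feature point.
        Points must lie at least `half + search` pixels from the border."""
        out = []
        for r, c in points:
            ref = fixed[r - half:r + half, c - half:c + half]
            best, best_d = np.inf, (0, 0)
            for dr in range(-search, search + 1):
                for dc in range(-search, search + 1):
                    cand = moving[r + dr - half:r + dr + half,
                                  c + dc - half:c + dc + half]
                    ssd = float(np.sum((ref - cand) ** 2))
                    if ssd < best:
                        best, best_d = ssd, (dr, dc)
            out.append(best_d)
        return np.asarray(out, dtype=float)

    def dense_field(points, displacements, shape):
        """Spread sparse displacements over the image (elastic-solve stand-in)."""
        rr, cc = np.mgrid[0:shape[0], 0:shape[1]]
        grid = np.c_[rr.ravel(), cc.ravel()].astype(float)
        interp = RBFInterpolator(np.asarray(points, float), displacements,
                                 smoothing=1.0)
        return interp(grid).reshape(shape[0], shape[1], 2)
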
  • F. Sauer, S. Vogt, and A. Khamene, “Augmented Reality,” in Image-Guided Interventions, T. Peters and K. Cleary, Eds., Springer US, 2008, pp. 81-119.
    [Bibtex]
    @INCOLLECTION{Sauer2008,
      author = {Sauer, Frank and Vogt, Sebastian and Khamene, Ali},
      title = {Augmented Reality},
      booktitle = {Image-Guided Interventions},
      publisher = {Springer US},
      year = {2008},
      editor = {Peters, Terry and Cleary, Kevin},
      pages = {81 - 119},
      note = {Chapter 4},
      abstract = {Much of the visualization in image-guided interventions is achieved
      by creating a virtual image of the surgical or therapeutic environment,
      based upon preoperative images, and displaying it on a workstation
      that is remote from the patient. Linkages between the patient and
      the image are created through image registration and tracked tools.
      Such solutions are not always ideal, and result in a psychophysical
      decoupling of the actual and virtual therapeutic working spaces.
      Using augmented reality, these two spaces are fused into a single
      volume, which is typically viewed stereoscopically so that a preoperative
      or intraoperative patient image appears at the location of the actual
      patient anatomy. The surgeon has the perception that he is seeing
      through the patient or organ surface to observe the operative site.
      This chapter reviews the various approaches to augmented reality,
      and discusses the engineering and psychophysical challenges in developing
      user-friendly systems.},
      affiliation = {Siemens Corporate Research USA},
      file = {Sauer2008.pdf:Sauer2008.pdf:PDF},
      isbn = {978-0-387-73858-1},
      keyword = {Engineering},
      keywords = {AUR},
      owner = {Thomas},
      timestamp = {2011.02.24}
    }
  • F. Sauer, F. Wenzel, S. Vogt, Y. Tao, Y. Genc, and A. Bani-Hashemi, “Augmented workspace: Designing an AR testbed,” in Proceedings of the IEEE and ACM International Symposium on Augmented Reality (ISAR 2000), 2000, pp. 47-53.
    [Bibtex]
    @CONFERENCE{Sauer2002,
      author = {Sauer, F. and Wenzel, F. and Vogt, S. and Tao, Y. and Genc, Y. and
      Bani-Hashemi, A.},
      title = {Augmented workspace: Designing an AR testbed},
      booktitle = {Proceedings of the IEEE and ACM International Symposium on Augmented
      Reality (ISAR 2000)},
      year = {2000},
      pages = {47 - 53},
      organization = {IEEE},
      isbn = {0769508464},
      keywords = {AUR},
      owner = {Thomas},
      timestamp = {2011.02.28}
    }
  • M. Scheuering, “Intraoperative augmented reality for minimally invasive liver interventions,” Proceedings of SPIE, pp. 407-417, 2003.
    [Bibtex]
    @ARTICLE{Scheuering2003,
      author = {Scheuering, Michael},
      title = {Intraoperative augmented reality for minimally invasive liver interventions},
      journal = {Proceedings of SPIE},
      year = {2003},
      pages = {407 - 417},
      abstract = {Minimally invasive liver interventions demand a lot of experience
      due to the limited access to the field of operation. In particular,
      the correct placement of the trocar and the navigation within the
      patient’s body are hampered. In this work, we present an intraoperative
      augmented reality system (IARS) that directly projects preoperatively
      planned information and structures extracted from CT data onto the
      real laparoscopic video images. Our system consists of a preoperative
      planning tool for liver surgery and an intraoperative real-time
      visualization component. The planning software takes into account
      the individual anatomy of the intrahepatic vessels and determines
      the vascular territories. Methods for fast segmentation of the liver
      parenchyma, of the intrahepatic vessels and of liver lesions are
      provided. In addition, very efficient algorithms for skeletonization
      and vascular analysis allowing the approximation of patient-individual
      liver vascular territories are included. The intraoperative
      visualization is based on a standard graphics adapter for hardware
      accelerated high performance direct volume rendering. The preoperative
      CT data is rigidly registered to the patient position by the use of
      fiducials that are attached to the patient’s body, and anatomical
      landmarks in combination with an electro-magnetic navigation system.
      Our system was evaluated in vivo during a minimally invasive
      intervention simulation in a swine under anesthesia.},
      file = {Scheuering2003.pdf:Scheuering2003.pdf:PDF},
      issn = {0277786X},
      keywords = {computer as-,direct volume rendering,hardware acceleration,image-guided
      surgery,registration, AUR, HES},
      owner = {thomaskroes},
      publisher = {SPIE},
      timestamp = {2010.10.25}
    }
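
The rigid image-to-patient registration step mentioned above (fiducials attached to the patient, matched against their CT coordinates) is classically solved in closed form by an SVD-based least-squares fit. The sketch below is that generic textbook solution, not the paper's specific implementation:

    import numpy as np

    def rigid_register(fids_ct, fids_patient):
        """Least-squares R (3x3) and t (3,) with R @ p_ct + t ≈ p_patient.

        fids_ct, fids_patient -- (N, 3) paired fiducial coordinates, N >= 3.
        """
        mu_ct, mu_pat = fids_ct.mean(axis=0), fids_patient.mean(axis=0)
        H = (fids_ct - mu_ct).T @ (fids_patient - mu_pat)   # 3x3 cross-covariance
        U, _, Vt = np.linalg.svd(H)
        d = np.sign(np.linalg.det(Vt.T @ U.T))              # guard against reflection
        R = Vt.T @ np.diag([1.0, 1.0, d]) @ U.T
        t = mu_pat - R @ mu_ct
        return R, t
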
  • J. H. Shuhaiber, “Augmented reality in surgery,” Archives of Surgery, vol. 139, iss. 2, p. 170, 2004.
    [Bibtex]
    @ARTICLE{Shuhaiber2004,
      author = {Shuhaiber, J.H.},
      title = {Augmented reality in surgery},
      journal = {Archives of Surgery},
      year = {2004},
      volume = {139},
      pages = {170},
      number = {2},
      file = {Shuhaiber2004.pdf:Shuhaiber2004.pdf:PDF},
      keywords = {REV, AUR},
      owner = {thomaskroes},
      publisher = {Am Med Assoc},
      timestamp = {2011.01.04}
    }
  • T. Sielhorst, M. Feuerstein, and N. Navab, “Advanced medical displays: A literature review of augmented reality,” Journal of Display Technology, vol. 4, iss. 4, pp. 451-467, 2008.
    [Bibtex]
    @ARTICLE{Sielhorst2008,
      author = {Sielhorst, T. and Feuerstein, M. and Navab, N.},
      title = {Advanced medical displays: A literature review of augmented reality},
      journal = {Journal of Display Technology},
      year = {2008},
      volume = {4},
      pages = {451 - 467},
      number = {4},
      file = {Sielhorst2008.pdf:Sielhorst2008.pdf:PDF},
      issn = {1551-319X},
      keywords = {REV, AUR},
      owner = {thomaskroes},
      publisher = {IEEE},
      timestamp = {2011.01.26}
    }
  • T. Tawara and K. Ono, “A framework for volume segmentation and visualization using Augmented Reality,” 2010 IEEE Symposium on 3D User Interfaces (3DUI), pp. 121-122, 2010.
    [Bibtex]
    @ARTICLE{Tawara2010,
      author = {Tawara, Takehiro and Ono, Kenji},
      title = {A framework for volume segmentation and visualization using Augmented
      Reality},
      journal = {2010 IEEE Symposium on 3D User Interfaces (3DUI)},
      year = {2010},
      pages = {121 - 122},
      month = {March},
      abstract = {We propose a two-handed direct manipulation system to achieve complex
      volume segmentation of CT/MRI data in Augmented Reality with a
      remote controller attached to a motion tracking cube. At the same
      time, segmented data is displayed by direct volume rendering using
      a programmable GPU. Our system achieves visualization of real-time
      modification of volume data with complex shading, including transparency
      control by changing transfer functions, displaying any cross section,
      and rendering multi materials using a local illumination model. Our
      goal is to build a system that facilitates direct manipulation of
      volumetric CT/MRI data for segmentation in Augmented Reality. Volume
      segmentation is a challenging problem and segmented data has an
      important role for visualization and analysis.},
      file = {Tawara2010.pdf:Tawara2010.pdf:PDF},
      isbn = {978-1-4244-6846-1},
      owner = {thomaskroes},
      publisher = {IEEE},
      timestamp = {2010.10.25}
    }
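
The transfer-function-driven direct volume rendering in the entry above boils down to front-to-back alpha compositing along each viewing ray. The paper evaluates this per fragment on a programmable GPU; the CPU toy below, with made-up transfer functions, shows only the compositing rule:

    import numpy as np

    def composite_ray(samples, tf_rgb, tf_alpha):
        """Front-to-back compositing of scalar `samples` along one viewing ray.
        tf_rgb / tf_alpha are the transfer functions: scalar -> color / opacity."""
        color = np.zeros(3)
        transmittance = 1.0
        for s in samples:
            a = tf_alpha(s)
            color += transmittance * a * np.asarray(tf_rgb(s), dtype=float)
            transmittance *= 1.0 - a
            if transmittance < 1e-3:          # early ray termination
                break
        return color

    # Toy usage: soft tissue dim and translucent, "bone" bright and opaque.
    ray = np.array([0.1, 0.2, 0.2, 0.9, 0.9])
    print(composite_ray(ray,
                        tf_rgb=lambda s: (s, s, s),
                        tf_alpha=lambda s: min(1.0, 2.0 * s if s > 0.5 else 0.1 * s)))
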
  • J. Traub, T. Sielhorst, S. Heining, and N. Navab, “Advanced Display and Visualization Concepts for Image Guided Surgery,” Journal of Display Technology, vol. 4, iss. 4, pp. 483-490, 2008.
    [Bibtex]
    @ARTICLE{Traub2008,
      author = {Traub, Joerg and Sielhorst, Tobias and Heining, Sandro-Michael and
      Navab, Nassir},
      title = {Advanced Display and Visualization Concepts for Image Guided Surgery},
      journal = {Journal of Display Technology},
      year = {2008},
      volume = {4},
      pages = {483 - 490},
      number = {4},
      abstract = {Thanks to its rapid development in the last decades, image guided
      surgery (IGS) has been introduced successfully in many modern operating
      rooms. Current IGS systems provide their navigation information on
      a standard computer monitor. Alternatively, one could enhance the
      direct sight of the physician by an overlay of the virtual data onto
      the real patient view. Such in situ visualization methods have been
      proposed in the literature for providing a more intuitive visualization,
      improving the ergonomics as well as the hand-eye coordination. In
      this paper, we first discuss the fundamental issues and the recent
      endeavors in advanced display and visualization for IGS. We then present
      some of our recent work comparing two navigation systems: 1) a classical
      monitor based navigation and 2) a new navigation system we had developed
      based on in situ visualization. As both solutions reveal shortcomings
      as well as complementary advantages, we introduce a new solution
      that combines both concepts into one hybrid user interface. Finally,
      experimental results report on the performance of several surgeons
      using an external monitor as well as a stereo video see-through
      head-mounted display (HMD). The experiments consist of drilling into
      a phantom in order to reach planted deep-seated targets only visible
      in Computed Tomography (CT) data. We evaluate several visualization
      techniques, including the new hybrid solution, and study their
      influence on the performance of the participant surgeons.},
      file = {Traub2008.pdf:Traub2008.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2010.10.26}
    }