Bibliography

This page contains the full bibliography of our paper, formatted as HTML, with clickable DOI links (where available) and BibTeX links for each article. You can also download the complete database in BibTeX format.

For your convenience, we enriched the BibTeX entries with extra search keywords, allowing you to filter for papers on a specific topic, e.g. hepatic surgery; a small example of filtering by these keywords follows the table below.

Keyword Description
REV Review paper
APP Application paper
TEC Technique paper
PLA Includes planning
VOR Volume rendering
GUI Includes guidance
SUR Surface rendering
AUR Augmented reality
STV Stereo vision
SLR Slice rendering
RPP Rapid prototyping
PRS Process simulation
OCS Outcome simulation
HES Hepatic surgery
OTS Orthopaedic surgery
NES Neurosurgery
CMS Cranio-maxillofacial surgery
ENT Ear, nose, and throat surgery
TAS Thoracoabdominal surgery
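
The sketch below shows one way to use these keywords on the downloaded database: it scans the .bib file and lists every entry tagged with a given keyword. It is only an illustration; the filename bibliography.bib and the helper entries_with_keyword are assumptions, not part of the download.

    # Minimal sketch: list BibTeX entries carrying a given search keyword.
    # Assumption: the database was saved locally as "bibliography.bib" and each
    # entry keeps its tags in a single keywords = {...} field, as in the entries below.
    import re

    def entries_with_keyword(bib_text, tag):
        """Return the BibTeX entries whose keywords field contains the given tag."""
        chunks = re.split(r'(?=@\w+\{)', bib_text)  # one chunk per @TYPE{key, entry
        hits = []
        for chunk in chunks:
            m = re.search(r'keywords\s*=\s*\{(.*?)\}', chunk, re.S | re.I)
            if not m:
                continue
            tags = [t.strip().lower() for t in m.group(1).replace('\n', ' ').split(',')]
            if tag.lower() in tags:
                hits.append(chunk.strip())
        return hits

    if __name__ == "__main__":
        with open("bibliography.bib", encoding="utf-8") as f:
            bib = f.read()
        for entry in entries_with_keyword(bib, "HES"):  # HES = hepatic surgery
            print(entry.split("\n", 1)[0])              # print the @ARTICLE{key, header line

Running it with HES would list the hepatic surgery papers; any other tag from the table above works the same way.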

 

  • L. Ciocca, F. De Crescenzio, M. Fantini, and R. Scotti, “CAD/CAM and rapid prototyped scaffold construction for bone regenerative medicine and surgical transfer of virtual planning: a pilot study.,” Computerized medical imaging and graphics : the official journal of the Computerized Medical Imaging Society, vol. 33, iss. 1, pp. 58-62, 2009.
    [Bibtex]
    @ARTICLE{Ciocca2009,
      author = {Ciocca, L and {De Crescenzio}, F and Fantini, M and Scotti, R},
      title = {CAD/CAM and rapid prototyped scaffold construction for bone regenerative
      medicine and surgical transfer of virtual planning: a pilot study.},
      journal = {Computerized medical imaging and graphics : the official journal
      of the Computerized Medical Imaging Society},
      year = {2009},
      volume = {33},
      pages = {58-62},
      number = {1},
      month = {January},
      abstract = {We developed a model to test new bone constructs to replace spare
      skeletal segments originating from new generation scaffolds for bone
      marrow-derived mesenchymal stem cells. Using computed tomography
      (CT) data, scaffolds were defined using computer-aided design/computer-aided
      manufacturing (CAD/CAM) for rapid prototyping by three-dimensional
      (3D) printing. A bone defect was created in pig mandible ramus by
      condyle resection for CT and CAD/CAM elaboration of bone volume for
      cutting and scaffold restoration. The protocol produced a perfect-fitting
      bone substitute model for rapid prototyped hydroxyapatite (HA) scaffolds.
      A surgical guide system was developed to accurately reproduce virtually
      planned bone sectioning procedures in animal models to obtain a perfect
      fit during surgery.},
      file = {Ciocca2009.pdf:Ciocca2009.pdf:PDF},
      issn = {1879-0771},
      keywords = {Animals,Bone Substitutes,Bone Substitutes: metabolism,Bone Substitutes:
      therapeutic use,Bone Transplantation,Bone Transplantation: methods,Computer-Aided
      Design,Dental Implantation, Endosseous,Dental Implantation, Endosseous:
      methods,Dental Prosthesis Design,Dental Prosthesis Design: methods,Durapatite,Durapatite:
      therapeutic use,Mandibular Condyle,Mandibular Condyle: surgery,Mesenchymal
      Stem Cells,Mesenchymal Stem Cells: cytology,Models, Anatomic,Osteotomy,Osteotomy:
      methods,Pilot Projects,Reconstructive Surgical Procedures,Reconstructive
      Surgical Procedures: methods,Regenerative Medicine,Regenerative Medicine:
      instrumentation,Regenerative Medicine: methods,Surgery, Computer-Assisted,Surgery,
      Computer-Assisted: methods,Swine,Tissue Engineering,Tissue Engineering:
      instrumentation,Tissue Engineering: methods,Tissue Scaffolds,Tomography,
      X-Ray Computed},
      owner = {thomaskroes},
      pmid = {19054651},
      timestamp = {2010.10.22}
    }
  • D. Holmes, M. Rettmann, and R. Robb, “Visualization in Image-Guided Interventions,” in Image-Guided Interventions, T. Peters and K. Cleary, Eds., Springer US, 2008, pp. 45-80.
    [Bibtex]
    @INCOLLECTION{Holmes2008,
      author = {Holmes, David and Rettmann, Maryam and Robb, Richard},
      title = {Visualization in Image-Guided Interventions},
      booktitle = {Image-Guided Interventions},
      publisher = {Springer US},
      year = {2008},
      editor = {Peters, Terry and Cleary, Kevin},
      pages = {45 - 80},
      note = {Chapter 3},
      abstract = {Visualization is one of the primary interfaces between an interventionalist
      and his patient. This interface is the final integration of several
      disparate data streams. To develop an appropriate image-guided interface,
      it is important to understand several aspects of the data acquisition,
      data processing, and visualization methodologies in the context of
      the interventional procedure. This chapter introduces the basics
      of data acquisition and processing for image guidance, including
      the benefits of both preoperative and intraoperative data streams.
      2D and 3D visualization methodologies are described with examples.
      Several different systems for visualization are introduced, ranging
      from low-level hardware to software-only render engines. Several
      clinical examples of visualization for image guidance are described.},
      affiliation = {Mayo Clinic College of Medicine Rochester MN USA},
      file = {Holmes2008.pdf:Holmes2008.pdf:PDF},
      isbn = {978-0-387-73858-1},
      keyword = {Engineering},
      owner = {Thomas},
      timestamp = {2011.02.24}
    }
  • E. Keeve, S. Girod, and B. Girod, “Craniofacial surgery simulation,” in Visualization in Biomedical Computing, Springer, pp. 541-546, 1996.
    [Bibtex]
    @CONFERENCE{Keeve1996a,
      author = {Keeve, E. and Girod, S. and Girod, B.},
      title = {Craniofacial surgery simulation},
      booktitle = {Visualization in Biomedical Computing},
      year = {1996},
      pages = {541 - 546},
      organization = {Springer},
      file = {Keeve1996a.pdf:Keeve1996a.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2011.01.25}
    }
  • J. P. Iannotti, E. E. Spencer, and others, “Prosthetic positioning in total shoulder arthroplasty,” Journal of Shoulder and Elbow Surgery, vol. 14, iss. 1, pp. S111-S121, 2005.
    [Bibtex]
    @ARTICLE{Iannotti2005,
      author = {Iannotti, J.P. and Spencer, E.E. and others},
      title = {Prosthetic positioning in total shoulder arthroplasty},
      journal = {Journal of Shoulder and Elbow Surgery},
      year = {2005},
      volume = {14},
      pages = {S111 - S121},
      number = {1},
      abstract = {Accurate positioning of the prosthetic humeral head is necessary to
      reproduce normal glenohumeral kinematics and to avoid damage to the
      rotator cuff and impingement on the glenoid component or coracoacromial
      arch. Proper positioning of the head requires accurate placement
      of the stem and prosthetic designs that allow the head position to
      adapt to the variations in both normal and pathologic humeral anatomy.
      Glenoid malpositioning can lead to both humeral instability and increased
      stress of the glenoid component that may lead to premature glenoid
      loosening. This review summarizes the cadaveric and finite-element
      model that defines the abnormalities associated with humeral and
      glenoid component malpositioning.},
      file = {:C\:\\Thomas\\PHD\\Literature\\Articles\\Ianotti2005.pdf:PDF},
      owner = {thomaskroes},
      publisher = {Elsevier},
      timestamp = {2010.10.26}
    }
  • G. Abdoulaev, S. Cadeddu, G. Delussu, M. Donizelli, L. Formaggia, A. Giachetti, E. Gobbetti, A. Leone, C. Manzi, P. Pili, and others, “ViVa: The Virtual Vascular Project,” IEEE Transactions on Information Technology in Biomedicine, vol. 2, iss. 4, 1998.
    [Bibtex]
    @ARTICLE{Abdoulaev1998,
      author = {Abdoulaev, G. and Cadeddu, S. and Delussu, G. and Donizelli, M. and
      Formaggia, L. and Giachetti, A. and Gobbetti, E. and Leone, A. and
      Manzi, C. and Pili, P. and others},
      title = {ViVa: The Virtual Vascular Project},
      journal = {IEEE Transactions on Information Technology in Biomedicine},
      year = {1998},
      volume = {2},
      number = {4},
      file = {Abdoulaev1998.pdf:Abdoulaev1998.pdf:PDF},
      keywords = {TEC},
      owner = {thomaskroes},
      timestamp = {2011.01.07}
    }
  • L. Adams, W. Krybus, D. Meyer-Ebrecht, R. Rueger, J. M. Gilsbach, R. Moesges, and G. Schloendorff, “Computer-assisted surgery,” Computer Graphics and Applications, IEEE, vol. 10, iss. 3, pp. 43-51, 1990.
    [Bibtex]
    @ARTICLE{Adams1990,
      author = {Adams, L. and Krybus, W. and Meyer-Ebrecht, D. and Rueger, R. and
      Gilsbach, J.M. and Moesges, R. and Schloendorff, G.},
      title = {Computer-assisted surgery},
      journal = {Computer Graphics and Applications, IEEE},
      year = {1990},
      volume = {10},
      pages = {43 - 51},
      number = {3},
      month = {May},
      abstract = {Computer-assisted surgery (CAS), a new navigation aid for skull-base
      surgery, is discussed. The system described combines 3-D coordinate
      measurement techniques, voxel processing methods, and pseudo-3-D
      image presentations to support preoperative planning of therapy,
      path-finding during the operation itself, and postoperative therapy
      control. The surgeon employs a hand-guided electromechanical 3-D-coordinate
      digitizer to locate points of interest within the operative field.
      The coordinates measured this way are correlated with a voxel model
      of the object gained by a preceding computed-tomography examination.
      With a prototype system the accuracy of this method has proven to
      be better than ±1 mm. The system has been successfully applied
      in more than 60 ear-nose-throat operations and four neurosurgical
      procedures. The computer hardware and integration of the system into
      an experimental picture archiving and communication system are discussed},
      file = {Adams1990.pdf:Adams1990.pdf:PDF},
      issn = {0272-1716},
      keywords = {3-D coordinate measurement techniques;accuracy;ear-nose-throat operations;hand-guided
      electromechanical 3-D-coordinate digitizer;navigation aid;neurosurgical
      procedures;path-finding;postoperative therapy control;preoperative
      planning;pseudo-3-D image presentations;skull-base surgery;voxel
      processing methods;medical computing;surgery;, ENT, APP},
      owner = {thomaskroes},
      timestamp = {2011.01.07}
    }
  • A. Adili, “Robot-assisted orthopedic surgery,” Surgical Innovation, vol. 11, iss. 2, p. 89, 2004.
    [Bibtex]
    @ARTICLE{Adili2004,
      author = {Adili, A.},
      title = {Robot-assisted orthopedic surgery},
      journal = {Surgical Innovation},
      year = {2004},
      volume = {11},
      pages = {89},
      number = {2},
      abstract = {The main advantages of robot-assisted orthopedic surgery over conventional
      orthopedic techniques are improved accuracy and precision in the
      preparation of bone surfaces, more reliable and reproducible outcomes,
      and greater spatial accuracy. Orthopedic surgery is ideally suited
      for the application of robotic systems. The ability to isolate and
      rigidly fix bones in known positions allows robotic devices to be
      securely fixed to the bone. As such, the bone is treated as a fixed
      object, simplifying the computer control of the robotic system. Commercially
      available robotic systems can be categorized as either passive or
      active devices, or can be categorized as positioning or milling/cutting
      devices. Computer assisted orthopedic surgery is a related area of
      technological development in orthopedics; however, robot-assisted
      orthopedic surgery can achieve levels of accuracy, precision, and
      safety not capable with computer assisted orthopedic surgery. Applications
      of robot-assisted orthopedic surgery currently under investigation
      include total hip and knee replacement, tunnel placement for reconstruction
      of knee ligaments, and trauma and spinal procedures. Several short-term
      studies demonstrate the feasibility of robotic applications in orthopedics,
      however, there are no published long-term data defining the efficacy
      of robotassisted orthopedic surgery. Issues of cost, training, and
      safety must be addressed before robot-assisted orthopedic surgery
      becomes widely available. Robot-assisted orthopedic surgery is still
      very much in its infancy but it has the potential to transform the
      way orthopedic procedures are done in the future.},
      file = {Adili2004.pdf:Adili2004.pdf:PDF},
      issn = {1553-3506},
      keywords = {REV, OTS},
      owner = {thomaskroes},
      publisher = {SAGE Publications},
      timestamp = {2010.12.09}
    }
  • D. E. Altobelli, R. Kikinis, J. B. Mulliken, H. Cline, W. Lorensen, F. Jolesz, and others, “Computer-assisted three-dimensional planning in craniofacial surgery,” Plastic and Reconstructive Surgery, vol. 92, iss. 4, p. 576, 1993.
    [Bibtex]
    @ARTICLE{Altobelli1993,
      author = {Altobelli, D.E. and Kikinis, R. and Mulliken, J.B. and Cline, H.
      and Lorensen, W. and Jolesz, F. and others},
      title = {Computer-assisted three-dimensional planning in craniofacial surgery},
      journal = {Plastic and Reconstructive Surgery},
      year = {1993},
      volume = {92},
      pages = {576},
      number = {4},
      issn = {0032-1052},
      keywords = {CMS, APP},
      owner = {thomaskroes},
      timestamp = {2011.01.12}
    }
  • C. J. van Andel, N. Wolterbeek, C. A. M. Doorenbosch, D. H. E. J. Veeger, and J. Harlaar, “Complete 3D kinematics of upper extremity functional tasks.,” Gait & posture, vol. 27, iss. 1, pp. 120-7, 2008.
    [Bibtex]
    @ARTICLE{VanAndel2008,
      author = {van Andel, Carolien J and Wolterbeek, Nienke and Doorenbosch, Caroline
      A M and Veeger, DirkJan H E J and Harlaar, Jaap},
      title = {Complete 3D kinematics of upper extremity functional tasks.},
      journal = {Gait \& posture},
      year = {2008},
      volume = {27},
      pages = {120 - 7},
      number = {1},
      month = {January},
      abstract = {Upper extremity (UX) movement analysis by means of 3D kinematics has
      the potential to become an important clinical evaluation method.
      However, no standardized protocol for clinical application has yet
      been developed, that includes the whole upper limb. Standardization
      problems include the lack of a single representative function, the
      wide range of motion of joints and the complexity of the anatomical
      structures. A useful protocol would focus on the functional status
      of the arm and particularly the orientation of the hand. The aim
      of this work was to develop a standardized measurement method for
      unconstrained movement analysis of the UX that includes hand orientation,
      for a set of functional tasks for the UX and obtain normative values.
      Ten healthy subjects performed four representative activities of
      daily living (ADL). In addition, six standard active range of motion
      (ROM) tasks were executed. Joint angles of the wrist, elbow, shoulder
      and scapula were analyzed throughout each ADL task and minimum/maximum
      angles were determined from the ROM tasks. Characteristic trajectories
      were found for the ADL tasks, standard deviations were generally
      small and ROM results were consistent with the literature. The results
      of this study could form the normative basis for the development
      of a 'UX analysis report' equivalent to the 'gait analysis report'
      and would allow for future comparisons with pediatric and/or pathologic
      movement patterns.},
      issn = {0966-6362},
      keywords = {Acromioclavicular Joint,Acromioclavicular Joint: physiology,Activities
      of Daily Living,Adult,Biomechanics,Bones of Upper Extremity,Bones
      of Upper Extremity: physiology,Elbow Joint,Elbow Joint: physiology,Feasibility
      Studies,Female,Forearm,Forearm: physiology,Humans,Imaging, Three-Dimensional,Imaging,
      Three-Dimensional: methods,Male,Movement,Photogrammetry,Pronation,Pronation:
      physiology,Range of Motion, Articular,Range of Motion, Articular:
      physiology,Rotation,Shoulder Joint,Shoulder Joint: physiology,Signal
      Processing, Computer-Assisted,Supination,Supination: physiology,Upper
      Extremity,Upper Extremity: physiology,Wrist Joint,Wrist Joint: physiology},
      owner = {thomaskroes},
      pmid = {17459709},
      timestamp = {2010.10.25}
    }
  • H. Anderl, D. Zur Nedden, and others, “CT-guided stereolithography as a new tool in craniofacial surgery,” British journal of plastic surgery, vol. 47, iss. 1, pp. 60-64, 1994.
    [Bibtex]
    @ARTICLE{Anderl1994,
      author = {Anderl, H. and Zur Nedden, D. and others},
      title = {CT-guided stereolithography as a new tool in craniofacial surgery},
      journal = {British journal of plastic surgery},
      year = {1994},
      volume = {47},
      pages = {60 - 64},
      number = {1},
      file = {Anderl1994.pdf:Anderl1994.pdf:PDF},
      issn = {0007-1226},
      keywords = {SLR, CMS, RPP, SUR},
      owner = {Thomas},
      publisher = {Elsevier},
      timestamp = {2011.02.09}
    }
  • C. Anglin, U. P. Wyss, and D. R. Pichora, “Mechanical testing of shoulder prostheses and recommendations for glenoid design,” Journal of Shoulder and Elbow Surgery, vol. 9, iss. 4, pp. 323-331, 2000.
    [Bibtex]
    @ARTICLE{Anglin2000,
      author = {Anglin, C. and Wyss, U.P. and Pichora, D.R.},
      title = {Mechanical testing of shoulder prostheses and recommendations for
      glenoid design},
      journal = {Journal of Shoulder and Elbow Surgery},
      year = {2000},
      volume = {9},
      pages = {323--331},
      number = {4},
      file = {Anglin2000.pdf:Anglin2000.pdf:PDF},
      keywords = {TEC},
      owner = {thomaskroes},
      publisher = {Mosby},
      timestamp = {2010.10.26}
    }
  • S. G. Armato III, M. L. Giger, and H. MacMahon, “Automated detection of lung nodules in CT scans: preliminary results,” Medical Physics, vol. 28, p. 1552, 2001.
    [Bibtex]
    @ARTICLE{Armato2001,
      author = {Armato III, S.G. and Giger, M.L. and MacMahon, H.},
      title = {Automated detection of lung nodules in CT scans: preliminary results},
      journal = {Medical Physics},
      year = {2001},
      volume = {28},
      pages = {1552},
      keywords = {TEC, IMP},
      owner = {thomaskroes},
      timestamp = {2010.12.15}
    }
  • V. Arulesan, T. Kesavadas, and K. R. Hoffmann, “Computer Assisted Neurosurgery,” International Journal of Computer Assisted Radiology and Surgery, vol. 2, pp. 218-225, 2007.
    [Bibtex]
    @ARTICLE{Arulesana2007,
      author = {Arulesan, V. and Kesavadas, T. and Hoffmann, K.R.},
      title = {Computer Assisted Neurosurgery},
      journal = {International Journal of Computer Assisted Radiology and Surgery},
      year = {2007},
      volume = {2},
      pages = {218 - 225},
      file = {Arulesana2007.pdf:Arulesana2007.pdf:PDF},
      issn = {1861-6410},
      keywords = {TEC, NES},
      owner = {thomaskroes},
      publisher = {Springer},
      timestamp = {2011.01.11}
    }
  • R. Aspin, M. Smith, C. Hutchinson, and L. Funk, “MediVol: An initial Study into Real-Time, Interactive 3D Visualisation of Soft Tissue Pathologies,” in Distributed Simulation and Real-Time Applications, 2008. DS-RT 2008. 12th IEEE/ACM International Symposium on, 2008, pp. 103-110.
    [Bibtex]
    @INPROCEEDINGS{Aspin2008,
      author = {Aspin, R. and Smith, M. and Hutchinson, C. and Funk, L.},
      title = {MediVol: An initial Study into Real-Time, Interactive 3D Visualisation
      of Soft Tissue Pathologies},
      booktitle = {Distributed Simulation and Real-Time Applications, 2008. DS-RT 2008.
      12th IEEE/ACM International Symposium on},
      year = {2008},
      pages = {103 -110},
      month = {October},
      abstract = {Non-invasive scanning technologies, such as MRI, have greatly enhanced
      our ability to 'image' the internal body, however the resultant
      visualisation is often difficult to comprehend due to both inadequacies
      in the scanning process and sub-optimal approaches to visualisation
      and data representation. These factors impose significant cognitive
      load on the user, requiring skill and experience to accurately comprehend
      the detail of the data, and intense concentration, in less experienced
      users, to understand the structures present. This research aims to
      improve the users' ability to comprehend and explore the scanned
      data sets, through a combination of enhanced data processing and
      effective 3D visualisation that will seek refined representational
      paradigms to intuitively convey meaning to the user. This is achieved
      in a real-time 3D exploration environment, utilising recent developments
      in graphics hardware, which enables the user to both refine the visual
      representation and effectively explore the data set, thereby defining
      a prototype for enhanced medical visualisation and exploration. Ultimately
      this will improve diagnosis, procedural planning and most importantly
      communication, both between medical clinicians and the patient themselves.},
      file = {Aspin2008.pdf:Aspin2008.pdf:PDF},
      issn = {1550-6525},
      keywords = {3D volumetric visualisation;MediVol real-time interactive 3D visualisation;None
      invasive scanning technology;graphics rendering;image enhancement;medical
      clinician;real-time 3D exploration environment;soft tissue MRI data;soft
      tissue pathology;visual data representation;biological tissues;biomedical
      MRI;data visualisation;image enhancement;image representation;interactive
      systems;medical image processing;rendering (computer graphics);,
      TEC, VOR, STV},
      owner = {thomaskroes},
      timestamp = {2011.01.07}
    }
  • H. Atmani, F. Merienne, D. Fofi, and P. Trouilloud, “Computer aided surgery system for shoulder prosthesis placement,” Computer Aided Surgery, vol. 12, iss. 1, pp. 60-70, 2007.
    [Bibtex]
    @ARTICLE{Atmani2007,
      author = {Atmani, H. and Merienne, F. and Fofi, D. and Trouilloud, P.},
      title = {Computer aided surgery system for shoulder prosthesis placement},
      journal = {Computer Aided Surgery},
      year = {2007},
      volume = {12},
      pages = {60 - 70},
      number = {1},
      abstract = {The aim of this research is to provide a light and easy-handling shoulder
      model for surgeons in order to ease the preoperative and peroperative
      work required when replacing the shoulder joint with a prosthesis.
      The digital mock-up of the shoulder is simplified according to the
      criteria of the surgeon, allowing easy manipulation of the model
      for a virtual operation. The model can be parameterized from X-rays
      or CT images. This paper describes the method used to obtain a virtual
      mock-up that is useful for preoperative simulation. Furthermore,
      it is shown that a real-time augmented reality system could be achieved
      for peroperative application.},
      file = {Atmani2007.pdf:Atmani2007.pdf:PDF},
      keywords = {APP, SLR, PRS, OTS},
      owner = {thomaskroes},
      timestamp = {2010.11.11}
    }
  • R. T. Azuma and others, “A survey of augmented reality,” 1997.
    [Bibtex]
    @ARTICLE{Azuma1997,
      author = {Azuma, R.T. and others},
      title = {A survey of augmented reality},
      year = {1997},
      file = {Azuma1997.pdf:Azuma1997.pdf:PDF},
      issn = {1054-7460},
      keywords = {REV, AUR},
      owner = {thomaskroes},
      publisher = {Citeseer},
      timestamp = {2011.01.06}
    }
  • K. T. Bae, M. L. Giger, C. T. Chen, and C. E. Kahn Jr, “Automatic segmentation of liver structure in CT images,” Medical Physics, vol. 20, p. 71, 1993.
    [Bibtex]
    @ARTICLE{Bae1993,
      author = {Bae, K.T. and Giger, M.L. and Chen, C.T. and Kahn Jr, C.E.},
      title = {Automatic segmentation of liver structure in CT images},
      journal = {Medical Physics},
      year = {1993},
      volume = {20},
      pages = {71},
      file = {Bae1993.pdf:Bae1993.pdf:PDF},
      keywords = {TEC},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
  • [DOI] D. C. Banks and K. Beason, “Decoupling Illumination from Isosurface Generation Using 4D Light Transport,” IEEE Transactions on Visualization and Computer Graphics, vol. 15, pp. 1595-1602, 2009.
    [Bibtex]
    @ARTICLE{Banks2009,
      author = {David C. Banks and Kevin Beason},
      title = {Decoupling Illumination from Isosurface Generation Using 4D Light
      Transport},
      journal = {IEEE Transactions on Visualization and Computer Graphics},
      year = {2009},
      volume = {15},
      pages = {1595-1602},
      address = {Los Alamitos, CA, USA},
      doi = {http://doi.ieeecomputersociety.org/10.1109/TVCG.2009.137},
      file = {Banks2009.pdf:Banks2009.pdf:PDF},
      issn = {1077-2626},
      keywords = {TEC},
      owner = {cpbotha},
      publisher = {IEEE Computer Society},
      timestamp = {2011.03.26}
    }
  • F. Banovac, J. Bruno, J. Wright, and K. Cleary, “Thoracoabdominal Interventions,” in Image-Guided Interventions, Springer, 2008, pp. 387-407.
    [Bibtex]
    @INCOLLECTION{Banovac2008,
      author = {Banovac, F. and Bruno, J. and Wright, J. and Cleary, K.},
      title = {Thoracoabdominal Interventions},
      booktitle = {Image-Guided Interventions},
      year = {2008},
      pages = {387 - 407},
      note = {Chapter 13},
      file = {Banovac2008.pdf:Banovac2008.pdf:PDF},
      keywords = {REV, TAS},
      owner = {Thomas},
      publisher = {Springer},
      timestamp = {2011.02.28}
    }
  • S. Barre, C. Fernandez-Maloigne, P. Paume, and G. Subrenat, “Simulating facial surgery,” in Proceedings of SPIE, vol. 3960, p. 334, 2000.
    [Bibtex]
    @CONFERENCE{Barre2000,
      author = {Barre, S. and Fernandez-Maloigne, C. and Paume, P. and Subrenat,
      G.},
      title = {Simulating facial surgery},
      booktitle = {Proceedings of SPIE},
      year = {2000},
      volume = {3960},
      pages = {334},
      file = {Barre2000.pdf:Barre2000.pdf:PDF},
      keywords = {TEC},
      owner = {thomaskroes},
      timestamp = {2011.01.12}
    }
  • A. R. W. Barrett, B. L. Davies, M. P. S. F. Gomes, S. J. Harris, J. Henckel, M. Jakopec, F. M. Rodriguez Y Baena, and J. P. Cobb, “Preoperative planning and intraoperative guidance for accurate computer-assisted minimally invasive hip resurfacing surgery,” Proceedings of the Institution of Mechanical Engineers, Part H: Journal of Engineering in Medicine, vol. 220, iss. 7, pp. 759-773, 2006.
    [Bibtex]
    @ARTICLE{Barrett2006,
      author = {Barrett, A R W and Davies, B L and Gomes, M P S F and Harris, S J
      and Henckel, J and Jakopec, M and {Rodriguez Y Baena}, F M and Cobb,
      J P},
      title = {Preoperative planning and intraoperative guidance for accurate computer-assisted
      minimally invasive hip resurfacing surgery},
      journal = {Proceedings of the Institution of Mechanical Engineers, Part H: Journal
      of Engineering in Medicine},
      year = {2006},
      volume = {220},
      pages = {759--773},
      number = {7},
      month = jan,
      abstract = {Hip resurfacing is an alternative to total hip replacement (THR) and
      is particularly suitable for the younger, more active patient. However,
      it is a more demanding procedure. This paper describes a system that
      enables the surgeon to plan the surgery preoperatively with optimally
      sized and placed components, and then transfer this plan to an intraoperative
      system that registers computer models to the real patient and tracks
      surgical tools, allowing the surgeon to ensure that the bone is resected
      correctly and that the components are fitted in accordance with the
      plan. The paper describes a series of instruments used with the system
      which are locked to the bone. These instruments serve the dual purpose
      of soft tissue retraction and bone immobilization. The system will
      shortly be the subject of laboratory and clinical evaluation. Registration,
      a cornerstone of the tracked instrument system, has been tested,
      and accuracy measures are provided. Experimental results for the
      remainder of the system will be provided after clinical trials.},
      file = {Barrett2006.pdf:Barrett2006.pdf:PDF},
      issn = {0954-4119},
      keywords = {computer-assisted surgery,hip resurfacing,minimally invasive surgery,preoperative
      planning,surgical navigation, OTS, SUR, APP, SLR, PLA, GUI},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • B. Barsky, D. Horn, S. Klein, J. Pang, and M. Yu, “Camera models and optical systems used in computer graphics: part II, image-based techniques,” Computational Science and Its Applications—ICCSA 2003, pp. 983-983, 2003.
    [Bibtex]
    @ARTICLE{Barsky2003,
      author = {Barsky, B. and Horn, D. and Klein, S. and Pang, J. and Yu, M.},
      title = {Camera models and optical systems used in computer graphics: part
      II, image-based techniques},
      journal = {Computational Science and Its Applications—ICCSA 2003},
      year = {2003},
      pages = {983--983},
      file = {Barsky2003.pdf:Barsky2003.pdf:PDF},
      owner = {Thomas},
      publisher = {Springer},
      timestamp = {2011.03.30}
    }
  • D. Bartz, D. Mayer, J. Fischer, S. Ley, A. Del Río, S. Thust, C. P. Heussel, H. U. Kauczor, and W. Straßer, “Hybrid segmentation and exploration of the human lungs,” in IEEE Visualization 2003 (VIS 2003), pp. 177-184, 2003.
    [Bibtex]
    @CONFERENCE{Bartz2003,
      author = {Bartz, D. and Mayer, D. and Fischer, J. and Ley, S. and Del R{\'\i}o,
      A. and Thust, S. and Heussel, C.P. and Kauczor, H.U. and Stra{\ss}er,
      W.},
      title = {Hybrid segmentation and exploration of the human lungs},
      booktitle = {Visualization, 2003. VIS 2003. IEEE},
      year = {2003},
      pages = {177 - 184},
      organization = {IEEE},
      file = {Bartz2003.pdf:Bartz2003.pdf:PDF},
      isbn = {0780381203},
      keywords = {TEC},
      owner = {thomaskroes},
      timestamp = {2011.01.03}
    }
  • H. Bathis, L. Perlick, M. Tingart, C. Luring, D. Zurakowski, and J. Grifka, “Alignment in total knee arthroplasty,” The Journal of Bone and Joint Surgery, vol. 86, iss. 5, pp. 682-687, 2004.
    [Bibtex]
    @ARTICLE{Bathis2004,
      author = {Bathis, H. and Perlick, L. and Tingart, M. and Luring, C. and Zurakowski,
      D. and Grifka, J.},
      title = {Alignment in total knee arthroplasty},
      journal = {The Journal of Bone and Joint Surgery},
      year = {2004},
      volume = {86},
      pages = {682 - 687},
      number = {5},
      month = {July},
      abstract = {Restoration of neutral alignment of the leg is an important factor
      affecting the long-term results of total knee arthroplasty (TKA).
      Recent developments in computer-assisted surgery have focused on
      systems for improving TKA. In a prospective study two groups of 80
      patients undergoing TKA had operations using either a computer-assisted
      navigation system or a conventional technique. Alignment of the leg
      and the orientation of components were determined on post-operative
      long-leg coronal and lateral films. The mechanical axis of the leg
      was significantly better in the computer-assisted group (96\%, within
      ±3° varus/valgus) compared with the conventional group (78\%, within
      ±3° varus/valgus). The coronal alignment of the femoral component
      was also more accurate in the computer-assisted group. Computer-assisted
      TKA gives a better correction of alignment of the leg and orientation
      of the components compared with the conventional technique. Potential
      benefits in the long-term outcome and functional improvement require
      further investigation.},
      file = {Bathis2004.pdf:Bathis2004.pdf:PDF},
      issn = {0301620X},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • M. Baumhauer, M. Feuerstein, H. P. Meinzer, and J. Rassweiler, “Navigation in endoscopic soft tissue surgery: perspectives and limitations,” Journal of Endourology, vol. 22, iss. 4, pp. 751-766, 2008.
    [Bibtex]
    @ARTICLE{Baumhauer2008,
      author = {Baumhauer, M. and Feuerstein, M. and Meinzer, H.P. and Rassweiler,
      J.},
      title = {Navigation in endoscopic soft tissue surgery: perspectives and limitations},
      journal = {Journal of Endourology},
      year = {2008},
      volume = {22},
      pages = {751 - 766},
      number = {4},
      file = {Baumhauer2008.pdf:Baumhauer2008.pdf:PDF},
      issn = {0892-7790},
      keywords = {REV, TAS},
      owner = {thomaskroes},
      publisher = {Mary Ann Liebert, Inc. 2 Madison Avenue Larchmont, NY 10538 USA},
      timestamp = {2011.01.26}
    }
  • E. Berry, M. Cuppone, S. Porada, P. Millner, A. Rao, N. Chiverton, and B. Seedhom, “Personalised image-based templates for intra-operative guidance,” Proceedings of the Institution of Mechanical Engineers, Part H: Journal of Engineering in Medicine, vol. 219, iss. 2, pp. 111-118, 2005.
    [Bibtex]
    @ARTICLE{Berry2005,
      author = {Berry, E. and Cuppone, M. and Porada, S. and Millner, PA and Rao,
      A. and Chiverton, N. and Seedhom, BB},
      title = {Personalised image-based templates for intra-operative guidance},
      journal = {Proceedings of the Institution of Mechanical Engineers, Part H: Journal
      of Engineering in Medicine},
      year = {2005},
      volume = {219},
      pages = {111 - 118},
      number = {2},
      file = {Berry2005.pdf:Berry2005.pdf:PDF},
      issn = {0954-4119},
      keywords = {TRM, RPP, GUI, APP, PLA, OTS},
      owner = {Thomas},
      publisher = {Prof Eng Publishing},
      timestamp = {2011.02.07}
    }
  • P. J. Besl and N. D. McKay, “A method for registration of 3-D shapes,” IEEE Transactions on Pattern Analysis and Machine Intelligence, pp. 239-256, 1992.
    [Bibtex]
    @ARTICLE{Besl1992,
      author = {Besl, P.J. and McKay, N.D.},
      title = {A method for registration of 3-D shapes},
      journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
      year = {1992},
      pages = {239 - 256},
      file = {Besl1992.pdf:Besl1992.pdf:PDF},
      issn = {0162-8828},
      keywords = {TEC},
      owner = {thomaskroes},
      publisher = {Published by the IEEE Computer Society},
      timestamp = {2011.01.10}
    }
  • J. Beyer, M. Hadwiger, S. Wolfsberger, and K. Bühler, “High-quality multimodal volume rendering for preoperative planning of neurosurgical interventions.,” IEEE transactions on visualization and computer graphics, vol. 13, iss. 6, pp. 1696-703, 2007.
    [Bibtex]
    @ARTICLE{Beyer2007,
      author = {Beyer, Johanna and Hadwiger, Markus and Wolfsberger, Stefan and B\"{u}hler,
      Katja},
      title = {High-quality multimodal volume rendering for preoperative planning
      of neurosurgical interventions.},
      journal = {IEEE transactions on visualization and computer graphics},
      year = {2007},
      volume = {13},
      pages = {1696 - 703},
      number = {6},
      abstract = {Surgical approaches tailored to an individual patient's anatomy and
      pathology have become standard in neurosurgery. Precise preoperative
      planning of these procedures, however, is necessary to achieve an
      optimal therapeutic effect. Therefore, multiple radiological imaging
      modalities are used prior to surgery to delineate the patient's anatomy,
      neurological function, and metabolic processes. Developing a three-dimensional
      perception of the surgical approach, however, is traditionally still
      done by mentally fusing multiple modalities. Concurrent 3D visualization
      of these datasets can, therefore, improve the planning process significantly.
      In this paper we introduce an application for planning of individual
      neurosurgical approaches with high-quality interactive multimodal
      volume rendering. The application consists of three main modules
      which allow to (1) plan the optimal skin incision and opening of
      the skull tailored to the underlying pathology; (2) visualize superficial
      brain anatomy, function and metabolism; and (3) plan the patient-specific
      approach for surgery of deep-seated lesions. The visualization is
      based on direct multi-volume raycasting on graphics hardware, where
      multiple volumes from different modalities can be displayed concurrently
      at interactive frame rates. Graphics memory limitations are avoided
      by performing raycasting on bricked volumes. For preprocessing tasks
      such as registration or segmentation, the visualization modules are
      integrated into a larger framework, thus supporting the entire workflow
      of preoperative planning.},
      file = {Beyer2007.pdf:Beyer2007.pdf:PDF},
      issn = {1077-2626},
      keywords = {Algorithms,Computer Graphics,Computer Simulation,Humans,Image Enhancement,Image
      Enhancement: methods,Image Interpretation, Computer-Assisted,Image
      Interpretation, Computer-Assisted: methods,Imaging, Three-Dimensional,Imaging,
      Three-Dimensional: methods,Models, Anatomic,Models, Neurological,Neurosurgery,Neurosurgery:
      methods,Preoperative Care,Preoperative Care: methods,Reproducibility
      of Results,Sensitivity and Specificity,Surgery, Computer-Assisted,Surgery,
      Computer-Assisted: methods,User-Computer Interface, TEC, VOR, SUR,
      SLR},
      owner = {thomaskroes},
      pmid = {17968127},
      timestamp = {2010.10.22}
    }
  • D. Bielser and M. H. Gross, “Interactive simulation of surgical cuts,” in Proceedings of the Eighth Pacific Conference on Computer Graphics and Applications, pp. 116-442, 2002.
    [Bibtex]
    @CONFERENCE{Bielser2002,
      author = {Bielser, D. and Gross, M.H.},
      title = {Interactive simulation of surgical cuts},
      booktitle = {Computer Graphics and Applications, 2000. Proceedings. The Eighth
      Pacific Conference on},
      year = {2002},
      pages = {116--442},
      organization = {IEEE},
      isbn = {0769508685},
      keywords = {TEC}
    }
  • J. S. Bill, J. F. Reuther, W. Dittmann, N. Kübler, J. L. Meier, H. Pistner, and G. Wittenberg, “Stereolithography in oral and maxillofacial operation planning= Einsatz der Stereolithographie in der Planung mund-kiefer-gesichtschirurgischer Engriffe,” International journal of oral and maxillofacial surgery, vol. 24, iss. 1, pp. 95-103, 1995.
    [Bibtex]
    @ARTICLE{Bill1995,
      author = {Bill, J.S. and Reuther, J.F. and Dittmann, W. and K{\"u}bler, N.
      and Meier, J.L. and Pistner, H. and Wittenberg, G.},
      title = {Stereolithography in oral and maxillofacial operation planning= Einsatz
      der Stereolithographie in der Planung mund-kiefer-gesichtschirurgischer
      Engriffe},
      journal = {International journal of oral and maxillofacial surgery},
      year = {1995},
      volume = {24},
      pages = {95 - 103},
      number = {1},
      issn = {0901-5027},
      keywords = {RPP, CMS, APP},
      owner = {thomaskroes},
      publisher = {Elsevier},
      timestamp = {2011.01.12}
    }
  • W. Birkfellner, J. Hummel, E. Wilson, and K. Cleary, “Tracking Devices,” in Image-Guided Interventions, T. Peters and K. Cleary, Eds., Springer US, 2008, pp. 23-44.
    [Bibtex]
    @INCOLLECTION{Birkfellner2008,
      author = {Birkfellner, Wolfgang and Hummel, Johann and Wilson, Emmanuel and
      Cleary, Kevin},
      title = {Tracking Devices},
      booktitle = {Image-Guided Interventions},
      publisher = {Springer US},
      year = {2008},
      editor = {Peters, Terry and Cleary, Kevin},
      pages = {23 - 44},
      note = {Chapter 2},
      abstract = {Tracking devices are an essential component of an image-guided surgery
      system. These devices are used to track the position of instruments
      relative to the patient anatomy. Although early tracking systems
      were essentially mechanical digitizers, the field quickly adopted
      optical tracking systems because of their high accuracy and relatively
      large workspace. However, optical tracking systems require that a
      line-of-sight be maintained between the tracking device and the instrument
      to be tracked, which is not always convenient and precludes tracking
      of flexible instruments inside the body. Therefore, electromagnetic
      tracking systems were developed that had no line-of-sight requirement
      and could track instruments such as catheters and the tips of needles
      inside the body. The choice of tracking system is highly application
      dependent and requires an understanding of the desired working volume
      and accuracy requirements. To meet these needs, a variety of tracking
      devices and techniques have been introduced as described in this
      chapter.},
      affiliation = {Medical University Vienna Austria},
      file = {Birkfellner2008.pdf:Birkfellner2008.pdf:PDF},
      isbn = {978-0-387-73858-1},
      keyword = {Engineering},
      keywords = {REV},
      owner = {Thomas},
      timestamp = {2011.02.24}
    }
  • I. Bitter, R. Van Uitert, I. Wolf, L. Ibanez, and J. M. Kuhnigk, “Comparison of four freely available frameworks for image processing and visualization that use ITK,” Visualization and Computer Graphics, IEEE Transactions on, vol. 13, iss. 3, pp. 483-493, 2007.
    [Bibtex]
    @ARTICLE{Bitter2007,
      author = {Bitter, I. and Van Uitert, R. and Wolf, I. and Ibanez, L. and Kuhnigk,
      J.M.},
      title = {Comparison of four freely available frameworks for image processing
      and visualization that use ITK},
      journal = {Visualization and Computer Graphics, IEEE Transactions on},
      year = {2007},
      volume = {13},
      pages = {483 - 493},
      number = {3},
      file = {Bitter2007.pdf:Bitter2007.pdf:PDF},
      issn = {1077-2626},
      keywords = {REV},
      owner = {thomaskroes},
      publisher = {IEEE},
      timestamp = {2011.01.07}
    }
  • M. Blackwell, C. Nikou, A. M. DiGioia, and T. Kanade, “An image overlay system for medical data visualization,” Medical Image Analysis, vol. 4, iss. 1, pp. 67-72, 2000.
    [Bibtex]
    @ARTICLE{Blackwell2000,
      author = {Blackwell, M. and Nikou, C. and DiGioia, A.M. and Kanade, T.},
      title = {An image overlay system for medical data visualization},
      journal = {Medical Image Analysis},
      year = {2000},
      volume = {4},
      pages = {67 - 72},
      number = {1},
      abstract = {Image Overlay is a computer display technique which superimposes computer
      images over the user’s direct view of the real world. The images
      are transformed in real-time so they appear to the user to be an
      integral part of the surrounding environment. By using Image Overlay
      with three-dimensional medical images such as CT reconstructions,
      a surgeon can visualize the data ‘in-vivo’, exactly positioned within
      the patient’s anatomy, and potentially enhance the surgeon’s ability
      to perform a complex procedure. This paper describes prototype Image
      Overlay systems and initial experimental results from those systems.},
      file = {Blackwell2000.pdf:Blackwell2000.pdf:PDF},
      issn = {1361-8415},
      keywords = {TEC, AUR, STV, SUR, GUI},
      owner = {thomaskroes},
      publisher = {Elsevier},
      timestamp = {2011.01.04}
    }
  • J. Bloomenthal and K. Shoemake, “Convolution surfaces,” in Proceedings of the 18th annual conference on Computer graphics and interactive techniques, pp. 251-256, 1991.
    [Bibtex]
    @CONFERENCE{Bloomenthal1991,
      author = {Bloomenthal, J. and Shoemake, K.},
      title = {Convolution surfaces},
      booktitle = {Proceedings of the 18th annual conference on Computer graphics and
      interactive techniques},
      year = {1991},
      pages = {251 - 256},
      organization = {ACM},
      file = {Bloomenthal1991.pdf:Bloomenthal1991.pdf:PDF},
      isbn = {0897914368},
      keywords = {TEC},
      owner = {Thomas},
      timestamp = {2011.01.31}
    }
  • S. Bo, “Automatic Segmentation and 3D Reconstruction of Human Liver Based on CT Image,” Science, pp. 1-4, 2010.
    [Bibtex]
    @ARTICLE{Bo2010,
      author = {Bo, Song},
      title = {Automatic Segmentation and 3D Reconstruction of Human Liver Based
      on CT Image},
      journal = {Science},
      year = {2010},
      pages = {1-4},
      abstract = {3D shape reconstruction of the liver from its 2D cross-sections improves
      the surgeon’s knowledge of liver anatomy and makes even more complicated
      liver surgery safe, which can be employed to aid clinical practice
      as an alternative tool. In this paper, a new method for 3D reconstruction
      of liver is proposed. It mainly consists of three steps: liver auto-segmentation,
      surface reconstruction and surface rendering. Firstly, an effective
      automatic segmentation method based on graph-theory is proposed for
      extracting liver. Then, NMC algorithm based on a combination of MC
      and Cuberille algorithms is applied to accomplish surface reconstruction
      of liver. Finally, surface rendering of liver is implemented by 3D
      graphics library OpenGL. The method mentioned above is being tested
      on numerous experiments of 3D reconstruction of liver and results
      are promising.},
      file = {Bo2010.pdf:Bo2010.pdf:PDF},
      keywords = {Automatic Segmentation and 3D Reconstruction of Hu,liver, TEC},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • G. J. Bootsma, J. H. Siewerdsen, M. J. Daly, and D. A. Jaffray, “Initial investigation of an automatic registration algorithm for surgical navigation,” in Engineering in Medicine and Biology Society, 2008. EMBS 2008. 30th Annual International Conference of the IEEE, 2008, pp. 3638-3642.
    [Bibtex]
    @INPROCEEDINGS{Bootsma2008,
      author = {Bootsma, Gregory J. and Siewerdsen, Jeffrey H. and Daly, Michael
      J. and Jaffray, David A.},
      title = {Initial investigation of an automatic registration algorithm for
      surgical navigation},
      booktitle = {Engineering in Medicine and Biology Society, 2008. EMBS 2008. 30th
      Annual International Conference of the IEEE},
      year = {2008},
      pages = {3638 - 3642},
      month = {August},
      abstract = {The procedure required for registering a surgical navigation system
      prior to use in a surgical procedure is conventionally a time-consuming
      manual process that is prone to human errors and must be repeated
      as necessary through the course of a procedure. The conventional
      procedure becomes even more time consuming when intra-operative 3D
      imaging such as the C-arm cone-beam CT (CBCT) is introduced, as each
      updated volume set requires a new registration. To improve the speed
      and accuracy of registering image and world reference frames in image-guided
      surgery, a novel automatic registration algorithm was developed and
      investigated. The surgical navigation system consists of either Polaris
      (Northern Digital Inc., Waterloo, ON) or MicronTracker (Claron Technology
      Inc., Toronto, ON) tracking camera(s), custom software (Cogito running
      on a PC), and a prototype CBCT imaging system based on a mobile isocentric
      C-arm (Siemens, Erlangen, Germany). Experiments were conducted to
      test the accuracy of automatic registration methods for both the
      MicronTracker and Polaris tracking cameras. Results indicate the
      automated registration performs as well as the manual registration
      procedure using either the Claron or Polaris camera. The average
      root-mean-squared (rms) observed target registration error (TRE)
      for the manual procedure was 2.58 ± 0.42 mm and 1.76 ± 0.49 mm
      for the Polaris and MicronTracker, respectively. The mean observed
      TRE for the automatic algorithm was 2.11 ± 0.13 and 2.03 ± 0.3 mm
      for the Polaris and MicronTracker, respectively. Implementation and
      optimization of the automatic registration technique in C-arm CBCT
      guidance of surgical procedures is underway.},
      file = {:Bootsma2008.pdf:PDF},
      issn = {1557-170X},
      keywords = {Algorithms;Automatic Data Processing;Equipment Design;Humans;Models,
      Statistical;Pattern Recognition, Automated;Photography;Reproducibility
      of Results;Signal Processing, Computer-Assisted;Software;Surgery,
      Computer-Assisted;Tomography Scanners, X-Ray Computed;Tomography,
      X-Ray Computed;, TEC, GUI, SUR, SLR},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • A. Bosnjak, G. Montilla, R. Villegas, and I. Jara, “3D Segmentation with an application of level set-method using MRI volumes for image guided surgery.,” Conference proceedings : … Annual International Conference of the IEEE Engineering in Medicine and Biology Society. IEEE Engineering in Medicine and Biology Society. Conference, vol. 2007, pp. 5263-6, 2007.
    [Bibtex]
    @ARTICLE{Bosnjak2007,
      author = {Bosnjak, A and Montilla, G and Villegas, R and Jara, I},
      title = {3D Segmentation with an application of level set-method using MRI
      volumes for image guided surgery.},
      journal = {Conference proceedings : ... Annual International Conference of the
      IEEE Engineering in Medicine and Biology Society. IEEE Engineering
      in Medicine and Biology Society. Conference},
      year = {2007},
      volume = {2007},
      pages = {5263-6},
      month = {January},
      abstract = {This paper proposes an innovation in the application for image guided
      surgery using a comparative study of three different method of segmentation.
      This segmentation method is faster than the manual segmentation of
      images, with the advantage that it allows to use the same patient
      as anatomical reference, which has more precision than a generic
      atlas. This new methodology for 3D information extraction is based
      on a processing chain structured of the following modules: 1) 3D
      Filtering: the purpose is to preserve the contours of the structures
      and to smooth the homogeneous areas; several filters were tested
      and finally an anisotropic diffusion filter was used. 2) 3D Segmentation.
      This module compares three different methods: Region growing Algorithm,
      Cubic spline hand assisted, and Level Set Method. It then proposes
      a Level Set-based on the front propagation method that allows the
      making of the reconstruction of the internal walls of the anatomical
      structures of the brain. 3) 3D visualization. The new contribution
      of this work consists on the visualization of the segmented model
      and its use in the pre-surgery planning.},
      file = {Bosnjak2007.pdf:Bosnjak2007.pdf:PDF},
      issn = {1557-170X},
      keywords = {Algorithms,Artificial Intelligence,Brain,Brain: anatomy \& histology,Brain:
      surgery,Humans,Image Enhancement,Image Enhancement: methods,Image
      Interpretation, Computer-Assisted,Image Interpretation, Computer-Assisted:
      methods,Imaging, Three-Dimensional,Imaging, Three-Dimensional: methods,Magnetic
      Resonance Imaging, Interventional,Magnetic Resonance Imaging, Interventional:
      method,Neurosurgical Procedures,Neurosurgical Procedures: methods,Pattern
      Recognition, Automated,Pattern Recognition, Automated: methods,Reproducibility
      of Results,Sensitivity and Specificity,Surgery, Computer-Assisted,Surgery,
      Computer-Assisted: methods, TEC, SLR, SUR},
      owner = {thomaskroes},
      pmid = {18003195},
      timestamp = {2010.10.22}
    }
  • C. P. Botha, “Techniques and software architectures for medical visualisation and image processing,” 2005.
    [Bibtex]
    @ARTICLE{Botha2005,
      author = {Botha, C.P.},
      title = {Techniques and software architectures for medical visualisation and
      image processing},
      year = {2005},
      file = {Botha2005.pdf:Botha2005.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2011.01.06}
    }
  • C. P. Botha and F. H. Post, “A Visualisation Platform for Shoulder Replacement Surgery,” Development, vol. d, pp. 1-4, 2001.
    [Bibtex]
    @ARTICLE{Botha2001,
      author = {Botha, Charl P and Post, Frits H},
      title = {A Visualisation Platform for Shoulder Replacement Surgery},
      journal = {Development},
      year = {2001},
      volume = {d},
      pages = {1-4},
      abstract = {This note presents a new software platform that has been designed
      to enable the in- tegration and investigation of visualisation in
      multiple facets of the shoulder replacement process. What distinguishes
      this platform from available CAS (Computer Assisted Surgery) software
      solu- tions is a combination of its focus on visualisation (and specifically
      of the shoulder), its flexible and scalable architecture (without
      loss of domain-specific benefits) and the planned integration of
      predictive modelling.},
      file = {Botha2001.pdf:Botha2001.pdf:PDF},
      keywords = {TEC, VOR, SUR, OTS},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • A. Bottino, A. Laurentini, and L. Rosano, “A New Computer-aided Technique for Planning the Aesthetic Outcome of Plastic Surgery,” 2008.
    [Bibtex]
    @ARTICLE{Bottino2008,
      author = {Bottino, Andrea and Laurentini, Aldo and Rosano, Luisa},
      title = {A New Computer-aided Technique for Planning the Aesthetic Outcome
      of Plastic Surgery},
      year = {2008},
      abstract = {Plastic surgery plays a major role in today health care. Planning
      plastic face surgery requires dealing with the elusive concept of
      attractiveness for evaluating feasible beautification of a particular
      face. The existing computer tools essentially allow to manually warp
      2D images or 3D face scans, in order to produce images simulating
      possible surgery outcomes. How to manipulate faces, as well as the
      evaluation of the results, are left to the surgeon’s judgement. We
      propose a new quantitative approach able to automatically suggest
      effective patient-specific improvements of facial attractiveness.
      The general idea is to compare the face of the patient with a large
      database of attractive faces, excluding the facial feature to be
      improved. Then, the feature of the faces more similar is applied,
      with a suitable morphing, to the face of the patient. In this paper
      we present a first application of the general idea in the field of
      nose surgery. Aesthetically effective rhinoplasty is suggested on
      the base of the entire face profile, a very important 2D feature
      for rating face attractiveness.},
      file = {Bottino2008.pdf:Bottino2008.pdf:PDF},
      keywords = {automatic beautification,face profile,plastic surgery,rhinoplasty,
      APP, PLA, OCS, CMS},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • C. Boucheny, G. Bonneau, J. Droulez, G. Thibault, and S. Ploix, “A perceptive evaluation of volume rendering techniques,” ACM Trans. Appl. Percept., vol. 5, p. 23:1 – 23:24, 2009.
    [Bibtex]
    @ARTICLE{Boucheny2009,
      author = {Boucheny, Christian and Bonneau, Georges-Pierre and Droulez, Jacques
      and Thibault, Guillaume and Ploix, Stephane},
      title = {A perceptive evaluation of volume rendering techniques},
      journal = {ACM Trans. Appl. Percept.},
      year = {2009},
      volume = {5},
      pages = {23:1 - 23:24},
      month = {February},
      acmid = {1462054},
      address = {New York, NY, USA},
      articleno = {23},
      file = {Boucheny2009.pdf:Boucheny2009.pdf:PDF},
      issn = {1544-3558},
      issue = {4},
      keywords = {Direct volume rendering, perception of transparency, perspective projection,
      structure from motion, REV, VOR},
      numpages = {24},
      owner = {thomaskroes},
      publisher = {ACM},
      timestamp = {2010.12.07}
    }
  • H. Bourquain, A. Schenk, F. Link, B. Preim, G. Prause, and H. Peitgen, “HepaVision2: A software assistant for preoperative planning in living-related liver transplantation and oncologic liver surgery,” Computer Assisted Radiology and Surgery (CARS 2002), pp. 341-346, 2002.
    [Bibtex]
    @ARTICLE{Bourquain2002,
      author = {Bourquain, H. and Schenk, A. and Link, F. and Preim, B. and Prause,
      G. and Peitgen, HO},
      title = {HepaVision2: A software assistant for preoperative planning in living-related
      liver transplantation and oncologic liver surgery},
      journal = {Computer Assisted Radiology and Surgery (CARS 2002)},
      year = {2002},
      pages = {341--346},
      abstract = {HepaVision2, a user friendly software application for preoperative
      planning based on CT images in liver surgery is presented. It is
      intended for both, evaluation of potential donors in living-related
      liver transplantation and planning of oncologic resections. The planning
      takes into account the patient’s individual anatomy allowing for
      fully automatic calculation of individual resection proposals including
      volumetric analysis. The results are visualized in 3D, thus allowing
      the surgeon to choose the optimal strategy for each patient. The
      software was tested in over 50 cases by our clinical partners and
      our institution. Average time needed per case is below one hour,
      therefore allowing the use of the software application in clinical
      routine.},
      file = {Bourquain2002.pdf:Bourquain2002.pdf:PDF},
      keywords = {PLA, VOR, HES},
      owner = {thomaskroes},
      publisher = {Citeseer},
      timestamp = {2010.11.18}
    }
  • A. Boyer and S. Lavallee, Adjustable Guide in Computer Assisted Orthopaedic Surgery, 2011.
    [Bibtex]
    @MISC{march,
      author = {Boyer, A. and Lavallee, S.},
      title = {Adjustable Guide in Computer Assisted Orthopaedic Surgery},
      month = {January},
      year = {2011},
      owner = {thomaskroes},
      timestamp = {2011.01.12}
    }
  • M. Brell and A. Hein, “Tactile guidance in multimodal computer navigated surgery,” Potentials, IEEE, vol. 28, iss. 4, pp. 30-35, 2009.
    [Bibtex]
    @ARTICLE{Brell2009,
      author = {Brell, M. and Hein, A.},
      title = {Tactile guidance in multimodal computer navigated surgery},
      journal = {Potentials, IEEE},
      year = {2009},
      volume = {28},
      pages = {30 - 35},
      number = {4},
      month = {July - August},
      abstract = {Computer-aided surgery (CAS) is currently a well-accepted means of
      supporting the surgeon. Primarily, the development of imaging technologies
      like computer tomography (CT) or magnetic resonance imaging (MRI)
      is responsible for the entrance of CAS in clinical practice. The
      design of novel pose measurement systems has abetted this entrance,
      and the imaging technologies offer many advantages such as the possibility
      of a 3-D reconstruction images of patients and preoperative planning.
      The problem was with correlating this image data with the actual
      body of the patient. The development of navigation systems with integrated
      pose measurement systems solved this problem. These systems are able
      to detect special localizers that consist of at least three markers,
      like active LEDs or a passive high contrast pattern, to determine
      a coordinate system. The localizers are fixed at the patient, and
      the surgical instruments and can be used to correlate the image data
      with the patient's body. Navigation systems can show the relative
      situation between instruments and the patient. They provide visibility
      on a screen during poor or no visibility in reality, as during endoscopic
      surgery like functional endoscopic sinus surgery (FESS). Tactile
      displays use the sense of touch for information presentation. The
      concept of the tactile display is based on the assumption that the
      position of the human hand can be guided by tactile signals.},
      file = {:Brell2009.pdf:PDF},
      issn = {0278-6648},
      keywords = {computer-aided surgery;endoscopic surgery;image data correlation;image
      reconstruction;imaging technology;multimodal computer navigated surgery;pose
      measurement system;tactile display;tactile guidance;tactile human-machine
      interface;computer displays;correlation methods;haptic interfaces;image
      reconstruction;medical image processing;surgery;, REV},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • D. Briem, A. H. Ruecker, J. Neumann, M. Gebauer, D. Kendoff, T. Gehrke, W. Lehmann, U. Schumacher, J. M. Rueger, and L. G. Grossterlinden, “3D fluoroscopic navigated reaming of the glenoid for total shoulder arthroplasty (TSA),” Computer Aided Surgery, pp. 1-7, 2011.
    [Bibtex]
    @ARTICLE{Briem2011,
      author = {Briem, D. and Ruecker, A.H. and Neumann, J. and Gebauer, M. and Kendoff,
      D. and Gehrke, T. and Lehmann, W. and Schumacher, U. and Rueger,
      J.M. and Grossterlinden, L.G.},
      title = {3D fluoroscopic navigated reaming of the glenoid for total shoulder
      arthroplasty (TSA)},
      journal = {Computer Aided Surgery},
      year = {2011},
      pages = {1 - 7},
      number = {0},
      issn = {1092-9088},
      publisher = {Informa UK Ltd UK}
    }
  • M. Bro-Nielsen, “Finite element modeling in surgery simulation,” Proceedings of the IEEE, vol. 86, iss. 3, pp. 490-503, 1998.
    [Bibtex]
    @ARTICLE{Bro2002,
      author = {Bro-Nielsen, M.},
      title = {Finite element modeling in surgery simulation},
      journal = {Proceedings of the IEEE},
      year = {1998},
      volume = {86},
      pages = {490 - 503},
      number = {3},
      file = {Bro2002.pdf:Bro2002.pdf:PDF},
      issn = {0018-9219},
      keywords = {PRS},
      owner = {thomaskroes},
      publisher = {IEEE},
      timestamp = {2011.01.25}
    }
  • S. Bruckner, S. Grimm, A. Kanitsar, and M. E. Gröller, “Illustrative context-preserving volume rendering,” in Proceedings of EUROVIS, vol. 2005, pp. 69-76, 2005.
    [Bibtex]
    @CONFERENCE{Bruckner2005a,
      author = {Bruckner, S. and Grimm, S. and Kanitsar, A. and Gr\"oller, M.E.},
      title = {Illustrative context-preserving volume rendering},
      booktitle = {Proceedings of EUROVIS},
      year = {2005},
      volume = {2005},
      pages = {69 - 76},
      organization = {Citeseer},
      file = {Bruckner2005a.pdf:Bruckner2005a.pdf:PDF},
      keywords = {VOR},
      owner = {thomaskroes},
      timestamp = {2011.01.03}
    }
  • P. Buchler, “Benefits of an anatomical reconstruction of the humeral head during shoulder arthroplasty: a finite element analysis,” Clinical Biomechanics, vol. 19, iss. 1, pp. 16-23, 2004.
    [Bibtex]
    @ARTICLE{Buchler2004,
      author = {Buchler, P},
      title = {Benefits of an anatomical reconstruction of the humeral head during
      shoulder arthroplasty: a finite element analysis},
      journal = {Clinical Biomechanics},
      year = {2004},
      volume = {19},
      pages = {16-23},
      number = {1},
      month = {January},
      abstract = {Objective. To study the influence of the shape of the prosthetic humeral
      head on shoulder biomechanics and then to evaluate the benefits of
      an anatomical reconstruction of the humeral head after shoulder arthroplasty.
      Design. A 3D numerical model of a healthy shoulder was reconstructed.
      The model included the proximal humerus, the scapula and, for stability
      purposes, the subscapularis, infraspinatus and supraspinatus rotator
      cuff muscles. Background. Shoulder prostheses used nowadays, called
      third generation, allow for a better adaptation of the implant to
      the anatomy of the proximal humerus than previously used implants.
      However, no biomechanical study has shown the benefits of this anatomical
      reconstruction of the humeral head. Methods. The model was used to
      compare the biomechanics of a shoulder without implant with the biomechanics
      of the same shoulder after humeral hemiarthroplasty. Two humeral
      components were tested: a second-generation prosthesis and an implant
      with an anatomically reconstructed humeral head. Results. The anatomical
      reconstruction of the humeral head restored the physiological motions
      and limited eccentric loading of the glenoid. Conversely, the second-generation
      implant produced contact forces in the superior extremity of the
      glenoid surface leading to bone stresses up to 8 times higher than
      for the intact shoulder. Conclusions. This analysis provided insights
      into the mechanical effects of different reconstructions of the humeral
      head and highlighted the advantages of anatomical reconstructions
      of the humeral head during shoulder arthroplasty.},
      file = {Buchler2004.pdf:Buchler2004.pdf:PDF},
      issn = {02680033},
      keywords = {anatomical reconstruction,finite element,hemiarthroplasty,shoulder,
      OCS, TEC},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • S. De Buck, J. Van Cleynenbreugel, I. Geys, T. Koninckx, P. R. Koninck, and P. Suetens, “A System to Support Laparoscopic Surgery by Augmented Reality Visualization,” pp. 691-698, 2001.
    [Bibtex]
    @ARTICLE{Buck2001,
      author = {{De Buck}, Stijn and {Van Cleynenbreugel}, Johan and Geys, Indra and
      Koninckx, Thomas and Koninck, Philippe R and Suetens, Paul},
      title = {A System to Support Laparoscopic Surgery by Augmented Reality Visualization},
      year = {2001},
      pages = {691-698},
      abstract = {This paper describes the development of an augmented reality system
      for intra-operative laparoscopic surgery support. The goal of this
      system is to reveal structures, otherwise hidden within the laparoscope
      view. To allow flexible movement of the laparoscope we use optical
      tracking to track both patient and laparoscope. The necessary calibration
      and registration procedures were developed and bundled where possible
      in order to facilitate integration in a current laparoscopic procedure.
      Care was taken to achieve high accuracy by including radial distortion
      components without compromising real time speed. Finally a visual
      error assessment is performed, the usefulness is demonstrated within
      a test setup and some preliminary quantitative evaluation is done.},
      file = {Buck2001.pdf:Buck2001.pdf:PDF},
      keywords = {APP, AUR, SUR},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • K. Buhler, P. Felkel, and A. La Cruz, “Geometric methods for vessel visualization and quantification-a survey,” Geometric Modeling for Scientific Visualization, pp. 399-420, 2004.
    [Bibtex]
    @ARTICLE{Buhler2004,
      author = {Buhler, K. and Felkel, P. and La Cruz, A.},
      title = {Geometric methods for vessel visualization and quantification-a survey},
      journal = {Geometric Modeling for Scientific Visualization},
      year = {2004},
      pages = {399 - 420},
      file = {Buhler2004.pdf:Buhler2004.pdf:PDF},
      keywords = {REV, TEC},
      owner = {thomaskroes},
      timestamp = {2011.01.05}
    }
  • E. Bullitt, “Visualizing blood vessel trees in three dimensions: clinical applications,” Proceedings of SPIE, pp. 67-78, 2005.
    [Bibtex]
    @ARTICLE{Bullitt2005,
      author = {Bullitt, Elizabeth},
      title = {Visualizing blood vessel trees in three dimensions: clinical applications},
      journal = {Proceedings of SPIE},
      year = {2005},
      pages = {67-78},
      abstract = {A connected network of blood vessels surrounds and permeates almost
      every organ of the human body. The ability to define detailed blood
      vessel trees enables a variety of clinical applications. This paper
      discusses four such applications and some of the visualization challenges
      inherent to each. Guidance of endovascular surgery: 3D vessel trees
      offer important information unavailable by traditional x-ray projection
      views. How best to combine the 2- and 3D image information is unknown.
      Planning/guidance of tumor surgery: During tumor resection it is
      critical to know which blood vessels can be interrupted safely and
      which cannot. Providing efficient, clear information to the surgeon
      together with measures of uncertainty in both segmentation and registration
      can be a complex problem. Vessel-based registration: Vessel-based
      registration allows pre-and intraoperative images to be registered
      rapidly. The approach both provides a potential solution to a difficult
      clinical dilemma and offers a variety of visualization opportunities.
      Diagnosis/staging of disease: Almost every disease affects blood
      vessel morphology. The statistical analysis of vessel shape may thus
      prove to be an important tool in the noninvasive analysis of disease.
      A plethora of information is available that must be presented meaningfully
      to the clinician. As medical image analysis methods increase in sophistication,
      an increasing amount of useful information of varying types will
      become available to the clinician. New methods must be developed
      to present a potentially bewildering amount of complex data to individuals
      who are often accustomed to viewing only tissue slices or flat projection
      views.},
      file = {Bullitt2005.pdf:Bullitt2005.pdf:PDF},
      issn = {0277786X},
      keywords = {computer-assisted diagnosis,mra,registration,segmentation,surgical
      guidance,vessels, REV},
      owner = {thomaskroes},
      publisher = {Spie},
      timestamp = {2010.10.22}
    }
  • Z. Burgielski, T. Jansen, B. von Rymon-Lipinski, N. Hanssen, and E. Keeve, “Julius-a software framework for computer-aided-surgery,” Biomedizinische Technik/Biomedical Engineering, vol. 47, iss. s1a, pp. 101-103, 2002.
    [Bibtex]
    @ARTICLE{Burgielski2002,
      author = {Burgielski, Z. and Jansen, T. and von Rymon-Lipinski, B. and Hanssen,
      N. and Keeve, E.},
      title = {Julius-a software framework for computer-aided-surgery},
      journal = {Biomedizinische Technik/Biomedical Engineering},
      year = {2002},
      volume = {47},
      pages = {101 - 103},
      number = {s1a},
      issn = {0013-5585},
      keywords = {TEC},
      owner = {thomaskroes},
      publisher = {Walter de Gruyter, Berlin/New York},
      timestamp = {2011.01.12}
    }
  • L. Caponetti and A. Fanelli, “Computer-aided simulation for bone surgery,” Computer Graphics and Applications, IEEE, vol. 13, iss. 6, pp. 86-92, 1993.
    [Bibtex]
    @ARTICLE{Caponetti2002,
      author = {Caponetti, L. and Fanelli, AM},
      title = {Computer-aided simulation for bone surgery},
      journal = {Computer Graphics and Applications, IEEE},
      year = {1993},
      volume = {13},
      pages = {86 - 92},
      number = {6},
      file = {Caponetti2002.pdf:Caponetti2002.pdf:PDF},
      issn = {0272-1716},
      keywords = {APP, OTS, SUR, SLR},
      owner = {thomaskroes},
      publisher = {IEEE},
      timestamp = {2011.01.25}
    }
  • M. A. Cardin, J. X. Wang, and D. B. Plewes, “A Method to Evaluate Human Spatial Coordination Interfaces for Computer-Assisted Surgery,” in Medical Image Computing and Computer-Assisted Intervention – MICCAI 2005, J. Duncan and G. Gerig, Eds., Springer Berlin / Heidelberg, 2005, vol. 3750, pp. 9-16.
    [Bibtex]
    @INCOLLECTION{Cardin2005,
      author = {Cardin, M.A. and Wang, J.X. and Plewes, D.B.},
      title = {A Method to Evaluate Human Spatial Coordination Interfaces for Computer-Assisted
      Surgery},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention – MICCAI
      2005},
      publisher = {Springer Berlin / Heidelberg},
      year = {2005},
      editor = {Duncan, James and Gerig, Guido},
      volume = {3750},
      series = {Lecture Notes in Computer Science},
      pages = {9 - 16},
      abstract = {Computer assistance for breast conserving surgery requires a guidance
      method to assist a surgeon in locating tumor margin accurately. A
      wide array of guidance methods can be considered ranging from various
      pictorial representations, symbolic graphical interfaces as well
      as those based on other sensory cues such as sound. In this study,
      we present an experimental framework for testing candidate guidance
      methods in isolation or in combination. A total of 22 guidance approaches,
      based on stereographic, non-stereographic, symbolic and auditory
      cues were tested in a simulation of breast conserving surgery. Observers
      were asked to circumscribe a virtual tumor with a magnetically tracked
      scalpel while measuring the spatial accuracy, time and the frequency
      with which the tumor margin was intersected. A total of 110 studies
      were performed with 5 volunteers. Based on these findings, we demonstrated
      that a single view of the tumor with a stereo presentation in conjunction
      with an auditory guidance cue provided the best balance of accuracy,
      speed and surgical integrity. This study demonstrates a practical
      and helpful framework for testing guidance methods in a context dependent
      manner.},
      affiliation = {Department of Imaging Research, Sunnybrook and Women’s College Health
      Sciences Centre, 2075 Bayview Avenue, Toronto, Ontario Canada},
      file = {Cardin2005.pdf:Cardin2005.pdf:PDF},
      keywords = {APP, PLA},
      owner = {thomaskroes},
      timestamp = {2011.01.04}
    }
  • D. Cash, M. Miga, S. Glasgow, B. Dawant, L. Clements, Z. Cao, R. Galloway, and W. Chapman, “Concepts and Preliminary Data Toward the Realization of Image-guided Liver Surgery,” Journal of Gastrointestinal Surgery, vol. 11, pp. 844-859, 2007.
    [Bibtex]
    @ARTICLE{Cash2007,
      author = {Cash, David and Miga, Michael and Glasgow, Sean and Dawant, Benoit
      and Clements, Logan and Cao, Zhujiang and Galloway, Robert and Chapman,
      William},
      title = {Concepts and Preliminary Data Toward the Realization of Image-guided
      Liver Surgery},
      journal = {Journal of Gastrointestinal Surgery},
      year = {2007},
      volume = {11},
      pages = {844 - 859},
      abstract = {Image-guided surgery provides navigational assistance to the surgeon
      by displaying the surgical probe position on a set of preoperative
      tomograms in real time. In this study, the feasibility of implementing
      image-guided surgery concepts into liver surgery was examined during
      eight hepatic resection procedures. Preoperative tomographic image
      data were acquired and processed. Accompanying intraoperative data
      on liver shape and position were obtained through optically tracked
      probes and laser range scanning technology. The preoperative and
      intraoperative representations of the liver surface were aligned
      using the iterative closest point surface matching algorithm. Surface
      registrations resulted in mean residual errors from 2 to 6 mm, with
      errors of target surface regions being below a stated goal of 1 cm.
      Issues affecting registration accuracy include liver motion due to
      respiration, the quality of the intraoperative surface data, and
      intraoperative organ deformation. Respiratory motion was quantified
      during the procedures as cyclical, primarily along the cranial–caudal
      direction. The resulting registrations were more robust and accurate
      when using laser range scanning to rapidly acquire thousands of points
      on the liver surface and when capturing unique geometric regions
      on the liver surface, such as the inferior edge. Finally, finite
      element models recovered much of the observed intraoperative deformation,
      further decreasing errors in the registration. Image-guided liver
      surgery has shown the potential to provide surgeons with important
      navigation aids that could increase the accuracy of targeting lesions
      and the number of patients eligible for surgical resection.},
      affiliation = {Vanderbilt University Department of Biomedical Engineering Nashville
      TN USA},
      file = {Cash2007.pdf:Cash2007.pdf:PDF},
      issn = {1091-255X},
      issue = {7},
      keyword = {Medicine},
      keywords = {APP, HES, SLR, SUR, PRS},
      owner = {Thomas},
      publisher = {Springer New York},
      timestamp = {2011.01.31}
    }
  • J. E. Cates, A. E. Lefohn, and R. T. Whitaker, “GIST: an interactive, GPU-based level set segmentation tool for 3D medical images.,” Medical image analysis, vol. 8, iss. 3, pp. 217-31, 2004.
    [Bibtex]
    @ARTICLE{Cates2004,
      author = {Cates, Joshua E and Lefohn, Aaron E and Whitaker, Ross T},
      title = {GIST: an interactive, GPU-based level set segmentation tool for 3D
      medical images.},
      journal = {Medical image analysis},
      year = {2004},
      volume = {8},
      pages = {217-31},
      number = {3},
      month = {September},
      abstract = {While level sets have demonstrated a great potential for 3D medical
      image segmentation, their usefulness has been limited by two problems.
      First, 3D level sets are relatively slow to compute. Second, their
      formulation usually entails several free parameters which can be
      very difficult to correctly tune for specific applications. The second
      problem is compounded by the first. This paper describes a new tool
      for 3D segmentation that addresses these problems by computing level-set
      surface models at interactive rates. This tool employs two important,
      novel technologies. First is the mapping of a 3D level-set solver
      onto a commodity graphics card (GPU). This mapping relies on a novel
      mechanism for GPU memory management. The interactive rates of the level-set
      PDE solver give the user immediate feedback on the parameter settings,
      and thus users can tune free parameters and control the shape of
      the model in real time. The second technology is the use of intensity-based
      speed functions, which allow a user to quickly and intuitively specify
      the behavior of the deformable model. We have found that the combination
      of these interactive tools enables users to produce good, reliable
      segmentations. To support this observation, this paper presents qualitative
      results from several different datasets as well as a quantitative
      evaluation from a study of brain tumor segmentations.},
      file = {Cates2004.pdf:Cates2004.pdf:PDF},
      issn = {1361-8415},
      keywords = {Algorithms,Brain Neoplasms,Brain Neoplasms: pathology,Computer Graphics,Humans,Image
      Processing, Computer-Assisted,Image Processing, Computer-Assisted:
      methods,Imaging, Three-Dimensional,Magnetic Resonance Imaging,Reproducibility
      of Results,Software,User-Computer Interface, TEC},
      owner = {thomaskroes},
      pmid = {15450217},
      timestamp = {2010.10.22}
    }
  • J. E. Cates, R. T. Whitaker, and G. M. Jones, “Case study: an evaluation of user-assisted hierarchical watershed segmentation.,” Medical image analysis, vol. 9, iss. 6, pp. 566-78, 2005.
    [Bibtex]
    @ARTICLE{Cates2005,
      author = {Cates, Joshua E and Whitaker, Ross T and Jones, Greg M},
      title = {Case study: an evaluation of user-assisted hierarchical watershed
      segmentation.},
      journal = {Medical image analysis},
      year = {2005},
      volume = {9},
      pages = {566-78},
      number = {6},
      month = {December},
      abstract = {This paper evaluates the effectiveness of an interactive, three-dimensional
      image segmentation technique that relies on watersheds. This paper
      presents two user-based case studies, which include two different
      groups of domain experts. Subjects manipulate a graphics-based front
      end to a hierarchy of segmented regions generated from a watershed
      segmentation algorithm, which is implemented in the Insight Toolkit.
      In the first study, medical students segment several different anatomical
      structures from the Visible Human Female head and neck color cryosection
      data. In the second study, radiologists use the interactive tool
      to produce models of brain tumors from MRI data. This paper presents
      a quantitative and qualitative comparison against hand contouring.
      To quantify accuracy, we estimate ground truth from the hand-contouring
      data using the Simultaneous Truth and Performance Estimation algorithm.
      We also apply metrics from the literature to estimate precision and
      efficiency. The watershed segmentation technique showed improved
      subject interaction times and increased inter-subject precision over
      hand contouring, with quality that is visually and statistically
      comparable. The analysis also identifies some failures in the watershed
      technique, where edges were poorly defined in the data, and note
      a trend in the hand-contouring results toward systematically larger
      segmentations, which raises questions about the wisdom of using expert
      segmentations to define ground truth.},
      file = {Cates2005.pdf:Cates2005.pdf:PDF},
      issn = {1361-8415},
      keywords = {Algorithms,Artificial Intelligence,Brain Neoplasms,Brain Neoplasms:
      pathology,Female,Humans,Image Enhancement,Image Enhancement: methods,Image
      Interpretation, Computer-Assisted,Image Interpretation, Computer-Assisted:
      methods,Imaging, Three-Dimensional,Imaging, Three-Dimensional: methods,Magnetic
      Resonance Imaging,Magnetic Resonance Imaging: methods,Male,Pattern
      Recognition, Automated,Pattern Recognition, Automated: methods,Reproducibility
      of Results,Sensitivity and Specificity,Software Validation,User-Computer
      Interface, TEC},
      owner = {thomaskroes},
      pmid = {15919233},
      timestamp = {2010.10.22}
    }
  • J. Cebral, R. Löhner, O. Soto, P. Choyke, and P. Yim, “Patient-Specific Simulation of Carotid Artery Stenting Using Computational Fluid Dynamics,” in Medical Image Computing and Computer-Assisted Intervention – MICCAI 2001, W. Niessen and M. Viergever, Eds., Springer Berlin / Heidelberg, 2001, vol. 2208, pp. 153-160.
    [Bibtex]
    @INCOLLECTION{Cebral2001,
      author = {Cebral, Juan and Löhner, Rainald and Soto, Orlando and Choyke, Peter
      and Yim, Peter},
      title = {Patient-Specific Simulation of Carotid Artery Stenting Using Computational
      Fluid Dynamics},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention – MICCAI
      2001},
      publisher = {Springer Berlin / Heidelberg},
      year = {2001},
      editor = {Niessen, Wiro and Viergever, Max},
      volume = {2208},
      series = {Lecture Notes in Computer Science},
      pages = {153-160},
      abstract = {An image-based computational methodology to predict the outcome of
      carotid artery stenting procedures is presented. Anatomically realistic
      models are reconstructed from contrast-enhanced magnetic resonance
      angiography images using deformable models. Physiologic flow conditions
      are obtained from phase-contrast magnetic resonance angiography data.
      Finite element flow calculations are obtained before and after modifying
      the anatomical models in order to simulate stenting procedures. The
      methodology was tested on image data from a patient with carotid
      artery stenosis. Significant changes in the blood flow through the
      common carotid and internal carotid artery were found after conducting
      a virtual stenting intervention. Pending experimental validation,
      this methodology may potentially be used to plan and optimize vascular
      stenting procedures on a patient-specific basis.},
      affiliation = {School of Computational Sciences, George Mason University, 4400 University
      Drive M.S. 4C7, Fairfax, Virginia 22030, USA},
      file = {Cebral2001.pdf:Cebral2001.pdf:PDF},
      url = {http://dx.doi.org/10.1007/3-540-45468-3_19}
    }
  • L. H. C. Cevidanes, S. Tucker, M. Styner, H. Kim, J. Chapuis, M. Reyes, W. Proffit, T. Turvey, and M. Jaskolka, “Three-dimensional surgical simulation.,” American journal of orthodontics and dentofacial orthopedics : official publication of the American Association of Orthodontists, its constituent societies, and the American Board of Orthodontics, vol. 138, iss. 3, pp. 361-71, 2010.
    [Bibtex]
    @ARTICLE{Cevidanes2010,
      author = {Cevidanes, Lucia H C and Tucker, Scott and Styner, Martin and Kim,
      Hyungmin and Chapuis, Jonas and Reyes, Mauricio and Proffit, William
      and Turvey, Timothy and Jaskolka, Michael},
      title = {Three-dimensional surgical simulation.},
      journal = {American journal of orthodontics and dentofacial orthopedics : official
      publication of the American Association of Orthodontists, its constituent
      societies, and the American Board of Orthodontics},
      year = {2010},
      volume = {138},
      pages = {361-71},
      number = {3},
      month = {September},
      abstract = {In this article, we discuss the development of methods for computer-aided
      jaw surgery, which allows us to incorporate the high level of precision
      necessary for transferring virtual plans into the operating room.
      We also present a complete computer-aided surgery system developed
      in close collaboration with surgeons. Surgery planning and simulation
      include construction of 3-dimensional surface models from cone-beam
      computed tomography, dynamic cephalometry, semiautomatic mirroring,
      interactive cutting of bone, and bony segment repositioning. A virtual
      setup can be used to manufacture positioning splints for intraoperative
      guidance. The system provides further intraoperative assistance with
      a computer display showing jaw positions and 3-dimensional positioning
      guides updated in real time during the surgical procedure. The computer-aided
      surgery system aids in dealing with complex cases with benefits for
      the patient, with surgical practice, and for orthodontic finishing.
      Advanced software tools for diagnosis and treatment planning allow
      preparation of detailed operative plans, osteotomy repositioning,
      bone reconstructions, surgical resident training, and assessing the
      difficulties of the surgical procedures before the surgery. Computer-aided
      surgery can make the elaboration of the surgical plan a more flexible
      process, increase the level of detail and accuracy of the plan, yield
      higher operative precision and control, and enhance documentation
      of cases.},
      file = {Cevidanes2010.pdf:Cevidanes2010.pdf:PDF},
      issn = {1097-6752},
      keywords = {Cephalometry,Cephalometry: methods,Computer Simulation,Cone-Beam Computed
      Tomography,Data Display,Dental Models,Finite Element Analysis,Humans,Image
      Processing, Computer-Assisted,Image Processing, Computer-Assisted:
      methods,Imaging, Three-Dimensional,Imaging, Three-Dimensional: methods,Information
      Systems,Intraoperative Care,Orthognathic Surgical Procedures,Orthognathic
      Surgical Procedures: methods,Osteotomy,Osteotomy: methods,Patient
      Care Planning,Reconstructive Surgical Procedures,Reconstructive Surgical
      Procedures: methods,Software,Surgery, Computer-Assisted,Surgery,
      Computer-Assisted: methods,User-Computer Interface, APP, CMS, OCS,
      TEC, GUI, PLA, SUR},
      owner = {thomaskroes},
      pmid = {20816308},
      publisher = {American Association of Orthodontists},
      timestamp = {2010.10.22}
    }
  • M. Chabanas, V. Luboz, and Y. Payan, “Patient specific finite element model of the face soft tissues for computer-assisted maxillofacial surgery,” Medical Image Analysis, vol. 7, iss. 2, pp. 131-151, 2003.
    [Bibtex]
    @ARTICLE{Chabanas2003,
      author = {Chabanas, Matthieu and Luboz, Vincent and Payan, Yohan},
      title = {Patient specific finite element model of the face soft tissues for
      computer-assisted maxillofacial surgery},
      journal = {Medical Image Analysis},
      year = {2003},
      volume = {7},
      pages = {131-151},
      number = {2},
      month = {June},
      abstract = {This paper addresses the prediction of face soft tissue deformations
      resulting from bone repositioning in maxillofacial surgery. A generic
      3D Finite Element model of the face soft tissues was developed. Face
      muscles are defined in the mesh as embedded structures, with different
      mechanical properties (transverse isotropy, stiffness depending on
      muscle contraction). Simulations of face deformations under muscle
      actions can thus be performed. In the context of maxillofacial surgery,
      this generic soft-tissue model is automatically conformed to patient
      morphology by elastic registration, using skin and skull surfaces
      segmented from a CT scan. Some elements of the patient mesh could
      be geometrically distorted during the registration, which disables
      Finite Element analysis. Irregular elements are thus detected and
      automatically regularized. This semi-automatic patient model generation
      is robust, fast and easy to use. Therefore it seems compatible with
      clinical use. Six patient models were successfully built, and simulations
      of soft tissue deformations resulting from bone displacements performed
      on two patient models. Both the adequation of the models to the patient
      morphologies and the simulations of post-operative aspects were qualitatively
      validated by five surgeons. Their conclusions are that the models
      fit the morphologies of the patients, and that the predicted soft
      tissue modifications are coherent with what they would expect.},
      file = {Chabanas2003.pdf:Chabanas2003.pdf:PDF},
      issn = {13618415},
      keywords = {computer aided maxillofacial surgery,elastic registration,finite element
      mesh regularity,finite element method,mesh conformation, TEC, OCS,
      CMS, SUR},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • M. Chabanas, C. Marecaux, Y. Payan, and F. Boutault, “Computer aided planning for orthognatic surgery,” ArXiv Physics e-prints, 2006.
    [Bibtex]
    @ARTICLE{Chabanas2006,
      author = {Chabanas, M. and Marecaux, C. and Payan, Y. and Boutault, F.},
      title = {Computer aided planning for orthognatic surgery},
      journal = {ArXiv Physics e-prints},
      year = {2006},
      month = {October},
      abstract = {A computer aided maxillofacial sequence is presented, applied to orthognatic
      surgery. It consists of 5 main stages: data acquisition and integration,
      surgical planning, surgical simulation, and per operative assistance.
      The planning and simulation steps are then addressed in a way that
      is clinically relevant. First concepts toward a 3D cephalometry are
      presented for a morphological analysis, surgical planning, and bone
      and soft tissue simulation. The aesthetic surgical outcomes of bone
      repositioning are studied with a biomechanical Finite Element soft
      tissue model.},
      eprint = {arXiv:physics/0610213},
      file = {Chabanas2006.pdf:Chabanas2006.pdf:PDF},
      keywords = {Physics - Medical Physics, APP, CMS, PLA, GUI, OCS, SUR},
      owner = {thomaskroes},
      timestamp = {2010.10.26}
    }
  • M. Chabanas and Y. Payan, “Finite element model of the face soft tissue for computer assisted maxillofacial surgery,” in 5th International Symposium on Computer Methods in Biomechanics & Biomedical Engineering, Rome, 2001.
    [Bibtex]
    @CONFERENCE{Chabanas2001,
      author = {Chabanas, M. and Payan, Y.},
      title = {Finite element model of the face soft tissue for computer assisted
      maxillofacial surgery},
      booktitle = {INTERNATIONAL SYMPOSIUM ON COMPUTER METHODS IN BIOMECHANICS \& BIOMEDICAL
      ENGINEERING (5.: 2001: Rome). Anais. Rome},
      year = {2001},
      file = {Chabanas2001.pdf:Chabanas2001.pdf:PDF},
      keywords = {OCS, TEC, CMS, SUR},
      owner = {thomaskroes},
      timestamp = {2011.01.10}
    }
  • M. Chabanas and Y. Payan, “A 3D Finite Element model of the face for simulation in plastic and maxillo-facial surgery,” in Medical Image Computing and Computer-Assisted Intervention – MICCAI 2000, pp. 411-496, 2000.
    [Bibtex]
    @CONFERENCE{Chabanas2000,
      author = {Chabanas, M. and Payan, Y.},
      title = {A 3D Finite Element model of the face for simulation in plastic and
      maxillo-facial surgery},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention--MICCAI
      2000},
      year = {2000},
      pages = {411 - 496},
      organization = {Springer},
      file = {Chabanas2000.pdf:Chabanas2000.pdf:PDF},
      keywords = {TEC, CMS, SUR},
      owner = {Thomas},
      timestamp = {2011.02.08}
    }
  • B. Challacombe and D. Stoianovici, “The Basic Science of Robotic Surgery,” in Urologic Robotic Surgery in Clinical Practice, Springer London, 2009, pp. 1-23.
    [Bibtex]
    @INCOLLECTION{Challacombe2009,
      author = {Challacombe, Ben and Stoianovici, Dan},
      title = {The Basic Science of Robotic Surgery},
      booktitle = {Urologic Robotic Surgery in Clinical Practice},
      publisher = {Springer London},
      year = {2009},
      pages = {1-23},
      abstract = {This chapter aims to cover the basic science of robotic surgery focusing
      on all the devices currently in clinical use. We hope to give the
      potential and practicing robotic surgeon an understanding of the
      scientific basis behind the machines themselves and provide a concise
      framework of the practical nuances.},
      affiliation = {Guy’s Hospital Department of Urology London UK},
      file = {Challacombe2009.pdf:Challacombe2009.pdf:PDF},
      isbn = {978-1-84800-243-2},
      keyword = {Medicine & Public Health},
      keywords = {REV},
      owner = {Thomas},
      timestamp = {2011.03.09},
      url = {http://dx.doi.org/10.1007/978-1-84800-243-2_2}
    }
  • H. P. Chan, K. Doi, S. Galhotra, C. J. Vyborny, H. MacMahon, and P. M. Jokich, “Image feature analysis and computer-aided diagnosis in digital radiography. I. Automated detection of microcalcifications in mammography,” Medical Physics, vol. 14, p. 538, 1987.
    [Bibtex]
    @ARTICLE{Chan1987,
      author = {Chan, H.P. and Doi, K. and Galhotra, S. and Vyborny, C.J. and MacMahon,
      H. and Jokich, P.M.},
      title = {Image feature analysis and computer-aided diagnosis in digital radiography.
      I. Automated detection of microcalcifications in mammography},
      journal = {Medical Physics},
      year = {1987},
      volume = {14},
      pages = {538},
      file = {Chan1987.pdf:Chan1987.pdf:PDF},
      keywords = {TEC, IMP},
      owner = {thomaskroes},
      timestamp = {2011.01.06}
    }
  • H. P. Chan, K. Doi, C. Vyborny, and R. Schmidt, “Improvement in radiologists’ detection of clustered microcalcifications on mammograms. The potential of computer-aided diagnosis.,” Investigative Radiology, vol. 25, iss. 10, pp. 1102-1110, 1990.
    [Bibtex]
    @ARTICLE{Chan1990,
      author = {Chan, H.P. and Doi, K. and Vyborny, CJ and Schmidt, R.},
      title = {Improvement in radiologists' detection of clustered microcalcifications
      on mammograms. The potential of computer-aided diagnosis.},
      journal = {Investigative Radiology},
      year = {1990},
      volume = {25},
      pages = {1102 - 1110},
      number = {10},
      owner = {thomaskroes},
      timestamp = {2011.01.06}
    }
  • J. Chapuis, T. Rudolph, B. Borgesson, E. De Momi, I. P. Pappas, W. Hallermann, A. Schramm, and M. Caversaccio, “3D surgical planning and navigation for CMF surgery,” in Proceedings of SPIE, vol. 5367, p. 403, 2004.
    [Bibtex]
    @CONFERENCE{Chapuis2004,
      author = {Chapuis, J. and Rudolph, T. and Borgesson, B. and De Momi, E. and
      Pappas, I.P. and Hallermann, W. and Schramm, A. and Caversaccio,
      M.},
      title = {3D surgical planning and navigation for CMF surgery},
      booktitle = {Proceedings of SPIE},
      year = {2004},
      volume = {5367},
      pages = {403},
      keywords = {APP, CMS, PLA, GUI},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
  • J. Chapuis, A. Schramm, I. Pappas, W. Hallermann, K. Schwenzer-Zimmerer, F. Langlotz, and M. Caversaccio, “A new system for computer-aided preoperative planning and intraoperative navigation during corrective jaw surgery.,” IEEE transactions on information technology in biomedicine : a publication of the IEEE Engineering in Medicine and Biology Society, vol. 11, iss. 3, pp. 274-87, 2007.
    [Bibtex]
    @ARTICLE{Chapuis2007,
      author = {Chapuis, Jonas and Schramm, Alexander and Pappas, Ion and Hallermann,
      Wock and Schwenzer-Zimmerer, Katja and Langlotz, Frank and Caversaccio,
      Marco},
      title = {A new system for computer-aided preoperative planning and intraoperative
      navigation during corrective jaw surgery.},
      journal = {IEEE transactions on information technology in biomedicine : a publication
      of the IEEE Engineering in Medicine and Biology Society},
      year = {2007},
      volume = {11},
      pages = {274-87},
      number = {3},
      month = {May},
      abstract = {A new system for computer-aided corrective surgery of the jaws has
      been developed and introduced clinically. It combines three-dimensional
      (3-D) surgical planning with conventional dental occlusion planning.
      The developed software allows simulating the surgical correction
      on virtual 3-D models of the facial skeleton generated from computed
      tomography (CT) scans. Surgery planning and simulation include dynamic
      cephalometry, semi-automatic mirroring, interactive cutting of bone
      and segment repositioning. By coupling the software with a tracking
      system and with the help of a special registration procedure, we
      are able to acquire dental occlusion plans from plaster model mounts.
      Upon completion of the surgical plan, the setup is used to manufacture
      positioning splints for intraoperative guidance. The system provides
      further intraoperative assistance with the help of a display showing
      jaw positions and 3-D positioning guides updated in real time during
      the surgical procedure. The proposed approach offers the advantages
      of 3-D visualization and tracking technology without sacrificing
      long-proven cast-based techniques for dental occlusion evaluation.
      The system has been applied on one patient. Throughout this procedure,
      we have experienced improved assessment of pathology, increased precision,
      and augmented control.},
      file = {Chapuis2007.pdf:Chapuis2007.pdf:PDF},
      issn = {1089-7771},
      keywords = {Humans,Imaging, Three-Dimensional,Imaging, Three-Dimensional: methods,Intraoperative
      Care,Intraoperative Care: methods,Jaw Abnormalities,Jaw Abnormalities:
      surgery,Osteotomy,Osteotomy: methods,Preoperative Care,Preoperative
      Care: methods,Reconstructive Surgical Procedures,Reconstructive Surgical
      Procedures: methods,Software,Surgery, Computer-Assisted,Surgery,
      Computer-Assisted: methods,Systems Integration,User-Computer Interface,
      APP, CMS, OCS, PLA, GUI, TRM, SUR, SLR},
      owner = {thomaskroes},
      pmid = {17521077},
      timestamp = {2010.10.22}
    }
  • G. Chen, X. Li, G. Wu, Y. Wang, B. Fang, X. Xiong, R. Yang, L. Tan, S. Zhang, and J. Dong, “The use of virtual reality for the functional simulation of hepatic tumors (case control study).,” International journal of surgery (London, England), vol. 8, iss. 1, pp. 72-8, 2010.
    [Bibtex]
    @ARTICLE{Chen2010,
      author = {Chen, Gang and Li, Xue-cheng and Wu, Guo-qing and Wang, Yi and Fang,
      Bin and Xiong, Xiao-feng and Yang, Ri-gao and Tan, Li-wen and Zhang,
      Shao-xiang and Dong, Jia-hong},
      title = {The use of virtual reality for the functional simulation of hepatic
      tumors (case control study).},
      journal = {International journal of surgery (London, England)},
      year = {2010},
      volume = {8},
      pages = {72-8},
      number = {1},
      month = {January},
      abstract = {OBJECTIVE: To develop a technique for converting computed tomography
      (CT) data into a fully three-dimensional (3D) virtual reality (VR)
      environment. Preoperative simulation in 3D VR facilitates liver resection
      owing to the ability to view the tumor and its relative vessels.
      METHODS: 3D-reconstruction of the liver was restored from spiral
      CT data by using LiVirtue software and the Dextrobeam (Volume Interactions
      Pte Ltd, Singapore) was applied to view this 3D model in the VR environment.
      In order to design a rational plan of operation, the liver and its
      anatomic structure were reconstructed to illuminate the location
      of the tumor and its related vessels. RESULTS: In our series of 38
      hepatic resections, there was no significant difference between preoperatively
      calculated volumes of virtual resection part and actual volumes of
      resected specimen's weight. The LiVirtue can provide accurate and
      rapid results of individual hepatic volume and the character of anatomy
      structures. These models can be viewed and manipulated in the VR
      environment and on a personal computer. This preoperative simulation
      allowed surgeons to dissect the liver with reduced complications.
      Preoperative planning and intra-operative navigation based on this
      technique ensured the safety of liver resection. CONCLUSIONS: 3D
      models of the liver and its detailed structure articulate the possibility
      of intricate liver resection and the risk of the operation. This
      preoperative estimation from a 3D model of the liver benefits complicated
      liver resections greatly.},
      file = {Chen2010.pdf:Chen2010.pdf:PDF},
      issn = {1743-9159},
      keywords = {Adult,Case-Control Studies,Contrast Media,Female,Humans,Image Processing,
      Computer-Assisted,Imaging, Three-Dimensional,Iohexol,Iohexol: diagnostic
      use,Liver Neoplasms,Liver Neoplasms: radiography,Liver Neoplasms:
      surgery,Male,Middle Aged,Software,Tomography, X-Ray Computed,User-Computer
      Interface, APP, PLA, GUI, VOR, SUR, HES},
      owner = {thomaskroes},
      pmid = {19944191},
      publisher = {Elsevier Ltd},
      timestamp = {2010.10.22}
    }
  • X. Chen, C. Chui, S. Teoh, and S. Ong, “Automatic Modeling of Anatomical Structures for Biomechanical Analysis and Visualization in a Virtual Spine Workstation,” Reverse Engineering, pp. 1170-1171, 2001.
    [Bibtex]
    @ARTICLE{Chen2001,
      author = {Chen, Xuesong and Chui, Chee-kong and Teoh, Swee-hin and Ong, Sim-heng},
      title = {Automatic Modeling of Anatomical Structures for Biomechanical Analysis
      and Visualization in a Virtual Spine Workstation},
      journal = {Reverse Engineering},
      year = {2001},
      pages = {1170-1171},
      abstract = {Constructing the accurate digital model of vessel networks is critical
      to vascular tissue engineering, in which the segmentation of vessel
      plays an important role. However, the existing segmentation methods
      are not able to achieve the goal of accurate segmentation of vessel
      networks. This paper presents the development of a method for vessel
      segmentation based on a data structure of octree and 3D region growing.
      Firstly, the volume data of vessel images are divided into different
      data groups according to the predetermined depth value of octree,
      and then the optimal slices sequence is defined by analyzing the
      octree’s nodes which contain the vessel region. Then, the vessel
      segmentation is conducted from the vessels images of octree nodes
      based on 3D region growing. Finally, the treated data blocks are
      reset and the segmentation results of the whole volume data are obtained.
      By applying this method to the volume data of vascular images from
      MRA, accurate vessel segmentation results are achieved. This work
      would represent a significant advance for digital modeling of vessel
      networks.},
      file = {Chen2001.pdf:Chen2001.pdf:PDF},
      keywords = {OTS, TEC},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • E. H. Chi, “A taxonomy of visualization techniques using the data state reference model,” in Information Visualization, 2000. InfoVis 2000. IEEE Symposium on, 2000, pp. 69-75.
    [Bibtex]
    @INPROCEEDINGS{Chi2002,
      author = {Chi, E.H.},
      title = {A taxonomy of visualization techniques using the data state reference
      model},
      booktitle = {Information Visualization, 2000. InfoVis 2000. IEEE Symposium on},
      year = {2000},
      pages = {69 -75},
      abstract = {In previous work, researchers have attempted to construct taxonomies
      of information visualization techniques by examining the data domains
      that are compatible with these techniques. This is useful because
      implementers can quickly identify various techniques that can be
      applied to their domain of interest. However, these taxonomies do
      not help the implementers understand how to apply and implement these
      techniques. The author extends and proposes a new way to taxonomize
      information visualization techniques by using the Data State Model
      (E.H. Chi and J.T. Reidl, 1998). In fact, as the taxonomic analysis
      in the paper will show, many of the techniques share similar operating
      steps that can easily be reused. The paper shows that the Data State
      Model not only helps researchers understand the space of design,
      but also helps implementers understand how information visualization
      techniques can be applied more broadly},
      file = {Chi2002.pdf:Chi2002.pdf:PDF},
      keywords = {Data State Model;data domains;data state reference model;information
      visualization techniques;operating steps;taxonomic analysis;data
      models;data visualisation;interactive systems;},
      owner = {thomaskroes},
      timestamp = {2010.11.18}
    }
  • T. Chiarelli, E. Lamma, and T. Sansoni, “A fully 3D work context for oral implant planning and simulation,” International Journal of Computer Assisted Radiology and Surgery, vol. 5, pp. 57-67, 2010.
    [Bibtex]
    @ARTICLE{Chiarelli2010,
      author = {Chiarelli, Tommaso and Lamma, Evelina and Sansoni, Tommaso},
      title = {A fully 3D work context for oral implant planning and simulation},
      journal = {International Journal of Computer Assisted Radiology and Surgery},
      year = {2010},
      volume = {5},
      pages = {57 - 67},
      abstract = {Purpose  Most software systems for oral implantology are based on
      a two-dimensional multi-view approach, often accompanied with a surface
      rendered model. Usually they are affected by common errors like anisotropy
      of the volume and distortion on measurements. A more integrated and
      realistic 3D approach for implant surgery is desirable in order to
      gain a deeper and surer knowledge of patient’s anatomy before inserting
      the implants, thus reducing the risk of damaging surrounding structures.
      Methods  We present a 3D software system for oral implant planning
      where computer graphic techniques have been used to create a smooth
      and user-friendly fully integrated 3D environment to work in. Both
      volume isotropy and correctness in measurements are obtained through
      slices interpolation to achieve, respectively, an isotropic voxel
      and the freedom of choosing arbitrarily, during the planning, the
      best cross-sectional plane. Correct orientation of the planned implants
      is also easily computed, by exploiting a radiological mask with radio-opaque
      markers, worn by the patient during the CT scan. Results  Precision
      in measures was validated by considering several different scans
      and comparing the measures achieved with the ones got through the
      common methodology. It has been also calculated error percentages,
      algorithms efficiencies, and performances. Precision achieved outperforms
      usual DentaScan multi-view approach one, and it is comparable with
      or better than that obtained by the DentalVox tool (from 0.16 to
      0.71% error in measures). Conclusions  The proposed software system
      provides a user-friendly, correct and precise work context for oral
      implant planning, avoiding similar software common errors. The 3D
      environment can be also exploited in the final surgical phase, in
      order to provide a flapless surgical guide, through the use of an
      anthropomorphic robot.},
      affiliation = {University of Ferrara Dipartimento di Ingegneria Via Saragat 1 44100
      Ferrara Italy},
      file = {Chiarelli2010.pdf:Chiarelli2010.pdf:PDF},
      issn = {1861-6410},
      issue = {1},
      keyword = {Medicine},
      keywords = {CMS, APP, PLA, SUR},
      owner = {Th},
      publisher = {Springer Berlin / Heidelberg},
      timestamp = {2011.03.04},
      url = {http://dx.doi.org/10.1007/s11548-009-0394-y}
    }
  • L. Chittaro, “Information visualization and its application to medicine,” Artificial Intelligence in Medicine, vol. 22, iss. 2, pp. 81-88, 2001.
    [Bibtex]
    @ARTICLE{Chittaro2001,
      author = {Luca Chittaro},
      title = {Information visualization and its application to medicine},
      journal = {Artificial Intelligence in Medicine},
      year = {2001},
      volume = {22},
      pages = {81 - 88},
      number = {2},
      abstract = {This paper provides an introduction to the field of information visualization
      (IV) and a discussion of its application to medical systems. More
      specifically, it aims at: (i) defining what IV is and what are its
      goals (ii) highlighting the similarities and differences between
      IV and traditional medical imaging (iii) illustrating the potential
      of IV for medical applications by examining several examples of implemented
      systems and (iv) giving some general indications about the purposes
      and the effective exploitation of an IV component into a medical
      system.},
      file = {Chittaro2001.pdf:Chittaro2001.pdf:PDF},
      issn = {0933-3657},
      keywords = {Information visualization},
      owner = {thomaskroes},
      timestamp = {2011.01.06}
    }
  • J. Y. Choi, J. H. Choi, N. K. Kim, Y. Kim, J. K. Lee, M. K. Kim, J. H. Lee, and M. J. Kim, “Analysis of errors in medical rapid prototyping models,” International journal of oral and maxillofacial surgery, vol. 31, iss. 1, pp. 23-32, 2002.
    [Bibtex]
    @ARTICLE{Choi2002,
      author = {Choi, J.Y. and Choi, J.H. and Kim, N.K. and Kim, Y. and Lee, J.K.
      and Kim, M.K. and Lee, J.H. and Kim, M.J.},
      title = {Analysis of errors in medical rapid prototyping models},
      journal = {International journal of oral and maxillofacial surgery},
      year = {2002},
      volume = {31},
      pages = {23 - 32},
      number = {1},
      file = {Choi2002.pdf:Choi2002.pdf:PDF},
      issn = {0901-5027},
      keywords = {TRM, CMS},
      owner = {Thomas},
      publisher = {Elsevier},
      timestamp = {2011.02.15}
    }
  • J. S. Chou, S. Y. J. Chen, G. S. Sudakoff, K. R. Hoffmann, C. T. Chen, and A. H. Dachman, “Image fusion for visualization of hepatic vasculature and tumors,” in Proceedings of SPIE, vol. 2434, p. 157, 1995.
    [Bibtex]
    @CONFERENCE{Chou1995,
      author = {Chou, J.S. and Chen, S.Y.J. and Sudakoff, G.S. and Hoffmann, K.R.
      and Chen, C.T. and Dachman, A.H.},
      title = {Image fusion for visualization of hepatic vasculature and tumors},
      booktitle = {Proceedings of SPIE},
      year = {1995},
      volume = {2434},
      pages = {157},
      file = {Chou1995.pdf:Chou1995.pdf:PDF},
      keywords = {TEC, HES, SUR},
      owner = {Thomas},
      timestamp = {2011.02.01}
    }
  • Y. Chou, S. Sun, and Y. Chiu, “Full-Sized 3D Preoperative Planning System of the Calcaneal Osteotomy Surgery with Computer-Aided Technology,” in Biomedical Engineering and Informatics, 2009. BMEI ’09. 2nd International Conference on, 2009, pp. 1-4.
    [Bibtex]
    @INPROCEEDINGS{Chou2009,
      author = {Yi-Jiun Chou and Shun-Ping Sun and Yi-Hsin Chiu},
      title = {Full-Sized 3D Preoperative Planning System of the Calcaneal Osteotomy
      Surgery with Computer-Aided Technology},
      booktitle = {Biomedical Engineering and Informatics, 2009. BMEI '09. 2nd International
      Conference on},
      year = {2009},
      pages = {1 -4},
      month = {October},
      abstract = {This study presents a revolutionary computer-aided surgery planning
      and simulating system under computer-based environment for the calcaneal
      osteotomy surgery. This system uses the full-scale 3D reverse engineering
      technique in designing and developing preoperative planning system
      for the calcaneal osteotomy surgery. The planning system provides
      full-sized three-dimensional images of the calcaneus and the interior
      measurements of the calcaneus from various cutting planes. This study
      applies computer-assisted technology to integrate different software's
      function into a surgical planning system. These functions include
      3D image model capturing, cutting, moving, rotating and measurement
      for relevant foot anatomy, and can be integrated as the user's function.
      Furthermore, the system is computer-based and computer-assisted technology.
      Surgeons can utilize it as part of preoperative planning to develop
      efficient operative procedures. This system also has databank that
      can update and extend, and will provide the clinical cases to different
      users for experience learning.},
      file = {:Chou2009.pdf:PDF},
      keywords = {CT;calcaneal osteotomy surgery;computer-aided surgery planning;foot
      anatomy;full-scale 3D reverse engineering technique;full-sized three-dimensional
      images;preoperative planning system;simulating system;bone;computerised
      tomography;medical image processing;surgery;, OTS, PLA, SUR},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • M. Christie, P. Olivier, and J. M. Normand, “Camera control in computer graphics,” in Computer Graphics Forum, vol. 27, iss. 8, pp. 2197-2218, 2008.
    [Bibtex]
    @CONFERENCE{Christie2008,
      author = {Christie, M. and Olivier, P. and Normand, J.M.},
      title = {Camera control in computer graphics},
      booktitle = {Computer Graphics Forum},
      year = {2008},
      volume = {27},
      number = {8},
      pages = {2197--2218},
      organization = {Wiley Online Library},
      file = {Christie2008.pdf:Christie2008.pdf:PDF},
      issn = {1467-8659},
      owner = {thomaskroes},
      timestamp = {2011.01.26}
    }
  • M. Cimerman and A. Kristan, “Preoperative planning in pelvic and acetabular surgery: the value of advanced computerised planning modules.,” Injury, vol. 38, iss. 4, pp. 442-9, 2007.
    [Bibtex]
    @ARTICLE{Cimerman2007,
      author = {Cimerman, Matej and Kristan, Anze},
      title = {Preoperative planning in pelvic and acetabular surgery: the value
      of advanced computerised planning modules.},
      journal = {Injury},
      year = {2007},
      volume = {38},
      pages = {442-9},
      number = {4},
      month = {April},
      abstract = {An experimental computer program for virtual operation of fractured
      pelvis and acetabulum based on real data of the fracture is presented.
      The program consists of two closely integrated tools, the 3D viewing
      tools and the surgeon simulation tools. Using 3D viewing tools the
      virtual model of a fractured pelvis is built. This procedure is performed
      by computer engineers. Data from CT of a real injury in DICOM format
      are used. With segmentation process each fracture segment becomes
      a separate object and is assigned a different colour. The virtual
      object is then transferred to the personal computer of the surgeon.
      Bone fragments can be moved and rotated in all three planes and reduction
      is performed. After reduction, fixation can be undertaken. The appropriate
      ostheosynthetic material can be chosen. Contouring of the plate is
      performed automatically to the reduced pelvis. The screws can be
      inserted into the plate or across the fracture. The direction and
      length of the screws is controlled by turning the pelvis or by making
      bones more transparent. The modeling of the plate in all three axes
      can be recorded as the exact length of the screws. There is also
      a simulation tool for intraoperative C-arm imaging in all directions.
      All the steps of the procedure are recorded and printed out. Postoperative
      matching of real operation and virtual procedure is also possible.
      We operated on 10 cases using virtual preoperative planning and found
      it very useful. The international study is still in progress. One
      case is presented demonstrating all the possibilities of the virtual
      planning and surgery. The presented computer program is an easily
      usable application which brings significant value and new opportunities
      in clinical practice (preoperative planning), teaching and research.},
      file = {Cimerman2007.pdf:Cimerman2007.pdf:PDF},
      issn = {0020-1383},
      keywords = {Acetabulum,Acetabulum: injuries,Acetabulum: surgery,Computer Simulation,Europe,Fractures,
      Bone,Fractures, Bone: surgery,Humans,Imaging, Three-Dimensional,Israel,Male,Middle
      Aged,Pelvic Bones,Pelvic Bones: injuries,Pelvic Bones: surgery,Planning
      Techniques,Software,Surgery, Computer-Assisted,Surgery, Computer-Assisted:
      methods,Tomography, X-Ray Computed,User-Computer Interface, APP,
      PLA, SUR, OTS, SLR, VOR},
      owner = {thomaskroes},
      pmid = {17400226},
      timestamp = {2010.10.22}
    }
  • P. Cinquin, E. Bainville, C. Barbe, E. Bittar, V. Bouchard, L. Bricault, G. Champleboux, M. Chenin, L. Chevalier, Y. Delnondedieu, L. Desbat, V. Dessenne, A. Hamadeh, D. Henry, N. Laieb, S. Lavallee, J. M. Lefebvre, F. Leitner, Y. Menguy, F. Padieu, O. Peria, A. Poyet, M. Promayon, S. Rouault, P. Sautot, J. Troccaz, and P. Vassal, “Computer assisted medical interventions,” Engineering in Medicine and Biology Magazine, IEEE, vol. 14, iss. 3, pp. 254-263, 1995.
    [Bibtex]
    @ARTICLE{Cinquin1995,
      author = {Cinquin, P. and Bainville, E. and Barbe, C. and Bittar, E. and Bouchard,
      V. and Bricault, L. and Champleboux, G. and Chenin, M. and Chevalier,
      L. and Delnondedieu, Y. and Desbat, L. and Dessenne, V. and Hamadeh,
      A. and Henry, D. and Laieb, N. and Lavallee, S. and Lefebvre, J.M.
      and Leitner, F. and Menguy, Y. and Padieu, F. and Peria, O. and Poyet,
      A. and Promayon, M. and Rouault, S. and Sautot, P. and Troccaz, J.
      and Vassal, P.},
      title = {Computer assisted medical interventions},
      journal = {Engineering in Medicine and Biology Magazine, IEEE},
      year = {1995},
      volume = {14},
      pages = {254 - 263},
      number = {3},
      month = {May/June},
      abstract = {Many medical or surgical interventions can benefit from the use of
      computers. Through progress of technology and growing consciousness
      of the possibilities of real clinical improvements with computers,
      what was in the past the privilege of very few operations (mostly
      stereotactic neurosurgery) is now entering many surgical specialities.
      Although many technical issues remain to be solved, there is virtually
      no limit to the introduction of computers and robots in any surgical
      speciality. This tendency can take on the most varied forms. At the
      authors' institute, three golden rules have guided the computer assisted
      medical interventions (CAMI) project for about ten years: 1) conceive
      systems for which the clinical value is well defined; 2) develop
      generic tools that can be applied to many different clinical applications;
      and 3) provide efficient collaboration between the surgeon and the
      system through simple interfaces},
      file = {Cinquin1995.pdf:Cinquin1995.pdf:PDF},
      issn = {0739-5175},
      keywords = {10 y;clinical value;computer assisted medical interventions;efficient
      collaboration;generic tools;simple interfaces;stereotactic neurosurgery;surgical
      specialities;technical issues;medical computing;surgery;, REV},
      owner = {thomaskroes},
      timestamp = {2011.01.11}
    }
  • J. Clarke, A. Deakin, A. Nicol, and F. Picard, “Measuring the positional accuracy of computer assisted surgical tracking systems,” Computer Aided Surgery, pp. 1-5, 2010.
    [Bibtex]
    @ARTICLE{Clarke2010,
      author = {Clarke, JV and Deakin, AH and Nicol, AC and Picard, F.},
      title = {Measuring the positional accuracy of computer assisted surgical tracking
      systems},
      journal = {Computer Aided Surgery},
      year = {2010},
      pages = {1 - 5},
      number = {0},
      issn = {1092-9088},
      publisher = {Informa UK Ltd UK}
    }
  • A. C. F. Colchester, J. Zhao, K. S. Holton-Tainter, C. J. Henri, N. Maitland, P. T. E. Roberts, C. G. Harris, and R. J. Evans, “Development and preliminary evaluation of VISLAN, a surgical planning and guidance system using intra-operative video imaging,” Medical Image Analysis, vol. 1, iss. 1, pp. 73-90, 1996.
    [Bibtex]
    @ARTICLE{Colchester1996,
      author = {Alan C.F. Colchester and Jason Zhao and Kerrie S. Holton-Tainter
      and Christopher J. Henri and Neil Maitland and Patricia T.E. Roberts
      and Christopher G. Harris and Richard J. Evans},
      title = {Development and preliminary evaluation of VISLAN, a surgical planning
      and guidance system using intra-operative video imaging},
      journal = {Medical Image Analysis},
      year = {1996},
      volume = {1},
      pages = {73 - 90},
      number = {1},
      abstract = {VISLAN is an integrated neurosurgical planning and guidance system.
      New segmentation and rendering techniques have been incorporated.
      A stereo video system is used intra-operatively and fulfils four
      roles. First, the video display is overlaid with graphical outlines
      showing the position of the planned craniotomy or the target (enhanced
      reality displays). Second, a skin surface patch is reconstructed
      from the stereo video images using patterned light (mean errors of
      surface point location are <0.15 mm). Third, a freely mobile, hand-held
      localizer is tracked in real time (position errors are <0.5 mm and
      with improved calibration <0.2 mm), with its position superimposed
      on the pre-operative patient representation to assist surgical guidance.
      Fourth, markers fixed to the skull bone next to the cranial opening
      are used to detect intra-operative movement and to update registration.
      Initial results from phantom experiments show an overall system accuracy
      of better than 0.9 mm for intra-operative localization of features
      defined in pre-operative images. The prototype system has been tested
      during six neurosurgical operations with very good results.},
      file = {:Colchester1996.pdf:PDF},
      issn = {1361-8415},
      keywords = {enhanced reality, APP, PLA, GUI, NES, STV, SUR},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • T. F. Cootes and C. J. Taylor, “Statistical models of appearance for medical image analysis and computer vision,” in Proc. SPIE Medical Imaging, vol. 4322, pp. 236-248, 2001.
    [Bibtex]
    @CONFERENCE{Cootes2001,
      author = {Cootes, T.F. and Taylor, C.J.},
      title = {Statistical models of appearance for medical image analysis and computer
      vision},
      booktitle = {Proc. SPIE Medical Imaging},
      year = {2001},
      volume = {4322},
      pages = {236 - 248},
      organization = {Citeseer},
      file = {Cootes2001.pdf:Cootes2001.pdf:PDF},
      keywords = {TEC},
      owner = {thomaskroes},
      timestamp = {2011.01.03}
    }
  • F. A. Cosío and M. A. P. Castañeda, “Computer Assisted Surgery,” AIP Conference Proceedings, vol. 682, iss. 1, pp. 38-45, 2003.
    [Bibtex]
    @ARTICLE{Cosío2003,
      author = {F. Arámbula Cosío and M. A. Padilla Castañeda},
      title = {Computer Assisted Surgery},
      journal = {AIP Conference Proceedings},
      year = {2003},
      volume = {682},
      pages = {38 - 45},
      number = {1},
      abstract = {Computer assisted surgery (CAS) systems can provide different levels
      of assistance to a surgeon during training and execution of a surgical
      procedure. This is done through the integration of : measurements
      taken on medical images; computer graphics techniques; and positioning
      or tracking mechanisms which accurately locate the surgical instruments
      inside the operating site. According to the type of assistance that
      is provided to the surgeon, CAS systems can be classified as: Image
      guided surgery systems; Assistant robots for surgery; and Training
      simulators for surgery. In this work are presented the main characteristics
      of CAS systems. It is also described the development of a computer
      simulator for training on Transurethral Resection of the Prostate
      (TURP) based on a computer model of the prostate gland which is able
      to simulate, in real time, deformations and resections of tissue.
      The model is constructed as a 3D mesh with physical properties such
      as elasticity. We describe the main characteristics of the prostate
      model and its performance. The prostate model will also be used in
      the development of a CAS system designed to assist the surgeon during
      a real TURP procedure. The system will provide 3D views of the shape
      of the prostate of the patient, and the position of the surgical
      instrument during the operation. The development of new computer
      graphics models which are able to simulate, in real time, the mechanical
      behavior of an organ during a surgical procedure, can improve significantly
      the training and execution of other minimally invasive surgical procedures
      such as laparoscopic gall bladder surgery.},
      editor = {Mercedes Rodriguez-Villafuerte and Arnulfo Martinez-Davalos and Neil
      Bruce and Isabel Gamboa-deBuen},
      file = {Cosío2003.pdf:Cosío2003.pdf:PDF},
      keywords = {surgery; medical computing; biomedical imaging; computer graphics;
      medical robotics, REV},
      owner = {thomaskroes},
      publisher = {AIP},
      timestamp = {2010.11.09}
    }
  • S. Cotin, H. Delingette, and N. Ayache, “Real-time elastic deformations of soft tissues for surgery simulation,” Visualization and Computer Graphics, IEEE Transactions on, vol. 5, iss. 1, pp. 62-73, 1999.
    [Bibtex]
    @ARTICLE{Cotin1999,
      author = {Cotin, S. and Delingette, H. and Ayache, N.},
      title = {Real-time elastic deformations of soft tissues for surgery simulation},
      journal = {Visualization and Computer Graphics, IEEE Transactions on},
      year = {1999},
      volume = {5},
      pages = {62 - 73},
      number = {1},
      abstract = {We describe a novel method for surgery simulation including a volumetric
      model built from medical images and an elastic modeling of the deformations.
      The physical model is based on elasticity theory which suitably links
      the shape of deformable bodies and the forces associated with the
      deformation. A real time computation of the deformation is possible
      thanks to a preprocessing of elementary deformations derived from
      a finite element method. This method has been implemented in a system
      including a force feedback device and a collision detection algorithm.
      The simulator works in real time with a high resolution liver model},
      file = {Cotin1999.pdf:Cotin1999.pdf:PDF},
      issn = {1077-2626},
      keywords = {collision detection algorithm;deformable bodies;elastic modeling;elasticity
      theory;elementary deformations;finite element method;force feedback
      device;high resolution liver model;medical images;physical model;preprocessing;real
      time computation;real time elastic deformations;soft tissues;surgery
      simulation;volumetric model;biomechanics;computer graphics;digital
      simulation;elastic deformation;finite element analysis;medical computing;real-time
      systems;surgery;, APP, VOR, OCS, PRS},
      owner = {Thomas},
      timestamp = {2011.02.15}
    }
  • H. Courtecuisse, H. Jung, J. Allard, C. Duriez, D. Y. Lee, and S. Cotin, “GPU-based real-time soft tissue deformation with cutting and haptic feedback,” Progress in Biophysics and Molecular Biology, vol. 103, iss. 2-3, pp. 159-168, 2010.
    [Bibtex]
    @ARTICLE{Courtecuisse2010,
      author = {Hadrien Courtecuisse and Hoeryong Jung and Jérémie Allard and Christian
      Duriez and Doo Yong Lee and Stéphane Cotin},
      title = {GPU-based real-time soft tissue deformation with cutting and haptic
      feedback},
      journal = {Progress in Biophysics and Molecular Biology},
      year = {2010},
      volume = {103},
      pages = {159 - 168},
      number = {2-3},
      abstract = {This article describes a series of contributions in the field of real-time
      simulation of soft tissue biomechanics. These contributions address
      various requirements for interactive simulation of complex surgical
      procedures. In particular, this article presents results in the areas
      of soft tissue deformation, contact modelling, simulation of cutting,
      and haptic rendering, which are all relevant to a variety of medical
      interventions. The contributions described in this article share
      a common underlying model of deformation and rely on GPU implementations
      to significantly improve computation times. This consistency in the
      modelling technique and computational approach ensures coherent results
      as well as efficient, robust and flexible solutions.},
      file = {Courtecuisse2010.pdf:Courtecuisse2010.pdf:PDF},
      issn = {0079-6107},
      keywords = {Biomechanics, PRS, TEC},
      owner = {thomaskroes},
      timestamp = {2011.01.26}
    }
  • B. Couteau, P. Mansat, E. Estivalèzes, R. Darmana, M. Mansat, and J. Egan, “Finite element analysis of the mechanical behavior of a scapula implanted with a glenoid prosthesis.,” Clinical biomechanics (Bristol, Avon), vol. 16, iss. 7, pp. 566-75, 2001.
    [Bibtex]
    @ARTICLE{Couteau2001,
      author = {Couteau, B and Mansat, P and Estival\`{e}zes, E and Darmana, R and
      Mansat, M and Egan, J},
      title = {Finite element analysis of the mechanical behavior of a scapula implanted
      with a glenoid prosthesis.},
      journal = {Clinical biomechanics (Bristol, Avon)},
      year = {2001},
      volume = {16},
      pages = {566-75},
      number = {7},
      month = {August},
      abstract = {The objective of the present study was to analyze the mechanical effect
      of some of the surgical variables encountered during shoulder arthroplasty
      using the finite element method. The effect of one eccentric load
      case, cement thickness and conformity has been investigated. DESIGN:
      A 3D finite element model of a healthy cadaveric scapula implanted
      with an anatomically shaped glenoid has been developed from computed
      tomography (CT) images. BACKGROUND: Glenoid component fixation can
      present the most difficult problem in total shoulder arthroplasty,
      loosening of this component remains one of the main complications.
      METHODS: The 3D finite element model was first validated by comparison
      with experimental measurements and by fitting of the mechanical properties
      of the cortical bone. Then the articular pressure location, the surface
      contact geometry and the cement thickness have been analyzed to observe
      their effect on stresses and displacements at the interfaces and
      within the scapular bone. RESULTS: The antero-posterior bending of
      the scapula was a notable feature and this was accentuated when an
      eccentric load was applied. The gleno-humeral contact area had a
      major role on the stress level in the supporting structures though
      but not on the global displacements. Varying the cement mantle modified
      stresses according to the load case and it essentially changed the
      latero-medial displacement of the cement relatively to the bone.
      CONCLUSIONS: This analysis provided an insight into the mechanical
      effects of an implanted scapula according to different parameters
      related to implantation technique. RELEVANCE: Results emphasized
      the role of some of the parameters a clinician may face. They demonstrated
      the importance of the humeral head centering in the horizontal plane.
      Conformity decreasing may involve drastic increase of stresses within
      structures and a thick cement mantle is not necessarily advantageous
      relatively to the stresses at the cement/bone interface.},
      file = {Couteau2001.pdf:Couteau2001.pdf:PDF},
      issn = {0268-0033},
      keywords = {Arthroplasty,Arthroplasty: instrumentation,Articular,Biomechanics,Bone
      Cements,Cadaver,Computer Simulation,Equipment Design,Finite Element
      Analysis,Humans,Joint Prosthesis,Mechanical,Prosthesis Failure,Range
      of Motion,Scapula,Scapula: physiopathology,Scapula: radiography,Scapula:
      surgery,Shoulder Joint,Shoulder Joint: physiopathology,Shoulder Joint:
      radiography,Shoulder Joint: surgery,Stress,Tensile Strength,Tomography,X-Ray
      Computed, OCS, TEC},
      owner = {thomaskroes},
      pmid = {11470298},
      timestamp = {2010.10.22}
    }
  • C. Cutting, F. Bookstein, B. Grayson, L. Fellingham, and J. McCarthy, “Three-dimensional computer-assisted design of craniofacial surgical procedures: optimization and interaction with cephalometric and CT-based models.,” Plastic and reconstructive surgery, vol. 77, iss. 6, p. 877, 1986.
    [Bibtex]
    @ARTICLE{Cutting1986,
      author = {Cutting, C. and Bookstein, FL and Grayson, B. and Fellingham, L.
      and McCarthy, JG},
      title = {Three-dimensional computer-assisted design of craniofacial surgical
      procedures: optimization and interaction with cephalometric and CT-based
      models.},
      journal = {Plastic and reconstructive surgery},
      year = {1986},
      volume = {77},
      pages = {877},
      number = {6},
      issn = {0032-1052},
      owner = {Thomas}
    }
  • P. S. D’Urso, T. M. Barker, W. J. Earwaker, L. J. Bruce, R. L. Atkinson, M. W. Lanigan, J. F. Arvier, and D. J. Effeney, “Stereolithographic biomodelling in cranio-maxillofacial surgery: a prospective trial,” Journal of Cranio-Maxillofacial Surgery, vol. 27, iss. 1, pp. 30-37, 1999.
    [Bibtex]
    @ARTICLE{Durso1999,
      author = {Paul S. D'Urso and Timothy M. Barker and W. John Earwaker and Lain
      J. Bruce and R. Leigh Atkinson and Michael W. Lanigan and John F.
      Arvier and David J. Effeney},
      title = {Stereolithographic biomodelling in cranio-maxillofacial surgery:
      a prospective trial},
      journal = {Journal of Cranio-Maxillofacial Surgery},
      year = {1999},
      volume = {27},
      pages = {30 - 37},
      number = {1},
      abstract = {Stereolithographic (SL) biomodelling is a new technology that
      allows three-dimensional (3-D) computed tomography (CT) data to be
      used to manufacture solid plastic replicas of anatomical structures
      (biomodels). A prospective trial with the objective of assessing
      the utility of biomodelling in complex surgery has been performed.
      Forty-five patients with craniofacial, maxillofacial, skull base
      cervical spinal pathology were selected. 3-D CT or MR scanning was
      performed and the data of interest were edited and converted into
      a form acceptable to the rapid prototyping technology SL. The data
      were used to guide a laser to selectively polymerize photosensitive
      resin to manufacture biomodels. The biomodels were used by surgeons
      for patient education, diagnosis and operative planning. An assessment
      protocol was used to test the hypothesis that 'biomodels in addition
      to standard imaging had greater utility in the surgery performed
      than the standard imaging alone'. Biomodels significantly improved
      operative planning (images 44.09%, images with biomodel 82.21%, P<.01)
      and diagnosis (images 65.63%, images with biomodel 95.23%, P<.01).
      Biomodels were found to improve measurement accuracy significantly
      (image measurement error 44.14%, biomodel measurement error 7.91%,
      P<.05). Surgeons estimated that the use of biomodels reduced operating
      time by a mean of 17.63% and were cost effective at a mean price
      of $1031 AUS. Patients found the biomodels to be helpful for informed
      consent (images 63.53%, biomodels 88.54%, P<.001). Biomodelling is
      an intuitive, user-friendly technology that facilitated diagnosis
      and operative planning. Biomodels allowed surgeons to rehearse procedures
      readily and improved communication between colleagues and patients.},
      file = {Durso1999.pdf:Durso1999.pdf:PDF},
      issn = {1010-5182},
      keywords = {APP, RPP, PLA, TRM},
      owner = {thomaskroes},
      timestamp = {2011.01.10}
    }
  • P. S. D’Urso, W. J. Earwaker, T. M. Barker, M. J. Redmond, R. G. Thompson, D. J. Effeney, and F. H. Tomlinson, “Custom cranioplasty using stereolithography and acrylic,” British Journal of Plastic Surgery, vol. 53, iss. 3, pp. 200-204, 2000.
    [Bibtex]
    @ARTICLE{Durso2000,
      author = {P. S. D'Urso and W. J. Earwaker and T. M. Barker and M. J. Redmond
      and R. G. Thompson and D. J. Effeney and F. H. Tomlinson},
      title = {Custom cranioplasty using stereolithography and acrylic},
      journal = {British Journal of Plastic Surgery},
      year = {2000},
      volume = {53},
      pages = {200 - 204},
      number = {3},
      abstract = {Numerous methods of cranioplasty have been described. Customisation
      and prefabrication have been reported to reduce operating time and
      improve cosmesis. An original technique for the manufacture of customised
      cranioplastic implants has been developed and tested in 30 patients.
      Thirty patients requiring cranioplasties were selected. Data acquired
      from computed tomography (CT) were used to manufacture exact plastic
      replicas (biomodels) of craniotomy defects and master cranioplastic
      implants using the rapid prototyping technology of stereolithography
      (SL). The three-dimensional (3D) imaging techniques of mirroring
      and interpolation were used to extrapolate on existing anatomy to
      design the master implants. The master implants were hand finished
      to fit the defect in the corresponding cranial biomodel exactly and
      were then used to create a cavity mould. The mould was used to cast
      thermally polymerised custom acrylic implants. The surgeons reported
      that the customised implants reduced operating time, afforded excellent
      cosmesis and were cost effective. The patients reported that the
      opportunity to see the biomodel and implant preoperatively improved
      their understanding of the procedure. Two complications were noted,
      one infection and one implant required significant trimming. The
      simultaneous manufacture of the master implant (male) and biomodel
      (female) components from SL allowed custom accurate implants to be
      manufactured. Disadvantages identified were the time required for
      computer manipulations of the CT data (up to 2 h), difficulty in
      assessing the accuracy of the computer generated master as a 3D rendering,
      the potential for SL parts to warp, manufacturing time (minimum 2
      days) and the cost of approximately $1300 US per case ($1000 for
      the SL biomodel and $300 for the acrylic casting). },
      file = {Durso2000.pdf:Durso2000.pdf:PDF},
      issn = {0007-1226},
      keywords = {cranioplasty, biomodelling, stereolithography, rapid prototyping,
      customised implant., APP, RPP, CMS},
      owner = {Thomas},
      timestamp = {2011.02.15}
    }
  • B. Dagon, C. Baur, and V. Bettschart, “Real-time update of 3D deformable models for computer aided liver surgery,” in Pattern Recognition, 2008. ICPR 2008. 19th International Conference on, 2009, pp. 1-4.
    [Bibtex]
    @CONFERENCE{Dagon2009,
      author = {Dagon, B. and Baur, C. and Bettschart, V.},
      title = {Real-time update of 3D deformable models for computer aided liver
      surgery},
      booktitle = {Pattern Recognition, 2008. ICPR 2008. 19th International Conference
      on},
      year = {2009},
      pages = {1 - 4},
      organization = {IEEE},
      abstract = {Providing accurate image-guidance for soft-tissue interventions remains
      a complex task. Most of the time, preoperative models and planning
      data are no more valid during the surgical process due to motions
      and deformations of the organ of interest. In this paper, two core
      components of a computer-assisted system for liver surgery are presented.
      One is an ultrasound segmentation technique that allows for automatic
      liver vessels detection and the other is a mass-spring based deformable
      model used to update the shape of 3D models. Both have real-time
      capabilities and enable to update intraoperatively the data created
      during the planning phase.},
      file = {Dagon2009.pdf:Dagon2009.pdf:PDF},
      issn = {1051-4651},
      keywords = {TEC, PRS, HES},
      owner = {thomaskroes},
      timestamp = {2010.11.08}
    }
  • B. Davies, “A review of robotics in surgery,” Proceedings of the Institution of Mechanical Engineers, Part H: Journal of Engineering in Medicine, vol. 214, iss. 1, pp. 129-140, 2000.
    [Bibtex]
    @ARTICLE{Davies2000,
      author = {Davies, B.},
      title = {A review of robotics in surgery},
      journal = {Proceedings of the Institution of Mechanical Engineers, Part H: Journal
      of Engineering in Medicine},
      year = {2000},
      volume = {214},
      pages = {129 - 140},
      number = {1},
      file = {Davies2000.pdf:Davies2000.pdf:PDF},
      issn = {0954-4119},
      keywords = {REV},
      owner = {Thomas},
      publisher = {Prof Eng Publishing},
      timestamp = {2011.02.14}
    }
  • L. T. De Paolis, M. Pulimeno, and G. Aloisio, “Visualization and interaction systems for surgical planning,” in Information Technology Interfaces (ITI), 2010 32nd International Conference on, 2010, pp. 269-274.
    [Bibtex]
    @INPROCEEDINGS{Paolis2010a,
      author = {De Paolis, L.T. and Pulimeno, M. and Aloisio, G.},
      title = {Visualization and interaction systems for surgical planning},
      booktitle = {Information Technology Interfaces (ITI), 2010 32nd International
      Conference on},
      year = {2010},
      pages = {269 - 274},
      month = {June},
      abstract = {The visualization of 3D models of the patient's body emerges as a
      priority in surgery. In this paper two different visualization and
      interaction systems are presented: a virtual interface and a low
      cost multi-touch screen. The systems are able to interpret in real-time
      the user's movements and can be used in the surgical pre-operative
      planning for the navigation and manipulation of 3D models of the
      human body built from CT images. The surgeon can visualize both the
      traditional patient information, as the CT image dataset, and the
      3D models of the patient's organs built from these images. The developed
      virtual interface is the first prototype of a system designed to
      avoid any contact with the computer so that the surgeon will be able
      to visualize models of the patient's organs and to interact with
      these moving the finger in the free space. The developed multi-touch
      screen provides a user interface customized for doctor requirements
      that allows users to interact at the same time with 3D models of
      the human body built from CT images for surgical pre-operative planning
      purpose.},
      file = {Paolis2010a.pdf:Paolis2010a.pdf:PDF},
      issn = {1330-1012},
      keywords = {3D models visualization;CT image;doctor requirement;interaction system;multitouch
      screen;patients body;surgical preoperative planning;user interface;virtual
      interface;visualization system;computerised tomography;human computer
      interaction;interactive systems;medical image processing;solid modelling;surgery;user
      interfaces;, TEC},
      owner = {Thomas},
      timestamp = {2011.03.09}
    }
  • L. T. De Paolis, G. Aloisio, and M. Pulimeno, “A navigator of the patient’s anatomy model for intra-operative surgical guidance,” in Virtual Environments Human-Computer Interfaces and Measurement Systems (VECIMS), 2010 IEEE International Conference on, 2010, pp. 47-51.
    [Bibtex]
    @INPROCEEDINGS{Paolis2010b,
      author = {De Paolis, Lucio T. and Aloisio, Giovanni and Pulimeno, Marco},
      title = {A navigator of the patient's anatomy model for intra-operative surgical
      guidance},
      booktitle = {Virtual Environments Human-Computer Interfaces and Measurement Systems
      (VECIMS), 2010 IEEE International Conference on},
      year = {2010},
      pages = {47 - 51},
      month = {September},
      abstract = {Minimally invasive surgery offers advantages that make it the best
      choice for many diseases. The Virtual Reality technology gives a
      great support to this kind of surgical procedures through medical
      image processing and visualization, 3D organ's reconstruction and
      intra-operative surgical guidance. In this paper is presented an
      advanced visualization and navigation system and the surgeon has
      the possibility to visualize both the traditional patient information,
      as the CT image set, and a 3D model of the patient's anatomy built
      from this. Two different visualization modalities are available in
      real time and dynamically. According to the surgeon needs, it is
      possible to obtain the automatic reslicing of the orthogonal planes
      in order to have an accurate visualization of the 3D model and slices
      exactly next to the actual position of the surgical instrument tip.
      In addition, it is possible to activate the clipping modality that
      allows cutting the 3D model in correspondence of a chosen visualization
      plane. The system can be used as support for the diagnosis, for the
      surgical preoperative planning and also for an image-guided surgery.},
      file = {Paolis2010b.pdf:Paolis2010b.pdf:PDF},
      issn = {1944-9429},
      keywords = {APP, SUR, SLR, PLA, GUI},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • E. De Momi and others, “Automatic extraction of the mid-facial plane for cranio-maxillofacial surgery planning,” International journal of oral and maxillofacial surgery, vol. 35, iss. 7, pp. 636-642, 2006.
    [Bibtex]
    @ARTICLE{De2006,
      author = {De Momi, E. and others},
      title = {Automatic extraction of the mid-facial plane for cranio-maxillofacial
      surgery planning},
      journal = {International journal of oral and maxillofacial surgery},
      year = {2006},
      volume = {35},
      pages = {636 - 642},
      number = {7},
      issn = {0901-5027},
      keywords = {TEC, CMS},
      owner = {Thomas},
      publisher = {Elsevier},
      timestamp = {2011.02.03}
    }
  • H. Delingette, “Toward realistic soft-tissue modeling in medical simulation,” Proceedings of the IEEE, vol. 86, iss. 3, pp. 512-523, 1998.
    [Bibtex]
    @ARTICLE{Delingette1998,
      author = {Delingette, H.},
      title = {Toward realistic soft-tissue modeling in medical simulation},
      journal = {Proceedings of the IEEE},
      year = {1998},
      volume = {86},
      pages = {512 - 523},
      number = {3},
      month = mar,
      abstract = {Most of today's medical simulation systems are based on geometric
      representations of anatomical structures that take no account of
      their physical nature. Representing physical phenomena and, more
      specifically, the realistic modeling of soft tissue will not only
      improve current medical simulation systems but will considerably
      enlarge the set of applications and the credibility of medical simulation,
      from neurosurgery planning to laparoscopic-surgery simulation. To
      achieve realistic tissue deformation, it is necessary to combine
      deformation accuracy with computer efficiency. On the one hand, biomechanics
      has studied complex mathematical models and produced a large amount
      of experimental data for accurately representing the deformation
      of soft tissue. On the other hand, computer graphics has proposed
      many algorithms for the real-time computation of deformable bodies,
      often at the cost of ignoring the physics principles. The author
      surveys existing models of deformation in medical simulation and
      analyze the impediments to combining computer-graphics representations
      with biomechanical models. In particular, the different geometric
      representations of deformable tissue are compared in relation to
      the tasks of real-time deformation, tissue cutting, and force-feedback
      interaction. Last, the author inspects the potential of medical simulation
      under the development of this key technology},
      file = {Delingette1998.pdf:Delingette1998.pdf:PDF},
      issn = {0018-9219},
      keywords = {anatomical structures;biomechanics;complex mathematical models;computer
      efficiency;computer graphics;computer-graphics representations;deformable
      tissue;deformation accuracy;force-feedback interaction;geometric
      representations;laparoscopic surgery simulation;medical simulation;neurosurgery
      planning;physical phenomena;real-time computation;real-time deformation;realistic
      soft-tissue modeling;realistic tissue deformation;tissue cutting;biomechanics;computer
      graphics;deformation;digital simulation;finite element analysis;medical
      image processing;planning;, TEC},
      owner = {Thomas},
      timestamp = {2011.02.15}
    }
  • H. Delingette and N. Ayache, “Hepatic surgery simulation,” Communications of the ACM, vol. 48, iss. 2, pp. 31-36, 2005.
    [Bibtex]
    @ARTICLE{Delingette2005,
      author = {Delingette, H. and Ayache, N.},
      title = {Hepatic surgery simulation},
      journal = {Communications of the ACM},
      year = {2005},
      volume = {48},
      pages = {31 - 36},
      number = {2},
      file = {Delingette2005.pdf:Delingette2005.pdf:PDF},
      issn = {0001-0782},
      keywords = {HES, PLA, PRS, SUR},
      owner = {Thomas},
      publisher = {ACM},
      timestamp = {2011.02.03}
    }
  • S. L. Delp, D. S. Stulberg, B. Davies, F. Picard, and F. Leitner, “Computer assisted knee replacement,” Clinical orthopaedics and related research, vol. 354, p. 49, 1998.
    [Bibtex]
    @ARTICLE{Delp1998,
      author = {Delp, S.L. and Stulberg, D.S. and Davies, B. and Picard, F. and Leitner,
      F.},
      title = {Computer assisted knee replacement},
      journal = {Clinical orthopaedics and related research},
      year = {1998},
      volume = {354},
      pages = {49},
      keywords = {APP, OTS},
      owner = {thomaskroes},
      timestamp = {2011.01.12}
    }
  • E. Demomi, E. Pavan, B. Motyl, C. Bandera, and C. Frigo, “Hip joint anatomy virtual and stereolithographic reconstruction for preoperative planning of total hip replacement,” International Congress Series, vol. 1281, pp. 708-712, 2005.
    [Bibtex]
    @ARTICLE{Demomi2005,
      author = {Demomi, E and Pavan, E and Motyl, B and Bandera, C and Frigo, C},
      title = {Hip joint anatomy virtual and stereolithographic reconstruction for
      preoperative planning of total hip replacement},
      journal = {International Congress Series},
      year = {2005},
      volume = {1281},
      pages = {708-712},
      month = {May},
      abstract = {The purpose of the present work was to develop a tool for preoperatively
      planning the Total Hip Replacement (THR). Starting from the MR images,
      the 3D surface model of both the pelvis and the femur was built and
      the surgical operation was virtually performed. Data coming from
      gait analysis were added to visualize the physiologic movement of
      the hip joint. The resulting triangular mesh was sufficiently accurate
      to allow the building of the stereolithographic model of the joint
      by means of rapid prototyping technique. The plastic bones allow
      the user to have an enhanced vision of the surgical procedure to
      be performed.},
      file = {Demomi2005.pdf:Demomi2005.pdf:PDF},
      issn = {0531-5131},
      keywords = {magnetic resonance imaging,rapid prototyping,surgical planning,total
      hip replacement, APP, PLA, OTS, SUR, RPP},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • C. Dick, J. Georgii, R. Burgkart, and R. Westermann, “Stress tensor field visualization for implant planning in orthopedics.,” IEEE transactions on visualization and computer graphics, vol. 15, iss. 6, pp. 1399-406, 2009.
    [Bibtex]
    @ARTICLE{Dick2009a,
      author = {Dick, Christian and Georgii, Joachim and Burgkart, Rainer and Westermann,
      R\"{u}diger},
      title = {Stress tensor field visualization for implant planning in orthopedics.},
      journal = {IEEE transactions on visualization and computer graphics},
      year = {2009},
      volume = {15},
      pages = {1399-406},
      number = {6},
      abstract = {We demonstrate the application of advanced 3D visualization techniques
      to determine the optimal implant design and position in hip joint
      replacement planning. Our methods take as input the physiological
      stress distribution inside a patient's bone under load and the stress
      distribution inside this bone under the same load after a simulated
      replacement surgery. The visualization aims at showing principal
      stress directions and magnitudes, as well as differences in both
      distributions. By visualizing changes of normal and shear stresses
      with respect to the principal stress directions of the physiological
      state, a comparative analysis of the physiological stress distribution
      and the stress distribution with implant is provided, and the implant
      parameters that most closely replicate the physiological stress state
      in order to avoid stress shielding can be determined. Our method
      combines volume rendering for the visualization of stress magnitudes
      with the tracing of short line segments for the visualization of
      stress directions. To improve depth perception, transparent, shaded,
      and antialiased lines are rendered in correct visibility order, and
      they are attenuated by the volume rendering. We use a focus+context
      approach to visually guide the user to relevant regions in the data,
      and to support a detailed stress analysis in these regions while
      preserving spatial context information. Since all of our techniques
      have been realized on the GPU, they can immediately react to changes
      in the simulated stress tensor field and thus provide an effective
      means for optimal implant selection and positioning in a computational
      steering environment.},
      file = {Dick2009a.pdf:Dick2009a.pdf:PDF},
      issn = {1077-2626},
      keywords = {Biomechanics,Computer Graphics,Diagnostic Imaging,Femur Head,Femur
      Head: surgery,Humans,Image Processing, Computer-Assisted,Image Processing,
      Computer-Assisted: methods,Imaging, Three-Dimensional,Imaging, Three-Dimensional:
      methods,Orthopedics,Orthopedics: methods,Stress, Mechanical, OTS,
      OCS, TEC, GPU},
      owner = {thomaskroes},
      pmid = {19834214},
      timestamp = {2010.10.22}
    }
  • C. Dick, J. Georgii, R. Burgkart, and R. Westermann, “A 3D Simulation System for Hip Joint Replacement Planning,” , pp. 2-5, 2009.
    [Bibtex]
    @ARTICLE{Dick2009b,
      author = {Dick, C and Georgii, J and Burgkart, R and Westermann, R},
      title = {A 3D Simulation System for Hip Joint Replacement Planning},
      year = {2009},
      pages = {2-5},
      abstract = {We present a tool for hip joint replacement planning that allows the
      surgeon to rank the long-term stability of an implant, and we show
      the application of this tool in a clinical routine setting. The tool
      allows the surgeon to predict the load transmission of an implant
      to the patient-specific bone. It is used to select of a set of available
      implants the one that most closely replicates the physiological stress
      state in order to avoid stress shielding. Advanced simulation technology
      is combined with 3D visualization options to provide quick and
      intuitive understanding of the generated results. Interactive feedback
      rates and intuitive control mechanisms facilitate the finding of
      an optimal implant shape with respect to the patient’s specific
      anatomy. By restricting to a predetermined implant position, which
      is in accordance with the selected position in a real surgery, the
      surgeon can quickly analyze a number of different implants under
      varying load conditions.},
      file = {Dick2009b.pdf:Dick2009b.pdf:PDF},
      keywords = {computational,finite elements,implant planning,orthopedics,steering,stress
      visualization, OTS, OCS, TEC, GPU},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • C. Dick, J. Georgii, R. Burgkart, and R. Westermann, “Computational Steering for Patient-Specific Implant Planning in Orthopedics,” Orthopedics, 2008.
    [Bibtex]
    @ARTICLE{Dick2008,
      author = {Dick, Christian and Georgii, Joachim and Burgkart, Rainer and Westermann,
      R\"{u}diger},
      title = {Computational Steering for Patient-Specific Implant Planning in Orthopedics},
      journal = {Orthopedics},
      year = {2008},
      abstract = {Fast and reliable methods for predicting and monitoring in-vivo bone
      strength are of great importance for hip joint replacement. To avoid
      adaptive remodeling with cortical thinning and increased porosity
      of the bone due to stress shielding, in a preoperative planning process
      the optimal implant design, size, and position has to be determined.
      This process involves interactive implant positioning within the
      bone as well as simulation and visualization of the stress within
      bone and implant due to exerting forces. In this paper, we present
      a prototype of such a visual analysis tool, which, to our best knowledge,
      provides the first computational steering environment for optimal
      implant selection and positioning. This prototype considers patient-specific
      biomechanical properties of the bone to select the optimal implant
      design, size, and position according to the prediction of individual
      load transfer from the implant to the bone. We have developed a fast
      and stable multigrid finite-element solver for hexahedral elements,
      which enables interactive simulation of the stress distribution within
      the bone and the implant. By utilizing a real-time GPU-method to
      detect elements that are covered by the moving implant, we can automatically
      generate computational models from patient-specific CT scans in real-time,
      and we can instantly feed these models into the simulation process.
      Hardware-accelerated volume ray-casting, which is extended by a new
      method to accurately visualize sub-hexahedron implant boundaries,
      provides a new quality of orthopedic surgery planning.},
      file = {Dick2008.pdf:Dick2008.pdf:PDF},
      keywords = {GPU, OCS, OTS},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • C. A. Dietrich, “Real-time interactive visualization and manipulation of the volumetric data using GPU-based methods,” Proceedings of SPIE, pp. 181-192, 2004.
    [Bibtex]
    @ARTICLE{Dietrich2004,
      author = {Dietrich, Carlos A.},
      title = {Real-time interactive visualization and manipulation of the volumetric
      data using GPU-based methods},
      journal = {Proceedings of SPIE},
      year = {2004},
      pages = {181-192},
      abstract = {This work presents a set of tools developed to provide 3D visualization
      and interaction with large volumetric data that relies on recent
      programmable capabilities of consumer-level graphics cards. We are
      exploiting the programmable control of calculations performed by
      the graphics hardware for generating the appearance of each pixel
      on the screen to develop real-time, interactive volume manipulation
      tools. These tools allow real-time modification of visualization
      parameters, such as color and opacity classification or the selection
      of a volume of interest, extending the benefit of hardware acceleration
      beyond display, namely for computation of voxel visibility. Three
      interactive tools are proposed: a cutting tool that allows the selection
      of a convex volume of interest, an eraser-like tool to eliminate
      non-relevant parts of the image and a digger-like tool that allows
      the user to eliminate layers of a 3D image. To interactively apply
      the proposed tools on a volume, we are making use of some so known
      user interaction techniques, as the ones used in 2D painting systems.
      Our strategy is to minimize the user entrainment efforts involved
      in the tools learning. Finally, we illustrate the potential application
      of the conceived tools for preoperative planning of liver surgery
      and for liver vascular anatomy study. Preliminary results concerning
      the system performance and the images quality and resolution are
      presented and discussed.},
      file = {Dietrich2004.pdf:Dietrich2004.pdf:PDF},
      issn = {0277786X},
      keywords = {interactive volume clipping,medical,real-time volume rendering,texture-based
      volume visualization, TEC},
      owner = {thomaskroes},
      publisher = {Spie},
      timestamp = {2010.10.22}
    }
  • A. M. DiGioia III, B. Jaramaz, and B. D. Colgan, “Computer assisted orthopaedic surgery: image guided and robotic assistive technologies,” Clinical orthopaedics and related research, vol. 354, p. 8, 1998.
    [Bibtex]
    @ARTICLE{Digioia1998,
      author = {DiGioia III, A.M. and Jaramaz, B. and Colgan, B.D.},
      title = {Computer assisted orthopaedic surgery: image guided and robotic assistive
      technologies},
      journal = {Clinical orthopaedics and related research},
      year = {1998},
      volume = {354},
      pages = {8},
      keywords = {APP, OTS},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
  • A. M. DiGioia III, B. Jaramaz, C. Nikou, R. Labarca, J. Moody, and B. Colgan, “Surgical navigation for total hip replacement with the use of hipnav,” Operative Techniques in Orthopaedics, vol. 10, iss. 1, pp. 3-8, 2000.
    [Bibtex]
    @ARTICLE{Digioiaiii2000,
      author = {DiGioia III, A.M. and Jaramaz, B and Nikou, C and Labarca, R and Moody,
      J and Colgan, B},
      title = {Surgical navigation for total hip replacement with the use of hipnav},
      journal = {Operative Techniques in Orthopaedics},
      year = {2000},
      volume = {10},
      pages = {3-8},
      number = {1},
      month = {January},
      abstract = {HipNav, an image-guided surgical navigation system, is presented.
      The system was developed to measure and guide the placement of prosthetic
      components in total hip replacement surgery (THR), it incorporates
      a 3-dimensional preoperative planner with a simulator and an intraoperative
      surgical navigator. Coupling optimized preoperative planning with
      accurate surgical navigation will assist the surgeon in properly
      orienting the components, minimizing the risk of impingement and
      dislocation. Intraoperatively, the system uses image-guided tools
      to assist in accurate placement of the acetabular cup. The acetabular
      implant is placed in the planned position with the aid of a simple
      "aim-and-shoot" interface. The actual measurements of version and
      abduction are also provided. The use of this new class of operative
      sensors has been incorporated into a regular surgical routine. There
      are few additional steps necessary, therefore, for the image-guided
      procedure, which does not add significantly to the total time of
      surgery. We expect that these tools will lead to less invasive and
      more accurate THR surgery and directly relate patient outcomes to
      measured surgical practice.},
      file = {Digioiaiii2000.pdf:Digioiaiii2000.pdf:PDF},
      issn = {10486666},
      keywords = {3-dimensional planner,4 the leading,a significant clinical problem,after
      total hip replacement,dislocation continues to be,mechanisms of dislocation
      are,orientation,prosthetic impingement,simulation,surgery,surgical
      navigation,thr,total hip replacement, APP, PLA, GUI, OCS},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • C. N. van Dijk and P. A. J. de Leeuw, “Imaging from an orthopaedic point of view: What the orthopaedic surgeon expects from the radiologist?,” European Journal of Radiology, vol. 62, iss. 1, pp. 2-5, 2007.
    [Bibtex]
    @ARTICLE{VanDijk2007,
      author = {C. Niek van Dijk and Peter A.J. de Leeuw},
      title = {Imaging from an orthopaedic point of view: What the orthopaedic surgeon
      expects from the radiologist?},
      journal = {European Journal of Radiology},
      year = {2007},
      volume = {62},
      pages = {2 - 5},
      number = {1},
      abstract = {The paradigm of coping with sometimes gross pathology, while having
      some small and at first sight insignificant lesions demands for accurate
      radiological detection and orthopaedic treatment makes it interesting
      and challenging to be involved in the treatment of professional athletes.
      In the diagnostic process we differentiate between acute, posttraumatic
      and overuse injuries. We must realize the importance of reproducible
      routine X-rays as a first step in the diagnostic process. In case
      of additional diagnostics, appropriate consultation between the orthopaedic
      surgeon and the radiologist is essential in order to determine the
      best strategy.},
      file = {VanDijk2007.pdf:VanDijk2007.pdf:PDF},
      issn = {0720-048X},
      keywords = {Additional diagnostics},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • M. R. DiSilvestro and J. T. Sherman, System and method for performing a computer assisted orthopaedic surgical procedure, Google Patents, 2005.
    [Bibtex]
    @MISC{Disilvestro2005,
      author = {DiSilvestro, M.R. and Sherman, J.T.},
      title = {System and method for performing a computer assisted orthopaedic
      surgical procedure},
      month = {September},
      year = {2005},
      owner = {Thomas},
      publisher = {Google Patents},
      timestamp = {2011.02.03}
    }
  • K. Doi, “Computer-aided diagnosis in medical imaging: historical review, current status and future potential,” Computerized medical imaging and graphics: the official journal of the Computerized Medical Imaging Society, vol. 31, iss. 4-5, p. 198, 2007.
    [Bibtex]
    @ARTICLE{Doi2007,
      author = {Doi, K.},
      title = {Computer-aided diagnosis in medical imaging: historical review, current
      status and future potential},
      journal = {Computerized medical imaging and graphics: the official journal of
      the Computerized Medical Imaging Society},
      year = {2007},
      volume = {31},
      pages = {198},
      number = {4-5},
      file = {Doi2007.pdf:Doi2007.pdf:PDF},
      keywords = {REV, IMP},
      owner = {thomaskroes},
      publisher = {NIH Public Access},
      timestamp = {2010.12.15}
    }
  • P. Dokládal, C. Lohou, L. Perroton, and G. Bertrand, “Liver Blood Vessels Extraction by a 3-D Topological Approach,” in Medical Image Computing and Computer-Assisted Intervention – MICCAI’99, C. Taylor and A. Colchester, Eds., Springer Berlin / Heidelberg, 1999, vol. 1679, pp. 98-105.
    [Bibtex]
    @INCOLLECTION{Dokladal1999,
      author = {Dokládal, Petr and Lohou, Christophe and Perroton, Laurent and Bertrand,
      Gilles},
      title = {Liver Blood Vessels Extraction by a 3-D Topological Approach},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention – MICCAI’99},
      publisher = {Springer Berlin / Heidelberg},
      year = {1999},
      editor = {Taylor, Chris and Colchester, Alain},
      volume = {1679},
      series = {Lecture Notes in Computer Science},
      pages = {98 - 105},
      abstract = {We propose in this paper a new approach to segmentation of 3-D tomography
      of liver vessel system. The approach is based on a point-wise reconstruction
      with restriction to simple points manipulation to preserve the homotopy.
      We propose and compare two dual methods of the vessel system extraction.
      The efficiency of these methods is demonstrated on a raw X-ray tomography
      image. The desired level of detail in the vein ramification system
      is obtained by adjusting one parameter controlling the admitted level
      of light intensity. The paper is organized as follows: In the introductory
      section we present the main principles of the approach using simple
      points. We explain the algorithm as well as the aspects of efficient
      computer implementation. Experiment results for different parameter
      values are given together with discussion and conclusions.},
      affiliation = {ESIEE Cité Descartes, B.P. 99, 93 162 Noisy-le-Grand Cedex France
      France},
      file = {Dokladal1999.pdf:Dokladal1999.pdf:PDF},
      keywords = {TEC, IMP},
      owner = {Thomas},
      timestamp = {2011.02.01}
    }
  • J. Dong and M. Zhou, “Two Parameter Image Representation in 3D Biomedical Visualization with CT Images,” in Computer Science and Information Technology – Spring Conference, 2009. IACSITSC ’09. International Association of, 2009, pp. 506-509.
    [Bibtex]
    @INPROCEEDINGS{Dong2009,
      author = {Jianmin Dong and Mingquan Zhou},
      title = {Two Parameter Image Representation in 3D Biomedical Visualization
      with CT Images},
      booktitle = {Computer Science and Information Technology - Spring Conference,
      2009. IACSITSC '09. International Association of},
      year = {2009},
      pages = {506 -509},
      month = {April},
      abstract = {Three space dimension model representation plays a key role in medical
      image processing, surgical plan decision, computer-aided diagnosis
      (CAD) and other medical activities. A three space dimension face
      model of biomedical visualization is proposed in the paper through
      two parameter image representation, which are based on these three
      facts: 1. three space dimension simple surface could be represented
      by two free parameters; 2. any three space dimension simple surface
      could be two smooth manifold in three space dimension embed problem;
      3. simple surface visualization is one important aspects of biomedical
      visualization. Without extraction of the volume data from these images
      from computer computed tomography (CT), the method is accomplished
      through extraction of contours from these images and map these points
      into their two parameter space while their geometric properties are
      easy to be calculated and scale properties relationships between
      their three dimension space are also to be determined. Given the
      extraction algorithms of contours from these images and the map which
      the method used, geometric and scale relationship are listed in the
      paper as well as the implements of the method through numerical tests.},
      file = {:Dong2009.pdf:PDF},
      keywords = {3D biomedical visualization;CT images;computed tomography;computer
      aided diagnosis;image representation;medical image processing;surface
      visualization;surgical plan decision;computerised tomography;data
      visualisation;medical image processing;},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • J. Dorsey, A. Edelman, H. W. Jensen, J. Legakis, and H. Pedersen, “Modeling and rendering of weathered stone,” in Proceedings of the 26th annual conference on Computer graphics and interactive techniques, 1999, pp. 225-234.
    [Bibtex]
    @CONFERENCE{Dorsey1999,
      author = {Dorsey, J. and Edelman, A. and Jensen, H.W. and Legakis, J. and Pedersen,
      H.K.},
      title = {Modeling and rendering of weathered stone},
      booktitle = {Proceedings of the 26th annual conference on Computer graphics and
      interactive techniques},
      year = {1999},
      pages = {225--234},
      organization = {ACM Press/Addison-Wesley Publishing Co.},
      file = {Dorsey1999.pdf:Dorsey1999.pdf:PDF},
      isbn = {0201485605},
      owner = {thomaskroes},
      timestamp = {2011.01.11}
    }
  • E. Dubois, L. Nigay, J. Troccaz, O. Chavanon, L. Carrat, and others, “Classification space for augmented surgery, an augmented reality case study,” in Conference Proceedings of Interact, vol. 99, pp. 353-359, 1999.
    [Bibtex]
    @CONFERENCE{Dubois1999,
      author = {Dubois, E. and Nigay, L. and Troccaz, J. and Chavanon, O. and Carrat,
      L. and others},
      title = {Classification space for augmented surgery, an augmented reality
      case study},
      booktitle = {Conference Proceedings of Interact},
      year = {1999},
      volume = {99},
      pages = {353 - 359},
      organization = {Citeseer},
      abstract = {One of the recent design goals in Human Computer Interaction has been
      to extend the sensory-motor capabilities of computer systems to combine
      the real and the virtual in order to assist the user in his environment.
      Such systems are called Augmented Reality (AR). Although AR systems
      are becoming more prevalent we still do not have a clear understanding
      of this interaction paradigm. In this paper we propose OPAS as a
      generic framework for classifying existing AR systems. Computer Assisted
      Medical Interventions (CAMI), for which the added value of AR has been
      demonstrated by experience,
      are discussed in light of OPAS. We illustrate OPAS using our system,
      CASPER (Computer ASsisted PERicardial puncture), a CAMI system which
      assists in surgical procedures (pericardial punctures).},
      file = {Dubois1999.pdf:Dubois1999.pdf:PDF},
      keywords = {APP, AUR, TAS},
      owner = {thomaskroes},
      timestamp = {2010.11.24}
    }
  • P. Dumpuri, L. W. Clements, B. M. Dawant, and M. I. Miga, “Model-updated image-guided liver surgery: Preliminary results using surface characterization.,” Progress in biophysics and molecular biology, iss. September, pp. 1-11, 2010.
    [Bibtex]
    @ARTICLE{Dumpuri2010,
      author = {Dumpuri, Prashanth and Clements, Logan W and Dawant, Benoit M and
      Miga, Michael I},
      title = {Model-updated image-guided liver surgery: Preliminary results using
      surface characterization.},
      journal = {Progress in biophysics and molecular biology},
      year = {2010},
      pages = {1-11},
      number = {September},
      month = {September},
      abstract = {The current protocol for image guidance in open abdominal liver tumor
      removal surgeries involves a rigid registration between the patient's
      operating room space and the pre-operative diagnostic image-space.
      Systematic studies have shown that the liver can deform up to 2cm
      during surgeries in a non-rigid fashion thereby compromising the
      accuracy of these surgical navigation systems. Compensating for intra-operative
      deformations using mathematical models has shown promising results.
      In this work, we follow up the initial rigid registration with a
      computational approach that is geared towards minimizing the residual
      closest point distances between the un-deformed pre-operative surface
      and the rigidly registered intra-operative surface. We also use a
      surface Laplacian equation based filter that generates a realistic
      deformation field. Preliminary validation of the proposed computational
      framework was performed using phantom experiments and clinical trials.
      The proposed framework improved the rigid registration errors for
      the phantom experiments on average by 43\%, and 74\% using partial
      and full surface data, respectively. With respect to clinical data,
      it improved the closest point residual error associated with rigid
      registration by 68\% on average for the clinical cases. These results
      are highly encouraging and suggest that computational models can
      be used to increase the accuracy of image-guided open abdominal liver
      tumor removal surgeries.},
      file = {Dumpuri2010.pdf:Dumpuri2010.pdf:PDF},
      issn = {1873-1732},
      keywords = {finite element analysis,image-guided liver surgeries,linear elastic
      model,methods, APP, HES, GUI},
      owner = {thomaskroes},
      pmid = {20869385},
      publisher = {Elsevier Ltd},
      timestamp = {2010.10.22}
    }
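    The Dumpuri et al. entry above describes minimizing residual closest-point distances between the pre-operative surface and the rigidly registered intra-operative surface. The following minimal Python sketch (not the authors' implementation; the point arrays are placeholders) computes that residual with a k-d tree:

      import numpy as np
      from scipy.spatial import cKDTree

      def closest_point_residual(preop_pts, intraop_pts):
          """Mean distance from each intra-operative surface point to the
          nearest pre-operative surface point (both given as N x 3 arrays)."""
          tree = cKDTree(preop_pts)            # spatial index over the pre-op surface
          dists, _ = tree.query(intraop_pts)   # nearest-neighbour distance per point
          return dists.mean()

      # Toy example: points on a unit sphere versus a slightly shifted subset.
      rng = np.random.default_rng(0)
      preop = rng.normal(size=(2000, 3))
      preop /= np.linalg.norm(preop, axis=1, keepdims=True)
      intraop = preop[:500] + np.array([0.01, 0.0, 0.0])
      print(closest_point_residual(preop, intraop))
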
  • D. Ebert and P. Rheingans, “Volume illustration: non-photorealistic rendering of volume models,” in Visualization 2000. Proceedings, 2000, pp. 195-202.
    [Bibtex]
    @INPROCEEDINGS{Ebert2000,
      author = {Ebert, D. and Rheingans, P.},
      title = {Volume illustration: non-photorealistic rendering of volume models},
      booktitle = {Visualization 2000. Proceedings},
      year = {2000},
      pages = {195 -202},
      month = {October},
      abstract = {Accurately and automatically conveying the structure of a volume model
      is a problem that has not been fully solved by existing volume rendering
      approaches. Physics-based volume rendering approaches create images
      which may match the appearance of translucent materials in nature
      but may not embody important structural details. Transfer function
      approaches allow flexible design of the volume appearance but generally
      require substantial hand-tuning for each new data set in order to
      be effective. We introduce the volume illustration approach, combining
      the familiarity of a physics-based illumination model with the ability
      to enhance important features using non-photorealistic rendering
      techniques. Since the features to be enhanced are defined on the
      basis of local volume characteristics rather than volume sample values,
      the application of volume illustration techniques requires less manual
      tuning than the design of a good transfer function. Volume illustration
      provides a flexible unified framework for enhancing structural perception
      of volume models through the amplification of features and the addition
      of illumination effects.},
      file = {Ebert2000.pdf:Ebert2000.pdf:PDF},
      keywords = {data visualization;feature amplification;flexible design;important
      feature enhancement;lighting models;local volume characteristics;manual
      tuning;nonphotorealistic rendering;physics-based illumination model;shading;structural
      details;structural perception;transfer function;translucent materials;volume
      appearance;volume illustration;volume models;volume rendering approaches;data
      visualisation;image enhancement;lighting;rendering (computer graphics);solid
      modelling;},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
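    Ebert and Rheingans define enhancements on local volume characteristics such as the gradient rather than on raw sample values. As a rough illustration of that idea (not the paper's full illumination model; the array and parameter names volume, kgc, kgs and kge are made up), opacity can be modulated by normalized gradient magnitude:

      import numpy as np

      def boundary_enhanced_opacity(volume, base_opacity=0.05, kgc=0.2, kgs=1.0, kge=1.0):
          """Scale a constant base opacity by a function of normalized gradient magnitude."""
          gz, gy, gx = np.gradient(volume.astype(np.float32))
          gmag = np.sqrt(gx**2 + gy**2 + gz**2)
          gmag /= gmag.max() + 1e-12                      # normalize to [0, 1]
          return base_opacity * (kgc + kgs * gmag**kge)   # homogeneous regions stay faint

      vol = np.zeros((32, 32, 32), dtype=np.float32)
      vol[8:24, 8:24, 8:24] = 1.0                         # a cube; its faces get emphasized
      alpha = boundary_enhanced_opacity(vol)
      print(alpha.min(), alpha.max())
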
  • B. T. Edwards, G. M. Gartsman, D. P. O’Connor, and V. K. Sarin, “Safety and utility of computer-aided shoulder arthroplasty,” Journal of Shoulder and Elbow Surgery, vol. 17, iss. 3, pp. 503-508, 2008.
    [Bibtex]
    @ARTICLE{Edwards2008,
      author = {T. Bradley Edwards and Gary M. Gartsman and Daniel P. O'Connor and
      Vineet K. Sarin},
      title = {Safety and utility of computer-aided shoulder arthroplasty},
      journal = {Journal of Shoulder and Elbow Surgery},
      year = {2008},
      volume = {17},
      pages = {503 - 508},
      number = {3},
      abstract = {This study evaluated the safety and utility of a novel, image-free,
      shoulder navigation system in a cadaver and in an initial cohort
      of shoulder arthroplasty patients. Shoulder arthroplasty was performed
      on a cadaver and 27 patients using an image-free navigation system
      (NaviPro(TM); Kinamed Navigation Systems LLC, Camarillo, CA). Optical
      trackers were attached to the proximal humerus and the coracoid process.
      Prior to and following humeral head resection, the anatomic neck
      axis (retroversion, inclination) and humeral head diameter were measured
      with the navigation system. Native glenoid surface orientation was
      registered, and a navigation tracker was attached to the glenoid
      reamer. The navigation system recorded change in inclination and
      version relative to the native glenoid during reaming. The cadaver
      results demonstrated that the trackers did not impede surgical performance
      and that system accuracy was 2.6° ± 2.5°. In the clinical series,
      the navigation system reported the anatomic humeral neck measurements
      (retroversion 30.0° ± 16.0°; inclination 137.0° ± 11.7°), the humeral
      head diameters (major axis 46.2 mm ± 4.8 mm; minor axis 43.2 mm ±
      3.8 mm), the humeral neck resection angles (retroversion 29.9° ±
      15.1° and inclination 135.6° ± 9.1°), and glenoid reaming orientation
      relative to the native glenoid (+3.0° ± 6.3° of version; -6.7° ±
      4.4° of inclination). This initial clinical experience with computer-aided
      shoulder navigation demonstrates that the procedure is safe and can
      provide valuable intraoperative measurements. With an anatomic humeral
      implant system, the navigation system provides real-time feedback
      on the humeral resection as it relates to anatomic neck geometry.
      The system also provides real-time angulation of the glenoid reamer
      relative to preoperative glenoid deformity.},
      file = {Edwards2008.pdf:Edwards2008.pdf:PDF},
      issn = {1058-2746},
      keywords = {APP, OTS, GUI},
      owner = {thomaskroes},
      timestamp = {2010.12.22}
    }
  • A. El-Bialy, “Towards a Complete Computer Dental Treatment System,” in Biomedical Engineering Conference, 2008. CIBEC 2008. Cairo International, 2008, pp. 1-8.
    [Bibtex]
    @INPROCEEDINGS{Bialy2008,
      author = {El-Bialy, A.},
      title = {Towards a Complete Computer Dental Treatment System},
      booktitle = {Biomedical Engineering Conference, 2008. CIBEC 2008. Cairo International},
      year = {2008},
      pages = {1 -8},
      month = {December},
      abstract = {This paper introduces the production of 3D virtual clinic to help
      dentists in their treatment. To achieve this goal, different scientific
      areas are integrated such as: computer graphics, pattern recognition,
      computer vision, information technology and finite element machine
      (FEM). The proposed system includes the following tools; patient
      information system, automatic 2-D cephalometrics, 3-D cephalometrics,
      3-D visualization, surgical planning, 3-D registration, soft tissue
      simulation, pre and post treatment analysis, etc. Acquisition of
      the 3D virtual model of the patient is the foundation of this work.
      The CT slides of the patient's head (soft and hard tissues) are collected
      in a DICOM (Digital Imaging and Communication in Medicine) format.
      These slides are then compiled to build up the patient's 3D model.
      Using ray-casting volume rendering technique, a digital computer
      based 3D replica is built. The theme also includes the detection
      of defective skeletal and dental areas by applying the appropriate
      diagnostic procedures. Based upon the diagnostic outcome, the necessary
      changes are executed; manipulation of the virtual 3D image and evaluation
      of the final result after rectification will be possible.},
      file = {:Bialy2008.pdf:PDF},
      keywords = {2D cephalometrics;3D cephalometrics;3D image registration;3D image
      visualization;3D virtual clinic;DICOM format;computer dental treatment
      system;computer graphics;computer vision;dentistry;digital imaging
      and communication in medicine format;finite element machine;information
      technology;patient diagnostics;patient information system;pattern
      recognition;ray casting volume rendering;soft tissue simulation;surgical
      planning;PACS;dentistry;finite element analysis;image registration;medical
      image processing;patient diagnosis;patient treatment;virtual reality;,
      CMS, APP, PLA, VOR, SUR, OCS},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • R. Ellis, “From scans to sutures: computer-assisted orthopedic surgery in the twenty-first century.,” Conference proceedings : … Annual International Conference of the IEEE Engineering in Medicine and Biology Society. IEEE Engineering in Medicine and Biology Society. Conference, vol. 7, pp. 7234-7, 2005.
    [Bibtex]
    @ARTICLE{Ellis2005,
      author = {Ellis, R},
      title = {From scans to sutures: computer-assisted orthopedic surgery in the
      twenty-first century.},
      journal = {Conference proceedings : ... Annual International Conference of the
      IEEE Engineering in Medicine and Biology Society. IEEE Engineering
      in Medicine and Biology Society. Conference},
      year = {2005},
      volume = {7},
      pages = {7234-7},
      month = {January},
      abstract = {Computer-assisted surgery is the process of using medical images,
      such as CT scans, X-ray fluoroscopy, or 3D ultrasound, to improve
      patient care. A typical surgical procedure begins by acquiring and
      processing a CT scan with specially developed image-analysis software.
      A surgeon then performs a "virtual surgery" on the patient to develop
      a preoperative plan. In the operating room the medical image is registered
      to the patient's anatomy by finding an optimal rigid-body transformation.
      This transformation allows an object or motion in one coordinate
      frame to be represented in the other frame, and thus a surgeon can
      visualize the location of an instrument deep within concealed anatomy
      while avoiding structures at risk. The operating surgeon can also
      use computer-tracked fluoroscopy or ultrasound for 3D guidance. For
      the past seven years, our interdisciplinary research group has been
      investigating fundamental problems in orthopedic surgery of bones
      and joints. This paper is an overview of the problems and solutions
      that have been tested in a set of pilot clinical trials in which
      we have treated more than 250 patients for early or advanced arthritis,
      poorly healed bone fractures, and treatment of deep bone tumors.},
      file = {Ellis2005.pdf:Ellis2005.pdf:PDF},
      issn = {1557-170X},
      keywords = {REV, OTS},
      owner = {thomaskroes},
      pmid = {17281949},
      timestamp = {2010.10.22}
    }
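    Ellis notes that the image is registered to the patient by finding an optimal rigid-body transformation. A minimal sketch of the standard least-squares (SVD) solution for corresponding point sets follows; it is generic, not tied to the paper's system, and the example points are synthetic:

      import numpy as np

      def rigid_register(image_pts, patient_pts):
          """Return R (3x3) and t (3,) minimizing ||R @ image + t - patient||^2
          for corresponding N x 3 point sets."""
          ci, cp = image_pts.mean(axis=0), patient_pts.mean(axis=0)
          H = (image_pts - ci).T @ (patient_pts - cp)                   # cross-covariance
          U, _, Vt = np.linalg.svd(H)
          D = np.diag([1.0, 1.0, np.sign(np.linalg.det(Vt.T @ U.T))])   # avoid reflections
          R = Vt.T @ D @ U.T
          t = cp - R @ ci
          return R, t

      pts = np.random.rand(10, 3)
      R_true = np.array([[0.0, -1.0, 0.0], [1.0, 0.0, 0.0], [0.0, 0.0, 1.0]])
      moved = pts @ R_true.T + np.array([1.0, 2.0, 3.0])
      R, t = rigid_register(pts, moved)
      print(np.allclose(pts @ R.T + t, moved))
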
  • T. T. Elvins, “A survey of algorithms for volume visualization,” SIGGRAPH Comput. Graph., vol. 26, pp. 194-201, 1992.
    [Bibtex]
    @ARTICLE{Elvins1992,
      author = {Elvins, T. Todd},
      title = {A survey of algorithms for volume visualization},
      journal = {SIGGRAPH Comput. Graph.},
      year = {1992},
      volume = {26},
      pages = {194 - 201},
      month = {August},
      acmid = {142427},
      address = {New York, NY, USA},
      file = {Elvins1992.pdf:Elvins1992.pdf:PDF},
      issn = {0097-8930},
      issue = {3},
      numpages = {8},
      owner = {thomaskroes},
      publisher = {ACM},
      timestamp = {2010.12.07}
    }
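    Among the algorithm families surveyed by Elvins is direct volume rendering by ray casting. A toy front-to-back compositing step along a single ray is sketched below; the sample colours and opacities are invented, and this illustrates only one of the algorithms the survey covers:

      import numpy as np

      def composite_front_to_back(colors, alphas):
          """colors: N x 3, alphas: N, with samples ordered front to back along the ray."""
          out_color, out_alpha = np.zeros(3), 0.0
          for c, a in zip(colors, alphas):
              out_color += (1.0 - out_alpha) * a * c   # contribution still visible through
              out_alpha += (1.0 - out_alpha) * a       # accumulated opacity
              if out_alpha > 0.99:                     # early ray termination
                  break
          return out_color, out_alpha

      colors = np.array([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]])
      alphas = np.array([0.3, 0.5, 0.9])
      print(composite_front_to_back(colors, alphas))
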
  • S. Ershov, K. Kolchin, and K. Myszkowski, “Rendering Pearlescent Appearance Based On Paint-Composition Modelling,” Computer Graphics Forum, vol. 20, iss. 3, pp. 227-238, 2001.
    [Bibtex]
    @ARTICLE{Ershov2001,
      author = {Ershov, Sergey and Kolchin, Konstantin and Myszkowski, Karol},
      title = {Rendering Pearlescent Appearance Based On Paint-Composition Modelling},
      journal = {Computer Graphics Forum},
      year = {2001},
      volume = {20},
      pages = {227 - 238},
      number = {3},
      month = {September},
      abstract = {We describe a new approach to modelling pearlescent paints based on
      decomposing paint layers into stacks of imaginary thin sublayers.
      The sublayers are chosen so thin that multiple scattering can be
      considered across different sublayers, while it can be neglected
      within each of the sublayers. Based on this assumption, an efficient
      recursive procedure of assembling the layers is developed, which
      enables to compute the paint BRDF at interactive speeds. Since
      the proposed paint model connects fundamental optical properties
      of multi-layer pearlescent and metallic paints with their microscopic
      structure, interactive prediction of the paint appearance based on
      its composition becomes possible.},
      file = {:H\:\\Thomas\\PHD\\Literature\\Articles\\Ershov2001.pdf:PDF},
      issn = {0167-7055},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
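    The Ershov et al. abstract describes assembling a paint layer from imaginary thin sublayers by a recursive procedure. A heavily simplified scalar sketch of such layer adding (ignoring directionality and wavelength, so not the paper's BRDF model) combines the reflectance and transmittance of two sublayers including their inter-reflections:

      from functools import reduce

      def add_layers(layer1, layer2):
          """Combine two (reflectance, transmittance) pairs, summing the geometric
          series of bounces between them."""
          R1, T1 = layer1
          R2, T2 = layer2
          denom = 1.0 - R1 * R2
          return R1 + (T1 * T1 * R2) / denom, (T1 * T2) / denom

      # Stack ten identical weakly reflecting sublayers.
      sublayer = (0.05, 0.90)
      R, T = reduce(add_layers, [sublayer] * 10)
      print(round(R, 4), round(T, 4))
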
  • R. Ewers, K. Schicho, G. Undt, F. Wanschitz, M. Truppe, R. Seemann, and A. Wagner, “Basic research and 12 years of clinical experience in computer-assisted navigation technology: a review.,” International journal of oral and maxillofacial surgery, vol. 34, iss. 1, pp. 1-8, 2005.
    [Bibtex]
    @ARTICLE{Ewers2005,
      author = {Ewers, R and Schicho, K and Undt, G and Wanschitz, F and Truppe,
      M and Seemann, R and Wagner, A},
      title = {Basic research and 12 years of clinical experience in computer-assisted
      navigation technology: a review.},
      journal = {International journal of oral and maxillofacial surgery},
      year = {2005},
      volume = {34},
      pages = {1-8},
      number = {1},
      month = {January},
      abstract = {Computer-aided surgical navigation technology is commonly used in
      craniomaxillofacial surgery. It offers substantial improvement regarding
      esthetic and functional aspects in a range of surgical procedures.
      Based on augmented reality principles, where the real operative site
      is merged with computer generated graphic information, computer-aided
      navigation systems were employed, among other procedures, in dental
      implantology, arthroscopy of the temporomandibular joint, osteotomies,
      distraction osteogenesis, image guided biopsies and removals of foreign
      bodies. The decision to perform a procedure with or without computer-aided
      intraoperative navigation depends on the expected benefit to the
      procedure as well as on the technical expenditure necessary to achieve
      that goal. This paper comprises the experience gained in 12 years
      of research, development and routine clinical application. One hundred
      and fifty-eight operations with successful application of surgical
      navigation technology--divided into five groups--are evaluated regarding
      the criteria "medical benefit" and "technical expenditure" necessary
      to perform these procedures. Our results indicate that the medical
      benefit is likely to outweight the expenditure of technology with
      few exceptions (calvaria transplant, resection of the temporal bone,
      reconstruction of the orbital floor). Especially in dental implantology,
      specialized software reduces time and additional costs necessary
      to plan and perform procedures with computer-aided surgical navigation.},
      file = {Ewers2005.pdf:Ewers2005.pdf:PDF},
      issn = {0901-5027},
      keywords = {Computer Graphics,Dental Research,Humans,Imaging, Three-Dimensional,Oral
      Surgical Procedures,Oral Surgical Procedures: methods,Surgery, Computer-Assisted,Technology,
      High-Cost,User-Computer Interface, REV, CMS},
      owner = {thomaskroes},
      pmid = {15617960},
      timestamp = {2010.10.22}
    }
  • P. Favre, B. Moor, J. G. Snedeker, and C. Gerber, “Influence of component positioning on impingement in conventional total shoulder arthroplasty.,” Clinical biomechanics (Bristol, Avon), vol. 23, iss. 2, pp. 175-83, 2008.
    [Bibtex]
    @ARTICLE{Favre2008,
      author = {Favre, Philippe and Moor, Beat and Snedeker, Jess G and Gerber, Christian},
      title = {Influence of component positioning on impingement in conventional
      total shoulder arthroplasty.},
      journal = {Clinical biomechanics (Bristol, Avon)},
      year = {2008},
      volume = {23},
      pages = {175-83},
      number = {2},
      month = {February},
      abstract = {Clinical experience suggests that component impingement can lead to
      eccentric implant loading and thereby cause glenoid loosening in
      conventional total shoulder arthroplasty. This study tests the hypothesis
      that certain implant component positioning configurations may lead
      to impingement within the physiological range of motion. METHODS:
      A rigid-body model of the shoulder comprising the scapula and humerus
      was constructed. Within this 3D model, a commercially available total
      shoulder arthroplasty implant was positioned according to manufacturer
      guidelines. The configuration was modified around this default position
      to investigate the associated angle of inferior and superior impingement
      during glenohumeral elevation, as well as in lateral impingement
      during axial rotation at both 0 degrees and 60 degrees of glenohumeral
      elevation. Glenoid component size, version, inclination and inferior-superior
      offset as well as humeral component size, torsion, inclination, offset
      and height were examined. The influence of the humeral calcar anatomy
      was also investigated. FINDINGS: Certain implant configurations caused
      component impingement in the physiological range of motion. The most
      sensitive parameters affecting impingement were: (1) the inclination
      of the glenoid component, (2) the inferior-superior position of the
      humeral component along the resection line and (3) the prominence
      of the humeral calcar. Glenoid offset and inclination and humeral
      head offset and height directly affected subacromial impingement.
      INTERPRETATION: This study suggests that several intraoperatively
      adjustable implant positioning parameters can influence the likelihood
      of implant impingement in conventional total shoulder arthroplasty,
      and that the geometry of the humeral calcar should be taken into
      consideration when designing an operative strategy for shoulder joint
      replacement.},
      file = {Favre2008.pdf:Favre2008.pdf:PDF},
      issn = {0268-0033},
      keywords = {Anatomic,Arthroplasty,Articular,Humans,Joint Prosthesis,Models,Range
      of Motion,Replacement,Replacement: methods,Shoulder Impingement Syndrome,Shoulder
      Impingement Syndrome: etiology,Shoulder Impingement Syndrome: physiopathology,Shoulder
      Joint,Shoulder Joint: physiopathology,Shoulder Joint: surgery, OTS,
      OCS, PLA},
      owner = {thomaskroes},
      pmid = {17983693},
      timestamp = {2010.10.22}
    }
  • P. Felkel, A. Fuhrmann, A. Kanitsar, and R. Wegenkittl, “Surface reconstruction of the branching vessels for augmented reality aided surgery,” BIOSIGNAL 2002, vol. 16, pp. 252-254, 2002.
    [Bibtex]
    @ARTICLE{Felkel2002,
      author = {Felkel, P. and Fuhrmann, A. and Kanitsar, A. and Wegenkittl, R.},
      title = {Surface reconstruction of the branching vessels for augmented reality
      aided surgery},
      journal = {BIOSIGNAL 2002},
      year = {2002},
      volume = {16},
      pages = {252 - 254},
      file = {Felkel2002.pdf:Felkel2002.pdf:PDF},
      keywords = {HES, AUR},
      owner = {thomaskroes},
      publisher = {Citeseer},
      timestamp = {2011.01.05}
    }
  • P. Felkel, R. Wegenkittl, and K. Buhler, “Surface models of tube trees,” in Computer Graphics International, 2004. Proceedings, pp. 70-77, 2005.
    [Bibtex]
    @CONFERENCE{Felkel2005,
      author = {Felkel, P. and Wegenkittl, R. and Buhler, K.},
      title = {Surface models of tube trees},
      booktitle = {Computer Graphics International, 2004. Proceedings},
      year = {2005},
      pages = {70 - 77},
      organization = {IEEE},
      abstract = {This paper describes a new method for generating surfaces of branching
      tubular structures with given centerlines and radii. As the centerlines
      are not straight lines, the cross-sections are not parallel and well-known
      algorithms for surface tiling from parallel cross-sections cannot be used.
      Non-parallel cross-sections can be tiled by means of the
      maximal-disc interpolation method; special methods for branching-structures
      modeling by means of convolution surfaces produce excellent results,
      but these methods are more complex than our approach. The proposed
      method tiles non-parallel circular cross-sections and constructs
      a topologically-correct surface mesh. The method is not artifact-free,
      but it is fast and simple. The surface mesh serves as a data representation
      of a vessel tree suitable for real-time Virtual Reality operation
      planning and operation support within a medical application. Proposed
      method extracts a “classical” polygonal representation, which can
      be used in common surface-oriented graphic accelerators.},
      file = {Felkel2005.pdf:Felkel2005.pdf:PDF},
      isbn = {0769521711},
      issn = {1530-1052},
      keywords = {TEC},
      owner = {thomaskroes},
      timestamp = {2011.01.05}
    }
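    Felkel et al. tile circular cross-sections placed along vessel centerlines into a surface mesh. The sketch below builds a mesh for a single unbranched centerline by placing a ring of vertices at each sample and stitching consecutive rings; it only illustrates the general idea and does not reproduce the paper's handling of branchings or non-parallel cross-sections:

      import numpy as np

      def tube_mesh(centerline, radii, n_circ=16):
          """centerline: N x 3 points, radii: N values; returns vertices and triangles."""
          verts, faces = [], []
          for i, (c, r) in enumerate(zip(centerline, radii)):
              t = centerline[min(i + 1, len(centerline) - 1)] - centerline[max(i - 1, 0)]
              t = t / np.linalg.norm(t)                     # tangent by finite differences
              ref = np.array([0.0, 0.0, 1.0]) if abs(t[2]) < 0.9 else np.array([1.0, 0.0, 0.0])
              u = np.cross(t, ref); u /= np.linalg.norm(u)  # local frame in the cross-section
              v = np.cross(t, u)
              for a in np.linspace(0.0, 2.0 * np.pi, n_circ, endpoint=False):
                  verts.append(c + r * (np.cos(a) * u + np.sin(a) * v))
          for i in range(len(centerline) - 1):              # stitch ring i to ring i+1
              for j in range(n_circ):
                  a = i * n_circ + j
                  b = i * n_circ + (j + 1) % n_circ
                  faces.append((a, b, b + n_circ))
                  faces.append((a, b + n_circ, a + n_circ))
          return np.array(verts), np.array(faces)

      cl = np.stack([np.linspace(0, 10, 20), np.zeros(20), np.zeros(20)], axis=1)
      V, F = tube_mesh(cl, np.full(20, 1.0))
      print(V.shape, F.shape)
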
  • J. J. Fenton, S. H. Taplin, P. A. Carney, L. Abraham, E. A. Sickles, C. D’Orsi, E. A. Berns, G. Cutter, R. E. Hendrick, W. E. Barlow, and others, “Influence of Computer-Aided Detection on Performance of Screening Mammography,” N Engl J Med, vol. 356, pp. 1399-1409, 2007.
    [Bibtex]
    @ARTICLE{Fenton2007,
      author = {Fenton, J.J. and Taplin, S.H. and Carney, P.A. and Abraham, L. and
      Sickles, E.A. and D’Orsi, C. and Berns, E.A. and Cutter, G. and Hendrick,
      R.E. and Barlow, W.E. and others},
      title = {Influence of Computer-Aided Detection on Performance of Screening
      Mammography},
      journal = {N Engl J Med},
      year = {2007},
      volume = {356},
      pages = {1399 - 1409},
      file = {Fenton2007.pdf:Fenton2007.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2011.01.06}
    }
  • M. Feuerstein, T. Mussack, S. M. Heining, and N. Navab, “Intraoperative Laparoscope Augmentation for Port Placement and Resection Planning in Minimally Invasive Liver Resection,” Medical Imaging, IEEE Transactions on, vol. 27, iss. 3, pp. 355-369, 2008.
    [Bibtex]
    @ARTICLE{Feuerstein2008,
      author = {Feuerstein, M. and Mussack, T. and Heining, S.M. and Navab, N.},
      title = {Intraoperative Laparoscope Augmentation for Port Placement and Resection
      Planning in Minimally Invasive Liver Resection},
      journal = {Medical Imaging, IEEE Transactions on},
      year = {2008},
      volume = {27},
      pages = {355 - 369},
      number = {3},
      month = {March},
      abstract = {In recent years, an increasing number of liver tumor indications were
      treated by minimally invasive laparoscopic resection. Besides the
      restricted view, two major intraoperative issues in laparoscopic
      liver resection are the optimal planning of ports as well as the
      enhanced visualization of (hidden) vessels, which supply the tumorous
      liver segment and thus need to be divided (e.g., clipped) prior to
      the resection. We propose an intuitive and precise method to plan
      the placement of ports. Pre operatively, self-adhesive fiducials
      are affixed to the patient's skin and a computed tomography (CT)
      data set is acquired while contrasting the liver vessels. Immediately
      prior to the intervention, the laparoscope is moved around these
      fiducials, which are automatically reconstructed to register the
      patient to its preoperative imaging data set. This enables the simulation
      of a camera flight through the patient's interior along the laparoscope's
      or instruments' axes to easily validate potential ports. Intraoperatively,
      surgeons need to update their surgical planning based on actual patient
      data after organ deformations mainly caused by application of carbon
      dioxide pneumoperitoneum. Therefore, preoperative imaging data can
      hardly be used. Instead, we propose to use an optically tracked mobile
      C-arm providing cone-beam CT imaging capability intraoperatively.
      After patient positioning, port placement, and carbon dioxide insufflation,
      the liver vessels are contrasted and a 3-D volume is reconstructed
      during patient exhalation. Without any further need for patient registration,
      the reconstructed volume can be directly augmented on the live laparoscope
      video, since prior calibration enables both the volume and the laparoscope
      to be positioned and oriented in the tracking coordinate frame. The
      augmentation provides the surgeon with advanced visual aid for the
      localization of veins, arteries, and bile ducts to be divided or
      sealed.},
      file = {:Feuerstein2008.pdf:PDF},
      issn = {0278-0062},
      keywords = {3D volume reconstruction;arteries;bile ducts;carbon dioxide pneumoperitoneum;computed
      tomography;intraoperative laparoscope augmentation;liver tumor;minimally
      invasive liver resection;port placement;resection planning;surgical
      planning;veins;computerised tomography;liver;surgery;tumours;Animals;Equipment
      Design;Equipment Failure Analysis;Hepatectomy;Humans;Laparoscopes;Preoperative
      Care;Surgery, Computer-Assisted;Surgical Procedures, Minimally Invasive;Swine;Tomography,
      X-Ray Computed;User-Computer Interface;, HES, APP, GUI, AUR, SUR,
      VOR},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
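    The augmentation described by Feuerstein et al. ultimately projects tracked 3-D structures through the calibrated laparoscope camera onto the live video. A minimal pinhole-projection sketch follows; the intrinsics K and pose R, t are invented placeholders, not values from the authors' calibration pipeline:

      import numpy as np

      def project_points(points_world, K, R, t):
          """points_world: N x 3 in the tracking frame; returns N x 2 pixel coordinates."""
          cam = points_world @ R.T + t              # tracking frame -> camera frame
          pix = cam @ K.T                           # apply camera intrinsics
          return pix[:, :2] / pix[:, 2:3]           # perspective divide

      K = np.array([[800.0, 0.0, 320.0],
                    [0.0, 800.0, 240.0],
                    [0.0, 0.0, 1.0]])
      R, t = np.eye(3), np.array([0.0, 0.0, 100.0])  # camera 100 mm in front of the points
      vessel_pts = np.array([[0.0, 0.0, 0.0], [10.0, 5.0, 2.0]])
      print(project_points(vessel_pts, K, R, t))
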
  • M. Feuerstein, T. Mussack, S. M. Heining, and N. Navab, “Registration-free laparoscope augmentation for intra-operative liver resection planning,” Proceedings of SPIE, p. 650915-650915–8, 2007.
    [Bibtex]
    @ARTICLE{Feuerstein2007,
      author = {Feuerstein, Marco and Mussack, Thomas and Heining, Sandro M. and
      Navab, Nassir},
      title = {Registration-free laparoscope augmentation for intra-operative liver
      resection planning},
      journal = {Proceedings of SPIE},
      year = {2007},
      pages = {650915-650915--8},
      abstract = {In recent years, an increasing number of liver tumor indications were
      treated by minimally invasive laparoscopic resection. Besides the
      restricted view, a major issue in laparoscopic liver resection is
      the enhanced visualization of (hidden) vessels, which supply the
      tumorous liver segment and thus need to be divided prior to the resection.
      To navigate the surgeon to these vessels, pre-operative abdominal
      imaging data can hardly be used due to intra- operative organ deformations
      mainly caused by appliance of carbon dioxide pneumoperitoneum and
      respiratory motion. While regular respiratory motion can be gated
      and synchronized intra-operatively, motion caused by pneumoperitoneum
      is individual for every patient and difficult to estimate. Therefore,
      we propose to use an optically tracked mobile C-arm providing cone-beam
      CT imaging capability intra- operatively. The C-arm is able to visualize
      soft tissue by means of its new flat panel detector and is calibrated
      offline to relate its current position and orientation to the coordinate
      system of a reconstructed volume. Also the laparoscope is optically
      tracked and calibrated offline, so both laparoscope and C-arm are
      registered in the same tracking coordinate system. Intra-operatively,
      after patient positioning, port placement, and carbon dioxide insufflation,
      the liver vessels are contrasted and scanned during patient exhalation.
      Immediately, a three-dimensional volume is reconstructed. Without
      any further need for patient registration, the volume can be directly
      augmented on the live laparoscope video, visualizing the contrasted
      vessels. This augmentation provides the surgeon with advanced visual
      aid for the localization of veins, arteries, and bile ducts to be
      divided or sealed.},
      file = {Feuerstein2007.pdf:Feuerstein2007.pdf:PDF},
      issn = {0277786X},
      keywords = {abdominal procedures,calibration,enhanced reality,image-guided therapy,visualization},
      owner = {thomaskroes},
      publisher = {Spie},
      timestamp = {2010.10.22}
    }
  • G. Fichtinger, A. Deguet, G. Fischer, I. Iordachita, E. Balogh, K. Masamune, R. H. Taylor, L. M. Fayad, M. De Oliveira, and S. J. Zinreich, “Image overlay for CT-guided needle insertions,” Computer Aided Surgery, vol. 10, iss. 4, pp. 241-255, 2005.
    [Bibtex]
    @ARTICLE{Fichtinger2005,
      author = {Fichtinger, G. and Deguet, A. and Fischer, G. and Iordachita, I.
      and Balogh, E. and Masamune, K. and Taylor, R.H. and Fayad, L.M.
      and De Oliveira, M. and Zinreich, S.J.},
      title = {Image overlay for CT-guided needle insertions},
      journal = {Computer Aided Surgery},
      year = {2005},
      volume = {10},
      pages = {241--255},
      number = {4},
      file = {Fichtinger2005.pdf:Fichtinger2005.pdf:PDF},
      issn = {1092-9088},
      owner = {Thomas},
      publisher = {Informa UK Ltd UK},
      timestamp = {2011.04.13}
    }
  • J. Fischer, M. Neff, D. Freudenstein, and D. Bartz, “Medical augmented reality based on commercial image guided surgery,” in Eurographics Symposium on Virtual Environments (EGVE), pp. 83-86, 2004.
    [Bibtex]
    @CONFERENCE{Fischer2004,
      author = {Fischer, J. and Neff, M. and Freudenstein, D. and Bartz, D.},
      title = {Medical augmented reality based on commercial image guided surgery},
      booktitle = {Eurographics Symposium on Virtual Environments (EGVE)},
      year = {2004},
      pages = {83 - 86},
      organization = {Citeseer},
      abstract = {Utilizing augmented reality for applications in medicine has been
      a topic of intense research for several years. A number of challenging
      tasks need to be addressed when designing a medical AR system. These
      include the import and management of medical datasets and preoperatively
      created planning data, the registration of the patient with respect
      to a global coordinate system, and accurate tracking of the camera
      used in the AR setup as well as the respective surgical instruments.
      Most research systems rely on specialized hardware or algorithms
      for realizing augmented reality in medicine. Such base technologies can be expensive
      or very time-consuming to implement. In this paper, we propose an
      alternative approach of building a surgical AR system by harnessing
      existing, commercially available equipment for image guided surgery
      (IGS). We describe the prototype of an augmented reality application,
      which receives all necessary information from a device for intraoperative
      navigation.},
      file = {Fischer2004.pdf:Fischer2004.pdf:PDF},
      keywords = {TEC, AUR},
      owner = {thomaskroes},
      timestamp = {2011.01.03}
    }
  • E. K. Fishman, B. S. Kuszyk, D. G. Heath, L. Gao, and B. Cabral, “Surgical planning for liver resection,” Computer, vol. 29, iss. 1, pp. 64-72, 1996.
    [Bibtex]
    @ARTICLE{Fishman1996,
      author = {Fishman, E.K. and Kuszyk, B.S. and Heath, D.G. and Luomin Gao and
      Cabral, B.},
      title = {Surgical planning for liver resection},
      journal = {Computer},
      year = {1996},
      volume = {29},
      pages = {64 -72},
      number = {1},
      month = jan,
      abstract = {Surgical resection is the cornerstone of curative therapy for primary
      and metastatic liver tumors. For best results, the surgeon must know
      the location of all hepatic tumor nodules relative to the major vessels
      that define the liver's surgical anatomy. Computed tomography is
      very sensitive for detecting liver tumors, but its planar slices
      do not fully address the three-dimensional nature of this surgical
      problem. We have developed a technique using volume rendering of
      computed tomography data that provides a preoperative 3D map of the
      liver showing tumor location relative to key blood vessels. This
      technique also has important implications for emerging, minimally
      invasive therapies},
      file = {Fishman1996.pdf:Fishman1996.pdf:PDF},
      issn = {0018-9162},
      keywords = {computed tomography data;curative therapy;hepatic tumor nodule location;liver
      resection;liver surgical anatomy;major vessels;metastatic liver tumors;minimally
      invasive therapies;preoperative 3D map;primary liver tumors;surgical
      planning;surgical resection;volume rendering;blood;computerised tomography;image
      segmentation;liver;medical image processing;physiology;planning;rendering
      (computer graphics);stereo image processing;surgery;},
      owner = {thomaskroes},
      timestamp = {2011.01.25}
    }
  • M. Fleute, S. Lavallee, and R. Julliard, “Incorporating a statistically based shape model into a system for computer-assisted anterior cruciate ligament surgery.,” Medical Image Analysis, vol. 3, iss. 3, p. 209, 1999.
    [Bibtex]
    @ARTICLE{Fleute1999,
      author = {Fleute, M. and Lavallee, S. and Julliard, R.},
      title = {Incorporating a statistically based shape model into a system for
      computer-assisted anterior cruciate ligament surgery.},
      journal = {Medical Image Analysis},
      year = {1999},
      volume = {3},
      pages = {209},
      number = {3},
      file = {Fleute1999.pdf:Fleute1999.pdf:PDF},
      keywords = {IMP, TEC},
      owner = {thomaskroes},
      timestamp = {2011.01.12}
    }
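    Fleute et al. incorporate a statistically based shape model; such models are commonly built as point-distribution models by PCA over aligned training shapes. The generic sketch below uses synthetic data and is not the shape model used in the paper:

      import numpy as np

      def build_ssm(shapes, n_modes=2):
          """shapes: M x (3N) matrix, each row one aligned shape flattened to a vector."""
          mean = shapes.mean(axis=0)
          U, S, Vt = np.linalg.svd(shapes - mean, full_matrices=False)
          modes = Vt[:n_modes]                              # principal modes of variation
          variances = (S[:n_modes] ** 2) / (len(shapes) - 1)
          return mean, modes, variances

      def synthesize(mean, modes, coeffs):
          """New shape instance = mean + sum_k b_k * mode_k."""
          return mean + np.asarray(coeffs) @ modes

      rng = np.random.default_rng(1)
      base = rng.normal(size=30)                            # 10 landmarks x 3 coordinates
      train = np.stack([base + 0.1 * rng.normal(size=30) for _ in range(20)])
      mean, modes, var = build_ssm(train)
      print(synthesize(mean, modes, [2.0 * np.sqrt(var[0]), 0.0]).shape)
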
  • J. S. Fong and H. Ibrahim, “Development of a virtual reality system for Hepatocellular Carcinoma pre-surgical planning,” in Software Technology and Engineering (ICSTE), 2010 2nd International Conference on, 2010, pp. V1-41 - V1-45.
    [Bibtex]
    @INPROCEEDINGS{Fong2010,
      author = {Jian Siong Fong and Ibrahim, Haidi},
      title = {Development of a virtual reality system for Hepatocellular Carcinoma
      pre-surgical planning},
      booktitle = {Software Technology and Engineering (ICSTE), 2010 2nd International
      Conference on},
      year = {2010},
      volume = {1},
      pages = {V1-41 -V1-45},
      month = {October},
      abstract = {Hepatocellular Carcinoma (HCC) is the most primary liver tumor and
      one of the most common cancers worldwide, particularly in developing
      countries. Various treatment options are available to treat HCC with
      various degrees of success rate and mortality. Studies done by researchers
      have shown that pre-surgical planning tools aid surgeons in planning
      surgery procedures for HCC as well as increasing post-surgery survival
      rate. Previously, pre-surgical planning was done based on medical
      images acquired using medical imaging devices such as Computer Tomography
      (CT), X-Ray, Ultrasound, and Magnetic Resonance Imaging (MRI). Surgeons
      will need to examine the medical images carefully, and picturing
      a 3D liver model using imagination. This process is very time consuming,
      and anatomical variations of tumors might lead to suboptimal treatment
      strategy decision. This project focuses on the application of virtual
      reality technology in HCC pre-surgical planning. One of many deployments
      of virtual reality in pre-surgical planning is to display a virtual
      3D liver model which resembles patient's liver. It is built by taking
      segmented 2D axial slices of patient's liver as input. The operation
      is done by using surface extraction technique. The product of surface
      extraction is a virtual 3D mesh, which is consisted of a group of
      points recorded in their respective three dimensional coordinates.
      These points are mapped to graphic primitives by graphic renderer.
      Interaction control with the 3D model is done by using computer hardware
      interface devices such as mouse, keyboard, joystick or pen input
      devices.},
      file = {:Fong2010.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
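    Fong and Ibrahim obtain the virtual 3-D liver mesh by surface extraction from segmented 2-D slices. A minimal sketch of that step using marching cubes from scikit-image is shown below; the binary volume is a stand-in for stacked liver masks, and the exact function name and signature may vary with the scikit-image version:

      import numpy as np
      from skimage import measure

      segmentation = np.zeros((40, 40, 40), dtype=np.float32)
      segmentation[10:30, 10:30, 10:30] = 1.0               # stand-in for stacked organ masks

      # spacing carries slice thickness / pixel size so the mesh is in millimetres.
      verts, faces, normals, values = measure.marching_cubes(
          segmentation, level=0.5, spacing=(2.0, 1.0, 1.0))

      print(verts.shape, faces.shape)                       # vertex and triangle counts
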
  • A. F. Frangi, W. J. Niessen, R. M. Hoogeveen, T. van Walsum, and M. A. Viergever, “Model-based quantitation of 3-D magnetic resonance angiographic images,” Medical Imaging, IEEE Transactions on, vol. 18, iss. 10, pp. 946-956, 2002.
    [Bibtex]
    @ARTICLE{Frangi2002,
      author = {Frangi, A.F. and Niessen, W.J. and Hoogeveen, R.M. and van Walsum,
      T. and Viergever, M.A.},
      title = {Model-based quantitation of 3-D magnetic resonance angiographic images},
      journal = {Medical Imaging, IEEE Transactions on},
      year = {2002},
      volume = {18},
      pages = {946 - 956},
      number = {10},
      file = {Frangi2002.pdf:Frangi2002.pdf:PDF},
      issn = {0278-0062},
      keywords = {TEC},
      owner = {thomaskroes},
      publisher = {IEEE},
      timestamp = {2011.01.05}
    }
  • D. Freedman, R. J. Radke, T. Zhang, Y. Jeong, and G. T. Y. Chen, “Model-Based Multi-Object Segmentation via Distribution Matching,” in Computer Vision and Pattern Recognition Workshop, 2004. CVPRW ’04. Conference on, 2004, p. 11.
    [Bibtex]
    @INPROCEEDINGS{Freedman2004,
      author = {Freedman, D. and Radke, R.J. and Tao Zhang and Yongwon Jeong and
      Chen, G.T.Y.},
      title = {Model-Based Multi-Object Segmentation via Distribution Matching},
      booktitle = {Computer Vision and Pattern Recognition Workshop, 2004. CVPRW '04.
      Conference on},
      year = {2004},
      pages = { 11},
      month = {June},
      abstract = {A new algorithm for the segmentation of objects from 3D images using
      deformable models is presented. This algorithm relies on learned
      shape and appearance models for the objects of interest. The main
      innovation over similar approaches is that there is no need to compute
      a pixelwise correspondence between the model and the image; instead,
      probability distributions are compared. This allows for a faster,
      more principled algorithm. Furthermore, the algorithm is not sensitive
      to the form of the shape model, making it quite flexible. Results
      of the algorithm are shown for the segmentation of the prostate and
      bladder from medical images.},
      file = {Freedman2004.pdf:Freedman2004.pdf:PDF},
      keywords = { deformable segmentation; medical image segmentation; prostate segmentation;
      shape and appearance model;},
      owner = {thomaskroes},
      timestamp = {2011.01.03}
    }
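    Freedman et al. compare probability distributions instead of computing a pixelwise model-to-image correspondence. One simple way to score how well a candidate region matches a learned appearance distribution is the Bhattacharyya coefficient between intensity histograms, sketched below with synthetic data (an illustration of distribution matching in general, not the paper's exact energy):

      import numpy as np

      def bhattacharyya(p, q):
          """Overlap between two histograms; 1.0 means identical distributions."""
          p = p / p.sum()
          q = q / q.sum()
          return np.sum(np.sqrt(p * q))

      bins = np.linspace(0.0, 1.0, 33)
      model_hist, _ = np.histogram(np.random.beta(2, 5, 5000), bins=bins)   # learned model
      region_hist, _ = np.histogram(np.random.beta(2, 5, 800), bins=bins)   # matching region
      other_hist, _ = np.histogram(np.random.beta(5, 2, 800), bins=bins)    # mismatching region
      print(bhattacharyya(model_hist, region_hist))   # close to 1
      print(bhattacharyya(model_hist, other_hist))    # noticeably lower
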
  • M. Q. Freehill and G. Marra, “Evaluation and treatment of failed rotator cuff repairs,” October, vol. 13, iss. 4, pp. 269-276, 2003.
    [Bibtex]
    @ARTICLE{Freehill2003,
      author = {Freehill, Michael Q and Marra, Guido},
      title = {Evaluation and treatment of failed rotator cuff repairs},
      journal = {October},
      year = {2003},
      volume = {13},
      pages = {269-276},
      number = {4},
      month = {October},
      file = {Freehill2003.pdf:Freehill2003.pdf:PDF},
      keywords = {2003 elsevier inc,all rights reserved,common surgical,one of the most,procedures
      performed on the,rotator cuff repair is,rotator cuff tears,shoulder,successful
      out-,surgical options,surgical planning},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • B. Frericks, F. Caldarone, B. Nashan, D. Savellano, G. Stamm, T. Kirchhoff, H. Shin, A. Schenk, D. Selle, W. Spindler, J. Klempnauer, H. Peitgen, and M. Galanski, “3D CT modeling of hepatic vessel architecture and volume calculation in living donated liver transplantation,” European Radiology, vol. 14, pp. 326-333, 2004.
    [Bibtex]
    @ARTICLE{Frericks2004,
      author = {Frericks, Bernd B. and Caldarone, Franco C. and Nashan, Björn and Savellano,
      Dagmar Högemann and Stamm, Georg and Kirchhoff, Timm D. and Shin, Hoen-Oh
      and Schenk, Andrea and Selle, Dirk and Spindler, Wolf and Klempnauer,
      Jürgen and Peitgen, Heinz-Otto and Galanski, Michael},
      title = {3D CT modeling of hepatic vessel architecture and volume calculation
      in living donated liver transplantation},
      journal = {European Radiology},
      year = {2004},
      volume = {14},
      pages = {326 - 333},
      abstract = {The aim of this study was to evaluate a software tool for non-invasive
      preoperative volumetric assessment of potential donors in living
      donated liver transplantation (LDLT). Biphasic helical CT was performed
      in 56 potential donors. Data sets were post-processed using a non-commercial
      software tool for segmentation, volumetric analysis and visualisation
      of liver segments. Semi-automatic definition of liver margins allowed
      the segmentation of parenchyma. Hepatic vessels were delineated using
      a region-growing algorithm with automatically determined thresholds.
      Volumes and shapes of liver segments were calculated automatically
      based on individual portal-venous branches. Results were visualised
      three-dimensionally and statistically compared with conventional
      volumetry and the intraoperative findings in 27 transplanted cases.
      Image processing was easy to perform within 23 min. Of the 56 potential
      donors, 27 were excluded from LDLT because of inappropriate liver
      parenchyma or vascular architecture. Two recipients were not transplanted
      due to poor clinical conditions. In the 27 transplanted cases, preoperatively
      visualised vessels were confirmed, and only one undetected accessory
      hepatic vein was revealed. Calculated graft volumes were 1110±180 ml
      for right lobes, 820 ml for the left lobe and 270±30 ml for segments
      II+III. The calculated volumes and intraoperatively measured graft
      volumes correlated significantly. No significant differences between
      the presented automatic volumetry and the conventional volumetry
      were observed. A novel image processing technique was evaluated which
      allows a semi-automatic volume calculation and 3D visualisation of
      the different liver segments.},
      affiliation = {Medizinische Hochschule Hannover Diagnostische Radiologie Hannover
      Germany},
      file = {Frericks2004.pdf:Frericks2004.pdf:PDF},
      issn = {0938-7994},
      issue = {2},
      keyword = {Medicine},
      keywords = {REV},
      owner = {thomaskroes},
      publisher = {Springer Berlin / Heidelberg},
      timestamp = {2011.01.26}
    }
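    Frericks et al. report graft volumes computed from segmented CT. The elementary volumetry step is counting segmented voxels and multiplying by the voxel volume; a minimal sketch with a made-up mask and spacing:

      import numpy as np

      def segment_volume_ml(mask, spacing_mm):
          """mask: boolean voxel array; spacing_mm: (dz, dy, dx) voxel size in mm."""
          voxel_volume_mm3 = float(np.prod(spacing_mm))
          return mask.sum() * voxel_volume_mm3 / 1000.0     # mm^3 -> millilitres

      mask = np.zeros((100, 200, 200), dtype=bool)
      mask[20:80, 50:150, 50:150] = True                    # stand-in for a liver-segment mask
      print(segment_volume_ml(mask, (2.0, 0.7, 0.7)))       # volume in ml
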
  • H. Fuchs, M. A. Livingston, R. Raskar, D. Colucci, A. State, J. R. Crawford, P. Rademacher, S. H. Drake, and A. A. Meyer, “Augmented Reality Visualization for Laparoscopic Surgery,” Surgery, pp. 934-943, 1998.
    [Bibtex]
    @ARTICLE{Fuchs1998,
      author = {Fuchs, Henry and Livingston, Mark A and Raskar, Ramesh and Colucci,
      D and State, Andrei and Crawford, Jessica R and Rademacher, Paul
      and Drake, Samuel H and Meyer, Anthony A},
      title = {Augmented Reality Visualization for Laparoscopic Surgery},
      journal = {Surgery},
      year = {1998},
      pages = {934-943},
      abstract = {We present the design and a prototype implementation of a three-dimensional
      visualization system to assist with laparoscopic surgical procedures.
      The system uses 3D visualization, depth extraction from laparoscopic
      images, and six degree-of-freedom head and laparoscope tracking
      to display a merged real and synthetic image in the surgeon’s video-see-through
      head-mounted display. We also introduce a custom design for this
      display. A digital light projector, a camera, and a conventional
      laparoscope create a prototype 3D laparoscope that can extract depth
      and video imagery. Such a system can restore the physician’s natural
      point of view and head motion parallax that are used to understand
      the 3D structure during open surgery. These cues are not available
      in conventional laparoscopic surgery due to the displacement of the
      laparoscopic camera from the physician’s viewpoint. The system can
      also display multiple laparoscopic range imaging data sets to widen
      the effective field of view of the device. These data sets can be
      displayed in true 3D and registered to the exterior anatomy of the
      patient. Much work remains to realize a clinically useful system,
      notably in the acquisition speed, reconstruction, and registration
      of the 3D imagery.},
      annote = {Interesting paper, uses structured light to generate realtime 3D
      augmented images during surgery},
      file = {Fuchs1998.pdf:Fuchs1998.pdf:PDF},
      keywords = {TEC, AUR},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • C. C. Galanis, M. M. Sfantsikopoulos, P. T. Koidis, N. M. Kafantaris, and P. G. Mpikos, “Computer methods for automating preoperative dental implant planning: implant positioning and size assignment.,” Computer methods and programs in biomedicine, vol. 86, iss. 1, pp. 30-8, 2007.
    [Bibtex]
    @ARTICLE{Galanis2007,
      author = {Galanis, Christos C and Sfantsikopoulos, Michael M and Koidis, Petros
      T and Kafantaris, Nikolaos M and Mpikos, Pavlos G},
      title = {Computer methods for automating preoperative dental implant planning:
      implant positioning and size assignment.},
      journal = {Computer methods and programs in biomedicine},
      year = {2007},
      volume = {86},
      pages = {30-8},
      number = {1},
      month = {April},
      abstract = {The paper presents computer-aided methods that allocate a dental implant
      and suggest its size, during the pre-operative planning stage, in
      conformance with introduced optimization criteria and established
      clinical requirements. Based on computed tomography data of the jaw
      and prosthesis anatomy, single tooth cases are planned for the best-suited
      implant insertion at a user-defined region. An optimum implantation
      axis line is produced and cylindrical implants of various candidate
      sizes are then automatically positioned, while their occlusal end
      is leveled to bone ridge, and evaluated. Radial safety margins are
      used for the assessment of the implant safety distance from neighboring
      anatomical structures and bone quantity and quality are estimated
      and taken into consideration. A case study demonstrates the concept
      and allows for its discussion.},
      file = {Galanis2007.pdf:Galanis2007.pdf:PDF},
      issn = {0169-2607},
      keywords = {Decision Making, Computer-Assisted,Dental Implants,Dentistry, Operative,Dentistry,
      Operative: organization \& administratio,Greece,Humans,Preoperative
      Care,Tomography, X-Ray Computed, APP, PLA, CMS},
      owner = {thomaskroes},
      pmid = {17267066},
      timestamp = {2010.10.22}
    }
  • R. Galloway and T. Peters, “Overview and History of Image-Guided Interventions,” Image-Guided Interventions, pp. 1-21, 2008.
    [Bibtex]
    @ARTICLE{Galloway2008,
      author = {Galloway, R. and Peters, T.},
      title = {Overview and History of Image-Guided Interventions},
      journal = {Image-Guided Interventions},
      year = {2008},
      pages = {1 - 21},
      note = {Chapter 1},
      file = {Galloway2008.pdf:Galloway2008.pdf:PDF},
      keywords = {REV},
      owner = {Thomas},
      publisher = {Springer},
      timestamp = {2011.02.28}
    }
  • R. L. Galloway Jr., R. J. Maciunas, and C. A. Edwards II, “Interactive image-guided neurosurgery,” Biomedical Engineering, IEEE Transactions on, vol. 39, iss. 12, pp. 1226-1231, 1992.
    [Bibtex]
    @ARTICLE{Galloway1992,
      author = {Galloway, R.L., Jr. and Maciunas, R.J. and Edwards, C.A., II},
      title = {Interactive image-guided neurosurgery},
      journal = {Biomedical Engineering, IEEE Transactions on},
      year = {1992},
      volume = {39},
      pages = {1226 - 1231},
      number = {12},
      month = {December},
      abstract = {Interactive image-guided (IIG) surgery involves the synchronal display
      of the tip of a surgical device on preoperative scans. This display
      allows the surgeon to locate the present surgical position relative
      to the final site of surgical interest. A technique for IIG surgery
      based on a 6-degrees-of-freedom articulated arm is presented. Design
      accuracy for the arm is less than 0.1 mm, and the present implementation
      has a submillimetric accuracy. The display can show the surgical
      position on any tomographic image set with simultaneous display on
      up to three image sets. Laboratory results and clinical applications
      are discussed.},
      file = {Galloway1992.pdf:Galloway1992.pdf:PDF},
      issn = {0018-9294},
      keywords = {6-degrees-of-freedom articulated arm;design accuracy;interactive image-guided
      neurosurgery;preoperative scans;submillimetric accuracy;synchronal
      display;tomographic image set;brain;surgery;Brain;Equipment Design;Humans;Models,
      Structural;Neurosurgery;Radiography, Interventional;Stereotaxic Techniques;Therapy,
      Computer-Assisted;Tomography, X-Ray Computed;},
      owner = {thomaskroes},
      timestamp = {2011.01.06}
    }
  • P. Gamage, S. Q. Xie, P. Delmas, and P. Xu, “3D Reconstruction of Patient Specific Bone Models from 2D Radiographs for Image Guided Orthopedic Surgery,” in Digital Image Computing: Techniques and Applications, 2009. DICTA ’09., 2009, pp. 212-216.
    [Bibtex]
    @INPROCEEDINGS{Gamage2009b,
      author = {Gamage, P. and Xie, S.Q. and Delmas, P. and Xu, P.},
      title = {3D Reconstruction of Patient Specific Bone Models from 2D Radiographs
      for Image Guided Orthopedic Surgery},
      booktitle = {Digital Image Computing: Techniques and Applications, 2009. DICTA
      '09.},
      year = {2009},
      pages = {212 -216},
      month = {December},
      abstract = {Three dimensional (3D) visualization of anatomy plays an important
      role in image guided orthopedic surgery and ultimately motivates
      minimally invasive procedures. However, direct 3D imaging modalities
      such as Computed Tomography (CT) are restricted to a minority of
      complex orthopedic procedures. Thus the diagnostics and planning
      of many interventions still rely on two dimensional (2D) radiographic
      images, where the surgeon has to mentally visualize the anatomy of
      interest. The purpose of this paper is to apply and validate a bi-planar
      3D reconstruction methodology driven by prominent bony anatomy edges
      and contours identified on orthogonal radiographs. The results obtained
      through the proposed methodology are benchmarked against 3D CT scan
      data to assess the accuracy of reconstruction. The human femur has
      been used as the anatomy of interest throughout the paper. The novelty
      of this methodology is that it not only involves the outer contours
      of the bony anatomy in the reconstruction but also several key interior
      edges identifiable on radiographic images. Hence, this framework
      is not simply limited to long bones, but is generally applicable
      to a multitude of other bony anatomies as illustrated in the results
      section.},
      file = {:Gamage2009b.pdf:PDF},
      keywords = {2D radiographic images;3D bone reconstruction;biplanar 3D reconstruction
      methodology;bony anatomy edges;computed tomography;data visualisation;femur;image
      guided orthopedic surgery;patient specific bone models;bone;computerised
      tomography;data visualisation;edge detection;image reconstruction;medical
      image processing;orthopaedics;radiography;, TEC, OTS},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • P. Gamage, S. Q. Xie, P. Delmas, P. Xu, and S. Mukherjee, “Intra-operative 3D pose estimation of fractured bone segments for image guided orthopedic surgery,” 2008 IEEE International Conference on Robotics and Biomimetics, pp. 288-293, 2009.
    [Bibtex]
    @ARTICLE{Gamage2009a,
      author = {Gamage, P. and Xie, S.Q. and Delmas, P. and Xu, P. and Mukherjee,
      S.},
      title = {Intra-operative 3D pose estimation of fractured bone segments for
      image guided orthopedic surgery},
      journal = {2008 IEEE International Conference on Robotics and Biomimetics},
      year = {2009},
      pages = {288-293},
      month = {February},
      abstract = {The widespread adoption of minimally invasive surgical techniques
      have driven the need for 3D intra-operative image guidance. Hence
      the 3D pose estimation (position and orientation) performed through
      the registration of pre-operatively prepared 3D anatomical data
      to intra-operative 2D fluoroscopy images is one of the main research
      areas of image guided orthopedic surgery. The goal of this 2D-3D
      registration is to fuse highly detailed 3D information with the 2D
      images acquired intra-operatively to provide a real-time 3D visualization
      of the patient’s anatomy during surgery. Existing research work
      on fractured bone pose estimation focuses on tracking pre-operatively
      obtained 3D CT data through fiduciary markers implanted intra-operatively.
      This expensive and invasive approach is not routinely available for
      diagnostics, and a majority of fracture reduction procedures solely
      relies on x-ray/fluoroscopic images. The proposed concept eliminates
      the need to have the pre-operative CT scan of the patient’s injured
      anatomy and presents a non-invasive anatomy-based method for intra-operative
      pose estimation. The concept pre-operatively reconstructs a
      patient-specific fractured bone model utilizing two conventional x-ray images
      orthogonally (in anterior and lateral views) and a generic healthy
      3D anatomical model. This pre-operatively reconstructed 3D model
      will then be utilized intra-operatively in the novel 2D-3D registration
      process for pose estimation. It is this registration process that
      is the focus of this paper. The registration is performed solely
      utilizing bony anatomical features extracted from fluoroscopic images
      without invasive external fiducial markers.},
      file = {:Gamage2009a.pdf:PDF},
      isbn = {978-1-4244-2678-2},
      keywords = {TEC, IMP},
      owner = {thomaskroes},
      publisher = {Ieee},
      timestamp = {2010.10.22}
    }
  • P. Gamage, S. Q. Xie, P. Delmas, and W. L. Xu, “Computer assisted 3D pre-operative planning tool for femur fracture orthopedic surgery,” Imaging, vol. 7625, p. 76253D-76253D–11, 2010.
    [Bibtex]
    @ARTICLE{Gamage2010,
      author = {Gamage, Pavan and Xie, Sheng Quan and Delmas, Patrice and Xu, Wei
      Liang},
      title = {Computer assisted 3D pre-operative planning tool for femur fracture
      orthopedic surgery},
      journal = {Imaging},
      year = {2010},
      volume = {7625},
      pages = {76253D-76253D--11},
      abstract = {Femur shaft fractures are caused by high impact injuries and can affect
      gait functionality if not treated correctly. Until recently, the
      pre-operative planning for femur fractures has relied on two-dimensional
      (2D) radiographs, light boxes, tracing paper, and transparent bone
      templates. The recent availability of digital radiographic equipment
      has to some extent improved the workflow for preoperative planning.
      Nevertheless, imaging is still in 2D X-rays and planning/simulation
      tools to support fragment manipulation and implant selection are
      still not available. Direct three-dimensional (3D) imaging modalities
      such as Computed Tomography (CT) are also still restricted to a minority
      of complex orthopedic procedures. This paper proposes a software
      tool which allows orthopedic surgeons to visualize, diagnose, plan
      and simulate femur shaft fracture reduction procedures in 3D. The
      tool utilizes frontal and lateral 2D radiographs to model the fracture
      surface, separate a generic bone into the two fractured fragments,
      identify the pose of each fragment, and automatically customize the
      shape of the bone. The use of 3D imaging allows full spatial inspection
      of the fracture providing different views through the manipulation
      of the interactively reconstructed 3D model, and ultimately better
      pre-operative planning.},
      file = {Gamage2010.pdf:Gamage2010.pdf:PDF},
      keywords = {computer assisted interventions,femur fracture surgery planning,image
      guided surgery, APP, PLA, OCS, PRS, SLR, SUR},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • L. Gao, D. G. Heath, B. S. Kuszyk, and E. K. Fishman, “Automatic liver segmentation technique for three-dimensional visualization of CT data.,” Radiology, vol. 201, iss. 2, p. 359, 1996.
    [Bibtex]
    @ARTICLE{Gao1996,
      author = {Gao, L. and Heath, D.G. and Kuszyk, B.S. and Fishman, E.K.},
      title = {Automatic liver segmentation technique for three-dimensional visualization
      of CT data.},
      journal = {Radiology},
      year = {1996},
      volume = {201},
      pages = {359},
      number = {2},
      file = {Gao1996.pdf:Gao1996.pdf:PDF},
      issn = {0033-8419},
      keywords = {TEC, IMP},
      owner = {Thomas},
      publisher = {Radiological Society of North America},
      timestamp = {2011.02.03}
    }
  • K. Gary, L. Ibanez, S. Aylward, D. Gobbi, M. B. Blake, and K. Cleary, “IGSTK: an open source software toolkit for image-guided surgery,” Computer, vol. 39, iss. 4, pp. 46-53, 2006.
    [Bibtex]
    @ARTICLE{Gary2006,
      author = {Gary, K. and Ibanez, L. and Aylward, S. and Gobbi, D. and Blake,
      M.B. and Cleary, K.},
      title = {IGSTK: an open source software toolkit for image-guided surgery},
      journal = {Computer},
      year = {2006},
      volume = {39},
      pages = { 46 - 53},
      number = {4},
      month = {April},
      file = {:Gary2006.pdf:PDF},
      issn = {0018-9162},
      keywords = {IGSTK; Image-Guided Software Toolkit; agile software engineering principles;
      component-based software engineering principles; image-guided surgical
      application; minimally invasive procedures; open source development;
      open source software toolkit; reliable software; reusable software
      infrastructure; medical image processing; object-oriented programming;
      public domain software; software reliability; software reusability;
      surgery;},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • G. Gerig, M. Jomier, and M. Chakos, “Valmet: A new validation tool for assessing and improving 3D object segmentation,” in Medical Image Computing and Computer-Assisted Intervention - MICCAI 2001, pp. 516-523, 2001.
    [Bibtex]
    @CONFERENCE{Gerig2010,
      author = {Gerig, G. and Jomier, M. and Chakos, M.},
      title = {Valmet: A new validation tool for assessing and improving 3D object
      segmentation},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention - MICCAI
      2001},
      year = {2001},
      pages = {516 - 523},
      organization = {Springer},
      file = {:C\:\\Thomas\\PHD\\Literature\\Articles\\Gerig2010.pdf:PDF},
      keywords = {TEC, IMP},
      owner = {thomaskroes},
      timestamp = {2010.10.26}
    }
  • D. T. Gering, A. Nabavi, R. Kikinis, N. Hata, L. J. O’Donnell, W. E. Grimson, F. A. Jolesz, P. M. Black, and W. M. Wells, “An integrated visualization system for surgical planning and guidance using image fusion and an open MR.,” Journal of magnetic resonance imaging : JMRI, vol. 13, iss. 6, pp. 967-75, 2001.
    [Bibtex]
    @ARTICLE{Gering2001,
      author = {Gering, D T and Nabavi, A and Kikinis, R and Hata, N and O'Donnell,
      L J and Grimson, W E and Jolesz, F A and Black, P M and Wells, W
      M},
      title = {An integrated visualization system for surgical planning and guidance
      using image fusion and an open MR.},
      journal = {Journal of magnetic resonance imaging : JMRI},
      year = {2001},
      volume = {13},
      pages = {967-75},
      number = {6},
      month = {June},
      abstract = {A surgical guidance and visualization system is presented, which uniquely
      integrates capabilities for data analysis and on-line interventional
      guidance into the setting of interventional MRI. Various pre-operative
      scans (T1- and T2-weighted MRI, MR angiography, and functional MRI
      (fMRI)) are fused and automatically aligned with the operating field
      of the interventional MR system. Both pre-surgical and intra-operative
      data may be segmented to generate three-dimensional surface models
      of key anatomical and functional structures. Models are combined
      in a three-dimensional scene along with reformatted slices that are
      driven by a tracked surgical device. Thus, pre-operative data augments
      interventional imaging to expedite tissue characterization and precise
      localization and targeting. As the surgery progresses, and anatomical
      changes subsequently reduce the relevance of pre-operative data,
      interventional data is refreshed for software navigation in true
      real time. The system has been applied in 45 neurosurgical cases
      and found to have beneficial utility for planning and guidance. J.
      Magn. Reson. Imaging 2001;13:967-975.},
      file = {Gering2001.pdf:Gering2001.pdf:PDF},
      issn = {1053-1807},
      keywords = {Adolescent,Adult,Aged,Brain Neoplasms,Brain Neoplasms: diagnosis,Brain
      Neoplasms: surgery,Child,Child, Preschool,Computer Simulation,Female,Humans,Image
      Processing, Computer-Assisted,Imaging, Three-Dimensional,Magnetic
      Resonance Angiography,Magnetic Resonance Imaging,Male,Middle Aged,Patient
      Care Planning,Software,Stereotaxic Techniques, APP, NES, SLR, SUR},
      owner = {thomaskroes},
      pmid = {11382961},
      timestamp = {2010.10.22}
    }
  • I. M. Germano, “The NeuroStation System for image-guided, frameless stereotaxy,” Neurosurgery, vol. 37, iss. 2, p. 348, 1995.
    [Bibtex]
    @ARTICLE{Germano1995,
      author = {Germano, I.M.},
      title = {The NeuroStation System for image-guided, frameless stereotaxy},
      journal = {Neurosurgery},
      year = {1995},
      volume = {37},
      pages = {348},
      number = {2},
      issn = {0148-396X},
      keywords = {APP, NES},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
  • O. Gerovich, P. Marayong, and A. M. Okamura, “The effect of visual and haptic feedback on computer-assisted needle insertion,” Computer Aided Surgery, vol. 9, iss. 6, pp. 243-249, 2004.
    [Bibtex]
    @ARTICLE{Gerovich2004,
      author = {Gerovich, O. and Marayong, P. and Okamura, A.M.},
      title = {The effect of visual and haptic feedback on computer-assisted needle
      insertion},
      journal = {Computer Aided Surgery},
      year = {2004},
      volume = {9},
      pages = {243 - 249},
      number = {6},
      abstract = {Objective: We present a study evaluating the effects of visual and
      haptic feedback on human performance in a needle insertion task.
      Materials and Methods: A one-degree-of-freedom needle insertion simulator
      with a three-layer tissue model (skin, fat and muscle) was used in
      perceptual experiments. The objective of the 14 subjects was to detect
      the puncture of each tissue layer using varying haptic and visual
      cues. Performance was measured by overshoot error—the distance traveled
      by the virtual needle after puncture. Results: Without force feedback,
      real-time visual feedback reduced error by at least 87% in comparison
      to static image overlay. Force feedback, in comparison to no force
      feedback, reduced puncture overshoot by at least 52% when visual
      feedback was absent or limited to static image overlay. A combination
      of force and visual feedback improved performance, especially for
      tissues with low stiffness, by at least 43% with visual display of
      the needle position, and by at least 67% with visual display of layer
      deflection. Conclusion: Real-time image overlay significantly enhances
      controlled puncture during needle insertion. Force feedback may not
      be necessary except in circumstances where visual feedback is limited.},
      file = {Gerovich2004.pdf:Gerovich2004.pdf:PDF},
      keywords = {TEC, PRS},
      owner = {thomaskroes},
      timestamp = {2011.01.06}
    }
  • M. Gessat, S. Zachow, H. U. Lemke, and O. Burgert, “Geometric Meshes in Medical Applications & Steps towards a Specification of Geometric Models in DICOM,” Image (Rochester, N.Y.), pp. 440-442, 2007.
    [Bibtex]
    @ARTICLE{Gessat2007,
      author = {Gessat, Michael and Zachow, Stefan and Lemke, Heinz U and Burgert,
      Oliver},
      title = {Geometric Meshes in Medical Applications \& Steps towards a Specification
      of Geometric Models in DICOM},
      journal = {Image (Rochester, N.Y.)},
      year = {2007},
      pages = {440 - 442},
      file = {Gessat2007.pdf:Gessat2007.pdf:PDF},
      keywords = {3d anatomy,dicom,geometry,image guided surgery,mesh,standards,surface,surgical
      planning},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • S. Ghanai, R. Marmulla, J. Wiechnik, J. Mühling, and B. Kotrikova, “Computer-assisted three-dimensional surgical planning: 3D virtual articulator: technical note.,” International journal of oral and maxillofacial surgery, vol. 39, iss. 1, pp. 75-82, 2010.
    [Bibtex]
    @ARTICLE{Ghanai2010,
      author = {Ghanai, S and Marmulla, R and Wiechnik, J and M\"{u}hling, J and
      Kotrikova, B},
      title = {Computer-assisted three-dimensional surgical planning: 3D virtual
      articulator: technical note.},
      journal = {International journal of oral and maxillofacial surgery},
      year = {2010},
      volume = {39},
      pages = {75-82},
      number = {1},
      month = {January},
      abstract = {This study presents a computer-assisted planning system for dysgnathia
      treatment. It describes the process of information gathering using
      a virtual articulator and how the splints are constructed for orthognathic
      surgery. The deviation of the virtually planned splints is shown
      in six cases on the basis of conventionally planned cases. In all
      cases the plaster models were prepared and scanned using a 3D laser
      scanner. Successive lateral and posterior-anterior cephalometric
      images were used for reconstruction before surgery. By identifying
      specific points on the X-rays and marking them on the virtual models,
      it was possible to enhance the 2D images to create a realistic 3D
      environment and to perform virtual repositioning of the jaw. A hexapod
      was used to transfer the virtual planning to the real splints. Preliminary
      results showed that conventional repositioning could be replicated
      using the virtual articulator.},
      file = {Ghanai2010.pdf:Ghanai2010.pdf:PDF},
      issn = {1399-0020},
      keywords = {Cephalometry,Cephalometry: methods,Dental Articulators,Dental Models,Equipment
      Design,Humans,Image Processing, Computer-Assisted,Image Processing,
      Computer-Assisted: methods,Imaging, Three-Dimensional,Imaging, Three-Dimensional:
      methods,Jaw Relation Record,Jaw Relation Record: instrumentation,Jaw
      Relation Record: methods,Lasers,Mandible,Mandible: pathology,Maxilla,Maxilla:
      pathology,Orthognathic Surgical Procedures,Orthognathic Surgical
      Procedures: instrumentation,Orthognathic Surgical Procedures: methods,Patient
      Care Planning,Radiography, Dental, Digital,Radiography, Dental, Digital:
      methods,Software,Splints,Surgery, Computer-Assisted,Surgery, Computer-Assisted:
      methods,User-Computer Interface, APP, CMS, RPP, SLR, TRM},
      owner = {thomaskroes},
      pmid = {20005674},
      timestamp = {2010.10.22}
    }
  • F. L. Giesel, A. Mehndiratta, H. von Tengg-Kobligk, A. Schaeffer, K. Teh, E. A. Hoffman, H. Kauczor, E. J. R. van Beek, and J. M. Wild, “Rapid prototyping raw models on the basis of high resolution computed tomography lung data for respiratory flow dynamics.,” Academic radiology, vol. 16, iss. 4, pp. 495-8, 2009.
    [Bibtex]
    @ARTICLE{Giesel2009,
      author = {Giesel, Frederik L and Mehndiratta, Amit and von Tengg-Kobligk, Hendrik
      and Schaeffer, A and Teh, Kevin and Hoffman, E A and Kauczor, Hans-Ulrich
      and van Beek, E J R and Wild, Jim M},
      title = {Rapid prototyping raw models on the basis of high resolution computed
      tomography lung data for respiratory flow dynamics.},
      journal = {Academic radiology},
      year = {2009},
      volume = {16},
      pages = {495 - 8},
      number = {4},
      month = {April},
      abstract = {RATIONALE AND OBJECTIVES: Three-dimensional image reconstruction by
      volume rendering and rapid prototyping has made it possible to visualize
      anatomic structures in three dimensions for interventional planning
      and academic research. METHODS: Volumetric chest computed tomography
      was performed on a healthy volunteer. Computed tomographic images
      of the larger bronchial branches were segmented by an extended three-dimensional
      region-growing algorithm, converted into a stereolithography file,
      and used for computer-aided design on a laser sintering machine.
      The injection of gases for respiratory flow modeling and measurements
      using magnetic resonance imaging were done on a hollow cast. RESULTS:
      Manufacturing the rapid prototype took about 40 minutes and included
      the airway tree from trachea to segmental bronchi (fifth generation).
      The branching of the airways are clearly visible in the (3)He images,
      and the radial imaging has the potential to elucidate the airway
      dimensions. CONCLUSION: The results for flow patterns in the human
      bronchial tree using the rapid-prototype model with hyperpolarized
      helium-3 magnetic resonance imaging show the value of this model
      for flow phantom studies.},
      file = {Giesel2009.pdf:Giesel2009.pdf:PDF},
      issn = {1878-4046},
      keywords = {Computer Simulation,Equipment Design,Equipment Design: methods,Equipment
      Failure Analysis,Female,Humans,Lung,Lung: anatomy \& histology,Lung:
      physiology,Lung: radiography,Models, Anatomic,Models, Biological,Pulmonary
      Gas Exchange,Pulmonary Gas Exchange: physiology,Tomography, X-Ray
      Computed,Tomography, X-Ray Computed: methods,Young Adult, APP, TAS},
      owner = {thomaskroes},
      pmid = {19268862},
      publisher = {Elsevier Ltd},
      timestamp = {2010.10.25}
    }
  • M. L. Giger, K. Doi, and H. MacMahon, “Image feature analysis and computer-aided diagnosis in digital radiography. 3. Automated detection of nodules in peripheral lung fields,” Medical Physics, vol. 15, p. 158, 1988.
    [Bibtex]
    @ARTICLE{Giger1988,
      author = {Giger, M.L. and Doi, K. and MacMahon, H.},
      title = {Image feature analysis and computer-aided diagnosis in digital radiography.
      3. Automated detection of nodules in peripheral lung fields},
      journal = {Medical Physics},
      year = {1988},
      volume = {15},
      pages = {158},
      keywords = {IMP, TEC},
      owner = {thomaskroes},
      timestamp = {2010.12.15}
    }
  • M. Giger, K. Doi, and H. MacMahon, “Computerized detection of lung nodules in digital chest radiographs.,” in SPIE Conference on Medical Imaging, pp. 384-386, 1987.
    [Bibtex]
    @CONFERENCE{Giger1987,
      author = {Giger, ML and Doi, K. and MacMahon, H.},
      title = {Computerized detection of lung nodules in digital chest radiographs.},
      booktitle = {SPIE Conference on Medical Imaging},
      year = {1987},
      pages = {384--386},
      keywords = {IMP, TEC},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
  • S. Girod, E. Keeve, and B. Girod, “Advances in interactive craniofacial surgery planning by 3D simulation and visualization,” International journal of oral and maxillofacial surgery, vol. 24, iss. 1, pp. 120-125, 1995.
    [Bibtex]
    @ARTICLE{Girod1995,
      author = {Girod, S. and Keeve, E. and Girod, B.},
      title = {Advances in interactive craniofacial surgery planning by 3D simulation
      and visualization},
      journal = {International journal of oral and maxillofacial surgery},
      year = {1995},
      volume = {24},
      pages = {120 - 125},
      number = {1},
      file = {Girod1995.pdf:Girod1995.pdf:PDF},
      issn = {0901-5027},
      keywords = {APP, PLA, CMS, OCS},
      owner = {Thomas},
      publisher = {Elsevier},
      timestamp = {2011.02.03}
    }
  • S. Girod, M. Teschner, U. Schrell, B. Kevekordes, and B. Girod, “Computer-aided 3-D simulation and prediction of craniofacial surgery: a new approach,” Journal of Cranio-Maxillofacial Surgery, vol. 29, iss. 3, pp. 156-158, 2001.
    [Bibtex]
    @ARTICLE{Girod2001,
      author = {Girod, S. and Teschner, M. and Schrell, U. and Kevekordes, B. and
      Girod, B.},
      title = {Computer-aided 3-D simulation and prediction of craniofacial surgery:
      a new approach},
      journal = {Journal of Cranio-Maxillofacial Surgery},
      year = {2001},
      volume = {29},
      pages = {156 - 158},
      number = {3},
      file = {Girod2001.pdf:Girod2001.pdf:PDF},
      issn = {1010-5182},
      keywords = {APP, CMS, PLA, OCS},
      owner = {thomaskroes},
      publisher = {Elsevier},
      timestamp = {2011.01.10}
    }
  • E. Gladilin, S. Zachow, P. Deuflhard, and H. Hege, “Anatomy- and physics-based facial animation for craniofacial surgery simulations,” Medical and Biological Engineering and Computing, vol. 42, pp. 167-170, 2004.
    [Bibtex]
    @ARTICLE{Gladilin2004,
      author = {Gladilin, E. and Zachow, S. and Deuflhard, P. and Hege, H.},
      title = {Anatomy- and physics-based facial animation for craniofacial surgery
      simulations},
      journal = {Medical and Biological Engineering and Computing},
      year = {2004},
      volume = {42},
      pages = {167 - 170},
      abstract = {A modelling approach for the realistic simulation of facial expressions
      of emotion in craniofacial surgery planning is presented. The method
      is different from conventional, non-physical techniques for character
      animation in computer graphics. A consistent physiological mechanism
      for facial expressions was assumed, which was the effect of contracting
      muscles on soft tissues. For the numerical solution of the linear
      elastic boundary values, the finite element method on tetrahedral
      grids was used. The approach was validated on a geometrical model
      of a human head derived from tomographic data. Using this model,
      individual facial expressions of emotion were estimated by the superpositioning
      of precomputed single muscle actions.},
      affiliation = {Konrad-Zuse-Zentrum für Informationstechnik Berlin (ZIB), Berlin, Germany},
      file = {Gladilin2004.pdf:Gladilin2004.pdf:PDF},
      issn = {0140-0118},
      issue = {2},
      keyword = {Medicine},
      keywords = {TEC, PRS, OCS},
      owner = {thomaskroes},
      publisher = {Springer Berlin / Heidelberg},
      timestamp = {2011.01.10}
    }
  • E. Gladilin, S. Zachow, P. Deuflhard, and H. C. Hege, “Realistic prediction of individual facial emotion expressions for craniofacial surgery simulations,” in Proceedings of SPIE, vol. 5029, p. 520, 2003.
    [Bibtex]
    @CONFERENCE{Gladilin2003,
      author = {Gladilin, E. and Zachow, S. and Deuflhard, P. and Hege, H.C.},
      title = {Realistic prediction of individual facial emotion expressions for
      craniofacial surgery simulations},
      booktitle = {Proceedings of SPIE},
      year = {2003},
      volume = {5029},
      pages = {520},
      file = {Gladilin2003.pdf:Gladilin2003.pdf:PDF},
      keywords = {APP, CMS, PLA, OCS},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
  • W. Glinkowski, “Computer Enhanced Orthopedics,” Information Technologies in Biomedicine, pp. 28-43, 2008.
    [Bibtex]
    @ARTICLE{Glinkowski2008,
      author = {Glinkowski, W.},
      title = {Computer Enhanced Orthopedics},
      journal = {Information Technologies in Biomedicine},
      year = {2008},
      pages = {28 - 43},
      file = {Glinkowski2008.pdf:Glinkowski2008.pdf:PDF},
      keywords = {REV, OTS},
      owner = {thomaskroes},
      publisher = {Springer},
      timestamp = {2011.01.11}
    }
  • G. Glombitza, W. Lamadé, A. M. Demiris, M. R. Göpfert, A. Mayer, M. L. Bahner, H. P. Meinzer, G. Richter, T. Lehnert, and C. Herfarth, “Virtual planning of liver resections: image processing, visualization and volumetric evaluation.,” International journal of medical informatics, vol. 53, iss. 2-3, pp. 225-37, 1999.
    [Bibtex]
    @ARTICLE{Glombitza1999,
      author = {Glombitza, G and Lamad\'{e}, W and Demiris, A M and G\"{o}pfert,
      M R and Mayer, A and Bahner, M L and Meinzer, H P and Richter, G
      and Lehnert, T and Herfarth, C},
      title = {Virtual planning of liver resections: image processing, visualization
      and volumetric evaluation.},
      journal = {International journal of medical informatics},
      year = {1999},
      volume = {53},
      pages = {225-37},
      number = {2-3},
      abstract = {Operability of a liver tumor depends on its three dimensional relation
      to the intrahepatic vascular trees as well as the volume ratio of
      healthy to tumorous tissue. Precise operation planning is complicated
      by anatomic variability and distortion of the vascular trees by the
      tumor or preceding liver resections. We have developed a computer
      based 3D virtual operation planning system which is ready to go in
      routine use. The main task of a system in this domain is a quantifiable
      patient selection by exact prediction of post-operative liver function.
      It provides the means to measure absolute and relative volumes of
      the organ structures and resected parenchyma. Another important step
      in the pre-operative phase is to visualize the relation between the
      tumor, the liver and the vessel trees for each patient. The new 3D
      operation planning system offers quantifiable liver resection proposals
      based on individualized liver anatomy. The results are presented
      as 3D movies or as interactive visualizations as well as in quantitative
      reports.},
      file = {Glombitza1999.pdf:Glombitza1999.pdf:PDF},
      issn = {1386-5056},
      keywords = {Algorithms,Humans,Image Processing, Computer-Assisted,Liver,Liver
      Neoplasms,Liver Neoplasms: radiography,Liver Neoplasms: surgery,Liver:
      blood supply,Liver: surgery,Therapy, Computer-Assisted,Tomography,
      X-Ray Computed,User-Computer Interface, APP, HES, PLA, VOR},
      owner = {thomaskroes},
      pmid = {10193891},
      timestamp = {2010.10.22}
    }
  • J. Goffin, K. Van Brussel, K. Martens, J. Vander Sloten, R. Van Audekercke, and M. H. Smet, “Three-dimensional computed tomography-based, personalized drill guide for posterior cervical stabilization at C1-C2,” Spine, vol. 26, iss. 12, p. 1343, 2001.
    [Bibtex]
    @ARTICLE{Goffin2001,
      author = {Goffin, J. and Van Brussel, K. and Martens, K. and Vander Sloten,
      J. and Van Audekercke, R. and Smet, M.H.},
      title = {Three-dimensional computed tomography-based, personalized drill guide
      for posterior cervical stabilization at C1-C2},
      journal = {Spine},
      year = {2001},
      volume = {26},
      pages = {1343},
      number = {12},
      file = {Goffin2001.pdf:Goffin2001.pdf:PDF},
      issn = {0362-2436},
      keywords = {APP, PLA, OTS},
      owner = {Thomas},
      timestamp = {2011.02.15}
    }
  • A. J. Gougoutas, A. J. Wheaton, A. Borthakur, E. M. Shapiro, J. B. Kneeland, J. K. Udupa, and R. Reddy, “Cartilage volume quantification via Live Wire segmentation1,” Academic radiology, vol. 11, iss. 12, pp. 1389-1395, 2004.
    [Bibtex]
    @ARTICLE{Gougoutas2004,
      author = {Gougoutas, A.J. and Wheaton, A.J. and Borthakur, A. and Shapiro,
      E.M. and Kneeland, J.B. and Udupa, J.K. and Reddy, R.},
      title = {Cartilage volume quantification via Live Wire segmentation1},
      journal = {Academic radiology},
      year = {2004},
      volume = {11},
      pages = {1389 - 1395},
      number = {12},
      file = {Gougoutas2004.pdf:Gougoutas2004.pdf:PDF},
      issn = {1076-6332},
      keywords = {TEC, IMP},
      owner = {thomaskroes},
      publisher = {Elsevier},
      timestamp = {2010.12.01}
    }
  • H. Graichen, “Validation of cartilage volume and thickness measurements in the human shoulder with quantitative magnetic resonance imaging,” Osteoarthritis and Cartilage, vol. 11, iss. 7, pp. 475-482, 2003.
    [Bibtex]
    @ARTICLE{Graichen2003,
      author = {Graichen, H},
      title = {Validation of cartilage volume and thickness measurements in the
      human shoulder with quantitative magnetic resonance imaging},
      journal = {Osteoarthritis and Cartilage},
      year = {2003},
      volume = {11},
      pages = {475-482},
      number = {7},
      month = {July},
      file = {Graichen2003.pdf:Graichen2003.pdf:PDF},
      issn = {10634584},
      keywords = {cartilage,magnetic resonance imaging,shoulder,thickness measurement,
      IMP, TEC},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • J. Greisberg, “Alignment and technique in total ankle arthroplasty,” Operative Techniques in Orthopaedics, vol. 14, iss. 1, pp. 21-30, 2004.
    [Bibtex]
    @ARTICLE{Greisberg2004,
      author = {Greisberg, J},
      title = {Alignment and technique in total ankle arthroplasty},
      journal = {Operative Techniques in Orthopaedics},
      year = {2004},
      volume = {14},
      pages = {21-30},
      number = {1},
      month = {January},
      abstract = {Total ankle arthroplasty has emerged as a promising alternative to
      ankle arthrodesis, especially in cases where multiple hindfoot joints
      are arthritic. Proper alignment of the limb must be restored to be
      most successful over the long term. Misalignment above the ankle
      typically involves a malunion of a previous tibia fracture and can
      be treated by corrective osteotomy. Deformity in the joint itself
      can arise from congenital malformation or from bony erosion, usually
      as a late result of joint trauma. Mild amounts of bone loss can be
      corrected through bone cuts during ankle replacement, but more severe
      deformity may require distal tibial osteotomy. Misalignment below
      the ankle (in the foot) is probably the most common deformity. Secondary
      procedures in the foot and leg, including muscle balancing, osteotomies,
      or fusions are often a part of the surgical plan and are performed
      either before or simultaneously with ankle replacement. The goal
      is to restore an ankle with neutral static and dynamic balance during
      stance and gait. Achieving that goal will give the best chance for
      pain free ankle motion over the long term.},
      file = {Greisberg2004.pdf:Greisberg2004.pdf:PDF},
      issn = {10486666},
      keywords = {2004 elsevier inc,alignment,all rights reserved,ankle,ankle arthrodesis
      has traditionally,arthroplasty,no longer responds to,non-,operative
      management,when severe ankle arthritis},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • L. Grenacher, M. Thorn, H. Knaebel, M. Vetter, P. Hassenpflug, T. Kraus, H. Meinzer, M. Büchler, G. Kauffmann, and G. Richter, “The role of 3-D imaging and computer-based postprocessing for surgery of the liver and pancreas,” RöFo: Fortschritte auf dem Gebiete der Röntgenstrahlen und der Nuklearmedizin, vol. 177, iss. 9, p. 1219, 2005.
    [Bibtex]
    @ARTICLE{Grenacher2005,
      author = {Grenacher, L. and Thorn, M. and Knaebel, HP and Vetter, M. and Hassenpflug,
      P. and Kraus, T. and Meinzer, HP and B{\"u}chler, MW and Kauffmann,
      GW and Richter, GM},
      title = {The role of 3-D imaging and computer-based postprocessing for surgery
      of the liver and pancreas},
      journal = {R{\"o}Fo: Fortschritte auf dem Gebiete der R{\"o}ntgenstrahlen
      und der Nuklearmedizin},
      year = {2005},
      volume = {177},
      pages = {1219},
      number = {9},
      issn = {1438-9029},
      owner = {Thomas},
      timestamp = {2011.02.28}
    }
  • W. Grimson, R. Kikinis, F. Jolesz, and P. Black, “Image-guided surgery,” Scientific American, vol. 280, iss. 6, pp. 54-61, 1999.
    [Bibtex]
    @ARTICLE{Grimson1999,
      author = {Grimson, WEL and Kikinis, R. and Jolesz, FA and Black, PM},
      title = {Image-guided surgery},
      journal = {Scientific American},
      year = {1999},
      volume = {280},
      pages = {54 - 61},
      number = {6},
      file = {Grimson1999.pdf:Grimson1999.pdf:PDF},
      issn = {0036-8733},
      keywords = {TEC, IMP},
      owner = {Thomas},
      timestamp = {2011.03.09}
    }
  • W. L. Grimson, G. J. Ettinger, S. J. White, T. Lozano-Perez, W. M. Wells, and R. Kikinis, “An automatic registration method for frameless stereotaxy, image guided surgery, and enhanced reality visualization.,” IEEE transactions on medical imaging, vol. 15, iss. 2, pp. 129-40, 1996.
    [Bibtex]
    @ARTICLE{Grimson1996,
      author = {Grimson, W L and Ettinger, G J and White, S J and Lozano-Perez, T
      and Wells, W M and Kikinis, R},
      title = {An automatic registration method for frameless stereotaxy, image
      guided surgery, and enhanced reality visualization.},
      journal = {IEEE transactions on medical imaging},
      year = {1996},
      volume = {15},
      pages = {129-40},
      number = {2},
      month = {January},
      abstract = {There is a need for frameless guidance systems to help surgeons plan
      the exact location for incisions, to define the margins of tumors,
      and to precisely identify locations of neighboring critical structures.
      The authors have developed an automatic technique for registering
      clinical data, such as segmented magnetic resonance imaging (MRI)
      or computed tomography (CT) reconstructions, with any view of the
      patient on the operating table. The authors demonstrate on the specific
      example of neurosurgery. The method enables a visual mix of live
      video of the patient and the segmented three-dimensional (3-D) MRI
      or CT model. This supports enhanced reality techniques for planning
      and guiding neurosurgical procedures and allows us to interactively
      view extracranial or intracranial structures nonintrusively. Extensions
      of the method include image guided biopsies, focused therapeutic
      procedures, and clinical studies involving change detection over
      time sequences of images.},
      file = {Grimson1996.pdf:Grimson1996.pdf:PDF},
      issn = {0278-0062},
      owner = {thomaskroes},
      pmid = {18215896},
      timestamp = {2010.10.22}
    }
  • P. Grunert, K. Darabi, J. Espinosa, and R. Filippi, “Computer-aided navigation in neurosurgery,” Neurosurgical review, vol. 26, iss. 2, pp. 73-99, 2003.
    [Bibtex]
    @ARTICLE{Grunert2003,
      author = {Grunert, P. and Darabi, K. and Espinosa, J. and Filippi, R.},
      title = {Computer-aided navigation in neurosurgery},
      journal = {Neurosurgical review},
      year = {2003},
      volume = {26},
      pages = {73 - 99},
      number = {2},
      abstract = {The article comprises three main parts: a historical review on navigation,
      the mathematical basics for calculation and the clinical applications
      of navigation devices. Main historical steps are described from the
      first idea till the realisation of the frame-based and frameless
      navigation devices including robots. In particular the idea of robots
      can be traced back to the Iliad of Homer, the first testimony of
      European literature over 2500 years ago. In the second part the mathematical
      calculation of the mapping between the navigation and the image space
      is demonstrated, including different registration modalities and
      error estimations. The error of the navigation has to be divided
      into the technical error of the device calculating its own position
      in space, the registration error due to inaccuracies in the calculation
      of the transformation matrix between the navigation and the image
      space, and the application error caused additionally by anatomical
      shift of the brain structures during operation. In the third part
      the main clinical fields of application in modern neurosurgery are
      demonstrated, such as localisation of small intracranial lesions,
      skull-base surgery, intracerebral biopsies, intracranial endoscopy,
      functional neurosurgery and spinal navigation. At the end of the
      article some possible objections to navigation-aided surgery are
      discussed.},
      file = {Grunert2003.pdf:Grunert2003.pdf:PDF},
      issn = {0344-5607},
      keywords = {NES, REV},
      owner = {thomaskroes},
      publisher = {Springer},
      timestamp = {2010.11.19}
    }
  • X. Guan, S. Lai, J. Lackey, J. Shi, U. Techavipoo, K. Mueller, A. Flanders, and D. Andrews, “MediCAD: An Integrated Visualization System for DTI and fMRI Fusion with Anatomical MRI for Presurgical Planning,” Image Processing, vol. 13, iss. 4, pp. 2878-2878, 2005.
    [Bibtex]
    @ARTICLE{Guan2005,
      author = {Guan, X and Lai, S and Lackey, J and Shi, J and Techavipoo, U and
      Mueller, K and Flanders, A and Andrews, D},
      title = {MediCAD: An Integrated Visualization System for DTI and fMRI Fusion
      with Anatomical MRI for Presurgical Planning},
      journal = {Image Processing},
      year = {2005},
      volume = {13},
      pages = {2878-2878},
      number = {4},
      file = {Guan2005.pdf:Guan2005.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • T. Guo, K. W. Finnis, A. G. Parrent, and T. M. Peters, “Visualization and navigation system development and application for stereotactic deep-brain neurosurgeries,” Computer Aided Surgery, vol. 11, iss. 5, pp. 231-239, 2006.
    [Bibtex]
    @ARTICLE{Guo2006,
      author = {Guo, T. and Finnis, K.W. and Parrent, A.G. and Peters, T.M.},
      title = {Visualization and navigation system development and application for
      stereotactic deep-brain neurosurgeries},
      journal = {Computer Aided Surgery},
      year = {2006},
      volume = {11},
      pages = {231 - 239},
      number = {5},
      file = {Guo2006.pdf:Guo2006.pdf:PDF},
      keywords = {APP, GUI, PLA, SUR, SLR},
      owner = {thomaskroes},
      timestamp = {2011.01.06}
    }
  • S. Guthe, M. Wand, J. Gonser, and W. Straßer, “Interactive rendering of large volume data sets,” in Visualization, 2002. VIS 2002. IEEE, pp. 53-60, 2002.
    [Bibtex]
    @CONFERENCE{Guthe2008,
      author = {Guthe, S. and Wand, M. and Gonser, J. and Stra{\ss}er, W.},
      title = {Interactive rendering of large volume data sets},
      booktitle = {Visualization, 2002. VIS 2002. IEEE},
      year = {2002},
      pages = {53--60},
      organization = {IEEE},
      file = {Guthe2008.pdf:Guthe2008.pdf:PDF},
      isbn = {0780374983},
      owner = {thomaskroes},
      timestamp = {2011.01.04}
    }
  • M. Hafez, K. Chelule, B. Seedhom, and K. Sherman, “Computer-Assisted Total Knee Arthroplasty Using Patient-Specific Templates: the Custom-made Cutting Guides,” Navigation and MIS in Orthopedic Surgery, pp. 182-188, 2007.
    [Bibtex]
    @ARTICLE{Hafez2007,
      author = {Hafez, MA and Chelule, KL and Seedhom, BB and Sherman, KP},
      title = {Computer-Assisted Total Knee Arthroplasty Using Patient-Specific
      Templates: the Custom-made Cutting Guides},
      journal = {Navigation and MIS in Orthopedic Surgery},
      year = {2007},
      pages = {182 - 188},
      file = {Hafez2007.pdf:Hafez2007.pdf:PDF},
      keywords = {TRM, OTS, APP},
      owner = {Thomas},
      publisher = {Springer},
      timestamp = {2011.02.15}
    }
  • H. K. Hahn, B. Preim, D. Selle, and H. -O. Peitgen, “Visualization and interaction techniques for the exploration of vascular structures,” in Visualization, 2001. VIS ’01. Proceedings, 2001, pp. 395-578.
    [Bibtex]
    @INPROCEEDINGS{Hahn2001,
      author = {Hahn, H.K. and Preim, B. and Selle, D. and Peitgen, H.-O.},
      title = {Visualization and interaction techniques for the exploration of vascular
      structures},
      booktitle = {Visualization, 2001. VIS '01. Proceedings},
      year = {2001},
      pages = {395 -578},
      month = {October},
      abstract = {We describe a pipeline of image processing steps for deriving symbolic
      models of vascular structures from radiological data which reflect
      the branching pattern and diameter of vessels. For the visualization
      of these symbolic models, concatenated truncated cones are smoothly
      blended at branching points. We put emphasis on the quality of the
      visualizations which is achieved by anti-aliasing operations in different
      stages of the visualization. The methods presented are referred to
      as HQVV (high quality vessel visualization). Scalable techniques
      are provided to explore vascular structures of different orders of
      magnitude. The hierarchy as well as the diameter of the branches
      of vascular systems are used to restrict visualizations to relevant
      subtrees and to emphasize parts of vascular systems. Our research
      is inspired by clear visualizations in textbooks and is targeted
      toward medical education and therapy planning. We describe the application
      of vessel visualization techniques for liver surgery planning. For
      this application it is crucial to recognize the morphology and branching
      pattern of vascular systems as well as the basic spatial relations
      between vessels and other anatomic structures.},
      file = {Hahn2001.pdf:Hahn2001.pdf:PDF},
      keywords = {anatomic structures;anti-aliasing operations;branching pattern;concatenated
      truncated cones;high quality vessel visualization;image processing;interaction
      techniques;liver surgery planning;radiological data;scalable techniques;spatial
      relations;symbolic models;vascular structures;visualization techniques;blood
      vessels;data visualisation;diagnostic radiography;directed graphs;liver;medical
      image processing;surgery;},
      owner = {thomaskroes},
      timestamp = {2010.11.15}
    }
  • W. A. Hall and C. L. Truwit, “Intraoperative MR-guided neurosurgery.,” Journal of magnetic resonance imaging : JMRI, vol. 27, iss. 2, pp. 368-75, 2008.
    [Bibtex]
    @ARTICLE{Hall2008,
      author = {Hall, Walter A and Truwit, Charles L},
      title = {Intraoperative MR-guided neurosurgery.},
      journal = {Journal of magnetic resonance imaging : JMRI},
      year = {2008},
      volume = {27},
      pages = {368-75},
      number = {2},
      month = {February},
      abstract = {For more than a decade neurosurgeons have become increasingly dependent
      on image guidance to perform safe, efficient, and cost-effective
      surgery. Neuronavigation is frame-based or frameless and requires
      obtaining computed tomography or magnetic resonance imaging (MRI)
      scans several days or immediately before surgery. Unfortunately,
      these systems do not allow the neurosurgeon to adjust for the brain
      shift that occurs once the cranium is open. This technical inability
      has led to the development of intraoperative MRI (ioMRI) systems
      ranging from 0.12-3.0T in strength. The advantages of ioMRI are the
      excellent soft tissue discrimination and the ability to view the
      operative site in three dimensions. Enhanced visualization of the
      intracranial lesion enables the neurosurgeon to choose a safe surgical
      trajectory that avoids critical structures, to maximize the extent
      of the tumor resection, and to exclude an intraoperative hemorrhage.
      All ioMRI systems provide basic T1- and T2-weighted imaging capabilities
      but high-field (1.5T) systems can also perform MR spectroscopy (MRS),
      MR venography (MRV), MR angiography (MRA), brain activation studies,
      chemical shift imaging, and diffusion-weighted imaging. Identifying
      vascular structures by MRA or MRV may prevent injury during surgery.
      Demonstrating elevated phosphocholine within a tumor may improve
      the diagnostic yield of brain biopsy. Mapping out neurologic function
      may influence the surgical approach to a tumor. The optimal strength
      for MR-guided neurosurgery is currently under investigation.},
      file = {Hall2008.pdf:Hall2008.pdf:PDF},
      issn = {1053-1807},
      keywords = {Biopsy,Biopsy: instrumentation,Biopsy: methods,Brain,Brain Mapping,Brain
      Mapping: instrumentation,Brain Mapping: methods,Brain Neoplasms,Brain
      Neoplasms: surgery,Brain: pathology,Brain: surgery,Deep Brain Stimulation,Deep
      Brain Stimulation: methods,Humans,Imaging, Three-Dimensional,Imaging,
      Three-Dimensional: methods,Magnetic Resonance Imaging, Interventional,Magnetic
      Resonance Imaging, Interventional: econom,Magnetic Resonance Imaging,
      Interventional: instru,Magnetic Resonance Imaging, Interventional:
      method,Neurosurgical Procedures,Neurosurgical Procedures: adverse
      effects,Neurosurgical Procedures: methods},
      owner = {thomaskroes},
      pmid = {18183585},
      timestamp = {2010.10.22}
    }
  • W. A. Hall and C. L. Truwit, “Intraoperative MR imaging,” Magnetic resonance imaging clinics of North America, vol. 13, iss. 3, p. 533, 2005.
    [Bibtex]
    @ARTICLE{Hall2005,
      author = {Hall, W.A. and Truwit, C.L.},
      title = {Intraoperative MR imaging},
      journal = {Magnetic resonance imaging clinics of North America},
      year = {2005},
      volume = {13},
      pages = {533},
      number = {3},
      issn = {1064-9689},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
  • G. Hamarneh, J. Yang, C. McIntosh, and M. Langille, “3D live-wire-based semi-automatic segmentation of medical images,” in Proceedings of SPIE Medical Imaging: Image Processing, vol. 5747, pp. 1597-1603, 2005.
    [Bibtex]
    @CONFERENCE{Hamarneh2005,
      author = {Hamarneh, G. and Yang, J. and McIntosh, C. and Langille, M.},
      title = {3D live-wire-based semi-automatic segmentation of medical images},
      booktitle = {Proceedings of SPIE Medical Imaging: Image Processing},
      year = {2005},
      volume = {5747},
      pages = {1597 - 1603},
      organization = {Citeseer},
      abstract = {Segmenting anatomical structures from medical images is usually one
      of the most important initial steps in many applications, including
      visualization, computer-aided diagnosis, and morphometric analysis.
      Manual 2D segmentation suffers from operator variability and is tedious
      and time-consuming. These disadvantages are accentuated in 3D applications
      and, the additional requirement of producing intuitive displays to
      integrate 3D information for the user, makes manual segmentation
      even less approachable in 3D. Robust, automatic medical image
      segmentation in 2D to 3D remains an open problem caused
      particularly by sensitivity to low-level parameters of segmentation
      algorithms. Semi-automatic techniques present possible balanced solution
      where automation focuses on low-level computing-intensive tasks that
      can be hidden from the user, while manual intervention captures high-level
      expert knowledge nontrivial to capture algorithmically. In this paper
      we present a 3D extension to the 2D semi-automatic live-wire technique.
      Live-wire based contours generated semi-automatically on a selected
      set of slices are used as seed points on new unseen slices in different
      orientations. The seed points are calculated from intersections of
      user-based live-wire techniques with new slices. Our algorithm includes
      a step for ordering the live-wire seed points in the new slices,
      which is essential for subsequent multi-stage optimal path calculation.
      We present results of automatically detecting contours in new slices
      in 3D volumes from a variety of medical images.},
      file = {Hamarneh2005.pdf:Hamarneh2005.pdf:PDF},
      keywords = {TEC, IMP},
      owner = {thomaskroes},
      timestamp = {2010.12.01}
    }
  • H. Handels, “An orthopaedic atlas for the 3D operation planning and the virtual construction of endoprostheses in computer assisted orthopaedic surgery,” International Congress Series, vol. 1230, pp. 325-330, 2001.
    [Bibtex]
    @ARTICLE{Handels2001a,
      author = {Handels, H},
      title = {An orthopaedic atlas for the 3D operation planning and the virtual
      construction of endoprostheses in computer assisted orthopaedic surgery},
      journal = {International Congress Series},
      year = {2001},
      volume = {1230},
      pages = {325 - 330},
      month = {June},
      abstract = {This paper describes the structure of an orthopaedic atlas of the
      hip for the 3D operation planning. Furthermore, methods to transfer
      the atlas information to patient data sets are presented and evaluated.
      The atlas is used for the automatic recognition of anatomical structures
      that are needed during the virtual pre-operative planning of hip
      operations and the individual design of anatomically adaptable endoprostheses.
      The atlas based recognition method was evaluated using three manually
      pre-segmented 3D CT data sets of the hip. The mean, 98.2\% of the
      bony voxels could be labeled correctly by the atlas based method.
      D 2001 Elsevier Science B.V. All rights reserved.},
      file = {Handels2001a.pdf:Handels2001a.pdf:PDF},
      issn = {05315131},
      keywords = {atlas,non-rigid registration,operation planning,pattern recognition},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • H. Handels and J. Ehrhardt, “Simulation of hip operations and design of custom-made endoprostheses using virtual reality techniques,” 2001.
    [Bibtex]
    @ARTICLE{Handels2001b,
      author = {Handels, H. and Ehrhardt, J.},
      title = {Simulation of hip operations and design of custom-made endoprostheses
      using virtual reality techniques},
      year = {2001},
      file = {Handels2001b.pdf:Handels2001b.pdf:PDF},
      issn = {0026-1270},
      keywords = {APP, OTS, PLA, SLR, SUR},
      owner = {thomaskroes},
      timestamp = {2011.01.12}
    }
  • H. Handels, J. Ehrhardt, W. Plötz, and S. Pöppl, “Computer-assisted planning and simulation of hip operations using virtual three-dimensional models.,” Studies in health technology and informatics, vol. 68, p. 686, 1999.
    [Bibtex]
    @ARTICLE{Handels1999,
      author = {Handels, H. and Ehrhardt, J. and Pl{\"o}tz, W. and P{\"o}ppl, SJ},
      title = {Computer-assisted planning and simulation of hip operations using
      virtual three-dimensional models.},
      journal = {Studies in health technology and informatics},
      year = {1999},
      volume = {68},
      pages = {686},
      keywords = {APP, PLA, OCS, OTS}
    }
  • C. Hansen, A. Kohn, S. Schlichting, F. Weiler, S. Zidowitz, M. Kleemann, and H. Peitgen, “Intraoperative modification of resection plans for liver surgery,” International Journal of Computer Assisted Radiology and Surgery, vol. 3, iss. 3-4, pp. 291-297, 2008.
    [Bibtex]
    @ARTICLE{Hansen2008a,
      author = {Hansen, Christian and Kohn, Alexander and Schlichting, Stefan and
      Weiler, Florian and Zidowitz, Stephan and Kleemann, Markus and Peitgen,
      Heinz-Otto},
      title = {Intraoperative modification of resection plans for liver surgery},
      journal = {International Journal of Computer Assisted Radiology and Surgery},
      year = {2008},
      volume = {3},
      pages = {291-297},
      number = {3-4},
      month = {June},
      abstract = {Objective Recent surgical planning software provides valuable tools
      for evaluating different resection strategies preoperatively. With
      such virtual resections, predictions and quantitative analyses may
      be carried out to assess the resection feasibility with respect
      to tumors and risk structures. In oncologic liver surgery, additional
      tumors that were not seen in the preoperative images are often found
      during the intervention using intraoperative ultrasound (IOUS). Due
      to such findings, the resection strategy must be updated or completely
      revised. Materials and methods Therefore, we have developed
      methods for the intraoperative modification of resection plans.
      The probe of an ultrasound-based navigation system and alternatively
      the pointing device Wiimote are proposed as intraoperative interaction
      devices. Fast adaptation of planning information and the communication
      with both interaction devices is supported by our system, the Intraoperative
      Planning Assistant (IPA). The IPA has been evaluated in the operation
      room (OR) during laparoscopic liver interventions on pigs. Results
      Our preliminary results confirm that intraoperative modifications
      of resection plans are both feasible and beneficial for liver surgery.
      After the intraoperative modification task, updated remaining liver
      volume and resection volume were displayed and quantified within
      10s. Conclusion For the first time, surgeons are provided with a
      system for intraoperative modification of resection plans that offers
      a crucial decision support, is easy to use and integrates smoothly
      into the clinical workflow. The new system provides major support
      for decision making in the OR and thus improves the safety of surgical
      interventions.},
      file = {Hansen2008a.pdf:Hansen2008a.pdf:PDF},
      issn = {1861-6410},
      keywords = {liver surgery,surgery planning,ultrasound,user interaction,visualization,wiimote,
      APP, HES, GUI, PLA},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • C. Hansen, F. Ritter, J. Wieferich, H. Hahn, and H. -O. Peitgen, “Illustration of Vascular Structures for Augmented Reality in Liver Surgery,” in World Congress on Medical Physics and Biomedical Engineering, September 7 – 12, 2009, Munich, Germany, R. Magjarevic, O. Dössel, and W. C. Schlegel, Eds., Springer Berlin Heidelberg, 2010, vol. 25 / 4, pp. 2113-2116.
    [Bibtex]
    @INCOLLECTION{Hansen2010c,
      author = {Hansen, C. and Ritter, F. and Wieferich, J. and Hahn, H. and Peitgen,
      H. -O.},
      title = {Illustration of Vascular Structures for Augmented Reality in Liver
      Surgery},
      booktitle = {World Congress on Medical Physics and Biomedical Engineering, September
      7 - 12, 2009, Munich, Germany},
      publisher = {Springer Berlin Heidelberg},
      year = {2010},
      editor = {Magjarevic, Ratko and Dössel, Olaf and Schlegel, Wolfgang C.},
      volume = {25 / 4},
      series = {IFMBE Proceedings},
      pages = {2113 - 2116},
      abstract = {We present methods for intraoperative visualization of vascular structures
      in liver surgery. The underlying concept combines conventional augmented
      reality approaches with illustrative rendering techniques. Our methods
      reduce the visual complexity of vascular structures, and accentuate
      spatial relations. The proposed visualization techniques are embedded
      in a clinical prototype application that has already been used in
      the operating room for preliminary evaluations. To verify the expressiveness
      of our illustration methods, we performed a user study with controlled
      lab conditions. The study revealed a clear advantage in distance
      assessment for the proposed illustrative approach in comparison to
      conventional rendering techniques.},
      affiliation = {Institute for Medical Image Computing, Fraunhofer MEVIS, Bremen, Germany},
      file = {Hansen2010c.pdf:Hansen2010c.pdf:PDF},
      isbn = {978-3-642-03882-2},
      keyword = {Engineering},
      keywords = {TEC, HES},
      owner = {thomaskroes},
      timestamp = {2011.01.26}
    }
  • C. Hansen, S. Schlichting, S. Zidowitz, A. Kohn, M. Hindennach, M. Kleemann, and H. Peitgen, “Intraoperative Adaptation and Visualization of Preoperative Risk Analyses for Oncologic Liver Surgery,” Methods, 2008.
    [Bibtex]
    @ARTICLE{Hansen2008b,
      author = {Hansen, Christian and Schlichting, Stefan and Zidowitz, Stephan and
      Kohn, Alexander and Hindennach, Milo and Kleemann, Markus and Peitgen,
      Heinz-Otto},
      title = {Intraoperative Adaptation and Visualization of Preoperative Risk
      Analyses for Oncologic Liver Surgery},
      journal = {Methods},
      year = {2008},
      file = {Hansen2008b.pdf:Hansen2008b.pdf:PDF},
      keywords = {intraoperative imaging,treatment planning,ultrasound guidance,visualization,
      APP, HES, GUI, SUR},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • C. Hansen, J. Wieferich, F. Ritter, C. Rieder, and H. Peitgen, “Illustrative visualization of 3D planning models for augmented reality in liver surgery.,” International journal of computer assisted radiology and surgery, vol. 5, iss. 2, pp. 133-41, 2010.
    [Bibtex]
    @ARTICLE{Hansen2010a,
      author = {Hansen, Christian and Wieferich, Jan and Ritter, Felix and Rieder,
      Christian and Peitgen, Heinz-Otto},
      title = {Illustrative visualization of 3D planning models for augmented reality
      in liver surgery.},
      journal = {International journal of computer assisted radiology and surgery},
      year = {2010},
      volume = {5},
      pages = {133-41},
      number = {2},
      month = {March},
      abstract = {PURPOSE: Augmented reality (AR) obtains increasing acceptance in the
      operating room. However, a meaningful augmentation of the surgical
      view with a 3D visualization of planning data which allows reliable
      comparisons of distances and spatial relations is still an open request.
      METHODS: We introduce methods for intraoperative visualization of
      3D planning models which extend illustrative rendering and AR techniques.
      We aim to reduce visual complexity of 3D planning models and accentuate
      spatial relations between relevant objects. The main contribution
      of our work is an advanced silhouette algorithm for 3D planning models
      (distance-encoding silhouettes) combined with procedural textures
      (distance-encoding surfaces). In addition, we present a method for
      illustrative visualization of resection surfaces. RESULTS: The developed
      algorithms have been embedded into a clinical prototype that has
      been evaluated in the operating room. To verify the expressiveness
      of our illustration methods, we performed a user study under controlled
      conditions. The study revealed a clear advantage in distance assessment
      with the proposed illustrative approach in comparison to classical
      rendering techniques. CONCLUSION: The presented illustration methods
      are beneficial for distance assessment in surgical AR. To increase
      the safety of interventions with the proposed approach, the reduction
      of inaccuracies in tracking and registration is a subject of our
      current research.},
      file = {Hansen2010a.pdf:Hansen2010a.pdf:PDF},
      issn = {1861-6429},
      keywords = {Humans,Imaging, Three-Dimensional,Intraoperative Period,Liver Diseases,Liver
      Diseases: surgery,Software,Space Perception,Surgery, Computer-Assisted,Surgery,
      Computer-Assisted: methods,Surgery, Computer-Assisted: standards,
      TRM, AUR, APP, HES, SUR},
      owner = {thomaskroes},
      pmid = {20033519},
      timestamp = {2010.10.22}
    }
  • C. Hansen, S. Zidowitz, A. Schenk, K. -J. Oldhafer, H. Lang, and H. -O. Peitgen, “Risk maps for navigation in liver surgery,” Imaging, vol. 7625, p. 762528–762528–8, 2010.
    [Bibtex]
    @ARTICLE{Hansen2010b,
      author = {Hansen, C. and Zidowitz, S. and Schenk, A. and Oldhafer, K.-J. and
      Lang, H. and Peitgen, H.-O.},
      title = {Risk maps for navigation in liver surgery},
      journal = {Imaging},
      year = {2010},
      volume = {7625},
      pages = {762528--762528--8},
      abstract = {The optimal transfer of preoperative planning data and risk evaluations
      to the operative site is challenging. A common practice is to use
      preoperative 3D planning models as a printout or as a presentation
      on a display. One important aspect is that these models were not
      developed to provide information in complex workspaces like the operating
      room. Our aim is to reduce the visual complexity of 3D planning models
      by mapping surgically relevant information onto a risk map. Therefore,
      we present methods for the identification and classification of critical
      anatomical structures in the proximity of a preoperatively planned
      resection surface. Shadow-like distance indicators are introduced
      to encode the distance from the resection surface to these critical
      structures on the risk map. In addition, contour lines are used to
      accentuate shape and spatial depth. The resulting visualization is
      clear and intuitive, allowing for a fast mental mapping of the current
      resection surface to the risk map. Preliminary evaluations by liver
      surgeons indicate that damage to risk structures may be prevented
      and patient safety may be enhanced using the proposed methods.},
      file = {Hansen2010b.pdf:Hansen2010b.pdf:PDF},
      keywords = {computer-assisted interventions,image-guided surgery,intraoperative
      visualization,surgical navigation, APP, HES, PLA, SUR},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • L. Hao, “Predictive Surgical Simulation for Cardiac Surgery,” 2008.
    [Bibtex]
    @ARTICLE{Hao2008,
      author = {Hao, L.},
      title = {Predictive Surgical Simulation for Cardiac Surgery},
      year = {2008},
      file = {Hao2008.pdf:Hao2008.pdf:PDF},
      owner = {Thomas},
      timestamp = {2011.02.23}
    }
  • R. M. Haralick and L. G. Shapiro, “Image segmentation techniques,” Computer Vision, Graphics, and Image Processing, vol. 29, iss. 1, pp. 100-132, 1985.
    [Bibtex]
    @ARTICLE{Haralick1985,
      author = {Robert M. Haralick and Linda G. Shapiro},
      title = {Image segmentation techniques},
      journal = {Computer Vision, Graphics, and Image Processing},
      year = {1985},
      volume = {29},
      pages = {100 - 132},
      number = {1},
      abstract = {There are now a wide variety of image segmentation techniques, some
      considered general purpose and some designed for specific classes
      of images. These techniques can be classified as: measurement space
      guided spatial clustering, single linkage region growing schemes,
      hybrid linkage region growing schemes, centroid linkage region growing
      schemes, spatial clustering schemes, and split-and-merge schemes.
      In this paper, each of the major classes of image segmentation techniques
      is defined and several specific examples of each class of algorithm
      are described. The techniques are illustrated with examples of segmentations
      performed on real images.},
      file = {Haralick1985.pdf:Haralick1985.pdf:PDF},
      issn = {0734-189X},
      keywords = {TEC, IMP},
      owner = {thomaskroes},
      timestamp = {2010.12.03}
    }
  • M. Harders, A. Barlit, C. Gerber, J. Hodler, and G. Sz, “An Optimized Surgical Planning Environment for Complex Proximal Humerus Fractures,” 2007.
    [Bibtex]
    @ARTICLE{Harders2007,
      author = {Harders, M and Barlit, A and Gerber, Ch and Hodler, J and Sz, G},
      title = {An Optimized Surgical Planning Environment for Complex Proximal Humerus
      Fractures},
      year = {2007},
      abstract = {The precise restoration of a joint’s mobility after fractures can
      only be successful if the original anatomical relationships are
      reproduced as closely as possible. Precise estimates about the
      morphology of the bony components are therefore of paramount importance
      both for reconstructive surgery by osteosynthesis as well as for
      partial or total joint replacement by arthroplasty. In case of the
      shoulder joint, only the proper reassembly of the fragments provides
      sufficient information about the original anatomical relationships
      of a fractured humerus, due to high individual variability and lateral
      asymmetry. To support the precise planning of the interventions,
      this paper presents an enhanced environment facilitating both underlying
      processing steps: fragment segmentation and bone reassembly. This
      includes an intuitive interface for correcting preprocessed volumetric
      CT data as well as a visuo-haptic, virtual environment for physically-based
      simulation of interactive reassembly. The planning system has been
      successfully tested in a pilot study on four clinical cases.},
      file = {Harders2007.pdf:Harders2007.pdf:PDF},
      keywords = {APP, PLA, OTS, SUR},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • J. Harms, M. Bartels, H. Bourquain, H. O. Peitgen, T. Schulz, T. Kahn, J. Hauss, and J. Fangmann, “Computerized CT-Based 3D Visualization Technique in Living Related Liver Transplantation,” Transplantation Proceedings, vol. 37, iss. 2, pp. 1059-1062, 2005.
    [Bibtex]
    @ARTICLE{Harms2005,
      author = {J. Harms and M. Bartels and H. Bourquain and H.O. Peitgen and T.
      Schulz and T. Kahn and J. Hauss and J. Fangmann},
      title = {Computerized CT-Based 3D Visualization Technique in Living Related
      Liver Transplantation},
      journal = {Transplantation Proceedings},
      year = {2005},
      volume = {37},
      pages = {1059 - 1062},
      number = {2},
      abstract = {Introduction For living donor liver transplantation (LDLT) accurate
      diagnostic workup is essential. Multiple imaging approaches are currently
      used. Problems arise in the assessment of vascular and bile duct
      anatomy, liver graft volume, and vascular territories involved. A
      3D visualization system that improves anatomic assessment, allows
      interactive surgery planning, and acts as an intraoperative guide
      with enhanced precision is required. Refinements in computed tomography
      (CT) technology with the introduction of multidetector-row CT scanners
      and implementation of mathematical methods on computerized digital
      data have enabled CT-based 3D visualizations. Materials and Methods
      Sixteen LDLT candidates and three LDLT recipients were assessed by
      multislice CT examination. Image processing of the digital raw data
      for 3D visualization included segmentation and calculation of center
      lines. A hierarchical mathematical model representing the vascular
      and biliary tree was created. This allowed calculation of individual
      vascular territories. Results 3D CT-based visualization in LDLT facilitates
      diagnostic workup with high accuracy for analyses of vascular and
      bile duct variants, volumetry, and assessment of the optimal surgical
      splitting line of the living donor liver. Resultant areas of either
      arterial devascularization or venous congestion can be displayed
      and quantified preoperatively. The diagnostic method is of major
      impact on patient selection and directly influences intraoperative
      surgical guidance. The currently practiced multiple imaging approach,
      especially with regard to invasive diagnostics, can be
      avoided in the future.},
      file = {Harms2005.pdf:Harms2005.pdf:PDF},
      issn = {0041-1345},
      owner = {thomaskroes},
      timestamp = {2011.01.25}
    }
  • T. Hartkens, D. Hill, A. Castellano-Smith, D. Hawkes, C. Maurer Jr, A. Martin, W. Hall, H. Liu, and C. Truwit, “Measurement and analysis of brain deformation during neurosurgery,” Medical Imaging, IEEE Transactions on, vol. 22, iss. 1, pp. 82-92, 2003.
    [Bibtex]
    @ARTICLE{Hartkens2003,
      author = {Hartkens, T. and Hill, DLG and Castellano-Smith, AD and Hawkes, DJ
      and Maurer Jr, CR and Martin, AJ and Hall, WA and Liu, H. and Truwit,
      CL},
      title = {Measurement and analysis of brain deformation during neurosurgery},
      journal = {Medical Imaging, IEEE Transactions on},
      year = {2003},
      volume = {22},
      pages = {82 - 92},
      number = {1},
      file = {Hartkens2003.pdf:Hartkens2003.pdf:PDF},
      issn = {0278-0062},
      owner = {thomaskroes},
      publisher = {IEEE},
      timestamp = {2011.01.04}
    }
  • S. Hassfeld and J. Mühling, “Computer assisted oral and maxillofacial surgery – a review and an assessment of technology,” International Journal of Oral and Maxillofacial Surgery, vol. 30, iss. 1, pp. 2-13, 2001.
    [Bibtex]
    @ARTICLE{Hassfeld2001,
      author = {Stefan Hassfeld and Joachim Mühling},
      title = {Computer assisted oral and maxillofacial surgery - a review and an
      assessment of technology},
      journal = {International Journal of Oral and Maxillofacial Surgery},
      year = {2001},
      volume = {30},
      pages = {2 - 13},
      number = {1},
      abstract = {Advances in the basic scientific research within the field
      of computer assisted oral and maxillofacial surgery have enabled
      us to introduce features of these techniques into routine clinical
      practice. In order to simulate complex surgery with the aid of a
      computer, the diagnostic image data and especially various imaging
      modalities including computer tomography (CT), magnetic resonance
      imaging (MRI) and Ultrasound (US) must be arranged in relation to
      each other, thus enabling a rapid switching between the various modalities
      as well as the viewing of superimposed images. Segmenting techniques
      for the reconstruction of three-dimensional representations of soft
      and hard tissues are required. We must develop ergonomic and user
      friendly interactive methods for the surgeon, thus allowing for a
      precise and fast entry of the planned surgical procedure in the planning
      and simulation phase. During the surgical phase, instrument navigation
      tools offer the surgeon interactive support through operation guidance
      and control of potential dangers. This feature is already available
      today and within this article we present a review of the development
      of this rapidly evolving technique. Future intraoperative assistance
      takes the form of such passive tools for the support of intraoperative
      orientation as well as so-called 'tracking systems' (semi-active
      systems) which accompany and support the surgeons' work. The final
      form are robots which execute specific steps completely autonomously.
      The techniques of virtual reality and computer assisted surgery are
      increasingly important in their medical applications. Many applications
      are still being developed or are still in the form of a prototype.
      It is already clear, however, that developments in this area will
      have a considerable effect on a surgeon's routine work.},
      file = {Hassfeld2001.pdf:Hassfeld2001.pdf:PDF},
      issn = {0901-5027},
      keywords = {computer assisted surgery; operation planning; surgery simulation;
      image guided surgery; navigation systems; medical robotics; review,
      CMS, REV},
      owner = {Thomas},
      timestamp = {2011.02.02}
    }
  • S. Hassfeld and J. Mühling, “Navigation in maxillofacial and craniofacial surgery,” Computer Aided Surgery, vol. 3, iss. 4, pp. 183-187, 1998.
    [Bibtex]
    @ARTICLE{Hassfeld1998b,
      author = {Hassfeld, S. and Mühling, J.},
      title = {Navigation in maxillofacial and craniofacial surgery},
      journal = {Computer Aided Surgery},
      year = {1998},
      volume = {3},
      pages = {183--187},
      number = {4},
      file = {Hassfeld1998b.pdf:Hassfeld1998b.pdf:PDF},
      issn = {1097-0150},
      keywords = {APP, CMS, SLR, PLA},
      owner = {Thomas},
      publisher = {John Wiley \& Sons},
      timestamp = {2011.02.03}
    }
  • S. Hassfeld, J. Mühling, C. Wirtz, M. Knauth, T. Lutze, and H. Schulz, “Intraoperative guidance in maxillofacial and craniofacial surgery,” Proceedings of the Institution of Mechanical Engineers, Part H: Journal of Engineering in Medicine, vol. 211, iss. 4, pp. 277-283, 1997.
    [Bibtex]
    @ARTICLE{Hassfeld1997,
      author = {Hassfeld, S. and Mühling, J. and Wirtz, CR and Knauth, M. and Lutze,
      T. and Schulz, HJ},
      title = {Intraoperative guidance in maxillofacial and craniofacial surgery},
      journal = {Proceedings of the Institution of Mechanical Engineers, Part H: Journal
      of Engineering in Medicine},
      year = {1997},
      volume = {211},
      pages = {277 - 283},
      number = {4},
      file = {Hassfeld1997.pdf:Hassfeld1997.pdf:PDF},
      issn = {0954-4119},
      keywords = {APP, CMS, GUI, SLR},
      owner = {Thomas},
      publisher = {Prof Eng Publishing},
      timestamp = {2011.02.03}
    }
  • S. Hassfeld, J. Mühling, and J. Zöller, “Intraoperative navigation in oral and maxillofacial surgery,” International journal of oral and maxillofacial surgery, vol. 24, iss. 1, pp. 111-119, 1995.
    [Bibtex]
    @ARTICLE{Hassfeld1995,
      author = {Hassfeld, S. and Mühling, J. and Zöller, J.},
      title = {Intraoperative navigation in oral and maxillofacial surgery},
      journal = {International journal of oral and maxillofacial surgery},
      year = {1995},
      volume = {24},
      pages = {111 - 119},
      number = {1},
      file = {Hassfeld1995.pdf:Hassfeld1995.pdf:PDF},
      issn = {0901-5027},
      owner = {Thomas},
      publisher = {Elsevier},
      timestamp = {2011.02.03}
    }
  • S. Hassfeld, J. Zöller, F. K. Albert, C. R. Wirtz, M. Knauth, and J. Mühling, “Preoperative planning and intraoperative navigation in skull base surgery.,” Journal of cranio-maxillo-facial surgery : official publication of the European Association for Cranio-Maxillo-Facial Surgery, vol. 26, iss. 4, pp. 220-5, 1998.
    [Bibtex]
    @ARTICLE{Hassfeld1998a,
      author = {Hassfeld, S and Zöller, J and Albert, F K and Wirtz, C R and Knauth,
      M and Mühling, J},
      title = {Preoperative planning and intraoperative navigation in skull base
      surgery.},
      journal = {Journal of cranio-maxillo-facial surgery : official publication of
      the European Association for Cranio-Maxillo-Facial Surgery},
      year = {1998},
      volume = {26},
      pages = {220 - 5},
      number = {4},
      month = {August},
      abstract = {Experience with the commercially available, 3-D navigation systems
      Viewing Wand (ISG, Mississauga, Ontario, Canada) and SPOCS (Aesculap,
      Germany) in skull base surgery is presented. Having meanwhile been
      tested in over 60 clinical trials, the systems achieved an accuracy
      of < or = 2.7 mm which, at the moment, we deem sufficiently acceptable
      to proceed with their clinical evaluation. There was no difference
      in intraoperative accuracy between the mechanical and the optical
      navigation systems. The systems proved to be very helpful in identifying
      the extent of the tumours and in visualizing the proximity of vital
      structures. 3-D-planning, simulation and intraoperative navigation
      especially facilitates surgery in anatomically complicated situations,
      without risk of damaging neighbouring structures. The SPOCS (Surgical
      Planning and Orientation Computer System) revealed a considerably
      improved flexibility in handling and a better integration into the
      surgical procedure in comparison with the relatively inflexible and
      space-demanding Viewing Wand arm. Especially, the 'offset' function
      of the SPOCS offers the possibility of a virtual elongation of the
      instrument and thus, in combination with the on-line visualization
      of the corresponding images, of a 'look ahead' operation. By using
      computer-assisted simulation and navigation systems, we can expect
      quality improvement and risk reduction. More extensive and radical
      interventions seem possible.},
      file = {Hassfeld1998a.pdf:Hassfeld1998a.pdf:PDF},
      issn = {1010-5182},
      keywords = {Computer Simulation,Computer Systems,Electronics, Medical,Electronics,
      Medical: instrumentation,Equipment Design,Humans,Image Processing,
      Computer-Assisted,Infrared Rays,Intraoperative Care,Magnetic Resonance
      Imaging,Meningioma,Meningioma: surgery,Middle Aged,Nasal Bone,Nasal
      Bone: surgery,Neoplasm Invasiveness,Nose Neoplasms,Nose Neoplasms:
      surgery,Online Systems,Orbital Neoplasms,Orbital Neoplasms: surgery,Patient
      Care Planning,Photography,Photography: instrumentation,Skull Base,Skull
      Base Neoplasms,Skull Base Neoplasms: surgery,Skull Base: surgery,Therapy,
      Computer-Assisted,Therapy, Computer-Assisted: instrumentation,Tomography,
      X-Ray Computed,User-Computer Interface},
      owner = {thomaskroes},
      pmid = {9777500},
      timestamp = {2010.10.22}
    }
  • M. Haubner, C. Krapichler, A. Losch, K. -H. Englmeier, and W. Van Eimeren, “Virtual reality in medicine-computer graphics and interaction techniques,” Information Technology in Biomedicine, IEEE Transactions on, vol. 1, iss. 1, pp. 61-72, 1997.
    [Bibtex]
    @ARTICLE{Haubner1997,
      author = {Haubner, M. and Krapichler, C. and Losch, A. and Englmeier, K.-H.
      and Van Eimeren, W.},
      title = {Virtual reality in medicine-computer graphics and interaction techniques},
      journal = {Information Technology in Biomedicine, IEEE Transactions on},
      year = {1997},
      volume = {1},
      pages = {61 - 72},
      number = {1},
      month = {march},
      abstract = {The paper describes several new visualization and interaction techniques
      that enable the use of virtual environments for routine medical purposes.
      A new volume-rendering method supports shaded and transparent visualization
      of medical image sequences in real-time with an interactive threshold
      definition. Based on these rendering algorithms two complementary
      segmentation approaches offer an intuitive assistance for a wide
      range of requirements in diagnosis and therapy planning. In addition,
      a hierarchical data representation for geometric surface descriptions
      guarantees an optimal use of available hardware resources and prevents
      inaccurate visualization. The combination of the presented techniques
      empowers the improved human-machine interface of virtual reality
      to support every interactive task in medical three-dimensional (3-D)
      image processing, from visualization of unsegmented data volumes
      up to the simulation of surgical procedures.},
      file = {Haubner1997.pdf:Haubner1997.pdf:PDF},
      issn = {1089-7771},
      keywords = {3D medical image processing;computer graphics techniques;diagnosis
      planning;geometric surface descriptions;hierarchical data representation;human-machine
      interface;interaction techniques;interactive threshold definition;intuitive
      assistance;medical image sequences;medicine;optimal hardware resource
      use;real-time shaded visualization;real-time transparent visualization;rendering
      algorithms;routine medical purposes;segmentation;therapy planning;virtual
      environments;virtual reality;visualization techniques;volume-rendering
      method;data structures;data visualisation;digital simulation;image
      reconstruction;image sequences;medical image processing;real-time
      systems;rendering (computer graphics);stereo image processing;surgery;user
      interfaces;virtual reality;Algorithms;Computer Graphics;Humans;Image
      Processing, Computer-Assisted;User-Computer Interface;},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
  • M. Hauth, Visual simulation of deformable models, Eberhard-Karls-Universität Tübingen, Germany, Dissertation, 2004.
    [Bibtex]
    @BOOK{Hauth2004,
      title = {Visual simulation of deformable models},
      publisher = {Eberhard-Karls-Universität Tübingen, Germany, Dissertation},
      year = {2004},
      author = {Hauth, M.},
      file = {Hauth2004.pdf:Hauth2004.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2011.01.07}
    }
  • D. Hawkes, D. Barratt, T. Carter, J. McClelland, and B. Crum, “Nonrigid Registration,” in Image-Guided Interventions, T. Peters and K. Cleary, Eds., Springer US, 2008, pp. 193-218.
    [Bibtex]
    @INCOLLECTION{Hawkes2008,
      author = {Hawkes, David and Barratt, Dean and Carter, Tim and McClelland, Jamie
      and Crum, Bill},
      title = {Nonrigid Registration},
      booktitle = {Image-Guided Interventions},
      publisher = {Springer US},
      year = {2008},
      editor = {Peters, Terry and Cleary, Kevin},
      pages = {193 - 218},
      note = {Chapter 7},
      abstract = {This chapter describes the convergence of technologies between interventional
      radiology, image-guided surgery, and image-directed therapy. Nonrigid
      registration has an important part to play in this trend and different
      approaches to nonrigid registration are summarized. The role of nonrigid
      registration for image-guided procedures in the building and instantiation
      of anatomical atlases, modeling large deformations of soft tissues
      by incorporating biomechanical models, and modeling cyclic respiratory
      and cardiac motion for image guidance is described. These concepts
      are illustrated with descriptions of prototype systems with applications
      in the brain, breast, lung, liver, and orthopaedics.},
      affiliation = {University College London, London UK},
      file = {Hawkes2008.pdf:Hawkes2008.pdf:PDF},
      isbn = {978-0-387-73858-1},
      keyword = {Engineering},
      owner = {Thomas},
      timestamp = {2011.02.24}
    }
  • D. J. Hawkes, D. Barratt, J. M. Blackall, C. Chan, P. J. Edwards, K. Rhode, G. P. Penney, J. McClelland, and D. L. G. Hill, “Tissue deformation and shape models in image-guided interventions: a discussion paper.,” Medical image analysis, vol. 9, iss. 2, pp. 163-75, 2005.
    [Bibtex]
    @ARTICLE{Hawkes2005,
      author = {Hawkes, D J and Barratt, D and Blackall, J M and Chan, C and Edwards,
      P J and Rhode, K and Penney, G P and McClelland, J and Hill, D L
      G},
      title = {Tissue deformation and shape models in image-guided interventions:
      a discussion paper.},
      journal = {Medical image analysis},
      year = {2005},
      volume = {9},
      pages = {163-75},
      number = {2},
      month = {April},
      abstract = {This paper promotes the concept of active models in image-guided interventions.
      We outline the limitations of the rigid body assumption in image-guided
      interventions and describe how intraoperative imaging provides a
      rich source of information on spatial location of anatomical structures
      and therapy devices, allowing a preoperative plan to be updated during
      an intervention. Soft tissue deformation and variation from an atlas
      to a particular individual can both be determined using non-rigid
      registration. Established methods using free-form deformations have
      a very large number of degrees of freedom. Three examples of deformable
      models--motion models, biomechanical models and statistical shape
      models--are used to illustrate how prior information can be used
      to restrict the number of degrees of freedom of the registration
      algorithm and thus provide active models for image-guided interventions.
      We provide preliminary results from applications for each type of
      model.},
      file = {Hawkes2005.pdf:Hawkes2005.pdf:PDF},
      issn = {1361-8415},
      keywords = {Algorithms,Computer Simulation,Connective Tissue,Connective Tissue:
      pathology,Connective Tissue: physiopathology,Connective Tissue: surgery,Elasticity,Image
      Enhancement,Image Enhancement: methods,Image Interpretation, Computer-Assisted,Image
      Interpretation, Computer-Assisted: methods,Models, Biological,Movement,Subtraction
      Technique,Surgery, Computer-Assisted,Surgery, Computer-Assisted:
      methods, TEC},
      owner = {thomaskroes},
      pmid = {15721231},
      timestamp = {2010.10.22}
    }
  • M. Heiland, C. R. Habermann, and R. Schmelzle, “Indications and limitations of intraoperative navigation in maxillofacial surgery,” Journal of Oral and Maxillofacial Surgery, vol. 62, iss. 9, pp. 1059-1063, 2004.
    [Bibtex]
    @ARTICLE{Heiland2004,
      author = {Heiland, M. and Habermann, C.R. and Schmelzle, R.},
      title = {Indications and limitations of intraoperative navigation in maxillofacial
      surgery},
      journal = {Journal of Oral and Maxillofacial Surgery},
      year = {2004},
      volume = {62},
      pages = {1059 - 1063},
      number = {9},
      file = {Heiland2004.pdf:Heiland2004.pdf:PDF},
      issn = {0278-2391},
      owner = {Thomas},
      publisher = {Elsevier},
      timestamp = {2011.02.04}
    }
  • M. Heiland, D. Schulze, G. Adam, and R. Schmelzle, “3D-imaging of the facial skeleton with an isocentric mobile C-arm system (Siremobil Iso-C3D),” Dentomaxillofacial Radiology, vol. 32, iss. 1, p. 21, 2003.
    [Bibtex]
    @ARTICLE{Heiland2003,
      author = {Heiland, M. and Schulze, D. and Adam, G. and Schmelzle, R.},
      title = {3D-imaging of the facial skeleton with an isocentric mobile C-arm
      system (Siremobil Iso-C3D)},
      journal = {Dentomaxillofacial Radiology},
      year = {2003},
      volume = {32},
      pages = {21},
      number = {1},
      owner = {thomaskroes},
      publisher = {Br Inst Radiology},
      timestamp = {2011.01.17}
    }
  • T. Heimann, B. van Ginneken, M. A. Styner, Y. Arzhaeva, V. Aurich, C. Bauer, A. Beck, C. Becker, R. Beichel, G. Bekes, and others, “Comparison and evaluation of methods for liver segmentation from CT datasets,” Medical Imaging, IEEE Transactions on, vol. 28, iss. 8, pp. 1251-1265, 2009.
    [Bibtex]
    @ARTICLE{Heimann2009b,
      author = {Heimann, T. and van Ginneken, B. and Styner, M.A. and Arzhaeva, Y.
      and Aurich, V. and Bauer, C. and Beck, A. and Becker, C. and Beichel,
      R. and Bekes, G. and others},
      title = {Comparison and evaluation of methods for liver segmentation from
      CT datasets},
      journal = {Medical Imaging, IEEE Transactions on},
      year = {2009},
      volume = {28},
      pages = {1251 - 1265},
      number = {8},
      file = {Heimann2009b.pdf:Heimann2009b.pdf:PDF},
      issn = {0278-0062},
      keywords = {TEC, IMP},
      owner = {thomaskroes},
      publisher = {IEEE},
      timestamp = {2011.01.26}
    }
  • T. Heimann and H. Meinzer, “Statistical shape models for 3D medical image segmentation: A review,” Medical Image Analysis, vol. 13, iss. 4, pp. 543-563, 2009.
    [Bibtex]
    @ARTICLE{Heimann2009a,
      author = {Tobias Heimann and Hans-Peter Meinzer},
      title = {Statistical shape models for 3D medical image segmentation: A review},
      journal = {Medical Image Analysis},
      year = {2009},
      volume = {13},
      pages = {543 - 563},
      number = {4},
      abstract = {Statistical shape models (SSMs) have by now been firmly established
      as a robust tool for segmentation of medical images. While 2D models
      have been in use since the early 1990s, wide-spread utilization of
      three-dimensional models appeared only in recent years, primarily
      made possible by breakthroughs in automatic detection of shape correspondences.
      In this article, we review the techniques required to create and
      employ these 3D SSMs. While we concentrate on landmark-based shape
      representations and thoroughly examine the most popular variants
      of Active Shape and Active Appearance models, we also describe several
      alternative approaches to statistical shape modeling. Structured
      into the topics of shape representation, model construction, shape
      correspondence, local appearance models and search algorithms, we
      present an overview of the current state of the art in the field.
      We conclude with a survey of applications in the medical field and
      a discussion of future developments.},
      file = {Heimann2009a.pdf:Heimann2009a.pdf:PDF},
      issn = {1361-8415},
      keywords = {Statistical shape model, TEC, REV, IMP},
      owner = {thomaskroes},
      timestamp = {2010.12.03}
    }
  • B. M. Hemminger, P. L. Molina, T. M. Egan, F. C. Detterbeck, K. E. Muller, C. S. Coffey, and J. K. T. Lee, “Assessment of real-time 3D visualization for cardiothoracic diagnostic evaluation and surgery planning.,” Journal of digital imaging : the official journal of the Society for Computer Applications in Radiology, vol. 18, iss. 2, pp. 145-53, 2005.
    [Bibtex]
    @ARTICLE{Hemminger2005,
      author = {Hemminger, Bradley M and Molina, Paul L and Egan, Thomas M and Detterbeck,
      Frank C and Muller, Keith E and Coffey, Christopher S and Lee, Joseph
      K T},
      title = {Assessment of real-time 3D visualization for cardiothoracic diagnostic
      evaluation and surgery planning.},
      journal = {Journal of digital imaging : the official journal of the Society
      for Computer Applications in Radiology},
      year = {2005},
      volume = {18},
      pages = {145-53},
      number = {2},
      month = {June},
      abstract = {RATIONALE AND OBJECTIVES: Three-dimensional (3D) real-time volume
      rendering has demonstrated improvements in clinical care for several
      areas of radiological imaging. We test whether advanced real-time
      rendering techniques combined with an effective user interface will
      allow radiologists and surgeons to improve their performance for
      cardiothoracic surgery planning and diagnostic evaluation. MATERIAL
      AND METHODS: An interactive combination 3D and 2D visualization system
      developed at the University of North Carolina at Chapel Hill was
      compared against standard tiled 2D slice presentation on a viewbox.
      The system was evaluated for 23 complex cardiothoracic computed tomographic
      (CT) cases including heart-lung and lung transplantation, tumor resection,
      airway stent placement, repair of congenital heart defects, aortic
      aneurysm repair, and resection of pulmonary arteriovenous malformation.
      Radiologists and surgeons recorded their impressions with and without
      the use of the interactive visualization system. RESULTS: The cardiothoracic
      surgeons reported positive benefits to using the 3D visualizations.
      The addition of the 3D visualization changed the surgical plan (65\%
      of cases), increased the surgeon's confidence (on average 40\% per
      case), and correlated well with the anatomy found at surgery (95\%
      of cases). The radiologists reported fewer and less major changes
      than the surgeons in their understanding of the case due to the 3D
      visualization. They found new findings or additional information
      about existing findings in 66\% of the cases; however, they changed
      their radiology report in only 14\% of the cases. CONCLUSION: With
      the appropriate choice of 3D real-time volume rendering and a well-designed
      user interface, both surgeons and radiologists benefit from viewing
      an interactive 3D visualization in addition to 2D images for surgery
      planning and diagnostic evaluation of complex cardiothoracic cases.
      This study finds that 3D visualization is especially helpful to the
      surgeon in understanding the case, and in communicating and planning
      the surgery. These results suggest that including real-time 3D visualization
      would be of clinical benefit for complex cardiothoracic CT cases.},
      file = {Hemminger2005.pdf:Hemminger2005.pdf:PDF},
      issn = {0897-1889},
      keywords = {Confidence Intervals,Humans,Imaging, Three-Dimensional,Patient Care
      Planning,Radiography, Thoracic,Radiology Information Systems,Retrospective
      Studies,Thoracic Diseases,Thoracic Diseases: radiography,Thoracic
      Diseases: surgery,Tomography, X-Ray Computed,User-Computer Interface,
      TRM},
      owner = {thomaskroes},
      pmid = {15827827},
      timestamp = {2010.10.22}
    }
  • C. J. Henri, A. C. F. Colchester, J. Zhao, D. J. Hawkes, D. L. G. Hill, and R. L. Evans, “Registration of 3-D Surface Data for Intra-Operative Guidance and Visualization in Frameless Stereotactic Neurosurgery,” Methods, 1995.
    [Bibtex]
    @ARTICLE{Henri1995,
      author = {Henri, Christopher J and Colchester, Alan C F and Zhao, Jason and
      Hawkes, David J and Hill, Derek L G and Evans, Richard L},
      title = {Registration of 3-D Surface Data for Intra-Operative Guidance and
      Visualization in Frameless Stereotactic Neurosurgery},
      journal = {Methods},
      year = {1995},
      abstract = {We describe a technique for registering 3-D multimodal image data,
      acquired preoperatively, with intraoperative surface data derived
      from stereo video during neurosurgery. Ultimately, our aim is to
      provide a system that supplants traditional frame-based stereotactic
      techniques while achieving comparable accuracy. For registration we
      employ chamfer-matching in conjunction with a cost function
      that is robust to 'outliers'. To balance robustness and computation
      speed, we employ a quasi-stochastic search of parameter space that
      includes pursuing multiple start points. This paper describes the
      registration problem as it pertains to our application. We discuss
      our approach to optimization and carry out a computational evaluation
      of the technique under various conditions.},
      file = {Henri1995.pdf:Henri1995.pdf:PDF},
      keywords = {TEC},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • M. Hernandez-Hoyos, A. Anwander, M. Orkisz, J. P. Roux, P. Douek, and I. Magnin, “A Deformable Vessel Model with Single Point Initialization for Segmentation, Quantification, and Visualization of Blood Vessels in 3D MRA,” in Medical Image Computing and Computer-Assisted Intervention – MICCAI 2000, 2000.
    [Bibtex]
    @CONFERENCE{Hernandez2000,
      author = {Hernandez-Hoyos, M. and Anwander, A. and Orkisz, M. and Roux, J.P.
      and Douek, P. and Magnin, I.},
      title = {A Deformable Vessel Model with Single Point Initialization for Segmentation,
      Quantification, and Visualization of Blood Vessels in 3D MRA},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention--MICCAI
      2000},
      year = {2000},
      organization = {Springer},
      file = {Hernandez2000.pdf:Hernandez2000.pdf:PDF},
      keywords = {TEC},
      owner = {thomaskroes},
      timestamp = {2011.01.05}
    }
  • L. Hieu, N. Zlatov, J. Vander Sloten, E. Bohez, L. Khanh, P. Binh, P. Oris, and Y. Toshev, “Medical rapid prototyping applications and methods,” Assembly Automation, vol. 25, iss. 4, pp. 284-292, 2005.
    [Bibtex]
    @ARTICLE{Hieu2005,
      author = {Hieu, LC and Zlatov, N. and Vander Sloten, J. and Bohez, E. and Khanh,
      L. and Binh, PH and Oris, P. and Toshev, Y.},
      title = {Medical rapid prototyping applications and methods},
      journal = {Assembly Automation},
      year = {2005},
      volume = {25},
      pages = {284 - 292},
      number = {4},
      file = {Hieu2005.pdf:Hieu2005.pdf:PDF},
      issn = {0144-5154},
      keywords = {REV, RPP},
      owner = {Thomas},
      publisher = {Emerald Group Publishing Limited},
      timestamp = {2011.02.07}
    }
  • A. Hodgson, “Computer-Assisted Orthopedic Surgery,” in Image-Guided Interventions, T. Peters and K. Cleary, Eds., Springer US, 2008, pp. 333-386.
    [Bibtex]
    @INCOLLECTION{Hodgson2008,
      author = {Hodgson, Antony},
      title = {Computer-Assisted Orthopedic Surgery},
      booktitle = {Image-Guided Interventions},
      publisher = {Springer US},
      year = {2008},
      editor = {Peters, Terry and Cleary, Kevin},
      pages = {333 - 386},
      note = {Contents},
      abstract = {Orthopedic surgeons treat musculoskeletal disorders such as arthritis,
      scoliosis, and trauma, which collectively affect hundreds of millions
      of people and are the leading cause of pain and disability. In this
      chapter, the main technical developments related to computer-assisted
      surgery (CAS) in several key areas of orthopedic surgery are reviewed:
      hip and knee replacements, spine surgery, and fracture repair. We
      also assess the evaluations of these systems performed to date, with
      a particular focus on the value proposition that CAS needs to deliver
      in order for it to become widely accepted. This means it must demonstrate
      better performance, less operating room time, and reduced costs.
      We describe several systems for both hip and knee replacement that
      are based on computed tomographic (CT) images, intraoperative fluoroscopy,
      or image-free kinematic techniques, and in each domain consider both
      manual and robotic systems. Future work in computer-assisted orthopedic
      surgery will include efforts to develop newer technologies such as
      3D ultrasound and ever less invasive procedures, but it must also
      concentrate on improving operative workflow, to transfer the benefits
      of improved accuracy to nonspecialist orthopedic surgeons working
      in community hospitals, where the case volumes are lower than in
      specialized centers. Linkages between improved accuracy during surgery
      and improved functional outcomes for the patients must be demonstrated
      for these technologies to be widely accepted.},
      affiliation = {Centre for Hip Health, University of British Columbia, Vancouver,
      BC Canada},
      file = {Hodgson2008.pdf:Hodgson2008.pdf:PDF},
      isbn = {978-0-387-73858-1},
      keyword = {Engineering},
      owner = {thomaskroes},
      timestamp = {2011.01.11}
    }
  • H. R. Hoenecke, J. C. Hermida, C. Flores-Hernandez, and D. D. D’Lima, “Accuracy of CT-based measurements of glenoid version for total shoulder arthroplasty.,” Journal of shoulder and elbow surgery / American Shoulder and Elbow Surgeons … [et al.], vol. 19, iss. 2, pp. 166-71, 2010.
    [Bibtex]
    @ARTICLE{Hoenecke2010,
      author = {Hoenecke, Heinz R and Hermida, Juan C and Flores-Hernandez, Cesar
      and D'Lima, Darryl D},
      title = {Accuracy of CT-based measurements of glenoid version for total shoulder
      arthroplasty.},
      journal = {Journal of shoulder and elbow surgery / American Shoulder and Elbow
      Surgeons ... [et al.]},
      year = {2010},
      volume = {19},
      pages = {166-71},
      number = {2},
      month = {March},
      abstract = {BACKGROUND/HYPOTHESIS: The arthritic glenoid is typically in retroversion
      and restoration to neutral version is recommended. While a method
      for measurement of glenoid version using axial computed tomography
      (CT) has been reported and has been widely accepted, its accuracy
      and reproducibility has not been established. METHODS: In 33 patients
      scheduled for shoulder arthroplasty, glenoid version and maximum
      wear of the glenoid articular surface were measured with respect
      to the scapular body axis on 2-dimensional- (2D) CT slices as well
      as on 3-dimensional- (3D) reconstructed models of the same CT slices.
      RESULTS: Clinical CT scans were axially aligned with the patient's
      torso but were almost never perpendicular to the scapular body. The
      average absolute error in version measured on the 2D-CT slice passing
      through the tip of the coracoid was 5.1 degrees (range, 0 - 16 degrees
      , P < .001). On high-resolution 3D-CT reconstructions, the location
      of maximum wear was most commonly posterior and was missed on the
      clinical 2D-CT slices in 52\% of cases. CONCLUSION: Error in measuring
      version and depth of maximum wear can substantially affect the determination
      of the degree of correction necessary in arthritic glenoids. Accurately
      measuring glenoid version and locating the direction of maximum wear
      requires a full 3D-CT reconstruction and analysis.},
      file = {Hoenecke2010.pdf:Hoenecke2010.pdf:PDF},
      issn = {1532-6500},
      keywords = {Aged,Aged, 80 and over,Arthroplasty, Replacement,Arthroplasty, Replacement:
      methods,Cohort Studies,Female,Follow-Up Studies,Humans,Imaging, Three-Dimensional,Joint
      Instability,Joint Instability: prevention \& control,Joint Prosthesis,Male,Middle
      Aged,Observer Variation,Osteoarthritis,Osteoarthritis: radiography,Osteoarthritis:
      surgery,Preoperative Care,Preoperative Care: methods,Probability,Risk
      Assessment,Scapula,Scapula: radiography,Scapula: surgery,Sensitivity
      and Specificity,Severity of Illness Index,Shoulder Joint,Shoulder
      Joint: radiography,Shoulder Joint: surgery,Tomography, X-Ray Computed,Tomography,
      X-Ray Computed: methods,Treatment Outcome},
      owner = {thomaskroes},
      pmid = {19959378},
      publisher = {Elsevier Ltd},
      timestamp = {2010.10.22}
    }
  • R. Hofstetter, D. Schlenzka, T. Laine, and T. Lund, “A new approach to computer-aided spine surgery: fluoroscopy-based surgical navigation,” European Spine Journal, vol. 9, pp. 78-88, 2000.
    [Bibtex]
    @ARTICLE{Hofstetter2000,
      author = {Hofstetter, R. and Schlenzka, D. and Laine, T. and Lund, T.},
      title = {A new approach to computer-aided spine surgery: fluoroscopy-based
      surgical navigation},
      journal = {European Spine Journal},
      year = {2000},
      volume = {9},
      pages = {78 - 88},
      issn = {0940-6719},
      owner = {Thomas},
      publisher = {Springer},
      timestamp = {2011.02.28}
    }
  • R. Hofstetter, M. Slomczykowski, I. Bourquin, and L. Nolte, “Fluoroscopy based surgical navigation-concept and clinical applications,” in Computer Assisted Radiology and Surgery, 1997, pp. 956-960.
    [Bibtex]
    @CONFERENCE{Hofstetter1997,
      author = {Hofstetter, R. and Slomczykowski, M. and Bourquin, I. and Nolte,
      LP},
      title = {Fluoroscopy based surgical navigation-concept and clinical applications},
      booktitle = {Computer Assisted Radiology and Surgery},
      year = {1997},
      pages = {956 - 960},
      file = {Hofstetter1997.pdf:Hofstetter1997.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2011.01.18}
    }
  • R. L. Holmes, “Computer-assisted quality control in tree-ring dating and measurement,” Tree-ring bulletin, vol. 43, iss. 1, pp. 69-78, 1983.
    [Bibtex]
    @ARTICLE{Holmes1983,
      author = {Holmes, R.L.},
      title = {Computer-assisted quality control in tree-ring dating and measurement},
      journal = {Tree-ring bulletin},
      year = {1983},
      volume = {43},
      pages = {69--78},
      number = {1},
      owner = {Thomas},
      timestamp = {2011.02.28}
    }
  • R. D. Howe and Y. Matsuoka, “Robotics for surgery.,” Annual review of biomedical engineering, vol. 1, pp. 211-40, 1999.
    [Bibtex]
    @ARTICLE{Howe1999,
      author = {Howe, R D and Matsuoka, Y},
      title = {Robotics for surgery.},
      journal = {Annual review of biomedical engineering},
      year = {1999},
      volume = {1},
      pages = {211-40},
      month = {January},
      abstract = {Robotic technology is enhancing surgery through improved precision,
      stability, and dexterity. In image-guided procedures, robots use
      magnetic resonance and computed tomography image data to guide instruments
      to the treatment site. This requires new algorithms and user interfaces
      for planning procedures; it also requires sensors for registering
      the patient's anatomy with the preoperative image data. Minimally
      invasive procedures use remotely controlled robots that allow the
      surgeon to work inside the patient's body without making large incisions.
      Specialized mechanical designs and sensing technologies are needed
      to maximize dexterity under these access constraints. Robots have
      applications in many surgical specialties. In neurosurgery, image-guided
      robots can biopsy brain lesions with minimal damage to adjacent tissue.
      In orthopedic surgery, robots are routinely used to shape the femur
      to precisely fit prosthetic hip joint replacements. Robotic systems
      are also under development for closed-chest heart bypass, for microsurgical
      procedures in ophthalmology, and for surgical training and simulation.
      Although results from initial clinical experience are positive, issues
      of clinician acceptance, high capital costs, performance validation,
      and safety remain to be addressed.},
      file = {Howe1999.pdf:Howe1999.pdf:PDF},
      issn = {1523-9829},
      keywords = {Biomedical Engineering,Humans,Orthopedic Procedures,Orthopedic Procedures:
      instrumentation,Orthopedic Procedures: methods,Robotics,Robotics:
      education,Robotics: instrumentation,Robotics: methods,Safety,Surgical
      Equipment,Surgical Procedures, Minimally Invasive,Surgical Procedures,
      Minimally Invasive: instrumen,Surgical Procedures, Minimally Invasive:
      methods,Surgical Procedures, Operative,Surgical Procedures, Operative:
      methods,Thoracic Surgical Procedures,Thoracic Surgical Procedures:
      instrumentation,Thoracic Surgical Procedures: methods, TRM},
      owner = {thomaskroes},
      pmid = {11701488},
      timestamp = {2010.10.22}
    }
  • Q. Hu, U. Langlotz, J. Lawrence, F. Langlotz, and L. P. Nolte, “A fast impingement detection algorithm for computer-aided orthopedic surgery,” Computer Aided Surgery, vol. 6, iss. 2, pp. 104-110, 2001.
    [Bibtex]
    @ARTICLE{Hu2001,
      author = {Hu, Q. and Langlotz, U. and Lawrence, J. and Langlotz, F. and Nolte,
      L.P.},
      title = {A fast impingement detection algorithm for computer-aided orthopedic
      surgery},
      journal = {Computer Aided Surgery},
      year = {2001},
      volume = {6},
      pages = {104 - 110},
      number = {2},
      file = {Hu2001.pdf:Hu2001.pdf:PDF},
      issn = {1097-0150},
      keywords = {OCS, TEC, OTS},
      owner = {thomaskroes},
      publisher = {Wiley Online Library},
      timestamp = {2011.01.06}
    }
  • Y. Hu and R. A. Malthaner, “The feasibility of three-dimensional displays of the thorax for preoperative planning in the surgical treatment of lung cancer.,” European journal of cardio-thoracic surgery : official journal of the European Association for Cardio-thoracic Surgery, vol. 31, iss. 3, pp. 506-11, 2007.
    [Bibtex]
    @ARTICLE{Hu2007,
      author = {Hu, Yaoping and Malthaner, Richard A},
      title = {The feasibility of three-dimensional displays of the thorax for preoperative
      planning in the surgical treatment of lung cancer.},
      journal = {European journal of cardio-thoracic surgery : official journal of
      the European Association for Cardio-thoracic Surgery},
      year = {2007},
      volume = {31},
      pages = {506-11},
      number = {3},
      month = {March},
      abstract = {OBJECTIVE: Three-dimensional (3D) displays of anatomic structures
      have become feasible for preoperative planning in some surgical procedures.
      There have been no reports, however, on the use of 3D displays for
      surgical treatment of lung cancer. We hypothesized that 3D displays
      of the thorax are useful for preoperative planning for lung cancer.
      METHODS: Based on virtual reality technologies, we rendered 3D displays
      of the thorax from two-dimensional (2D) computed tomographic (CT)
      images of six anonymous patients, some of whom underwent surgical
      removal of lung cancer. For determining the resectability of lung
      cancer, we tested 17 participants with varying degrees of surgical
      skills to view 3D displays and read 2D CT images of these thoracic
      cavities in a randomized order. We measured their performance in
      terms of the accuracy of predicted resectability, the confidence
      of their prediction, planning time used, and workload experienced.
      RESULTS: The results demonstrated that viewing 3D displays of thoracic
      cavities has significant advantages over reading 2D CT images in
      determining the resectability of lung cancer: increasing the accuracy
      of predicted resectability by about 20\%, enhancing the confidence
      of the prediction by about 20\%, decreasing planning time by about
      30\%, and reducing workload by about 50\%. All participants preferred
      viewing 3D displays to reading 2D CT images for preoperative planning.
      Junior residents found 3D displays of thoraces more useful than senior
      residents. CONCLUSIONS: It is feasible to use 3D displays of the
      thorax for preoperative planning in treating lung cancer. Using 3D
      displays in surgical treatment of lung cancer has potential benefits,
      once the technique is perfected.},
      file = {Hu2007.pdf:Hu2007.pdf:PDF},
      issn = {1010-7940},
      keywords = {Clinical Competence,Feasibility Studies,Humans,Imaging, Three-Dimensional,Imaging,
      Three-Dimensional: methods,Lung Neoplasms,Lung Neoplasms: radiography,Lung
      Neoplasms: surgery,Medical Staff, Hospital,Medical Staff, Hospital:
      standards,Preoperative Care,Preoperative Care: methods,Tomography,
      X-Ray Computed,Tomography, X-Ray Computed: methods,User-Computer
      Interface,Workload, TEC},
      owner = {thomaskroes},
      pmid = {17223351},
      timestamp = {2010.10.22}
    }
  • Y. Huang, M. Niu, L. Wang, and S. Chang, “Computer-aided surgery planning for implantation of artificial ear,” Sheng wu yi xue gong cheng xue za zhi= Journal of biomedical engineering= Shengwu yixue gongchengxue zazhi, vol. 26, iss. 4, p. 706, 2009.
    [Bibtex]
    @ARTICLE{Huang2009,
      author = {Huang, Y. and Niu, M. and Wang, L. and Chang, S.},
      title = {Computer-aided surgery planning for implantation of artificial ear},
      journal = {Sheng wu yi xue gong cheng xue za zhi= Journal of biomedical engineering=
      Shengwu yixue gongchengxue zazhi},
      year = {2009},
      volume = {26},
      pages = {706},
      number = {4},
      keywords = {TEC}
    }
  • L. Ibanez, “Software,” in Image-Guided Interventions, T. Peters and K. Cleary, Eds., Springer US, 2008, pp. 121-157.
    [Bibtex]
    @INCOLLECTION{Ibanez2008,
      author = {Ibanez, Luis},
      title = {Software},
      booktitle = {Image-Guided Interventions},
      publisher = {Springer US},
      year = {2008},
      editor = {Peters, Terry and Cleary, Kevin},
      pages = {121 - 157},
      note = {Chapter 5},
      abstract = {Software is a core component of any system intended to support image-guided
      surgery interventions. This chapter describes some of the important
      considerations that should be kept in mind when designing, implementing,
      and using software as a component of an image-guided surgery system.
      Particular emphasis is given to quality control and the principles
      of software design for safety-critical applications.},
      affiliation = {Kitware Inc. Clifton Park NY USA},
      file = {Ibanez2008.pdf:Ibanez2008.pdf:PDF},
      isbn = {978-0-387-73858-1},
      keyword = {Engineering},
      owner = {Thomas},
      timestamp = {2011.02.24}
    }
  • H. Ibrahim, “A review on computer aided hepatocellular carcinoma treatment planning,” in Electronics and Information Engineering (ICEIE), 2010 International Conference On, 2010, pp. V1-158 - V1-161.
    [Bibtex]
    @INPROCEEDINGS{Ibrahim2010,
      author = {Ibrahim, H.},
      title = {A review on computer aided hepatocellular carcinoma treatment planning},
      booktitle = {Electronics and Information Engineering (ICEIE), 2010 International
      Conference On},
      year = {2010},
      volume = {1},
      pages = {V1-158 -V1-161},
      month = {August},
      abstract = {Hepatocellular carcinoma (HCC) is currently one of the life-threatening
      diseases of the liver. Current clinical practice suggests that computer
      aided treatment planning can significantly improve the treatment received
      by patients. Therefore, in this paper, a review of computer aided
      HCC pre-surgical planning has been carried out. This review covers
      several important aspects that need to be considered when building
      a successful pre-surgical planning system.
      These include the common imaging modalities used in liver diagnosis,
      several 3D visualization techniques, some basic ideas on surface
      rendering, and the important liver components used in pre-surgical
      planning.},
      file = {:Ibrahim2010.pdf:PDF},
      keywords = {3D visualization techniques;clinical practices;common imaging modalities;computer
      aided Hepatocellular carcinoma treatment planning;life threatening
      diseases;liver diagnosis;patient treatment;presurgical planning;surface
      rendering;biomedical imaging;diseases;liver;medical computing;rendering
      (computer graphics);surgery;},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • N. Inaoka, H. Suzuki, and M. Fukuda, “Hepatic blood vessel recognition using anatomical knowledge,” in Proceedings of SPIE, vol. 1652, p. 509, 1992.
    [Bibtex]
    @CONFERENCE{Inaoka1992,
      author = {Inaoka, N. and Suzuki, H. and Fukuda, M.},
      title = {Hepatic blood vessel recognition using anatomical knowledge},
      booktitle = {Proceedings of SPIE},
      year = {1992},
      volume = {1652},
      pages = {509},
      file = {Inaoka1992.pdf:Inaoka1992.pdf:PDF},
      keywords = {TEC, HES},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
  • H. Iseki, Y. Muragaki, R. Nakamura, T. Hori, and K. Takakura, “Computer Assisted Neurosurgery,” International Journal of Computer Assisted Radiology and Surgery, vol. 1, pp. 293-310, 2006.
    [Bibtex]
    @ARTICLE{Isekia2006,
      author = {Iseki, H. and Muragaki, Y. and Nakamura, R. and Hori, T. and
      Takakura, K.},
      title = {Computer Assisted Neurosurgery},
      journal = {International Journal of Computer Assisted Radiology and Surgery},
      year = {2006},
      volume = {1},
      pages = {293 - 310},
      file = {Isekia2006.pdf:Isekia2006.pdf:PDF},
      issn = {1861-6410},
      owner = {thomaskroes},
      publisher = {Springer},
      timestamp = {2011.01.11}
    }
  • M. Jakopec, R. y Baena, S. J. Harris, P. Gomes, J. Cobb, and B. L. Davies, “The hands-on orthopaedic robot,” Robotics and Automation, IEEE Transactions on, vol. 19, iss. 5, pp. 902-911, 2003.
    [Bibtex]
    @ARTICLE{Jakopec2003,
      author = {Jakopec, M. and y Baena, R. and Harris, S.J. and Gomes, P. and Cobb,
      J. and Davies, B.L.},
      title = {The hands-on orthopaedic robot},
      journal = {Robotics and Automation, IEEE Transactions on},
      year = {2003},
      volume = {19},
      pages = {902 - 911},
      number = {5},
      file = {Jakopec2003.pdf:Jakopec2003.pdf:PDF},
      issn = {1042-296X},
      keywords = {APP, GUI, OTS},
      owner = {Thomas},
      publisher = {IEEE},
      timestamp = {2011.02.28}
    }
  • P. Jannin and W. Korb, “Assessment of Image-Guided Interventions,” in Image-Guided Interventions, T. Peters and K. Cleary, Eds., Springer US, 2008, pp. 531-549.
    [Bibtex]
    @INCOLLECTION{Jannin2008,
      author = {Jannin, Pierre and Korb, Werner},
      title = {Assessment of Image-Guided Interventions},
      booktitle = {Image-Guided Interventions},
      publisher = {Springer US},
      year = {2008},
      editor = {Peters, Terry and Cleary, Kevin},
      pages = {531 - 549},
      note = {Chapter 18},
      abstract = {Assessment of systems and procedures in image-guided interventions
      (IGI) is crucial but complex, and addresses diverse aspects. This
      chapter introduces a framework for dealing with this complexity and
      diversity, and is based on some of the major related concepts in
      health care. Six assessment levels are distinguished in IGI. The
      main phases and components of assessment methodology are described
      with an emphasis on the specification and the reporting phases, and
      on the clear initial formulation of the assessment objective. The
      methodology is presented in a systematic order to allow interinstitutional
      comparison. Finally, we outline the need for standardization in IGI
      assessment to improve the quality of systems, their acceptance by
      surgeons, and facilitate their transfer from research to clinical
      practice.},
      affiliation = {INSERM, Faculté de Médecine CS Rennes France},
      file = {Jannin2008.pdf:Jannin2008.pdf:PDF},
      isbn = {978-0-387-73858-1},
      keyword = {Engineering},
      keywords = {REV},
      owner = {Thomas},
      timestamp = {2011.02.24}
    }
  • Y. Jianxi, Y. Bingqian, and C. Fengkui, “Design of a Computer Aided Surgical Navigation system based on C-arm,” in Automation and Logistics, 2008. ICAL 2008. IEEE International Conference on, 2008, pp. 73-76.
    [Bibtex]
    @INPROCEEDINGS{Jianxi2008,
      author = {Yang Jianxi and Yang Bingqian and Cui Fengkui},
      title = {Design of a Computer Aided Surgical Navigation system based on C-arm},
      booktitle = {Automation and Logistics, 2008. ICAL 2008. IEEE International Conference
      on},
      year = {2008},
      pages = {73 -76},
      month = {September},
      abstract = {Though the conventional C-arm fluoroscope has been used in bone
      surgeries for decades, two outstanding disadvantages remain. One is
      that it is not well adapted to image-guided orthopaedic surgery procedures.
      The other is that the exposure of patients and surgeons to radiation
      is harmful. At present, computer aided surgical navigation systems
      are increasingly used in surgery, but they are only connected to MRI
      (magnetic resonance imaging) or CT (computerised tomography). This
      research addresses these issues. Navigation using the C-arm and a
      binocular spatial position system, with the help of the computer,
      is studied in our laboratory. The new system is called CASNC (Computer
      Aided Surgical Navigation system based on C-arm). As a specific example,
      determining the position of the distal interlocking of a tibial or
      femoral intramedullary nail with the CASNC system is also a main point
      of this research. The positions of the C-arm and the patient are detected
      at the same time as a preoperative patient X-ray fluoroscopic image
      is taken, and are transferred to the computer. The system then establishes
      a correlation between the preoperative image data and the intraoperative
      positions of the patient and the surgical tools. Finally, the deviation
      of the spatial position of the hole in the nail from the surgical
      tools is shown on the monitor, illustrating the correct position and
      orientation. This system not only maximises the advantages of the
      conventional C-arm and the accuracy of drilling, but also reduces
      the exposure of the surgeon, the staff in the operating theatre, and
      especially the patient to X-ray radiation.},
      file = {:Jianxi2008.pdf:PDF},
      keywords = {binocular spatial position system;bone surgery;computer aided surgical
      navigation system;computerised tomography;conventional C-arm fluoroscope;femoral
      intramedullary nail;image-guided orthopaedic surgery;magnetic resonance
      imaging;patient X-ray fluoroscope;biomedical MRI;bone;computerised
      tomography;diagnostic radiography;medical robotics;orthopaedics;surgery;},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • C. R. Johnson and D. M. Weinstein, “Biomedical computing and visualization,” in Proceedings of the 29th Australasian Computer Science Conference – Volume 48, Darlinghurst, Australia, 2006, pp. 3-10.
    [Bibtex]
    @INPROCEEDINGS{Johnson2006,
      author = {Johnson, Chris R. and Weinstein, David M.},
      title = {Biomedical computing and visualization},
      booktitle = {Proceedings of the 29th Australasian Computer Science Conference
      - Volume 48},
      year = {2006},
      series = {ACSC '06},
      pages = {3 - 10},
      address = {Darlinghurst, Australia},
      publisher = {Australian Computer Society, Inc.},
      acmid = {1151700},
      file = {Johnson2006.pdf:Johnson2006.pdf:PDF},
      isbn = {1-920682-30-9},
      keywords = {biomedical computing, imaging, problem solving environment, visualization,
      REV, NES},
      location = {Hobart, Australia},
      numpages = {8},
      owner = {thomaskroes},
      timestamp = {2011.01.25}
    }
  • M. E. Johnston, K. B. Langton, R. B. Haynes, and A. Mathieu, “Effects of computer-based clinical decision support systems on clinician performance and patient outcome: a critical appraisal of research,” Annals of internal medicine, vol. 120, iss. 2, p. 135, 1994.
    [Bibtex]
    @ARTICLE{Johnston1994,
      author = {Johnston, M.E. and Langton, K.B. and Haynes, R.B. and Mathieu, A.},
      title = {Effects of computer-based clinical decision support systems on clinician
      performance and patient outcome: a critical appraisal of research},
      journal = {Annals of internal medicine},
      year = {1994},
      volume = {120},
      pages = {135},
      number = {2},
      issn = {0003-4819},
      owner = {Thomas},
      publisher = {Am Coll Physicians},
      timestamp = {2011.02.03}
    }
  • G. R. Joldes, A. Wittek, and K. Miller, “Suite of finite element algorithms for accurate computation of soft tissue deformation for surgical simulation,” Medical Image Analysis, 2008.
    [Bibtex]
    @ARTICLE{Joldes2008,
      author = {Joldes, G.R. and Wittek, A. and Miller, K.},
      title = {Suite of finite element algorithms for accurate computation of soft
      tissue deformation for surgical simulation},
      journal = {Medical Image Analysis},
      year = {2008},
      file = {Joldes2008.pdf:Joldes2008.pdf:PDF},
      keywords = {TEC},
      owner = {Thomas},
      publisher = {Elsevier},
      timestamp = {2011.02.23}
    }
  • G. R. Joldes, A. Wittek, and K. Miller, “Real-time nonlinear finite element computations on GPU – Application to neurosurgical simulation,” Computer Methods in Applied Mechanics and Engineering, 2010.
    [Bibtex]
    @ARTICLE{Joldes2010,
      author = {Joldes, Grand Roman and Wittek, Adam and Miller, Karol},
      title = {Real-time nonlinear finite element computations on GPU - Application
      to neurosurgical simulation},
      journal = {Computer Methods in Applied Mechanics and Engineering},
      year = {2010},
      month = {July},
      abstract = {Application of biomechanical modeling techniques in the area of medical
      image analysis and surgical simulation implies two conflicting requirements:
      accurate results and high solution speeds. Accurate results can be
      obtained only by using appropriate models and solution algorithms.
      In our previous papers we have presented algorithms and solution
      methods for performing accurate nonlinear finite element analysis
      of brain shift (which includes mixed mesh, different non-linear material
      models, finite deformations and brain-skull contacts) in less than
      a minute on a personal computer for models having up to 50,000 degrees
      of freedom. In this paper we present an implementation of our algorithms
      on a Graphics Processing Unit (GPU) using the new NVIDIA Compute
      Unified Device Architecture (CUDA) which leads to more than 20 times
      increase in the computation speed. This makes possible the use of
      meshes with more elements, which better represent the geometry, are
      easier to generate, and provide more accurate results.},
      file = {Joldes2010.pdf:Joldes2010.pdf:PDF},
      issn = {00457825},
      keywords = {biomechanical models,non-rigid image registration, TEC, NES, GPU},
      owner = {thomaskroes},
      publisher = {Elsevier B.V.},
      timestamp = {2010.10.25}
    }
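    The Joldes et al. entry above describes porting explicit nonlinear finite element computations to the GPU with CUDA. As a rough, hypothetical illustration of why such solvers parallelize well, and not the authors' implementation, the Python/NumPy sketch below shows a lumped-mass explicit time step; the linear restoring force and all names and numbers are placeholders.

        # Minimal sketch (not the paper's code): explicit, lumped-mass time stepping of a
        # deformable model. The per-node update is embarrassingly parallel, which is why
        # such solvers map well onto GPUs (the paper uses NVIDIA CUDA). The linear
        # internal-force placeholder and all constants below are illustrative assumptions.
        import numpy as np

        def step(u, v, m, f_ext, internal_force, dt, damping=0.05):
            """Advance nodal displacements u and velocities v by one explicit step."""
            f = f_ext - internal_force(u) - damping * v   # net nodal force
            a = f / m[:, None]                            # lumped mass -> per-node acceleration
            v_new = v + dt * a
            u_new = u + dt * v_new
            return u_new, v_new

        # Toy example: 100 nodes in 3D with a purely illustrative linear restoring force.
        n = 100
        rng = np.random.default_rng(0)
        u, v = np.zeros((n, 3)), np.zeros((n, 3))
        m = np.ones(n)
        f_ext = rng.normal(scale=1e-3, size=(n, 3))
        stiffness = 10.0  # stand-in; a real solver assembles nonlinear element forces
        for _ in range(1000):
            u, v = step(u, v, m, f_ext, lambda x: stiffness * x, dt=1e-3)
        print("max displacement:", np.abs(u).max())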
  • F. Jolesz, N. McDannold, G. Clement, M. Kinoshita, F. Fennessy, and C. Tempany, “MRI-Guided FUS and its Clinical Applications,” Image-guided interventions: technology and applications, p. 275, 2008.
    [Bibtex]
    @ARTICLE{Jolesz2008,
      author = {Jolesz, F. and McDannold, N. and Clement, G. and Kinoshita, M. and
      Fennessy, F. and Tempany, C.},
      title = {MRI-Guided FUS and its Clinical Applications},
      journal = {Image-guided interventions: technology and applications},
      year = {2008},
      pages = {275},
      note = {Chapter 10},
      file = {Jolesz2008.pdf:Jolesz2008.pdf:PDF},
      isbn = {0387738568},
      owner = {Thomas},
      publisher = {Springer Verlag},
      timestamp = {2011.04.12}
    }
  • F. A. Jolesz, A. Nabavi, and R. Kikinis, “Integration of interventional MRI with computer-assisted surgery,” Journal of Magnetic Resonance Imaging, vol. 13, iss. 1, pp. 69-77, 2001.
    [Bibtex]
    @ARTICLE{Jolesz2001,
      author = {Jolesz, F.A. and Nabavi, A. and Kikinis, R.},
      title = {Integration of interventional MRI with computer-assisted surgery},
      journal = {Journal of Magnetic Resonance Imaging},
      year = {2001},
      volume = {13},
      pages = {69 - 77},
      number = {1},
      file = {Jolesz2001.pdf:Jolesz2001.pdf:PDF},
      issn = {1522-2586},
      owner = {thomaskroes},
      publisher = {Wiley Online Library},
      timestamp = {2011.01.11}
    }
  • A. Joshi, D. Scheinost, K. Vives, D. Spencer, L. Staib, and X. Papademetris, “Novel interaction techniques for neurosurgical planning and stereotactic navigation,” Visualization and Computer Graphics, IEEE Transactions on, vol. 14, iss. 6, pp. 1587-1594, 2008.
    [Bibtex]
    @ARTICLE{Joshi2008,
      author = {Joshi, A. and Scheinost, D. and Vives, K. and Spencer, D. and Staib,
      L. and Papademetris, X.},
      title = {Novel interaction techniques for neurosurgical planning and stereotactic
      navigation},
      journal = {Visualization and Computer Graphics, IEEE Transactions on},
      year = {2008},
      volume = {14},
      pages = {1587 -1594},
      number = {6},
      month = {November - December},
      abstract = {Neurosurgical planning and image guided neurosurgery require the visualization
      of multimodal data obtained from various functional and structural
      image modalities, such as magnetic resonance imaging (MRI), computed
      tomography (CT), functional MRI, Single photon emission computed
      tomography (SPECT) and so on. In the case of epilepsy neurosurgery
      for example, these images are used to identify brain regions to guide
      intracranial electrode implantation and resection. Generally, such
      data is visualized using 2D slices and in some cases using a 3D volume
      rendering along with the functional imaging results. Visualizing
      the activation region effectively by still preserving sufficient
      surrounding brain regions for context is exceedingly important to
      neurologists and surgeons. We present novel interaction techniques
      for visualization of multimodal data to facilitate improved exploration
      and planning for neurosurgery. We extended the line widget from VTK
      to allow surgeons to control the shape of the region of the brain
      that they can visually crop away during exploration and surgery.
      We allow simple spherical, cubical, ellipsoidal and cylindrical (probe
      aligned cuts) for exploration purposes. In addition we integrate
      the cropping tool with the image-guided navigation system used for
      epilepsy neurosurgery. We are currently investigating the use of
      these new tools in surgical planning and based on further feedback
      from our neurosurgeons we will integrate them into the setup used
      for image-guided neurosurgery.},
      file = {:Joshi2008.pdf:PDF},
      issn = {1077-2626},
      keywords = {3D volume rendering;functional MRI;image guided neurosurgery;image-guided
      neurosurgery;magnetic resonance imaging;multimodal data visualization;neurosurgeons;neurosurgical
      planning;single photon emission computed tomography;stereotactic
      navigation;structural image modalities;data visualisation;medical
      computing;rendering (computer graphics);surgery;Computer Graphics;Computer
      Simulation;Humans;Imaging, Three-Dimensional;Models, Neurological;Stereotaxic
      Techniques;Surgery, Computer-Assisted;User-Computer Interface;},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
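    The Joshi et al. entry above extends VTK's line widget so that spherical, cubical, ellipsoidal, or cylindrical regions can be cropped away from the rendered anatomy. As a rough sketch of just the underlying cropping step, not the paper's interactive widget, the following Python/VTK snippet clips a placeholder surface with a spherical implicit function; the vtkSphereSource stand-in and all coordinates are assumptions.

        # Hedged sketch of the basic cropping operation: clip a surface with a spherical
        # implicit function using the standard VTK pipeline. A vtkSphereSource stands in
        # for a reconstructed cortical surface; the paper's line-widget interaction and
        # the other cut shapes are not reproduced here.
        import vtk

        surface = vtk.vtkSphereSource()          # placeholder for the anatomy surface
        surface.SetRadius(50.0)
        surface.SetThetaResolution(64)
        surface.SetPhiResolution(64)

        crop = vtk.vtkSphere()                   # spherical cropping region
        crop.SetCenter(25.0, 0.0, 0.0)
        crop.SetRadius(30.0)

        clipper = vtk.vtkClipPolyData()
        clipper.SetInputConnection(surface.GetOutputPort())
        clipper.SetClipFunction(crop)
        clipper.InsideOutOff()                   # keep geometry outside the crop sphere
        clipper.Update()

        print("remaining cells:", clipper.GetOutput().GetNumberOfCells())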
  • L. Joskowicz and R. H. Taylor, “Computers in imaging and guided surgery,” Computing in Science Engineering, vol. 3, iss. 5, pp. 65-72, 2001.
    [Bibtex]
    @ARTICLE{Joskowicz2001,
      author = {Joskowicz, L. and Taylor, R.H.},
      title = {Computers in imaging and guided surgery},
      journal = {Computing in Science Engineering},
      year = {2001},
      volume = {3},
      pages = {65 - 72},
      number = {5},
      abstract = {The authors review the main technical issues in computer-integrated
      surgery (CIS) systems. They illustrate with examples of working systems
      the state of the art in the field and provide perspectives on deployment
      and future developments. They discuss the structure of CIS systems.
      At the core is a computer (or network of computers) running various
      modeling and analysis processes, including image and sensor processing,
      creation and manipulation of patient-specific anatomical models,
      surgical planning, visualization, monitoring, and control of surgical
      processes. After receiving information about the patient from medical
      imaging devices, some CIS systems act directly on the patient using
      specialized robots or other computer controlled therapy devices},
      file = {Joskowicz2001.pdf:Joskowicz2001.pdf:PDF},
      issn = {0740-7475},
      keywords = {computer-integrated surgery systems;data visualization;future development;medical
      image processing;medical robots;patient-specific anatomical models;sensor
      processing;surgical planning;medical image processing;medical robotics;surgery;},
      owner = {thomaskroes},
      timestamp = {2011.01.25}
    }
  • L. Joskowicz, L. Tockus, Z. Yaniv, A. Simkin, and C. Milgrom, “Computer-aided image-guided bone fracture surgery: concept and implementation,” Computer Aided Surgery, vol. 3, iss. 6, pp. 271-288, 1998.
    [Bibtex]
    @ARTICLE{Joskowicz1998,
      author = {Joskowicz, L. and Tockus, L. and Yaniv, Z. and Simkin, A. and Milgrom,
      C.},
      title = {Computer-aided image-guided bone fracture surgery: concept and implementation},
      journal = {Computer Aided Surgery},
      year = {1998},
      volume = {3},
      pages = {271 - 288},
      number = {6},
      file = {Joskowicz1998.pdf:Joskowicz1998.pdf:PDF},
      keywords = {APP, PLA, OTS, SUR},
      owner = {thomaskroes},
      timestamp = {2011.01.18}
    }
  • P. Kasten, M. Maier, O. Rettig, P. Raiss, S. Wolf, and M. Loew, “Proprioception in total, hemi- and reverse shoulder arthroplasty in 3D motion analyses: a prospective study.,” International orthopaedics, vol. 33, iss. 6, pp. 1641-7, 2009.
    [Bibtex]
    @ARTICLE{Kasten2009,
      author = {Kasten, Philip and Maier, Michael and Rettig, Oliver and Raiss, Patric
      and Wolf, Sebastian and Loew, Markus},
      title = {Proprioception in total, hemi- and reverse shoulder arthroplasty
      in 3D motion analyses: a prospective study.},
      journal = {International orthopaedics},
      year = {2009},
      volume = {33},
      pages = {1641-7},
      number = {6},
      month = {December},
      abstract = {The aim of the study was to assess proprioception after shoulder arthroplasty.
      Twenty-six patients were enrolled who underwent total shoulder arthroplasty
      (TSA) (n = 13) or hemi-arthroplasty (n = 8) for shoulder osteoarthritis
      or reversed arthroplasty (n = 5) for cuff tear arthropathy. All patients
      were examined before the operation and then again six months thereafter
      in a motion analysis study with an active angle-reproduction (AAR)
      test. In all groups the AAR deteriorated at 60 degrees flexion (from
      5.5 degrees [SD 2.8] to 7.6 degrees [SD 2.7]; p = 0.007) and at 30
      degrees external rotation (ER) (from 6.5 degrees [SD 3.6] to 7.3
      degrees [SD 4.8 degrees]; p = 0.023) six months after surgery. In
      the subgroup of TSA, there was deterioration at 30 degrees ER (p
      = 0.036). Otherwise, there were no significant changes within or
      among the subgroups. Proprioception, assessed by the AAR test, remained
      unchanged or deteriorated six months after shoulder arthroplasty.
      This might be related to the reduced pain or to the relatively short
      follow-up period.},
      file = {Kasten2009.pdf:Kasten2009.pdf:PDF},
      issn = {1432-5195},
      keywords = {Aged,Arthralgia,Arthralgia: physiopathology,Arthroplasty, Replacement,Arthroplasty,
      Replacement: methods,Arthroplasty, Replacement: rehabilitation,Female,Follow-Up
      Studies,Humans,Imaging, Three-Dimensional,Imaging, Three-Dimensional:
      methods,Joint Prosthesis,Male,Middle Aged,Osteoarthritis,Osteoarthritis:
      rehabilitation,Osteoarthritis: surgery,Proprioception,Proprioception:
      physiology,Prospective Studies,Range of Motion, Articular,Range of
      Motion, Articular: physiology,Rotator Cuff,Rotator Cuff: injuries,Rotator
      Cuff: surgery,Shoulder Joint,Shoulder Joint: physiology,Shoulder
      Joint: surgery,Time Factors},
      owner = {thomaskroes},
      pmid = {18956186},
      timestamp = {2010.10.22}
    }
  • S. Katsuragawa and K. Doi, “Computer-aided diagnosis in chest radiography,” Computerized Medical Imaging and Graphics, vol. 31, iss. 4-5, pp. 212-223, 2007.
    [Bibtex]
    @ARTICLE{Katsuragawa2007,
      author = {Shigehiko Katsuragawa and Kunio Doi},
      title = {Computer-aided diagnosis in chest radiography},
      journal = {Computerized Medical Imaging and Graphics},
      year = {2007},
      volume = {31},
      pages = {212 - 223},
      number = {4-5},
      abstract = {We have developed computer-aided diagnosis (CAD) schemes for the detection
      of lung nodules, interstitial lung diseases, interval changes, and
      asymmetric opacities, and also for the differential diagnosis of
      lung nodules and interstitial lung diseases on chest radiographs.
      Observer performance studies indicate clearly that radiologists'
      diagnostic accuracy was improved significantly when radiologists
      used a computer output in their interpretations of chest radiographs.
      In addition, the automated recognition methods for the patient and
      the projection view by use of chest radiographs were useful for integrating
      the chest CAD schemes into the picture-archiving and communication
      system (PACS).},
      file = {Katsuragawa2007.pdf:Katsuragawa2007.pdf:PDF},
      issn = {0895-6111},
      keywords = {Computer-aided diagnosis},
      owner = {thomaskroes},
      timestamp = {2011.01.06}
    }
  • H. Kawachi, Y. Kawachi, C. Ikeda, R. Takagi, A. Katakura, and T. Shibahara, “Oral and Maxillofacial Surgery with Computer-assisted Navigation System,” The Bulletin of Tokyo Dental College, vol. 51, iss. 1, pp. 35-39, 2010.
    [Bibtex]
    @ARTICLE{Kawachi2010,
      author = {Kawachi, H. and Kawachi, Y. and Ikeda, C. and Takagi, R. and Katakura,
      A. and Shibahara, T.},
      title = {Oral and Maxillofacial Surgery with Computer-assisted Navigation
      System},
      journal = {The Bulletin of Tokyo Dental College},
      year = {2010},
      volume = {51},
      pages = {35 - 39},
      number = {1},
      abstract = {Intraoperative computer-assisted navigation has gained acceptance
      in maxillofacial surgery with applications in an increasing number
      of indications. We adapted a commercially available wireless passive
      marker system which allows calibration and tracking of virtually
      every instrument in maxillofacial surgery. Virtual computer-generated
      anatomical structures are displayed intraoperatively in a semi-immersive
      head-up display. Continuous observation of the operating field facilitated
      by computer assistance enables surgical navigation in accordance
      with the physician's preoperative plans. This case report documents
      the potential for augmented visualization concepts in surgical resection
      of tumors in the oral and maxillofacial region. We report a case
      of T3N2bM0 carcinoma of the maxillary gingival which was surgically
      resected with the assistance of the Stryker Navigation Cart System.
      This system was found to be useful in assisting preoperative planning
      and intraoperative monitoring.},
      file = {Kawachi2010.pdf:Kawachi2010.pdf:PDF},
      issn = {0040-8891},
      keywords = {APP, CMS, GUI, PLA, SUR},
      owner = {thomaskroes},
      publisher = {J-STAGE},
      timestamp = {2010.11.08}
    }
  • E. Keeve, S. Girod, P. Pfeifle, and B. Girod, “Anatomy-Based Facial Tissue Modeling Using the Finite Element Method,” 1996.
    [Bibtex]
    @ARTICLE{Keeve1996b,
      author = {Keeve, Erwin and Girod, Sabine and Pfeifle, Paula and Girod, Bernd},
      title = {Anatomy-Based Facial Tissue Modeling Using the Finite Element Method},
      year = {1996},
      abstract = {Anatomy-based facial tissue modeling for surgical simulation is a
      field whose time has come. Real-time facial animation has been created
      in the last few years using models based on the anatomical structure
      of the human skin. Anatomy-based models are also under development
      in the field of medical visualization, with which facial surgery
      can be realistically simulated. In this article we present an anatomy-based
      3D finite element tissue model. Integrated into a computer-aided
      surgical planning system this model allows the precise prediction
      of soft tissue changes resulting from the realignment of the underlying
      bone structure. The model has already been used in our Department
      of Oral and Maxillofacial Surgery and has improved craniofacial surgical
      planning procedures. The model is described in detail and surgical
      simulation results are shown and discussed.},
      file = {Keeve1996b.pdf:Keeve1996b.pdf:PDF},
      keywords = {computer-aided surgery,finite element method,human facial modeling,surgery
      planning and simulation, TEC},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • E. Keeve, T. Jansen, Z. Krol, L. Ritter, B. V. Rymon-lipinski, R. Sader, and H. Zeilhofer, “JULIUS – An Extendable Software Framework for Surgical Planning and Image-Guided Navigation,” Computing, pp. 1336-1337, 2001.
    [Bibtex]
    @ARTICLE{Keeve2001,
      author = {Keeve, Erwin and Jansen, Thomas and Krol, Zdzislaw and Ritter, Lutz
      and Rymon-lipinski, Bartosz Von and Sader, Robert and Zeilhofer,
      Hans-florian},
      title = {JULIUS - An Extendable Software Framework for Surgical Planning and
      Image-Guided Navigation},
      journal = {Computing},
      year = {2001},
      pages = {1336-1337},
      abstract = {In this paper we introduce the extendable and cross-platform software
      framework JULIUS, which will become publicly available by the end of
      this year. JULIUS consists of three conceptual layers and provides
      diverse assistance for medical visualization, surgical planning and
      image-guided navigation. The system features a modular and portable
      design and combines both pre-operative planning and intra-operative
      assistance within one single environment.},
      file = {Keeve2001.pdf:Keeve2001.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • D. Kendoff, M. Citak, T. Hüfner, S. Chaudhary, and C. Krettek, “Current concepts and applications of computer navigation in orthopedic trauma surgery,” Central European Journal of Medicine, vol. 2, pp. 392-403, 2007.
    [Bibtex]
    @ARTICLE{Kendoff2007,
      author = {Kendoff, D. and Citak, M. and Hüfner, T. and Chaudhary, S. and Krettek,
      C.},
      title = {Current concepts and applications of computer navigation in orthopedic
      trauma surgery},
      journal = {Central European Journal of Medicine},
      year = {2007},
      volume = {2},
      pages = {392 - 403},
      abstract = {Navigation has become widely integrated into regular endoprosthetic
      procedures, but clinical use of navigation systems in orthopaedic
      trauma has only been implemented in a few indications. Navigation
      systems enable an accuracy of 1 mm or 1 degree. Navigation can achieve
      higher precision when it is combined with different imaging modalities,
      including preoperative computer tomography (CT), intraoperative CT,
      two-dimensional fluoroscopy, and, recently, intraoperative three-dimensional
      fluoroscopy. The precision of the navigation system can be influenced
      by the surgeon as well as by the camera system, type of reference
      marker, and the registration process. Recent developments in orthopedic
      trauma navigation allow for bilateral femoral anteversion measurements,
      noninvasive registration of an uninjured thigh, and intraoperative
      three-dimensional fluoroscopy-based pedicle screw placement. Although
      the use of navigation has provided initial positive results in trauma
      care, prospective clinical studies remain to be performed.},
      affiliation = {Hannover Medical School Trauma Department 30625 Germany},
      file = {Kendoff2007.pdf:Kendoff2007.pdf:PDF},
      issn = {1895-1058},
      issue = {4},
      keyword = {Medicine},
      owner = {Thomas},
      publisher = {Versita, co-published with Springer-Verlag GmbH},
      timestamp = {2011.02.03}
    }
  • A. Kerdok, “Truth cube: Establishing physical standards for soft tissue simulation,” Medical Image Analysis, vol. 7, iss. 3, pp. 283-291, 2003.
    [Bibtex]
    @ARTICLE{Kerdok2003,
      author = {Kerdok, A.},
      title = {Truth cube: Establishing physical standards for soft tissue simulation},
      journal = {Medical Image Analysis},
      year = {2003},
      volume = {7},
      pages = {283-291},
      number = {3},
      month = {September},
      abstract = {Accurate real-time models of soft tissue behavior are key elements
      in medical simulation systems. The need for fast computation in these
      simulations, however, often requires simplifications that limit deformation
      accuracy. Validation of these simplified models remains a challenge.
      Currently, real-time modeling is at best validated against finite
      element models that have their own intrinsic limitations. This study
      develops a physical standard to validate real-time soft tissue deformation
      models. We took CT images of a cube of silicone rubber with a pattern
      of embedded Teflon spheres that underwent uniaxial compression and
      spherical indentation tests. The known material properties, geometry
      and controlled boundary conditions resulted in a complete set of
      volumetric displacement data. The results were compared to a finite
      element model analysis of identical situations. This work has served
      as a proof of concept for a robust physical standard for use in validating
      soft tissue models.},
      file = {Kerdok2003.pdf:Kerdok2003.pdf:PDF},
      issn = {13618415},
      keywords = {model validation,physical standard,real-time models,soft tissue mechanics,surgical
      simulation, TEC},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
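    The "truth cube" entry above validates soft-tissue deformation models by comparing the measured displacements of embedded markers against model predictions. A minimal Python sketch of that comparison is given below; the arrays are synthetic stand-ins, not the study's CT-derived bead data.

        # Minimal sketch of the validation idea: compare measured marker displacements
        # against model-predicted ones and summarise the per-marker error. The arrays
        # below are synthetic placeholders, not data from the study.
        import numpy as np

        rng = np.random.default_rng(1)
        measured = rng.normal(size=(343, 3))            # e.g. a 7x7x7 grid of bead displacements (mm)
        predicted = measured + rng.normal(scale=0.1, size=measured.shape)  # model output with error

        errors = np.linalg.norm(predicted - measured, axis=1)   # per-bead Euclidean error (mm)
        print(f"mean {errors.mean():.3f} mm, "
              f"RMS {np.sqrt((errors ** 2).mean()):.3f} mm, "
              f"95th percentile {np.percentile(errors, 95):.3f} mm")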
  • C. Kermer, A. Lindner, I. Friede, A. Wagner, and W. Millesi, “Preoperative stereolithographic model planning for primary reconstruction in craniomaxillofacial trauma surgery,” Journal of Cranio-maxillofacial Surgery, vol. 26, iss. 3, pp. 136-139, 1998.
    [Bibtex]
    @ARTICLE{Kermer1998a,
      author = {Kermer, C. and Lindner, A. and Friede, I. and Wagner, A. and Millesi,
      W.},
      title = {Preoperative stereolithographic model planning for primary reconstruction
      in craniomaxillofacial trauma surgery},
      journal = {Journal of Cranio-maxillofacial Surgery},
      year = {1998},
      volume = {26},
      pages = {136 - 139},
      number = {3},
      file = {Kermer1998a.pdf:Kermer1998a.pdf:PDF},
      issn = {1010-5182},
      keywords = {APP, RPP, CMS, PLA},
      owner = {thomaskroes},
      publisher = {Elsevier},
      timestamp = {2011.01.10}
    }
  • C. Kermer, M. Rasse, G. Lagogiannis, G. Undt, A. Wagner, and W. Millesi, “Colour stereolithography for planning complex maxillofacial tumour surgery,” Journal of Cranio-maxillofacial Surgery, vol. 26, iss. 6, pp. 360-362, 1998.
    [Bibtex]
    @ARTICLE{Kermer1998b,
      author = {Kermer, C. and Rasse, M. and Lagogiannis, G. and Undt, G. and Wagner,
      A. and Millesi, W.},
      title = {Colour stereolithography for planning complex maxillofacial tumour
      surgery},
      journal = {Journal of Cranio-maxillofacial Surgery},
      year = {1998},
      volume = {26},
      pages = {360 - 362},
      number = {6},
      file = {Kermer1998b.pdf:Kermer1998b.pdf:PDF},
      issn = {1010-5182},
      keywords = {APP, CMS, PLA},
      owner = {thomaskroes},
      publisher = {Elsevier},
      timestamp = {2011.01.10}
    }
  • [DOI] M. Kersten-Oertel, P. Jannin, and L. D. Collins, “DVV: A Taxonomy for Mixed Reality Visualization in Image Guided Surgery,” IEEE Transactions on Visualization and Computer Graphics, vol. 99, iss. PrePrints, 2011.
    [Bibtex]
    @ARTICLE{Oertel2011,
      author = {Marta Kersten-Oertel and Pierre Jannin and D. Louis Collins},
      title = {DVV: A Taxonomy for Mixed Reality Visualization in Image Guided Surgery},
      journal = {IEEE Transactions on Visualization and Computer Graphics},
      year = {2011},
      volume = {99},
      number = {PrePrints},
      address = {Los Alamitos, CA, USA},
      doi = {http://doi.ieeecomputersociety.org/10.1109/TVCG.2011.50},
      issn = {1077-2626},
      keywords = {REV},
      publisher = {IEEE Computer Society}
    }
  • R. Khadem, C. C. Yeh, M. Sadeghi-Tehrani, M. R. Bax, J. A. Johnson, J. N. Welch, E. P. Wilkinson, and R. Shahidi, “Comparative tracking error analysis of five different optical tracking systems,” Computer Aided Surgery, vol. 5, iss. 2, pp. 98-107, 2000.
    [Bibtex]
    @ARTICLE{Khadem2000,
      author = {Khadem, Rasool and Yeh, Clement C. and Sadeghi-Tehrani, Mohammad
      and Bax, Michael R. and Johnson, Jeremy A. and Welch, Jacqueline
      Nerney and Wilkinson, Eric P. and Shahidi, Ramin},
      title = {Comparative tracking error analysis of five different optical tracking
      systems},
      journal = {Computer Aided Surgery},
      year = {2000},
      volume = {5},
      pages = {98 - 107},
      number = {2},
      abstract = {Objective: Effective utilization of an optical tracking system
      for image-based surgical guidance requires optimal placement of the
      dynamic reference frame (DRF) with respect to the tracking camera.
      Unlike other studies that measure the overall accuracy of a particular
      navigation system, this study investigates the precision of one component
      of the navigation system: the optical tracking system (OTS). The
      precision of OTS measurements is quantified as jitter. By measuring
      jitter, one can better understand how system inaccuracies depend
      on the position of the DRF with respect to the camera. Materials and
      Methods: Both FlashPoint™ (Image Guided Technologies, Inc., Boulder,
      Colorado) and Polaris™ (Northern Digital Inc., Ontario, Canada)
      optical tracking systems were tested in five different camera and
      DRF configurations. A linear testing apparatus with a software interface
      was designed to facilitate data collection. Jitter measurements were
      collected over a single quadrant within the camera viewing volume,
      as symmetry was assumed about the horizontal and vertical axes. Results:
      Excluding the highest 5% of jitter, the FlashPoint cameras had an
      RMS jitter range of 0.028 ± 0.012 mm for the 300 mm model, 0.051
      ± 0.038 mm for the 580 mm model, and 0.059 ± 0.047 mm for the 1
      m model. The Polaris camera had an RMS jitter range of 0.058 ± 0.037
      mm with an active DRF and 0.115 ± 0.075 mm with a passive DRF. Conclusion:
      Both FlashPoint and Polaris have jitter less than 0.11 mm, although
      the error distributions differ significantly. Total jitter for all
      systems is dominated by the component measured in the axis directed
      away from the camera.},
      file = {Khadem2000.pdf:Khadem2000.pdf:PDF},
      issn = {1097-0150},
      keywords = {optical tracking system, tracking accuracy, image-guided surgery,
      stereotactic surgery},
      owner = {Thomas},
      publisher = {John Wiley \& Sons, Inc.},
      timestamp = {2011.02.17}
    }
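    The Khadem et al. entry above quantifies optical tracker precision as jitter, reporting RMS values after excluding the highest 5% of samples. The Python sketch below shows that computation on synthetic position samples of a static reference frame; the numbers and array names are illustrative assumptions, not the study's measurements.

        # Hedged sketch: RMS jitter of repeated position samples of a static reference
        # frame, excluding the highest 5% of deviations as in the reported figures.
        # All data are synthetic; units are millimetres.
        import numpy as np

        rng = np.random.default_rng(2)
        samples = rng.normal(loc=[100.0, 50.0, 1200.0], scale=0.03, size=(5000, 3))

        deviations = np.linalg.norm(samples - samples.mean(axis=0), axis=1)
        kept = deviations[deviations <= np.percentile(deviations, 95)]   # drop highest 5%
        rms_jitter = np.sqrt(np.mean(kept ** 2))
        print(f"RMS jitter (lowest 95% of samples): {rms_jitter:.3f} mm")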
  • R. Kikinis, P. L. Gleason, T. M. Moriarty, M. R. Moore, E. Alexander III, P. E. Stieg, M. Matsumae, W. E. Lorensen, H. E. Cline, P. M. L. Black, and others, “Computer-assisted interactive three-dimensional planning for neurosurgical procedures,” Neurosurgery, vol. 38, iss. 4, p. 640, 1996.
    [Bibtex]
    @ARTICLE{Kikinis1996,
      author = {Kikinis, R. and Gleason, P.L. and Moriarty, T.M. and Moore, M.R.
      and Alexander III, E. and Stieg, P.E. and Matsumae, M. and Lorensen,
      W.E. and Cline, H.E. and Black, P.M.L. and others},
      title = {Computer-assisted interactive three-dimensional planning for neurosurgical
      procedures},
      journal = {Neurosurgery},
      year = {1996},
      volume = {38},
      pages = {640},
      number = {4},
      issn = {0148-396X},
      keywords = {APP, PLA, NES},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
  • H. Kim, P. Jürgens, L. Nolte, and M. Reyes, “Anatomically-Driven Soft-Tissue Simulation Strategy for Cranio-Maxillofacial Surgery Using Facial Muscle Template Model,” in Medical Image Computing and Computer-Assisted Intervention – MICCAI 2010, T. Jiang, N. Navab, J. Pluim, and M. Viergever, Eds., Springer Berlin / Heidelberg, 2010, vol. 6361, pp. 61-68.
    [Bibtex]
    @INCOLLECTION{Kim2010a,
      author = {Kim, Hyungmin and Jürgens, Philipp and Nolte, Lutz-Peter and Reyes,
      Mauricio},
      title = {Anatomically-Driven Soft-Tissue Simulation Strategy for Cranio-Maxillofacial
      Surgery Using Facial Muscle Template Model},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention – MICCAI
      2010},
      publisher = {Springer Berlin / Heidelberg},
      year = {2010},
      editor = {Jiang, Tianzi and Navab, Nassir and Pluim, Josien and Viergever,
      Max},
      volume = {6361},
      series = {Lecture Notes in Computer Science},
      pages = {61 - 68},
      abstract = {We propose a computationally efficient and bio-mechanically relevant
      soft-tissue simulation method for cranio-maxillofacial (CMF) surgery.
      A template-based facial muscle reconstruction was introduced to minimize
      the efforts on preparing a patient-specific model. A transversely
      isotropic mass-tensor model (MTM) was adopted to realize the effect
      of directional property of facial muscles in reasonable computation
      time. Additionally, sliding contact around teeth and mucosa was considered
      for more realistic simulation. Retrospective validation study with
      post-operative scan of a real patient showed that there were considerable
      improvements in simulation accuracy by incorporating template-based
      facial muscle anatomy and sliding contact.},
      affiliation = {Institute for Surgical Technology and Biomechanics, University of
      Bern, Stauffacherstrasse 78, 3014 Bern, Switzerland},
      file = {Kim2010a.pdf:Kim2010a.pdf:PDF},
      keywords = {TEC, CMS, OCS},
      owner = {Thomas},
      timestamp = {2011.02.14}
    }
  • H. Kim, P. Jürgens, S. Weber, L. Nolte, and M. Reyes, “A new soft-tissue simulation strategy for cranio-maxillofacial surgery using facial muscle template model,” Progress in Biophysics and Molecular Biology, vol. 103, iss. 2-3, pp. 284-291, 2010.
    [Bibtex]
    @ARTICLE{Kim2010b,
      author = {Hyungmin Kim and Philipp Jürgens and Stefan Weber and Lutz-Peter
      Nolte and Mauricio Reyes},
      title = {A new soft-tissue simulation strategy for cranio-maxillofacial surgery
      using facial muscle template model},
      journal = {Progress in Biophysics and Molecular Biology},
      year = {2010},
      volume = {103},
      pages = {284 - 291},
      number = {2-3},
      note = {Special Issue on Biomechanical Modelling of Soft Tissue Motion},
      abstract = {We propose a computationally efficient, bio-mechanically relevant
      soft-tissue simulation method for cranio-maxillofacial (CMF) surgery.
      Special emphasis is given to comply with the current clinical workflow.
      A template-based facial muscle prediction was introduced to avoid
      laborious segmentation from medical images. In addition, transversely
      isotropic mass-tensor model (MTM) was applied to realize the directional
      behavior of facial muscles in short computation time. Finally, sliding
      contact was incorporated to mimic realistic boundary condition in
      error-sensitive regions. The mechanical simulation results were compared
      with those of commercial finite element software, and a retrospective
      validation study with post-operative scans of four CMF cases was performed.},
      file = {Kim2010b.pdf:Kim2010b.pdf:PDF},
      issn = {0079-6107},
      keywords = {Soft-tissue simulation, TEC, OCS, CMS},
      owner = {Thomas},
      timestamp = {2011.02.14}
    }
  • C. Kirbas and F. Quek, “A review of vessel extraction techniques and algorithms,” ACM Computing Surveys, vol. 36, iss. 2, pp. 81-121, 2004.
    [Bibtex]
    @ARTICLE{Kirbas2004,
      author = {Kirbas, C. and Quek, F.},
      title = {A review of vessel extraction techniques and algorithms},
      journal = {ACM Computing Surveys},
      year = {2004},
      volume = {36},
      pages = {81--121},
      number = {2},
      file = {Kirbas2004.pdf:Kirbas2004.pdf:PDF},
      issn = {0360-0300},
      keywords = {REV},
      owner = {thomaskroes},
      publisher = {Citeseer},
      timestamp = {2011.01.04}
    }
  • J. Kircher, M. Wiedemann, P. Magosch, S. Lichtenberg, and P. Habermeyer, “Improved accuracy of glenoid positioning in total shoulder arthroplasty with intraoperative navigation: a prospective-randomized clinical study.,” Journal of shoulder and elbow surgery / American Shoulder and Elbow Surgeons … [et al.], vol. 18, iss. 4, pp. 515-20, 2009.
    [Bibtex]
    @ARTICLE{Kircher2009,
      author = {Kircher, Jorn and Wiedemann, Markus and Magosch, Petra and Lichtenberg,
      Sven and Habermeyer, Peter},
      title = {Improved accuracy of glenoid positioning in total shoulder arthroplasty
      with intraoperative navigation: a prospective-randomized clinical
      study.},
      journal = {Journal of shoulder and elbow surgery / American Shoulder and Elbow
      Surgeons ... [et al.]},
      year = {2009},
      volume = {18},
      pages = {515-20},
      number = {4},
      abstract = {HYPOTHESIS: The correct implantation of the glenoid component is of
      paramount importance in total shoulder arthroplasty (TSA). We hypothesized
      that the accuracy of the glenoid positioning in the transverse plane
      can be improved using intraoperative navigation. MATERIALS AND METHODS:
      This prospective, randomized clinical study comprised 2 groups of
      10 patients each with osteoarthritis of the shoulder TSA, with or
      without intraoperative navigation. Glenoid version was measured on
      axial computed tomography scans preoperatively and 6 weeks postoperatively.
      RESULTS: The operating time was significantly longer in the navigation
      group (169.5 +/- 15.2 vs 138 +/- 18.4 min). We found an average change
      of retroversion from 15.4 degrees +/- 5.8 degrees (range, 3.0 degrees
      -24.0 degrees) preoperatively to 3.7 degrees +/- 6.3 degrees (range,
      -8.0 degrees to 15.0 degrees) postoperatively in the navigation group
      compared with 14.4 degrees +/- 6.1 degrees (range, 2.0 degrees -24.0
      degrees) preoperatively to 10.9 degrees +/- 6.8 degrees (range, 0.0
      degrees -19.0 degrees) postoperatively in the group without navigation
      (P = .021). CONCLUSION: We found an improved accuracy in glenoid
      positioning in the transverse plane using intraoperative navigation.
      The validity of the study is limited by the small number, which advocates
      continuation with more patients and longer follow-up. LEVEL OF EVIDENCE:
      Level 2; Therapeutic study.},
      file = {Kircher2009.pdf:Kircher2009.pdf:PDF},
      issn = {1532-6500},
      keywords = {Aged,Arthroplasty, Replacement,Arthroplasty, Replacement: methods,Female,Follow-Up
      Studies,Humans,Joint Prosthesis,Male,Middle Aged,Monitoring, Intraoperative,Monitoring,
      Intraoperative: methods,Osteoarthritis,Osteoarthritis: diagnosis,Osteoarthritis:
      surgery,Probability,Range of Motion, Articular,Range of Motion, Articular:
      physiology,Recovery of Function,Risk Assessment,Shoulder Joint,Shoulder
      Joint: physiopathology,Shoulder Joint: surgery,Surgery, Computer-Assisted,Surgery,
      Computer-Assisted: methods,Treatment Outcome},
      owner = {thomaskroes},
      pmid = {19559369},
      timestamp = {2010.10.22}
    }
  • M. Klein and M. Abrams, “Computer-guided surgery utilizing a computer-milled surgical template,” Practical Procedures and Aesthetic Dentistry, vol. 13, iss. 2, pp. 165-169, 2001.
    [Bibtex]
    @ARTICLE{Klein2001,
      author = {Klein, M. and Abrams, M.},
      title = {Computer-guided surgery utilizing a computer-milled surgical template},
      journal = {Practical Procedures and Aesthetic Dentistry},
      year = {2001},
      volume = {13},
      pages = {165 - 169},
      number = {2},
      file = {Klein2001.pdf:Klein2001.pdf:PDF},
      keywords = {TRM},
      owner = {Thomas},
      publisher = {Montage Media Publication},
      timestamp = {2011.03.09}
    }
  • W. H. Kluge, “Computer assisted hip resurfacing,” Orthopaedics and Trauma, vol. 23, iss. 3, pp. 210-215, 2009.
    [Bibtex]
    @ARTICLE{Kluge2009,
      author = {Wolfram H. Kluge},
      title = {Computer assisted hip resurfacing},
      journal = {Orthopaedics and Trauma},
      year = {2009},
      volume = {23},
      pages = {210 - 215},
      number = {3},
      abstract = {Hip resurfacing has generally favourable results. Complications such
      as femoral neck fracture and implant loosening are often related
      to surgical technique. Recent published results suggest that the
      revision rate for hip resurfacing could be reduced by accurate implant
      positioning. This article is a review of the characteristics of computer
      assisted navigation systems which aim to achieve best possible alignment
      of the femoral head surface replacement in relation to the individual
      head-neck axis.},
      file = {Kluge2009.pdf:Kluge2009.pdf:PDF},
      issn = {1877-1327},
      keywords = {computer assistance, APP, GUI, SUR, OTS},
      owner = {Thomas},
      timestamp = {2011.02.07}
    }
  • E. Kobayashi, K. Masamune, I. Sakuma, T. Dohi, and D. Hashimoto, “Quantitative evaluation of the man-machine interface for a laparoscopic manipulator system,” Computer Assisted Radiology and Surgery, USA, pp. 111-115, 2000.
    [Bibtex]
    @ARTICLE{Kobayashi2000,
      author = {Kobayashi, E. and Masamune, K. and Sakuma, I. and Dohi, T. and Hashimoto,
      D.},
      title = {Quantitative evaluation of the man-machine interface for a laparoscopic
      manipulator system},
      journal = {Computer Assisted Radiology and Surgery, USA},
      year = {2000},
      pages = {111 - 115},
      file = {Kobayashi2000.pdf:Kobayashi2000.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2011.01.04}
    }
  • R. M. Koch, Methods for physics based facial surgery prediction, Hartung-Gorre, 2001.
    [Bibtex]
    @BOOK{Koch2001,
      title = {Methods for physics based facial surgery prediction},
      publisher = {Hartung-Gorre},
      year = {2001},
      author = {Koch, R.M.},
      file = {Koch2001.pdf:Koch2001.pdf:PDF},
      isbn = {389649712X},
      keywords = {APP, CMS, OCS, PLA},
      owner = {Thomas},
      timestamp = {2011.02.08}
    }
  • R. M. Koch, M. H. Gross, F. R. Carls, D. F. von Büren, G. Fankhauser, and Y. I. H. Parish, “Simulating facial surgery using finite element models,” in Proceedings of the 23rd annual conference on Computer graphics and interactive techniques, 1996, pp. 421-428.
    [Bibtex]
    @CONFERENCE{Koch1996,
      author = {Koch, R.M. and Gross, M.H. and Carls, F.R. and von B{\"u}ren, D.F.
      and Fankhauser, G. and Parish, Y.I.H.},
      title = {Simulating facial surgery using finite element models},
      booktitle = {Proceedings of the 23rd annual conference on Computer graphics and
      interactive techniques},
      year = {1996},
      pages = {421 - 428},
      organization = {ACM},
      file = {Koch1996.pdf:Koch1996.pdf:PDF},
      isbn = {0897917464},
      keywords = {APP, PLA, OCS, PRS, SLR, SUR, VOR},
      owner = {Thomas},
      timestamp = {2011.02.08}
    }
  • R. Koch, S. Roth, M. Gross, A. Zimmermann, and H. Sailer, “A framework for facial surgery simulation,” in Proceedings of the 18th spring conference on Computer graphics, 2002, pp. 33-42.
    [Bibtex]
    @CONFERENCE{Koch2002,
      author = {Koch, R.M. and Roth, S.H.M. and Gross, M.H. and Zimmermann, A.P.
      and Sailer, H.F.},
      title = {A framework for facial surgery simulation},
      booktitle = {Proceedings of the 18th spring conference on Computer graphics},
      year = {2002},
      pages = {33 - 42},
      organization = {ACM},
      file = {Koch2002.pdf:Koch2002.pdf:PDF},
      isbn = {1581136080},
      owner = {thomaskroes},
      timestamp = {2011.01.10}
    }
  • R. A. Kockro, L. Serra, Y. Tseng-Tsai, C. Chan, S. Yih-Yian, C. Gim-Guan, E. Lee, L. Y. Hoe, N. Hern, and W. L. Nowinski, “Planning and simulation of neurosurgery in a virtual reality environment,” Neurosurgery, vol. 46, iss. 1, p. 118, 2000.
    [Bibtex]
    @ARTICLE{Kockro2000,
      author = {Kockro, R.A. and Serra, L. and Tseng-Tsai, Y. and Chan, C. and Yih-Yian,
      S. and Gim-Guan, C. and Lee, E. and Hoe, L.Y. and Hern, N. and Nowinski,
      W.L.},
      title = {Planning and simulation of neurosurgery in a virtual reality environment},
      journal = {Neurosurgery},
      year = {2000},
      volume = {46},
      pages = {118},
      number = {1},
      issn = {0148-396X},
      owner = {Th},
      timestamp = {2011.03.04}
    }
  • O. Konrad-Verse, B. Preim, and A. Littmann, “Virtual resection with a deformable cutting plane,” in Proceedings of simulation und visualisierung, vol. 2004, pp. 203-214, 2004.
    [Bibtex]
    @CONFERENCE{Konrad2004,
      author = {Konrad-Verse, O. and Preim, B. and Littmann, A.},
      title = {Virtual resection with a deformable cutting plane},
      booktitle = {Proceedings of simulation und visualisierung},
      year = {2004},
      volume = {2004},
      pages = {203 - 214},
      organization = {Citeseer},
      file = {Konrad2004.pdf:Konrad2004.pdf:PDF},
      keywords = {TEC},
      owner = {thomaskroes},
      timestamp = {2010.11.18}
    }
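    The Konrad-Verse et al. entry above builds virtual resections around a deformable cutting plane. As a much-simplified Python sketch of only the rigid starting point, fitting a plane to user-placed points and splitting a mesh by its sign, see below; the deformation of the plane, which is the paper's contribution, is not reproduced, and all data are placeholders.

        # Simplified sketch related to virtual resection: fit a rigid cutting plane to
        # user-placed points by least squares (SVD) and split mesh vertices by the side
        # of the plane they fall on. The paper's deformable plane is not reproduced;
        # all data below are illustrative placeholders.
        import numpy as np

        def fit_plane(points):
            """Return (centroid, unit normal) of the best-fit plane through 3D points."""
            centroid = points.mean(axis=0)
            _, _, vt = np.linalg.svd(points - centroid)
            return centroid, vt[-1]              # direction of least variance = plane normal

        rng = np.random.default_rng(3)
        picked = rng.normal(size=(12, 3))        # stand-in for points drawn on the organ surface
        vertices = rng.normal(size=(1000, 3))    # stand-in for liver mesh vertices

        centroid, normal = fit_plane(picked)
        signed_dist = (vertices - centroid) @ normal
        resected = vertices[signed_dist > 0]     # vertices on one side of the cut
        remnant = vertices[signed_dist <= 0]
        print(len(resected), "vertices on the resected side,", len(remnant), "on the remnant side")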
  • W. Korb, R. Marmulla, J. Raczkowsky, J. Mühling, and S. Hassfeld, “Robots in the operating theatre–chances and challenges.,” International journal of oral and maxillofacial surgery, vol. 33, iss. 8, pp. 721-32, 2004.
    [Bibtex]
    @ARTICLE{Korb2004,
      author = {Korb, W and Marmulla, R and Raczkowsky, J and M\"{u}hling, J and
      Hassfeld, S},
      title = {Robots in the operating theatre--chances and challenges.},
      journal = {International journal of oral and maxillofacial surgery},
      year = {2004},
      volume = {33},
      pages = {721-32},
      number = {8},
      month = {December},
      abstract = {The use of surgical robots and manipulators is still being frequently
      discussed in the mass media as well as in the scientific community.
      Although it was already noted in 1985 that the first patient was
      treated by a joint team of robot and surgeon, today such systems
      are not routinely used. This can be explained by the high complexity
      of such systems and the often limited usability, but also, that it
      is difficult for surgeons to accept "automatic" machines. In this
      paper the possibilities and chances of robots and manipulators will
      be explained and it will be shown that robots will never work alone
      in the operating theatre as it is common in industry today. On the
      other hand, also limitations and challenges will be outlined. Therefore
      first a review on today's systems is given in different disciplines
      including oral- and cranio-maxillofacial surgery, then advantages
      and disadvantages are shown.},
      file = {Korb2004.pdf:Korb2004.pdf:PDF},
      issn = {0901-5027},
      keywords = {Equipment Design,Humans,Robotics,Robotics: classification,Robotics:
      instrumentation,Robotics: trends,Surgery, Computer-Assisted,Surgical
      Procedures, Operative,Surgical Procedures, Operative: classification,Surgical
      Procedures, Operative: trends},
      owner = {thomaskroes},
      pmid = {15556318},
      timestamp = {2010.10.22}
    }
  • Y. Kosugi, E. Watanabe, J. Goto, T. Watanabe, S. Yoshimoto, K. Takakura, and J. Ikebe, “An articulated neurosurgical navigation system using MRI and CT images,” IEEE Transactions on Biomedical Engineering, vol. 35, iss. 2, pp. 147-152, 1988.
    [Bibtex]
    @ARTICLE{Kosugi1988,
      author = {Kosugi, Y. and Watanabe, E. and Goto, J. and Watanabe, T. and Yoshimoto,
      S. and Takakura, K. and Ikebe, J.},
      title = {An articulated neurosurgical navigation system using MRI and CT images},
      journal = {IEEE Transactions on Biomedical Engineering},
      year = {1988},
      volume = {35},
      pages = {147 - 152},
      number = {2},
      file = {Kosugi1988.pdf:Kosugi1988.pdf:PDF},
      issn = {0018-9294},
      owner = {thomaskroes},
      timestamp = {2011.01.06}
    }
  • J. Kowal, F. Langlotz, and L. Nolte, “Basics of Computer-Assisted Orthopaedic Surgery,” 2007.
    [Bibtex]
    @ARTICLE{Kowal2007,
      author = {Kowal, J and Langlotz, F and Nolte, L},
      title = {Basics of Computer-Assisted Orthopaedic Surgery},
      year = {2007},
      file = {Kowal2007.pdf:Kowal2007.pdf:PDF},
      keywords = {REV, OTS},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • C. Krapichler, M. Haubner, A. Losch, and K.-H. Englmeier, “A human-machine interface for medical image analysis and visualization in virtual environments,” in Acoustics, Speech, and Signal Processing, 1997. ICASSP-97., 1997 IEEE International Conference on, 1997, pp. 2613-2616.
    [Bibtex]
    @INPROCEEDINGS{Krapichler1997,
      author = {Krapichler, C. and Haubner, M. and Losch, A. and Englmeier, K.-H.},
      title = {A human-machine interface for medical image analysis and visualization
      in virtual environments},
      booktitle = {Acoustics, Speech, and Signal Processing, 1997. ICASSP-97., 1997
      IEEE International Conference on},
      year = {1997},
      volume = {4},
      pages = {2613 - 2616},
      month = apr,
      abstract = {Virtual worlds open new dimensions in human-machine and even human-human
      communication. Medicine is predestined to benefit from this new technology
      in many ways. For the field of visualization and analysis of tomography
      data, an application is introduced which expedites identification
      of spatial coherencies and exploration of pathological regions. To
      facilitate work in such an environment and to avoid long periods
      of accustoming, a human-oriented interface is required allowing physicians
      to interact as close to the real world as possible. Hand gesture
      recognition (with a data glove) and eye tracking (using biosignals)
      are essential parts to fulfil this demand. Their integration into
      the virtual environment as two components of the human-machine interface
      is presented},
      file = {Krapichler1997.pdf:Krapichler1997.pdf:PDF},
      keywords = {biosignals;data glove;eye tracking;hand gesture recognition;human-machine
      interface;human-oriented interface;medical image analysis;pathological
      regions;physicians;spatial coherencies;tomography data;virtual environment;virtual
      environments;visualization;computerised tomography;data gloves;data
      visualisation;eye;medical image processing;optical tracking;tomography;user
      interfaces;virtual reality;},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
  • P. R. Krekel, P. W. de Bruin, E. R. Valstar, F. H. Post, P. M. Rozing, and C. P. Botha, “Evaluation of bone impingement prediction in pre-operative planning for shoulder arthroplasty,” Proceedings of the Institution of Mechanical Engineers, Part H: Journal of Engineering in Medicine, vol. 223, iss. 7, pp. 813-822, 2009.
    [Bibtex]
    @ARTICLE{Krekel2009,
      author = {Krekel, P R and de Bruin, P W and Valstar, E R and Post, F H and
      Rozing, P M and Botha, C P},
      title = {Evaluation of bone impingement prediction in pre-operative planning
      for shoulder arthroplasty},
      journal = {Proceedings of the Institution of Mechanical Engineers, Part H: Journal
      of Engineering in Medicine},
      year = {2009},
      volume = {223},
      pages = {813-822},
      number = {7},
      month = {October},
      abstract = {In shoulder arthroplasty, malpositioning of prostheses often leads
      to reduced post-operative range of motion (ROM) and complications
      such as impingement, loosening, and dislocation. Furthermore, the
      risk of impingement complications increases when reverse total prostheses
      are used. For this purpose a pre-operative planning system was developed
      that enables surgeons to perform a virtual shoulder replacement procedure.
      Our pre-operative planning system simulates patient-specific bone-determined
      ROM meant to reduce the risk of impingement complications and to
      improve the ROM of patients undergoing shoulder replacement surgery.
      This paper describes a validation experiment with the purpose of
      ratifying the clinical applicability and usefulness of the ROM simulation
      module for shoulder replacement surgery. The experiment was performed
      on cadaveric shoulders. A data connection was set up between the
      software environment and an existing intra-operative guidance system
      to track the relative positions of the bones. This allowed the patient-specific
      surface models to be visualized within the software for the position
      and alignment of the tracked bones. For both shoulders, ROM measurements
      were recorded and tagged with relevant information such as the type
      of prosthesis and the type of movement that was performed. The observed
      ROM and occurrences of impingement were compared with the simulated
      equivalents. The median deviation between observed impingement angles
      and simulated impingement angles was -0.30° with an interquartile
      range of 5.20° (from -3.40° to 1.80°). It was concluded that the
      ROM simulator is sufficiently accurate to fulfil its role as a supportive
      instrument for orthopaedic surgeons during shoulder replacement surgery.},
      file = {Krekel2009.pdf:Krekel2009.pdf:PDF},
      issn = {0954-4119},
      keywords = {arthroplasty,medical visualization,motion,motion tracking,pre-operative
      planning,range of,shoulder, TEC, OCS},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
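    [Python sketch] A minimal, hypothetical illustration of the summary statistics quoted in the Krekel et al. (2009) abstract above (median deviation and interquartile range of observed minus simulated impingement angles). The angle values below are invented and NumPy is an assumed dependency; this is not code from the paper.
      import numpy as np

      # Hypothetical observed vs. simulated impingement angles (degrees)
      observed = np.array([42.1, 55.3, 61.0, 48.7, 70.2])
      simulated = np.array([44.8, 54.1, 63.5, 47.2, 71.9])

      deviation = observed - simulated
      q1, q3 = np.percentile(deviation, [25, 75])
      print(f"median deviation: {np.median(deviation):.2f} deg")
      print(f"interquartile range: {q3 - q1:.2f} deg (from {q1:.2f} to {q3:.2f})")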
  • [PDF] P. R. Krekel, E. R. Valstar, J. D. Groot, F. H. Post, R. G. H. H. Nelissen, and C. P. Botha, “Visual Analysis of Multi-Joint Kinematic Data,” Computer Graphics Forum, vol. 29, iss. 3, 2010.
    [Bibtex]
    @ARTICLE{Krekel2010a,
      author = {Krekel, Peter R and Valstar, Edward R and Groot, Jurriaan De and
      Post, Frits H and Nelissen, Rob G H H and Botha, Charl P},
      title = {Visual Analysis of Multi-Joint Kinematic Data},
      journal = {Computer Graphics Forum},
      year = {2010},
      volume = {29},
      number = {3},
      abstract = {Kinematics is the analysis of motions without regarding forces or inertial
      effects, with the purpose of understanding joint behaviour. Kinematic
      data of linked joints, for example the upper extremity, i.e. the
      shoulder and arm joints, contains many related degrees of freedom
      that complicate numerical analysis. Visualisation techniques enhance
      the analysis process, thus improving the effectiveness of kinematic
      experiments. This paper describes a new visualisation system specifically
      designed for the analysis of multi-joint kinematic data of the upper
      extremity. The challenge inherent in the data is that the upper extremity
      is comprised of five cooperating joints with a total of fifteen
      degrees of freedom. The range of motion may be affected by subtle
      deficiencies of individual joints that are difficult to pinpoint.
      To highlight these subtleties our approach combines interactive filtering
      and multiple visualisation techniques. Our system is further differentiated
      by the fact that it integrates simultaneous acquisition and visual
      analysis of biokinematic data. Also, to facilitate complex queries,
      we have designed a visual query interface with visualisation and
      interaction elements that are based on the domain-specific anatomical
      representation of the data. The combination of these techniques
      form an effective approach specifically tailored for the investigation
      and comparison of large collections of kinematic data. This claim
      is supported by an evaluation experiment where the technique was
      used to inspect the kinematics of the left and right arm of a patient
      with a healed proximal humerus fracture, i.e. a healed shoulder fracture.},
      file = {Krekel2010a.pdf:Krekel2010a.pdf:PDF},
      owner = {thomaskroes},
      pdf = {http://graphics.tudelft.nl/publications/Krekel2010a.pdf},
      timestamp = {2010.10.22},
      url = {http://graphics.tudelft.nl/Publications/Krekel2010a}
    }
  • P. R. Krekel, E. R. Valstar, and F. H. Post, “Combined Surface and Volume Processing for Fused Joint Segmentation,” Computer Assisted Radiology And Surgery, 2010.
    [Bibtex]
    @ARTICLE{Krekel2010b,
      author = {Krekel, Peter R and Valstar, Edward R and Post, Frits H},
      title = {Combined Surface and Volume Processing for Fused Joint Segmentation},
      journal = {Computer Assisted Radiology And Surgery},
      year = {2010},
      abstract = {Purpose: Segmentation of rheumatoid joints from CT images is a
      complicated task. The pathological state of the joint results in a
      non-uniform density of the bone tissue, with holes and irregularities
      complicating the segmentation process. For the specific case of the
      shoulder joint, existing segmentation techniques often fail and lead
      to poor results. This paper describes a novel method for the segmentation
      of these joints. Methods: Given a rough surface model of the shoulder,
      a loop that encircles the joint is extracted by calculating the minimum
      curvature of the surface model. The intersection points of this loop
      with the separate CT slices are connected by means of a path search
      algorithm. Inaccurate sections are corrected by iteratively applying
      a Hough transform to the segmentation result. Results: As a qualitative
      measure we calculated the Dice coefficient and Hausdorff distances of
      the automatic segmentations and expert manual segmentations of CT scans
      of ten severely deteriorated shoulder joints. For the humerus and
      scapula the median Dice coefficient was 98.9\% with an interquartile
      range (IQR) of 95.8\% - 99.4\% and 98.5\% (IQR 98.3\% - 99.2\%) respectively.
      The median Hausdorff distances were 3.06 mm (IQR 2.30 mm - 4.14 mm)
      and 3.92 mm (IQR 1.96 mm - 5.92 mm) respectively.},
      file = {Krekel2010b.pdf:Krekel2010b.pdf:PDF},
      keywords = {IMP},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
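    [Python sketch] The Krekel et al. (2010) abstract above evaluates its segmentations with the Dice coefficient and the Hausdorff distance. Below is a minimal, hypothetical sketch of both measures for a pair of binary masks, assuming NumPy and SciPy are available; the masks are synthetic stand-ins, not CT segmentations.
      import numpy as np
      from scipy.spatial.distance import directed_hausdorff

      # Two overlapping synthetic masks standing in for automatic and manual segmentations
      auto = np.zeros((64, 64), dtype=bool)
      manual = np.zeros((64, 64), dtype=bool)
      auto[20:40, 20:40] = True
      manual[22:42, 21:41] = True

      # Dice coefficient: 2*|A and B| / (|A| + |B|)
      dice = 2.0 * np.logical_and(auto, manual).sum() / (auto.sum() + manual.sum())

      # Symmetric Hausdorff distance between the two point sets (pixel units)
      pts_a, pts_m = np.argwhere(auto), np.argwhere(manual)
      hausdorff = max(directed_hausdorff(pts_a, pts_m)[0],
                      directed_hausdorff(pts_m, pts_a)[0])

      print(f"Dice: {dice:.3f}, Hausdorff: {hausdorff:.2f} px")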
  • M. Krokos, D. Podgorelec, G. J. Clapworthy, R. H. Liang, D. Testi, and M. Viceconti, “Patient-specific muscle models for surgical planning,” in Medical Information Visualisation – Biomedical Visualisation, 2005. (MediVis 2005). Proceedings. Third International Conference on, 2005, pp. 3-8.
    [Bibtex]
    @INPROCEEDINGS{Krokos2005,
      author = {Krokos, M. and Podgorelec, D. and Clapworthy, G.J. and Liang, R.H.
      and Testi, D. and Viceconti, M.},
      title = {Patient-specific muscle models for surgical planning},
      booktitle = {Medical Information Visualisation - Biomedical Visualisation, 2005.
      (MediVis 2005). Proceedings. Third International Conference on},
      year = {2005},
      pages = { 3 - 8},
      month = {July},
      abstract = {In the Multisense project, several sensorial devices (haptics, speech,
      visualisation and tracking) are integrated within a virtual reality
      environment for planning total hip replacement (THR) surgery. In
      use, a fundamental requirement is the creation of a complete subject-specific
      model of the lower limb. For THR surgery, the available data is from
      computed tomography (CT) only. Although CT scans allow accurate extraction
      of bones and the skin surface, muscles appear as poorly contrasted
      structures. The challenge is, thus, to produce an intuitive modelling
      tool that will support interactive deformation of a generic atlas
      model into a patient-specific model. For this, we first generate
      synthetic scans along pre-defined slicing axes in patient space.
      Muscle outlines obtained by an initial mapping of generic atlas models
      are then superimposed on these slices. These outlines are deformed
      by simple geometric operations to match the underlying muscle shapes
      using a novel software tool, the Muscle Modelling Laboratory (MML).
      The paper presents a shape investigation of the muscles relevant
      to THR surgery and describes the MML module in operation.},
      file = {Krokos2005.pdf:Krokos2005.pdf:PDF},
      keywords = { Multisense project; Muscle Modelling Laboratory; bones; computed
      tomography; generic atlas model; medical visualisation; patient-specific
      model; patient-specific muscle models; pre-defined slicing axes;
      skin surface; total hip replacement surgical planning; virtual reality
      environment; bone; computerised tomography; data visualisation; medical
      computing; muscle; physiological models; skin; surgery; virtual reality;},
      owner = {thomaskroes},
      timestamp = {2011.01.18}
    }
  • A. Kruger, C. Tietjen, J. Hintze, B. Preim, I. Hertel, and G. Straus, “Analysis and exploration of 3D visualization for neck dissection planning,” International Congress Series, vol. 1281, pp. 497-503, 2005.
    [Bibtex]
    @ARTICLE{Kruger2005,
      author = {Kruger, A and Tietjen, C and Hintze, J and Preim, B and Hertel, I
      and Straus, G},
      title = {Analysis and exploration of 3D visualization for neck dissection
      planning},
      journal = {International Congress Series},
      year = {2005},
      volume = {1281},
      pages = {497-503},
      month = {May},
      abstract = {We present visualization techniques for neck dissection planning.
      These interventions are carried out to remove lymph node metastasis
      in the neck region. In 18 CT-datasets, the relevant anatomic and
      pathologic structures were segmented. 3d visualization is intended
      to explore and to quantify anatomic and pathologic structures and
      thus support decisions concerning the surgical strategy. For this
      purpose we developed and combined visualization and interaction techniques
      such as cutaway views, silhouettes and color-coded distances. In
      addition, a standardized procedure for processing and visualization
      of the patient data is presented. © 2005 CARS \& Elsevier B.V. All
      rights reserved.},
      file = {Kruger2005.pdf:Kruger2005.pdf:PDF},
      issn = {05315131},
      keywords = {lymph node exploration,medical visualization,neck dissection,operation
      planning},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
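    [Python sketch] One of the techniques named in the Kruger et al. (2005) abstract above is colour-coded distances. The generic idea, sketched below with synthetic point clouds and an assumed NumPy/SciPy dependency, is to map the distance from each point of one segmented structure to the nearest point of another onto a colour scale; this is only an illustration, not the authors' implementation.
      import numpy as np
      from scipy.spatial import cKDTree

      # Synthetic stand-ins for two segmented surfaces (points in mm)
      lymph_node_pts = np.random.rand(200, 3) * 10.0
      vessel_pts = np.random.rand(500, 3) * 10.0 + 5.0

      # Distance from every lymph node point to the nearest vessel point
      dist, _ = cKDTree(vessel_pts).query(lymph_node_pts)

      # Encode distance as RGB: red = close (critical), green = far (safe)
      t = np.clip(dist / dist.max(), 0.0, 1.0)
      colors = np.stack([1.0 - t, t, np.zeros_like(t)], axis=1)
      print(colors[:3])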
  • T. Kunert, M. Thorn, and H. Meinzer, “Visualization and attributation of vascular structures for diagnostics and therapy planning.,” Studies in health technology and informatics, vol. 85, pp. 255-7, 2002.
    [Bibtex]
    @ARTICLE{Kunert2002,
      author = {Kunert, Tobias and Thorn, Matthias and Meinzer, Hans-Peter},
      title = {Visualization and attributation of vascular structures for diagnostics
      and therapy planning.},
      journal = {Studies in health technology and informatics},
      year = {2002},
      volume = {85},
      pages = {255-7},
      month = {January},
      abstract = {In various medical fields vascular structures have to be examined
      with usually two-dimensional views which present imaging techniques
      produce. The interpretation of the data can be supported by 3-dimensional
      visualization techniques. The further analysis requires often the
      attributation of the particular functional or anatomical entities.
      To attribute these interactively we developed two different visualization
      strategies. In the first one the shape of the structures is modelled
      with OpenGL achieving very fast response times, most notably during
      the navigation. The second strategy, the direct rendering of the
      volume, benefits from the accurate reproduction of the vascular structures.
      Although the rendering needs much more time, the strategy provides
      similar response times for the attributation. Thus, the strategies
      complement one another.},
      file = {Kunert2002.pdf:Kunert2002.pdf:PDF},
      issn = {0926-9630},
      keywords = {Computer Simulation,Humans,Image Processing, Computer-Assisted,Imaging,
      Three-Dimensional,Liver,Liver: blood supply,Magnetic Resonance Imaging,Surgery,
      Computer-Assisted,Tomography, X-Ray Computed,User-Computer Interface},
      owner = {thomaskroes},
      pmid = {15458097},
      timestamp = {2010.10.22}
    }
  • K. Kunkler, “The role of medical simulation: an overview,” The International Journal of Medical Robotics and Computer Assisted Surgery, vol. 2, iss. 3, pp. 203-210, 2006.
    [Bibtex]
    @ARTICLE{Kunkler2006,
      author = {Kunkler, K.},
      title = {The role of medical simulation: an overview},
      journal = {The International Journal of Medical Robotics and Computer Assisted
      Surgery},
      year = {2006},
      volume = {2},
      pages = {203 - 210},
      number = {3},
      issn = {1478-596X},
      owner = {Thomas},
      publisher = {John Wiley \& Sons},
      timestamp = {2011.02.28}
    }
  • Y. W. Kwon, K. A. Powell, J. K. Yum, J. J. Brems, and J. P. Iannotti, “Use of three-dimensional computed tomography for the analysis of the glenoid anatomy.,” Journal of shoulder and elbow surgery / American Shoulder and Elbow Surgeons … [et al.], vol. 14, iss. 1, pp. 85-90, 2005.
    [Bibtex]
    @ARTICLE{Kwon1990,
      author = {Kwon, Young W and Powell, Kimerly A and Yum, Jae Kwang and Brems,
      John J and Iannotti, Joseph P},
      title = {Use of three-dimensional computed tomography for the analysis of
      the glenoid anatomy.},
      journal = {Journal of shoulder and elbow surgery / American Shoulder and Elbow
      Surgeons ... [et al.]},
      year = {2005},
      volume = {14},
      pages = {85-90},
      number = {1},
      abstract = {Preoperative evaluation for a total shoulder arthroplasty includes
      2-dimensional analysis of the glenoid through either standard radiographs
      or computed tomography (CT) images. Recent evidence suggests that
      these 2-dimensional images may actually misrepresent the 3-dimensional
      (3D) anatomy of the glenoid. Because 3D reconstructions of CT images
      allow 3D visualization and analysis of the scapula as a free body,
      we hypothesized that they can reflect the true anatomy of the glenoid
      more accurately. To test this hypothesis, we obtained various glenoid
      morphometric measurements from excised cadaveric scapulae as well
      as their respective 3D CT images. On average, the glenoid version
      angles measured from the 3D CT images were within 1.0 degrees +/-
      0.7 degrees (mean +/- SD) of those from the actual specimen (95\%
      confidence limit, <2.2 degrees for all observers). These measurements
      from the 3D CT images showed high interobserver and intraobserver
      reliability (interobserver and intraobserver correlation coefficients,
      0.983 and 0.978, respectively). Similarly, measured glenoid surface
      width and length from the 3D CT images were within 1.8 +/- 1.2 mm
      and 1.4 +/- 1.1 mm, respectively, of those from the actual specimen.
      In addition, we were able to estimate the glenoid surface area as
      well as the glenoid vault volume from the 3D CT images. These values
      were 8.67 +/- 2.73 cm2 and 11.86 +/- 5.06 cm3, respectively. The
      mean glenoid vault volume with respect to its surface area was 1.35
      +/- 0.24 cm3/cm2 (range, 1.06-1.91 cm3/cm2). These data suggest that
      3D CT images can accurately reflect the true anatomy of the glenoid
      and that they can provide valuable information regarding the glenoid
      surface and vault. As such, 3D CT images may prove to be a useful
      tool during the preoperative evaluation for a total shoulder arthroplasty,
      particularly in patients with significant glenoid bone loss.},
      file = {Kwon1990.pdf:Kwon1990.pdf:PDF},
      issn = {1058-2746},
      keywords = {Anthropometry,Cadaver,Humans,Imaging, Three-Dimensional,Observer Variation,Shoulder
      Joint,Shoulder Joint: anatomy \& histology,Shoulder Joint: radiography,Tomography,
      X-Ray Computed},
      owner = {thomaskroes},
      pmid = {15723018},
      timestamp = {2010.10.22}
    }
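    [Python sketch] The Kwon et al. study above compares glenoid measurements taken from 3D CT reconstructions against direct measurements on the cadaveric specimens. A minimal, hypothetical way to summarise such agreement is shown below (mean and standard deviation of the differences plus a Pearson correlation as a simplified stand-in for the reported observer correlation coefficients); all values are invented and NumPy is assumed.
      import numpy as np

      # Hypothetical glenoid version angles (degrees)
      ct_version = np.array([2.1, -4.3, 0.5, 6.8, -1.2])
      specimen_version = np.array([1.5, -3.9, 1.4, 6.1, -0.4])

      diff = ct_version - specimen_version
      print(f"mean difference: {diff.mean():.2f} +/- {diff.std(ddof=1):.2f} deg")
      print(f"Pearson r: {np.corrcoef(ct_version, specimen_version)[0, 1]:.3f}")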
  • D. Lacroix and P. J. Prendergast, “3D finite element analysis of glenoid prostheses for total shoulder arthroplasty,” Journal of Biomechanics, vol. 31, iss. Supplement 1, pp. 52-52, 1998.
    [Bibtex]
    @ARTICLE{Lacroix1998,
      author = {D. Lacroix and P. J. Prendergast},
      title = {3D finite element analysis of glenoid prostheses for total shoulder
      arthroplasty},
      journal = {Journal of Biomechanics},
      year = {1998},
      volume = {31},
      pages = {52 - 52},
      number = {Supplement 1},
      file = {:C\:\\Thomas\\PHD\\Literature\\Articles\\Lacroix1998.pdf:PDF},
      issn = {0021-9290},
      keywords = {OCS, OTS},
      owner = {thomaskroes},
      timestamp = {2010.10.26}
    }
  • J. F. LaDisa, I. Guler, L. E. Olson, D. A. Hettrick, J. R. Kersten, D. C. Warltier, and P. S. Pagel, “Three-Dimensional Computational Fluid Dynamics Modeling of Alterations in Coronary Wall Shear Stress Produced by Stent Implantation,” Annals of Biomedical Engineering, vol. 31, pp. 972-980, 2003.
    [Bibtex]
    @ARTICLE{LaDisa2003,
      author = {LaDisa, John F. and Guler, Ismail and Olson, Lars E. and Hettrick,
      Douglas A. and Kersten, Judy R. and Warltier, David C. and Pagel,
      Paul S.},
      title = {Three-Dimensional Computational Fluid Dynamics Modeling of Alterations
      in Coronary Wall Shear Stress Produced by Stent Implantation},
      journal = {Annals of Biomedical Engineering},
      year = {2003},
      volume = {31},
      pages = {972-980},
      note = {10.1114/1.1588654},
      abstract = {Rates of coronary restenosis after stent implantation vary with stent
      design. Recent evidence suggests that alterations in wall shear stress
      associated with different stent types and changes in local vessel
      geometry after implantation may account for this disparity. We tested
      the hypothesis that wall shear stress is altered in a three-dimensional
      computational fluid dynamics (CFD) model after coronary implantation
      of a 16 mm slotted-tube stent during simulations of resting blood
      flow and maximal vasodilation. Canine left anterior descending coronary
      artery blood flow velocity and interior diameter were used to construct
      CFD models and evaluate wall shear stress proximal and distal to
      and within the stented region. Channeling of adjacent blood layers
      due to stent geometry had a profound effect on wall shear stress.
      Stagnation zones were localized around stent struts. Minimum wall
      shear stress decreased by 77% in stented compared to unstented vessels.
      Regions of low wall shear stress were extended at the stent outlet
      and localized to regions where adjacent axial strut spacing was minimized
      and the circumferential distance between struts was greatest within
      the stent. The present results depict alterations in wall shear stress
      caused by a slotted-tube stent and support the hypothesis that stent
      geometry may be a risk factor for restenosis by affecting local wall
      shear stress distributions. © 2003 Biomedical Engineering Society.},
      file = {LaDisa2003.pdf:LaDisa2003.pdf:PDF},
      issn = {0090-6964},
      issue = {8},
      keyword = {Biomedical and Life Sciences},
      keywords = {TEC},
      publisher = {Springer Netherlands},
      url = {http://dx.doi.org/10.1114/1.1588654}
    }
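    [Python sketch] For orientation only: the baseline quantity that CFD studies such as LaDisa et al. (2003) perturb with stent geometry is the wall shear stress, which for fully developed Poiseuille flow in a straight circular vessel is tau_w = 4*mu*Q/(pi*R^3). The numbers below are illustrative assumptions, not values from the paper.
      import math

      mu = 3.5e-3   # blood dynamic viscosity [Pa*s], typical literature value
      Q = 1.0e-6    # volumetric flow rate [m^3/s] (60 ml/min), hypothetical
      R = 1.5e-3    # vessel radius [m], hypothetical

      tau_wall = 4.0 * mu * Q / (math.pi * R ** 3)
      print(f"Poiseuille wall shear stress: {tau_wall:.2f} Pa")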
  • D. H. Laidlaw, R. M. Kirby, C. D. Jackson, J. S. Davidson, T. S. Miller, M. Da Silva, W. H. Warren, and M. J. Tarr, “Comparing 2D vector field visualization methods: A user study,” IEEE Transactions on Visualization and Computer Graphics, pp. 59-70, 2005.
    [Bibtex]
    @ARTICLE{Laidlaw2005,
      author = {Laidlaw, D.H. and Kirby, R.M. and Jackson, C.D. and Davidson, J.S.
      and Miller, T.S. and Da Silva, M. and Warren, W.H. and Tarr, M.J.},
      title = {Comparing 2D vector field visualization methods: A user study},
      journal = {IEEE Transactions on Visualization and Computer Graphics},
      year = {2005},
      pages = {59 - 70},
      file = {Laidlaw2005.pdf:Laidlaw2005.pdf:PDF},
      issn = {1077-2626},
      owner = {thomaskroes},
      publisher = {Published by the IEEE Computer Society},
      timestamp = {2011.01.07}
    }
  • W. Lamade, G. Glombitza, L. Fischer, P. Chiu, C. E. Cárdenas, M. Thorn, H. P. Meinzer, L. Grenacher, H. Bauer, T. Lehnert, and C. Herfarth, “The impact of 3-dimensional reconstructions on operation planning in liver surgery.,” Archives of surgery (Chicago, Ill. : 1960), vol. 135, iss. 11, pp. 1256-61, 2000.
    [Bibtex]
    @ARTICLE{Lamade2000,
      author = {Lamade, W and Glombitza, G and Fischer, L and Chiu, P and C\'{a}rdenas,
      C E and Thorn, M and Meinzer, H P and Grenacher, L and Bauer, H and
      Lehnert, T and Herfarth, C},
      title = {The impact of 3-dimensional reconstructions on operation planning
      in liver surgery.},
      journal = {Archives of surgery (Chicago, Ill. : 1960)},
      year = {2000},
      volume = {135},
      pages = {1256 - 61},
      number = {11},
      month = {November},
      abstract = {BACKGROUND: Operation planning in liver surgery depends on the precise
      understanding of the 3-dimensional (3-D) relation of the tumor to the
      intrahepatic vascular trees. To our knowledge, the impact of anatomical
      3-D reconstructions on precision in operation planning has not yet
      been studied. HYPOTHESIS: Three-dimensional reconstruction leads
      to an improvement of the ability to localize the tumor and an increased
      precision in operation planning in liver surgery. DESIGN: We developed
      a new interactive computer-based quantitative 3-D operation planning
      system for liver surgery, which is being introduced to the clinical
      routine. To evaluate whether 3-D reconstruction leads to improved
      operation planning, we conducted a clinical trial. The data sets
      of 7 virtual patients were presented to a total of 81 surgeons in
      different levels of training. The tumors had to be assigned to a
      liver segment and subsequently drawn together with the operation
      proposal into a given liver model. The precision of the assignment
      to a liver segment according to Couinaud classification and the operation
      proposal were measured quantitatively for each surgeon and stratified
      concerning 2-D and different types of 3-D presentations. RESULTS:
      The ability of correct tumor assignment to a liver segment was significantly
      correlated to the level of training (P<.05). Compared with 2-D computed
      tomography scans, 3-D reconstruction leads to a significant increase
      of precision in tumor localization by 37\%. The target area of the
      resection proposal was improved by up to 31\%. CONCLUSION: Three-dimensional
      reconstruction leads to a significant improvement of tumor localization
      ability and to an increased precision of operation planning in liver
      surgery.},
      issn = {0004-0010},
      keywords = {Computer Simulation,General Surgery,General Surgery: education,Hepatectomy,Hepatectomy:
      methods,Humans,Image Processing, Computer-Assisted,Liver,Liver Neoplasms,Liver
      Neoplasms: surgery,Liver: blood supply,Liver: pathology,Therapy,
      Computer-Assisted,Tomography, X-Ray Computed,User-Computer Interface},
      owner = {thomaskroes},
      pmid = {11074877},
      timestamp = {2010.10.22}
    }
  • P. Lamata, A. Jalote-Parmar, F. Lamata, and J. Declerck, “The Resection Map, a proposal for intraoperative hepatectomy guidance,” International Journal of Computer Assisted Radiology and Surgery, vol. 3, iss. 3, pp. 299-306, 2008.
    [Bibtex]
    @ARTICLE{Lamata2008,
      author = {Lamata, P. and Jalote-Parmar, A. and Lamata, F. and Declerck, J.},
      title = {The Resection Map, a proposal for intraoperative hepatectomy guidance},
      journal = {International Journal of Computer Assisted Radiology and Surgery},
      year = {2008},
      volume = {3},
      pages = {299 - 306},
      number = {3},
      file = {Lamata2008.pdf:Lamata2008.pdf:PDF},
      issn = {1861-6410},
      keywords = {TEC, HES},
      owner = {Thomas},
      publisher = {Springer},
      timestamp = {2011.02.28}
    }
  • L. A. Landeras, R. Aslam, and J. Yee, “Virtual Colonoscopy: Technique and Accuracy,” Radiologic Clinics of North America, vol. 45, iss. 2, pp. 333-345, 2007.
    [Bibtex]
    @ARTICLE{Landeras2007,
      author = {Luis A. Landeras and Rizwan Aslam and Judy Yee},
      title = {Virtual Colonoscopy: Technique and Accuracy},
      journal = {Radiologic Clinics of North America},
      year = {2007},
      volume = {45},
      pages = {333 - 345},
      number = {2},
      abstract = {Virtual colonoscopy (VC) has acquired an important role in evaluation
      of the colon. In some situations it may be a safer method to visualize
      the colon effectively, or it may be the only available option when
      other techniques have failed. This article reviews state-of-the art
      VC technique and the results of current performance trials. It discusses
      the rationale for using various colonic cleansing regimens for VC.
      It also discusses the two distending agents for VC (room air and
      carbon dioxide) and presents practical tips for administration and
      the role of antispasmodic drugs.},
      issn = {0033-8389},
      owner = {thomaskroes},
      timestamp = {2011.01.06}
    }
  • T. Lange, S. Eulenstein, M. Hünerbein, H. Lamecker, and P. M. Schlag, “Augmenting intraoperative 3D ultrasound with preoperative models for navigation in liver surgery,” Medical Image Computing and Computer-Assisted Intervention–MICCAI 2004, pp. 534-541, 2004.
    [Bibtex]
    @ARTICLE{Lange2004,
      author = {Lange, T. and Eulenstein, S. and H{\"u}nerbein, M. and Lamecker,
      H. and Schlag, P.M.},
      title = {Augmenting intraoperative 3D ultrasound with preoperative models
      for navigation in liver surgery},
      journal = {Medical Image Computing and Computer-Assisted Intervention--MICCAI
      2004},
      year = {2004},
      pages = {534 - 541},
      file = {Lange2004.pdf:Lange2004.pdf:PDF},
      owner = {Thomas},
      publisher = {Springer},
      timestamp = {2011.01.31}
    }
  • F. Langlotz, “State-of-the-art in orthopaedic surgical navigation with a focus on medical image modalities,” The Journal of Visualization and Computer Animation, vol. 13, iss. 1, pp. 77-83, 2002.
    [Bibtex]
    @ARTICLE{Langlotz2002,
      author = {Langlotz, Frank},
      title = {State-of-the-art in orthopaedic surgical navigation with a focus
      on medical image modalities},
      journal = {The Journal of Visualization and Computer Animation},
      year = {2002},
      volume = {13},
      pages = {77 - 83},
      number = {1},
      month = {February},
      abstract = {This paper presents a review of surgical navigation systems in orthopaedics
      and categorizes these systems according to the image modalities that
      are used for the visualization of surgical action. Medical images
      used to be an essential part of surgical education and documentation
      as well as diagnosis and operation planning over many years. With
      the recent introduction of navigation techniques in orthopaedic surgery,
      a new field of application has been opened. Today surgical navigation
      systems — also known as image-guided surgery systems — are available
      for various applications in orthopaedic surgery. They visualize the
      position and orientation of surgical instruments as graphical overlays
      onto a medical image of the operated anatomy on a computer monitor.
      Preoperative image data such as computed tomography scans or intraoperatively
      generated images (for example, ultrasonic, endoscopic or fluoroscopic
      images) are suitable for this purpose. A new category of medical
      images termed ‘surgeon-defined anatomy’ has been developed that
      exclusively relies upon the usage of navigation technology. Points
      on the anatomy are digitized interactively by the surgeon and are
      used to build up an abstract geometrical model of the bony structures
      to be operated on. This},
      file = {Langlotz2002.pdf:Langlotz2002.pdf:PDF},
      issn = {1049-8907},
      keywords = {computer-assisted surgery,image guidance,intraoperative navigation,registration,
      REV},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • F. Langlotz, R. Bächler, U. Berlemann, L. P. Nolte, and R. Ganz, “Computer assistance for pelvic osteotomies,” Clinical orthopaedics and related research, vol. 354, p. 92, 1998.
    [Bibtex]
    @ARTICLE{Langlotz1998,
      author = {Langlotz, F. and Bächler, R. and Berlemann, U. and Nolte, L.P. and
      Ganz, R.},
      title = {Computer assistance for pelvic osteotomies},
      journal = {Clinical orthopaedics and related research},
      year = {1998},
      volume = {354},
      pages = {92},
      abstract = {To assist surgeons performing pelvic osteotomies for the treatment
      of dysplastic hips, an image guided freehand navigation system has
      been developed. Preoperative computed tomographic scan images are
      presented in various ways to the surgeon together with real time
      display of the instruments and surgical action on the computer screen.
      The system supports the preoperative plan and provides optimized
      control of surgical action. The main focus of the image guidance
      has been placed on the execution of the different required cuts and
      the reorientation of the acetabular fragment. Special attention also
      has been given to the development of a sophisticated surgeon-machine
      interface. Fourteen surgeries have been performed with image guidance
      so far. The visualization aids provided by the system are able to
      help reduce potential risk and thus increase safety and accuracy
      for this difficult class of surgical interventions.},
      keywords = {APP, OTS, GUI, PLA},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
  • F. Langlotz and L. Nolte, “Technical Approaches to Computer-Assisted Orthopedic Surgery,” European Journal of Trauma, vol. 30, iss. 1, pp. 1-11, 2004.
    [Bibtex]
    @ARTICLE{Langlotz2004,
      author = {Langlotz, Frank and Nolte, Lutz-Peter},
      title = {Technical Approaches to Computer-Assisted Orthopedic Surgery},
      journal = {European Journal of Trauma},
      year = {2004},
      volume = {30},
      pages = {1-11},
      number = {1},
      month = {February},
      abstract = {Surgical navigation systems and medical robotic devices are increasingly
      being used during trauma and orthopedic surgery. This article tries
      to present the underlying technology of these devices and to describe
      different approaches to the various aspects of the methods. To structure
      the variety of available products and presented research modules,
      a new categorization for these approaches is proposed. Examples of
      pre- or intraoperative imaging modalities, of trackers for navigation
      systems, of different surgical robots, and of methods for
      registration as well as referencing are discussed. Many applications
      that have been realized for numerous surgical procedures will be
      presented and their advantages, disadvantages, and possible
      implications will be elucidated.},
      file = {Langlotz2004.pdf:Langlotz2004.pdf:PDF},
      issn = {1439-0590},
      keywords = {categories of,navigation,registration,robotics, REV, OTS},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • R. Lattanzi, M. Viceconti, M. Petrone, P. Quadrani, and C. Zannoni, “Applications of 3D medical imaging in orthopaedic surgery: introducing the hip-op system,” Proceedings. First International Symposium on 3D Data Processing Visualization and Transmission, pp. 808-811, 2002.
    [Bibtex]
    @ARTICLE{Lattanzi2002,
      author = {Lattanzi, R. and Viceconti, M. and Petrone, M. and Quadrani, P. and
      Zannoni, C.},
      title = {Applications of 3D medical imaging in orthopaedic surgery: introducing
      the hip-op system},
      journal = {Proceedings. First International Symposium on 3D Data Processing
      Visualization and Transmission},
      year = {2002},
      pages = {808-811},
      abstract = {Computer Aided Surgery (CAS) systems help surgeons in planning the
      operation accurately, resulting in costs reducing, clinical outcome
      improving and enhancing of the health care delivery efficiency. This
      paper describes the Hip-Op system, which is a novel software for
      the preoperative planning of total hip replacement. The software
      uses a fully three-dimensional (3D) internal representation of the
      patient anatomy, derived from Computed Tomography data. The user
      interface is based on an innovative visualization paradigm, called
      Multimodal Display, and represents the anatomical objects by means
      of multiple views, which are familiar to medical professionals. Besides
      the original CT slices, also two synthetically generated radiographs
      are available, as well as a surface rendering view. The surgeon can
      load prosthetic models into the planning software environment, selecting
      them from those available in the implant database. Then he can perform
      a virtual surgery procedure in order to plan the proper position
      and orientation of the prosthesis within the host femur. Analysis
      modules allow to evaluate the planning from a biomechanical point
      of view.},
      file = {Lattanzi2002.pdf:Lattanzi2002.pdf:PDF},
      isbn = {0-7695-1521-5},
      owner = {thomaskroes},
      publisher = {IEEE Comput. Soc},
      timestamp = {2010.10.22}
    }
  • S. Lavallee and P. Cinquin, “IGOR: image guided operating robot,” in Advanced Robotics, 1991. ‘Robots in Unstructured Environments’, 91 ICAR., Fifth International Conference on, 1991.
    [Bibtex]
    @INPROCEEDINGS{Lavallee1991,
      author = {Lavallee, S. and Cinquin, P.},
      title = {IGOR: image guided operating robot},
      booktitle = {Advanced Robotics, 1991. 'Robots in Unstructured Environments', 91
      ICAR., Fifth International Conference on},
      year = {1991},
      month = {June},
      abstract = {Existing imaging devices can be used to plan complex medical and surgical
      interventions. Advances in robotics provide the opportunity of assisting
      the physician or the surgeon in performing the intervention. Assisting
      both planning and performing of interventions first raises problems
      of matching of various multimodality data. Then the performance of
      an intervention with a partially autonomous system gives specific
      problems which are discussed. A general methodology for computer
      assisted medical interventions is proposed, which turns out to be
      a particular case of the classical loop of perception-decision-action.
      Clinical applications are presented},
      file = {Lavallee1991.pdf:Lavallee1991.pdf:PDF},
      keywords = {IGOR;biomedical equipment;computer assisted medical interventions;image
      guided operating robot;medical image processing;multimodality data;partially
      autonomous system;surgery;biomedical equipment;medical image processing;robots;surgery;},
      owner = {thomaskroes},
      timestamp = {2011.01.06}
    }
  • M. Lee, “Hands-on Practice and Implementations on a Sound-Guided 3D Navigation System for Orthopedic Surgical Applications,” In Vitro, pp. 641-646, 2005.
    [Bibtex]
    @ARTICLE{Lee2005,
      author = {Lee, Ming-yih},
      title = {Hands-on Practice and Implementations on a Sound-Guided 3D Navigation
      System for Orthopedic Surgical Applications},
      journal = {In Vitro},
      year = {2005},
      pages = {641-646},
      abstract = {Computer assisted surgical navigation becomes crucial as the demand
      for accuracy and minimal invasiveness increases. During the treatment
      of tibial fracture with interlocking nail, the most uncomfortable
      procedure for an orthopedic surgeon is to find the location for distal
      locking screws. In this study, hands-on practice and implementation
      of a sound-guided 3D navigation system was discussed for the fixation
      of distal locking screws in tbe tibial intramedullary nailing. This
      system consists of a 3D digitizer arm, 3D coordinate registration
      / transformation module and sound-guided navigation module. In addition,
      the proposed sound-guided navigation module was designed with an
      audio guiding mechanism through which a sound with different tones
      and intermittence frequencies will be produced for surgical manipulation.
      In vitro assessment was performed with a donor bone successfully,
      and a clinical case of a young male with tibial fracture was also
      carried out at the operation theater with satisfied results. No preoperative
      computed tomography or intraoperative fluoroscopy was required.},
      file = {Lee2005.pdf:Lee2005.pdf:PDF},
      keywords = {navigation system,orthopedic surgeries,sound-guided},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • S. Lee, A. Chung, M. Lerotic, M. A. Hawkins, D. Tait, and G. Yang, “Dynamic shape instantiation for intra-operative guidance.,” Medical image computing and computer-assisted intervention : MICCAI … International Conference on Medical Image Computing and Computer-Assisted Intervention, vol. 13, iss. Pt 1, pp. 69-76, 2010.
    [Bibtex]
    @ARTICLE{Lee2010a,
      author = {Lee, Su-Lin and Chung, Adrian and Lerotic, Mirna and Hawkins, Maria
      A and Tait, Diana and Yang, Guang-Zhong},
      title = {Dynamic shape instantiation for intra-operative guidance.},
      journal = {Medical image computing and computer-assisted intervention : MICCAI
      ... International Conference on Medical Image Computing and Computer-Assisted
      Intervention},
      year = {2010},
      volume = {13},
      pages = {69-76},
      number = {Pt 1},
      month = {January},
      abstract = {Primary liver cancer and oligometastatic liver disease are one of
      the major causes of mortality worldwide and its treatment ranges
      from surgery to more minimally invasive ablative procedures. With
      the increasing availability of minimally invasive hepatic approaches,
      a real-time method of determining the 3D structure of the liver and
      its location during the respiratory cycle is clinically important.
      However, during treatment, it is difficult to acquire images spanning
      the entire 3D volume rapidly. In this paper, a dynamic 3D shape instantiation
      scheme is developed for providing subject-specific optimal scan planning.
      Using only limited planar information, it is possible to instantiate
      the entire 3D geometry of the organ of interest. The efficacy of
      the proposed method is demonstrated with both detailed numerical
      simulation and a liver phantom with known ground-truth data. Preliminary
      clinical application of the technique is evaluated on a patient group
      with metastatic liver tumours.},
      file = {Lee2010a.pdf:Lee2010a.pdf:PDF},
      keywords = {intra-operative guidance,patient-specific deformation analysis,regression
      analysis,shape instantiation,shape modeling, TEC},
      owner = {thomaskroes},
      pmid = {20879216},
      timestamp = {2010.10.22}
    }
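    [Python sketch] The Lee et al. (2010) abstract above instantiates a full 3D organ shape from limited planar information. The generic flavour of such shape instantiation, sketched below under the assumption of a simple PCA shape model (not the authors' actual regression scheme), is to estimate the model coefficients from the observed subset of coordinates and reconstruct the rest; training data and indices are random stand-ins.
      import numpy as np

      rng = np.random.default_rng(0)
      training = rng.normal(size=(20, 300))      # 20 training shapes x 300 coords
      mean = training.mean(axis=0)
      _, _, vt = np.linalg.svd(training - mean, full_matrices=False)
      P = vt[:5].T                               # first five principal modes

      full_shape = training[0]                   # pretend this shape is unknown
      obs = rng.choice(300, size=40, replace=False)  # "limited planar" samples

      # Least-squares estimate of mode weights from observed coordinates only
      b, *_ = np.linalg.lstsq(P[obs], full_shape[obs] - mean[obs], rcond=None)
      recon = mean + P @ b
      print(f"RMS reconstruction error: {np.sqrt(np.mean((recon - full_shape) ** 2)):.3f}")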
  • S. Lee, M. Lerotic, V. Vitiello, S. Giannarou, K. Kwok, M. Visentini-Scarzanella, and G. Yang, “From medical images to minimally invasive intervention: Computer assistance for robotic surgery.,” Computerized medical imaging and graphics : the official journal of the Computerized Medical Imaging Society, vol. 34, iss. 1, pp. 33-45, 2010.
    [Bibtex]
    @ARTICLE{Lee2010,
      author = {Lee, Su-Lin and Lerotic, Mirna and Vitiello, Valentina and Giannarou,
      Stamatia and Kwok, Ka-Wai and Visentini-Scarzanella, Marco and Yang,
      Guang-Zhong},
      title = {From medical images to minimally invasive intervention: Computer
      assistance for robotic surgery.},
      journal = {Computerized medical imaging and graphics : the official journal
      of the Computerized Medical Imaging Society},
      year = {2010},
      volume = {34},
      pages = {33-45},
      number = {1},
      month = {January},
      abstract = {Minimally invasive surgery has been established as an important way
      forward in surgery for reducing patient trauma and hospitalization
      with improved prognosis. The introduction of robotic assistance enhances
      the manual dexterity and accuracy of instrument manipulation. Further
      development of the field in using pre- and intra-operative imaging
      guidance requires the integration of the general anatomy of the patient
      with clear pathologic indications and geometrical information for
      preoperative planning and intra-operative manipulation. It also requires
      effective visualization and the recreation of haptic and tactile
      sensing with dynamic active constraints to improve consistency and
      safety of the surgical procedures. This paper describes key technical
      considerations of tissue deformation tracking, 3D reconstruction,
      subject-specific modeling, image guidance and augmented reality for
      robotic assisted minimally invasive surgery. It highlights the importance
      of adapting preoperative surgical planning according to intra-operative
      data and illustrates how dynamic information such as tissue deformation
      can be incorporated into the surgical navigation framework. Some
      of the recent trends are discussed in terms of instrument design
      and the usage of dynamic active constraints and human-robot perceptual
      docking for robotic assisted minimally invasive surgery.},
      file = {Lee2010.pdf:Lee2010.pdf:PDF},
      issn = {1879-0771},
      keywords = {Computer Simulation,Elasticity Imaging Techniques,Elasticity Imaging
      Techniques: methods,Humans,Imaging, Three-Dimensional,Imaging, Three-Dimensional:
      methods,Models, Biological,Robotics,Robotics: methods,Surgery, Computer-Assisted,Surgery,
      Computer-Assisted: methods,Surgical Procedures, Minimally Invasive,Surgical
      Procedures, Minimally Invasive: methods,User-Computer Interface},
      owner = {thomaskroes},
      pmid = {19699056},
      timestamp = {2010.10.22}
    }
  • Y. Lee, D. Terzopoulos, and K. Waters, “Realistic modeling for facial animation,” in Proceedings of the 22nd annual conference on Computer graphics and interactive techniques, 1995, pp. 55-62.
    [Bibtex]
    @CONFERENCE{Lee1995,
      author = {Lee, Y. and Terzopoulos, D. and Waters, K.},
      title = {Realistic modeling for facial animation},
      booktitle = {Proceedings of the 22nd annual conference on Computer graphics and
      interactive techniques},
      year = {1995},
      pages = {55 - 62},
      organization = {ACM},
      file = {Lee1995.pdf:Lee1995.pdf:PDF},
      isbn = {0897917014},
      owner = {thomaskroes},
      timestamp = {2011.01.03}
    }
  • T. Leenders, D. Vandevelde, G. Mahieu, and R. Nuyts, “Reduction in variability of acetabular cup abduction using computer assisted surgery: a prospective and randomized study,” Computer Aided Surgery, vol. 7, iss. 2, pp. 99-106, 2002.
    [Bibtex]
    @ARTICLE{Leenders2002,
      author = {Leenders, T. and Vandevelde, D. and Mahieu, G. and Nuyts, R.},
      title = {Reduction in variability of acetabular cup abduction using computer
      assisted surgery: a prospective and randomized study},
      journal = {Computer Aided Surgery},
      year = {2002},
      volume = {7},
      pages = {99 - 106},
      number = {2},
      file = {Leenders2002.pdf:Leenders2002.pdf:PDF},
      issn = {1097 - 0150},
      owner = {Thomas},
      publisher = {Wiley Online Library},
      timestamp = {2011.02.03}
    }
  • G. Lehmann, A. Chiu, D. Gobbi, Y. Starreveld, D. Boyd, M. Drangova, and T. Peters, “Towards dynamic planning and guidance of minimally invasive robotic cardiac bypass surgical procedures,” in Medical Image Computing and Computer-Assisted Intervention–MICCAI 2001, 2010, pp. 368-375.
    [Bibtex]
    @CONFERENCE{Lehmann2010,
      author = {Lehmann, G. and Chiu, A. and Gobbi, D. and Starreveld, Y. and Boyd,
      D. and Drangova, M. and Peters, T.},
      title = {Towards dynamic planning and guidance of minimally invasive robotic
      cardiac bypass surgical procedures},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention--MICCAI
      2001},
      year = {2010},
      pages = {368 - 375},
      organization = {Springer},
      file = {:C\:\\Thomas\\PHD\\Literature\\Articles\\Lehmann2010.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2010.10.26}
    }
  • K. S. Lehmann, J. Ritz, S. Valdeig, A. Schenk, C. Holmer, H. Peitgen, H. Buhr, and B. B. Frericks, “Portal vein segmentation of a 3D-planning system for liver surgery–in vivo evaluation in a porcine model.,” Annals of surgical oncology, vol. 15, iss. 7, pp. 1899-907, 2008.
    [Bibtex]
    @ARTICLE{Lehmann2008,
      author = {Lehmann, Kai S and Ritz, Joerg-P and Valdeig, Steffi and Schenk,
      Andrea and Holmer, Christoph and Peitgen, Heinz-O and Buhr, Heinz-J
      and Frericks, Bernd B},
      title = {Portal vein segmentation of a 3D-planning system for liver surgery--in
      vivo evaluation in a porcine model.},
      journal = {Annals of surgical oncology},
      year = {2008},
      volume = {15},
      pages = {1899-907},
      number = {7},
      month = {July},
      abstract = {BACKGROUND: Computer systems allow the planning of complex liver operations.
      The segmentation of intrahepatic vessels builds the basis for the
      calculation of liver segments and resection proposals. For surgical
      use, it is essential to know the capabilities and limitations of
      the segmentation. The aim of this study was to determine the sensitivity
      and precision of the portal vein segmentation of a computer planning
      system for liver surgery in vivo. METHODS: Segmentations were performed
      with the software system HepaVision on computed tomography (CT) scan
      data of domestic pigs. An in situ corrosion cast of the portal vein
      served as the gold standard. The segmentation results of the portal
      vein and the corrosion cast were compared with regard to sensitivity,
      precision, and amount of short-circuit segmentations. RESULTS: The
      methodology demonstrated high resolution ex situ. The in vivo sensitivity
      of the portal vein segmentation was 100\% for vessels of more than
      5 mm in diameter and 82\% for vessels of 3-4 mm. All segment branches
      were detected as well as 84\% of the first subsegment branches with
      a diameter of more than 3 mm. The precision of the system was 100\%
      for segment branches and 89\% for the first subsegment vessels. The
      amount of internal short-circuit segmentations was less than 3.0\%.
      No external short-circuits were found. CONCLUSION: The system has
      a high precision and sensitivity under clinical conditions. The segmentation
      is suitable for portal vein branches of the first and second order
      and for vessels of >/=3 mm in diameter.},
      file = {Lehmann2008.pdf:Lehmann2008.pdf:PDF},
      issn = {1534-4681},
      keywords = {Animals,Image Processing, Computer-Assisted,Imaging, Three-Dimensional,Liver,Liver:
      anatomy \& histology,Liver: surgery,Models, Animal,Portal Vein,Portal
      Vein: anatomy \& histology,Sensitivity and Specificity,Swine,Tomography,
      X-Ray Computed, IMP, TEC},
      owner = {thomaskroes},
      pmid = {18449610},
      timestamp = {2010.10.22}
    }
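    [Python sketch] The evaluation in the Lehmann et al. (2008) abstract above boils down to two ratios: sensitivity (reference branches that the segmentation found) and precision (segmented branches that correspond to a real branch). A trivial, hypothetical illustration with invented counts:
      reference_branches = 38   # branches visible in the corrosion cast (invented)
      detected_branches = 33    # of those, found by the segmentation (invented)
      segmented_branches = 35   # branches produced by the segmentation (invented)
      correct_segmented = 33    # of those, matching a real branch (invented)

      sensitivity = detected_branches / reference_branches
      precision = correct_segmented / segmented_branches
      print(f"sensitivity: {sensitivity:.1%}, precision: {precision:.1%}")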
  • R. Leonardi, D. Giordano, F. Maiorana, and C. Spampinato, “Automatic cephalometric analysis,” The Angle Orthodontist, vol. 78, iss. 1, 2008.
    [Bibtex]
    @ARTICLE{Leonardi2008,
      author = {Leonardi, R. and Giordano, D. and Maiorana, F. and Spampinato, C.},
      title = {Automatic cephalometric analysis},
      journal = {The Angle Orthodontist},
      year = {2008},
      volume = {78},
      number = {1},
      file = {Leonardi2008.pdf:Leonardi2008.pdf:PDF},
      keywords = {TEC, CMS},
      owner = {thomaskroes},
      timestamp = {2011.01.10}
    }
  • B. N. Li, P. B. Nguyen, S. H. Ong, J. Qin, L. Yang, and C. K. Chui, “Image Processing and Modeling for Active Needle Steering in Liver Surgery,” in Informatics in Control, Automation and Robotics, 2009. CAR ’09. International Asia Conference on, 2009, pp. 306-310.
    [Bibtex]
    @INPROCEEDINGS{Li2009,
      author = {Bing Nan Li and Phu Binh Nguyen and Ong, S.H. and Jing Qin and Liang
      Jing Yang and Chui, C.K.},
      title = {Image Processing and Modeling for Active Needle Steering in Liver
      Surgery},
      booktitle = {Informatics in Control, Automation and Robotics, 2009. CAR '09. International
      Asia Conference on},
      year = {2009},
      pages = {306 -310},
      month = {February},
      abstract = {Image-guided intervention and needle steering for radiofrequency ablation
      (RFA) of the liver is reviewed in this paper. In particular, the
      concept of active needle is proposed for RFA treatment. Methods and
      techniques of image processing and modeling are presented for a stereo
      liver model. The liver model and constituent components extracted
      from computerized tomography (CT) images can be used to plan the
      navigation paths of the RFA needle. The system also provides an option
      for active needles, which are more amenable to those refractory cases
      of RFA treatment.},
      file = {:Li2009.pdf:PDF},
      keywords = {RFA needle navigation path;active needle steering;computerized tomography;image
      modeling;image processing;radiofrequency ablation treatment;stereo
      liver surgery;biomedical equipment;computerised tomography;liver;medical
      image processing;needles;radiofrequency heating;stereo image processing;surgery;,
      TEC, IMP},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • Z. Li, C. Kleinstreuer, and M. Farber, “Computational analysis of biomechanical contributors to possible endovascular graft failure,” Biomechanics and Modeling in Mechanobiology, vol. 4, pp. 221-234, 2005.
    [Bibtex]
    @ARTICLE{Li2005,
      author = {Li, Z. and Kleinstreuer, C. and Farber, M.},
      title = {Computational analysis of biomechanical contributors to possible
      endovascular graft failure},
      journal = {Biomechanics and Modeling in Mechanobiology},
      year = {2005},
      volume = {4},
      pages = {221-234},
      note = {10.1007/s10237-005-0003-0},
      affiliation = {North Carolina State University Department of Mechanical and Aerospace
      Engineering and Department of Biomedical Engineering Raleigh USA},
      file = {Li2005.pdf:Li2005.pdf:PDF},
      issn = {1617-7959},
      issue = {4},
      keyword = {Physics and Astronomy},
      owner = {Thomas},
      publisher = {Springer Berlin / Heidelberg},
      timestamp = {2011.04.28},
      url = {http://dx.doi.org/10.1007/s10237-005-0003-0}
    }
  • H. Liao, H. Ishihara, H. H. Tran, K. Masamune, I. Sakuma, and T. Dohi, “Precision-guided surgical navigation system using laser guidance and 3D autostereoscopic image overlay.,” Computerized medical imaging and graphics : the official journal of the Computerized Medical Imaging Society, vol. 34, iss. 1, pp. 46-54, 2010.
    [Bibtex]
    @ARTICLE{Liao2010a,
      author = {Liao, Hongen and Ishihara, Hirotaka and Tran, Huy Hoang and Masamune,
      Ken and Sakuma, Ichiro and Dohi, Takeyoshi},
      title = {Precision-guided surgical navigation system using laser guidance
      and 3D autostereoscopic image overlay.},
      journal = {Computerized medical imaging and graphics : the official journal
      of the Computerized Medical Imaging Society},
      year = {2010},
      volume = {34},
      pages = {46-54},
      number = {1},
      month = {January},
      abstract = {This paper describes a precision-guided surgical navigation system
      for minimally invasive surgery. The system combines a laser guidance
      technique with a three-dimensional (3D) autostereoscopic image overlay
      technique. Images of surgical anatomic structures superimposed onto
      the patient are created by employing an animated imaging method called
      integral videography (IV), which can display geometrically accurate
      3D autostereoscopic images and reproduce motion parallax without
      the need for special viewing or tracking devices. To improve the
      placement accuracy of surgical instruments, we integrated an image
      overlay system with a laser guidance system for alignment of the
      surgical instrument and better visualization of patient's internal
      structure. We fabricated a laser guidance device and mounted it on
      an IV image overlay device. Experimental evaluations showed that
      the system could guide a linear surgical instrument toward a target
      with an average error of 2.48 mm and standard deviation of 1.76 mm.
      Further improvement to the design of the laser guidance device and
      the patient-image registration procedure of the IV image overlay
      will make this system practical; its use would increase surgical
      accuracy and reduce invasiveness.},
      file = {Liao2010a.pdf:Liao2010a.pdf:PDF},
      issn = {1879-0771},
      keywords = {Equipment Design,Equipment Failure Analysis,Humans,Imaging, Three-Dimensional,Imaging,
      Three-Dimensional: instrumentation,Robotics,Robotics: instrumentation,Sensitivity
      and Specificity,Subtraction Technique,Subtraction Technique: instrumentation,Surgery,
      Computer-Assisted,Surgery, Computer-Assisted: instrumentation,Surgical
      Procedures, Minimally Invasive,Surgical Procedures, Minimally Invasive:
      instrumen,User-Computer Interface, STV, TEC},
      owner = {thomaskroes},
      pmid = {19674871},
      timestamp = {2010.10.22}
    }
  • H. Liao, S. Nakajima, M. Iwahara, E. Kobayashi, I. Sakuma, N. Yahagi, and T. Dohi, “Intra-operative real-time 3-D information display system based on integral videography,” in Medical Image Computing and Computer-Assisted Intervention - MICCAI 2001, pp. 392-400, 2001.
    [Bibtex]
    @CONFERENCE{Liao2010,
      author = {Liao, H. and Nakajima, S. and Iwahara, M. and Kobayashi, E. and Sakuma,
      I. and Yahagi, N. and Dohi, T.},
      title = {Intra-operative real-time 3-D information display system based on
      integral videography},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention - MICCAI
      2001},
      year = {2001},
      pages = {392 - 400},
      organization = {Springer},
      file = {Liao2010.pdf:Liao2010.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2010.10.26}
    }
  • H. Liao, K. K. Wong, and Z. Xue, “Introduction to the special issue of image-guided surgical planning and therapy.,” Computerized medical imaging and graphics : the official journal of the Computerized Medical Imaging Society, vol. 34, iss. 1, pp. 1-2, 2010.
    [Bibtex]
    @ARTICLE{Liao2010b,
      author = {Liao, Hongen and Wong, Kelvin K and Xue, Zhong},
      title = {Introduction to the special issue of image-guided surgical planning
      and therapy.},
      journal = {Computerized medical imaging and graphics : the official journal
      of the Computerized Medical Imaging Society},
      year = {2010},
      volume = {34},
      pages = {1-2},
      number = {1},
      month = {January},
      file = {Liao2010b.pdf:Liao2010b.pdf:PDF},
      issn = {1879-0771},
      keywords = {Diagnostic Imaging,Diagnostic Imaging: trends,Preoperative Care,Preoperative
      Care: trends,Robotics,Robotics: trends,Surgery, Computer-Assisted,Surgery,
      Computer-Assisted: trends},
      owner = {thomaskroes},
      pmid = {19775868},
      timestamp = {2010.10.22}
    }
  • U. C. Liener, C. Reinhart, L. Kinzl, and F. Gebhard, “A new approach to computer guidance in orthopedic surgery using real time volume rendering.,” Journal of medical systems, vol. 23, iss. 1, pp. 35-40, 1999.
    [Bibtex]
    @ARTICLE{Liener1999,
      author = {Liener, U C and Reinhart, C and Kinzl, L and Gebhard, F},
      title = {A new approach to computer guidance in orthopedic surgery using real
      time volume rendering.},
      journal = {Journal of medical systems},
      year = {1999},
      volume = {23},
      pages = {35-40},
      number = {1},
      month = {February},
      abstract = {The purpose of this paper was to evaluate the ability of a new real
      time volume rendering system using raw unprocessed CT data on a four
      processor Pentium PC. A CT data set of the pelvic area was used to
      demonstrate the systems ability to integrate and visualize both data
      from a CT scan and a tracking system. The computer system consisted
      of a four processor Pentium PC and the software tool VGInsight (Volume
      Graphics GmbH). For study purposes in a laboratory setting a magnetic
      tracking system (Polemus Inc.) was used to simulate tracked surgical
      tools. With this new software tool and the tracking system the exact
      relative location of a tracked instrument within all structures of
      the pelvic area, soft tissue as well as bone, could be displayed
      in 3D and real time without preprocessing of the data set. Until
      recently real time volume rendering required expensive workstations.
      With a new software tool on a four processor Pentium PC the authors
      were able to introduce a 3D real time processed data supply to the
      surgeon.},
      file = {Liener1999.pdf:Liener1999.pdf:PDF},
      issn = {0148-5598},
      keywords = {Fractures, Bone,Fractures, Bone: diagnosis,Fractures, Bone: surgery,Humans,Image
      Processing, Computer-Assisted,Magnetic Resonance Imaging,Microcomputers,Orthopedic
      Procedures,Orthopedic Procedures: instrumentation,Orthopedic Procedures:
      trends,Pelvis,Pelvis: injuries,Pelvis: surgery,Technology Assessment,
      Biomedical,Therapy, Computer-Assisted,Tomography, X-Ray Computed,
      APP, GUI, VOR, OTS},
      owner = {thomaskroes},
      pmid = {10321378},
      timestamp = {2010.10.22}
    }
  • M. Lievin, “Stereoscopic augmented reality system for computer-assisted surgery,” International Congress Series, vol. 1230, pp. 107-111, 2001.
    [Bibtex]
    @ARTICLE{Lievin2001,
      author = {Lievin, M},
      title = {Stereoscopic augmented reality system for computer-assisted surgery},
      journal = {International Congress Series},
      year = {2001},
      volume = {1230},
      pages = {107-111},
      month = {June},
      abstract = {A first architecture for an augmented reality system in computer-assisted
      surgery is presented in this paper. Like in ‘‘X-ray vision’’
      systems, a stereoscopic overlay is visually superimposed on the patient.
      The main purpose of our approach is user-friendliness for the surgeon:
      no additive wearing equipment is required. Registration, rigid body
      location and 3D volume computation are proven to respect real-time
      processing, thanks to an optical navigation system and our integrated
      software framework. Studies are undertaken to replace our actual
      monitor display by an upcoming holographic screen.},
      file = {Lievin2001.pdf:Lievin2001.pdf:PDF},
      issn = {0531-5131},
      keywords = {augmented reality system,computer-assisted surgery,stereoscopic overlay,
      TEC, AUR, STV},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • X. Liu and L. Mao, “Visual simulation of soft tissue deformation,” in Computer and Communication Technologies in Agriculture Engineering (CCTAE), 2010 International Conference On, 2010, pp. 548-551.
    [Bibtex]
    @INPROCEEDINGS{Liu2010,
      author = {Xuemei Liu and Lei Mao},
      title = {Visual simulation of soft tissue deformation},
      booktitle = {Computer and Communication Technologies in Agriculture Engineering
      (CCTAE), 2010 International Conference On},
      year = {2010},
      volume = {3},
      pages = {548 - 551},
      month = {June},
      abstract = {Simulating the behavior of elastic objects is an important research
      in the field of virtual reality. Visual simulation of soft tissue
      deformation is the core part of the surgery simulation system. However,
      there is not a uniform deformable model. To choose a suitable model
      for surgery simulation, a classification of deformation models is
      offered in this paper. The deformable model can divide into the non-physically
      model, the physically model and the hybrid model. The advantages
      and disadvantages of three models are discussed in detail. The comparative
      analyses indicate that the hybrid model is a promising approach,
      which can satisfy the requirements of the surgery simulation system.
      This research achievement is significant for completing the simulation
      technology of soft tissue deformation and developing the real-time
      surgery simulation system.},
      file = {Liu2010.pdf:Liu2010.pdf:PDF},
      keywords = {soft tissue deformation;surgery simulation system;virtual reality;visual
      simulation;biological tissues;data visualisation;digital simulation;medical
      computing;surgery;virtual reality;, TEC, OCS},
      owner = {Thomas},
      timestamp = {2011.02.23}
    }
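    The abstract above classifies soft tissue deformation models into non-physically
    based, physically based, and hybrid approaches. Purely as an illustration of the
    physically based class (an assumption of this page, not code from the paper), the
    sketch below relaxes a one-dimensional mass-spring chain of tissue nodes under a
    constant instrument force using damped explicit Euler integration; node count,
    stiffness, damping, and time step are arbitrary.

      # Minimal mass-spring sketch of a physically based soft-tissue model:
      # a 1-D chain of nodes joined by linear springs, integrated with damped
      # explicit Euler. Illustrative only; all constants are arbitrary.
      import numpy as np

      n, rest_len = 10, 1.0                 # nodes along the chain, spring rest length
      k, damping, mass, dt = 25.0, 3.0, 1.0, 0.01

      pos = np.arange(n, dtype=float) * rest_len   # initial (rest) node positions
      vel = np.zeros(n)
      tool_force = np.zeros(n)
      tool_force[-1] = 5.0                  # constant "instrument" pull on the free end

      for _ in range(5000):                 # step towards static equilibrium
          force = tool_force - damping * vel
          stretch = np.diff(pos) - rest_len # elongation of each spring
          spring = k * stretch
          force[:-1] += spring              # each spring pulls its left node right...
          force[1:] -= spring               # ...and its right node left
          force[0] = 0.0                    # node 0 anchored (fixed tissue boundary)
          vel += dt * force / mass
          vel[0] = 0.0
          pos += dt * vel

      print(np.round(pos - np.arange(n) * rest_len, 3))   # node displacements

    At equilibrium every spring carries the full applied load, so node i settles at a
    displacement of roughly 0.2·i (load 5.0 divided by stiffness 25.0 per spring).
    Surgical simulators extend the same idea to 3-D meshes or replace it with the
    finite-element and hybrid formulations the abstract discusses.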
  • L. J. Lo, J. L. Marsh, M. W. Vannier, and V. V. Patel, “Craniofacial computer-assisted surgical planning and simulation.,” Clinics in plastic surgery, vol. 21, iss. 4, p. 501, 1994.
    [Bibtex]
    @ARTICLE{Lo1994,
      author = {Lo, L.J. and Marsh, J.L. and Vannier, M.W. and Patel, V.V.},
      title = {Craniofacial computer-assisted surgical planning and simulation.},
      journal = {Clinics in plastic surgery},
      year = {1994},
      volume = {21},
      pages = {501},
      number = {4},
      issn = {0094-1298},
      keywords = {CMS, APP, PLA, OCS},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
  • A. Lopes and K. Brodlie, “Improving the robustness and accuracy of the marching cubes algorithm for isosurfacing,” IEEE Transactions on Visualization and Computer Graphics, pp. 16-29, 2003.
    [Bibtex]
    @ARTICLE{Lopes2003,
      author = {Lopes, A. and Brodlie, K.},
      title = {Improving the robustness and accuracy of the marching cubes algorithm
      for isosurfacing},
      journal = {IEEE Transactions on Visualization and Computer Graphics},
      year = {2003},
      pages = {16 - 29},
      file = {Lopes2003.pdf:Lopes2003.pdf:PDF},
      issn = {1077-2626},
      keywords = {TEC},
      owner = {thomaskroes},
      publisher = {Published by the IEEE Computer Society},
      timestamp = {2011.01.03}
    }
  • W. E. Lorensen and H. E. Cline, “Marching cubes: A high resolution 3D surface construction algorithm,” in Proceedings of the 14th annual conference on Computer graphics and interactive techniques, pp. 163-169, 1987.
    [Bibtex]
    @CONFERENCE{Lorensen1987,
      author = {Lorensen, W.E. and Cline, H.E.},
      title = {Marching cubes: A high resolution 3D surface construction algorithm},
      booktitle = {Proceedings of the 14th annual conference on Computer graphics and
      interactive techniques},
      year = {1987},
      pages = {163 - 169},
      organization = {ACM},
      file = {Lorensen1987.pdf:Lorensen1987.pdf:PDF},
      isbn = {0897912276},
      keywords = {TEC},
      owner = {thomaskroes},
      timestamp = {2011.01.03}
    }
  • S. Lu, Y. Q. Xu, Y. Z. Zhang, L. Xie, H. Guo, and D. P. Li, “A novel computer-assisted drill guide template for placement of C2 laminar screws,” European Spine Journal, vol. 18, iss. 9, pp. 1379-1385, 2009.
    [Bibtex]
    @ARTICLE{Lu2009,
      author = {Lu, S. and Xu, Y.Q. and Zhang, Y.Z. and Xie, L. and Guo, H. and Li,
      D.P.},
      title = {A novel computer-assisted drill guide template for placement of C2
      laminar screws},
      journal = {European Spine Journal},
      year = {2009},
      volume = {18},
      pages = {1379 - 1385},
      number = {9},
      file = {Lu2009.pdf:Lu2009.pdf:PDF},
      issn = {0940-6719},
      keywords = {TRM, OTS},
      owner = {thomaskroes},
      publisher = {Springer},
      timestamp = {2010.12.22}
    }
  • B. Ma and R. E. Ellis, “Robust registration for computer-integrated orthopedic surgery: laboratory validation and clinical experience.,” Medical image analysis, vol. 7, iss. 3, pp. 237-50, 2003.
    [Bibtex]
    @ARTICLE{Ma2003,
      author = {Ma, B and Ellis, R E},
      title = {Robust registration for computer-integrated orthopedic surgery: laboratory
      validation and clinical experience.},
      journal = {Medical image analysis},
      year = {2003},
      volume = {7},
      pages = {237-50},
      number = {3},
      month = {September},
      abstract = {In order to provide navigational guidance during computer-integrated
      orthopedic surgery, the anatomy of the patient must first be registered
      to a medical image or model. A common registration approach is to
      digitize points from the surface of a bone and then find the rigid
      transformation that best matches the points to the model by constrained
      optimization. Many optimization criteria, including a least-squares
      objective function, perform poorly if the data include spurious data
      points (outliers). This paper describes a statistically robust, surface-based
      registration algorithm that we have developed for orthopedic surgery.
      To find an initial estimate, the user digitizes points from predefined
      regions of bone that are large enough to reliably locate even in
      the absence of anatomic landmarks. Outliers are automatically detected
      and managed by integrating a statistically robust M-estimator with
      the iterative-closest-point algorithm. Our in vitro validation method
      simulated the registration process by drawing registration data points
      from several sets of densely digitized surface points. The method
      has been used clinically in computer-integrated surgery for high
      tibial osteotomy, distal radius osteotomy, and excision of osteoid
      osteoma.},
      file = {Ma2003.pdf:Ma2003.pdf:PDF},
      issn = {1361-8415},
      keywords = {Adolescent,Adult,Algorithms,Bone Neoplasms,Bone Neoplasms: radiography,Bone
      Neoplasms: surgery,Femur,Femur: radiography,Humans,Image Interpretation,
      Computer-Assisted,Image Interpretation, Computer-Assisted: instrumen,Image
      Interpretation, Computer-Assisted: methods,Imaging, Three-Dimensional,Imaging,
      Three-Dimensional: instrumentation,Imaging, Three-Dimensional: methods,Lumbar
      Vertebrae,Lumbar Vertebrae: radiography,Osteoma, Osteoid,Osteoma,
      Osteoid: radiography,Osteoma, Osteoid: surgery,Osteotomy,Osteotomy:
      methods,Pattern Recognition, Automated,Phantoms, Imaging,Radius Fractures,Radius
      Fractures: radiography,Radius Fractures: surgery,Reproducibility
      of Results,Sensitivity and Specificity,Subtraction Technique,Surgery,
      Computer-Assisted,Surgery, Computer-Assisted: methods,Tibia,Tibia:
      injuries,Tibia: radiography,Tibial Fractures,Tibial Fractures: radiography,Tibial
      Fractures: surgery,Tomography, X-Ray Computed,Tomography, X-Ray Computed:
      methods, TEC},
      owner = {thomaskroes},
      pmid = {12946466},
      timestamp = {2010.10.22}
    }
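    For readers browsing the TEC entries, the registration strategy summarized in the
    abstract above, an iterative-closest-point loop whose least-squares alignment step
    is combined with a robust M-estimator so that spurious digitized points are
    down-weighted, can be sketched compactly. The NumPy code below is an illustrative
    sketch under simplifying assumptions (Huber-style weights, brute-force
    nearest-neighbour search, the model given as a dense point cloud); it is not the
    authors' implementation.

      # Illustrative robust ICP sketch: iterative closest point with an
      # M-estimator-style weighting in the rigid alignment step. Not the
      # paper's implementation; Huber weights and brute-force matching are
      # simplifying assumptions.
      import numpy as np

      def weighted_rigid_transform(src, dst, w):
          """Weighted least-squares rigid transform (R, t) with R @ src + t ~ dst."""
          w = w / w.sum()
          mu_s = (w[:, None] * src).sum(axis=0)
          mu_d = (w[:, None] * dst).sum(axis=0)
          H = (w[:, None] * (src - mu_s)).T @ (dst - mu_d)   # cross-covariance
          U, _, Vt = np.linalg.svd(H)
          R = Vt.T @ U.T
          if np.linalg.det(R) < 0:          # guard against reflections
              Vt[-1] *= -1
              R = Vt.T @ U.T
          return R, mu_d - R @ mu_s

      def robust_icp(points, model, iters=50, k=1.0):
          """Align digitized surface points to a model point cloud."""
          R, t = np.eye(3), np.zeros(3)
          for _ in range(iters):
              moved = points @ R.T + t
              d2 = ((moved[:, None, :] - model[None, :, :]) ** 2).sum(axis=2)
              idx = d2.argmin(axis=1)       # closest model point per data point
              res = np.sqrt(d2[np.arange(len(points)), idx])
              # Huber-style weights: residuals beyond k are down-weighted.
              w = np.where(res <= k, 1.0, k / np.maximum(res, 1e-12))
              R, t = weighted_rigid_transform(points, model[idx], w)
          return R, t

    In practice a k-d tree replaces the brute-force search, the scale parameter k is
    tied to the expected digitization noise, and the initial estimate comes from the
    predefined bone regions mentioned in the abstract.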
  • X. Ma, X. Wu, J. Liu, Y. Wu, and L. Sun, “Novel 3D Reconstruction Modeling Contributes to Development of Orthopaedic Surgical Interventions,” Biomedical Engineering, pp. 8-11, 2010.
    [Bibtex]
    @ARTICLE{Ma2010b,
      author = {Ma, Xing and Wu, Xiaoming and Liu, Jian and Wu, Yaoping and Sun,
      Long},
      title = {Novel 3D Reconstruction Modeling Contributes to Development of Orthopaedic
      Surgical Interventions},
      journal = {Biomedical Engineering},
      year = {2010},
      pages = {8-11},
      abstract = {Radiology plays important roles in orthopaedic surgery. Although various
      conventional radiological assessments including digital X-rays, magnetic
      resonance imaging (MRI), computerized tomography (CT) and the three-dimensional
      (3D) CT reconstruction images have been widely developed and utilized
      for preoperative assessment and planning, there are limitations.
      For example, despite the advances in 3D digital reconstruction images,
      the 3D structure, anatomy and damaged situation are still being inspected
      in a separate and flat manner (i.e. paper, film, etc.). Therefore
      the requirement of real 3D models for bone and joint has emerged
      clinically. In the present study, a CAD based 3D visualization system
      and a rapid prototyping (RP) technique were used to fabricate 3D
      physical models of highly difficult fractures and severe deformities
      in skeleton comminuted including severe pelvic/acetabular fractures,
      proximal humeral fractures, talar/ankle joint fractures, scoliolosis
      and progressive deformities in extremity. Applications and benefits
      of the biomedical visualization-based orthopaedic surgical strategies
      were elucidated.},
      file = {Ma2010b.pdf:Ma2010b.pdf:PDF},
      keywords = {bone and joint, injury, modeling, rapid prototyping, RP, surgical
      intervention},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • X. Ma, Y. Wu, X. Wu, J. Liu, and L. Sun, “Novel 3D reconstruction and visualization contribute to clinical therapy for complex extremity fractures,” 2010 International Conference on Bioinformatics and Biomedical Technology, pp. 312-316, 2010.
    [Bibtex]
    @ARTICLE{Ma2010,
      author = {Ma, Xing and Wu, Yaoping and Wu, Xiaoming and Liu, Jian and Sun,
      Long},
      title = {Novel 3D reconstruction and visualization contribute to clinical
      therapy for complex extremity fractures},
      journal = {2010 International Conference on Bioinformatics and Biomedical Technology},
      year = {2010},
      pages = {312-316},
      month = {April},
      abstract = {It is well known visualization play important parts in that medical
      imaging and traumatological orthopaedic surgery. Conventional radiological
      techniques including digital X-rays, computerized tomography (CT)
      and magnetic resonance imaging (MRI) have been widely used in clinic,
      but they have limitations. In order to achieve much deeper understandings
      and even better orthopaedic surgical interventions for complex fracture
      cases, eligible three- dimensional (3D) bone and joint simulations
      are desired. In this study, a CAD based 3D digital reconstruction
      system and a rapid prototyping (RP) technique were used to form 3D
      visualization and physical models of complex extremity fractures
      (CEF). Applications of the innovative biomedical simulation techniques
      and benefits of the 3D visualization and biomodeling in the highly
      difficult extremity fractures were elucidated.},
      isbn = {978-1-4244-6775-4},
      keywords = {complex extremity fractures, CEF, modeling, rapid prototyping, RP,
      visualization},
      owner = {thomaskroes},
      publisher = {IEEE},
      timestamp = {2010.10.22}
    }
  • R. J. Maciunas, “Computer-assisted neurosurgery,” Clinical Neurosurgery, vol. 53, p. 267, 2006.
    [Bibtex]
    @ARTICLE{Maciunas2006,
      author = {Maciunas, R.J.},
      title = {Computer-assisted neurosurgery},
      journal = {Clinical Neurosurgery},
      year = {2006},
      volume = {53},
      pages = {267},
      file = {Maciunas2006.PDF:Maciunas2006.PDF:PDF},
      issn = {0069-4827},
      keywords = {REV, NES},
      owner = {thomaskroes},
      publisher = {LIPPINCOTT WILLIAMS \& WILKINS},
      timestamp = {2011.01.11}
    }
  • R. Magjarevic, T. Dohi, K. Matsumiya, and K. Masamune, “Computer Aided Surgery in The 21 Century,” in 11th Mediterranean Conference on Medical and Biomedical Engineering and Computing 2007, T. Jarm, P. Kramar, and A. Zupanic, Eds., Springer Berlin Heidelberg, 2007, vol. 16, pp. 1132-1133.
    [Bibtex]
    @INCOLLECTION{Magjarevic2007,
      author = {Magjarevic, Ratko and Dohi, Takeyoshi and Matsumiya, K. and Masamune,
      K.},
      title = {Computer Aided Surgery in The 21 Century},
      booktitle = {11th Mediterranean Conference on Medical and Biomedical Engineering
      and Computing 2007},
      publisher = {Springer Berlin Heidelberg},
      year = {2007},
      editor = {Jarm, Tomaz and Kramar, Peter and Zupanic, Anze},
      volume = {16},
      series = {IFMBE Proceedings},
      pages = {1132 - 1133},
      abstract = {Realization of new surgical treatment in the 21st century, it is necessary
      to use various advanced technologies; surgical robots, three-dimensional
      medical images, computer graphics, computer simulation technology
      and others. Three-dimensional medical image for surgical operation
      provides surgeons with advanced vision. A surgical robot provides
      surgeons with advanced hand, but it is not a machine to do the same
      action of a surgeon using scissors or a scalpel. The advanced vision
      and hands available to surgeons are creating new surgical fields,
      which are minimally invasive surgery, noninvasive surgery, virtual
      reality microsurgery, tele-surgery, fetus surgery, neuro-informatics
      surgery and others in the 21st century.},
      affiliation = {University of Tokyo 7-3-1 Hongo Tokyo Japan},
      file = {Magjarevic2007.pdf:Magjarevic2007.pdf:PDF},
      isbn = {978-3-540-73044-6},
      keyword = {Engineering},
      keywords = {REV},
      owner = {thomaskroes},
      timestamp = {2010.11.11}
    }
  • L. Maier-Hein, S. A. Müller, F. Pianka, S. Wörz, B. P. Müller-Stich, A. Seitel, K. Rohr, H. P. Meinzer, B. M. Schmied, and I. Wolf, “Respiratory motion compensation for CT-guided interventions in the liver,” Computer Aided Surgery, vol. 13, iss. 3, pp. 125-138, 2008.
    [Bibtex]
    @ARTICLE{Maier2008,
      author = {Maier-Hein, L. and M{\\"u}ller, S.A. and Pianka, F. and W{\\"o}rz,
      S. and M{\\"u}ller-Stich, B.P. and Seitel, A. and Rohr, K. and Meinzer,
      H.P. and Schmied, B.M. and Wolf, I.},
      title = {Respiratory motion compensation for CT-guided interventions in the
      liver},
      journal = {Computer Aided Surgery},
      year = {2008},
      volume = {13},
      pages = {125 - 138},
      number = {3},
      issn = {1092-9088},
      keywords = {TEC, HES},
      publisher = {Informa UK Ltd UK}
    }
  • J. B. Maintz and M. A. Viergever, “A survey of medical image registration.,” Medical image analysis, vol. 2, iss. 1, pp. 1-36, 1998.
    [Bibtex]
    @ARTICLE{Maintz1998,
      author = {Maintz, J B and Viergever, M A},
      title = {A survey of medical image registration.},
      journal = {Medical image analysis},
      year = {1998},
      volume = {2},
      pages = {1-36},
      number = {1},
      month = {March},
      abstract = {The purpose of this paper is to present a survey of recent (published
      in 1993 or later) publications concerning medical image registration
      techniques. These publications will be classified according to a
      model based on nine salient criteria, the main dichotomy of which
      is extrinsic versus intrinsic methods. The statistics of the classification
      show definite trends in the evolving registration techniques, which
      will be discussed. At this moment, the bulk of interesting intrinsic
      methods is based on either segmented points or surfaces, or on techniques
      endeavouring to use the full information content of the images involved.},
      file = {Maintz1998.pdf:Maintz1998.pdf:PDF},
      issn = {1361-8415},
      keywords = {Abdomen,Diagnostic Imaging,Diagnostic Imaging: classification,Diagnostic
      Imaging: methods,Diagnostic Imaging: statistics \& numerical data,Extremities,Head,Humans,Pelvis,Reproducibility
      of Results,Spine,Thorax, REV, IMP},
      owner = {thomaskroes},
      pmid = {10638851},
      timestamp = {2010.10.22}
    }
  • V. G. Mallapragada, N. Sarkar, and T. K. Podder, “Robotic system for tumor manipulation and ultrasound image guidance during breast biopsy,” in Engineering in Medicine and Biology Society, 2008. EMBS 2008. 30th Annual International Conference of the IEEE, 2008, pp. 5589-5592.
    [Bibtex]
    @INPROCEEDINGS{Mallapragada2008,
      author = {Mallapragada, Vishnu G. and Sarkar, Nilanjan and Podder, Tarun K.},
      title = {Robotic system for tumor manipulation and ultrasound image guidance
      during breast biopsy},
      booktitle = {Engineering in Medicine and Biology Society, 2008. EMBS 2008. 30th
      Annual International Conference of the IEEE},
      year = {2008},
      pages = {5589 -5592},
      month = {August},
      abstract = {Tumor mobility poses significant difficulty in obtaining tissue samples
      during ultrasound guided breast biopsy. In this work, we present
      a new concept for coordinated real-time tumor manipulation and ultrasound
      imaging using a hybrid control architecture. The idea here is to
      demonstrate that it is possible to (1) manipulate a tumor in real-time
      by applying controlled external force, (2) control the position of
      the ultrasound probe for tracking out-of-plane target movement, and
      (3) coordinate the above systems in an automated way such that the
      tumor does not deviate from the path of the needle. Experiments are
      performed on breast tissue mimicking phantoms to demonstrate the
      efficacy of this technique. The success of this approach has the
      potential to reduce the number of attempts a surgeon makes to capture
      the desired tissue specimen, minimize tissue damage, improve speed
      of biopsy, and reduce patient discomfort.},
      file = {:Mallapragada2008.pdf:PDF},
      issn = {1557-170X},
      keywords = {Artificial Intelligence;Breast Neoplasms;Female;Humans;Image Interpretation,
      Computer-Assisted;Imaging, Three-Dimensional;Robotics;Surgery, Computer-Assisted;Ultrasonography,
      Interventional;},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • P. R. Mansat and M. Barea, “Anatomic variation the glenoid of the mechanical properties of Inf-Sup,” Knowledge Creation Diffusion Utilization, 2000.
    [Bibtex]
    @ARTICLE{Mansat2000,
      author = {Mansat, Pierre Robert and Barea, Michel},
      title = {Anatomic variation the glenoid of the mechanical properties of Inf-Sup},
      journal = {Knowledge Creation Diffusion Utilization},
      year = {2000},
      file = {Mansat2000.pdf:Mansat2000.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • B. Mansoux, L. Nigay, and J. Troccaz, “Output Multimodal Interaction: The Case of Augmented Surgery,” in People and Computers XX — Engage, N. Bryan-Kinns, A. Blanford, P. Curzon, and L. Nigay, Eds., Springer London, 2007, pp. 177-192.
    [Bibtex]
    @INCOLLECTION{Mansoux2007,
      author = {Mansoux, Benoît and Nigay, Laurence and Troccaz, Jocelyne},
      title = {Output Multimodal Interaction: The Case of Augmented Surgery},
      booktitle = {People and Computers XX — Engage},
      publisher = {Springer London},
      year = {2007},
      editor = {Bryan-Kinns, Nick and Blanford, Ann and Curzon, Paul and Nigay, Laurence},
      pages = {177 - 192},
      abstract = {Output multimodal interaction involves choice and combination of relevant
      interaction modalities to present information to the user. In this
      paper, we present a framework based on reusable software components
      for rapidly developing output multimodal interfaces by choosing and
      combining interaction modalities. Such an approach enables us to
      quickly explore several design alternatives as part of an iterative
      design process. Our approach is illustrated by examples from a computer-assisted
      surgery system that runs in a specific environment (i.e. an operating
      room) and so needs adapted multimodal interaction. Our approach supports
      the exploration of several output multimodal interaction design alternatives
      with the surgeons.},
      affiliation = {CLIPS-IMAG / équipe IIHM 385 rue de la Bibliothèque 38041 Grenoble
      cedex 9 France},
      file = {Mansoux2007.pdf:Mansoux2007.pdf:PDF},
      isbn = {978-1-84628-664-3},
      keyword = {Computer Science},
      keywords = {TEC, AUR},
      owner = {thomaskroes},
      timestamp = {2011.01.06}
    }
  • M. Marcacci, L. Nofrini, F. Iacono, A. Di Martino, S. Bignozzi, and M. Lo Presti, “A novel computer-assisted surgical technique for revision total knee arthroplasty.,” Computers in biology and medicine, vol. 37, iss. 12, pp. 1771-9, 2007.
    [Bibtex]
    @ARTICLE{Marcacci2007,
      author = {Marcacci, Maurilio and Nofrini, Laura and Iacono, Francesco and {Di
      Martino}, Alessandro and Bignozzi, Simone and {Lo Presti}, Mirco},
      title = {A novel computer-assisted surgical technique for revision total knee
      arthroplasty.},
      journal = {Computers in biology and medicine},
      year = {2007},
      volume = {37},
      pages = {1771-9},
      number = {12},
      month = {December},
      abstract = {Revision total knee arthroplasty (RTKA) is a skill-demanding intervention
      presenting several technical challenges to the surgeon due to bone
      deficiencies and lack of anatomical references. Computer-assisted
      navigation systems can potentially solve these problems. An innovative
      computer-assisted surgical technique for RTKA is presented. The system
      is image free. Based on anatomical landmarks acquired on the patient,
      the system automatically plans the intervention, and provides the
      surgeon with tools to analyse and modify the proposed plan and to
      accurately reproduce it on the patient. Although we performed few
      cases with this navigated procedure, early results obtained demonstrated
      to be very promising.},
      file = {Marcacci2007.pdf:Marcacci2007.pdf:PDF},
      issn = {0010-4825},
      keywords = {Arthroplasty, Replacement, Knee,Humans,Surgery, Computer-Assisted},
      owner = {thomaskroes},
      pmid = {17618998},
      timestamp = {2010.10.22}
    }
  • C. Marchetti, A. Bianchi, L. Muyldermans, M. Di Martino, L. Lancellotti, and A. Sarti, “Validation of new soft tissue software in orthognathic surgery planning,” International Journal of Oral and Maxillofacial Surgery, vol. 40, iss. 1, pp. 26-32, 2011.
    [Bibtex]
    @ARTICLE{Marchetti2011,
      author = {C. Marchetti and A. Bianchi and L. Muyldermans and M. Di Martino
      and L. Lancellotti and A. Sarti},
      title = {Validation of new soft tissue software in orthognathic surgery planning},
      journal = {International Journal of Oral and Maxillofacial Surgery},
      year = {2011},
      volume = {40},
      pages = {26 - 32},
      number = {1},
      abstract = {This study tests computer imaging software (SurgiCase-CMF®, Materialise)
      that enables surgeons to perform virtual orthognathic surgical planning
      using a three dimensional (3D) utility that previews the final shape
      of hard and soft tissues. It includes a soft tissue simulation module
      that has created images of soft tissues altered through bimaxillary
      orthognathic surgery to correct facial deformities. Cephalometric
      radiographs and CT scans were taken of each patient before and after
      surgery. The surgical planning system consists of four stages: CT
      data reconstruction; 3D model generation of facial hard and soft
      tissue; different virtual surgical planning and simulation modes;
      and various preoperative previews of the soft tissues. Surgical planning
      and simulation is based on a 3D CT reconstructed bone model and soft
      tissue image generation is based on physical algorithms. The software
      rapidly follows clinical options to generate a series of simulations
      and soft tissue models; to avoid TMJ functional problems, pre-surgical
      plans were evaluated by an orthodontist. Comparing simulation results
      with postoperative CT data, the reliability of the soft tissues preview
      was >91%. SurgiCase® software can provide a realistic, accurate forecast
      of the patient's facial appearance after surgery.},
      file = {Marchetti2011.pdf:Marchetti2011.pdf:PDF},
      issn = {0901-5027},
      keywords = {computer imaging software, OCS, CMS, APP, PLA},
      owner = {Thomas},
      timestamp = {2011.02.15}
    }
  • P. Markelj, D. Tomazevic, B. Likar, and F. Pernus, “A review of 3D/2D registration methods for image-guided interventions.,” Medical image analysis, 2010.
    [Bibtex]
    @ARTICLE{Markelj2010,
      author = {Markelj, P and Tomazevic, D and Likar, B and Pernus, F},
      title = {A review of 3D/2D registration methods for image-guided interventions.},
      journal = {Medical image analysis},
      year = {2010},
      month = {April},
      abstract = {Registration of pre- and intra-interventional data is one of the key
      technologies for image-guided radiation therapy, radiosurgery, minimally
      invasive surgery, endoscopy, and interventional radiology. In this
      paper, we survey those 3D/2D data registration methods that utilize
      3D computer tomography or magnetic resonance images as the pre-interventional
      data and 2D X-ray projection images as the intra-interventional data.
      The 3D/2D registration methods are reviewed with respect to image
      modality, image dimensionality, registration basis, geometric transformation,
      user interaction, optimization procedure, subject, and object of
      registration.},
      file = {Markelj2010.pdf:Markelj2010.pdf:PDF},
      issn = {1361-8423},
      keywords = {IMP, REV},
      owner = {thomaskroes},
      pmid = {20452269},
      timestamp = {2010.10.22}
    }
  • R. Marmulla, M. Hilbert, and H. Niederdellmann, “Inherent precision of mechanical, infrared and laser-guided navigation systems for computer-assisted surgery,” Journal of Cranio-Maxillofacial Surgery, vol. 25, iss. 4, pp. 192-197, 1997.
    [Bibtex]
    @ARTICLE{Marmulla1997,
      author = {Marmulla, R. and Hilbert, M. and Niederdellmann, H.},
      title = {Inherent precision of mechanical, infrared and laser-guided navigation
      systems for computer-assisted surgery},
      journal = {Journal of Cranio-Maxillofacial Surgery},
      year = {1997},
      volume = {25},
      pages = {192 - 197},
      number = {4},
      file = {Marmulla1997.pdf:Marmulla1997.pdf:PDF},
      issn = {1010-5182},
      owner = {Thomas},
      publisher = {Elsevier},
      timestamp = {2011.02.04}
    }
  • R. Marmulla, T. Luth, J. Muhling, and S. Hassfeld, “Automated laser registration in image-guided surgery: evaluation of the correlation between laser scan resolution and navigation accuracy.,” International journal of oral and maxillofacial surgery, vol. 33, iss. 7, pp. 642-8, 2004.
    [Bibtex]
    @ARTICLE{Marmulla2004,
      author = {Marmulla, R and Luth, T and Muhling, J and Hassfeld, S},
      title = {Automated laser registration in image-guided surgery: evaluation
      of the correlation between laser scan resolution and navigation accuracy.},
      journal = {International journal of oral and maxillofacial surgery},
      year = {2004},
      volume = {33},
      pages = {642-8},
      number = {7},
      month = {October},
      abstract = {Markerless patient registration based on the facial skin surface makes
      logistics prior to image-guided surgery much easier, as it is not
      necessary to place and measure registration markers. A laser scan
      registration of the surgical site takes the place of conventional
      marker-based registration. In a clinical study, the stability and
      accuracy of markerless patient registration was evaluated in 12 patients.
      Intraoral titanium markers served as targets for the infrared-pointer
      of the navigation system in order to check the accuracy of the markerless
      registration process. The correlation between laser scan resolution
      and navigation accuracy was checked using seven different laser scan
      resolutions (a cloud of 300,000 laser scan points down to 3750 laser
      scan points of the surgical site). The markerless patient registration
      was successful as long as high laser scan resolution was used (30,000
      laser scan points and more): the titanium markers were detected with
      a mean deviation of 1.1 +/- 0.2 mm. Low resolution laser scans (6000
      laser scan points of the surgical site and less) revealed inaccuracies
      up to 6 mm.},
      file = {Marmulla2004.pdf:Marmulla2004.pdf:PDF},
      issn = {0901-5027},
      keywords = {Humans,Imaging, Three-Dimensional,Imaging, Three-Dimensional: methods,Lasers,Lasers:
      diagnostic use,Prospective Studies,Reproducibility of Results,Skull
      Neoplasms,Skull Neoplasms: diagnosis,Skull Neoplasms: radiography,Surgery,
      Computer-Assisted,Tomography, X-Ray Computed},
      owner = {thomaskroes},
      pmid = {15337176},
      timestamp = {2010.10.22}
    }
  • R. Marmulla and H. Niederdellmann, “Surgical planning of computer-assisted repositioning osteotomies,” Plastic and reconstructive surgery, vol. 104, iss. 4, p. 938, 1999.
    [Bibtex]
    @ARTICLE{Marmulla1999,
      author = {Marmulla, R. and Niederdellmann, H.},
      title = {Surgical planning of computer-assisted repositioning osteotomies},
      journal = {Plastic and reconstructive surgery},
      year = {1999},
      volume = {104},
      pages = {938},
      number = {4},
      issn = {0032-1052},
      keywords = {APP, PLA, OTS},
      owner = {Thomas},
      timestamp = {2011.02.04}
    }
  • R. Marmulla and H. Niederdellmann, “Computer-assisted bone segment navigation,” Journal of Cranio-Maxillofacial Surgery, vol. 26, iss. 6, pp. 347-359, 1998.
    [Bibtex]
    @ARTICLE{Marmulla1998,
      author = {Rüdiger Marmulla and Herbert Niederdellmann},
      title = {Computer-assisted bone segment navigation},
      journal = {Journal of Cranio-Maxillofacial Surgery},
      year = {1998},
      volume = {26},
      pages = {347 - 359},
      number = {6},
      abstract = {Computer-assisted bone segment navigation is defined as the
      precise 3-D positioning of geometrically mapped and mathematically
      described skeletal segments. These bone segments are osteotomized,
      fractured or prefabricated according to a surgical plan. The high-precision
      positioning should have an accuracy of 1 mm or better. Segment navigation
      should be prepared with plain computed tomography (CT) without the
      implantation of registration markers before CT in order to reduce
      the number of CTs and operations. The Surgical Segment Navigator
      (SSN) was developed at the University of Regensburg with the support
      of Carl Zeiss. This is the first system to meet these criteria. The
      SSN is based on an infrared positioning device which is connected
      to a Hewlett Packard LD Pro Workstation. Infrared transmitters are
      connected to individual templates which are fixed to the bone segment
      by osteosynthesis screws. Intraoperative correlation between surgical
      planning and surgical site is achieved by use of a surface-pattern
      of the bone segment which fits equally well to the laboratory model
      and the conditions encountered in the patient. The concept of the
      SSN was submitted by Carl Zeiss as German Patent DE 19747427 A1 in
      1997. The SSN system presented here has already been applied clinically
      and its precision has been evaluated by bone segment navigation in
      human cadavers.},
      file = {Marmulla1998.pdf:Marmulla1998.pdf:PDF},
      issn = {1010-5182},
      keywords = {APP, CMS},
      owner = {thomaskroes},
      timestamp = {2010.11.09}
    }
  • A. Marsh, F. Simistira, and R. Robb, “VR in medicine: Virtual colonoscopy,” Future Generation Computer Systems, vol. 14, iss. 3-4, pp. 253-264, 1998.
    [Bibtex]
    @ARTICLE{Marsh1998,
      author = {Marsh, A. and Simistira, F. and Robb, R.},
      title = {VR in medicine: Virtual colonoscopy},
      journal = {Future Generation Computer Systems},
      year = {1998},
      volume = {14},
      pages = {253-264},
      number = {3-4},
      month = {August},
      abstract = {Colon/rectal cancer is the second most common cause of death, yet
      among the most preventable when detected in its early stages. The
      traditional diagnostic procedures cause tremendous discomfort and
      are deeply invasive. The motivation for this work is firstly to develop
      an alternative technique to visualise the inner mucosal surface of
      the colonic wall. This technique will be based on three-dimensional
      (3D) visualisation and virtual reality to perform virtual endoscopy.
      However, there is a requirement of vast computational support. Therefore,
      secondly, this paper will discuss the possibilities adopting high
      performance computing and networking to support virtual reality medical
      applications.},
      file = {Marsh1998.pdf:Marsh1998.pdf:PDF},
      issn = {0167739X},
      keywords = {telemedicine,virtual colonoscopy,virtual reality,www, TEC},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • S. Martelli, S. Bignozzi, M. Bontempi, S. Zaffagnini, and L. Garcia, “Comparison of an optical and a mechanical navigation system,” Medical Image Computing and Computer-Assisted Intervention-MICCAI 2003, pp. 303-310, 2003.
    [Bibtex]
    @ARTICLE{Martelli2003,
      author = {Martelli, S. and Bignozzi, S. and Bontempi, M. and Zaffagnini, S.
      and Garcia, L.},
      title = {Comparison of an optical and a mechanical navigation system},
      journal = {Medical Image Computing and Computer-Assisted Intervention-MICCAI
      2003},
      year = {2003},
      pages = {303 - 310},
      keywords = {REV},
      owner = {Thomas},
      publisher = {Springer},
      timestamp = {2011.02.03}
    }
  • Y. Masutani, K. Masamune, and T. Dohi, “Region-growing based feature extraction algorithm for tree-like objects,” in Visualization in Biomedical Computing, pp. 159-171, 1996.
    [Bibtex]
    @CONFERENCE{Masutani1996,
      author = {Masutani, Y. and Masamune, K. and Dohi, T.},
      title = {Region-growing based feature extraction algorithm for tree-like objects},
      booktitle = {Visualization in Biomedical Computing},
      year = {1996},
      pages = {159 - 171},
      organization = {Springer},
      file = {Masutani1996.pdf:Masutani1996.pdf:PDF},
      keywords = {IMP},
      owner = {thomaskroes},
      timestamp = {2011.01.05}
    }
  • Y. Masutani, Y. Yamauchi, M. Suzuki, Y. Ohta, T. Dohi, M. Tsuzuki, and D. Hashimoto, “Development of interactive vessel modelling system for hepatic vasculature from MR images,” Medical and Biological Engineering and Computing, vol. 33, iss. 1, pp. 97-101, 1995.
    [Bibtex]
    @ARTICLE{Masutani1995,
      author = {Masutani, Y. and Yamauchi, Y. and Suzuki, M. and Ohta, Y. and Dohi,
      T. and Tsuzuki, M. and Hashimoto, D.},
      title = {Development of interactive vessel modelling system for hepatic vasculature
      from MR images},
      journal = {Medical and Biological Engineering and Computing},
      year = {1995},
      volume = {33},
      pages = {97 - 101},
      number = {1},
      file = {Masutani1995.pdf:Masutani1995.pdf:PDF},
      issn = {0140-0118},
      keywords = {TEC},
      owner = {Thomas},
      publisher = {Springer},
      timestamp = {2011.02.01}
    }
  • S. Matsushita, H. Oyamada, M. Kusakabe, and N. Suzuki, “Attempt to extract 3D image of liver automatically out of abdominal MRI,” in Society of Photo-Optical Instrumentation Engineers (SPIE) Conference Series, vol. 1898, pp. 803-808, 1993.
    [Bibtex]
    @CONFERENCE{Matsushita1993,
      author = {Matsushita, S. and Oyamada, H. and Kusakabe, M. and Suzuki, N.},
      title = {Attempt to extract 3D image of liver automatically out of abdominal
      MRI},
      booktitle = {Society of Photo-Optical Instrumentation Engineers (SPIE) Conference
      Series},
      year = {1993},
      volume = {1898},
      pages = {803 - 808},
      file = {Matsushita1993.pdf:Matsushita1993.pdf:PDF},
      issn = {0277-786X},
      keywords = {TEC},
      owner = {Thomas},
      timestamp = {2011.02.01}
    }
  • D. Maupu, M. H. Van Horn, S. Weeks, and E. Bullitt, “3D stereo interactive medical visualization,” Computer Graphics and Applications, IEEE, vol. 25, iss. 5, pp. 67-71, 2005.
    [Bibtex]
    @ARTICLE{Maupu2005,
      author = {Maupu, D. and Van Horn, M.H. and Weeks, S. and Bullitt, E.},
      title = {3D stereo interactive medical visualization},
      journal = {Computer Graphics and Applications, IEEE},
      year = {2005},
      volume = {25},
      pages = {67 - 71},
      number = {5},
      month = {September - October},
      abstract = {Our interactive, 3D stereo display helps guide clinicians during endovascular
      procedures, such as intraoperative needle insertion and stent placement
      relative to the target organs. We describe a new method of guiding
      endovascular procedures using interactive 3D stereo visualizations.
      We use as an example the transjugular intrahepatic portosystemic
      shunt (TIPS) procedure. Our goal is to increase the speed and safety
      of endovascular procedures by providing the interventionalist with
      3D information as the operation proceeds. Our goal is to provide
      3D image guidance of the TIPS procedure so that the interventionalist
      can readily adjust the needle position and trajectory to reach the
      target on the first pass. We propose a 3D stereo display of the interventionalist's
      needle and target vessels. We also add interactivity via head tracking
      so that the interventionalist gains a better 3D sense of the relationship
      between the target vessels and the needle during needle advancement.},
      file = {:Maupu2005.pdf:PDF},
      issn = {0272-1716},
      keywords = {3D image guidance;3D information;3D stereo display;endovascular procedures;interactive
      medical visualization;intraoperative needle insertion;stent placement;transjugular
      intrahepatic portosystemic shunt procedure;blood vessels;computerised
      tomography;data visualisation;image registration;image segmentation;interactive
      systems;medical image processing;solid modelling;stereo image processing;three-dimensional
      displays;Imaging, Three-Dimensional;Photogrammetry;Portacaval Shunt,
      Surgical;Radiographic Image Interpretation, Computer-Assisted;Surgery,
      Computer-Assisted;User-Computer Interface;Vascular Surgical Procedures;,
      TEC, STV},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • N. Maurel, A. Diop, and J. Grimberg, “A 3D finite element model of an implanted scapula: importance of a multiparametric validation using experimental data.,” Journal of biomechanics, vol. 38, iss. 9, pp. 1865-72, 2005.
    [Bibtex]
    @ARTICLE{Maurel2005,
      author = {Maurel, N and Diop, A and Grimberg, J},
      title = {A 3D finite element model of an implanted scapula: importance of
      a multiparametric validation using experimental data.},
      journal = {Journal of biomechanics},
      year = {2005},
      volume = {38},
      pages = {1865-72},
      number = {9},
      month = {September},
      abstract = {In order to help to understand the loosening phenomenon around gleno\"{\i}d
      prostheses, a 3D finite element model of a previously tested implanted
      scapula has been developed. The construction of the model was done
      using CT scans of the tested scapula. Different bone material properties
      were tested and shell elements or 8 nodes hexaedric elements were
      used to model the cortical bone. Surface contact elements were introduced
      on one hand between the bone and the lower part of the plate of the
      implant, and on the other, between the loading metallic ball and
      the upper surface of the implant. The results of the model were compared
      with those issued from in vitro experiments carried out on the same
      scapula. The evaluation of the model was done for nine cases of loading
      of 500 N distributed on the implant, in terms of strains (principal
      strains of six spots around peripheral cortex of the gleno\"{\i}d)
      and displacement of four points positioned on the implant. The best
      configuration of the model presented here, fits with experiments
      for most of the strains (difference lower than 150microdef) but it
      seems to be still too stiff (mainly in the lower part). Nevertheless,
      we want, in this paper, to underline the importance of doing a multiparametric
      validation for such a model. Indeed, some models can give correct
      results for one case of loading but bad results for another kind
      of loading, some others can give good results for one kind of compared
      parameters (like strains for instance) but bad results for the other
      one (like displacements).},
      file = {Maurel2005.pdf:Maurel2005.pdf:PDF},
      issn = {0021-9290},
      keywords = {Aged,Aged, 80 and over,Computer Simulation,Diagnosis, Computer-Assisted,Diagnosis,
      Computer-Assisted: methods,Elasticity,Equipment Failure Analysis,Equipment
      Failure Analysis: methods,Finite Element Analysis,Humans,Imaging,
      Three-Dimensional,Imaging, Three-Dimensional: methods,Joint Prosthesis,Male,Models,
      Biological,Prosthesis Failure,Prosthesis Fitting,Prosthesis Fitting:
      methods,Radiographic Image Interpretation, Computer-Assist,Risk Assessment,Risk
      Assessment: methods,Risk Factors,Scapula,Scapula: physiopathology,Scapula:
      radiography,Scapula: surgery,Stress, Mechanical,Weight-Bearing, OCS,
      TEC},
      owner = {thomaskroes},
      pmid = {16023474},
      timestamp = {2010.10.22}
    }
  • B. Maurin, O. Piccin, B. Bayle, J. Gangloff, M. de Mathelin, L. Soler, and A. Gangi, “A new robotic system for CT-guided percutaneous procedures with haptic feedback,” International Congress Series, vol. 1268, pp. 515-520, 2004.
    [Bibtex]
    @ARTICLE{Maurin2004,
      author = {B. Maurin and O. Piccin and B. Bayle and J. Gangloff and M. de Mathelin
      and L. Soler and A. Gangi},
      title = {A new robotic system for CT-guided percutaneous procedures with haptic
      feedback},
      journal = {International Congress Series},
      year = {2004},
      volume = {1268},
      pages = {515 - 520},
      abstract = {In this paper, we present a new design for a teleoperated robotic
      percutaneous intervention with computed tomography guidance. Percutaneous
      needle insertions are widely used in interventional radiology for
      radiofrequency ablations or biopsy procedures. Needle insertion robots
      guided by CT images should improve accuracy and reduce X-ray exposure
      of the radiologist. We propose a new design with force feedback and
      CT guidance. A prototype is presented, together with a complete workflow
      of the system.},
      file = {:Maurin2004.pdf:PDF},
      issn = {0531-5131},
      keywords = {Percutaneous procedure, TEC},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • K. Mayasandra, H. M. Ladak, and W. Wang, “A distributed arithmetic hardware architecture for real-time Hough transform based segmentation,” in Electrical and Computer Engineering, 2005. Canadian Conference on, 2005, pp. 1469-1472.
    [Bibtex]
    @INPROCEEDINGS{Mayasandra2005,
      author = {Mayasandra, K. and Ladak, H.M. and Wei Wang},
      title = {A distributed arithmetic hardware architecture for real-time Hough
      transform based segmentation},
      booktitle = {Electrical and Computer Engineering, 2005. Canadian Conference on},
      year = {2005},
      pages = {1469 -1472},
      month = {May},
      abstract = {Real-time segmentation and tracking of biopsy needles is a very important
      part of image-guided surgery. Since the needle appears as a straight
      line in medical images, the Hough transform for straight-line detection
      is a natural and powerful choice for needle segmentation. However,
      the transform is computationally expensive, and in the standard form
      is ineffective for real-time segmentation applications. We propose
      a dedicated hardware architecture for the Hough transform based on
      distributed arithmetic (DA) principles that results in a real-time
      implementation. This architecture exploits the inherent parallelism
      of the Hough transform, and reduces the overall computation time.
      This procedure, combined with the parallel structure significantly
      reduces computation times, thus facilitating real-time implementation.
      The DA-Hough transform architecture has been implemented using the
      Xilinx FPGA. For a 256×256 image, the proposed design takes 0.1
      ms to 1.2 ms to process the Hough transform when the feature points
      in the image are varied from 2% to 50% of the total image, which
      are well within the bounds of real-time operation, and thus can facilitate
      needle segmentation in real time},
      file = {:Mayasandra2005.pdf:PDF},
      issn = {0840-7789},
      keywords = {Xilinx FPGA;biopsy needles;distributed arithmetic hardware architecture;image-guided
      surgery;real-time Hough transform based segmentation;Hough transforms;distributed
      arithmetic;field programmable gate arrays;image segmentation;medical
      image processing;surgery;, IMP},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
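    The computation that the distributed-arithmetic FPGA architecture in this entry
    accelerates is the classical (rho, theta) Hough transform for straight-line
    detection: every edge point votes for all lines passing through it, and peaks in
    the accumulator correspond to line-like structures such as a biopsy needle. The
    plain software sketch below shows only that voting step; the hardware mapping
    itself is not reproduced, and the synthetic test image is an assumption for
    illustration.

      # Plain-software sketch of the (rho, theta) Hough transform for straight
      # lines; illustrative only, the FPGA/distributed-arithmetic design is not
      # reproduced here.
      import numpy as np

      def hough_lines(edge_image, n_theta=180, n_rho=256):
          """Accumulate votes for lines rho = x*cos(theta) + y*sin(theta)."""
          ys, xs = np.nonzero(edge_image)            # edge (feature) points
          thetas = np.linspace(0.0, np.pi, n_theta, endpoint=False)
          diag = np.hypot(*edge_image.shape)
          rhos = np.linspace(-diag, diag, n_rho)
          acc = np.zeros((n_rho, n_theta), dtype=np.int64)
          cos_t, sin_t = np.cos(thetas), np.sin(thetas)
          for x, y in zip(xs, ys):
              rho = x * cos_t + y * sin_t            # one rho per theta bin
              r_idx = np.round((rho + diag) / (2 * diag) * (n_rho - 1)).astype(int)
              acc[r_idx, np.arange(n_theta)] += 1
          return acc, rhos, thetas

      # Toy image with a single diagonal "needle"; the accumulator peak recovers it.
      img = np.zeros((64, 64), dtype=np.uint8)
      np.fill_diagonal(img, 1)
      acc, rhos, thetas = hough_lines(img)
      r_i, t_i = np.unravel_index(acc.argmax(), acc.shape)
      print(f"rho = {rhos[r_i]:.1f}, theta = {np.degrees(thetas[t_i]):.1f} deg")

    For a real needle image the edge map would come from an edge detector, and the
    accumulator would be searched for the few strongest peaks rather than a single
    maximum.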
  • E. G. McFarland, P. Sanguanjit, A. Tasaki, E. Keyurapan, E. K. Fishman, and L. M. Fayad, “The reverse shoulder prosthesis: A review of imaging features and complications.,” Skeletal radiology, vol. 35, iss. 7, pp. 488-96, 2006.
    [Bibtex]
    @ARTICLE{McFarland2006,
      author = {McFarland, Edward G and Sanguanjit, Prakasit and Tasaki, Atsushi
      and Keyurapan, Ekavit and Fishman, Elliot K and Fayad, Laura M},
      title = {The reverse shoulder prosthesis: A review of imaging features and
      complications.},
      journal = {Skeletal radiology},
      year = {2006},
      volume = {35},
      pages = {488-96},
      number = {7},
      month = {July},
      abstract = {The reverse shoulder prosthesis is a prosthesis that has been in clinical
      use in Europe since 1985 and was approved for use in the United States
      in 2004. This unique prosthesis has a baseplate attached to the glenoid,
      which holds a spherical component, while the humeral component includes
      a polyethylene insert that is flat. This design is the "reverse"
      configuration of that seen with a conventional arthroplasty, in which
      the spherical component is part of the humeral component. The indications
      for the reverse prosthesis are: (1) painful arthritis associated
      with irreparable rotator cuff tears (cuff tear arthropathy), (2)
      failed hemiarthroplasty with irreparable rotator cuff tears, (3)
      pseudoparalysis due to massive, irreparable rotator cuff tears, (4)
      some reconstructions after tumor resection, and (5) some fractures
      of the shoulder not repairable or reconstructable with other techniques.
      This prosthesis can produce a significant reduction in pain and some
      improvement in function for most of the indications mentioned. However,
      the unique configuration and the challenge of its insertion can result
      in a high incidence of a wide variety of unusual complications. Some
      of these complications, such as dislocation of the components, are
      similar to conventional shoulder replacement. Other complications,
      such as notching of the scapula and acromial stress fractures, are
      unique to this prosthesis. CONCLUSION: The configuration of the reverse
      prosthesis, its normal radiographic appearance and potential complications
      associated with its use are reviewed.},
      file = {McFarland2006.pdf:McFarland2006.pdf:PDF},
      issn = {0364-2348},
      keywords = {Arthroplasty,Humans,Joint Prosthesis,Joint Prosthesis: adverse effects,Prosthesis
      Design,Prosthesis Failure,Replacement,Replacement: adverse effects,Replacement:
      classification,Replacement: trends,Shoulder Joint,Shoulder Joint:
      radiography,Shoulder Joint: surgery},
      owner = {thomaskroes},
      pmid = {16570176},
      timestamp = {2010.10.22}
    }
  • J. McGhee, “3-D visualization and animation technologies in anatomical imaging,” Journal of Anatomy, vol. 216, iss. 2, pp. 264-270, 2010.
    [Bibtex]
    @ARTICLE{Mcghee2010,
      author = {McGhee, J.},
      title = {3-D visualization and animation technologies in anatomical imaging},
      journal = {Journal of Anatomy},
      year = {2010},
      volume = {216},
      pages = {264--270},
      number = {2},
      file = {Mcghee2010.pdf:Mcghee2010.pdf:PDF},
      issn = {1469-7580},
      keywords = {REV},
      owner = {Thomas},
      publisher = {John Wiley \& Sons},
      timestamp = {2011.02.03}
    }
  • R. McGough III and A. Yasko, “Techniques in pelvic resection,” Operative Techniques in Orthopaedics, vol. 14, iss. 4, pp. 259-266, 2004.
    [Bibtex]
    @ARTICLE{Mcgoughiii2004,
      author = {McGough III, R and Yasko, A},
      title = {Techniques in pelvic resection},
      journal = {Operative Techniques in Orthopaedics},
      year = {2004},
      volume = {14},
      pages = {259-266},
      number = {4},
      month = {October},
      abstract = {Resection of tumors of the bony pelvis is one of the more difficult
      procedures in orthopedic surgery. The depth of the operative field,
      complex three-dimensional anatomy, and ubiquitous presence of delicate
      surrounding structures makes these procedures both taxing and time
      consuming. Approaching pelvic resections requires both a thorough
      knowledge of the three-dimensional anatomy and a systematic approach
      to exposure and protection of the structures to be preserved. Because
      pelvic surgery is fraught with complications under the best of circumstances,
      deficiencies in either anatomy or approach can lead to devastating
      consequences. Although the specific order of dissection can be varied
      based on the dictates of the tumor, an organized approach follows
      a general direction from anterior to posterior, and from inside to
      outside. The abdominal wall dissection is performed first, and dissection
      of the vessels and femoral nerve often follows logically. This will
      lead the surgeon posteriorly to the sacroiliac joint and the sciatic
      notch. Completing the intrapelvic dissection allows identification
      of bony landmarks for osteotomy planning. The extrapelvic approach
      generally follows. A thoughtful, systematic, detail-oriented approach
      can minimize both intraoperative blood loss and risk of iatrogenic
      injury to vital surrounding structures.},
      file = {Mcgoughiii2004.pdf:Mcgoughiii2004.pdf:PDF},
      issn = {10486666},
      keywords = {ilioinguinal, innominate bones, pelvic resection, pelvis, sacrum, surgical
      exposure},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • T. McInerney and D. Terzopoulos, “Deformable models in medical image analysis,” in Mathematical Methods in Biomedical Image Analysis, 1996., Proceedings of the Workshop on, 1996, pp. 171-180.
    [Bibtex]
    @INPROCEEDINGS{McInerney1996,
      author = {McInerney, T. and Terzopoulos, D.},
      title = {Deformable models in medical image analysis},
      booktitle = {Mathematical Methods in Biomedical Image Analysis, 1996., Proceedings
      of the Workshop on},
      year = {1996},
      pages = {171 -180},
      month = jun,
      abstract = {This article surveys deformable models, a promising and vigorously
      researched computer-assisted medical image analysis technique. Among
      model-based techniques, deformable models offer a unique and powerful
      approach to image analysis that combines geometry, physics, and approximation
      theory. They have proven to be effective in segmenting, matching,
      and tracking anatomic structures by exploiting (bottom-up) constraints
      derived from the image data together with (top-down) a priori knowledge
      about the location, size, and shape of these structures. Deformable
      models are capable of accommodating the significant variability of
      biological structures over time and across different individuals.
      Furthermore, they support highly intuitive interaction mechanisms
      that, when necessary, allow medical scientists and practitioners
      to bring their expertise to bear on the model-based image interpretation
      task. This article reviews the rapidly expanding body of work on
      the development and application of deformable models to problems
      of fundamental importance in medical image analysis, including segmentation,
      shape representation, matching, and motion tracking},
      file = {McInerney1996.pdf:McInerney1996.pdf:PDF},
      keywords = {anatomic structures;approximation theory;biological structures;computer-assisted
      medical image analysis technique;deformable models;location;matching;medical
      image analysis;model-based image interpretation;motion tracking;segmentation;shape;shape
      representation;size;tagged MR images;biomedical NMR;image matching;image
      segmentation;medical image processing;, REV, IMP},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
  • M. Meehan, M. Teschner, and S. Girod, “Three-dimensional simulation and prediction of craniofacial surgery,” Orthodontics & Craniofacial Research, vol. 6, iss. s1, pp. 102-107, 2003.
    [Bibtex]
    @ARTICLE{Meehan2003,
      author = {Meehan, M. and Teschner, M. and Girod, S.},
      title = {Three-dimensional simulation and prediction of craniofacial surgery},
      journal = {Orthodontics \& Craniofacial Research},
      year = {2003},
      volume = {6},
      pages = {102 - 107},
      number = {s1},
      file = {Meehan2003.pdf:Meehan2003.pdf:PDF},
      issn = {1601-6343},
      keywords = {OCS, PLA, CMS, APP},
      owner = {thomaskroes},
      publisher = {John Wiley \& Sons},
      timestamp = {2011.01.25}
    }
  • U. Meier, O. López, C. Monserrat, M. C. Juan, and M. Alcañiz, “Real-time deformable models for surgery simulation: a survey,” Computer Methods and Programs in Biomedicine, vol. 77, iss. 3, pp. 183-197, 2005.
    [Bibtex]
    @ARTICLE{Meier2005,
      author = {U. Meier and O. López and C. Monserrat and M.C. Juan and M. Alcañiz},
      title = {Real-time deformable models for surgery simulation: a survey},
      journal = {Computer Methods and Programs in Biomedicine},
      year = {2005},
      volume = {77},
      pages = {183 - 197},
      number = {3},
      abstract = {Simulating the behaviour of elastic objects in real time is one of
      the current objectives of computer graphics. One of its fields of
      application lies in virtual reality, mainly in surgery simulation
      systems. In computer graphics, the models used for the construction
      of objects with deformable behaviour are known as deformable models.
      These have two conflicting characteristics: interactivity and motion
      realism. The different deformable models developed to date have promoted
      only one of these (usually interactivity) to the detriment of the
      other (biomechanical realism). In this paper, we present a classification
      of the different deformable models that have been developed. We present
      the advantages and disadvantages of each one. Finally, we make a
      comparison of deformable models and perform an evaluation of the
      state of the art and the future of deformable models.},
      file = {Meier2005.pdf:Meier2005.pdf:PDF},
      issn = {0169-2607},
      keywords = {Deformable model, TEC, REV},
      owner = {thomaskroes},
      timestamp = {2010.11.12}
    }
  • H. Meinzer, “Computerized planning of liver surgery – an overview,” Computers & Graphics, vol. 26, iss. 4, pp. 569-576, 2002.
    [Bibtex]
    @ARTICLE{Meinzer2002,
      author = {Meinzer, H},
      title = {Computerized planning of liver surgery - an overview},
      journal = {Computers \& Graphics},
      year = {2002},
      volume = {26},
      pages = {569-576},
      number = {4},
      month = {August},
      abstract = {Liver surgery is a field in which computer-based operation planning
      has an enormous impact on the selection of therapeutic strategy.
      Based on pre-operative analysis of image data, liver operation planning
      provides a individual impression of tumor location, the exact structure
      of the vascular system and an identification of liver segments and
      sub- segments. In this paper we present an operation planning system
      that is based on an object-oriented framework. This framework offers
      extensive automation of the integration process for software modules
      developed for medical software systems. The operation planning system
      can calculate the operation proposal results using two different
      approaches. The first method is based on Couinaud's classification
      system, which uses the main stems of the portal and venous trees.
      The second approach is a portal vein based method. These two approaches
      were compared using 23 liver CT scans. The volumetric data for individual
      segments demonstrates differences between the two segment classification
      methods.},
      file = {Meinzer2002.pdf:Meinzer2002.pdf:PDF},
      issn = {00978493},
      keywords = {computer tomography,computer-based surgery,liver resection,operation
      planning, HES, PLA, REV},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • P. Merloz, J. Tonetti, L. Pittet, M. Coulomb, S. Lavallee, and P. Sautot, “Pedicle screw placement using image guided techniques,” Clinical orthopaedics and related research, vol. 354, p. 39, 1998.
    [Bibtex]
    @ARTICLE{Merloz1998,
      author = {Merloz, P. and Tonetti, J. and Pittet, L. and Coulomb, M. and Lavallee,
      S. and Sautot, P.},
      title = {Pedicle screw placement using image guided techniques},
      journal = {Clinical orthopaedics and related research},
      year = {1998},
      volume = {354},
      pages = {39},
      keywords = {TRM},
      owner = {thomaskroes},
      timestamp = {2011.01.18}
    }
  • L. N. Metz and S. Burch, “Computer-assisted surgical planning and image-guided surgical navigation in refractory adult scoliosis surgery: case report and review of the literature.,” Spine, vol. 33, iss. 9, p. E287-92, 2008.
    [Bibtex]
    @ARTICLE{Metz2008,
      author = {Metz, Lionel N and Burch, Shane},
      title = {Computer-assisted surgical planning and image-guided surgical navigation
      in refractory adult scoliosis surgery: case report and review of
      the literature.},
      journal = {Spine},
      year = {2008},
      volume = {33},
      pages = {E287-92},
      number = {9},
      month = {April},
      abstract = {STUDY DESIGN: Case report and literature review. OBJECTIVE: In this
      case report, we present the utility of computer-assisted surgical
      planning and image-guided surgical navigation in the planning and
      execution of a major osteotomy to correct severe kyphoscoliosis.
      SUMMARY OF BACKGROUND DATA: Computer-assisted surgical planning is
      useful to appreciate the three-dimensional nature of scoliotic deformities
      and allows for operative maneuvers to be simulated on a computer
      before their implementation in the operating room. Image-guided surgical
      navigation improves surgical accuracy and can help translate a virtual
      surgical plan to the operative setting. METHODS: We report the case
      of a 38-year-old woman with severe, congenital kyphoscoliosis refractory
      to many previous surgeries, who presents with moderate progressive
      myelopathy and severe pain attributable to a sharp angular deformity
      at T12. Three-dimensional computed tomography reconstruction and
      computer-assisted surgical planning were used to determine the optimal
      corrective osteotomy. The surgical plan was translated to the operating
      room where a posterior vertebrectomy and instrumented correction
      were executed with the aid of image-guided surgical navigation. RESULTS:
      The osteotomy was safely performed resulting in improved sagittal
      and coronal alignments, as well as correction of the sharp kyphoscoliotic
      deformity at the thoracolumbar junction. At 6-month follow-up, the
      patient's myelopathy and pain had largely resolved and she expressed
      high satisfaction with the procedure. CONCLUSION: We advocate this
      novel application of virtual surgical planning and intraoperative
      surgical navigation to improve the safety and efficacy of complex
      spinal deformity corrections.},
      file = {Metz2008.pdf:Metz2008.pdf:PDF},
      issn = {1528-1159},
      keywords = {Adult,Back Pain,Back Pain: etiology,Back Pain: radiography,Back Pain:
      surgery,Decompression, Surgical,Female,Humans,Imaging, Three-Dimensional,Kyphosis,Kyphosis:
      complications,Kyphosis: radiography,Kyphosis: surgery,Osteotomy,Osteotomy:
      methods,Patient Satisfaction,Radiographic Image Interpretation, Computer-Assist,Radiography,
      Interventional,Radiography, Interventional: methods,Reoperation,Scoliosis,Scoliosis:
      complications,Scoliosis: radiography,Scoliosis: surgery,Severity
      of Illness Index,Spinal Cord Diseases,Spinal Cord Diseases: etiology,Spinal
      Cord Diseases: radiography,Spinal Cord Diseases: surgery,Spinal Fusion,Surgery,
      Computer-Assisted,Thoracic Vertebrae,Thoracic Vertebrae: radiography,Thoracic
      Vertebrae: surgery,Tomography, Spiral Computed,Treatment Failure,Treatment
      Outcome,User-Computer Interface},
      owner = {thomaskroes},
      pmid = {18427309},
      timestamp = {2010.10.22}
    }
  • J. Meyer-Spradow, T. Ropinski, and K. Hinrichs, “Supporting Depth and Motion Perception in Medical Volume Data,” in Visualization in Medicine and Life Sciences, L. Linsen, H. Hagen, and B. Hamann, Eds., Springer Berlin Heidelberg, 2008, pp. 121-133.
    [Bibtex]
    @INCOLLECTION{Spradow2008,
      author = {Meyer-Spradow, Jennis and Ropinski, Timo and Hinrichs, Klaus},
      title = {Supporting Depth and Motion Perception in Medical Volume Data},
      booktitle = {Visualization in Medicine and Life Sciences},
      publisher = {Springer Berlin Heidelberg},
      year = {2008},
      editor = {Linsen, Lars and Hagen, Hans and Hamann, Bernd},
      series = {Mathematics and Visualization},
      pages = {121-133},
      abstract = {There are many application areas where dynamic visualization techniques
      cannot be used and the user can only view a still image. Perceiving
      depth and understanding spatio-temporal relations from a single still
      image are challenging tasks. We present visualization techniques
      which support the user in perceiving depth information from 3D angiography
      images, and techniques which depict motion inherent in time-varying
      medical volume datasets. In both cases no dynamic visualization is
      required.},
      affiliation = {University of Münster Visualization and Computer Graphics Research
      Group, Department of Computer Science Germany},
      file = {Spradow2008.pdf:Spradow2008.pdf:PDF},
      isbn = {978-3-540-72630-2},
      keyword = {Mathematics},
      keywords = {TEC},
      owner = {thomaskroes},
      timestamp = {2011.01.06}
    }
  • D. A. Miles and M. R. Razzano, “The future of digital imaging in dentistry.,” Dental clinics of North America, vol. 44, iss. 2, p. 427-38, viii, 2000.
    [Bibtex]
    @ARTICLE{Miles2000,
      author = {Miles, D. A. and Razzano, M. R.},
      title = {The future of digital imaging in dentistry.},
      journal = {Dental clinics of North America},
      year = {2000},
      volume = {44},
      pages = {427-38, viii},
      number = {2},
      month = {April},
      abstract = {Digital radiographic imaging in dentistry has matured. All the reasons
      dentists have cited for keeping conventional, film-based systems
      have been refuted: cost, sensors, storage, and training are not issues.
      There are many reasons to switch to digital. This article outlines
      external factors that dentists may not have considered that will
      influence the way they conduct business in their offices related
      to radiographic imaging. The evidence points to the inevitable adoption
      of digital imaging systems.},
      file = {Miles2000.pdf:Miles2000.pdf:PDF},
      issn = {0011-8532},
      keywords = {Costs and Cost Analysis,Education, Dental,Education, Distance,Equipment
      Design,Forecasting,Humans,Information Storage and Retrieval,Information
      Storage and Retrieval: economics,Internet,Practice Management, Dental,Practice
      Management, Dental: economics,Radiography, Dental,Radiography, Dental,
      Digital,Radiography, Dental, Digital: economics,Radiography, Dental,
      Digital: instrumentation,Radiography, Dental, Digital: trends,Radiography,
      Dental: economics,Remote Consultation,X-Ray Film},
      owner = {thomaskroes},
      pmid = {10740777},
      timestamp = {2010.10.22}
    }
  • K. Miller, A. Wittek, G. Joldes, A. Horton, T. Dutta-Roy, J. Berger, and L. Morriss, “Modelling brain deformations for computer-integrated neurosurgery,” International Journal for Numerical Methods in Biomedical Engineering, vol. 26, iss. 1, pp. 117-138, 2010.
    [Bibtex]
    @ARTICLE{Miller2010,
      author = {Miller, K. and Wittek, A. and Joldes, G. and Horton, A. and Dutta-Roy,
      T. and Berger, J. and Morriss, L.},
      title = {Modelling brain deformations for computer-integrated neurosurgery},
      journal = {International Journal for Numerical Methods in Biomedical Engineering},
      year = {2010},
      volume = {26},
      pages = {117 - 138},
      number = {1},
      file = {Miller2010.pdf:Miller2010.pdf:PDF},
      issn = {2040-7947},
      keywords = {TEC, NES},
      owner = {thomaskroes},
      publisher = {John Wiley \& Sons},
      timestamp = {2011.01.11}
    }
  • R. A. Mischkowski, M. Zinser, A. Kübler, U. Seifert, and J. E. Zöller, “The Hollowman – a virtual reality tool in cranio-maxillofacial surgery,” International Congress Series, vol. 1268, pp. 658-661, 2004.
    [Bibtex]
    @ARTICLE{Mischkowski2004,
      author = {R. A. Mischkowski and M. Zinser and A. Kübler and U. Seifert and
      J. E. Zöller},
      title = {The Hollowman - a virtual reality tool in cranio-maxillofacial surgery},
      journal = {International Congress Series},
      year = {2004},
      volume = {1268},
      pages = {658 - 661},
      abstract = {A virtual reality tool for computer-assisted surgery named "The Hollowman"
      is presented. This allows for visual tracking of real anatomical
      structures in superposition with volume rendered CT or MRI scans
      and thus can be used for navigated translocation of bony segments.
      For an evaluation study "The Hollowman" was used in orthognathic
      surgery to control the translocation of the maxilla after Le Fort
      I osteotomy within a bimaxillary procedure. Up to now, four patients
      have been included. The tool has proven very valuable especially
      in complex nonlinear translocations of the maxilla as the surgeon
      could directly visualise the position of the mobilised bone in relation
      to the preoperatively planned situation. The application to other
      types of interventions in cranio-maxillofacial surgery associated
      with movement of bony segments as Le Fort III osteotomy, fronto-orbital
      advancement and cranial vault reshaping or reconstruction seems to
      be considerable as well.},
      file = {Mischkowski2004.pdf:Mischkowski2004.pdf:PDF},
      issn = {0531-5131},
      keywords = {The Hollowman, VOR, APP, PLA, CMS},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • R. A. Mischkowski, M. Zinser, A. Kübler, U. Seifert, and J.E. Zöller, “Clinical and experimental evaluation of an augmented reality system in cranio-maxillofacial surgery,” International Congress Series, vol. 1281, pp. 565-570, 2005.
    [Bibtex]
    @ARTICLE{Mischkowski2005,
      author = {R.A. Mischkowski and M. Zinser and A. Kübler and U. Seifert and J.E.
      Zöller},
      title = {Clinical and experimental evaluation of an augmented reality system
      in cranio-maxillofacial surgery},
      journal = {International Congress Series},
      year = {2005},
      volume = {1281},
      pages = {565 - 570},
      abstract = {An augmented reality tool for computer-assisted surgery named X-Scope
      allows for visual tracking of real anatomical structures in superposition
      with volume rendered CT or MRI scans and thus can be used for navigated
      translocation of bony segments. In a feasibility study X-Scope was
      used in orthognathic surgery to control the translocation of the maxilla
      after Le Fort I osteotomy within a bimaxillary procedure. The achieved
      situation was compared with the computer-based preoperative planning
      by means of cephalometric analysis on lateral and frontal cephalograms.
      In addition to the clinical feasibility study, an experimental evaluation
      of system accuracy was performed. The technique could be successfully
      applied in 5 patients. The maxillary positioning using X-Scope was
      accomplished with accuracy within a range of 1 mm. The tool was used
      in all cases in addition to the usual intra-operative splints. A
      stand-alone application without conventional control mechanism seems
      to be not reasonable yet. The final analysis of data obtained from
      the accuracy study is not completed yet. The preliminary results
      indicate a deviation of the X-Scope system not significantly greater
      than the deviation of the navigation system itself with a given registration
      method. Augmented reality tools like X-Scope may be helpful for control
      of maxillary translocation in orthognathic surgery. The application
      to other types of interventions in cranio-maxillofacial surgery associated
      with movement of bony segments as Le Fort III osteotomy, fronto-orbital
      advancement, and cranial vault reshaping or reconstruction may be
      considered as well.},
      file = {Mischkowski2005.pdf:Mischkowski2005.pdf:PDF},
      issn = {0531-5131},
      keywords = {X-Scope, APP, CMS, AUR},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • R. Mischkowski, M. Zinser, L. Ritter, J. Neugebauer, E. Keeve, and J. Zöller, “Intraoperative navigation in the maxillofacial area based on 3D imaging obtained by a cone-beam device,” International journal of oral and maxillofacial surgery, vol. 36, iss. 8, pp. 687-694, 2007.
    [Bibtex]
    @ARTICLE{Mischkowski2007,
      author = {Mischkowski, RA and Zinser, MJ and Ritter, L. and Neugebauer, J.
      and Keeve, E. and Z{\"o}ller, JE},
      title = {Intraoperative navigation in the maxillofacial area based on 3D imaging
      obtained by a cone-beam device},
      journal = {International journal of oral and maxillofacial surgery},
      year = {2007},
      volume = {36},
      pages = {687 - 694},
      number = {8},
      abstract = {The aim of this study was to evaluate intraoperative navigation in
      the maxillofacial area based on three-dimensional imaging obtained
      by a cone-beam device. Digital volume tomograms (DVT) were obtained
      by the prototype of GALILEOS (Sirona Dental Systems Inc., Bensheim,
      Germany), a newly developed, compact size, cone-beam machine with
      a scan volume of 15 cm × 15 cm × 15 cm. Intraoperative navigation
      was performed in 12 patients in three selected indications. Target
      detection error expressing the accuracy of DVT navigation and registration
      performance of specially developed methods for image-to-patient registration
      was estimated. Target detection error was maximally 2 mm and depended
      on the registration method chosen. The automatic detection rate of
      the fiducial markers ranged between 0.64 and 0.32. The preoperatively
      defined treatment plan was fully accomplished in 11 out of 12 cases.
      A favourable surgical outcome was achievable in all cases. Intraoperative
      complications were not observed. Intraoperative navigation based
      on DVT imaging can be considered as a valuable alternative to CT-based
      procedures. Special characteristics of the cone-beam technique, in
      terms of contrast resolution and the limited field-of-view size of
      the devices, restrict the indication spectrum and create a demand
      for modifications of the usual registration methods.},
      file = {Mischkowski2007.pdf:Mischkowski2007.pdf:PDF},
      issn = {0901-5027},
      owner = {thomaskroes},
      publisher = {Elsevier},
      timestamp = {2010.11.09}
    }
  • S. Misra, K. T. Ramesh, and A. M. Okamura, “Modeling of tool-tissue interactions for computer-based surgical simulation: a literature review,” 2008.
    [Bibtex]
    @ARTICLE{Misra2008,
      author = {Misra, S. and Ramesh, KT and Okamura, A.M.},
      title = {Modeling of tool-tissue interactions for computer-based surgical
      simulation: a literature review},
      year = {2008},
      file = {Misra2008.pdf:Misra2008.pdf:PDF},
      keywords = {TEC, REV},
      owner = {Thomas},
      publisher = {MIT Press},
      timestamp = {2011.02.23}
    }
  • T. Moench, M. Neugebauer, P. Hahn, and B. Preim, “Generation of smooth and accurate surface models for surgical planning and simulation,” in Proceedings of SPIE Medical Imaging.
    [Bibtex]
    @CONFERENCE{Moench,
      author = {Moench, T. and Neugebauer, M. and Hahn, P. and Preim, B.},
      title = {Generation of smooth and accurate surface models for surgical planning
      and simulation},
      booktitle = {Proceedings of SPIE Medical Imaging}
    }
  • B. Mollard, S. Lavallée, and G. Bettega, “Computer assisted orthognathic surgery,” Medical Image Computing and Computer-Assisted Intervention – MICCAI’98, p. 21, 1998.
    [Bibtex]
    @ARTICLE{Mollard1998,
      author = {Mollard, B. and Lavall{\'e}e, S. and Bettega, G.},
      title = {Computer assisted orthognathic surgery},
      journal = {Medical Image Computing and Computer-Assisted Intervention - MICCAI’98},
      year = {1998},
      pages = {21},
      file = {Mollard1998.pdf:Mollard1998.pdf:PDF},
      keywords = {APP, RPP, CMS, PLA, GUI, SUR},
      owner = {Thomas},
      publisher = {Springer},
      timestamp = {2011.02.08}
    }
  • A. M. Morega, A. Dobre, M. Morega, and D. Mocanu, “Computational Modeling of Arterial Blood Flow,” in International Conference on Advancements of Medicine and Health Care through Technology, R. Magjarevic, S. Vlad, R. V. Ciupa, and A. I. Nicu, Eds., Springer Berlin Heidelberg, 2009, vol. 26, pp. 373-378.
    [Bibtex]
    @INCOLLECTION{Morega2009,
      author = {Morega, A. M. and Dobre, A. and Morega, M. and Mocanu, D.},
      title = {Computational Modeling of Arterial Blood Flow},
      booktitle = {International Conference on Advancements of Medicine and Health Care
      through Technology},
      publisher = {Springer Berlin Heidelberg},
      year = {2009},
      editor = {Magjarevic, Ratko and Vlad, Simona and Ciupa, Radu V. and Nicu, Anca
      I.},
      volume = {26},
      series = {IFMBE Proceedings},
      pages = {373 - 378},
      abstract = {Recently, there has been a growing interest in developing numerical methods
      and tools to investigate the hemodynamics of the arterial flow, and
      to understand its influence on the transport of solutes (e.g., oxygen),
      nutrients, etc. As arterial morphology is complex and patient-related,
      medical data based reconstruction of the geometry may be utilized
      to generate realistic computational domains. The blood flow is then
      investigated by finite element method (FEM) for a range of flow parameters.
      The flow patterns thus obtained may be utilized for vascular surgery
      training, planning and intervention, to investigate atherosclerosis
      genesis, in drug targeting, etc.},
      affiliation = {University POLITEHNICA of Bucharest Faculty of Electrical Engineering
      Romania},
      file = {Morega2009.pdf:Morega2009.pdf:PDF},
      isbn = {978-3-642-04292-8},
      keyword = {Engineering},
      keywords = {OCS},
      owner = {Thomas},
      timestamp = {2011.02.23}
    }
  • K. Muehler and B. Preim, “Smart Graphics in Medical Visualization / Smart Graphics in Medizinischen Visualisierungen,” it-Information Technology, vol. 51, iss. 3, pp. 157-162, 2009.
    [Bibtex]
    @ARTICLE{Muehler2009,
      author = {Muehler, K. and Preim, B.},
      title = {Smart Graphics in Medical Visualization / Smart Graphics in Medizinischen
      Visualisierungen},
      journal = {it-Information Technology},
      year = {2009},
      volume = {51},
      pages = {157 - 162},
      number = {3},
      issn = {1611-2776},
      publisher = {Oldenbourg}
    }
  • K. Mueller and A. E. Kaufman, “Volume Visualization in Medicine,” in Handbook of Medical Image Processing and Analysis, 2nd ed., Elsevier Inc., 2008, pp. 785-816.
    [Bibtex]
    @BOOK{Mueller2008,
      title = {Volume Visualization in Medicine},
      publisher = {Elsevier Inc.},
      year = {2008},
      author = {Mueller, Klaus and Kaufman, Arie E},
      volume = {Vi},
      pages = {785-816},
      edition = {Second Edition},
      booktitle = {Handbook of Medical Image Processing and Analysis},
      file = {Mueller2008.pdf:Mueller2008.pdf:PDF},
      keywords = {TEC},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • K. Muhler and B. Preim, “Reusable Visualizations and Animations for Surgery Planning,” Computer Graphics Forum, vol. 29, iss. 3, pp. 1103-1112, 2010.
    [Bibtex]
    @ARTICLE{Muhler2010b,
      author = {Muhler, K. and Preim, B.},
      title = {Reusable Visualizations and Animations for Surgery Planning},
      journal = {Computer Graphics Forum},
      year = {2010},
      volume = {29},
      pages = {1103-1112},
      number = {3},
      month = {August},
      abstract = {For surgical planning, the exploration of 3D visualizations and 2D
      slice views is essential. However, the generation of visualizations
      which support the specific treatment decisions is very tedious. Therefore,
      the reuse of once designed visualizations for similar cases can strongly
      accelerate the process of surgical planning. We present a new technique
      that enables the easy reuse of both medical visualization types:
      3D scenes and 2D slice views. We introduce the keystates as a concept
      to describe the state of a visualization in a general manner. They
      can be easily applied to new datasets to create similar visualizations.
      Keystates can be shared between surgeons of one specialization
      to reproduce and document the planning process for collaborative
      work. Furthermore, animations can support the surgeon on individual
      exploration and are also useful in collaborative environments, where
      complex issues must be presented in a short time. Therefore, we provide
      a framework, where animations can be visually designed by surgeons
      during their exploration process without any programming or authoring
      skills. We discuss several transitions between different visualizations
      and present an application from clinical routine.},
      issn = {01677055},
      keywords = {TEC},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • K. Muhler, C. Tietjen, F. Ritter, and B. Preim, “The medical exploration toolkit: an efficient support for visual computing in surgical planning and training.,” IEEE transactions on visualization and computer graphics, vol. 16, iss. 1, pp. 133-46, 2010.
    [Bibtex]
    @ARTICLE{Muhler2010a,
      author = {Muhler, Konrad and Tietjen, Christian and Ritter, Felix and Preim,
      Bernhard},
      title = {The medical exploration toolkit: an efficient support for visual
      computing in surgical planning and training.},
      journal = {IEEE transactions on visualization and computer graphics},
      year = {2010},
      volume = {16},
      pages = {133-46},
      number = {1},
      abstract = {Application development is often guided by the usage of software libraries
      and toolkits. For medical applications, the toolkits currently available
      focus on image analysis and volume rendering. Advanced interactive
      visualizations and user interface issues are not adequately supported.
      Hence, we present a toolkit for application development in the field
      of medical intervention planning, training, and presentation--the
      MEDICAL EXPLORATION TOOLKIT (METK). The METK is based on the rapid
      prototyping platform MeVisLab and offers a large variety of facilities
      for an easy and efficient application development process. We present
      dedicated techniques for advanced medical visualizations, exploration,
      standardized documentation, and interface widgets for common tasks.
      These include, e.g., advanced animation facilities, viewpoint selection,
      several illustrative rendering techniques, and new techniques for
      object selection in 3D surface models. No extended programming skills
      are needed for application building, since a graphical programming
      approach can be used. The toolkit is freely available and well documented
      to facilitate the use and extension of the toolkit.},
      issn = {1077-2626},
      keywords = {Animals,Computer Graphics,Computer Simulation,Computer-Assisted Instruction,Computer-Assisted
      Instruction: methods,Humans,Imaging, Three-Dimensional,Imaging, Three-Dimensional:
      methods,Models, Biological,Software,Surgery, Computer-Assisted,Surgery,
      Computer-Assisted: education,Surgery, Computer-Assisted: methods,User-Computer
      Interface},
      owner = {thomaskroes},
      pmid = {19910667},
      timestamp = {2010.10.22}
    }
  • L. Mundeleer, D. Wikler, T. Leloup, and N. Warzee, “Development of a computer assisted system aimed at RFA liver surgery.,” Computerized medical imaging and graphics : the official journal of the Computerized Medical Imaging Society, vol. 32, iss. 7, pp. 611-21, 2008.
    [Bibtex]
    @ARTICLE{Mundeleer2008,
      author = {Mundeleer, Laurent and Wikler, David and Leloup, Thierry and Warzee,
      Nadine},
      title = {Development of a computer assisted system aimed at RFA liver surgery.},
      journal = {Computerized medical imaging and graphics : the official journal
      of the Computerized Medical Imaging Society},
      year = {2008},
      volume = {32},
      pages = {611-21},
      number = {7},
      month = {October},
      abstract = {Radio frequency ablation (RFA) is a minimally invasive treatment for
      either hepatocellular carcinoma or metastasis liver carcinoma. In
      order to resect large lesions, the surgeon has to perform multiple
      time-consuming destruction cycles and reposition the RFA needle for
      each of them. The critical step in handling a successful ablation
      and preventing local recurrence is the correct positioning of the
      needle. For small tumors, the surgeon places the middle of the active
      needle tip in the center of the tumor under intra-operative ultrasound
      guidance. When one application is not enough to cover the entire
      tumor, the surgeon needs to repeat the treatment after repositioning
      of the needle, but US guidance is obstructed by the opacity stemming
      from the first RFA application. In this case the surgeon can only
      rely on anatomical knowledge and the repositioning of the RFA needle
      becomes a subjective task limiting the treatment accuracy. We have
      developed a computer assisted surgery guidance application for this
      repositioning procedure. Our software application handles the complete
      process from preoperative image analysis to tool tracking in the
      operating room. Our framework is mostly used for this RFA procedure,
      but is also suitable for any other medical or surgery application.},
      file = {Mundeleer2008.pdf:Mundeleer2008.pdf:PDF},
      issn = {0895-6111},
      keywords = {Algorithms,Artificial Intelligence,Catheter Ablation,Catheter Ablation:
      instrumentation,Catheter Ablation: methods,Hepatectomy,Hepatectomy:
      instrumentation,Hepatectomy: methods,Humans,Pattern Recognition,
      Automated,Pattern Recognition, Automated: methods,Surgery, Computer-Assisted,Surgery,
      Computer-Assisted: instrumentation,Surgery, Computer-Assisted: methods,Systems
      Integration,Ultrasonography, Interventional,Ultrasonography, Interventional:
      instrumentation,Ultrasonography, Interventional: methods, TEC},
      owner = {thomaskroes},
      pmid = {18723321},
      timestamp = {2010.10.22}
    }
  • [DOI] S. Muraki and Y. Kita, “A survey of medical applications of 3D image analysis and computer graphics,” Systems and Computers in Japan, vol. 37, iss. 1, pp. 13-46, 2006.
    [Bibtex]
    @ARTICLE{Muraki2006,
      author = {Muraki, Shigeru and Kita, Yasuyo},
      title = {A survey of medical applications of 3D image analysis and computer
      graphics},
      journal = {Systems and Computers in Japan},
      year = {2006},
      volume = {37},
      pages = {13 - 46},
      number = {1},
      abstract = {This paper is a survey of visualization and analysis techniques
      for medical 3D images for researchers and students in computer science.
      Publications from this decade with internationally high evaluation
      are reviewed, focusing on medical applications of new mathematical
      techniques and computer hardware, mostly from the viewpoint of the
      authors' specialty, namely, volume graphics and computer vision.},
      doi = {10.1002/scj.20393},
      issn = {1520-684X},
      keywords = {medical image processing, computer graphics, 3D image analysis, REV},
      publisher = {Wiley Subscription Services, Inc., A Wiley Company},
      url = {http://dx.doi.org/10.1002/scj.20393}
    }
  • T. Mönch, M. Neugebauer, and B. Preim, “Generation of Smooth and Accurate Surface Models for Surgical Planning,” 2009.
    [Bibtex]
    @ARTICLE{Monch2009,
      author = {M{\"o}nch, T. and Neugebauer, M. and Preim, B.},
      title = {Generation of Smooth and Accurate Surface Models for Surgical Planning},
      year = {2009},
      file = {Monch2009.pdf:Monch2009.pdf:PDF},
      owner = {Thomas},
      publisher = {Citeseer},
      timestamp = {2011.04.28}
    }
  • K. Mühler, M. Neugebauer, C. Tietjen, and B. Preim, “Viewpoint selection for intervention planning,” in IEEE/Eurographics Symposium on Visualization (EuroVis), 2007, pp. 267-274.
    [Bibtex]
    @CONFERENCE{Muhler2007,
      author = {M{\"u}hler, K. and Neugebauer, M. and Tietjen, C. and Preim, B.},
      title = {Viewpoint selection for intervention planning},
      booktitle = {IEEE/Eurographics Symposium on Visualization (EuroVis)},
      year = {2007},
      pages = {267--274},
      organization = {Citeseer},
      file = {Muhler2007.pdf:Muhler2007.pdf:PDF},
      owner = {Thomas},
      timestamp = {2011.04.28}
    }
  • K. Mühler and B. Preim, Automatic Textual Annotation for Surgical Planning, Citeseer.
    [Bibtex]
    @BOOK{Muhler2009,
      title = {Automatic Textual Annotation for Surgical Planning},
      publisher = {Citeseer},
      author = {M{\"u}hler, K. and Preim, B.},
      file = {Muhler2009.pdf:Muhler2009.pdf:PDF},
      owner = {Th},
      timestamp = {2011.03.04}
    }
  • S. Najarian, M. Fallahnezhad, and E. Afshari, “Advances in medical robotic systems with specific applications in surgery-a review,” Journal of Medical Engineering & Technology, vol. 35, iss. 1, pp. 19-33, 2011.
    [Bibtex]
    @ARTICLE{Najarian2011,
      author = {Najarian, S. and Fallahnezhad, M. and Afshari, E.},
      title = {Advances in medical robotic systems with specific applications in
      surgery-a review},
      journal = {Journal of Medical Engineering \& Technology},
      year = {2011},
      volume = {35},
      pages = {19 - 33},
      number = {1},
      file = {Najarian2011.pdf:Najarian2011.pdf:PDF},
      issn = {0309-1902},
      keywords = {REV},
      owner = {Thomas},
      publisher = {Informa Healthcare London},
      timestamp = {2011.02.23}
    }
  • M. Nakamoto, “Automated CT-based 3D surgical planning for total hip replacement: a pilot study,” International Congress Series, vol. 1256, pp. 389-394, 2003.
    [Bibtex]
    @ARTICLE{Nakamoto2003,
      author = {Nakamoto, M},
      title = {Automated CT-based 3D surgical planning for total hip replacement:
      a pilot study},
      journal = {International Congress Series},
      year = {2003},
      volume = {1256},
      pages = {389-394},
      month = {June},
      abstract = {At the preoperative planning stage of CT-based computer-assisted total
      hip replacement, a surgeon determines the parameters such as size,
      position, and orientations of the implants based on interactive visualization
      of 3D models of the implants and hip joint bone. However, the parameters
      depend on surgeon’s visual assessment of spatial relationship between
      the hip joint bone and implants. Our objective is to investigate
      objective criteria for determination of the optimal parameters and
      formulate an automated determination procedure based on the criteria.
      We objectified expertise in the preoperative planning of an experienced
      surgeon into quantitative evaluations and geometrical constraints,
      then formulated the automated procedure as an optimization problem.
      The automated planning system was applied to three cases of patient
      data sets and compared with an experienced surgeon. In the preliminary
      results, the planning parameters determined by the system were generally
      acceptable as a pilot experiment.},
      file = {Nakamoto2003.pdf:Nakamoto2003.pdf:PDF},
      issn = {05315131},
      keywords = {TEC},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • R. Nakamura and R. Tori, “Non-Photorealistic Rendering in Stereoscopic 3D Visualization,” Symposium A Quarterly Journal In Modern Foreign Literatures, pp. 4503-4503, 2010.
    [Bibtex]
    @ARTICLE{Nakamura2010,
      author = {Nakamura, Ricardo and Tori, Romero},
      title = {Non-Photorealistic Rendering in Stereoscopic 3D Visualization},
      journal = {Symposium A Quarterly Journal In Modern Foreign Literatures},
      year = {2010},
      pages = {4503 - 4503},
      file = {Nakamura2010.pdf:Nakamura2010.pdf:PDF},
      keywords = {information,non-photorealistic rendering,real-time rendering,stereoscopy,visualization,
      TEC},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • M. Nakao, T. Kuroda, H. Oyama, G. Sakaguchi, and M. Komeda, “Physics-Based Simulation of Surgical Fields for Preoperative Strategic Planning,” Journal of Medical Systems, vol. 30, pp. 371-380, 2006.
    [Bibtex]
    @ARTICLE{Nakao2006,
      author = {Nakao, Megumi and Kuroda, Tomohiro and Oyama, Hiroshi and Sakaguchi,
      Genichi and Komeda, Masashi},
      title = {Physics-Based Simulation of Surgical Fields for Preoperative Strategic
      Planning},
      journal = {Journal of Medical Systems},
      year = {2006},
      volume = {30},
      pages = {371-380},
      abstract = {Although careful planning of surgical approach is a key for success
      of surgery, conventional planning and simulation tools cannot support
      detailed discussion. This issue is derived from the difficulty of
      estimating complex physical behavior of soft tissues provided by
      a series of surgical procedures like cutting and deformation. This
      paper proposes an adaptive physics-based framework that simulates
      both interactive cutting and accurate deformation on virtual bodies,
      and performs preoperative planning for supporting strategic discussion.
      We focus on limited use of the two models: A particle-based model
      and an FEM-based model considering required quality and performance
      in different situations. FEM-based deformation of incision accurately
      produces estimated surgical fields. Based on the framework, a strategic
      planning system was developed for supporting decision of surgical
      approach using 3D representation of the surgical fields. We applied
      clinical CT dataset of an aortic aneurysm case to the system. Some
      experiments and usability tests confirmed that the system contributes
      to grasping 3D shape and location of the target organs and performs
      detailed discussion on patient-specific surgical approaches.},
      affiliation = {Nara Institute of Science and Technology Graduate School of Information
      Science 8916-5 Takayama, Ikoma Nara Japan},
      file = {Nakao2006.pdf:Nakao2006.pdf:PDF},
      issn = {0148-5598},
      issue = {5},
      keyword = {Medicine},
      keywords = {TEC},
      owner = {Thomas},
      publisher = {Springer Netherlands},
      timestamp = {2011.03.09},
      url = {http://dx.doi.org/10.1007/s10916-006-9021-4}
    }
  • M. Nakao, H. Oyama, M. Komori, T. Matsuda, G. Sakaguchi, M. Komeda, and T. Takahashi, “Haptic reproduction and interactive visualization of a beating heart for cardiovascular surgery simulation.,” International journal of medical informatics, vol. 68, iss. 1-3, pp. 155-63, 2002.
    [Bibtex]
    @ARTICLE{Nakao2002,
      author = {Nakao, M and Oyama, H and Komori, M and Matsuda, T and Sakaguchi,
      G and Komeda, M and Takahashi, T},
      title = {Haptic reproduction and interactive visualization of a beating heart
      for cardiovascular surgery simulation.},
      journal = {International journal of medical informatics},
      year = {2002},
      volume = {68},
      pages = {155-63},
      number = {1-3},
      month = {December},
      abstract = {This paper aims to achieve haptic reproduction and real-time visualization
      of a beating heart for cardiac surgery simulation. Unlike most foregoing
      approaches, the authors focus on time series datasets and propose
      a new framework for interactive simulation of active tissues. The
      framework handles both detection and response of collisions between
      a manipulator and a beating virtual heart. Physics-based force feedback
      of autonomous cardiac motion is also produced based on a stress-pressure
      model, which is adapted to elastic objects filled with fluid. Time
      series datasets of an adult man were applied to an integrated simulation
      system with a force feedback device. The system displays multi-dimensional
      representation of a beating heart and provides a basic training environment
      for surgical palpation. Finally, results of measurement and medical
      assessment confirm the achieved quality and performance of the presented
      framework.},
      annote = {Mention this paper in survey paper},
      file = {Nakao2002.pdf:Nakao2002.pdf:PDF},
      issn = {1386-5056},
      keywords = {Algorithms,Cardiovascular Surgical Procedures,Computer Graphics,Computer
      Simulation,Computer-Assisted Instruction,Heart,Heart: physiology,Humans,Male,Models,
      Cardiovascular,Myocardial Contraction,Palpation,User-Computer Interface,
      TEC},
      owner = {thomaskroes},
      pmid = {12467799},
      timestamp = {2010.10.22}
    }
  • A. Nealen, M. Müller, R. Keiser, E. Boxerman, and M. Carlson, “Physically based deformable models in computer graphics,” in Computer Graphics Forum, vol. 25, iss. 4, pp. 809-836, 2006.
    [Bibtex]
    @CONFERENCE{Nealen2006,
      author = {Nealen, A. and Müller, M. and Keiser, R. and Boxerman, E. and Carlson,
      M.},
      title = {Physically based deformable models in computer graphics},
      booktitle = {Computer Graphics Forum},
      year = {2006},
      volume = {25},
      number = {4},
      pages = {809 - 836},
      organization = {Wiley Online Library},
      file = {Nealen2006.pdf:Nealen2006.pdf:PDF},
      issn = {1467-8659},
      keywords = {TEC},
      owner = {thomaskroes},
      timestamp = {2011.01.26}
    }
  • S. Neeraj and A. Lalit, “Automated medical image segmentation techniques,” Journal of Medical Physics, vol. 35, 2010.
    [Bibtex]
    @ARTICLE{Neeraj2010,
      author = {Neeraj, S. and Lalit, A.},
      title = {Automated medical image segmentation techniques},
      journal = {Journal of Medical Physics},
      year = {2010},
      volume = {35},
      issn = {0971-6203},
      keywords = {TEC, REV},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
  • M. Neugebauer, R. Gasteiger, O. Beuing, V. Diehl, M. Skalej, and B. Preim, “Combining Map Displays and 3D Visualizations for the Analysis of Scalar Data on Cerebral Aneurysm Surfaces,” 2010.
    [Bibtex]
    @ARTICLE{Neugebauer2010,
      author = {Neugebauer, M. and Gasteiger, R. and Beuing, O. and Diehl, V. and
      Skalej, M. and Preim, B.},
      title = {Combining Map Displays and 3D Visualizations for the Analysis of
      Scalar Data on Cerebral Aneurysm Surfaces},
      year = {2010},
      file = {Neugebauer2010.pdf:Neugebauer2010.pdf:PDF},
      keywords = {TEC, NES},
      owner = {Thomas},
      publisher = {Citeseer},
      timestamp = {2011.04.28}
    }
  • D. Nguyen, L. M. Ferreira, J. R. Brownhill, K. J. Faber, and J. A. Johnson, “Design and development of a computer assisted glenoid implantation technique for shoulder replacement surgery,” Computer Aided Surgery, vol. 12, iss. 3, pp. 152-159, 2007.
    [Bibtex]
    @ARTICLE{Nguyen2007,
      author = {Nguyen, D. and Ferreira, L.M. and Brownhill, J.R. and Faber, K.J.
      and Johnson, J.A.},
      title = {Design and development of a computer assisted glenoid implantation
      technique for shoulder replacement surgery},
      journal = {Computer Aided Surgery},
      year = {2007},
      volume = {12},
      pages = {152 - 159},
      number = {3},
      issn = {1092-9088},
      keywords = {APP, OTS, PLA},
      publisher = {Informa UK Ltd UK}
    }
  • S. Nicolau, X. Pennec, L. Soler, and N. Ayache, “A complete augmented reality guidance system for liver punctures: First clinical evaluation,” Medical Image Computing and Computer-Assisted Intervention – MICCAI 2005, pp. 539-547, 2005.
    [Bibtex]
    @ARTICLE{Nicolau2005,
      author = {Nicolau, SA and Pennec, X. and Soler, L. and Ayache, N.},
      title = {A complete augmented reality guidance system for liver punctures:
      First clinical evaluation},
      journal = {Medical Image Computing and Computer-Assisted Intervention - MICCAI
      2005},
      year = {2005},
      pages = {539 - 547},
      file = {Nicolau2005.pdf:Nicolau2005.pdf:PDF},
      owner = {Thomas},
      publisher = {Springer},
      timestamp = {2011.01.31}
    }
  • S. A. Nicolau, X. Pennec, L. Soler, X. Buy, A. Gangi, N. Ayache, and J. Marescaux, “An augmented reality system for liver thermal ablation: Design and evaluation on clinical cases,” Medical Image Analysis, vol. 13, iss. 3, pp. 494-506, 2009.
    [Bibtex]
    @ARTICLE{Nicolau2009,
      author = {S.A. Nicolau and X. Pennec and L. Soler and X. Buy and A. Gangi and
      N. Ayache and J. Marescaux},
      title = {An augmented reality system for liver thermal ablation: Design and
      evaluation on clinical cases},
      journal = {Medical Image Analysis},
      year = {2009},
      volume = {13},
      pages = {494 - 506},
      number = {3},
      abstract = {We present in this paper an augmented reality guidance system for
      liver thermal ablation in interventional radiology. To show the relevance
      of our methodology, the system is incrementally evaluated on an abdominal
      phantom and then on patients in the operating room. The system registers
      in a common coordinate system a preoperative image of the patient
      and the position of the needle that the practitioner manipulates.
      The breathing motion uncertainty is taken into account with a respiratory
      gating technique: the preoperative image and the guidance step are
      synchronized on expiratory phases. In order to fulfil the real-time
      constraints, we have developed and validated algorithms that automatically
      process and extract feature points. Since the guidance interface
      is also a major component of the system effectiveness, we validate
      the overall targeting accuracy on an abdominal phantom. This experiment
      showed that a practitioner can reach a predefined target with an
      accuracy of 2 mm with an insertion time below one minute. Finally,
      we propose a passive evaluation protocol of the overall system in
      the operating room during five interventions on patients. These experiments
      show that the system can provide a guidance information during expiratory
      phases with an error below 5 mm.},
      file = {Nicolau2009.pdf:Nicolau2009.pdf:PDF},
      issn = {1361-8415},
      keywords = {Augmented reality, APP, AUR, HES},
      owner = {Thomas},
      timestamp = {2011.01.31}
    }
  • H. Nienhuys and A. Frank van der Stappen, “A Surgery Simulation Supporting Cuts and Finite Element Deformation,” in Medical Image Computing and Computer-Assisted Intervention – MICCAI 2001, W. Niessen and M. Viergever, Eds., Springer Berlin / Heidelberg, 2001, vol. 2208, pp. 145-152.
    [Bibtex]
    @INCOLLECTION{Nienhuys2001,
      author = {Nienhuys, Han-Wen and Frank van der Stappen, A.},
      title = {A Surgery Simulation Supporting Cuts and Finite Element Deformation},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention – MICCAI
      2001},
      publisher = {Springer Berlin / Heidelberg},
      year = {2001},
      editor = {Niessen, Wiro and Viergever, Max},
      volume = {2208},
      series = {Lecture Notes in Computer Science},
      pages = {145 - 152},
      abstract = {Interactive surgery simulations have conflicting requirements of speed
      and accuracy. In this paper we show how to combine a relatively accurate
      deformation model—the Finite Element (FE) method—and interactive
      cutting without requiring expensive matrix updates or precomputation.
      Our approach uses an iterative algorithm for an interactive linear
      FE deformation simulation. The iterative process requires no global
      precomputation, so runtime changes of the mesh, i.e. cuts, can be
      simulated efficiently. Cuts are performed along faces of the mesh;
      this prevents growth of the mesh. We present a provably correct method
      for changing the mesh topology, and a satisfactory heuristic for
      determining along which faces to perform cuts. Nodes within the mesh
      are relocated to align the mesh with a virtual scalpel. This prevents
      a jagged surface appearance, but also generates degeneracies, which
      are removed afterwards.},
      affiliation = {Institute of Information and Computing Sciences, Utrecht University,
      PO Box 80089, 3508 TB Utrecht, The Netherlands},
      file = {Nienhuys2001.pdf:Nienhuys2001.pdf:PDF},
      keywords = {APP, PRS},
      owner = {thomaskroes},
      timestamp = {2011.01.26}
    }
  • W. Niessen, “Model-Based Image Segmentation for Image-Guided Interventions,” in Image-Guided Interventions, T. Peters and K. Cleary, Eds., Springer US, 2008, pp. 219-239.
    [Bibtex]
    @INCOLLECTION{Niessen2008,
      author = {Niessen, Wiro},
      title = {Model-Based Image Segmentation for Image-Guided Interventions},
      booktitle = {Image-Guided Interventions},
      publisher = {Springer US},
      year = {2008},
      editor = {Peters, Terry and Cleary, Kevin},
      pages = {219 - 239},
      note = {Chapter 8},
      abstract = {Medical image segmentation plays an important role in the field of
      image-guided surgery and minimally invasive interventions. By creating
      three-dimensional anatomical models from individual patients, training,
      planning, and computer guidance during surgery can be improved. This
      chapter briefly describes the most frequently used image segmentation
      techniques, shows examples of their application and potential in
      the field of image-guided surgery and interventions, and discusses
      future trends.},
      affiliation = {Delft University of Technology Netherlands},
      file = {Niessen2008.pdf:Niessen2008.pdf:PDF},
      isbn = {978-0-387-73858-1},
      keyword = {Engineering},
      keywords = {REV},
      owner = {Thomas},
      timestamp = {2011.02.24}
    }
  • A. D. Nijmeh, N. M. Goodger, D. Hawkes, P. J. Edwards, and M. McGurk, “Image-guided navigation in oral and maxillofacial surgery,” British Journal of Oral and Maxillofacial Surgery, vol. 43, iss. 4, pp. 294-302, 2005.
    [Bibtex]
    @ARTICLE{Nijmeh2005,
      author = {A.D. Nijmeh and N.M. Goodger and D. Hawkes and P.J. Edwards and M.
      McGurk},
      title = {Image-guided navigation in oral and maxillofacial surgery},
      journal = {British Journal of Oral and Maxillofacial Surgery},
      year = {2005},
      volume = {43},
      pages = {294 - 302},
      number = {4},
      abstract = {Image-guided surgery is the logical extension of imaging as
      it integrates previously acquired radiological or nuclear medicine
      images with the operative field. In conventional image-guided surgery,
      a surgeon uses a surgical instrument or a pointer to establish correspondence
      between features in the preoperative images and the surgical scene.
      This is not ideal because the surgeon has to look away from the operative
      field to view the data. Augmented reality guidance systems offer
      a solution to this problem but are limited by deformation of soft
      tissues. Real-time intraoperative imaging offers a potential solution
      but is currently only experimental. The additional precision and
      confidence that this technology provides make it a useful tool, and
      recent advances in image-guided surgery offer new opportunities in
      the field of oral and maxillofacial surgery. Here, we review the
      development, current technologies, and applications of image-guided
      surgery and illustrate them with two case reports.},
      file = {Nijmeh2005.pdf:Nijmeh2005.pdf:PDF},
      issn = {0266-4356},
      keywords = {Image-guided surgery, REV, CMS},
      owner = {Thomas},
      timestamp = {2011.02.09}
    }
  • C. Nikou, A. DiGioia III, M. Blackwell, B. Jaramaz, and T. Kanade, “Augmented reality imaging technology for orthopaedic surgery,” Operative Techniques in Orthopaedics, vol. 10, iss. 1, pp. 82-86, 2000.
    [Bibtex]
    @ARTICLE{Nikou2000,
      author = {Nikou, C and {DiGioia III}, A and Blackwell, M and Jaramaz, B and Kanade,
      T},
      title = {Augmented reality imaging technology for orthopaedic surgery},
      journal = {Operative Techniques in Orthopaedics},
      year = {2000},
      volume = {10},
      pages = {82-86},
      number = {1},
      month = {January},
      abstract = {Augmented or hybrid reality is a display technique that combines
      the real world with the virtual world; it permits digital images
      or preoperative planning information to be combined with the surgeon's
      view of the real world. This technique gives surgeons "x-ray vision"
      without the use of ionizing radiation, allowing them to visualize
      parts of the patient's anatomy that are not typically exposed during
      a surgical procedure. Augmented reality can increase the surgeon's
      view of unexposed bones and other tissues during surgery while using
      less invasive techniques. These visualization devices will also allow
      the surgeon to view preoperatively determined locations of incisions
      and real-time medical images with proper spatial alignment during
      surgery. Augmented reality will eventually enable less invasive and
      minimally invasive surgical techniques that are not technologically
      feasible at this time. In this article, the augmented reality technique
      is described and illustrated, showing examples of already existing
      medical systems that use this display technology. Possible orthopaedic
      applications of augmented reality are presented as well as current
      research and practical issues associated with making augmented reality
      a commonplace tool in surgical practice.},
      file = {Nikou2000.pdf:Nikou2000.pdf:PDF},
      issn = {10486666},
      keywords = {augmented reality,arthroscopy,fluoroscopy,imaging technologies,magnetic
      resonance imaging,mri,surgical visualization,virtual reality,visualization
      during orthopaedic surgery, TEC},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • L. P. Nolte and R. Ganz, Computer assisted orthopedic surgery (CAOS), Hogrefe & Huber, 1999.
    [Bibtex]
    @BOOK{Nolte1999,
      title = {Computer assisted orthopedic surgery (CAOS)},
      publisher = {Hogrefe \& Huber},
      year = {1999},
      author = {Nolte, L.P. and Ganz, R.},
      isbn = {0889371687},
      keywords = {OTS},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
  • L-P. Nolte, L. Zamorano, H. Visarius, U. Berlemann, F. Langlotz, E. Arm, and O. Schwarzenbach, “Clinical evaluation of a system for precision enhancement in spine surgery,” Clinical Biomechanics, vol. 10, iss. 6, pp. 293-303, 1995.
    [Bibtex]
    @ARTICLE{Nolte1995,
      author = {L-P Nolte and L Zamorano and H Visarius and U Berlemann and F Langlotz
      and E Arm and O Schwarzenbach},
      title = {Clinical evaluation of a system for precision enhancement in spine
      surgery},
      journal = {Clinical Biomechanics},
      year = {1995},
      volume = {10},
      pages = {293 - 303},
      number = {6},
      abstract = {Most techniques in segmental spinal fixation surgery rely on the identification
      of predefined targets with the help of anatomical landmarks and on
      intraoperative use of image intensifiers. However, because there
      is no direct link between the image information, the accessible spinal
      anatomy, and the action of surgical instruments several potential
      problems and possible complications are still involved. A novel system
      for spinal surgery has been designed allowing for the real-time,
      intraoperative localization of surgical instruments in medical images.
      In practice this was achieved by combining image-guided stereotaxis
      with advanced optoelectronic position sensing techniques. Modules
      were developed for image data processing, surgical planning and simulation,
      and various intraoperative procedures. A detailed validation of the
      system was performed indicating an overall accuracy to be better
      than the slice distance of the spinal image used. In an in-vitro
      setting 20 pilot holes for pedicle screws were prepared in human
      cadaveric lumbar spines. An analysis in 77 histological cuts showed
      an ideal location in 70 and only minor cortex engagement in seven
      sections. In vivo the system has been successfully applied in three
      posterior low lumbar stabilizations with overall 15 transpedicular
      screws.},
      file = {Nolte1995.pdf:Nolte1995.pdf:PDF},
      issn = {0268-0033},
      keywords = {Spine surgery, TEC},
      owner = {thomaskroes},
      timestamp = {2011.01.17}
    }
  • P. M. Novotny, J. A. Stoll, N. V. Vasilyev, P. J. del Nido, P. E. Dupont, T. E. Zickler, and R. D. Howe, “GPU based real-time instrument tracking with three-dimensional ultrasound.,” Medical image analysis, vol. 11, iss. 5, pp. 458-64, 2007.
    [Bibtex]
    @ARTICLE{Novotny2007,
      author = {Novotny, Paul M and Stoll, Jeff A and Vasilyev, Nikolay V and del
      Nido, Pedro J and Dupont, Pierre E and Zickler, Todd E and Howe,
      Robert D},
      title = {GPU based real-time instrument tracking with three-dimensional ultrasound.},
      journal = {Medical image analysis},
      year = {2007},
      volume = {11},
      pages = {458-64},
      number = {5},
      month = {October},
      abstract = {Real-time three-dimensional ultrasound enables new intracardiac surgical
      procedures, but the distorted appearance of instruments in ultrasound
      poses a challenge to surgeons. This paper presents a detection technique
      that identifies the position of the instrument within the ultrasound
      volume. The algorithm uses a form of the generalized Radon transform
      to search for long straight objects in the ultrasound image, a feature
      characteristic of instruments and not found in cardiac tissue. When
      combined with passive markers placed on the instrument shaft, the
      full position and orientation of the instrument is found in 3D space.
      This detection technique is amenable to rapid execution on the current
      generation of personal computer graphics processor units (GPU). Our
      GPU implementation detected a surgical instrument in 31 ms, sufficient
      for real-time tracking at the 25 volumes per second rate of the ultrasound
      machine. A water tank experiment found instrument orientation errors
      of 1.1 degrees and tip position errors of less than 1.8mm. Finally,
      an in vivo study demonstrated successful instrument tracking inside
      a beating porcine heart.},
      file = {Novotny2007.pdf:Novotny2007.pdf:PDF},
      issn = {1361-8415},
      keywords = {Animals,Cardiovascular Surgical Procedures,Cardiovascular Surgical
      Procedures: instrumentatio,Cardiovascular Surgical Procedures: methods,Computer
      Systems,Echocardiography, Three-Dimensional,Echocardiography, Three-Dimensional:
      instrumentati,Echocardiography, Three-Dimensional: methods,Equipment
      Design,Equipment Failure Analysis,Phantoms, Imaging,Reproducibility
      of Results,Sensitivity and Specificity,Signal Processing, Computer-Assisted,Signal
      Processing, Computer-Assisted: instrumentat,Surgery, Computer-Assisted,Surgery,
      Computer-Assisted: instrumentation,Surgery, Computer-Assisted: methods,Surgical
      Instruments,Swine,Ultrasonography, Interventional,Ultrasonography,
      Interventional: instrumentation,Ultrasonography, Interventional:
      methods},
      owner = {thomaskroes},
      pmid = {17681483},
      timestamp = {2010.10.22}
    }
  • K. Numminen, O. Sipila, and H. Makisalo, “Preoperative hepatic 3D models: virtual liver resection using three-dimensional imaging technique.,” European journal of radiology, vol. 56, iss. 2, pp. 179-84, 2005.
    [Bibtex]
    @ARTICLE{Numminen2005,
      author = {Numminen, Kirsti and Sipila, Outi and Makisalo, Heikki},
      title = {Preoperative hepatic 3D models: virtual liver resection using three-dimensional
      imaging technique.},
      journal = {European journal of radiology},
      year = {2005},
      volume = {56},
      pages = {179-84},
      number = {2},
      month = {November},
      abstract = {Emerging new techniques for liver resections set new requirements
      for the preoperative imaging and planning. Open surgery is a three-dimensional
      procedure and planning of the resection line may be difficult when
      basing on conventional two-dimensional CTs or MRIs, although all
      the information is there. With multidetector-row CT (MDCT), thin
      slices can be obtained with excellent temporal resolution, and precise
      three-dimensional (3D) models can be created. We regard 3D imaging
      technique useful in most liver resections. It improves the surgeon's
      knowledge of liver anatomy and makes even more complicated liver
      resections safe. Better knowledge of three-dimensional appearances
      of liver structures may further improve the results of curative liver
      surgery. However, before becoming a routine clinical procedure, research
      and development are still needed. Also, careful testing and evaluation
      of the methods have to be performed. In the future, 3D models will
      probably play an important role in the preoperative planning of liver
      resections.},
      file = {Numminen2005.pdf:Numminen2005.pdf:PDF},
      issn = {0720-048X},
      keywords = {Computer Simulation,Hepatectomy,Hepatectomy: methods,Humans,Image
      Processing, Computer-Assisted,Image Processing, Computer-Assisted:
      methods,Imaging, Three-Dimensional,Imaging, Three-Dimensional: methods,Liver,Liver
      Neoplasms,Liver Neoplasms: pathology,Liver Neoplasms: radiography,Liver
      Neoplasms: surgery,Liver: pathology,Liver: radiography,Liver: surgery,Patient
      Care Planning,Preoperative Care,Tomography, X-Ray Computed,Tomography,
      X-Ray Computed: methods,User-Computer Interface, TEC},
      owner = {thomaskroes},
      pmid = {15890482},
      timestamp = {2010.10.22}
    }
  • S. Oeltze and B. Preim, “Visualization of Anatomic Tree Structures with Convolution Surfaces,” in Proc. Joint IEEE/EG Symposium on Visualization, Eurographics Association, 2004.
    [Bibtex]
    @CONFERENCE{Oeltze2004,
      author = {Oeltze, S. and Preim, B.},
      title = {Visualization of Anatomic Tree Structures with Convolution Surfaces},
      booktitle = {Proc. Joint IEEE/EG Symposium on Visualization, Eurographics Association},
      year = {2004},
      organization = {Citeseer},
      abstract = {We present a method for visualizing anatomic tree-like structures,
      such as vasculature and bronchial trees based on clinical CT- or
      MR data. The vessel skeleton as well as the diameter information
      per voxel serve as input. Our method adheres to these data, while
      producing smooth transitions at branchings and closed, rounded ends
      by means of convolution surfaces. We discuss the filter design with
      respect to irritating bulges, unwanted blending and the correct visualization
      of the vessel diameter. Similar to related work our method is based
      on the assumption
      
      of a circular cross-section of vasculature. In contrast to other authors
      who relied on the explicit description of the geometry we employ
      implicit surfaces to achieve high quality visualization. The method
      has been applied to a large number of vessel trees and produces good
      results in a reasonable time which is due to the efficient use of
      bounding volumes. It is intended for use in therapy planning and
      educational systems.},
      file = {Oeltze2004.pdf:Oeltze2004.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2011.01.05}
    }
  • S. Olsen, “Real-time structural analysis for preoperative surgical planning,” International Congress Series, vol. 1256, pp. 370-375, 2003.
    [Bibtex]
    @ARTICLE{Olsen2003,
      author = {Olsen, S},
      title = {Real-time structural analysis for preoperative surgical planning},
      journal = {International Congress Series},
      year = {2003},
      volume = {1256},
      pages = {370-375},
      month = {June},
      abstract = {In this paper, a novel method for incorporating automatic, patient-specific,
      structural analysis in computer-aided preoperative planning is described.
      Special emphasis has been placed on accurately capturing the mechanical
      behavior of the implant-to-bone interface where failure may occur.
      A finite element solver was developed and integrated into our computer-aided
      planning system for implant dentistry. This paper describes how 3D
      mechanical analysis of bridges, implants and bone can be performed
      in a fully automatic manner providing clinically relevant feedback
      to the surgeon in real time during preoperative planning.},
      file = {Olsen2003.pdf:Olsen2003.pdf:PDF},
      issn = {05315131},
      keywords = {finite element modeling,implant dentistry,structural analysis,surgical
      planning},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • R. Olszewski, G. Cosnard, B. Macq, P. Mahy, and H. Reychler, “3D CT-based cephalometric analysis: 3D cephalometric theoretical concept and software,” Neuroradiology, vol. 48, iss. 11, pp. 853-862, 2006.
    [Bibtex]
    @ARTICLE{Olszewski2006,
      author = {Olszewski, R. and Cosnard, G. and Macq, B. and Mahy, P. and Reychler,
      H.},
      title = {3D CT-based cephalometric analysis: 3D cephalometric theoretical
      concept and software},
      journal = {Neuroradiology},
      year = {2006},
      volume = {48},
      pages = {853 - 862},
      number = {11},
      issn = {0028-3940},
      owner = {Thomas},
      publisher = {Springer},
      timestamp = {2011.02.04}
    }
  • R. Olszewski, M. B. Villamil, D. G. Trevisan, L. P. Nedel, C. M. D. S. Freitas, H. Reychler, and B. Macq, “Towards an integrated system for planning and assisting maxillofacial orthognathic surgery.,” Computer methods and programs in biomedicine, vol. 91, iss. 1, pp. 13-21, 2008.
    [Bibtex]
    @ARTICLE{Olszewski2008,
      author = {Olszewski, Raphael and Villamil, Marta B and Trevisan, Daniela G
      and Nedel, Luciana P and Freitas, Carla M D S and Reychler, Herv\'{e}
      and Macq, Benoit},
      title = {Towards an integrated system for planning and assisting maxillofacial
      orthognathic surgery.},
      journal = {Computer methods and programs in biomedicine},
      year = {2008},
      volume = {91},
      pages = {13-21},
      number = {1},
      month = {July},
      abstract = {Computer-assisted maxillofacial orthognathic surgery is an emerging
      and interdisciplinary field linking orthognathic surgery, remote
      signal engineering and three-dimensional (3D) medical imaging. Most
      of the computational solutions already developed make use of different
      specialized systems which introduce difficulties both in the information
      transfer from one stage to the others and in the use of such systems
      by surgeons. Trying to address such issue, in this work we present
      a common computer-based system that integrates proposed modules for
      planning and assisting the maxillofacial surgery. With that we propose
      to replace the current standard orthognathic preoperative planning,
      and to bring information from a virtual planning to the real operative
      field. The system prototype, including three-dimensional cephalometric
      analysis, static and dynamic virtual orthognathic planning, and mixed
      reality transfer of information to the operation room, is described
      and the first results obtained are presented.},
      file = {Olszewski2008.pdf:Olszewski2008.pdf:PDF},
      issn = {0169-2607},
      keywords = {Computer Simulation,Imaging, Three-Dimensional,Imaging, Three-Dimensional:
      methods,Jaw Abnormalities,Jaw Abnormalities: surgery,Maxillofacial
      Abnormalities,Maxillofacial Abnormalities: surgery,Models, Biological,Surgery,
      Computer-Assisted,Surgery, Computer-Assisted: methods,Surgery, Oral,Surgery,
      Oral: methods,Systems Integration,Tomography, X-Ray Computed,Tomography,
      X-Ray Computed: methods,Tooth Abnormalities,Tooth Abnormalities:
      surgery, APP, CMS, GUI, PLA, SUR, RPP},
      owner = {thomaskroes},
      pmid = {18417245},
      timestamp = {2010.10.22}
    }
  • R. Olszewski, F. Zech, G. Cosnard, V. Nicolas, B. Macq, and H. Reychler, “Three-dimensional computed tomography cephalometric craniofacial analysis: experimental validation in vitro,” International journal of oral and maxillofacial surgery, vol. 36, iss. 9, pp. 828-833, 2007.
    [Bibtex]
    @ARTICLE{Olszewski2007,
      author = {Olszewski, R. and Zech, F. and Cosnard, G. and Nicolas, V. and Macq,
      B. and Reychler, H.},
      title = {Three-dimensional computed tomography cephalometric craniofacial
      analysis: experimental validation in vitro},
      journal = {International journal of oral and maxillofacial surgery},
      year = {2007},
      volume = {36},
      pages = {828 - 833},
      number = {9},
      issn = {0901-5027},
      owner = {Thomas},
      publisher = {Elsevier},
      timestamp = {2011.02.04}
    }
  • R. E. Ong, C. Glisson, H. Altamar, D. Viprakasit, P. Clark, S. D. Herrell, and R. L. Galloway, “Intraprocedural Registration for Image-Guided Kidney Surgery,” Mechatronics, IEEE/ASME Transactions on, vol. PP, iss. 99, pp. 1-6, 2010.
    [Bibtex]
    @ARTICLE{Ong2010,
      author = {Ong, R. E. and Glisson, C. and Altamar, H. and Viprakasit, D. and
      Clark, P. and Herrell, S. D. and Galloway, R. L.},
      title = {Intraprocedural Registration for Image-Guided Kidney Surgery},
      journal = {Mechatronics, IEEE/ASME Transactions on},
      year = {2010},
      volume = {PP},
      pages = {1 - 6},
      number = {99},
      abstract = {This paper reviews the process of using surface-based registration
      techniques for image-guided kidney surgery and presents data for
      both open and minimally invasive kidney surgery either by robot or
      by hand.},
      file = {:Ong2010.pdf:PDF},
      issn = {1083-4435},
      keywords = {TEC},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • G. Orentlicher, D. Goldsmith, and A. Horowitz, “Applications of 3-dimensional virtual computerized tomography technology in oral and maxillofacial surgery: current therapy.,” Journal of oral and maxillofacial surgery : official journal of the American Association of Oral and Maxillofacial Surgeons, vol. 68, iss. 8, pp. 1933-59, 2010.
    [Bibtex]
    @ARTICLE{Orentlicher2010,
      author = {Orentlicher, Gary and Goldsmith, Douglas and Horowitz, Andrew},
      title = {Applications of 3-dimensional virtual computerized tomography technology
      in oral and maxillofacial surgery: current therapy.},
      journal = {Journal of oral and maxillofacial surgery : official journal of the
      American Association of Oral and Maxillofacial Surgeons},
      year = {2010},
      volume = {68},
      pages = {1933-59},
      number = {8},
      month = {August},
      abstract = {With the recent introduction of in-office cone-beam volumetric tomography
      scanners and the development of computed tomographic-based proprietary
      third-party 3-dimensional dental implant software programs, the field
      of implant dentistry is moving toward the 3-dimensional evaluation
      and placement of dental implants according to a restoratively driven
      treatment plan. The goal is to place the dental implant according
      to where the final dental restoration will be fabricated. The precision,
      accuracy, and 3-dimensional visualization capabilities of these technologies
      open avenues for the oral and maxillofacial surgeon in the diagnosis,
      planning, and surgical management of many nonimplant-related cases.
      The combination of these technologies is useful in expanding our
      information in dentoalveolar, preprosthetic, trauma, pathology and
      reconstruction, orthognathic and craniofacial, and cosmetic esthetic
      implant surgical cases. This article discusses the use of these technologies
      in the practice of oral and maxillofacial surgery.},
      file = {Orentlicher2010.pdf:Orentlicher2010.pdf:PDF},
      issn = {1531-5053},
      keywords = {Craniofacial Abnormalities,Craniofacial Abnormalities: radiography,Craniofacial
      Abnormalities: surgery,Dental Implantation, Endosseous,Dental Implantation,
      Endosseous: methods,Esthetics, Dental,Humans,Imaging, Three-Dimensional,Imaging,
      Three-Dimensional: methods,Maxillofacial Injuries,Maxillofacial Injuries:
      radiography,Maxillofacial Injuries: surgery,Models, Anatomic,Oral
      Surgical Procedures, Preprosthetic,Oral Surgical Procedures, Preprosthetic:
      methods,Orthognathic Surgical Procedures,Orthognathic Surgical Procedures:
      methods,Reconstructive Surgical Procedures,Reconstructive Surgical
      Procedures: methods,Software,Surgery, Computer-Assisted,Surgery,
      Oral,Tomography, X-Ray Computed,Tomography, X-Ray Computed: instrumentation,Tomography,
      X-Ray Computed: methods,Tooth Extraction,Tooth Extraction: methods,Tooth,
      Impacted,Tooth, Impacted: radiography,Tooth, Impacted: surgery,User-Computer
      Interface, CMS, APP, PLA, SUR, RPP},
      owner = {thomaskroes},
      pmid = {20542369},
      publisher = {Elsevier Inc.},
      timestamp = {2010.10.22}
    }
  • A. Osorio, J. Galan, J. Nauroy, and S. Dahdouh, “Real time planning, guidance and validation of surgical acts using 3D segmentations, augmented reality projections and surgical tools video tracking,” Methods, 2010.
    [Bibtex]
    @ARTICLE{Osorio2010,
      author = {Osorio, Angel and Galan, Juan-antonio and Nauroy, Julien and Dahdouh,
      Sonia},
      title = {Real time planning, guidance and validation of surgical acts using
      3D segmentations, augmented reality projections and surgical tools
      video tracking},
      journal = {Methods},
      year = {2010},
      abstract = {When performing laparoscopies and punctures, the precise anatomic
      localizations are required. Current techniques very often rely on
      the mapping between the real situation and preoperative images. The
      PC based software we present realizes 3D segmentations of regions
      of interest from CT or MR slices. It allows the planning of punctures
      or trocars insertion trajectories, taking anatomical constraints
      into account. Geometrical transformations allow the projection over
      the patient’s body of the organs and lesions shapes, realistically
      reconstructed, using a standard video projector in the operating
      room. We developed specific image processing software which automatically
      segments and registers images of a webcam used in the operating room
      to give feedback to the user.},
      keywords = {augmented reality,image processing,laparoscopy,radiology,surgical
      tools tracking},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • H. M. Overhoff, “Computer Assisted Orthopaedic Surgery,” International Journal of Computer Assisted Radiology and Surgery, vol. 1, iss. S1, pp. 229-250, 2006.
    [Bibtex]
    @ARTICLE{Overhoff2006,
      author = {Overhoff, H M},
      title = {Computer Assisted Orthopaedic Surgery},
      journal = {International Journal of Computer Assisted Radiology and Surgery},
      year = {2006},
      volume = {1},
      pages = {229-250},
      number = {S1},
      month = {June},
      file = {Overhoff2006.pdf:Overhoff2006.pdf:PDF},
      issn = {1861-6410},
      keywords = {3D ultrasound, shoulder endoprosthesis, navigated implantation,
      APP, OTS, OCS},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • C. Paggetti, S. Martelli, L. Nofrini, and P. Vendruscolo, “Interface Design and Evaluation for CAS Systems,” in Medical Image Computing and Computer-Assisted Intervention – MICCAI 2001, W. Niessen and M. Viergever, Eds., Springer Berlin / Heidelberg, 2001, vol. 2208, pp. 1099-1106.
    [Bibtex]
    @INCOLLECTION{Paggetti2001,
      author = {Paggetti, Cristiano and Martelli, Sandra and Nofrini, Laura and Vendruscolo,
      Paolo},
      title = {Interface Design and Evaluation for CAS Systems},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention – MICCAI
      2001},
      publisher = {Springer Berlin / Heidelberg},
      year = {2001},
      editor = {Niessen, Wiro and Viergever, Max},
      volume = {2208},
      series = {Lecture Notes in Computer Science},
      pages = {1099 - 1106},
      abstract = {The use of Computer Assisted Surgery (CAS) systems is becoming very
      common in the clinical practice, therefore the evaluation of such
      systems in terms of clinical outcomes and ergonomic features is more
      and more relevant. This paper goals has been to define some domain
      specific guidelines for the design of Human Computer Interfaces (HCI)
      for surgical application and to provide an evaluation protocol of
      existing CAS systems. The demonstration application has been a planning
      system developed for the Total Knee Replacement (TKR), a high skill
      demanding procedure, where the planning phase is crucial for the
      success of the intervention. The results we have obtained can be
      extended also to surgical training systems and surgical navigation
      platforms.},
      affiliation = {MEDEA — MEDical and Engineering Applications, Firenze, Italy},
      file = {Paggetti2001.pdf:Paggetti2001.pdf:PDF},
      owner = {Thomas},
      timestamp = {2011.03.09},
      url = {http://dx.doi.org/10.1007/3-540-45468-3_131}
    }
  • N. R. Pal and S. K. Pal, “A review on image segmentation techniques,” Pattern Recognition, vol. 26, iss. 9, pp. 1277-1294, 1993.
    [Bibtex]
    @ARTICLE{Pal1993,
      author = {Nikhil R Pal and Sankar K Pal},
      title = {A review on image segmentation techniques},
      journal = {Pattern Recognition},
      year = {1993},
      volume = {26},
      pages = {1277 - 1294},
      number = {9},
      abstract = {Many image segmentation techniques are available in the literature.
      Some of these techniques use only the gray level histogram, some
      use spatial details while others use fuzzy set theoretic approaches.
      Most of these techniques are not suitable for noisy environments.
      Some works have been done using the Markov Random Field (MRF) model
      which is robust to noise, but is computationally involved. Neural
      network architectures which help to get the output in real time because
      of their parallel processing ability, have also been used for segmentation
      and they work fine even when the noise level is very high. The literature
      on color image segmentation is not that rich as it is for gray tone
      images. This paper critically reviews and summarizes some of these
      techniques. Attempts have been made to cover both fuzzy and non-fuzzy
      techniques including color image segmentation and neural network
      based approaches. Adequate attention is paid to segmentation of range
      images and magnetic resonance images. It also addresses the issue
      of quantitative evaluation of segmentation results.},
      file = {Pal1993.pdf:Pal1993.pdf:PDF},
      issn = {0031-3203},
      keywords = {Image segmentation, IMP, REV},
      owner = {thomaskroes},
      timestamp = {2010.12.03}
    }
  • A. Pandya and G. Auner, “Simultaneous augmented and virtual reality for surgical navigation,” in Fuzzy Information Processing Society, 2005. NAFIPS 2005. Annual Meeting of the North American, 2005, pp. 429-435.
    [Bibtex]
    @INPROCEEDINGS{Pandya2005,
      author = {Pandya, A. and Auner, G.},
      title = {Simultaneous augmented and virtual reality for surgical navigation},
      booktitle = {Fuzzy Information Processing Society, 2005. NAFIPS 2005. Annual Meeting
      of the North American},
      year = {2005},
      pages = { 429 - 435},
      month = {June},
      abstract = {We use a passive articulated arm to track a calibrated end-effector
      mounted video camera. In real time, we can superimpose the live video
      view with the synchronized graphical view of CT-derived segmented
      object(s) of interest within a phantom skull (augmented reality (AR))
      and provide the trajectory of the end-effector (translated to the
      focal point) in orthogonal image data scans and 3D models (VR). Augmented
      reality generation is a natural extension for the surgeon because
      it does both the 2D to 3D transformation and projects the views directly
      onto the patient view. However, there are distinct advantages for
      also having a VR (image guided surgery) view of the tools trajectory.
      Both AR and VR visualization have advantages and disadvantages depending
      on the stage of the surgery and surgeons should have the option to
      select. In this paper, we provide the software design and the network
      communication details of a multi-user, on-demand, near real-time
      simultaneous AR/VR system for surgical guidance.},
      file = {:Pandya2005.pdf:PDF},
      keywords = {CT-derived segmented object; augmented reality; end-effector mounted
      video camera; image guided surgery view; live video view; orthogonal
      image data scan; passive articulated arm; software design; surgical
      guidance; surgical navigation; synchronized graphical view; tools
      trajectory; virtual reality; augmented reality; image segmentation;
      medical image processing; medical robotics; surgery; systems analysis;,
      TEC, AUR},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • L. T. De Paolis, M. Pulimeno, and G. Aloisio, “Visualization System to Improve Surgical Performance during a Laparoscopic Procedure,” Surgery, pp. 300-303, 2010.
    [Bibtex]
    @ARTICLE{Paolis2010,
      author = {De Paolis, L T and Pulimeno, M and Aloisio, G},
      title = {Visualization System to Improve Surgical Performance during a Laparoscopic
      Procedure},
      journal = {Surgery},
      year = {2010},
      pages = {300-303},
      abstract = {Minimally invasive surgery offers advantages that make it the best
      choice for many diseases. Modern technologies give a great support
      to this kind of surgical procedures through medical image processing
      and visualization, 3D organ’s reconstruction and intra-operative
      surgical guidance. In this paper is presented an advanced visualization
      system and the surgeon has the possibility to visualize both the
      traditional patient information, as the CT image set, and a 3D
      model of the patient’s anatomy built from this. Two different
      visualization modalities are available in real time and dynamically.
      According to the surgeon needs, it is possible to obtain the automatic
      reslicing of the orthogonal planes in order to have an accurate visualization
      of the 3D model and slices exactly next to the actual position
      of the surgical instrument tip. In addition, it is possible to activate
      the clipping modality that allows cutting the 3D model in correspondence
      of a chosen visualization plane. The system can be used as support
      for the diagnosis, for the surgical preoperative planning and also
      for an image-guided surgery.},
      file = {:H\:\\Thomas\\PHD\\Literature\\Articles\\Paolis2010.pdf:PDF},
      keywords = {image-guided surgery,medical images,visuali-},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • P. Paul, O. Fleig, and P. Jannin, “Augmented virtuality based on stereoscopic reconstruction in multimodal image-guided neurosurgery: Methods and performance evaluation,” Medical Imaging, IEEE Transactions on, vol. 24, iss. 11, pp. 1500-1511, 2005.
    [Bibtex]
    @ARTICLE{Paul2005,
      author = {Paul, P. and Fleig, O. and Jannin, P.},
      title = {Augmented virtuality based on stereoscopic reconstruction in multimodal
      image-guided neurosurgery: Methods and performance evaluation},
      journal = {Medical Imaging, IEEE Transactions on},
      year = {2005},
      volume = {24},
      pages = {1500 - 1511},
      number = {11},
      abstract = {Displaying anatomical and physiological information derived from preoperative
      medical images in the operating room is critical in image-guided
      neurosurgery. This paper presents a new approach referred to as augmented
      virtuality (AV) for displaying intraoperative views of the operative
      field over three-dimensional (3-D) multimodal preoperative images
      onto an external screen during surgery. A calibrated stereovision
      system was set up between the surgical microscope and the binocular
      tubes. Three-dimensional surface meshes of the operative field were
      then generated using stereopsis. These reconstructed 3-D surface
      meshes were directly displayed without any additional geometrical
      transform over preoperative images of the patient in the physical
      space. Performance evaluation was achieved using a physical skull
      phantom. Accuracy of the reconstruction method itself was shown
      to be within 1 mm (median: 0.76 mm ± 0.27), whereas accuracy of the
      overall approach was shown to be within 3 mm (median: 2.29 mm ± 0.59),
      including the image-to-physical space registration error.
      
      We report the results of six surgical cases where AV was used in conjunction
      with augmented reality. AV not only enabled vision beyond the cortical
      surface but also gave an overview of the surgical area. This approach
      facilitated understanding of the spatial relationship between the
      operative field and the preoperative multimodal 3-D images of the
      patient.},
      file = {Paul2005.pdf:Paul2005.pdf:PDF},
      issn = {0278-0062},
      keywords = {APP, NES, PLA, GUI, AUR},
      owner = {thomaskroes},
      publisher = {IEEE},
      timestamp = {2010.11.24}
    }
  • P. Peters, F. Langlotz, and L. -P. Nolte, “Computer assisted screw insertion into real 3D rapid prototyping pelvis models,” Clinical Biomechanics, vol. 17, iss. 5, pp. 376-382, 2002.
    [Bibtex]
    @ARTICLE{Peters2002,
      author = {P. Peters and F. Langlotz and L. -P. Nolte},
      title = {Computer assisted screw insertion into real 3D rapid prototyping
      pelvis models},
      journal = {Clinical Biomechanics},
      year = {2002},
      volume = {17},
      pages = {376 - 382},
      number = {5},
      file = {Peters2002.pdf:Peters2002.pdf:PDF},
      issn = {0268-0033},
      keywords = {Rapid prototyping, TEC, RPP},
      owner = {thomaskroes},
      timestamp = {2011.01.17}
    }
  • T. Peters, K. Finnis, T. Guo, and A. Parrent, “Neurosurgical Applications,” in Image-Guided Interventions, T. Peters and K. Cleary, Eds., Springer US, 2008, pp. 309-332.
    [Bibtex]
    @INCOLLECTION{Peters2008,
      author = {Peters, Terry and Finnis, Kirk and Guo, Ting and Parrent, Andrew},
      title = {Neurosurgical Applications},
      booktitle = {Image-Guided Interventions},
      publisher = {Springer US},
      year = {2008},
      editor = {Peters, Terry and Cleary, Kevin},
      pages = {309 - 332},
      note = {Chapter 11},
      abstract = {This chapter demonstrates a particular application of stereotactic
      neurosurgery, used in conjunction with deep brain atlases and an
      electrophysiological database, to guide the implantation of lesioning
      devices and stimulation electrodes to alleviate the symptoms of Parkinson’s
      disease and other diseases of the motor system. Central to this work
      is the nonrigid mapping of individual patients’ brains to a standard
      anatomical brain template. This operation not only maps the structure
      in the deep brain of individual patients to match the template, but
      also creates a warping matrix that allows the location of data collected
      from individual patients to be mapped to the database. This database
      may in turn be mapped to new patients to indicate the probable locations
      of stimuli and responses. This information can be employed to assist
      the surgeon in making an initial estimate of the electrode positioning,
      and reduce the exploration needed to finalize the target position
      in which to create a lesion or place a stimulator.},
      affiliation = {University of Western Ontario Robarts Research Institute 100 Perth
      Drive N6A 5K8 London ON Canada},
      file = {Peters2008.pdf:Peters2008.pdf:PDF},
      isbn = {978-0-387-73858-1},
      keyword = {Engineering},
      keywords = {REV, NES},
      owner = {Thomas},
      timestamp = {2011.02.24}
    }
  • T. M. Peters, “Image-guidance for surgical procedures.,” Physics in medicine and biology, vol. 51, iss. 14, p. R505-40, 2006.
    [Bibtex]
    @ARTICLE{Peters2006,
      author = {Peters, Terry M},
      title = {Image-guidance for surgical procedures.},
      journal = {Physics in medicine and biology},
      year = {2006},
      volume = {51},
      pages = {R505-40},
      number = {14},
      month = {July},
      abstract = {Contemporary imaging modalities can now provide the surgeon with high
      quality three- and four-dimensional images depicting not only normal
      anatomy and pathology, but also vascularity and function. A key component
      of image-guided surgery (IGS) is the ability to register multi-modal
      pre-operative images to each other and to the patient. The other
      important component of IGS is the ability to track instruments in
      real time during the procedure and to display them as part of a realistic
      model of the operative volume. Stereoscopic, virtual- and augmented-reality
      techniques have been implemented to enhance the visualization and
      guidance process. For the most part, IGS relies on the assumption
      that the pre-operatively acquired images used to guide the surgery
      accurately represent the morphology of the tissue during the procedure.
      This assumption may not necessarily be valid, and so intra-operative
      real-time imaging using interventional MRI, ultrasound, video and
      electrophysiological recordings are often employed to ameliorate
      this situation. Although IGS is now in extensive routine clinical
      use in neurosurgery and is gaining ground in other surgical disciplines,
      there remain many drawbacks that must be overcome before it can be
      employed in more general minimally-invasive procedures. This review
      overviews the roots of IGS in neurosurgery, provides examples of
      its use outside the brain, discusses the infrastructure required
      for successful implementation of IGS approaches and outlines the
      challenges that must be overcome for IGS to advance further.},
      file = {Peters2006.pdf:Peters2006.pdf:PDF},
      issn = {0031-9155},
      keywords = {Algorithms,Brain,Brain Neoplasms,Brain Neoplasms: radiography,Brain
      Neoplasms: surgery,Brain: radiography,Brain: surgery,Electrophysiology,Humans,Image
      Processing, Computer-Assisted,Magnetic Resonance Imaging,Neurosurgical
      Procedures,Radiosurgery,Radiosurgery: methods,Stereotaxic Techniques,Surgery,
      Computer-Assisted,Surgery, Computer-Assisted: methods, REV},
      owner = {thomaskroes},
      pmid = {16825730},
      timestamp = {2010.10.22}
    }
  • R. Petzold, H.-F. Zeilhofer, and W. A. Kalender, “Rapid prototyping technology in medicine—basics and applications,” Computerized Medical Imaging and Graphics, vol. 23, iss. 5, pp. 277-284, 1999.
    [Bibtex]
    @ARTICLE{Petzold1999,
      author = {Petzold, R. and Zeilhofer, H.-F. and Kalender, W. A.},
      title = {Rapid prototyping technology in medicine—basics and applications},
      journal = {Computerized Medical Imaging and Graphics},
      year = {1999},
      volume = {23},
      pages = {277-284},
      number = {5},
      month = {October},
      abstract = {Using medical models built with Rapid Prototyping (RP) technologies
      represents a new approach for surgical planning and simulation. These
      techniques allow one to reproduce anatomical objects as 3D physical
      models, which give the surgeon a realistic impression of complex
      structures before a surgical intervention. The shift from the visual
      to the visual-tactile representation of anatomical objects introduces
      a new kind of interaction called ‘touch to comprehend’. As can be
      seen, from the presented case studies of maxillo-cranio-facial surgery,
      the RP models are very well suited for use in the diagnosis and the
      precise preoperative simulation of skeleton modifying interventions.},
      file = {Petzold1999.pdf:Petzold1999.pdf:PDF},
      issn = {08956111},
      keywords = {computer-aided surgery,cranio-maxillo-facial surgery,medical models,rapid
      prototyping technology,stereolithography, REV, RPP},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • D. L. Pham, C. Xu, and J. L. Prince, “Current methods in image segmentation,” Biomedical Engineering, vol. 2, iss. 1, p. 315, 2000.
    [Bibtex]
    @ARTICLE{Pham2000,
      author = {Pham, D.L. and Xu, C. and Prince, J.L.},
      title = {Current methods in image segmentation},
      journal = {Biomedical Engineering},
      year = {2000},
      volume = {2},
      pages = {315},
      number = {1},
      keywords = {REV, IMP},
      owner = {thomaskroes},
      timestamp = {2010.11.24}
    }
  • P. J. Pickhardt, J. R. Choi, I. Hwang, J. A. Butler, M. L. Puckett, H. A. Hildebrandt, R. K. Wong, P. A. Nugent, P. A. Mysliwiec, and W. R. Schindler, “Computed tomographic virtual colonoscopy to screen for colorectal neoplasia in asymptomatic adults,” New England Journal of Medicine, vol. 349, iss. 23, p. 2191, 2003.
    [Bibtex]
    @ARTICLE{Pickhardt2003,
      author = {Pickhardt, P.J. and Choi, J.R. and Hwang, I. and Butler, J.A. and
      Puckett, M.L. and Hildebrandt, H.A. and Wong, R.K. and Nugent, P.A.
      and Mysliwiec, P.A. and Schindler, W.R.},
      title = {Computed tomographic virtual colonoscopy to screen for colorectal
      neoplasia in asymptomatic adults},
      journal = {New England Journal of Medicine},
      year = {2003},
      volume = {349},
      pages = {2191},
      number = {23},
      keywords = {TEC},
      owner = {Thomas},
      publisher = {Mass Med Soc},
      timestamp = {2011.02.03}
    }
  • S. M. Pizer, P. T. Fletcher, S. Joshi, A. Thall, J. Z. Chen, Y. Fridman, D. S. Fritsch, A. G. Gash, J. M. Glotzer, M. R. Jiroutek, and others, “Deformable m-reps for 3d medical image segmentation,” International Journal of Computer Vision, vol. 55, iss. 2, pp. 85-106, 2003.
    [Bibtex]
    @ARTICLE{Pizer2003,
      author = {Pizer, S.M. and Fletcher, P.T. and Joshi, S. and Thall, A. and Chen,
      J.Z. and Fridman, Y. and Fritsch, D.S. and Gash, A.G. and Glotzer,
      J.M. and Jiroutek, M.R. and others},
      title = {Deformable m-reps for 3d medical image segmentation},
      journal = {International Journal of Computer Vision},
      year = {2003},
      volume = {55},
      pages = {85 - 106},
      number = {2},
      file = {Pizer2003.pdf:Pizer2003.pdf:PDF},
      issn = {0920-5691},
      keywords = {TEC, IMP},
      owner = {thomaskroes},
      publisher = {Springer},
      timestamp = {2011.01.03}
    }
  • A. Polo, C. Salembier, J. Venselaar, and P. Hoskin, “Review of intraoperative imaging and planning techniques in permanent seed prostate brachytherapy.,” Radiotherapy and oncology : journal of the European Society for Therapeutic Radiology and Oncology, vol. 94, iss. 1, pp. 12-23, 2010.
    [Bibtex]
    @ARTICLE{Polo2010,
      author = {Polo, Alfredo and Salembier, Carl and Venselaar, Jack and Hoskin,
      Peter},
      title = {Review of intraoperative imaging and planning techniques in permanent
      seed prostate brachytherapy.},
      journal = {Radiotherapy and oncology : journal of the European Society for Therapeutic
      Radiology and Oncology},
      year = {2010},
      volume = {94},
      pages = {12-23},
      number = {1},
      month = {January},
      abstract = {Techniques for permanent low dose rate seed brachytherapy for prostate
      cancer have evolved in the recent years with increasing use of interactive
      planning in the operating room (OR) during seed placement. This overcomes
      one of the main sources of error in the original two-stage technique
      in which a planning study performed at a time distant from the implant
      is used to define seed positions and then an attempt to reproduce
      this at the time of implant is required. This review addresses the
      various ways in which real-time dosimetry may be used. Three basic
      approaches are described; intraoperative planning when a plan is
      produced as a separate stage prior to the implant during a single
      OR procedure, interactive planning which incorporates stepwise modification
      of the treatment plan based on feedback from real-time tracking of
      the actual needle positions and dynamic dose calculation in which
      there is a continuous updating of the dosimetry using continuous
      feedback of the seed positions as they are implanted. The impact
      of these changes on dosimetric and biochemical outcome endpoints
      is considered demonstrating the superior results which can be obtained
      by closer integration of the planning processes with actual implantation
      and seed deposition.},
      file = {Polo2010.pdf:Polo2010.pdf:PDF},
      issn = {1879-0887},
      keywords = {Brachytherapy,Humans,Intraoperative Period,Male,Prostatic Neoplasms,Prostatic
      Neoplasms: radiotherapy,Radiotherapy Dosage,Radiotherapy Planning,
      Computer-Assisted, REV},
      owner = {thomaskroes},
      pmid = {20074822},
      publisher = {Elsevier Ireland Ltd},
      timestamp = {2010.10.22}
    }
  • A. Pommert, “Validation of medical volume visualization: a literature review,” International Congress Series, vol. 1256, pp. 571-576, 2003.
    [Bibtex]
    @ARTICLE{Pommert2003,
      author = {Pommert, A},
      title = {Validation of medical volume visualization: a literature review},
      journal = {International Congress Series},
      year = {2003},
      volume = {1256},
      pages = {571-576},
      month = {June},
      abstract = {For applications of volume visualization in medicine, it is important
      to assure that the 3-D images show the true anatomical situation,
      or at least to know about their limitations. In this paper, various
      methods for evaluation of image quality are reviewed. They are classified
      based on the fundamental terms of diagnostic and technical image
      quality, and discussed with respect to the question what clues they
      provide on how to choose parameters, or improve imaging and visualization
      procedures.},
      file = {Pommert2003.pdf:Pommert2003.pdf:PDF},
      issn = {05315131},
      keywords = {image quality,tomography,volume visualization, REV},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • A. Pommert and K. Höhne, “Evaluation of Image Quality in Medical Volume Visualization: The State of the Art,” in Medical Image Computing and Computer-Assisted Intervention — MICCAI 2002, T. Dohi and R. Kikinis, Eds., Springer Berlin / Heidelberg, 2002, vol. 2489, pp. 598-605.
    [Bibtex]
    @INCOLLECTION{Pommert2002,
      author = {Pommert, Andreas and Höhne, Karl},
      title = {Evaluation of Image Quality in Medical Volume Visualization: The
      State of the Art},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention — MICCAI
      2002},
      publisher = {Springer Berlin / Heidelberg},
      year = {2002},
      editor = {Dohi, Takeyoshi and Kikinis, Ron},
      volume = {2489},
      series = {Lecture Notes in Computer Science},
      pages = {598-605},
      abstract = {For applications of volume visualization in medicine, it is important
      to assure that the 3D images show the true anatomical situation,
      or at least to know about their limitations. In this paper, various
      methods for evaluation of image quality are reviewed. They are classified
      based on the fundamental terms of intelligibility and fidelity, and
      discussed with respect to the question what clues they provide on
      how to choose parameters, or improve imaging and visualization procedures.},
      affiliation = {University Hospital Hamburg-Eppendorf Institute of Mathematics and
      Computer Science in Medicine (IMDM) 20251 Hamburg Germany},
      file = {Pommert2002.pdf:Pommert2002.pdf:PDF},
      keywords = {REV},
      owner = {thomaskroes},
      timestamp = {2010.12.17}
    }
  • M. Poon, G. Hamarneh, and R. Abugharbieh, “Segmentation of complex objects with non-spherical topologies from volumetric medical images using 3D livewire,” in Proc. of SPIE, vol. 6512, pp. 651231-1, 2007.
    [Bibtex]
    @CONFERENCE{Poona2007,
      author = {Poon, M. and Hamarneh, G. and Abugharbieh, R.},
      title = {Segmentation of complex objects with non-spherical topologies from
      volumetric medical images using 3D livewire},
      booktitle = {Proc. of SPIE Vol},
      year = {2007},
      volume = {6512},
      pages = {651231 - 1},
      organization = {Citeseer},
      abstract = {Segmentation of 3D data is one of the most challenging tasks in medical
      image analysis. While reliable automatic methods are typically preferred,
      their success is often hindered by poor image quality and significant
      variations in anatomy. Recent years have thus seen an increasing
      interest in the development of semi-automated segmentation methods
      that combine computational tools with intuitive, minimal user interaction.
      In an earlier work, we introduced a highly-automated technique for
      medical image segmentation, where a 3D extension of the traditional
      2D Livewire was proposed. In this paper, we present an enhanced and
      more powerful 3D Livewire-based segmentation approach with new features
      designed to primarily enable the handling of complex object topologies
      that are common in biological structures. The point ordering algorithm
      we proposed earlier, which automatically pairs up seedpoints in 3D,
      is improved in this work such that multiple sets of points are allowed
      to simultaneously exist. Point sets can now be automatically merged
      and split to accommodate for the presence of concavities, protrusions,
      and non-spherical topologies. The robustness of the method is further
      improved by extending the ‘turtle algorithm’, presented earlier,
      by using a turtle-path pruning step. Tests on both synthetic and
      real medical images demonstrate the efficiency, reproducibility, accuracy,
      and robustness of the proposed approach. Among the examples illustrated
      is the segmentation of the left and right ventricles from a T1-weighted
      MRI scan, where an average task time reduction of 84.7% was achieved
      when compared to a user performing 2D Livewire segmentation on every
      slice.},
      file = {Poona2007.pdf:Poona2007.pdf:PDF},
      keywords = {TEC, IMP},
      owner = {thomaskroes},
      timestamp = {2010.12.21}
    }
  • I. Porro, A. Schenone, M. Fato, E. Raposio, E. Molinari, and F. Beltrame, “An integrated environment for plastic surgery support: building virtual patients, simulating interventions, and supporting intraoperative decisions.,” Computerized medical imaging and graphics : the official journal of the Computerized Medical Imaging Society, vol. 29, iss. 5, pp. 385-94, 2005.
    [Bibtex]
    @ARTICLE{Porro2005,
      author = {Porro, Ivan and Schenone, Andrea and Fato, Marco and Raposio, Edoardo
      and Molinari, Elisa and Beltrame, Francesco},
      title = {An integrated environment for plastic surgery support: building virtual
      patients, simulating interventions, and supporting intraoperative
      decisions.},
      journal = {Computerized medical imaging and graphics : the official journal
      of the Computerized Medical Imaging Society},
      year = {2005},
      volume = {29},
      pages = {385-94},
      number = {5},
      month = {July},
      abstract = {In the last decade a number of environments for Computer Supported
      Plastic Surgery have been presented. Nevertheless, an overall approach
      for training and intraoperative support is still missing or has not
      been widely exploited yet. We developed a fully integrated system
      which allows surgical simulation, planning, and support for computer-guided
      plastic surgery procedures starting from image acquisition to final
      intraoperative assistance. The system also provides the user with
      a radiological workstation able to analyse patient medical images
      and case studies, with advanced bidimensional and three dimensional
      image processing functionalities. We intend to demonstrate that such
      a platform can be built at an affordable cost. The radiological workstation
      is capable of supporting radiologists and surgeons in real patient
      case studies and the simulation workstation may be adopted by plastic
      surgeons in teaching and training of complex surgical planning. Moreover,
      results of simulation can be used in the operating room with a relatively
      high benefit in terms of improved accuracy, reduction of surgical
      risks, and decrease in training costs.},
      file = {Porro2005.pdf:Porro2005.pdf:PDF},
      issn = {0895-6111},
      keywords = {Decision Support Techniques,Humans,Patient Simulation,Radiology Information
      Systems,Radiology Information Systems: organization \& admi,Surgery,
      Computer-Assisted,Surgery, Computer-Assisted: organization \& adminis,Surgery,
      Plastic,Surgery, Plastic: education,User-Computer Interface, APP,
      CMS, GUI, PLA},
      owner = {thomaskroes},
      pmid = {15893913},
      timestamp = {2010.10.22}
    }
  • P. Potamianos, A. A. Amis, A. J. Forester, M. McGurk, and M. Bircher, “Rapid prototyping for orthopaedic surgery.,” Proceedings of the Institution of Mechanical Engineers. Part H, Journal of engineering in medicine, vol. 212, iss. 5, pp. 383-93, 1998.
    [Bibtex]
    @ARTICLE{Potamianos1998,
      author = {Potamianos, P and Amis, A A and Forester, A J and McGurk, M and Bircher,
      M},
      title = {Rapid prototyping for orthopaedic surgery.},
      journal = {Proceedings of the Institution of Mechanical Engineers. Part H, Journal
      of engineering in medicine},
      year = {1998},
      volume = {212},
      pages = {383-93},
      number = {5},
      month = {January},
      abstract = {The revision of an orthopaedic procedure can present surgeons with
      the challenge of a complex reconstructive process. Orthopaedic surgery
      can also face considerable challenges in cases presenting extensive
      primary injuries with multiple bone fragmentation, as well as in
      cases presenting bone deformities. Radiographs are used routinely
      for orthopaedic surgical planning, yet they provide inadequate information
      on the precise three-dimensional extent of bone defects. Three-dimensional
      reconstructions from X-ray computed tomography offer superior visualization
      but are not portable for consultation or readily available in the
      operating theatre for guidance during a procedure. A physical model
      manufactured from X-ray computed tomography data can offer surgeons
      a clear understanding of complex anatomical detail, by providing
      an intuitive physical relationship between patient and model. Rapid
      prototyping was used for the construction of an anatomical model
      in a case presenting with a complex shoulder injury. The model provided
      a definitive interpretation of joint pathology and enabled a full
      assessment of the degree of injury.},
      file = {Potamianos1998.pdf:Potamianos1998.pdf:PDF},
      issn = {0954-4119},
      keywords = {Adult,Clavicle,Clavicle: injuries,Computer Simulation,Computer-Aided
      Design,Female,Fractures, Bone,Fractures, Bone: radiography,Fractures,
      Bone: surgery,Humans,Models, Anatomic,Orthopedic Procedures,Scapula,Scapula:
      injuries, TEC, RPP, OTS},
      owner = {thomaskroes},
      pmid = {9803157},
      timestamp = {2010.10.22}
    }
  • P. Pott, S. Heute, P. Weiser, A. Wagner, E. Badreddin, and M. Schwarz, “Computer Assisted Orthopaedic Surgery,” International Journal of Computer Assisted Radiology and Surgery, vol. 4, iss. S1, pp. 97-105, 2009.
    [Bibtex]
    @ARTICLE{Pott2009,
      author = {Pott, P and Heute, S and Weiser, P and Wagner, A and Badreddin, E
      and Schwarz, M},
      title = {Computer Assisted Orthopaedic Surgery},
      journal = {International Journal of Computer Assisted Radiology and Surgery},
      year = {2009},
      volume = {4},
      pages = {97 - 105},
      number = {S1},
      month = {April},
      file = {Pott2009.pdf:Pott2009.pdf:PDF},
      issn = {1861-6410},
      keywords = {orthopaedic surgery, epizactor, workspace-to-volume ratio, hybrid kinematics},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • B. Preim and D. Bartz, Visualization in Medicine. Theory, Algorithms, and Applications. Series in Computer Graphics, Morgan Kaufmann, 2007.
    [Bibtex]
    @BOOK{Preim2007,
      title = {Visualization in Medicine. Theory, Algorithms, and Applications.
      Series in Computer Graphics},
      publisher = {Morgan Kaufmann},
      year = {2007},
      author = {Preim, B. and Bartz, D.},
      owner = {thomaskroes},
      timestamp = {2010.12.03}
    }
  • B. Preim and S. Oeltze, “3D Visualization of Vasculature: An Overview,” Vascular, 2008.
    [Bibtex]
    @ARTICLE{Preim2008,
      author = {Preim, Bernhard and Oeltze, Steffen},
      title = {3D Visualization of Vasculature: An Overview},
      journal = {Vascular},
      year = {2008},
      file = {Preim2008.pdf:Preim2008.pdf:PDF},
      keywords = {REV, TEC},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • B. Preim, D. Selle, W. Spindler, K. Oldhafer, and H. O. Peitgen, “Interaction techniques and vessel analysis for preoperative planning in liver surgery,” in Medical Image Computing and Computer-Assisted Intervention – MICCAI 2000, 2000.
    [Bibtex]
    @CONFERENCE{tay,
      author = {Preim, B. and Selle, D. and Spindler, W. and Oldhafer, K. and Peitgen,
      H.O.},
      title = {Interaction techniques and vessel analysis for preoperative planning
      in liver surgery},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention--MICCAI
      2000},
      year = {2000},
      organization = {Springer},
      abstract = {We present visualization and interaction techniques for preoperative
      planning in oncologic liver surgery. After several image processing
      steps a 3d visualization of all relevant anatomic and pathologic
      structures is created. In this 3d visualization a surgeon can flexibly
      specify resection regions with resection tools which can be applied
      selectively to different structures. The combination of several views
      which can be synchronized makes it easy to compare different views
      on the resection plan. In addition, we present the application of
      vessel analysis techniques in order to make suggestions for optimal resections according to guidelines
      for liver surgery. The basic idea for these suggestions is to define
      the region which has to be removed in order to resect a lesion with
      a given tumor free margin. For this purpose, the vessels involved
      and the region supplied by them is estimated. It turned out that
      the resections suggested provide a reasonable and useful basis for
      preoperative planning. This contribution presents novel methods which
      have not been evaluated thoroughly yet.},
      file = {Preim2000.pdf:Preim2000.pdf:PDF},
      keywords = {TEC, HES},
      owner = {thomaskroes},
      timestamp = {2011.01.03}
    }
  • B. Preim, C. Tietjen, W. Spindler, and H. Peitgen, “Integration of Measurement Tools in Medical 3d Visualizations,” Work, pp. 21-28, 2002.
    [Bibtex]
    @ARTICLE{Preim2002,
      author = {Preim, Bernhard and Tietjen, Christian and Spindler, Wolf and Peitgen,
      Heinz-otto},
      title = {Integration of Measurement Tools in Medical 3d Visualizations},
      journal = {Work},
      year = {2002},
      pages = {21 - 28},
      abstract = {We discuss 3d interaction techniques for the quantitative analysis
      of spatial relations in medical visualizations. We describe the design
      and implementation of measurement tools to measure distances, angles
      and volumes in 3d visualizations. The visualization of measurement
      tools as recognizable 3d objects and a 3d interaction, which is both
      intuitive and precise, determines the usability of such facilities.
      Measurements may be carried out in 2d visualizations of the original
      radiological data and in 3d visualizations. The result of a measurement
      carried out in one view is also displayed in the other view appropriately.
      We discuss the validation of the obtained measures. Finally, we describe
      how some important measurement tasks may be solved automatically.},
      file = {Preim2002.pdf:Preim2002.pdf:PDF},
      keywords = {TEC},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • A. Qiao and Y. Liu, “Medical application oriented blood flow simulation,” Clinical Biomechanics, vol. 23, iss. Supplement 1, p. S130 – S136, 2008.
    [Bibtex]
    @ARTICLE{Qiao2008,
      author = {Aike Qiao and Youjun Liu},
      title = {Medical application oriented blood flow simulation},
      journal = {Clinical Biomechanics},
      year = {2008},
      volume = {23},
      pages = {S130 - S136},
      number = {Supplement 1},
      note = {Research and Development on Biomechanics in China},
      abstract = {In order to show the application of computational fluid dynamics in
      biomedical engineering, some numerical simulations of blood flow
      in arteries, such as hemodynamics of bypass graft for stenosed arteries,
      hemodynamics of stented aneurysm at the aortic arch, hemodynamics
      of bypass treatment for DeBakey III aortic dissection, and influence
      of blood flow on the thermal characteristics of microwave ablation,
      which were performed by the authors, were reviewed. These simulations
      can be a powerful tool for the computer assisted surgery in medical
      application.},
      file = {Qiao2008.pdf:Qiao2008.pdf:PDF},
      issn = {0268-0033},
      keywords = {Computer assisted surgery},
      owner = {Thomas},
      timestamp = {2011.02.28}
    }
  • J. Qin, W. Pang, Y. Chui, T. Wong, and P. Heng, “A Novel Modeling Framework for Multilayered Soft Tissue Deformation in Virtual Orthopedic Surgery,” Journal of Medical Systems, vol. 34, pp. 261-271, 2010.
    [Bibtex]
    @ARTICLE{Qin2010a,
      author = {Qin, Jing and Pang, Wai-Man and Chui, Yim-Pan and Wong, Tien-Tsin
      and Heng, Pheng-Ann},
      title = {A Novel Modeling Framework for Multilayered Soft Tissue Deformation
      in Virtual Orthopedic Surgery},
      journal = {Journal of Medical Systems},
      year = {2010},
      volume = {34},
      pages = {261-271},
      abstract = {Realistic modeling of soft tissue deformation is crucial to virtual
      orthopedic surgery, especially orthopedic trauma surgery which involves
      layered heterogeneous soft tissues. In this paper, a novel modeling
      framework for multilayered soft tissue deformation is proposed in
      order to facilitate the development of orthopedic surgery simulators.
      We construct our deformable model according to the layered structure
      of real human organs, and this results in a multilayered model. The
      division of layers is based on the segmented Chinese Visible Human
      (CVH) dataset. This enhances the realism and accuracy in the simulation.
      For the sake of efficiency, we employ 3D mass-spring system to our
      multilayered model. The nonlinear passive biomechanical properties
      of skin and skeletal muscle are achieved by introducing a bilinear
      elasticity scheme to the springs in the mass-spring system. To efficiently
      and accurately reproduce the biomechanical properties of certain
      human tissues, an optimization approach is employed in configuring
      the parameters of the springs. Experimental data from biomechanics
      literatures are used as benchmarking references. With the employment
      of Physics Processing Unit (PPU) and high quality volume visualization,
      our framework is developed into an interactive and intuitive platform
      for virtual surgery training systems. Several experiments demonstrate
      the feasibility of the proposed framework in providing interactive
      and realistic deformation for orthopedic surgery simulation.},
      affiliation = {The Chinese University of Hong Kong Department of Computer Science
      and Engineering Shatin N. T. Hong Kong},
      file = {Qin2010a.pdf:Qin2010a.pdf:PDF},
      issn = {0148-5598},
      issue = {3},
      keyword = {Medicine},
      keywords = {TEC},
      owner = {Thomas},
      publisher = {Springer Netherlands},
      timestamp = {2011.02.14}
    }
  • J. Qin, W. Pang, B. P. Nguyen, D. Ni, and C. Chui, “Particle-based simulation of blood flow and vessel wall interactions in virtual surgery,” in Proceedings of the 2010 Symposium on Information and Communication Technology, New York, NY, USA, 2010, pp. 128-133.
    [Bibtex]
    @INPROCEEDINGS{Qin2010b,
      author = {Qin, Jing and Pang, Wai-Man and Nguyen, Binh P. and Ni, Dong and
      Chui, Chee-Kong},
      title = {Particle-based simulation of blood flow and vessel wall interactions
      in virtual surgery},
      booktitle = {Proceedings of the 2010 Symposium on Information and Communication
      Technology},
      year = {2010},
      series = {SoICT '10},
      pages = {128--133},
      address = {New York, NY, USA},
      publisher = {ACM},
      acmid = {1852636},
      file = {Qin2010b.pdf:Qin2010b.pdf:PDF},
      isbn = {978-1-4503-0105-3},
      keywords = {blood flow and vessel wall interactions, smoothed particle hydrodynamics,
      virtual surgery, TEC},
      location = {Hanoi, Viet nam},
      numpages = {6},
      owner = {Thomas},
      timestamp = {2011.02.23}
    }
  • Z. Qingsong, K. C. Keong, and N. W. Sing, “Interactive surgical planning using context based volume visualization techniques,” in Medical Imaging and Augmented Reality, 2001. Proceedings. International Workshop on, 2001, pp. 21-25.
    [Bibtex]
    @INPROCEEDINGS{Zou2001,
      author = {Zou Qingsong and Kwoh Chee Keong and Ng Wan Sing},
      title = {Interactive surgical planning using context based volume visualization
      techniques},
      booktitle = {Medical Imaging and Augmented Reality, 2001. Proceedings. International
      Workshop on},
      year = {2001},
      pages = {21 - 25},
      abstract = {We present a new volume visualization scheme, context based volume
      visualization to assist the surgeon in surgical planning. This visualization
      scheme differs from ordinary surface based rendering and volume rendering
      by providing a framework to combine surface based rendering and volume
      rendering. We can achieve the powerful manipulating capability of
      surface based rendering and as good a rendering effect as volume
      rendering at the same time. Using a special data structured-segment
      tree to manage the visualization scene, which includes all the volume
      objects and graphics objects (the surgical tools) needed to be visualized,
      this visualization scheme provides a common context based interface
      for both graphics objects and volume objects, through which, we can
      control graphics and volume objects easily in the same way. The context
      based visualization scheme can greatly increase the performance of
      volume visualization by generating the scene much faster through
      selectively revisualizing the affected objects. Based on these ideas,
      we implement an interactive surgical planning system, Virtual Doctor
      based on OpenGL 1.1 on WinNT platform and VolumePro vg500 card. This
      system is a good 3D volume visualization tool and augmented reality
      system for interactive surgical planning and further research in
      this area},
      file = {Zou2001.pdf:Zou2001.pdf:PDF},
      keywords = {OpenGL;Virtual Doctor;VolumePro vg500 card;WinNT platform;augmented
      reality;context based volume visualization;interactive surgical planning
      system;medical image processing;surface based rendering;surgeon;tree
      data structure;volume rendering;augmented reality;data visualisation;interactive
      systems;medical image processing;rendering (computer graphics);surgery;tree
      data structures;, TEC},
      owner = {thomaskroes},
      timestamp = {2011.01.25}
    }
  • M. Raaijmaakers, F. Gelaude, K. De Smedt, T. Clijmans, J. Dille, and M. Mulier, “A custom-made guide-wire positioning device for Hip Surface Replacement Arthroplasty: description and first results,” BMC Musculoskeletal Disorders, vol. 11, iss. 1, p. 161, 2010.
    [Bibtex]
    @ARTICLE{Raaijmaakers2010,
      author = {Raaijmaakers, M. and Gelaude, F. and De Smedt, K. and Clijmans, T.
      and Dille, J. and Mulier, M.},
      title = {A custom-made guide-wire positioning device for Hip Surface Replacement
      Arthroplasty: description and first results},
      journal = {BMC Musculoskeletal Disorders},
      year = {2010},
      volume = {11},
      pages = {161},
      number = {1},
      file = {Raaijmaakers2010.pdf:Raaijmaakers2010.pdf:PDF},
      issn = {1471-2474},
      keywords = {TRM},
      owner = {Th},
      publisher = {BioMed Central Ltd},
      timestamp = {2011.02.25}
    }
  • K. Radermacher, C. V. Pichler, S. Fischer, and G. Rau, “3D-Visualisation in Surgery,” Most, pp. 1-6, 1998.
    [Bibtex]
    @ARTICLE{Radermacher1998a,
      author = {Radermacher, K and Pichler, C V and Fischer, S and Rau, G},
      title = {3D-Visualisation in Surgery},
      journal = {Most},
      year = {1998},
      pages = {1 - 6},
      abstract = {The clinical introduction of new technologies in surgical therapy
      has changed the traditional intraoperative procedures especially
      in terms of visual information available for the surgical team. The
      direct view on the operating site is more and more replaced by indirect
      visual information on the basis of optical systems and displays.
      Especially in endoscopic minimal access surgery the surgeon is decoupled
      from the operating site and a high quality and reliability of realistic
      visual spatial information is crucial. The use of stereoscopic systems
      could potentially provide an improved visual feedback for spatial
      manipulations, but the real impact of 3D-visualisation systems strongly
      depends on its implementation and boundary conditions within clinical
      applications. This paper discusses some aspects and potential bottlenecks
      of 2D and 3D visualization systems on the basis of experiences from
      laboratory investigations and clinical field studies in the area
      of laparoscopic surgery.},
      file = {Radermacher1998a.pdf:Radermacher1998a.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • K. Radermacher, F. Portheine, M. Anton, A. Zimolong, G. Kaspers, G. Rau, and H. W. Staudte, “Computer assisted orthopaedic surgery with image based individual templates,” Clinical orthopaedics and related research, vol. 354, p. 28, 1998.
    [Bibtex]
    @ARTICLE{Radermacher1998b,
      author = {Radermacher, K. and Portheine, F. and Anton, M. and Zimolong, A.
      and Kaspers, G. and Rau, G. and Staudte, H.W.},
      title = {Computer assisted orthopaedic surgery with image based individual
      templates},
      journal = {Clinical orthopaedics and related research},
      year = {1998},
      volume = {354},
      pages = {28},
      keywords = {TRM},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
  • K. Radermacher, H. Staudte, and G. Rau, “Computer assisted matching of planning and execution in orthopedic surgery,” in Proceedings of the Annual Conference on Engineering in Medicine and Biology, vol. 15, pp. 946-949, 1993.
    [Bibtex]
    @CONFERENCE{Radermacher1993,
      author = {Radermacher, K. and Staudte, HW and Rau, G.},
      title = {Computer assisted matching of planning and execution in orthopedic surgery},
      booktitle = {Proceedings of the Annual Conference on Engineering in Medicine and Biology, IEEE, Piscataway, NJ, USA, 1993},
      year = {1993},
      volume = {15},
      pages = {946 - 949},
      owner = {thomaskroes},
      timestamp = {2010.10.26}
    }
  • A. Radetzky, “Visualization and simulation techniques for surgical simulators using actual patient’s data,” Artificial Intelligence in Medicine, vol. 26, iss. 3, pp. 255-279, 2002.
    [Bibtex]
    @ARTICLE{Radetzky2002,
      author = {Radetzky, A},
      title = {Visualization and simulation techniques for surgical simulators using
      actual patient's data},
      journal = {Artificial Intelligence in Medicine},
      year = {2002},
      volume = {26},
      pages = {255 - 279},
      number = {3},
      month = {November},
      abstract = {Because of the increasing complexity of surgical interventions research
      in surgical simulation became more and more important over the last
      years. However, the simulation of tissue deformation is still a challenging
      problem, mainly due to the short response times that are required
      for real-time interaction. The demands to hard and software are even
      larger if not only the modeled human anatomy is used but the anatomy
      of actual patients. This is required if the surgical simulator should
      be used as training medium for expert surgeons rather than students.
      In this article, suitable visualization and simulation methods for
      surgical simulation utilizing actual patient’s datasets are described.
      Therefore, the advantages and disadvantages of direct and indirect
      volume rendering for the visualization are discussed and a neuro-fuzzy
      system is described, which can be used for the simulation of interactive
      tissue deformations. The neuro-fuzzy system makes it possible to
      define the deformation behavior based on a linguistic description
      of the tissue characteristics or to learn the dynamics by using measured
      data of real tissue. Furthermore, a simulator for minimally-invasive
      neurosurgical interventions is presented that utilizes the described
      visualization and simulation methods. The structure of the simulator
      is described in detail and the results of a system evaluation by
      an experienced neurosurgeon—a quantitative comparison between different
      methods of virtual endoscopy as well as a comparison between real
      brain images and virtual endoscopies—are given. The evaluation
      proved that the simulator provides a higher realism of the visualization
      and simulation than other currently available simulators.},
      file = {Radetzky2002.pdf:Radetzky2002.pdf:PDF},
      issn = {09333657},
      keywords = {actual patient,deformation,neuro-fuzzy systems,s,surgiality,surgical
      simulation,visualization, REV, PRS},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • K. T. Rajamani, S. C. Joshi, and M. A. Styner, “Bone model morphing for enhanced surgical visualization,” Construction, pp. 1255-1258, 2004.
    [Bibtex]
    @ARTICLE{Rajamani2004,
      author = {Rajamani, Kumar T and Joshi, Sarang C and Styner, Martin A},
      title = {Bone model morphing for enhanced surgical visualization},
      journal = {Construction},
      year = {2004},
      pages = {1255 - 1258},
      abstract = {We propose a novel method for reconstructing a complete 3D model of
      a given anatomy from minimal information. This reconstruction provides
      an appropriate intra-operative 3D visualization without the need
      for a pre or intra-operative imaging. Our method fits a statistical
      deformable model to sparse 3D data consisting of digitized landmarks
      and bone surface points. The method also allows the incorporation
      of non-spatial data such as patient height and weight. The statistical
      model is constructed using Principal Component Analysis (PCA) from
      a set of training objects. Our morphing method then computes a
      Mahalanobis distance weighted least square fit of the model by solving
      a linear equation system. First experimental promising results
      with model generated from 14 femoral head are presented.},
      file = {Rajamani2004.pdf:Rajamani2004.pdf:PDF},
      keywords = {TEC},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • E. Raposio, F. Norat, A. Orefice, C. Capello, A. Margara, M. Faggioni, N. Vercellino, and P. L. Santi, “Computer-enhanced preoperative planning of soft tissue sarcoma surgery,” in Molecular, Cellular and Tissue Engineering, 2002. Proceedings of the IEEE-EMBS Special Topic Conference on, 2002, pp. 196-197.
    [Bibtex]
    @INPROCEEDINGS{Raposio2002,
      author = {Raposio, E. and Norat, F. and Orefice, A. and Capello, C. and Margara,
      A. and Faggioni, M. and Vercellino, N. and Santi, P.L.},
      title = {Computer-enhanced preoperative planning of soft tissue sarcoma surgery},
      booktitle = {Molecular, Cellular and Tissue Engineering, 2002. Proceedings of
      the IEEE-EMBS Special Topic Conference on},
      year = {2002},
      pages = { 196 - 197},
      abstract = {When dealing with soft-tissue sarcomas of the extremities and aiming
      to perform limb-sparing surgery, it is mandatory to define the extent
      of the tumor and the connections with the surrounding tissues and
      anatomic structures. We started with MR images of 256×256 pixels
      (each pixel represented by 12 bits), at the same slice location in
      a given patient. A new single image representation of all three images
      is then generated by color compositing these into a 24-bit RGB framebuffer
      on a HP 900/735 workstation. PD was mapped to green, T1 to blue and
      T2 to red. This environment allows the creation of a spatial manipulation
      system in a 3D object space as well as the interactive navigation
      of the operator through the same space. The surgeon can thus easily
      discriminate tissues by their color characteristics. The color statistics
      of the image can be used to interactively segment out different tissues
      for removal and/or closer inspection. All this, in our experience,
      has greatly facilitated the simulation of soft-tissue sarcoma resections
      In our opinion, this computer-based technique, aiming to enhance
      the preoperative-planning's accuracy in the surgical management of
      soft-tissue sarcoma of the extremities, aids in performing a radical
      conservative eradication of the malignancy.},
      file = {:Raposio2002.pdf:PDF},
      issn = { },
      keywords = {HP 900/735 workstation; color characteristics; color composited images;
      computer-based technique; computer-enhanced preoperative planning;
      extremities; image color statistics; interactive navigation; magnetic
      resonance imaging; medical diagnostic imaging; radical conservative
      malignancy eradication; single image representation; soft-tissue
      sarcoma resections simulation; spatial manipulation system; surgical
      management; tumor extent definition; biological tissues; biomedical
      MRI; cancer; image enhancement; medical image processing; surgery;,
      TEC},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • B. Reitinger, A. Bornik, R. Beichel, and D. Schmalstieg, “Liver Surgery Planning Using Virtual Reality,” Liver, iss. December, pp. 36-47, 2006.
    [Bibtex]
    @ARTICLE{Reitinger2006,
      author = {Reitinger, Bernhard and Bornik, Alexander and Beichel, Reinhard and
      Schmalstieg, Dieter},
      title = {Liver Surgery Planning Using Virtual Reality},
      journal = {Liver},
      year = {2006},
      pages = {36 - 47},
      number = {December},
      file = {Reitinger2006.pdf:Reitinger2006.pdf:PDF},
      keywords = {APP, PLA, HES, SUR, AUR, STV},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • B. Reitinger, P. Werlberger, A. Bornik, R. Beichel, and D. Schmalstieg, “Spatial Measurements for Medical Augmented Reality,” in Proceedings of the 4th IEEE/ACM International Symposium on Mixed and Augmented Reality, Washington, DC, USA, 2005, pp. 208-209.
    [Bibtex]
    @INPROCEEDINGS{Reitinger2005,
      author = {Reitinger, Bernhard and Werlberger, Pascal and Bornik, Alexander
      and Beichel, Reinhard and Schmalstieg, Dieter},
      title = {Spatial Measurements for Medical Augmented Reality},
      booktitle = {Proceedings of the 4th IEEE/ACM International Symposium on Mixed
      and Augmented Reality},
      year = {2005},
      series = {ISMAR '05},
      pages = {208 - 209},
      address = {Washington, DC, USA},
      publisher = {IEEE Computer Society},
      acmid = {1105215},
      file = {Reitinger2005.pdf:Reitinger2005.pdf:PDF},
      isbn = {0-7695-2459-1},
      keywords = {TEC},
      numpages = {2},
      owner = {Thomas},
      timestamp = {2011.02.01}
    }
  • F. Rengier, A. Mehndiratta, H. von Tengg-Kobligk, C. M. Zechmann, R. Unterhinninghofen, H-U. Kauczor, and F. L. Giesel, “3D printing based on imaging data: review of medical applications.,” International journal of computer assisted radiology and surgery, vol. 5, iss. 4, pp. 335-41, 2010.
    [Bibtex]
    @ARTICLE{Rengier2010,
      author = {Rengier, F and Mehndiratta, A and von Tengg-Kobligk, H and Zechmann,
      C M and Unterhinninghofen, R and Kauczor, H-U and Giesel, F L},
      title = {3D printing based on imaging data: review of medical applications.},
      journal = {International journal of computer assisted radiology and surgery},
      year = {2010},
      volume = {5},
      pages = {335-41},
      number = {4},
      month = {July},
      abstract = {PURPOSE: Generation of graspable three-dimensional objects applied
      for surgical planning, prosthetics and related applications using
      3D printing or rapid prototyping is summarized and evaluated. MATERIALS
      AND METHODS: Graspable 3D objects overcome the limitations of 3D
      visualizations which can only be displayed on flat screens. 3D objects
      can be produced based on CT or MRI volumetric medical images. Using
      dedicated post-processing algorithms, a spatial model can be extracted
      from image data sets and exported to machine-readable data. That
      spatial model data is utilized by special printers for generating
      the final rapid prototype model. RESULTS: Patient-clinician interaction,
      surgical training, medical research and education may require graspable
      3D objects. The limitations of rapid prototyping include cost and
      complexity, as well as the need for specialized equipment and consumables
      such as photoresist resins. CONCLUSIONS: Medical application of rapid
      prototyping is feasible for specialized surgical planning and prosthetics
      applications and has significant potential for development of new
      medical applications.},
      file = {Rengier2010.pdf:Rengier2010.pdf:PDF},
      issn = {1861-6429},
      keywords = {and implants,computer-assisted image,medical education,patient care,prostheses,rapid
      prototyping, REV, RPP},
      owner = {thomaskroes},
      pmid = {20467825},
      timestamp = {2010.10.22}
    }
  • J. Rexilius, S. Warfield, C. Guttmann, X. Wei, R. Benson, L. Wolfson, M. Shenton, H. Handels, and R. Kikinis, “A Novel Nonrigid Registration Algorithm and Applications,” in Medical Image Computing and Computer-Assisted Intervention – MICCAI 2001, W. Niessen and M. Viergever, Eds., Springer Berlin / Heidelberg, 2001, vol. 2208, pp. 923-931.
    [Bibtex]
    @INCOLLECTION{Rexilius2001,
      author = {Rexilius, J. and Warfield, S. and Guttmann, C. and Wei, X. and Benson,
      R. and Wolfson, L. and Shenton, M. and Handels, H. and Kikinis, R.},
      title = {A Novel Nonrigid Registration Algorithm and Applications},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention – MICCAI
      2001},
      publisher = {Springer Berlin / Heidelberg},
      year = {2001},
      editor = {Niessen, Wiro and Viergever, Max},
      volume = {2208},
      series = {Lecture Notes in Computer Science},
      pages = {923 - 931},
      abstract = {In this paper we describe a new algorithm for nonrigid registration
      of brain images based on an elastically deformable model. The use
      of registration methods has become an important tool for computer-assisted
      diagnosis and surgery. Our goal was to improve analysis in various
      applications of neurology and neurosurgery by improving nonrigid
      registration. A local gray level similarity measure is used to make
      an initial sparse displacement field estimate. The field is initially
      estimated at locations determined by local features, and then a linear
      elastic model is used to infer the volumetric deformation across
      the image. The associated partial differential equation is solved
      by a finite element approach. A model of empirically observed variability
      of the brain was created from a dataset of 154 young adults. Both
      homogeneous and inhomogeneous elasticity models were compared. The
      algorithm has been applied to medical applications including intraoperative
      images of neurosurgery showing brain shift and a study of gait and
      balance disorder.},
      affiliation = {Surgical Planning Laboratory, Harvard Medical School \& Brigham
      and Women’s Hospital, 75 Francis St., Boston, MA 02115, USA},
      file = {Rexilius2001.pdf:Rexilius2001.pdf:PDF},
      keywords = {TEC},
      owner = {thomaskroes},
      timestamp = {2011.01.11}
    }
  • M. L. Rhodes, “Computer graphics and medicine: a complex partnership,” Computer Graphics and Applications, IEEE, vol. 17, iss. 1, pp. 22-28, 1997.
    [Bibtex]
    @ARTICLE{Rhodes1997,
      author = {Rhodes, M.L.},
      title = {Computer graphics and medicine: a complex partnership},
      journal = {Computer Graphics and Applications, IEEE},
      year = {1997},
      volume = {17},
      pages = {22 - 28},
      number = {1},
      abstract = {Developers of graphics applications in medicine face unique challenges
      in working closely with physicians. They must understand both the
      technology they support and the limitations of the techniques they
      use. In this article, I describe the unique challenges facing application
      developers and review the significant techniques in terms of their
      strengths and weaknesses in particular applications. In some cases,
      I also describe lessons learned about misapplying techniques. Hopefully
      these insights will help to redirect application “misses”
      and focus effort where it is most needed},
      file = {Rhodes1997.pdf:Rhodes1997.pdf:PDF},
      issn = {0272-1716},
      keywords = {application development;computer graphics;graphics applications;medicine;physicians;technique
      misapplication;computer graphics;medical computing;},
      owner = {thomaskroes},
      timestamp = {2011.01.26}
    }
  • M. L. Rhodes, “Computer graphics in medicine: the past decade,” Computer Graphics and Applications, IEEE, vol. 11, iss. 1, pp. 52-54, 1991.
    [Bibtex]
    @ARTICLE{Rhodes1991,
      author = {Rhodes, M.L.},
      title = {Computer graphics in medicine: the past decade},
      journal = {Computer Graphics and Applications, IEEE},
      year = {1991},
      volume = {11},
      pages = {52 - 54},
      number = {1},
      month = jan,
      abstract = {Clinical applications of computer graphics over the past decade are
      reviewed. Early distrust of the images provided by computer graphics
      is discussed. Differences between medical imagery and that for scientific
      visualization are examined},
      file = {Rhodes1991.pdf:Rhodes1991.pdf:PDF},
      issn = {0272-1716},
      keywords = {clinical applications;computer graphics;medical imagery;medicine;scientific
      visualization;computer graphics;medical computing;},
      owner = {thomaskroes},
      timestamp = {2011.01.26}
    }
  • M. Richter, Minimally Invasive Surgery in Orthopedics, Springer Science + Business, 2010.
    [Bibtex]
    @BOOK{Richter2010,
      title = {Minimally Invasive Surgery in Orthopedics},
      publisher = {Springer Science + Business},
      year = {2010},
      author = {M. Richter},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • C. Rieder, F. Ritter, M. Raspe, and H. -O. Peitgen, “Interactive Visualization of Multimodal Volume Data for Neurosurgical Tumor Treatment,” Computer Graphics Forum (Special Issue on Eurographics Symposium on Visualization), vol. 27, iss. 3, pp. 1055-1062, 2008.
    [Bibtex]
    @ARTICLE{Rieder2008,
      author = {C. Rieder and F. Ritter and M. Raspe and H.-O. Peitgen},
      title = {Interactive Visualization of Multimodal Volume Data for Neurosurgical
      Tumor Treatment},
      journal = {Computer Graphics Forum (Special Issue on Eurographics Symposium
      on Visualization)},
      year = {2008},
      volume = {27},
      pages = {1055-1062},
      number = {3},
      date-added = {2008-10-08 13:37:25 +0200},
      date-modified = {2008-10-08 13:38:43 +0200},
      file = {Rieder2008.pdf:Rieder2008.pdf:PDF},
      keywords = {TEC},
      owner = {cpbotha},
      timestamp = {2011.04.01}
    }
  • [DOI] C. Rieder, A. Weihusen, C. Schumann, S. Zidowitz, and H. Peitgen, “Visual Support for Interactive Post-Interventional Assessment of Radiofrequency Ablation Therapy,” Computer Graphics Forum, vol. 29, iss. 3, pp. 1093-1102, 2010.
    [Bibtex]
    @ARTICLE{Rieder2010,
      author = {Rieder, Christian and Weihusen, Andreas and Schumann, Christian and
      Zidowitz, Stephan and Peitgen, Heinz-Otto},
      title = {Visual Support for Interactive Post-Interventional Assessment of
      Radiofrequency Ablation Therapy},
      journal = {Computer Graphics Forum},
      year = {2010},
      volume = {29},
      pages = {1093 - 1102},
      number = {3},
      abstract = {Abstract Percutaneous radiofrequency (RF) ablation is a minimally
      invasive, image-guided therapy for the treatment of liver tumors.
      The assessment of the ablation area (coagulation) is performed to
      verify the treatment success as an essential part of the therapy.
      Traditionally, pre- and post-interventional CT images are used to
      visually compare the shape, size, and position of tumor and coagulation.In
      this work, we present a novel visualization as well as a navigation
      tool, the so-called tumor map. The tumor map is a pseudo-cylindrical
      mapping of the tumor surface onto a 2D image. It is used for a combined
      visualization of all ablation zones of the tumor to allow a reliable
      therapy assessment. Additionally, the tumor map serves as an interactive
      tool for intuitive navigation within the 3D volume rendering of the
      tumor vicinity as well as with familiar 2D viewers.},
      doi = {10.1111/j.1467-8659.2009.01665.x},
      file = {Rieder2010.pdf:Rieder2010.pdf:PDF},
      issn = {1467-8659},
      keywords = {I.3.6 [Computer Graphics]: Methodology and Techniques—Interaction
      techniques, I.3.8 [Computer Graphics]: Applications—, J.3 [Life
      And Medical Sciences]: Health—, TEC},
      publisher = {Blackwell Publishing Ltd},
      url = {http://dx.doi.org/10.1111/j.1467-8659.2009.01665.x}
    }
  • F. Ritter, B. Berendt, B. Fischer, R. Richter, and B. Preim, “Virtual 3D Jigsaw Puzzles: Studying the Effect of Exploring Spatial Relations with Implicit Guidance,” 2000.
    [Bibtex]
    @ARTICLE{Ritter2000,
      author = {Ritter, Felix and Berendt, Bettina and Fischer, Berit and Richter,
      Robert and Preim, Bernhard},
      title = {Virtual 3D Jigsaw Puzzles: Studying the Effect of Exploring Spatial
      Relations with Implicit Guidance},
      year = {2000},
      abstract = {This paper investigates the engaging concept of virtual 3D jigsaw
      puzzles to foster the understanding of spatial relations within
      technical or biological systems by means of virtual models. Employing
      an application in anatomy education, it answers the question: How
      does guided spatial exploration, arising while composing a 3D jigsaw,
      affect the acquisition of spatial-functional understanding in virtual
      learning environments (VLE)? In this study, 16 physiotherapy students
      were interviewed before and immediately after using either a virtual
      3D jigsaw puzzle enabled VLE or a simplified version without the
      interaction specific to the 3D jigsaw concept. Results indicate
      that students using the jigsaw-enabled VLE achieved a significantly
      better understanding of the spatial and functional correlations illustrated
      by the model. These findings suggest that the concept of a 3D jigsaw
      puzzle, with its implicit guidance, facilitates and advances learner’s
      understanding of spatial correlations and related functionality.},
      file = {Ritter2000.pdf:Ritter2000.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • F. Ritter, C. Hansen, V. Dicken, O. Konrad, B. Preim, and H. O. Peitgen, “Real-time illustration of vascular structures,” IEEE transactions on visualization and computer graphics, pp. 877-884, 2006.
    [Bibtex]
    @ARTICLE{Ritter2006,
      author = {Ritter, F. and Hansen, C. and Dicken, V. and Konrad, O. and Preim,
      B. and Peitgen, H.O.},
      title = {Real-time illustration of vascular structures},
      journal = {IEEE transactions on visualization and computer graphics},
      year = {2006},
      pages = {877 - 884},
      file = {Ritter2006.pdf:Ritter2006.pdf:PDF},
      issn = {1077-2626},
      keywords = {TEC},
      owner = {Thomas},
      publisher = {Published by the IEEE Computer Society},
      timestamp = {2011.01.31}
    }
  • F. Ritter, M. Hindennach, W. Lamadé, K. Oldhafer, and H. Peitgen, “Intraoperative Adaptation of Preoperative Risk Analysis in Oncological Liver Surgery,” Proceedings of CURAC (Berlin), September 2005.
    [Bibtex]
    @ARTICLE{Ritter2005,
      author = {Ritter, F. and Hindennach, M. and Lamad{\'e}, W. and Oldhafer, K.
      and Peitgen, HO},
      title = {Intraoperative Adaptation of Preoperative Risk Analysis in Oncological
      Liver Surgery},
      journal = {Proceedings of CURAC (Berlin), September 2005},
      file = {Ritter2005.pdf:Ritter2005.pdf:PDF},
      owner = {Thomas},
      timestamp = {2011.01.31}
    }
  • R. A. Robb, “3-D visualization in biomedical applications,” 2003.
    [Bibtex]
    @ARTICLE{Robb2003,
      author = {Robb, R.A.},
      title = {3-D visualization in biomedical applications},
      year = {2003},
      file = {Robb2003.pdf:Robb2003.pdf:PDF},
      keywords = {REV},
      owner = {thomaskroes},
      publisher = {Annual Reviews 4139 El Camino Way, PO Box 10139, Palo Alto, CA 94303-0139,
      USA},
      timestamp = {2011.01.25}
    }
  • R. A. Robb, “VR assisted surgery planning,” Engineering in Medicine and Biology Magazine, IEEE, vol. 15, iss. 2, pp. 60-69, 1996.
    [Bibtex]
    @ARTICLE{Robb1996,
      author = {Robb, R.A.},
      title = {VR assisted surgery planning},
      journal = {Engineering in Medicine and Biology Magazine, IEEE},
      year = {1996},
      volume = {15},
      pages = {60 - 69},
      number = {2},
      abstract = {We are developing a system called the virtual reality assisted surgery
      program (VRASP) for implementation in the hospital operating room
      (OR). VRASP will give the surgeon flexible intraoperatively computational
      support. It will permit modification and control of very large scan
      data sets in real time. It will render and transmit virtual imagery
      in response to the surgeon's commands without interfering with normal
      surgical activities. And it will register the displayed imagery simultaneously
      with respect to the surgeon and the patient, without perceptible
      computing or display lag. VRASP is being developed specifically to
      assist surgeons during craniofacial, neurologic, orthopedic, thoracic,
      and urologic surgery. VRASP will enable surgeons to interactively
      visualize 3D renderings of CT and MRI data with hands-free manipulation
      of the virtual display. The surgeon will be able to scale, orient,
      and position prescanned body imagery on-line in real time from any
      desired perspective. The clinical goal is dynamic fusing of 3D body
      scan data with the actual patient in the OR. The customized interface
      will permit ready on-line access to the preoperative plan and to
      update measurement and analysis based on the real time OR data},
      file = {Robb1996.pdf:Robb1996.pdf:PDF},
      issn = {0739-5175},
      keywords = {CT data;MRI data;craniofacial surgery;displayed imagery;flexible intraoperatively
      computational support;hands-free manipulation;hospital operating
      room;interactive 3D rendering visualisation;large scan data sets;neurologic
      surgery;orthopedic surgery;patient;prescanned body imagery;real time
      system;surgeon commands;surgical activities;thoracic surgery;urologic
      surgery;virtual imagery transmission;virtual reality assisted surgery
      planning;virtual reality assisted surgery program;medical computing;neurophysiology;planning;rendering
      (computer graphics);surgery;virtual reality;},
      owner = {thomaskroes},
      timestamp = {2011.01.25}
    }
  • M. Robiony, I. Salvo, F. Costa, N. Zerman, C. Bandera, S. Filippi, M. Felice, and M. Politi, “Accuracy of virtual reality and stereolithographic models in maxillo-facial surgical planning,” Journal of Craniofacial Surgery, vol. 19, iss. 2, p. 482, 2008.
    [Bibtex]
    @ARTICLE{Robiony2008,
      author = {Robiony, M. and Salvo, I. and Costa, F. and Zerman, N. and Bandera,
      C. and Filippi, S. and Felice, M. and Politi, M.},
      title = {Accuracy of virtual reality and stereolithographic models in maxillo-facial
      surgical planning},
      journal = {Journal of Craniofacial Surgery},
      year = {2008},
      volume = {19},
      pages = {482},
      number = {2},
      issn = {1049-2275},
      keywords = {REV, RPP, CMS},
      owner = {Thomas},
      timestamp = {2011.02.09}
    }
  • M. Robiony, I. Salvo, F. Costa, N. Zerman, M. Bazzocchi, F. Toso, C. Bandera, S. Filippi, M. Felice, and M. Politi, “Virtual reality surgical planning for maxillofacial distraction osteogenesis: the role of reverse engineering rapid prototyping and cooperative work,” Journal of oral and maxillofacial surgery, vol. 65, iss. 6, pp. 1198-1208, 2007.
    [Bibtex]
    @ARTICLE{Robiony2007,
      author = {Robiony, M. and Salvo, I. and Costa, F. and Zerman, N. and Bazzocchi,
      M. and Toso, F. and Bandera, C. and Filippi, S. and Felice, M. and
      Politi, M.},
      title = {Virtual reality surgical planning for maxillofacial distraction osteogenesis:
      the role of reverse engineering rapid prototyping and cooperative
      work},
      journal = {Journal of oral and maxillofacial surgery},
      year = {2007},
      volume = {65},
      pages = {1198 - 1208},
      number = {6},
      file = {Robiony2007.pdf:Robiony2007.pdf:PDF},
      issn = {0278-2391},
      keywords = {CMS, REV, RPP, SLR, SUR},
      owner = {Thomas},
      publisher = {Elsevier},
      timestamp = {2011.02.09}
    }
  • M. A. Rodriguez-Florido, K. Krissian, J. Ruiz-Alzola, and C. Westin, “Comparison of Two Restoration Techniques in the Context of 3D Medical Imaging,” Image (Rochester, N.Y.), pp. 1031-1039, 2001.
    [Bibtex]
    @ARTICLE{Rodriguez2001,
      author = {Rodriguez-Florido, Miguel A and Krissian, Karl and Ruiz-Alzola, Juan
      and Westin, Carl-Fredrik},
      title = {Comparison of Two Restoration Techniques in the Context of 3D Medical
      Imaging},
      journal = {Image (Rochester, N.Y.)},
      year = {2001},
      pages = {1031 - 1039},
      file = {Rodriguez2001.pdf:Rodriguez2001.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • T. Ropinski and B. Preim, “Taxonomy and Usage Guidelines for Glyph-based Medical Visualization,” in Proceedings of the 19th Conference on Simulation and Visualization (SimVis08), 2008, pp. 121-138.
    [Bibtex]
    @CONFERENCE{Ropinski2008,
      author = {Ropinski, T. and Preim, B.},
      title = {Taxonomy and Usage Guidelines for Glyph-based Medical Visualization},
      booktitle = {Proceedings of the 19th Conference on Simulation and Visualization
      (SimVis08)},
      year = {2008},
      pages = {121 - 138},
      organization = {Citeseer},
      file = {Ropinski2008.pdf:Ropinski2008.pdf:PDF},
      keywords = {REV},
      owner = {thomaskroes},
      timestamp = {2010.11.18}
    }
  • C. Roux, “Image and information guided minimally invasive orthopedic surgery,” in Information Technology and Applications in Biomedicine, 2008. ITAB 2008. International Conference on, 2008, pp. 21-22.
    [Bibtex]
    @INPROCEEDINGS{Roux2008,
      author = {Roux, C.},
      title = {Image and information guided minimally invasive orthopedic surgery},
      booktitle = {Information Technology and Applications in Biomedicine, 2008. ITAB
      2008. International Conference on},
      year = {2008},
      pages = {21 -22},
      month = {May},
      abstract = {This paper will survey recent advances in research aimed at developing
      new image and information guided procedures in orthopedic surgery.
      These procedures are based on new methodological studies oriented
      towards the modelling of bone structure, which allow the physician
      to better understand the relationship between form and functionality
      of movements in the osteo-articular system and to improve diagnosis,
      therapeutic action and follow-up of the patients.},
      file = {:Roux2008.pdf:PDF},
      keywords = {bone structure modelling;image guided procedures;information guided
      procedures;orthopedic surgery;osteo articular system;biomechanics;bone;orthopaedics;physiological
      models;surgery;},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • J. Ruppin, A. Popovic, M. Strauss, E. Spüntrup, A. Steiner, and C. Stoll, “Evaluation of the accuracy of three different computer-aided surgery systems in dental implantology: optical tracking vs. stereolithographic splint systems,” Clinical Oral Implants Research, vol. 19, iss. 7, pp. 709-716, 2008.
    [Bibtex]
    @ARTICLE{Ruppin2008,
      author = {Ruppin, J. and Popovic, A. and Strauss, M. and Sp{\"u}ntrup, E.
      and Steiner, A. and Stoll, C.},
      title = {Evaluation of the accuracy of three different computer-aided surgery
      systems in dental implantology: optical tracking vs. stereolithographic
      splint systems},
      journal = {Clinical Oral Implants Research},
      year = {2008},
      volume = {19},
      pages = {709--716},
      number = {7},
      file = {Ruppin2008.pdf:Ruppin2008.pdf:PDF},
      issn = {1600-0501},
      owner = {thomaskroes},
      publisher = {Wiley Online Library},
      timestamp = {2010.12.22}
    }
  • T. C. Ryken, J. Kim, B. D. Owen, G. E. Christensen, and J. M. Reinhardt, “Engineering patient-specific drill templates and bioabsorbable posterior cervical plates: a feasibility study,” Journal of Neurosurgery: Spine, vol. 10, iss. 2, pp. 129-132, 2009.
    [Bibtex]
    @ARTICLE{Ryken2009,
      author = {Ryken, T.C. and Kim, J. and Owen, B.D. and Christensen, G.E. and
      Reinhardt, J.M.},
      title = {Engineering patient-specific drill templates and bioabsorbable posterior
      cervical plates: a feasibility study},
      journal = {Journal of Neurosurgery: Spine},
      year = {2009},
      volume = {10},
      pages = {129 - 132},
      number = {2},
      file = {Ryken2009.pdf:Ryken2009.pdf:PDF},
      issn = {1547-5654},
      owner = {Thomas},
      publisher = {American Association of Neurological Surgeons},
      timestamp = {2011.02.07}
    }
  • A. Saad, A. El-Bialy, A. Kandil, and A. S. Ahmed, “Automatic cephalometric analysis using active appearance model and simulated annealing,” in The International Congress for global Science and Technology, 2006, p. 51.
    [Bibtex]
    @CONFERENCE{Saad2006,
      author = {Saad, AA and El-Bialy, A. and Kandil, AH and Ahmed, A.S.},
      title = {Automatic cephalometric analysis using active appearance model and
      simulated annealing},
      booktitle = {The International Congress for global Science and Technology},
      year = {2006},
      pages = {51},
      file = {Saad2006.pdf:Saad2006.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2011.01.10}
    }
  • H. F. Sailer, P. E. Haers, C. P. E. Zollikofer, T. Warnke, F. R. Caris, and P. Stucki, “The value of stereolithographic models for preoperative diagnosis of craniofacial deformities and planning of surgical corrections,” International Journal of Oral and Maxillofacial Surgery, vol. 27, iss. 5, pp. 327-333, 1998.
    [Bibtex]
    @ARTICLE{Sailer1998,
      author = {H.F. Sailer and P.E. Haers and C.P.E. Zollikofer and T. Warnke and
      F.R. Caris and P. Stucki},
      title = {The value of stereolithographic models for preoperative diagnosis
      of craniofacial deformities and planning of surgical corrections},
      journal = {International Journal of Oral and Maxillofacial Surgery},
      year = {1998},
      volume = {27},
      pages = {327 - 333},
      number = {5},
      abstract = {The purpose of this study was to assess the importance of stereolithographic
      models (SLMs) for preoperative diagnosis and planning in craniofacial
      surgery and to examine whether these models offer valuable additional
      information as compared to normal CT scans and 3D CT images. Craniofacial
      SLMs of 20 patients with craniomaxillofacial pathology were made.
      A helical volume CT scan of the anatomic area involved delivered
      the necessary data for their construction. These were built with
      an SLA 250 stereolithography apparatus (3D-Systems, Valencia, CA,
      USA), steered by FORM-IT/DCS® software (University of Zurich, Switzerland).
      The stereolithography models were classified according to pathology,
      type of surgery and their relevance for surgical planning. Though
      not objectively measurable, it was beyond doubt that relevant additional
      information for the surgeon was obtained in cases of hypertelorism,
      severe asymmetries of the neuro- and viscerocranium, complex cranial
      synostoses and large skull defects. The value of these models as
      realistic “duplicates” of complex or rare dysmorphic craniofacial
      pathology for the purpose of creating a didactic collection should
      also be emphasized. The models proved to be less useful in cases
      of consolidated fractures of the periorbital and naso-ethmoidal complex,
      except where there was major dislocation.},
      file = {Sailer1998.pdf:Sailer1998.pdf:PDF},
      issn = {0901-5027},
      keywords = {craniofacial surgery, CMS, RPP},
      owner = {Thomas},
      timestamp = {2011.02.07}
    }
  • P. Saiviroonporn, A. Robatino, J. Zahajszky, R. Kikinis, and F. A. Jolesz, “Real-time interactive three-dimensional segmentation.,” Academic radiology, vol. 5, iss. 1, pp. 49-56, 1998.
    [Bibtex]
    @ARTICLE{Saiviroonporn1998,
      author = {Saiviroonporn, P and Robatino, A and Zahajszky, J and Kikinis, R
      and Jolesz, F A},
      title = {Real-time interactive three-dimensional segmentation.},
      journal = {Academic radiology},
      year = {1998},
      volume = {5},
      pages = {49 - 56},
      number = {1},
      month = {January},
      abstract = {RATIONALE AND OBJECTIVES: The authors developed a real-time, interactive
      three-dimensional (3D) segmentation pipeline that uses relatively
      low-level segmentation operations and provides two-dimensional and
      3D visualization through a user-friendly graphical interface. MATERIALS
      AND METHODS: The low-level segmentation processes were implemented
      on a massively parallel computer; the graphical user interface was
      written with a public domain software toolkit. Since their implementation
      2 years ago, these segmentation tools have been applied to approximately
      300 computed tomographic and magnetic resonance imaging data sets.
      Two typical clinical cases are presented to demonstrate their applications.
      RESULTS: The entire processing pipeline can be executed in a few
      seconds. The tools are simple to learn because they involve the use
      of low-level procedures and a user-friendly graphical interface with
      a short interactive response time. Segmentation of the bones, aorta,
      kidneys, and kidney cysts in case 1 could be performed in about 16
      minutes. The time needed to segment each organ in case 2 ranged from
      about 15 minutes for the skin and brain to about 1 minute for the
      tumor. CONCLUSION: Satisfactory results can be obtained in a relatively
      short time with the real-time interactive 3D segmentation system.
      Operation of the system can be easily learned by a wide variety of
      nonspecialized users with some medical background.},
      file = {Saiviroonporn1998.pdf:Saiviroonporn1998.pdf:PDF},
      issn = {1076-6332},
      keywords = {Algorithms,Aorta,Aorta: pathology,Bone and Bones,Bone and Bones: pathology,Brain,Brain:
      pathology,Humans,Image Processing, Computer-Assisted,Image Processing,
      Computer-Assisted: methods,Kidney,Kidney: pathology,Liver,Liver:
      pathology,Magnetic Resonance Imaging,Reproducibility of Results,User-Computer
      Interface, IMP},
      owner = {thomaskroes},
      pmid = {9442207},
      timestamp = {2010.10.25}
    }
  • S. K. Sarker and B. Patel, “Simulation and surgical training,” International Journal of Clinical Practice, vol. 61, iss. 12, pp. 2120-2125, 2007.
    [Bibtex]
    @ARTICLE{Sarker2007,
      author = {Sarker, S. K. and Patel, B.},
      title = {Simulation and surgical training},
      journal = {International Journal of Clinical Practice},
      year = {2007},
      volume = {61},
      pages = {2120 - 2125},
      number = {12},
      abstract = {Summary The aim of this review was to outline current forms of surgical
      simulation and methods of assessing technical skills using these
      forms of simulation. To review this subject, a literature search
      was done using key words ‘assessment’, ‘simulation’, ‘surgery’,
      ‘technical skills’ and ‘virtual reality’. Simulation in surgery
      has several forms, inorganic (synthetic & computer) and organic (animal
      or cadaver). Surgical simulation is a mode of training which is promising
      and may be effective. Technical errors in the simulated environment
      do not have clinical consequences and does not have a morbidity or
      mortality. We must ensure that the competent skills learnt in the
      simulation environment are translated to the real environment. This
      can be achieved if the same assessment tools are used in both environments.
      Surgical training is entering a new era, with increased scrutiny
      and an evolving work and training environment. We as surgical teachers
      must ensure that the surgeons of the future are as competent as or
      better than their predecessors using these new modes of training
      which we have access to.},
      file = {Sarker2007.pdf:Sarker2007.pdf:PDF},
      issn = {1742-1241},
      keywords = {PRS, REV},
      owner = {Thomas},
      publisher = {Blackwell Publishing Ltd},
      timestamp = {2011.02.23}
    }
  • A. Sarti, R. Gori, and C. Lamberti, “A physically based model to simulate maxillo-facial surgery from 3D CT images,” Future Generation Computer Systems, vol. 15, iss. 2, pp. 217-222, 1999.
    [Bibtex]
    @ARTICLE{Sarti1999,
      author = {Sarti, A. and Gori, R. and Lamberti, C.},
      title = {A physically based model to simulate maxillo-facial surgery from
      3D CT images},
      journal = {Future Generation Computer Systems},
      year = {1999},
      volume = {15},
      pages = {217 - 222},
      number = {2},
      file = {Sarti1999.pdf:Sarti1999.pdf:PDF},
      issn = {0167-739X},
      owner = {Thomas},
      publisher = {North-Holland (Elsevier)},
      timestamp = {2011.02.08}
    }
  • R. Satava, “Historical Review of Surgical Simulation—A Personal Perspective,” World Journal of Surgery, vol. 32, pp. 141-148, 2008.
    [Bibtex]
    @ARTICLE{Satava2008,
      author = {Satava, Richard},
      title = {Historical Review of Surgical Simulation—A Personal Perspective},
      journal = {World Journal of Surgery},
      year = {2008},
      volume = {32},
      pages = {141 - 148},
      abstract = {Although simulation is relatively new to surgical education, there
      is a long history in many other disciplines, such as military, aviation,
      and nuclear power plant operations, among others. In the late 1980s
      these technologies began to be adapted to the surgical world, along
      with the new technology of virtual reality. This is a review of the
      introduction of manikins, computers, and virtual reality into education
      and training for surgical skills. Two concomitant revolutions occurred:
      objective assessment of surgical skills and converting training from
      the apprenticeship model to one of criterion-based training. A personal
      perspective on these developments adds information not previously
      published.},
      affiliation = {University of Washington Medical Center Seattle Department of Surgery
      1959 Pacific Street NE Seattle Washington 98195 USA},
      file = {Satava2008.pdf:Satava2008.pdf:PDF},
      issn = {0364-2313},
      issue = {2},
      keyword = {Medicine},
      keywords = {PRS, REV},
      owner = {Th},
      publisher = {Springer New York},
      timestamp = {2011.03.04},
      url = {http://dx.doi.org/10.1007/s00268-007-9374-y}
    }
  • R. Satava, “Accomplishments and challenges of surgical simulation,” Surgical Endoscopy, vol. 15, pp. 232-241, 2001.
    [Bibtex]
    @ARTICLE{Satava2001,
      author = {Satava, R.},
      title = {Accomplishments and challenges of surgical simulation},
      journal = {Surgical Endoscopy},
      year = {2001},
      volume = {15},
      pages = {232 - 241},
      abstract = {For nearly a decade, advanced computer technologies have created extraordinary
      educational tools using three-dimensional (3D) visualization and
      virtual reality. Pioneering efforts in surgical simulation with these
      tools have resulted in a first generation of simulators for surgical
      technical skills. Accomplishments include simulations with 3D models
      of anatomy for practice of surgical tasks, initial assessment of
      student performance in technical skills, and awareness by professional
      societies of potential in surgical education and certification. However,
      enormous challenges remain, which include improvement of technical
      fidelity, standardization of accurate metrics for performance evaluation,
      integration of simulators into a robust educational curriculum, stringent
      evaluation of simulators for effectiveness and value added to surgical
      training, determination of simulation application to certification
      of surgical technical skills, and a business model to implement and
      disseminate simulation successfully throughout the medical education
      community. This review looks at the historical progress of surgical
      simulators, their accomplishments, and the challenges that remain.},
      affiliation = {Yale University School of Medicine 40 Temple Street 06510 New Haven
      CT USA},
      file = {Satava2001.pdf:Satava2001.pdf:PDF},
      issn = {0930-2794},
      issue = {3},
      keyword = {Medicine},
      keywords = {REV, PRS},
      owner = {Th},
      publisher = {Springer New York},
      timestamp = {2011.03.04},
      url = {http://dx.doi.org/10.1007/s004640000369}
    }
  • R. M. Satava, “Emerging technologies for surgery in the 21st century.,” Archives of surgery (Chicago, Ill. : 1960), vol. 134, iss. 11, pp. 1197-202, 1999.
    [Bibtex]
    @ARTICLE{Satava1999,
      author = {Satava, R M},
      title = {Emerging technologies for surgery in the 21st century.},
      journal = {Archives of surgery (Chicago, Ill. : 1960)},
      year = {1999},
      volume = {134},
      pages = {1197 - 202},
      number = {11},
      month = {November},
      abstract = {Laparoscopic surgery is a transition technology that marked the beginning
      of the information age revolution for surgery. Telepresence surgery,
      robotics, tele-education, and telementoring are the next step in
      the revolution. Using computer-aided systems such as robotics and
      image-guided surgery, the next generation of surgical systems will
      be more sophisticated and will permit surgeons to perform surgical
      procedures beyond the current limitations of human performance, especially
      at the microscale or on moving organs. More fundamentally, there
      will be an increased reliance on 3-dimensional images of the patient,
      gathered by computed tomography, magnetic resonance imaging, ultrasound,
      or other scanning techniques, to integrate the entire spectrum of
      surgical care from diagnosis to preoperative planning to intraoperative
      navigation to education through simulation. By working through the
      computer-generated image, first with preoperative planning and then
      during telepresence or image-guided procedures, new approaches to
      surgery will be discovered. These technologies are complemented by
      new educational opportunities, such as tele-education, surgical simulation,
      and a Web-based curriculum. Telementoring will permit further extension
      of the educational process directly into the operating room.},
      annote = {Good summary and outlook for new technologies},
      file = {Satava1999.pdf:Satava1999.pdf:PDF},
      issn = {0004-0010},
      keywords = {Forecasting,Humans,Surgical Procedures, Operative,Surgical Procedures,
      Operative: methods,Surgical Procedures, Operative: trends, REV},
      owner = {thomaskroes},
      pmid = {10555633},
      timestamp = {2010.10.25}
    }
  • Y. Sato, Y. Nakajima, T. Nishii, and S. Tamura, “Image guided orthopedic surgery using osteotome with 3D localizer,” J Comp Aid Surg (Suppl), vol. 1, pp. 26-27, 1995.
    [Bibtex]
    @ARTICLE{Sato1995,
      author = {Sato, Y. and Nakajima, Y. and Nishii, T. and Tamura, S.},
      title = {Image guided orthopedic surgery using osteotome with 3D localizer},
      journal = {J Comp Aid Surg (Suppl)},
      year = {1995},
      volume = {1},
      pages = {26 - 27},
      file = {:C\:\\Thomas\\PHD\\Literature\\Articles\\Sato1995.pdf:PDF},
      keywords = {TEC},
      owner = {thomaskroes},
      publisher = {Citeseer},
      timestamp = {2010.10.26}
    }
  • F. Sauer, S. Vogt, and A. Khamene, “Augmented Reality,” in Image-Guided Interventions, T. Peters and K. Cleary, Eds., Springer US, 2008, pp. 81-119.
    [Bibtex]
    @INCOLLECTION{Sauer2008,
      author = {Sauer, Frank and Vogt, Sebastian and Khamene, Ali},
      title = {Augmented Reality},
      booktitle = {Image-Guided Interventions},
      publisher = {Springer US},
      year = {2008},
      editor = {Peters, Terry and Cleary, Kevin},
      pages = {81 - 119},
      note = {Chapter 4},
      abstract = {Much of the visualization in image-guided interventions is achieved
      by creating a virtual image of the surgical or therapeutic environment,
      based upon preoperative images, and displaying it on a workstation
      that is remote from the patient. Linkages between the patient and
      the image are created through image registration and tracked tools.
      Such solutions are not always ideal, and result in a psychophysical
      decoupling of the actual and virtual therapeutic working spaces.
      Using augmented reality, these two spaces are fused into a single
      volume, which is typically viewed stereoscopically so that a preoperative
      or intraoperative patient image appears at the location of the actual
      patient anatomy. The surgeon has the perception that he is seeing
      through the patient or organ surface to observe the operative site.
      This chapter reviews the various approaches to augmented reality,
      and discusses the engineering and psychophysical challenges in developing
      user-friendly systems.},
      affiliation = {Siemens Corporate Research USA},
      file = {Sauer2008.pdf:Sauer2008.pdf:PDF},
      isbn = {978-0-387-73858-1},
      keyword = {Engineering},
      keywords = {AUR},
      owner = {Thomas},
      timestamp = {2011.02.24}
    }
  • F. Sauer, F. Wenzel, S. Vogt, Y. Tao, Y. Genc, and A. Bani-Hashemi, “Augmented workspace: Designing an AR testbed,” in Augmented Reality, 2000 (ISAR 2000), Proceedings of the IEEE and ACM International Symposium on, pp. 47-53, 2002.
    [Bibtex]
    @CONFERENCE{Sauer2002,
      author = {Sauer, F. and Wenzel, F. and Vogt, S. and Tao, Y. and Genc, Y. and
      Bani-Hashemi, A.},
      title = {Augmented workspace: Designing an AR testbed},
      booktitle = {Augmented Reality, 2000.(ISAR 2000). Proceedings. IEEE and ACM International
      Symposium on},
      year = {2002},
      pages = {47 - 53},
      organization = {IEEE},
      isbn = {0769508464},
      keywords = {AUR},
      owner = {Thomas},
      timestamp = {2011.02.28}
    }
  • A. Schenk, M. Hindennach, A. Radtke, M. Malagó, T. Schroeder, and H-O. Peitgen, “Formation of venous collaterals and regeneration in the donor remnant liver: volumetric analysis and three-dimensional visualization.,” Transplantation proceedings, vol. 41, iss. 6, pp. 2515-7, 2009.
    [Bibtex]
    @ARTICLE{Schenk2009,
      author = {Schenk, A and Hindennach, M and Radtke, A and Malag\'{o}, M and Schroeder,
      T and Peitgen, H-O},
      title = {Formation of venous collaterals and regeneration in the donor remnant
      liver: volumetric analysis and three-dimensional visualization.},
      journal = {Transplantation proceedings},
      year = {2009},
      volume = {41},
      pages = {2515 - 7},
      number = {6},
      abstract = {PURPOSE: We sought to quantify and visualize the regeneration
      of the remnant liver after living donor liver transplantation using
      computed tomographic (CT) data. METHODS: For the evaluation of preoperative
      and follow-up data, we developed a software assistant that was able
      to compute the volume growth of the remnant liver and liver territories
      as well as visualize the individual growth of hepatic vessels over
      time. The software was applied to CT data of 20 donors who underwent
      right hepatectomy including the middle hepatic vein with at least
      3 follow-up examinations in the first year after transplantation.
      RESULTS: After donation of a right lobe graft, the remnant liver
      regenerated by an average 77\% of the original volume within the
      first 3 postoperative months and to 86\% within the first year. The
      growth of the left lateral segments was increased compared with that
      of segment IV in all cases. The visualization showed the growth of
      the portal vein and the hepatic veins. With the simultaneous display
      of pre- and postoperative results, it was possible to detect the
      formation of collaterals between truncated segment IVb veins and
      the veins of segment IVa or of the left lateral lobe. CONCLUSION:
      The software-assisted analysis of follow-up data yielded additional
      insight into territorial liver regeneration after living donor liver
      transplantation and allowed for reliable detection of relevant hepatic
      vein collaterals using CT data.},
      file = {Schenk2009.pdf:Schenk2009.pdf:PDF},
      issn = {1873-2623},
      keywords = {Collateral Circulation,Collateral Circulation: physiology,Computer-Assisted,Hepatectomy,Hepatic
      Artery,Hepatic Artery: radiography,Hepatic Veins,Hepatic Veins: radiography,Humans,Image
      Processing,Liver,Liver Circulation,Liver Circulation: physiology,Liver
      Regeneration,Liver Regeneration: physiology,Liver: anatomy \& histology,Liver:
      radiography,Living Donors,Portal System,Portal System: physiology,Portal
      Vein,Portal Vein: radiography,Software,Tomography,X-Ray Computed},
      owner = {thomaskroes},
      pmid = {19715965},
      publisher = {Elsevier Inc.},
      timestamp = {2010.10.25}
    }
  • A. Schenk, G. Prause, and H. O. Peitgen, “Efficient semiautomatic segmentation of 3D objects in medical images,” in Medical Image Computing and Computer-Assisted Intervention - MICCAI 2000, pp. 71-131, 2000.
    [Bibtex]
    @CONFERENCE{Schenk2000,
      author = {Schenk, A. and Prause, G. and Peitgen, H.O.},
      title = {Efficient semiautomatic segmentation of 3D objects in medical images},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention - MICCAI
      2000},
      year = {2000},
      pages = {71 - 131},
      organization = {Springer},
      abstract = {We present a fast and accurate tool for semiautomatic segmentation
      of volumetric medical images based on the live wire algorithm, shape-based
      interpolation and a new optimization method. While the user-steered
      live wire algorithm represents an efficient, precise and reproducible
      method for interactive segmentation of selected two-dimensional images,
      the shape-based interpolation allows the automatic approximation
      of contours on slices between user-defined boundaries. The combination
      of both methods leads to accurate segmentations with significantly
      reduced user interaction time. Moreover, the subsequent automated
      optimization of the interpolated object contours results in a better
      segmentation quality or can be used to extend the distances between
      user-segmented images and for a further reduction of interaction
      time. Experiments were carried out on hepatic computer tomographies
      from three different clinics. The results of the segmentation of
      liver parenchyma have shown that the user interaction time can be
      reduced more than 60\% by the combination of shape-based interpolation
      and our optimization method with volume deviations in the magnitude
      of inter-user differences.},
      file = {Schenk2000.pdf:Schenk2000.pdf:PDF},
      keywords = {IMP},
      owner = {thomaskroes},
      timestamp = {2010.12.01}
    }
  • A. Schenk, S. Zidowitz, H. Bourquain, M. Hindennach, C. Hansen, H. K. Hahn, and H. Peitgen, “Clinical relevance of model based computer-assisted diagnosis and therapy,” Risk Analysis, 2008.
    [Bibtex]
    @ARTICLE{Schenk2008,
      author = {Schenk, Andrea and Zidowitz, Stephan and Bourquain, Holger and Hindennach,
      Milo and Hansen, Christian and Hahn, Horst K and Peitgen, Heinz-otto},
      title = {Clinical relevance of model based computer-assisted diagnosis and
      therapy},
      journal = {Risk Analysis},
      year = {2008},
      abstract = {The ability to acquire and store radiological images digitally has
      made this data available to mathematical and scientific methods.
      With the step from subjective interpretation to reproducible measurements
      and knowledge, it is also possible to develop and apply models that
      give additional information which is not directly visible in the
      data. In this context, it is important to know the characteristics
      and limitations of each model. Four characteristics assure the clinical
      relevance of models for computer-assisted diagnosis and therapy:
      ability of patient individual adaptation, treatment of errors and
      uncertainty, dynamic behavior, and in-depth evaluation. We demonstrate
      the development and clinical application of a model in the context
      of liver surgery. Here, a model for intrahepatic vascular structures
      is combined with individual, but in the degree of vascular details
      limited anatomical information from radiological images. As a result,
      the model allows for a dedicated risk analysis and preoperative planning
      of oncologic resections as well as for living donor liver transplantations.
      The clinical relevance of the method was approved in several evaluation
      studies of our medical partners and more than 2900 complex surgical
      cases have been analyzed since 2002.},
      file = {Schenk2008.pdf:Schenk2008.pdf:PDF},
      keywords = {computer-assisted diagnosis,liver surgery,medical image computing,models,risk
      analysis},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • M. Scheuering, “Intraoperative augmented reality for minimally invasive liver interventions,” Proceedings of SPIE, pp. 407-417, 2003.
    [Bibtex]
    @ARTICLE{Scheuering2003,
      author = {Scheuering, Michael},
      title = {Intraoperative augmented reality for minimally invasive liver interventions},
      journal = {Proceedings of SPIE},
      year = {2003},
      pages = {407 - 417},
      abstract = {Minimally invasive liver interventions demand a lot of experience
      due to the limited access to the field of operation. In particular,
      the correct placement of the trocar and the navigation within the
      patient’s body are hampered. In this work, we present an intraoperative
      augmented reality system (IARS) that directly projects preoperatively
      planned information and structures extracted from CT data, onto the
      real laparoscopic video images. Our system consists of a preoperative
      planning tool for liver surgery and an intraoperative real time visualization
      component. The planning software takes into account the individual
      anatomy of the intrahepatic vessels and determines the vascular territories.
      Methods for fast segmentation of the liver parenchyma, of the intrahepatic
      vessels and of liver lesions are provided. In addition, very efficient
      algorithms for skeletonization and vascular analysis allowing the
      approximation of patient-individual liver vascular territories are
      included. The intraoperative visualization is based on a standard
      graphics adapter for hardware accelerated high performance direct
      volume rendering. The preoperative CT data is rigidly registered
      to the patient position by the use of fiducials that are attached
      to the patient’s body, and anatomical landmarks in combination with
      an electro-magnetic navigation system. Our system was evaluated in
      vivo during a minimally invasive intervention simulation in a swine
      under anesthesia.},
      file = {Scheuering2003.pdf:Scheuering2003.pdf:PDF},
      issn = {0277786X},
      keywords = {computer as-,direct volume rendering,hardware acceleration,image-guided
      surgery,registration, AUR, HES},
      owner = {thomaskroes},
      publisher = {Spie},
      timestamp = {2010.10.25}
    }
  • K. Schicho, M. Figl, R. Seemann, R. Ewers, J. T. Lambrecht, A. Wagner, F. Watzinger, A. Baumann, F. Kainberger, J. Fruehwald, and others, “Accuracy of treatment planning based on stereolithography in computer assisted surgery,” Medical Physics, vol. 33, iss. 9, 2006.
    [Bibtex]
    @ARTICLE{Schichob2006,
      author = {Schicho, K. and Figl, M. and Seemann, R. and Ewers, R. and Lambrecht,
      J.T. and Wagner, A. and Watzinger, F. and Baumann, A. and Kainberger,
      F. and Fruehwald, J. and others},
      title = {Accuracy of treatment planning based on stereolithography in computer
      assisted surgery},
      journal = {Medical Physics},
      year = {2006},
      volume = {33},
      number = {9},
      file = {Schichob2006.pdf:Schichob2006.pdf:PDF},
      keywords = {RPP, CMS},
      owner = {Thomas},
      timestamp = {2011.02.23}
    }
  • J. Schwaiger, M. Markert, B. Seidl, N. Shevchenko, N. Doerfler, and T. C. Lueth, “Risk analysis for intraoperative liver surgery,” in Engineering in Medicine and Biology Society (EMBC), 2010 Annual International Conference of the IEEE, pp. 410-413, 2010.
    [Bibtex]
    @CONFERENCE{Schwaiger2010,
      author = {Schwaiger, J. and Markert, M. and Seidl, B. and Shevchenko, N. and
      Doerfler, N. and Lueth, T.C.},
      title = {Risk analysis for intraoperative liver surgery},
      booktitle = {Engineering in Medicine and Biology Society (EMBC), 2010 Annual International
      Conference of the IEEE},
      year = {2010},
      pages = {410 - 413},
      organization = {IEEE},
      file = {Schwaiger2010.pdf:Schwaiger2010.pdf:PDF},
      issn = {1557-170X},
      keywords = {IMP},
      owner = {Thomas},
      timestamp = {2011.01.31}
    }
  • H. Seitz, C. Tille, S. Irsen, G. Bermes, R. Sader, and H. Zeilhofer, “Rapid Prototyping models for surgical planning with hard and soft tissue representation,” International Congress Series, vol. 1268, pp. 567-572, 2004.
    [Bibtex]
    @ARTICLE{Seitz2004,
      author = {Seitz, H and Tille, C and Irsen, S and Bermes, G and Sader, R and
      Zeilhofer, H},
      title = {Rapid Prototyping models for surgical planning with hard and soft
      tissue representation},
      journal = {International Congress Series},
      year = {2004},
      volume = {1268},
      pages = {567 - 572},
      month = {June},
      abstract = {This paper presents a new approach to build medical models for surgical
      planning with realistic haptic and optical representation of different
      types of tissue. This new kind of combined anatomical models is realized
      by applying different Rapid Prototyping (RP) techniques. Based on
      medical datasets, preferably computer tomography (CT) scans of the
      affected region, both soft and hard tissue structures are reconstructed.
      A stereolithography apparatus builds the hard tissue models and the
      vacuum casting technique allows to manufacture realistic representations
      of soft tissues.},
      file = {Seitz2004.pdf:Seitz2004.pdf:PDF},
      issn = {05315131},
      keywords = {combined anatomical models,rapid prototyping,surgical planning, RPP,
      CMS},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • D. Selle, B. Preim, A. Schenk, and H. Peitgen, “Analysis of vasculature for liver surgical planning.,” IEEE transactions on medical imaging, vol. 21, iss. 11, pp. 1344-57, 2002.
    [Bibtex]
    @ARTICLE{Selle2002,
      author = {Selle, Dirk and Preim, Bernhard and Schenk, Andrea and Peitgen, Heinz-Otto},
      title = {Analysis of vasculature for liver surgical planning.},
      journal = {IEEE transactions on medical imaging},
      year = {2002},
      volume = {21},
      pages = {1344 - 57},
      number = {11},
      month = {November},
      abstract = {For liver surgical planning, the structure and morphology of the hepatic
      vessels and their relationship to tumors are of major interest. To
      achieve a fast and robust assistance with optimal quantitative and
      visual information, we present methods for a geometrical and structural
      analysis of vessel systems. Starting from the raw image data a sequence
      of image processing steps has to be carried out until a three-dimensional
      representation of the relevant anatomic and pathologic structures
      is generated. Based on computed tomography (CT) scans, the following
      steps are performed. 1) The volume data is preprocessed and the vessels
      are segmented. 2) The skeleton of the vessels is determined and transformed
      into a graph enabling a geometrical and structural shape analysis.
      Using this information the different intrahepatic vessel systems
      are identified automatically. 3) Based on the structural analysis
      of the branches of the portal vein, their vascular territories are
      approximated with different methods. These methods are compared and
      validated anatomically by means of corrosion casts of human livers.
      4) Vessels are visualized with graphics primitives fitted to the
      skeleton to provide smooth visualizations without aliasing artifacts.
      The image analysis techniques have been evaluated in the clinical
      environment and have been used in more than 170 cases so far to plan
      interventions and transplantations.},
      file = {Selle2002.pdf:Selle2002.pdf:PDF},
      issn = {0278-0062},
      keywords = {Algorithms,Angiography,Angiography: methods,Bile Ducts, Intrahepatic,Bile
      Ducts, Intrahepatic: radiography,Cadaver,Hepatic Artery,Hepatic Artery:
      radiography,Hepatic Veins,Hepatic Veins: radiography,Humans,Imaging,
      Three-Dimensional,Imaging, Three-Dimensional: instrumentation,Imaging,
      Three-Dimensional: methods,Liver,Liver Neoplasms,Liver Neoplasms:
      radiography,Liver Neoplasms: surgery,Liver Transplantation,Liver
      Transplantation: methods,Liver Transplantation: radiography,Liver:
      blood supply,Liver: radiography,Liver: surgery,Pattern Recognition,
      Automated,Phantoms, Imaging,Portal Vein,Portal Vein: radiography,Preoperative
      Care,Preoperative Care: methods,Radiographic Image Enhancement,Radiographic
      Image Enhancement: instrumentation,Radiographic Image Enhancement:
      methods,Radiographic Image Interpretation, Computer-Assist,Surgery,
      Computer-Assisted,Surgery, Computer-Assisted: methods,Tomography,
      X-Ray Computed,Tomography, X-Ray Computed: instrumentation,Tomography,
      X-Ray Computed: methods,User-Computer Interface, TEC, HES, SUR, VOR},
      owner = {thomaskroes},
      pmid = {12575871},
      timestamp = {2010.10.25}
    }
  • D. Serby, M. Harders, and G. Szekely, “A new approach to cutting into finite element models,” in Medical Image Computing and Computer-Assisted Intervention - MICCAI 2001, pp. 425-433, 2001.
    [Bibtex]
    @CONFERENCE{Serby2010,
      author = {Serby, D. and Harders, M. and Szekely, G.},
      title = {A new approach to cutting into finite element models},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention--MICCAI
      2001},
      year = {2001},
      pages = {425 - 433},
      organization = {Springer},
      file = {Serby2010.pdf:Serby2010.pdf:PDF},
      keywords = {TEC},
      owner = {Thomas},
      timestamp = {2011.02.23}
    }
  • R. Shams, P. Sadeghi, R. Kennedy, and R. Hartley, “Parallel computation of mutual information on the GPU with application to real-time registration of 3D medical images.,” Computer methods and programs in biomedicine, vol. 99, iss. 2, pp. 133-146, 2009.
    [Bibtex]
    @ARTICLE{Shams2009,
      author = {Shams, Ramtin and Sadeghi, Parastoo and Kennedy, Rodney and Hartley,
      Richard},
      title = {Parallel computation of mutual information on the GPU with application
      to real-time registration of 3D medical images.},
      journal = {Computer methods and programs in biomedicine},
      year = {2009},
      volume = {99},
      pages = {133 - 146},
      number = {2},
      month = {December},
      abstract = {Due to processing constraints, automatic image-based registration
      of medical images has been largely used as a pre-operative tool.
      We propose a novel method named sort and count for efficient parallelization
      of mutual information (MI) computation designed for massively multi-processing
      architectures. Combined with a parallel transformation implementation
      and an improved optimization algorithm, our method achieves real-time
      (less than 1s) rigid registration of 3D medical images using a commodity
      graphics processing unit (GPU). This represents a more than 50-fold
      improvement over a standard implementation on a CPU. Real-time registration
      opens new possibilities for development of improved and interactive
      intraoperative tools that can be used for enhanced visualization
      and navigation during an intervention.},
      file = {Shams2009.pdf:Shams2009.pdf:PDF},
      issn = {1872-7565},
      keywords = {TEC},
      owner = {thomaskroes},
      pmid = {20004493},
      publisher = {Elsevier Ireland Ltd},
      timestamp = {2010.10.25}
    }
  • Y. Shen, B. Wang, Y. Ju, J. Xie, and X. Huang, “Interaction Techniques for the Exploration of Hepatic Vessel Structure,” in Engineering in Medicine and Biology Society, 2005. IEEE-EMBS 2005. 27th Annual International Conference of the, 2005, pp. 2902-2905.
    [Bibtex]
    @INPROCEEDINGS{Shen2005,
      author = {Yi Shen and Boliang Wang and Ying Ju and Jiezhen Xie and Xiaoyang
      Huang},
      title = {Interaction Techniques for the Exploration of Hepatic Vessel Structure},
      booktitle = {Engineering in Medicine and Biology Society, 2005. IEEE-EMBS 2005.
      27th Annual International Conference of the},
      year = {2005},
      pages = {2902 - 2905},
      abstract = {Hepatic vessel system is one of the most complex vessel systems in
      human body. For liver surgical planning, morphology and topology
      analysis of the hepatic vessel system is of our major interest. In
      this article, we present a new method to analyze the liver vessel
      system. Starting from the raw CT data set, vessel system is segmented.
      Based on that, then skeleton line of the vessel system is extracted
      and a symbolic vessel tree is constructed. The interactive analysis
      is achieved by combination of the abstract vessel tree information
      and the vessel surface model. The experimental results illustrate
      that the algorithm is effective, easy to implement, and addresses
      fully interaction facilities. The whole processing involves no human
      interventions, except the preprocessing of CT images},
      file = {Shen2005.pdf:Shen2005.pdf:PDF},
      keywords = {CT image preprocessing;abstract vessel tree information;hepatic vessel
      structure;interaction techniques;interactive analysis;liver surgical
      planning;morphology analysis;symbolic vessel tree;topology analysis;vessel
      surface model;vessel system segmentation;computerised tomography;image
      segmentation;liver;medical image processing;, TEC},
      owner = {Thomas},
      timestamp = {2011.01.31}
    }
  • J. T. Sherman, M. R. DiSilvestro, and T. L. Dietz, Method and apparatus for performing a voice-assisted orthopaedic surgical procedure, Google Patents, 2005.
    [Bibtex]
    @MISC{Sherman2005a,
      author = {Sherman, J.T. and DiSilvestro, M.R. and Dietz, T.L.},
      title = {Method and apparatus for performing a voice-assisted orthopaedic
      surgical procedure},
      month = {March},
      year = {2005},
      owner = {Thomas},
      publisher = {Google Patents},
      timestamp = {2011.02.03}
    }
  • J. T. Sherman, M. R. DiSilvestro, and R. S. Popovic, Apparatus and method for registering a bone of a patient with a computer assisted orthopaedic surgery system, Google Patents, 2005.
    [Bibtex]
    @MISC{Sherman2005b,
      author = {Sherman, J.T. and DiSilvestro, M.R. and Popovic, R.S.},
      title = {Apparatus and method for registering a bone of a patient with a computer
      assisted orthopaedic surgery system},
      month = {December},
      year = {2005},
      owner = {Thomas},
      publisher = {Google Patents},
      timestamp = {2011.02.03}
    }
  • N. Shevchenko, B. Seidl, J. Schwaiger, M. Markert, and T. C. Lueth, “MiMed liver: A planning system for liver surgery,” in Engineering in Medicine and Biology Society (EMBC), 2010 Annual International Conference of the IEEE, 2010, pp. 1882-1885.
    [Bibtex]
    @INPROCEEDINGS{Shevchenko2010,
      author = {Shevchenko, N. and Seidl, B. and Schwaiger, J. and Markert, M. and
      Lueth, T.C.},
      title = {MiMed liver: A planning system for liver surgery},
      booktitle = {Engineering in Medicine and Biology Society (EMBC), 2010 Annual International
      Conference of the IEEE},
      year = {2010},
      pages = {1882 - 1885},
      abstract = {In clinical routine of liver surgery there are a multitude of risks
      such as vessel injuries, blood loss, incomplete tumor resection,
      etc. In order to avoid these risks the surgeons perform a planning
      of a surgical intervention. A good graphical representation of the
      liver and its inner structures is of great importance for a good
      planning. In this work we introduce a new planning system for liver
      surgery, which is meant for computer tomography (CT) data analysis
      and graphical representation. The system is based on automatic and
      semiautomatic segmentation techniques as well as on a simple and
      intuitive user interface and was developed with the intention to
      help surgeons by planning an operation and increasing the efficiency
      in open liver surgery.},
      file = {Shevchenko2010.pdf:Shevchenko2010.pdf:PDF},
      issn = {1557-170X},
      keywords = {MiMed Liver planning system;computer tomography;data analysis;graphical
      representation;image segmentation;liver surgery;tumors;user interface;vascular
      tree;cancer;computer graphics;computerised tomography;data analysis;image
      representation;image segmentation;liver;medical image processing;surgery;tumours;user
      interfaces;, TEC, HES},
      owner = {thomaskroes},
      timestamp = {2011.01.26}
    }
  • J. H. Shuhaiber, “Augmented reality in surgery,” Archives of surgery, vol. 139, iss. 2, p. 170, 2004.
    [Bibtex]
    @ARTICLE{Shuhaiber2004,
      author = {Shuhaiber, J.H.},
      title = {Augmented reality in surgery},
      journal = {Archives of surgery},
      year = {2004},
      volume = {139},
      pages = {170},
      number = {2},
      file = {Shuhaiber2004.pdf:Shuhaiber2004.pdf:PDF},
      keywords = {REV, AUR},
      owner = {thomaskroes},
      publisher = {Am Med Assoc},
      timestamp = {2011.01.04}
    }
  • T. Sielhorst, M. Feuerstein, and N. Navab, “Advanced medical displays: A literature review of augmented reality,” Journal of Display Technology, vol. 4, iss. 4, pp. 451-467, 2008.
    [Bibtex]
    @ARTICLE{Sielhorst2008,
      author = {Sielhorst, T. and Feuerstein, M. and Navab, N.},
      title = {Advanced medical displays: A literature review of augmented reality},
      journal = {Journal of Display Technology},
      year = {2008},
      volume = {4},
      pages = {451 - 467},
      number = {4},
      file = {Sielhorst2008.pdf:Sielhorst2008.pdf:PDF},
      issn = {1551-319X},
      keywords = {REV, AUR},
      owner = {thomaskroes},
      publisher = {IEEE},
      timestamp = {2011.01.26}
    }
  • J. M. Sikorski and S. Chauhan, “Aspects of current management,” The Journal of Bone and Joint Surgery, vol. 85, iss. 3, pp. 319-323, 2003.
    [Bibtex]
    @ARTICLE{Sikorski2003,
      author = {Sikorski, J M and Chauhan, S},
      title = {Aspects of current management},
      journal = {The Journal of Bone and Joint Surgery},
      year = {2003},
      volume = {85},
      pages = {319 - 323},
      number = {3},
      abstract = {In orthopaedic surgery there is a well-recognised relationship between
      accuracy and outcome. Accurate reduction of fractures improves the
      chance of union and the cosmetic and the functional results. Precise
      correction of childhood deformities is necessary to ensure growth
      in a mechanically appropriate direction. A well-aligned hip or knee
      replacement is less likely to dislocate and will last longer. Surgeons
      respond to the need for accuracy by taking care and continually
      modifying their techniques to improve their performance. They have
      been helped enormously by the increased and increasing safety of
      anaesthesia. In most operations there is no longer any premium on
      operating time. Patients having joint replacements do not suffer
      if the surgeon takes an extra 30 minutes to produce improved overall
      alignment. A better technical outcome now usually takes precedence
      over a quick operation. In striving to improve accuracy we have called
      on a large array of tools and aids. Intraoperative radiology is used
      routinely in fracture surgery. Endoscopy has allowed a much more
      focused approach than is possible with open procedures. Nerve stimulators
      identify nerves which may be at risk. In joint replacement freehand
      techniques have been augmented by the introduction of mechanical
      alignment jigs and cutting blocks. Each of these technical refinements
      has initially met with some resistance, but has become established
      as the resulting improvement in outcome has become clear. We are
      now being presented with another technical aid, which promises to
      do much for accuracy in a wide range of surgical endeavour. In some
      respects it is little different from other tools and aids which we
      have accepted in the past. It is going through the similar stages
      of introduction, assessment and acceptance as did fluoroscopy or
      arthroscopy. It promises to have an impact of similar magnitude.
      Yet computer-assisted orthopaedic surgery (CAOS) is different in
      some important respects.},
      file = {Sikorski2003.pdf:Sikorski2003.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2010.10.26}
    }
  • D. A. Simon, B. Jaramaz, M. Blackwell, F. Morgan, A. M. Digioia, E. Kischell, B. Colgan, and T. Kanade, “Development and Validation of a Navigational Guidance System for Acetabular Implant Placement,” 1997.
    [Bibtex]
    @ARTICLE{Simon1997,
      author = {Simon, D A and Jaramaz, B and Blackwell, M and Morgan, F and Digioia,
      A M and Kischell, E and Colgan, B and Kanade, T},
      title = {Development and Validation of a Navigational Guidance System for
      Acetabular Implant Placement},
      year = {1997},
      abstract = {During the past year our group has been developing HipNav, a system
      which helps surgeons determine optimal, patient-specific acetabular
      implant placement and accurately achieve the desired implant placement
      during surgery. HipNav includes three components: a pre-operative
      planner, a range of motion simulator, and an intra-operative tracking
      and guidance system. The goals of the current HipNav system are to:
      1) reduce dislocations following total hip replacement surgery due
      to acetabular malposition; 2) determine and potentially increase
      the "safe" range of motion; 3) reduce wear debris resulting from
      impingement of the implant's femoral neck with the acetabular rim;
      and 4) track in real-time the position of the pelvis and acetabulum
      during surgery. The original implementation of the HipNav system
      was a proof-of-concept prototype which was useful for demonstrating
      the efficacy of this technology in-vitro. As the HipNav system progressed
      towards a clinical implementation, our efforts focussed on several
      practical development and validation issues. This paper describes
      our experience transforming HipNav from a proof-of-concept prototype
      into a robust clinical system, with emphasis on technical development
      and validation. Despite the highly applied nature of this endeavor,
      many fundamental research issues exist. The benefits of tightly coupling
      fundamental research together with applied development in our work
      are discussed.},
      file = {Simon1997.pdf:Simon1997.pdf:PDF},
      keywords = {computer-assisted surgery,navigational guidance,system validation,total
      hip replacement, APP, OTS, PLA, GUI, SUR},
      owner = {thomaskroes},
      timestamp = {2010.10.26}
    }
  • S. Singare, Q. Lian, W. P. Wang, J. Wang, Y. Liu, D. Li, and B. Lu, “Rapid prototyping assisted surgery planning and custom implant design,” Rapid Prototyping Journal, vol. 15, iss. 1, pp. 19-23, 2009.
    [Bibtex]
    @ARTICLE{Singare2009,
      author = {Singare, Sekou and Lian, Qin and Wang, Wei Ping and Wang, Jue and
      Liu, Yaxiong and Li, Dichen and Lu, Bingheng},
      title = {Rapid prototyping assisted surgery planning and custom implant design},
      journal = {Rapid Prototyping Journal},
      year = {2009},
      volume = {15},
      pages = {19 - 23},
      number = {1},
      abstract = {Purpose – This paper aims to describe computer-aided design and rapid
      prototyping (RP) systems for the preoperative planning and fabrication
      of custom-made implant. Design/methodology/approach – A patient with
      mandible defect underwent reconstruction using custom-made implant.
      3D models of the patient’s skull are generated based on computed
      tomography image data. After evaluation of the 3D reconstructed image,
      it was identified that some bone fragment was moved due to the missing
      segment. During the implant design process, the correct position
      of the bone fragment was defined and the geometry of the custom-made
      implant was generated based on mirror image technique and is fabricated
      by a RP machine. Surgical approach such as preoperative planning
      and simulation of surgical procedures was performed using the fabricated
      skull models and custom-made implant. Findings – Results show that
      the stereolithography model provided an accurate tool for preoperative,
      surgical simulation. Research limitations/implications – The methods
      described above suffer from the expensive cost of RP technique. Practical
      implications – This method allows accurate fabrication of the implant.
      The advantages of using this technique are that the physical model
      of the implant is fitted on the skull model so that the surgeon can
      plan and rehearse the surgery in advance and a less invasive surgical
      procedure and less time-consuming reconstructive and an adequate
      esthetic can result. Originality/value – The method improves the
      reconstructive surgery and reduces the risk of a second intervention,
      and the psychological stress of the patient will be eliminated.},
      file = {Singare2009.pdf:Singare2009.pdf:PDF},
      issn = {1355-2546},
      keywords = {body regions,computer-aided design,paper type research paper,rapid
      prototypes, CMS, APP, SUR, PLA, RPP},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • R. A. Siston, N. J. Giori, S. B. Goodman, and S. L. Delp, “Surgical navigation for total knee arthroplasty: a perspective.,” Journal of biomechanics, vol. 40, iss. 4, pp. 728-35, 2007.
    [Bibtex]
    @ARTICLE{Siston2007,
      author = {Siston, Robert A and Giori, Nicholas J and Goodman, Stuart B and
      Delp, Scott L},
      title = {Surgical navigation for total knee arthroplasty: a perspective.},
      journal = {Journal of biomechanics},
      year = {2007},
      volume = {40},
      pages = {728 - 35},
      number = {4},
      month = {January},
      abstract = {A new generation of surgical tools, known as surgical navigation systems,
      has been developed to help surgeons install implants more accurately
      and reproducibly. Navigation systems also record quantitative information
      such as joint range of motion, laxity, and kinematics intra-operatively.
      This article reviews the history of surgical navigation for total
      knee arthroplasty, the biomechanical principles associated with this
      technology, and the related clinical research studies. We describe
      how navigation has the potential to address three main challenges
      for total knee arthroplasty: ensuring excellent and consistent outcomes,
      treating younger and more physically active patients, and enabling
      less invasive surgery.},
      file = {Siston2007.pdf:Siston2007.pdf:PDF},
      issn = {0021-9290},
      keywords = {Arthroplasty, Replacement, Knee,Arthroplasty, Replacement, Knee: trends,Biomechanics,Humans,Knee,Knee:
      physiology,Knee: surgery,Range of Motion, Articular,Range of Motion,
      Articular: physiology,Robotics,Robotics: methods,Surgery, Computer-Assisted,Surgery,
      Computer-Assisted: trends,Surgical Procedures, Minimally Invasive,Surgical
      Procedures, Minimally Invasive: trends, REV},
      owner = {thomaskroes},
      pmid = {17317419},
      timestamp = {2010.10.26}
    }
  • T. Sohmura, H. Hojo, M. Nakajima, K. Wakabayashi, M. Nagao, S. Iida, T. Kitagawa, M. Kogo, T. Kojima, K. Matsumura, T. Nakamura, and J. Takahashi, “Prototype of simulation of orthognathic surgery using a virtual reality haptic device.,” International journal of oral and maxillofacial surgery, vol. 33, iss. 8, pp. 740-50, 2004.
    [Bibtex]
    @ARTICLE{Sohmura2004,
      author = {Sohmura, T and Hojo, H and Nakajima, M and Wakabayashi, K and Nagao,
      M and Iida, S and Kitagawa, T and Kogo, M and Kojima, T and Matsumura,
      K and Nakamura, T and Takahashi, J},
      title = {Prototype of simulation of orthognathic surgery using a virtual reality
      haptic device.},
      journal = {International journal of oral and maxillofacial surgery},
      year = {2004},
      volume = {33},
      pages = {740 - 50},
      number = {8},
      month = {December},
      abstract = {A maxillofacial simulator can support education and training. In the
      present study, cutting, separation, and quantitative rearrangement
      of bone during orthognathic surgery were simulated by means of a
      haptic device with virtual tactile perception. Computed tomographic
      (CT) images of two patients with severe jaw deformity, one woman
      and one man, were input into the device. In the woman, Le Fort I
      osteotomy of the maxilla and sagittal splitting ramus osteotomy of
      the mandible were initially simulated. During surgery with the haptic
      device, separation and rearrangement of the maxilla and the ramus
      of the mandible were initially processed. However, there was discrepancy
      and overlapping of the ramus with the mandible. Intraoral vertical
      osteotomy of the right ramus was then performed, with satisfactory
      results and less discrepancy and interference. The simulation was
      referred to at surgery, and satisfactory surgical assistance was
      postoperatively confirmed on CT images. The male patient had severe
      jaw deformity due to unequal growth between the ramuses, resulting
      in anterior crossbite. Sagittal splitting ramus osteotomy with rotation
      of the mandible was successfully simulated. Because of its versatility
      and functions, the present device was found to be useful for simulating
      various procedures for orthognathic surgery and thereby three-dimensionally
      determine surgical movements.},
      file = {Sohmura2004.pdf:Sohmura2004.pdf:PDF},
      issn = {0901-5027},
      keywords = {Adult,Computer Simulation,Dental Occlusion,Facial Asymmetry,Facial
      Asymmetry: surgery,Female,Humans,Imaging, Three-Dimensional,Male,Malocclusion,Malocclusion:
      surgery,Mandible,Mandible: surgery,Maxilla,Maxilla: surgery,Models,
      Biological,Osteotomy,Osteotomy, Le Fort,Osteotomy, Le Fort: methods,Osteotomy:
      methods,Patient Care Planning,Prognathism,Prognathism: surgery,Rotation,Tomography,
      X-Ray Computed,Touch,User-Computer Interface, APP, CMS, PLA, SUR},
      owner = {thomaskroes},
      pmid = {15556320},
      timestamp = {2010.10.25}
    }
  • V. Sojar, D. Stanisavljevic, M. Hribernik, M. Glusic, D. Kreuh, U. Velkavrh, and T. Fius, “Liver surgery training and planning in 3D virtual space,” International Congress Series, vol. 1268, pp. 390-394, 2004.
    [Bibtex]
    @ARTICLE{Sojar2004,
      author = {V. Sojar and D. Stanisavljevic and M. Hribernik and M. Glusic and
      D. Kreuh and U. Velkavrh and T. Fius},
      title = {Liver surgery training and planning in 3D virtual space},
      journal = {International Congress Series},
      year = {2004},
      volume = {1268},
      pages = {390 - 394},
      abstract = {Liver surgery is still one of the most demanding fields in surgery,
      and planning a liver resection always presents a challenge for a
      surgeon. Many authors have already described three-dimensional (3D)
      reconstructions of conventional computer tomography (CT) and nuclear
      magnetic resonance (NMR) scans, which enable precise surgery planning.
      But while the reconstruction can be seen, the user cannot simulate
      surgery. The ultimate goal has been to produce 3D reconstructions
      of the conventional CT or NMR slices and thus virtual environment
      that allows planning and simulation of the surgical procedure. The
      project of developing a computer program for planning and simulating
      liver surgery had three steps: The first step was creating a 3D virtual
      liver and virtual environment. The second step was developing the
      method to reconstruct the liver from conventional CT scans. The final
      step was transferring the data obtained to the previously created
      virtual environment for manipulation. The result of this process
      is a liver surgery educational tool, which produces a virtual environment
      using PC based software. It provides the opportunity to manipulate
      a 3D liver and it is a tool for studying the internal liver structures
      for all four vessel systems. The virtual environment portrays a
      detailed liver segmentation. The program's most important function
      is allowing the opportunity to perform virtual intraoperative ultra
      sound on a virtual liver. The virtual liver can be dissected; the
      vessels can be clipped, ligated and cut. The program for 3D reconstruction
      from conventional CT scans has been developed so that the obtained
      data may be used for simulated surgery in the virtual environment.
      The virtual liver of the real patient has the capacity to be manipulated,
      cut, dissected; moreover, the intraoperative ultra sound can be performed
      as in the real surgery. First simulations have shown clear benefits
      in planning liver surgery. The rapid development of computer technology
      offers many possibilities in education and surgery planning. The
      group successfully developed the PC based application for surgery
      planning and simulation in virtual environment as well as for educational
      purposes.},
      file = {Sojar2004.pdf:Sojar2004.pdf:PDF},
      issn = {0531-5131},
      keywords = {Liver surgery planning, TEC},
      owner = {Thomas},
      timestamp = {2011.01.31}
    }
  • M. Solaiyappan, Visualization Pathways in Biomedicine, 2nd ed., Elsevier Inc., 2008.
    [Bibtex]
    @BOOK{Solaiyappan2008,
      title = {Visualization Pathways in Biomedicine},
      publisher = {Elsevier Inc.},
      year = {2008},
      author = {Solaiyappan, Meiyappan},
      pages = {729 - 753},
      edition = {Second Edition},
      booktitle = {Handbook of Medical Image Processing and Analysis},
      file = {Solaiyappan2008.pdf:Solaiyappan2008.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2010.10.26}
    }
  • L. Soler, H. Delingette, G. Malandain, N. Ayache, C. Koehl, J. Clement, O. Dourthe, and J. Marescaux, “An automatic virtual patient reconstruction from CT-scans for hepatic surgical planning,” Medicine meets virtual reality 2000: envisioning healing: interactive technology and the patient-practitioner dialogue, p. 316, 2000.
    [Bibtex]
    @ARTICLE{Soler2000,
      author = {Soler, L. and Delingette, H. and Malandain, G. and Ayache, N. and
      Koehl, C. and Clement, JM and Dourthe, O. and Marescaux, J.},
      title = {An automatic virtual patient reconstruction from CT-scans for hepatic
      surgical planning},
      journal = {Medicine meets virtual reality 2000: envisioning healing: interactive
      technology and the patient-practitioner dialogue},
      year = {2000},
      pages = {316},
      file = {Soler2000.pdf:Soler2000.pdf:PDF},
      isbn = {1586030140},
      keywords = {APP, PLA, HES, SUR, VOR},
      owner = {Thomas},
      publisher = {Ios Pr Inc},
      timestamp = {2011.01.31}
    }
  • L. Soler, H. Delingette, G. Malandain, J. Montagnat, N. Ayache, C. Koehl, O. Dourthe, B. Malassagne, M. Smith, D. Mutter, and others, “Fully automatic anatomical, pathological, and functional segmentation from CT scans for hepatic surgery,” Computer Aided Surgery, vol. 6, iss. 3, pp. 131-142, 2001.
    [Bibtex]
    @ARTICLE{Soler2001,
      author = {Soler, L. and Delingette, H. and Malandain, G. and Montagnat, J.
      and Ayache, N. and Koehl, C. and Dourthe, O. and Malassagne, B. and
      Smith, M. and Mutter, D. and others},
      title = {Fully automatic anatomical, pathological, and functional segmentation
      from CT scans for hepatic surgery},
      journal = {Computer Aided Surgery},
      year = {2001},
      volume = {6},
      pages = {131 - 142},
      number = {3},
      file = {Soler2001.pdf:Soler2001.pdf:PDF},
      issn = {1097-0150},
      keywords = {TEC, IMP},
      owner = {Thomas},
      publisher = {Wiley Online Library},
      timestamp = {2011.02.01}
    }
  • L. Soler and J. Marescaux, “Patient-specific Surgical Simulation,” World Journal of Surgery, vol. 32, pp. 208-212, 2008.
    [Bibtex]
    @ARTICLE{Soler2008b,
      author = {Soler, Luc and Marescaux, Jacques},
      title = {Patient-specific Surgical Simulation},
      journal = {World Journal of Surgery},
      year = {2008},
      volume = {32},
      pages = {208 - 212},
      note = {10.1007/s00268-007-9329-3},
      abstract = {Technological innovations of the twentieth century have provided medicine
      and surgery with new tools for education and therapy definition.
      Thus, by combining Medical Imaging and Virtual Reality, patient-specific
      applications providing preoperative surgical simulation have become
      possible.},
      affiliation = {Institut de Recherche sur les Cancers de l’Appareil Digestif (IRCAD),
      1 place de l’Hôpital, 67091 Strasbourg, France},
      file = {Soler2008b.pdf:Soler2008b.pdf:PDF},
      issn = {0364-2313},
      issue = {2},
      keyword = {Medicine},
      keywords = {TEC},
      owner = {Th},
      publisher = {Springer New York},
      timestamp = {2011.03.04},
      url = {http://dx.doi.org/10.1007/s00268-007-9329-3}
    }
  • L. Soler, S. Nicolau, J. Fasquel, V. Agnus, A. Charnoz, A. Hostettler, J. Moreau, C. Forest, D. Mutter, and J. Marescaux, “Virtual reality and augmented reality applied to laparoscopic and NOTES procedures,” 2008 5th IEEE International Symposium on Biomedical Imaging: From Nano to Macro, pp. 1399-1402, 2008.
    [Bibtex]
    @ARTICLE{Soler2008a,
      author = {Soler, Luc and Nicolau, Stephane and Fasquel, Jean-Baptiste and Agnus,
      Vincent and Charnoz, Arnaud and Hostettler, Alexandre and Moreau,
      Johan and Forest, Clement and Mutter, Didier and Marescaux, Jacques},
      title = {Virtual reality and augmented reality applied to laparoscopic and
      NOTES procedures},
      journal = {2008 5th IEEE International Symposium on Biomedical Imaging: From
      Nano to Macro},
      year = {2008},
      pages = {1399 - 1402},
      month = {May},
      abstract = {Computer-assisted surgery led to a major improvement in medicine.
      Such an improvement can be summarized in three major steps. The first
      one consists in an automated 3D modelling of patients from their
      medical images. The second one consists in using this modelling in
      surgical planning and simulator software offering then the opportunity
      to train the surgical gesture before carrying it out. The last step
      consists in intraoperatively superimposing preoperative data onto
      the real view of patients. This Augmented Reality provides surgeons
      a view in transparency of their patient allowing to track instruments
      and improve pathology targeting. We will present here our results
      in these different domains applied to laparoscopic and NOTES procedures.},
      file = {Soler2008a.pdf:Soler2008a.pdf:PDF},
      isbn = {978-1-4244-2002-5},
      owner = {thomaskroes},
      publisher = {Ieee},
      timestamp = {2010.10.26}
    }
  • L. Soler, S. Nicolau, A. Hostettler, J. Fasquel, V. Agnus, A. Charnoz, J. Moreau, B. Dallemagne, D. Mutter, and J. Marescaux, “Computer-Assisted Digestive Surgery,” in Computational Surgery and Dual Training, M. Garbey, B. L. Bass, C. Collet, M. Mathelin, and R. Tran-Son-Tay, Eds., Springer US, 2010, pp. 139-153.
    [Bibtex]
    @INCOLLECTION{Soler2010,
      author = {Soler, Luc and Nicolau, Stéphane and Hostettler, Alexandre and Fasquel,
      Jean-Baptiste and Agnus, Vincent and Charnoz, Arnaud and Moreau,
      Johan and Dallemagne, Bernard and Mutter, Didier and Marescaux, Jacques},
      title = {Computer-Assisted Digestive Surgery},
      booktitle = {Computational Surgery and Dual Training},
      publisher = {Springer US},
      year = {2010},
      editor = {Garbey, Marc and Bass, Barbara Lee and Collet, Christophe and Mathelin,
      Michel and Tran-Son-Tay, Roger},
      pages = {139-153},
      abstract = {Introducing an optical device into the abdomen of a patient so as
      to carry out the surgical procedure via a miniaturized camera represented
      the major change the surgical world experienced during the twentieth
      century: the minimally invasive surgery era was born. This revolution
      is about to experience a new twist linked to the appearance of a
      new original technique called Natural Orifice Transluminal Endoscopic
      Surgery (NOTES) that could replace traditional laparoscopic surgery
      for a large set of procedures. By replacing the rigid optic that
      is introduced through the skin by a flexible optic that is introduced
      through a natural orifice such as stomach, vagina or colon, this
      new technique should eliminate all visible incisions. If the benefits
      for patients have clearly been proved for laparoscopic surgery, and
      whatever the result for NOTES, such minimally invasive techniques
      bring up new difficulties for surgeons, thus reducing their gesture
      capacity. The first difficulty is the loss of several senses such
      as the sense of touch and a modification of the force feedback feeling.
      In NOTES, this loss is greatly amplified due to the length of instruments
      making it difficult to feel a contact between an instrument and an
      organ. This lack of force feedback is also featured by current robotic
      systems, such as the Da Vinci robot from the Intuitive Surgical Company,
      currently the most used surgical robot worldwide. The use of stereoscopic
      vision, however, has made it possible to lessen that perceptual limit,
      compensating for it with a 3D view of the operative scene filmed by two cameras. But
      this technique will be difficult to implement for transluminal endoscopic
      surgery, since it requires the extreme miniaturization of cameras
      while maintaining a high image resolution. Another solution consists
      in using virtual reality and augmented reality. Indeed, virtual reality
      provides a preoperative 3D view of patients, generated from
      their medical image (CT scan or MRI). This virtual copy of the patient
      can then be used in a preoperative simulator, which provides a realistic
      3D view of the patient.},
      affiliation = {IRCAD/EITS, 1, place de l’hôpital, 67091 Strasbourg Cedex, France},
      file = {Soler2010.pdf:Soler2010.pdf:PDF},
      isbn = {978-1-4419-1123-0},
      keyword = {Engineering},
      keywords = {REV},
      owner = {Thomas},
      timestamp = {2011.03.09},
      url = {http://dx.doi.org/10.1007/978-1-4419-1123-0_8}
    }
  • Y. Song, F. Xiao-ping, and L. Zhi-Fang, “A Study of the Endoscopic Surgery Simulation Training System Based on 3D Virtual Reality,” in Computational Intelligence and Software Engineering, 2009. CiSE 2009. International Conference on, 2009, pp. 1-4.
    [Bibtex]
    @INPROCEEDINGS{Song2009,
      author = {Yu Song and Fan Xiao-ping and Liao Zhi-Fang},
      title = {A Study of the Endoscopic Surgery Simulation Training System Based
      on 3D Virtual Reality},
      booktitle = {Computational Intelligence and Software Engineering, 2009. CiSE 2009.
      International Conference on},
      year = {2009},
      pages = {1 - 4},
      month = {December},
      abstract = {The key techniques and methods to design and realize the virtual endoscopic
      surgery training system were discussed. In accordance with the functional
      requirement, the virtual endoscopic surgery training system was built
      using hierarchical system structure, with the 3-dimensional model
      of human organ, soft tissue deformation and cutting algorithm, and
      also with the function of virtual scene editing and rendering. The
      tetrahedron model of the organ was designed according to the medical
      data and packaged with a doubly linked list; according to the
      viscoelastic features of soft tissue, its deformation was
      simulated using a spring-mass model, and the cutting was simulated
      using a cut-off point duplicating/separating algorithm.},
      file = {:Song2009.pdf:PDF},
      keywords = {3D virtual reality;cutting algorithm;endoscopic surgery simulation
      training system;soft tissue deformation;spring-mass model;virtual
      endoscopic surgery training system;virtual scene editing;virtual
      scene rendering;biomedical education;computer based training;endoscopes;medical
      computing;rendering (computer graphics);surgery;virtual reality;},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • E. Sorantin, G. Werkgartner, R. Beichel, A. Bornik, B. Reitinger, N. Popovic, and M. Sonka, “Virtual Liver Surgery Planning,” in Image Processing in Radiology, E. Neri, D. Caramella, and C. Bartolozzi, Eds., Springer Berlin Heidelberg, 2008, pp. 411-418.
    [Bibtex]
    @INCOLLECTION{Sorantin2008,
      author = {Sorantin, Erich and Werkgartner, Georg and Beichel, Reinhard and
      Bornik, Alexander and Reitinger, Bernhard and Popovic, Nikolaus and
      Sonka, Milan},
      title = {Virtual Liver Surgery Planning},
      booktitle = {Image Processing in Radiology},
      publisher = {Springer Berlin Heidelberg},
      year = {2008},
      editor = {Neri, Emanuele and Caramella, Davide and Bartolozzi, Carlo},
      series = {Medical Radiology},
      pages = {411 - 418},
      abstract = {Liver tumors account for a considerable number of deaths every year
      (World Health Organization, 2004). One type of primary liver tumor
      is hepatocellular carcinoma, which arises frequently as a complication
      of liver cirrhosis. Additionally, almost any tumor can seed metastasis
      within the liver, colorectal cancer being at the top of the list.},
      affiliation = {Medical University Graz Department of Radiology Auenbruggerplatz 9
      8036 Graz Austria},
      file = {Sorantin2008.pdf:Sorantin2008.pdf:PDF},
      isbn = {978-3-540-49830-8},
      keyword = {Medicine \& Public Health},
      owner = {thomaskroes},
      timestamp = {2011.01.26}
    }
  • T. S. Sorensen, S. V. Therkildsen, P. Makowski, J. L. Knudsen, and E. M. Pedersen, “A new virtual reality approach for planning of cardiac interventions.,” Artificial intelligence in medicine, vol. 22, iss. 3, pp. 193-214, 2001.
    [Bibtex]
    @ARTICLE{Sorensen2001,
      author = {Sorensen, T S and Therkildsen, S V and Makowski, P and Knudsen, J
      L and Pedersen, E M},
      title = {A new virtual reality approach for planning of cardiac interventions.},
      journal = {Artificial intelligence in medicine},
      year = {2001},
      volume = {22},
      pages = {193 - 214},
      number = {3},
      month = {June},
      abstract = {A novel approach to three-dimensional (3D) visualization of high quality,
      respiratory compensated cardiac magnetic resonance (MR) data is presented
      with the purpose of assisting the cardiovascular surgeon and the
      invasive cardiologist in the pre-operative planning. Developments
      included: (1) optimization of 3D, MR scan protocols; (2) dedicated
      segmentation software; (3) optimization of model generation algorithms;
      (4) interactive, virtual reality visualization. The approach is based
      on a tool for interactive, real-time visualization of 3D cardiac
      MR datasets in the form of 3D heart models displayed on virtual reality
      equipment. This allows the cardiac surgeon and the cardiologist to
      examine the model as if they were actually holding it in their hands.
      To secure relevant examination of all details related to cardiac
      morphology, the model can be re-scaled and the viewpoint can be set
      to any point inside the heart. Finally, the original, raw MR images
      can be examined on line as textures in cut-planes through the heart
      models.},
      issn = {0933-3657},
      keywords = {Cardiovascular Diseases,Cardiovascular Diseases: diagnosis,Computer
      Simulation,Heart,Heart: physiology,Humans,Imaging, Three-Dimensional,Magnetic
      Resonance Angiography,Magnetic Resonance Angiography: methods,Models,
      Theoretical},
      owner = {thomaskroes},
      pmid = {11377147},
      timestamp = {2010.10.26}
    }
  • R. Spence, Information visualization, Addison-Wesley, Reading, MA, 2001.
    [Bibtex]
    @BOOK{Spence2001,
      title = {Information visualization},
      publisher = {Addison-Wesley, Reading, MA},
      year = {2001},
      author = {Spence, R.},
      isbn = {0201596261},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
  • [DOI] S. A. Spicer and C. A. Taylor, “Simulation-based medical planning for cardiovascular disease: Visualization system foundations,” Computer Aided Surgery, vol. 5, iss. 2, pp. 82-89, 2000.
    [Bibtex]
    @ARTICLE{Spicer2000,
      author = {Spicer, Sean A. and Taylor, Charles A.},
      title = {Simulation-based medical planning for cardiovascular disease: Visualization
      system foundations},
      journal = {Computer Aided Surgery},
      year = {2000},
      volume = {5},
      pages = {82--89},
      number = {2},
      abstract = {A technique for visualizing computational models along with
      volumetric imaging data in a real-time, interactive, simulation-based
      medical planning system for cardiovascular disease treatment is described.
      This technique involves an ordered rendering of faceted geometry
      and volumetric image data. We have developed a software system based
      on this image-fusion technique that is capable of capturing and representing
      the inherent anatomic constraints of an individual patient. Such
      constraints must be represented accurately in a medical planning
      system to ensure the validity of a potential procedure. A hypothetical
      clinical scenario is described for which vascular treatment plans
      were constructed pre-operatively without reference to the physical
      anatomic structure. These models were later embedded into patient-specific
      diagnostic MRA scans to establish the anatomic context for physiologic
      observations.},
      doi = {10.1002/1097-0150(2000)5:2<82::AID-IGS2>3.0.CO;2-5},
      file = {Spicer2000.pdf:Spicer2000.pdf:PDF},
      issn = {1097-0150},
      keywords = {simulation-based medical planning, cardiovascular disease treatment,
      image fusion, volume-rendering},
      publisher = {John Wiley \& Sons, Inc.},
      url = {http://dx.doi.org/10.1002/1097-0150(2000)5:2<82::AID-IGS2>3.0.CO;2-5}
    }
  • A. T. Stadie, R. A. Kockro, R. Reisch, A. Tropine, S. Boor, P. Stoeter, and A. Perneczky, “Virtual reality system for planning minimally invasive neurosurgery. Technical note.,” Journal of neurosurgery, vol. 108, iss. 2, pp. 382-94, 2008.
    [Bibtex]
    @ARTICLE{Stadie2008,
      author = {Stadie, Axel Thomas and Kockro, Ralf Alfons and Reisch, Robert and
      Tropine, Andrei and Boor, Stephan and Stoeter, Peter and Perneczky,
      Axel},
      title = {Virtual reality system for planning minimally invasive neurosurgery.
      Technical note.},
      journal = {Journal of neurosurgery},
      year = {2008},
      volume = {108},
      pages = {382 - 94},
      number = {2},
      month = {February},
      abstract = {OBJECT: The authors report on their experience with a 3D virtual reality
      system for planning minimally invasive neurosurgical procedures.
      METHODS: Between October 2002 and April 2006, the authors used the
      Dextroscope (Volume Interactions, Ltd.) to plan neurosurgical procedures
      in 106 patients, including 100 with intracranial and 6 with spinal
      lesions. The planning was performed 1 to 3 days preoperatively, and
      in 12 cases, 3D prints of the planning procedure were taken into
      the operating room. A questionnaire was completed by the neurosurgeon
      after the planning procedure. RESULTS: After a short period of acclimatization,
      the system proved easy to operate and is currently used routinely
      for preoperative planning of difficult cases at the authors' institution.
      It was felt that working with a virtual reality multimodal model
      of the patient significantly improved surgical planning. The pathoanatomy
      in individual patients could easily be understood in great detail,
      enabling the authors to determine the surgical trajectory precisely
      and in the most minimally invasive way. CONCLUSIONS: The authors
      found the preoperative 3D model to be in high concordance with intraoperative
      conditions; the resulting intraoperative "d\'{e}j\`{a}-vu" feeling
      enhanced surgical confidence. In all procedures planned with the
      Dextroscope, the chosen surgical strategy proved to be the correct
      choice. Three-dimensional virtual reality models of a patient allow
      quick and easy understanding of complex intracranial lesions.},
      file = {Stadie2008.pdf:Stadie2008.pdf:PDF},
      issn = {0022-3085},
      keywords = {Adenoma,Adenoma: surgery,Adult,Aged,Angiography,Angiography: methods,Brain
      Neoplasms,Brain Neoplasms: surgery,Computer Simulation,Diffusion
      Magnetic Resonance Imaging,Female,Hemangioma, Cavernous, Central
      Nervous System,Hemangioma, Cavernous, Central Nervous System: sur,Humans,Image
      Processing, Computer-Assisted,Image Processing, Computer-Assisted:
      methods,Imaging, Three-Dimensional,Imaging, Three-Dimensional: methods,Intracranial
      Aneurysm,Intracranial Aneurysm: surgery,Magnetic Resonance Angiography,Magnetic
      Resonance Imaging,Male,Meningioma,Meningioma: surgery,Middle Aged,Neurosurgical
      Procedures,Neurosurgical Procedures: methods,Patient Care Planning,Surgery,
      Computer-Assisted,Surgery, Computer-Assisted: methods,Surgical Procedures,
      Minimally Invasive,Surgical Procedures, Minimally Invasive: methods,Tomography,
      X-Ray Computed,Tomography, X-Ray Computed: methods,User-Computer
      Interface, STV, APP, NES, AUR, SUR, VOR},
      owner = {thomaskroes},
      pmid = {18240940},
      timestamp = {2010.10.25}
    }
  • S. D. Steppacher, J. H. Kowal, and S. B. Murphy, “Improving Cup Positioning Using a Mechanical Navigation Instrument,” Clinical Orthopaedics and Related Research®, pp. 1-6, 2010.
    [Bibtex]
    @ARTICLE{Steppacher2010,
      author = {Steppacher, S.D. and Kowal, J.H. and Murphy, S.B.},
      title = {Improving Cup Positioning Using a Mechanical Navigation Instrument},
      journal = {Clinical Orthopaedics and Related Research{\textregistered}},
      year = {2010},
      pages = {1 - 6},
      file = {Steppacher2010.pdf:Steppacher2010.pdf:PDF},
      issn = {0009-921X},
      keywords = {TRM},
      owner = {Thomas},
      publisher = {Springer},
      timestamp = {2011.02.15}
    }
  • E. Stindel, J. Briard, P. Merloz, S. Plaweski, F. Dubrana, C. Lefevre, and J. Troccaz, “Bone morphing: 3D morphological data for total knee arthroplasty,” Computer Aided Surgery, vol. 7, iss. 3, pp. 156-168, 2002.
    [Bibtex]
    @ARTICLE{Stindel2002,
      author = {Stindel, E. and Briard, JL and Merloz, P. and Plaweski, S. and Dubrana,
      F. and Lefevre, C. and Troccaz, J.},
      title = {Bone morphing: 3D morphological data for total knee arthroplasty},
      journal = {Computer Aided Surgery},
      year = {2002},
      volume = {7},
      pages = {156 - 168},
      number = {3},
      file = {Stindel2002.pdf:Stindel2002.pdf:PDF},
      issn = {1097-0150},
      keywords = {TEC, OTS},
      owner = {thomaskroes},
      publisher = {Wiley Online Library},
      timestamp = {2011.01.12}
    }
  • D. Stoyanov, G. P. Mylonas, M. Lerotic, A. J. Chung, and G. Yang, “Intra-Operative Visualizations: Perceptual Fidelity and Human Factors,” Display Technology, Journal of, vol. 4, iss. 4, pp. 491-501, 2008.
    [Bibtex]
    @ARTICLE{Stoyanov2008,
      author = {Stoyanov, D. and Mylonas, G.P. and Lerotic, M. and Chung, A.J. and
      Guang-Zhong Yang},
      title = {Intra-Operative Visualizations: Perceptual Fidelity and Human Factors},
      journal = {Display Technology, Journal of},
      year = {2008},
      volume = {4},
      pages = {491 - 501},
      number = {4},
      abstract = {With increasing capability and complexity of surgical interventions,
      intra-operative visualization is becoming an important part of a
      surgical environment. This paper reviews some of our recent progress
      in the intelligent use of pre- and intra-operative data for enhanced
      surgical navigation and motion compensated visualization. High fidelity
      augmented reality (AR) with enhanced 3D depth perception is proposed
      to provide effective surgical guidance. To cater for large scale
      tissue deformation, real-time depth recovery based on stereo disparity
      and eye gaze tracking is introduced. This allows the development
      of motion compensated visualization for improved visual perception
      and for facilitating motion adaptive AR displays. The discussion
      of the paper is focused on how to ensure perceptual fidelity of AR
      and the need for real-time tissue deformation recovery and modeling,
      as well as the importance of incorporating human perceptual factors
      in surgical displays.},
      file = {Stoyanov2008.pdf:Stoyanov2008.pdf:PDF},
      issn = {1551-319X},
      keywords = {augmented reality;enhanced 3D depth perception;eye gaze tracking;human
      perceptual factors;intra-operative visualization;motion adaptive
      AR display;motion compensated visualization;perceptual fidelity;robotic
      surgery;stereo disparity;surgical displays;surgical guidance;surgical
      navigation;tissue deformation;visual perception;augmented reality;biological
      tissues;display instrumentation;medical image processing;medical
      robotics;motion compensation;surgery;virtual instrumentation;visual
      perception;},
      owner = {thomaskroes},
      timestamp = {2011.01.25}
    }
  • E. B. Strong, A. Rafii, B. Holhweg-Majert, S. C. Fuller, and M. C. Metzger, “Comparison of 3 optical navigation systems for computer-aided maxillofacial surgery.,” Archives of otolaryngology–head & neck surgery, vol. 134, iss. 10, pp. 1080-4, 2008.
    [Bibtex]
    @ARTICLE{Strong2008,
      author = {Strong, E Bradley and Rafii, Amir and Holhweg-Majert, Bettina and
      Fuller, Scott C and Metzger, Marc Christian},
      title = {Comparison of 3 optical navigation systems for computer-aided maxillofacial
      surgery.},
      journal = {Archives of otolaryngology--head \& neck surgery},
      year = {2008},
      volume = {134},
      pages = {1080 - 4},
      number = {10},
      month = {October},
      abstract = {OBJECTIVE: To compare the accuracy of 3 computer-aided surgery systems
      for maxillofacial reconstruction. DESIGN: Evaluation of 3 computer-aided
      surgery systems: StealthStation, VectorVision, and Voxim. SETTING:
      The University of California, Davis, Department of Otolaryngology
      computer-aided surgery laboratory. PARTICIPANTS: Four fresh cadaveric
      heads. MAIN OUTCOME MEASURE: Mean target registration error. RESULTS:
      The StealthStation was the most accurate (mean [SD] target registration
      error, 1.00 [0.04] mm), followed by VectorVision (1.13 [0.05] mm)
      and then Voxim (1.34 [0.04] mm). All values met statistical significance
      (P < .05). CONCLUSIONS: Measurable accuracy differences were found
      among the navigation systems evaluated. The StealthStation was the
      most accurate. However, the differences are small, and the clinical
      significance for maxillofacial reconstruction is negligible.},
      file = {:C\:\\Thomas\\PHD\\Literature\\Articles\\Strong2008.pdf:PDF},
      issn = {1538-361X},
      keywords = {Cadaver,Face,Face: anatomy \& histology,Face: surgery,Humans,Image
      Processing, Computer-Assisted,Imaging, Three-Dimensional,Maxilla,Maxilla:
      anatomy \& histology,Maxilla: surgery,Monitoring, Intraoperative,Monitoring,
      Intraoperative: methods,Neuronavigation,Neuronavigation: instrumentation,Neuronavigation:
      methods,Reconstructive Surgical Procedures,Reconstructive Surgical
      Procedures: methods,Sensitivity and Specificity,Surgery, Computer-Assisted,Surgery,
      Computer-Assisted: methods,Surgery, Oral,Surgery, Oral: instrumentation,Surgery,
      Oral: methods},
      owner = {thomaskroes},
      pmid = {18936355},
      timestamp = {2010.10.26}
    }
  • S. D. Stulberg, F. Picard, and D. Saragaglia, “Computer-assisted total knee replacement arthroplasty,” Operative Techniques in Orthopaedics, vol. 10, iss. 1, pp. 25-39, 2000.
    [Bibtex]
    @ARTICLE{Stulberg2000,
      author = {Stulberg, S.D. and Picard, F. and Saragaglia, D.},
      title = {Computer-assisted total knee replacement arthroplasty},
      journal = {Operative Techniques in Orthopaedics},
      year = {2000},
      volume = {10},
      pages = {25 - 39},
      number = {1},
      file = {Stulberg2000.pdf:Stulberg2000.pdf:PDF},
      issn = {1048-6666},
      owner = {thomaskroes},
      publisher = {Elsevier},
      timestamp = {2011.01.13}
    }
  • C. Suarez, B. Acha, C. Serrano, C. Parra, and T. Gomez, “VirSSPA- a virtual reality tool for surgical planning workflow.,” International journal of computer assisted radiology and surgery, vol. 4, iss. 2, pp. 133-9, 2009.
    [Bibtex]
    @ARTICLE{Suarez2009,
      author = {Suarez, C and Acha, B and Serrano, C and Parra, C and Gomez, T},
      title = {VirSSPA- a virtual reality tool for surgical planning workflow.},
      journal = {International journal of computer assisted radiology and surgery},
      year = {2009},
      volume = {4},
      pages = {133 - 9},
      number = {2},
      month = {March},
      abstract = {OBJECTIVE: A virtual reality tool, called VirSSPA, was developed to
      optimize the planning of surgical processes. METHODS: Segmentation
      algorithms for Computed Tomography (CT) images: a region growing
      procedure was used for soft tissues and a thresholding algorithm
      was implemented to segment bones. The algorithms operate semiautomatically
      since they only need seed selection with the mouse on each
      tissue segmented by the user. The novelty of the paper is the adaptation
      of an enhancement method based on histogram thresholding applied
      to CT images for surgical planning, which simplifies subsequent segmentation.
      A substantial improvement of the virtual reality tool VirSSPA was
      obtained with these algorithms. RESULTS: VirSSPA was used to optimize
      surgical planning, to decrease the time spent on surgical planning
      and to improve operative results. The success rate increases due
      to surgeons being able to see the exact extent of the patient's ailment.
      This tool can decrease operating room time, thus resulting in reduced
      costs. CONCLUSION: Virtual simulation was effective for optimizing
      surgical planning, which could, consequently, result in improved
      outcomes with reduced costs.},
      file = {Suarez2009.pdf:Suarez2009.pdf:PDF},
      issn = {1861-6429},
      keywords = {Algorithms,Equipment Design,Humans,Imaging, Three-Dimensional,Imaging,
      Three-Dimensional: methods,Surgery, Computer-Assisted,Surgery, Computer-Assisted:
      methods,Tomography, X-Ray Computed,Tomography, X-Ray Computed: instrumentation,User-Computer
      Interface},
      owner = {thomaskroes},
      pmid = {20033611},
      timestamp = {2010.10.26}
    }
  • K. Subburaj and B. Ravi, “High resolution medical models and geometric reasoning starting from CT/MR images,” in Computer-Aided Design and Computer Graphics, 2007 10th IEEE International Conference on, 2007, pp. 441-444.
    [Bibtex]
    @CONFERENCE{Subburaj2007,
      author = {Subburaj, K. and Ravi, B.},
      title = {High resolution medical models and geometric reasoning starting from
      CT/MR images},
      booktitle = {Computer-Aided Design and Computer Graphics, 2007 10th IEEE International
      Conference on},
      year = {2007},
      pages = {441 - 444},
      organization = {IEEE},
      file = {Subburaj2007.pdf:Subburaj2007.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2011.01.17}
    }
  • K. Subburaj, B. Ravi, and M. Agarwal, “Computer-aided methods for assessing lower limb deformities in orthopaedic surgery planning.,” Computerized medical imaging and graphics : the official journal of the Computerized Medical Imaging Society, vol. 34, iss. 4, pp. 277-88, 2010.
    [Bibtex]
    @ARTICLE{Subburaj2010,
      author = {Subburaj, K and Ravi, B and Agarwal, Manish},
      title = {Computer-aided methods for assessing lower limb deformities in orthopaedic
      surgery planning.},
      journal = {Computerized medical imaging and graphics : the official journal
      of the Computerized Medical Imaging Society},
      year = {2010},
      volume = {34},
      pages = {277 - 88},
      number = {4},
      month = {June},
      abstract = {Accurate, simple, and quick measurement of anatomical deformities
      at preoperative stage is clinically important for decision making
      in surgery planning. The deformities include excessive torsional,
      angular, and curvature deformation. This paper presents computer-aided
      methods for automatically measuring anatomical deformities of long
      bones of the lower limb. A three-dimensional bone model reconstructed
      from CT scan data of the patient is used as input. Anatomical landmarks
      on femur and tibia bone models are automatically identified using
      geometric algorithms. Medial axes of femur and tibia bones, and anatomical
      landmarks are used to generate functional and reference axes. These
      methods have been implemented in a software program and tested on
      a set of CT scan data. Overall, the performance of the computerized
      methodology was better or similar to the manual method and its results
      were reproducible.},
      file = {Subburaj2010.pdf:Subburaj2010.pdf:PDF},
      issn = {1879-0771},
      keywords = {Computer Simulation,Femur,Femur: abnormalities,Femur: radiography,Femur:
      surgery,Humans,Image Enhancement,Image Enhancement: methods,Imaging,
      Three-Dimensional,Imaging, Three-Dimensional: methods,Models, Anatomic,Models,
      Biological,Orthopedics,Orthopedics: methods,Preoperative Care,Preoperative
      Care: methods,Prognosis,Radiographic Image Interpretation, Computer-Assist,Surgery,
      Computer-Assisted,Surgery, Computer-Assisted: methods,Tibia,Tibia:
      abnormalities,Tibia: radiography,Tibia: surgery,Tomography, X-Ray
      Computed,Tomography, X-Ray Computed: methods, TEC, OTS},
      owner = {thomaskroes},
      pmid = {19963346},
      publisher = {Elsevier Ltd},
      timestamp = {2010.10.25}
    }
  • K. Subburaj, B. Ravi, and M. Agarwal, “Automated identification of anatomical landmarks on 3D bone models reconstructed from CT scan images.,” Computerized medical imaging and graphics : the official journal of the Computerized Medical Imaging Society, vol. 33, iss. 5, pp. 359-68, 2009.
    [Bibtex]
    @ARTICLE{Subburaj2009,
      author = {Subburaj, K and Ravi, B and Agarwal, Manish},
      title = {Automated identification of anatomical landmarks on 3D bone models
      reconstructed from CT scan images.},
      journal = {Computerized medical imaging and graphics : the official journal
      of the Computerized Medical Imaging Society},
      year = {2009},
      volume = {33},
      pages = {359 - 68},
      number = {5},
      month = {July},
      abstract = {Identification of anatomical landmarks on skeletal tissue reconstructed
      from CT/MR images is indispensable in patient-specific preoperative
      planning (tumour referencing, deformity evaluation, resection planning,
      and implant alignment and anchoring) as well as intra-operative navigation
      (bone registration and instruments referencing). Interactive localisation
      of landmarks on patient-specific anatomical models is time-consuming
      and may lack in repeatability and accuracy. We present a computer
      graphics-based method for automatic localisation and identification
      (labelling) of anatomical landmarks on a 3D model of bone reconstructed
      from CT images of a patient. The model surface is segmented into
      different landmark regions (peak, ridge, pit and ravine) based on
      surface curvature. These regions are labelled automatically by an
      iterative process using a spatial adjacency relationship matrix between
      the landmarks. The methodology has been implemented in a software
      program and its results (automatically identified landmarks) are
      compared with those manually palpated by three experienced orthopaedic
      surgeons, on three 3D reconstructed bone models. The variability
      in location of landmarks was found to be in the range of 2.15-5.98
      mm by manual method (inter surgeon) and 1.92-4.88 mm by our program.
      Both methods performed well in identifying sharp features. Overall,
      the performance of the automated methodology was better or similar
      to the manual method and its results were reproducible. It is expected
      to have a variety of applications in surgery planning and intra-operative
      navigation.},
      issn = {1879-0771},
      keywords = {Arthroplasty, Replacement, Knee,Bone and Bones,Bone and Bones: radiography,Computer
      Simulation,Imaging, Three-Dimensional,Imaging, Three-Dimensional:
      methods,Knee Joint,Knee Joint: anatomy \& histology,Models, Anatomic,Orthopedics,Radiographic
      Image Interpretation, Computer-Assist,Surgery, Computer-Assisted,Tomography,
      X-Ray Computed, TEC},
      owner = {thomaskroes},
      pmid = {19345065},
      timestamp = {2010.10.25}
    }
  • K. Subburaj, B. Ravi, and M. G. Agarwal, “Automated 3D geometric reasoning in Computer Assisted joint reconstructive surgery,” 2009 IEEE International Conference on Automation Science and Engineering, pp. 367-372, 2009.
    [Bibtex]
    @ARTICLE{Subburaj2009a,
      author = {Subburaj, K. and Ravi, B. and Agarwal, M. G.},
      title = {Automated 3D geometric reasoning in Computer Assisted joint reconstructive
      surgery},
      journal = {2009 IEEE International Conference on Automation Science and Engineering},
      year = {2009},
      pages = {367 - 372},
      month = {August},
      abstract = {Computer Assisted Orthopedic Surgery (CAOS), employing information
      and computer graphics technologies for preoperative planning, intraoperative
      navigation, and for guiding or performing surgical interventions,
      has received very little attention for bone tumor surgery applications.
      We have developed a CAOS system called OrthoSYS, driven by geometric
      reasoning algorithms to visualize tumor size, shape, and plan for
      resection according to the tumor’s spread, starting from a 3D model
      reconstructed from CT images. Anatomical landmarks on bone are automatically
      identified and labeled, useful for registering patient model with
      virtual model during surgery and also as a reference for tumor resection
      and prosthesis positioning. The thickness of bone stock remaining
      after tumor resection is automatically analyzed to choose the best
      modular stem and fix the prosthesis. A method for prosthesis components
      selection using fuzzy logic has been developed to assist the surgeons.
      The medial axis of the long bones and anatomical landmarks are used
      for positioning the prosthesis in virtual planning and verification
      in the intraoperative stage. A set of anatomical metrics has been
      developed to measure the effectiveness of the prosthetic replacement
      of bone.},
      file = {Subburaj2009a.pdf:Subburaj2009a.pdf:PDF},
      isbn = {978-1-4244-4578-3},
      keywords = {TEC, OTS},
      owner = {thomaskroes},
      publisher = {IEEE},
      timestamp = {2010.10.25}
    }
  • N. Sugano, “Computer-assisted orthopedic surgery.,” Journal of orthopaedic science : official journal of the Japanese Orthopaedic Association, vol. 8, iss. 3, pp. 442-8, 2003.
    [Bibtex]
    @ARTICLE{Sugano2003,
      author = {Sugano, Nobuhiko},
      title = {Computer-assisted orthopedic surgery.},
      journal = {Journal of orthopaedic science : official journal of the Japanese
      Orthopaedic Association},
      year = {2003},
      volume = {8},
      pages = {442 - 8},
      number = {3},
      month = {January},
      abstract = {Computer-assisted surgery (CAS) utilizing robotic or image-guided
      technologies has been introduced into various orthopedic fields.
      Navigation and robotic systems are the most advanced parts of CAS,
      and their range of functions and applications is increasing. Surgical
      navigation is a visualization system that gives positional information
      about surgical tools or implants relative to a target organ (bone)
      on a computer display. There are three types of surgical planning
      that involve navigation systems. One makes use of volumetric images,
      such as computed tomography, magnetic resonance imaging, or ultrasound
      echograms. Another makes use of intraoperative fluoroscopic images.
      The last type makes use of kinetic information about joints or morphometric
      information about the target bones obtained intraoperatively. Systems
      that involve these planning methods are called volumetric image-based
      navigation, fluoroscopic navigation, and imageless navigation, respectively.
      To overcome the inaccuracy of hand-controlled positioning of surgical
      tools, three robotic systems have been developed. One type directs
      a cutting guide block or a drilling guide sleeve, with surgeons sliding
      a bone saw or a drill bit through the guide instrument to execute
      a surgical action. Another type constrains the range of movement
      of a surgical tool held by a robot arm such as ACROBOT. The last
      type is an active system, such as ROBODOC or CASPAR, which directs
      a milling device automatically according to preoperative planning.
      These CAS systems, their potential, and their limitations are reviewed
      here. Future technologies and future directions of CAS that will
      help provide improved patient outcomes in a cost-effective manner
      are also discussed.},
      file = {Sugano2003.pdf:Sugano2003.pdf:PDF},
      issn = {0949-2658},
      keywords = {Cost Savings,Humans,Orthopedic Procedures,Orthopedic Procedures: economics,Orthopedic
      Procedures: methods,Robotics,Surgery, Computer-Assisted,Surgery,
      Computer-Assisted: economics},
      owner = {thomaskroes},
      pmid = {12768493},
      timestamp = {2010.10.25}
    }
  • N. Suzuki and A. Hattori, “The road to surgical simulation and surgical navigation,” Virtual Reality, vol. 12, iss. 4, pp. 281-291, 2008.
    [Bibtex]
    @ARTICLE{Suzuki2008,
      author = {Suzuki, N. and Hattori, A.},
      title = {The road to surgical simulation and surgical navigation},
      journal = {Virtual Reality},
      year = {2008},
      volume = {12},
      pages = {281 - 291},
      number = {4},
      file = {Suzuki2008.pdf:Suzuki2008.pdf:PDF},
      issn = {1359-4338},
      owner = {thomaskroes},
      publisher = {Springer},
      timestamp = {2011.01.26}
    }
  • N. Suzuki, A. Hattori, A. Takatsu, A. Uchiyama, T. Kumano, A. Ikemoto, and Y. Adachi, “Virtual surgery simulator with force feedback function,” in Engineering in Medicine and Biology Society, 1998. Proceedings of the 20th Annual International Conference of the IEEE, 1998, pp. 1260-1262, vol. 3.
    [Bibtex]
    @INPROCEEDINGS{Suzuki1998,
      author = {Suzuki, N. and Hattori, A. and Takatsu, A. and Uchiyama, A. and Kumano,
      T. and Ikemoto, A. and Adachi, Y.},
      title = {Virtual surgery simulator with force feedback function},
      booktitle = {Engineering in Medicine and Biology Society, 1998. Proceedings of
      the 20th Annual International Conference of the IEEE},
      year = {1998},
      volume = {3},
      pages = {1260-1262 vol.3},
      abstract = {The authors developed a surgery planning system using virtual reality
      techniques which allows them to simulate incision of skin and organs
      which respond as elastic objects with surgical tools in virtual space.
      The authors also attempted to add a feedback function that responds
      to the pressure of the operator's hand using a force feedback device.},
      file = {Suzuki1998.pdf:Suzuki1998.pdf:PDF},
      keywords = {elastic objects;feedback function;force feedback device;force feedback
      function;incision simulation;operator's hand pressure;surgery planning
      system;surgical tools;virtual space;virtual surgery simulator;biological
      organs;biomechanics;biomedical equipment;force feedback;skin;surgery;virtual
      reality;},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
  • G. R. J. Swennen, W. Mollemans, and F. Schutyser, “Three-dimensional treatment planning of orthognathic surgery in the era of virtual imaging.,” Journal of oral and maxillofacial surgery : official journal of the American Association of Oral and Maxillofacial Surgeons, vol. 67, iss. 10, pp. 2080-92, 2009.
    [Bibtex]
    @ARTICLE{Swennen2009,
      author = {Swennen, Gwen R J and Mollemans, Wouter and Schutyser, Filip},
      title = {Three-dimensional treatment planning of orthognathic surgery in the
      era of virtual imaging.},
      journal = {Journal of oral and maxillofacial surgery : official journal of the
      American Association of Oral and Maxillofacial Surgeons},
      year = {2009},
      volume = {67},
      pages = {2080 - 92},
      number = {10},
      month = {October},
      abstract = {PURPOSE: The aim of this report was to present an integrated 3-dimensional
      (3D) virtual approach toward cone-beam computed tomography-based
      treatment planning of orthognathic surgery in the clinical routine.
      MATERIALS AND METHODS: We have described the different stages of
      the workflow process for routine 3D virtual treatment planning of
      orthognathic surgery: 1) image acquisition for 3D virtual orthognathic
      surgery; 2) processing of acquired image data toward a 3D virtual
      augmented model of the patient's head; 3) 3D virtual diagnosis of
      the patient; 4) 3D virtual treatment planning of orthognathic surgery;
      5) 3D virtual treatment planning communication; 6) 3D splint manufacturing;
      7) 3D virtual treatment planning transfer to the operating room;
      and 8) 3D virtual treatment outcome evaluation. CONCLUSIONS: The
      potential benefits and actual limits of an integrated 3D virtual
      approach for the treatment of the patient with a maxillofacial deformity
      are discussed comprehensively from our experience using 3D virtual
      treatment planning clinically.},
      file = {Swennen2009.pdf:Swennen2009.pdf:PDF},
      issn = {1531-5053},
      keywords = {Algorithms,Cephalometry,Cephalometry: methods,Communication,Computer
      Graphics,Computer Simulation,Computer-Aided Design,Cone-Beam Computed
      Tomography,Cone-Beam Computed Tomography: methods,Humans,Image Processing,
      Computer-Assisted,Image Processing, Computer-Assisted: methods,Imaging,
      Three-Dimensional,Imaging, Three-Dimensional: methods,Internet,Jaw,Jaw:
      surgery,Maxillofacial Abnormalities,Maxillofacial Abnormalities:
      radiography,Maxillofacial Abnormalities: surgery,Models, Anatomic,Orthodontics,
      Corrective,Osteotomy,Osteotomy: methods,Patient Care Planning,Splints,Surgery,
      Computer-Assisted,Treatment Outcome,User-Computer Interface, VOR,
      SUR, APP, CMS, PLA, RPP},
      owner = {thomaskroes},
      pmid = {19761902},
      publisher = {Elsevier Inc.},
      timestamp = {2010.10.25}
    }
  • T. Sørensen and J. Mosegaard, “An introduction to GPU accelerated surgical simulation,” Biomedical Simulation, pp. 93-104, 2006.
    [Bibtex]
    @ARTICLE{Sorensen2006,
      author = {S{\o}rensen, T. and Mosegaard, J.},
      title = {An introduction to GPU accelerated surgical simulation},
      journal = {Biomedical Simulation},
      year = {2006},
      pages = {93 - 104},
      file = {Sorensen2006.pdf:Sorensen2006.pdf:PDF},
      keywords = {GPU, PRS, OCS, TEC},
      owner = {Thomas},
      publisher = {Springer},
      timestamp = {2011.02.23}
    }
  • N. Tatarchuk, J. Shopf, and C. Decoro, “Advanced interactive medical visualization on the GPU,” Journal of Parallel and Distributed Computing, vol. 68, iss. 10, pp. 1319-1328, 2008.
    [Bibtex]
    @ARTICLE{Tatarchuk2008,
      author = {Tatarchuk, N and Shopf, J and Decoro, C},
      title = {Advanced interactive medical visualization on the GPU},
      journal = {Journal of Parallel and Distributed Computing},
      year = {2008},
      volume = {68},
      pages = {1319 - 1328},
      number = {10},
      month = {October},
      abstract = {Interactive visual analysis of a patient’s anatomy by means of computer-generated
      3D imagery is crucial for diagnosis, pre-operative planning, and
      surgical training. The task of visualization is no longer limited
      to producing images at interactive rates, but also includes the guided
      extraction of significant features to assist the user in the data
      exploration process. An effective visualization module has to perform
      a problem-specific abstraction of the dataset, leading to a more
      compact and hence more efficient visual representation. Moreover,
      many medical applications, such as surgical training simulators and
      pre-operative planning for plastic and reconstructive surgery, require
      the visualization of datasets that are dynamically modified or even
      generated by a physics-based simulation engine. In this paper we
      present a set of approaches that allow interactive exploration of
      medical datasets in real time. Our method combines direct volume
      rendering via ray-casting with a novel approach for isosurface extraction
      and re-use directly on graphics processing units (GPUs) in a single
      framework. The isosurface extraction technique takes advantage of
      the recently introduced Microsoft DirectX 10 pipeline for dynamic
      surface extraction in real time using geometry shaders. This surface
      is constructed in polygonal form and can be directly used post-extraction
      for collision detection, rendering, and optimization. The resulting
      polygonal surface can also be analyzed for geometric properties,
      such as feature area, volume and size deviation, which is crucial
      for semi-automatic tumor analysis as used, for example, in colonoscopy.
      Additionally, we have developed a technique for real-time volume
      data analysis by providing an interactive user interface for designing
      material properties for organs in the scanned volume. Combining isosurface
      with direct volume rendering allows visualization of the surface
      properties as well as the context of tissues surrounding the region
      and gives better context for navigation. Our application can be used
      with CT and MRI scan data, or with a variety of other medical and
      scientific applications. The techniques we present are general and
      intuitive to implement and can be used for many other interactive
      environments and effects, separately or together.},
      file = {Tatarchuk2008.pdf:Tatarchuk2008.pdf:PDF},
      issn = {07437315},
      keywords = {isosurface extraction},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • T. Tawara and K. Ono, “A framework for volume segmentation and visualization using Augmented Reality,” 2010 IEEE Symposium on 3D User Interfaces (3DUI), pp. 121-122, 2010.
    [Bibtex]
    @ARTICLE{Tawara2010,
      author = {Tawara, Takehiro and Ono, Kenji},
      title = {A framework for volume segmentation and visualization using Augmented
      Reality},
      journal = {2010 IEEE Symposium on 3D User Interfaces (3DUI)},
      year = {2010},
      pages = {121 - 122},
      month = {March},
      abstract = {We propose a two-handed direct manipulation system to achieve complex
      volume segmentation of CT/MRI data in Augmented Reality with a
      remote controller attached to a motion tracking cube. At the same
      time segmented data is displayed by direct volume rendering using
      a programmable GPU. Our system achieves visualization of real-time modification
      of volume data with complex shading including transparency control
      by changing transfer functions, displaying any cross section, and
      rendering multi materials using a local illumination model. Our goal
      is to build a system that facilitates direct manipulation of volumetric
      CT/MRI data for segmentation in Augmented Reality. Volume segmentation
      is a challenging problem and segmented data has an important role
      for visualization and analysis.},
      file = {Tawara2010.pdf:Tawara2010.pdf:PDF},
      isbn = {978-1-4244-6846-1},
      owner = {thomaskroes},
      publisher = {IEEE},
      timestamp = {2010.10.25}
    }
  • [DOI] C. A. Taylor, T. J. R. Hughes, and C. K. Zarins, “Finite element modeling of blood flow in arteries,” Computer Methods in Applied Mechanics and Engineering, vol. 158, iss. 1-2, pp. 155-196, 1998.
    [Bibtex]
    @ARTICLE{Taylor1998,
      author = {Charles A. Taylor and Thomas J. R. Hughes and Christopher K. Zarins},
      title = {Finite element modeling of blood flow in arteries},
      journal = {Computer Methods in Applied Mechanics and Engineering},
      year = {1998},
      volume = {158},
      pages = {155 - 196},
      number = {1-2},
      abstract = {A comprehensive finite element framework to enable the conduct of
      computational vascular research is described. The software system
      developed provides an integrated set of tools to solve clinically
      relevant blood flow problems and test hypotheses regarding hemodynamic
      (blood flow) factors in vascular adaptation and disease. The validity
      of the computational method was established by comparing the numerical
      results to an analytic solution for pulsatile flow as well as to
      published experimental flow data. The application of the finite element
      method to qualitatively and quantitatively assess the blood flow
      field in a number of clinically relevant models is described.},
      doi = {10.1016/S0045-7825(98)80008-X},
      file = {Taylor1998.pdf:Taylor1998.pdf:PDF},
      issn = {0045-7825},
      owner = {Thomas},
      timestamp = {2011.04.27},
      url = {http://www.sciencedirect.com/science/article/B6V29-3WN758P-8/2/7a4ec11214ec856f3f440f3784b21236}
    }
  • C. A. Taylor, M. T. Draney, J. P. Ku, D. Parker, B. N. Steele, K. Wang, and C. K. Zarins, “Predictive Medicine: Computational Techniques in Therapeutic Decision-Making,” Computer Aided Surgery, vol. 4, pp. 231-247, 1999.
    [Bibtex]
    @ARTICLE{Taylor1999,
      author = {Taylor, C.A. and Draney, M.T. and Ku, J.P. and Parker, D. and Steele,
      B.N. and Wang, K. and Zarins, C.K.},
      title = {Predictive Medicine: Computational Techniques in Therapeutic
      Decision-Making},
      journal = {Computer Aided Surgery},
      year = {1999},
      volume = {4},
      pages = {231 - 247},
      file = {Taylor1999.pdf:Taylor1999.pdf:PDF},
      keywords = {APP, VOR},
      owner = {thomaskroes},
      timestamp = {2011.01.06}
    }
  • R. H. Taylor and L. Joskowicz, “Computer-integrated surgery and medical robotics,” Standard Handbook of Biomedical Engineering and Design, pp. 325-353, 2002.
    [Bibtex]
    @ARTICLE{Taylor2002,
      author = {Taylor, R.H. and Joskowicz, L.},
      title = {Computer-integrated surgery and medical robotics},
      journal = {Standard Handbook of Biomedical Engineering and Design},
      year = {2002},
      pages = {325 - 353},
      file = {Taylor2002.pdf:Taylor2002.pdf:PDF},
      owner = {thomaskroes},
      publisher = {Citeseer},
      timestamp = {2010.12.14}
    }
  • R. H. Taylor and D. Stoianovici, “Medical Robotics in Computer-Integrated Surgery,” IEEE Transactions on Robotics and Automation, vol. 19, iss. 5, p. 765, 2003.
    [Bibtex]
    @ARTICLE{Taylor2003,
      author = {Taylor, R.H. and Stoianovici, D.},
      title = {Medical Robotics in Computer-Integrated Surgery},
      journal = {IEEE Transactions on Robotics and Automation},
      year = {2003},
      volume = {19},
      pages = {765},
      number = {5},
      file = {Taylor2003.pdf:Taylor2003.pdf:PDF},
      owner = {Thomas},
      timestamp = {2011.02.15}
    }
  • R. H. Taylor, J. Funda, L. Joskowicz, A. D. Kalvin, S. H. Gomory, A. P. Gueziec, and L. M. G. Brown, “An overview of computer-integrated surgery at the IBM Thomas J. Watson Research Center,” IBM Journal of Research and Development, vol. 40, iss. 2, pp. 163-183, 1996.
    [Bibtex]
    @ARTICLE{Taylor1996,
      author = {Taylor, R. H. and Funda, J. and Joskowicz, L. and Kalvin, A. D. and
      Gomory, S. H. and Gueziec, A. P. and Brown, L. M. G.},
      title = {An overview of computer-integrated surgery at the IBM Thomas J. Watson
      Research Center},
      journal = {IBM Journal of Research and Development},
      year = {1996},
      volume = {40},
      pages = {163 - 183},
      number = {2},
      month = {March},
      abstract = {This paper describes some past and current research activities at
      the IBM Thomas J. Watson Research Center. We begin with a brief overview
      of the emerging field of computer-integrated surgery, followed by
      a research strategy that enables a computer-oriented research laboratory
      such as ours to participate in this emerging field. We then present
      highlights of our past and current research in four key areas (orthopaedics,
      craniofacial surgery, minimally invasive surgery, and medical modeling)
      and elaborate on the relationship of this work to emerging
      topics in computer-integrated surgery.},
      file = {Taylor1996.pdf:Taylor1996.pdf:PDF},
      issn = {0018-8646},
      keywords = {REV},
      owner = {thomaskroes},
      timestamp = {2011.01.06}
    }
  • Z. A. Taylor, M. Cheng, and S. Ourselin, “High-speed nonlinear finite element analysis for surgical simulation using graphics processing units,” Medical Imaging, IEEE Transactions on, vol. 27, iss. 5, pp. 650-663, 2008.
    [Bibtex]
    @ARTICLE{Taylor2008,
      author = {Taylor, Z.A. and Cheng, M. and Ourselin, S.},
      title = {High-speed nonlinear finite element analysis for surgical simulation
      using graphics processing units},
      journal = {Medical Imaging, IEEE Transactions on},
      year = {2008},
      volume = {27},
      pages = {650 - 663},
      number = {5},
      file = {Taylor2008.pdf:Taylor2008.pdf:PDF},
      issn = {0278-0062},
      keywords = {TEC, REV},
      owner = {Th},
      publisher = {IEEE},
      timestamp = {2011.03.04}
    }
  • M. Terajima, A. Nakasima, Y. Aoki, T. Goto, K. Tokumori, N. Mori, and Y. Hoshino, “A 3-dimensional method for analyzing the morphology of patients with maxillofacial deformities,” American Journal of Orthodontics and Dentofacial Orthopedics, vol. 136, iss. 6, pp. 857-867, 2009.
    [Bibtex]
    @ARTICLE{Terajima2009,
      author = {Masahiko Terajima and Akihiko Nakasima and Yoshimitsu Aoki and Tazuko
      K. Goto and Kenji Tokumori and Noriko Mori and Yoshihiro Hoshino},
      title = {A 3-dimensional method for analyzing the morphology of patients with
      maxillofacial deformities},
      journal = {American Journal of Orthodontics and Dentofacial Orthopedics},
      year = {2009},
      volume = {136},
      pages = {857 - 867},
      number = {6},
      abstract = {INTRODUCTION: Traditionally, cephalograms have been used to evaluate
      a patient's maxillofacial skeleton and facial soft-tissue morphology.
      However, magnification and distortion of the cephalograms make detailed
      morphologic analysis difficult in patients with complex deformities.
      The purpose of this article was to introduce a new method for visualizing
      deformation and deviation of the maxillofacial skeleton and facial
      soft tissues. METHODS: Standard 3-dimensional Japanese head models
      were sized to match the sella-to-nasion distance obtained from 2
      patients' (1 man, 1 woman) maxillofacial skeletal images. Then, the
      scaled standard model was superimposed on each patient's 3-dimensional
      computed tomography image. RESULTS: This system provided clear shape
      information independent of size and facilitated the visualization
      of shape variations in maxillofacial skeletal and facial soft-tissue
      morphology. CONCLUSIONS: This method will be useful for 3-dimensional
      morphologic analysis of patients with jaw deformities.},
      file = {Terajima2009.pdf:Terajima2009.pdf:PDF},
      issn = {0889-5406},
      keywords = {TEC},
      owner = {Thomas},
      timestamp = {2011.02.09}
    }
  • D. Terzopoulos and K. Waters, “Physically-based facial modeling, analysis, and animation,” Journal of Visualization and Computer Animation, vol. 1, iss. 2, pp. 73-80, 1990.
    [Bibtex]
    @ARTICLE{Terzopoulos1990,
      author = {Terzopoulos, D. and Waters, K.},
      title = {Physically-based facial modeling, analysis, and animation},
      journal = {Journal of Visualization and Computer Animation},
      year = {1990},
      volume = {1},
      pages = {73 - 80},
      number = {2},
      file = {Terzopoulos1990.pdf:Terzopoulos1990.pdf:PDF},
      keywords = {TEC, OCS},
      owner = {thomaskroes},
      publisher = {Citeseer},
      timestamp = {2011.01.03}
    }
  • M. Teschner, “Realistic modeling of elasto-mechanical properties of soft tissue and its evaluation,” International Congress Series, vol. 1230, pp. 51-56, 2001.
    [Bibtex]
    @ARTICLE{Teschner2001,
      author = {Teschner, M},
      title = {Realistic modeling of elasto-mechanical properties of soft tissue
      and its evaluation},
      journal = {International Congress Series},
      year = {2001},
      volume = {1230},
      pages = {51 - 56},
      month = {June},
      abstract = {Computer-based techniques for the simulation of craniofacial surgical
      procedures and for the prediction of the surgical outcome have been
      shown to be very useful. However, the assessment of the accuracy
      of the simulated surgical outcome is difficult. In this paper, a
      technique is described that allows comparison of the simulated surgical
      outcome and the actual surgical result. The surgery simulation is
      based on a preoperative CT scan of the patient’s head and on a preoperative
      surface scan of the patient’s face. The simulated postoperative patient’s
      appearance is compared to a second surface scan, which is obtained
      postoperatively. The pre- and postoperative surface scans, which
      are different due to the surgery, are registered employing a robust
      registration method, which minimizes the median of Euclidean distances
      of corresponding points. Parameters of the soft-tissue model, which
      is used for the surgical simulation process, can be adapted with
      respect to minimized differences of corresponding points of the simulated
      postoperative and the actual postoperative surface of a patient’s
      face.},
      file = {Teschner2001.pdf:Teschner2001.pdf:PDF},
      issn = {05315131},
      keywords = {elasto-mechanical properties,realistic modeling,soft tissue, TEC,
      PRS},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • M. Teschner, S. Girod, and B. Girod, “Direct computation of nonlinear soft-tissue deformation,” Proc. Vision, Modeling, Visualization VMV’00, pp. 383-390, 2000.
    [Bibtex]
    @ARTICLE{Teschner2000,
      author = {Teschner, M. and Girod, S. and Girod, B.},
      title = {Direct computation of nonlinear soft-tissue deformation},
      journal = {Proc. Vision, Modeling, Visualization VMV'00},
      year = {2000},
      pages = {383 - 390},
      keywords = {TEC, PRS},
      owner = {Thomas},
      publisher = {Citeseer},
      timestamp = {2011.02.14}
    }
  • P. Tirelli, E. De Momi, N. Borghese, and G. Ferrigno, “Computer Assisted Neurosurgery,” International Journal of Computer Assisted Radiology and Surgery, vol. 4, pp. 85-91, 2009.
    [Bibtex]
    @ARTICLE{Tirelli2009,
      author = {Tirelli, P. and De Momi, E. and Borghese, NA and Ferrigno, G.},
      title = {Computer Assisted Neurosurgery},
      journal = {International Journal of Computer Assisted Radiology and Surgery},
      year = {2009},
      volume = {4},
      pages = {85 - 91},
      file = {Tirelli2009.pdf:Tirelli2009.pdf:PDF},
      issn = {1861-6410},
      keywords = {TEC},
      owner = {thomaskroes},
      publisher = {Springer},
      timestamp = {2011.01.11}
    }
  • L. Tockus, L. Joskowicz, A. Simkin, and C. Milgrom, “Computer-Aided Image-Guided Bone Fracture Surgery: Modeling, Visualization, and Preoperative Planning,” Bone, 1998.
    [Bibtex]
    @ARTICLE{Tockus1998,
      author = {Tockus, L and Joskowicz, L and Simkin, A and Milgrom, C},
      title = {Computer-Aided Image-Guided Bone Fracture Surgery: Modeling, Visualization,
      and Preoperative Planning},
      journal = {Bone},
      year = {1998},
      file = {Tockus1998.pdf:Tockus1998.pdf:PDF},
      keywords = {APP, PLA, GUI, OTS},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • M. Tory, A. E. Kirkpatrick, M. S. Atkins, and others, “Visualization task performance with 2D, 3D, and combination displays,” IEEE Transactions on Visualization and Computer Graphics, pp. 2-13, 2006.
    [Bibtex]
    @ARTICLE{Tory2006,
      author = {Tory, M. and Kirkpatrick, A.E. and Atkins, M.S. and others},
      title = {Visualization task performance with 2D, 3D, and combination displays},
      journal = {IEEE Transactions on Visualization and Computer Graphics},
      year = {2006},
      pages = {2 - 13},
      file = {Tory2006.pdf:Tory2006.pdf:PDF},
      issn = {1077-2626},
      keywords = {TEC},
      owner = {thomaskroes},
      publisher = {Published by the IEEE Computer Society},
      timestamp = {2011.01.04}
    }
  • M. Tory and T. Möller, “Human Factors In Visualization Research,” IEEE Transactions on Visualization and Computer Graphics, vol. 10, iss. 1, p. 1, 2004.
    [Bibtex]
    @ARTICLE{Tory2004,
      author = {Tory, M. and Möller, T.},
      title = {Human Factors In Visualization Research},
      journal = {IEEE Transactions on Visualization and Computer Graphics},
      year = {2004},
      volume = {10},
      pages = {1},
      number = {1},
      file = {Tory2004.pdf:Tory2004.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2011.01.06}
    }
  • M. Tory, N. Rober, T. Moller, A. Celler, and M. S. Atkins, “4D space-time techniques: a medical imaging case study,” Proceedings Visualization, 2001. VIS ’01., pp. 473-592, 2001.
    [Bibtex]
    @ARTICLE{Tory2001,
      author = {Tory, M. and Rober, N. and Moller, T. and Celler, A. and Atkins,
      M.S.},
      title = {4D space-time techniques: a medical imaging case study},
      journal = {Proceedings Visualization, 2001. VIS '01.},
      year = {2001},
      pages = {473 - 592},
      abstract = {We present the problem of visualizing time-varying medical data. Two
      medical imaging modalities are compared - MRI and dynamic SPECT.
      For each modality, we examine several derived scalar and vector quantities
      such as the change in intensity over time, the spatial gradient,
      and the change of the gradient over time. We compare several methods
      for presenting the data, including isosurfaces, direct volume rendering,
      and vector visualization using glyphs. These techniques may provide
      more information and context than methods currently used in practice;
      thus it is easier to discover temporal changes and abnormalities
      in a data set.},
      file = {Tory2001.pdf:Tory2001.pdf:PDF},
      isbn = {0-7803-7200-X},
      keywords = {3,3 animations,3 health,4d visualization,7 display algorithms,body,direct,dynamic
      spect,from the data acquired,glyph,i,isosurface,j,kinetic processes
      in the,mri,other keywords,using a,volume rendering},
      owner = {thomaskroes},
      publisher = {Ieee},
      timestamp = {2010.10.26}
    }
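The entry above derives quantities such as the change in intensity over time and the spatial gradient from time-varying volume data. As a hedged illustration only (function and parameter names are not from the paper), a plain finite-difference version for a volume shaped (t, z, y, x) could look like this:

    import numpy as np

    def derived_quantities(volume_4d, voxel_spacing=(1.0, 1.0, 1.0), dt=1.0):
        """Temporal intensity change and spatial gradient magnitude of a
        time-varying volume shaped (t, z, y, x), using finite differences."""
        d_intensity_dt = np.diff(volume_4d, axis=0) / dt           # change over time
        gz, gy, gx = np.gradient(volume_4d, *voxel_spacing, axis=(1, 2, 3))
        gradient_magnitude = np.sqrt(gx ** 2 + gy ** 2 + gz ** 2)  # spatial gradient
        return d_intensity_dt, gradient_magnitude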
  • J. Traub, T. Sielhorst, S. Heining, and N. Navab, “Advanced Display and Visualization Concepts for Image Guided Surgery,” Computer Aided Surgery, vol. 4, iss. 4, pp. 483-490, 2008.
    [Bibtex]
    @ARTICLE{Traub2008,
      author = {Traub, Joerg and Sielhorst, Tobias and Heining, Sandro-michael and
      Navab, Nassir},
      title = {Advanced Display and Visualization Concepts for Image Guided Surgery},
      journal = {Computer Aided Surgery},
      year = {2008},
      volume = {4},
      pages = {483 - 490},
      number = {4},
      abstract = {Thanks to its rapid development in the last decades, image guided
      surgery (IGS) has been introduced successfully in many modern operating
      rooms. Current IGS systems provide their navigation information on
      a standard computer monitor. Alternatively, one could enhance the
      direct sight of the physician by an overlay of the virtual data onto
      the real patient view. Such in situ visualization methods have been
      proposed in the literature for providing a more intuitive visualization,
      improving the ergonomics as well as the hand-eye coordination. In
      this paper, we first discuss the fundamental issues and the recent
      endeavors in advanced display and visualization for IGS. We then present
      some of our recent work comparing two navigation systems: 1) a classical
      monitor based navigation and 2) a new navigation system we had developed
      based on in situ visualization. As both solutions reveal shortcomings
      as well as complementary advantages, we introduce a new solution
      that combines both concepts into one hybrid user interface. Finally,
      experimental results report on the performance of several surgeons
      using an external monitor as well as a stereo video see-through head-mounted
      display (HMD). The experiments consist of drilling into a phantom
      in order to reach planted deep-seated targets only visible in Computed
      Tomography (CT) data. We evaluate several visualization techniques, including
      the new hybrid solution, and study their influence on the performance of the participant surgeons.},
      file = {Traub2008.pdf:Traub2008.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2010.10.26}
    }
  • M. Troulis, P. Everett, E. Seldin, R. Kikinis, and L. Kaban, “Development of a three-dimensional treatment planning system based on computed tomographic data,” International journal of oral and maxillofacial surgery, vol. 31, iss. 4, pp. 349-357, 2002.
    [Bibtex]
    @ARTICLE{Troulis2002,
      author = {Troulis, MJ and Everett, P. and Seldin, EB and Kikinis, R. and Kaban,
      LB},
      title = {Development of a three-dimensional treatment planning system based
      on computed tomographic data},
      journal = {International journal of oral and maxillofacial surgery},
      year = {2002},
      volume = {31},
      pages = {349 - 357},
      number = {4},
      file = {Troulis2002.pdf:Troulis2002.pdf:PDF},
      issn = {0901-5027},
      keywords = {APP, CMS, PLA, OCS, PRS},
      owner = {thomaskroes},
      publisher = {Elsevier},
      timestamp = {2011.01.10}
    }
  • M. Tsai, Y. Yeh, M. Hsieh, and M. Ma, “Glenoid and humerus bone analysis using CT transverse sections to automate gleno-humeral joint diagnoses and surgery managements.,” Computerized medical imaging and graphics : the official journal of the Computerized Medical Imaging Society, vol. 31, iss. 8, pp. 692-703, 2007.
    [Bibtex]
    @ARTICLE{Tsai2007,
      author = {Tsai, Ming-Dar and Yeh, Yi-Der and Hsieh, Ming-Shium and Ma, Ming},
      title = {Glenoid and humerus bone analysis using CT transverse sections to
      automate gleno-humeral joint diagnoses and surgery managements.},
      journal = {Computerized medical imaging and graphics : the official journal
      of the Computerized Medical Imaging Society},
      year = {2007},
      volume = {31},
      pages = {692 - 703},
      number = {8},
      month = {December},
      abstract = {This paper describes an image analysis method that evaluates the glenoid
      and humerus bone morphology to automate the gleno-humeral (GH) joint
      diagnoses and surgical managements. This method uses radial B-spline
      curves to approximate ellipse-like shoulder structures including
      the humeral stem, tubercle and contact joint as well as the glenoid
      on every CT transverse section. Radius changes from structure centers
      to bone boundaries are recognized as convex, concave, separate and
      hole features that are then identified as pathological spurs, fractures
      and tumors. The centers and radii of these structures from the transverse
      sections are integrated to determine the properties of the humeral
      stem and contact joint with the glenoid, including the stem axis
      as well as the contact joint and glenoid centers, radii and attitudes.
      Based on the geometric evaluations of these structures and features,
      the GH joint surgery including tumor dissect and bone graft, open
      reduction using screws and plate or nails, and arthroplasty are automatically
      managed to achieve the normal GH joint functions including dissection
      of tumors, reduction of fractures or dislocations, and free GH joint
      motions. This prototype system can be used as a qualitative and quantitative
      tool for the GH joint diseases diagnoses and surgery managements.
      A series of examples and case studies illustrate the effectiveness
      of this automated method.},
      file = {Tsai2007.pdf:Tsai2007.pdf:PDF},
      issn = {0895-6111},
      keywords = {Adult,Aged,Aged, 80 and over,Automation,Female,Humans,Humerus,Humerus:
      radiography,Humerus: surgery,Male,Middle Aged,Orthopedics,Tomography,
      X-Ray Computed,Tomography, X-Ray Computed: methods},
      owner = {thomaskroes},
      pmid = {17920814},
      timestamp = {2010.10.26}
    }
  • S. Tucker, L. H. S. Cevidanes, M. Styner, H. Kim, M. Reyes, W. Proffit, and T. Turvey, “Comparison of actual surgical outcomes and 3-dimensional surgical simulations.,” Journal of oral and maxillofacial surgery : official journal of the American Association of Oral and Maxillofacial Surgeons, vol. 68, iss. 10, pp. 2412-21, 2010.
    [Bibtex]
    @ARTICLE{Tucker2010,
      author = {Tucker, Scott and Cevidanes, Lucia Helena Soares and Styner, Martin
      and Kim, Hyungmin and Reyes, Mauricio and Proffit, William and Turvey,
      Timothy},
      title = {Comparison of actual surgical outcomes and 3-dimensional surgical
      simulations.},
      journal = {Journal of oral and maxillofacial surgery : official journal of the
      American Association of Oral and Maxillofacial Surgeons},
      year = {2010},
      volume = {68},
      pages = {2412 - 21},
      number = {10},
      month = {October},
      abstract = {PURPOSE: The advent of imaging software programs has proved to be
      useful for diagnosis, treatment planning, and outcome measurement,
      but precision of 3-dimensional (3D) surgical simulation still needs
      to be tested. This study was conducted to determine whether the virtual
      surgery performed on 3D models constructed from cone-beam computed
      tomography (CBCT) can correctly simulate the actual surgical outcome
      and to validate the ability of this emerging technology to recreate
      the orthognathic surgery hard tissue movements in 3 translational
      and 3 rotational planes of space. MATERIALS AND METHODS: Construction
      of pre- and postsurgery 3D models from CBCTs of 14 patients who had
      combined maxillary advancement and mandibular setback surgery and
      6 patients who had 1-piece maxillary advancement surgery was performed.
      The postsurgery and virtually simulated surgery 3D models were registered
      at the cranial base to quantify differences between simulated and
      actual surgery models. Hotelling t tests were used to assess the
      differences between simulated and actual surgical outcomes. RESULTS:
      For all anatomic regions of interest, there was no statistically
      significant difference between the simulated and the actual surgical
      models. The right lateral ramus was the only region that showed a
      statistically significant, but small difference when comparing 2-
      and 1-jaw surgeries. CONCLUSIONS: Virtual surgical methods were reliably
      reproduced. Oral surgery residents could benefit from virtual surgical
      training. Computer simulation has the potential to increase predictability
      in the operating room.},
      file = {Tucker2010.pdf:Tucker2010.pdf:PDF},
      issn = {1531-5053},
      keywords = {Adolescent,Adult,Computer Simulation,Cone-Beam Computed Tomography,Female,Humans,Imaging,
      Three-Dimensional,Jaw,Jaw: radiography,Jaw: surgery,Male,Models,
      Anatomic,Orthognathic Surgical Procedures,Reproducibility of Results,Statistics,
      Nonparametric,Subtraction Technique,User-Computer Interface,Young
      Adult, APP, CMS, PLA, SUR},
      owner = {thomaskroes},
      pmid = {20591553},
      publisher = {Elsevier Inc.},
      timestamp = {2010.10.25}
    }
  • G. Turini, N. Pietroni, F. Ganovelli, and R. Scopigno, “Techniques for computer assisted surgery,” in Eurographics Italian Chapter Conference, vol. 2007, 2007.
    [Bibtex]
    @CONFERENCE{Turini2007,
      author = {Turini, G. and Pietroni, N. and Ganovelli, F. and Scopigno, R.},
      title = {Techniques for computer assisted surgery},
      booktitle = {Eurographics Italian Chapter Conference},
      year = {2007},
      volume = {2007},
      organization = {Citeseer},
      file = {Turini2007.pdf:Turini2007.pdf:PDF},
      keywords = {TEC},
      owner = {Thomas},
      timestamp = {2011.03.09}
    }
  • E. Valstar, “Towards computer-assisted surgery in shoulder joint replacement,” ISPRS Journal of Photogrammetry and Remote Sensing, vol. 56, iss. 5-6, pp. 326-337, 2002.
    [Bibtex]
    @ARTICLE{Valstar2002,
      author = {Valstar, E},
      title = {Towards computer-assisted surgery in shoulder joint replacement},
      journal = {ISPRS Journal of Photogrammetry and Remote Sensing},
      year = {2002},
      volume = {56},
      pages = {326 - 337},
      number = {5-6},
      month = {August},
      abstract = {A research programme that aims to improve the state of the art in
      shoulder joint replacement surgery has been initiated at the Delft
      University of Technology. Development of improved endoprostheses
      for the upper extremities (DIPEX), as this effort is called, is a
      clinically driven multidisciplinary programme consisting of many
      contributory aspects. A part of this research programme focuses on
      the pre-operative planning and per-operative guidance issues. The
      ultimate goal of this part of the DIPEX project is to create a surgical
      support infrastructure that can be used to predict the optimal surgical
      protocol and can assist with the selection of the most suitable endoprosthesis
      for a particular patient. In the pre-operative planning phase, advanced
      biomechanical models of the endoprosthesis fixation and the musculo-skeletal
      system of the shoulder will be incorporated, which are adjusted to
      the individual’s morphology. Subsequently, the support infrastructure
      must assist the surgeon during the operation in executing his surgical
      plan. In the per-operative phase, the chosen optimal position of
      the endoprosthesis can be realised using camera-assisted tools or
      mechanical guidance tools. In this article, the pathway towards the
      desired surgical support infrastructure is described. Furthermore,
      we discuss the pre-operative planning phase and the per- operative
      guidance phase, the initial work performed, and finally, possible
      approaches for improving prosthesis placement.},
      file = {Valstar2002.pdf:Valstar2002.pdf:PDF},
      issn = {09242716},
      keywords = {applications,computer-assisted orthopaedic surgery,ct,data registration,medical,mri,navigation,per-operative
      guidance,pre-operative planning,shoulder arthroplasty,tracking,visualisation,x-ray},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • E. Valstar, F. de Jong, H. Vrooman, P. Rozing, and J. Reiber, “Model-based Roentgen stereophotogrammetry of orthopaedic implants,” Journal of Biomechanics, vol. 34, iss. 6, pp. 715-722, 2001.
    [Bibtex]
    @ARTICLE{Valstar2001,
      author = {Valstar, ER and de Jong, FW and Vrooman, HA and Rozing, PM and Reiber,
      JHC},
      title = {Model-based Roentgen stereophotogrammetry of orthopaedic implants},
      journal = {Journal of Biomechanics},
      year = {2001},
      volume = {34},
      pages = {715 - 722},
      number = {6},
      abstract = {Attaching tantalum markers to prostheses for Roentgen stereophotogrammetry
      (RSA) may be difficult and is sometimes even impossible. In this study,
      a model-based RSA method that avoids the attachment of markers to
      prostheses is presented and validated. This model-based RSA method
      uses a triangulated surface model of the implant. A projected contour
      of this model is calculated and this calculated model contour is
      matched onto the detected contour of the actual implant in the RSA
      radiograph. The difference between the two contours is minimized by
      variation of the position and orientation of the model. When a minimal
      difference between the contours is found, an optimal position and
      orientation of the model has been obtained. The method was validated
      by means of a phantom experiment. Three prosthesis components were
      used in this experiment: the femoral and tibial component of an Interax
      total knee prosthesis (Stryker Howmedica Osteonics Corp., Rutherford,
      USA) and the femoral component of a Profix total knee prosthesis (Smith
      \& Nephew, Memphis, USA). For the prosthesis components used in this
      study, the accuracy of the model-based method is lower than the accuracy
      of traditional RSA. For the Interax femoral and tibial components,
      significant dimensional tolerances were found that were probably caused
      by the casting process and manual polishing of the components' surfaces.
      The largest standard deviation for any translation
      was 0.19 mm and for any rotation it was 0.52°. For the Profix femoral
      component that had no large dimensional tolerances, the largest standard
      deviation for any translation was 0.22 mm and for any rotation it
      was 0.22°. From this study we may conclude that the accuracy of the
      current model-based RSA method is sensitive to dimensional tolerances
      of the implant. Research is now being conducted to make model-based
      RSA less sensitive to dimensional tolerances and thereby improving
      its accuracy.},
      file = {Valstar2001.pdf:Valstar2001.pdf:PDF},
      issn = {0021-9290},
      owner = {thomaskroes},
      publisher = {Elsevier},
      timestamp = {2011.01.05}
    }
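The model-based RSA method summarised in the entry above searches for the implant pose that minimises the difference between the projected model contour and the contour detected in the radiograph. A minimal sketch of such a pose-scoring function is given below; the simple pinhole projection stands in for the calibrated Roentgen geometry, and all names are illustrative rather than the published method's interface.

    import numpy as np

    def project(points_3d, R, t, focal=1000.0):
        """Pinhole projection of implant model points after applying a candidate
        pose (R, t); a stand-in for the calibrated RSA imaging geometry."""
        p = points_3d @ R.T + t
        return focal * p[:, :2] / p[:, 2:3]

    def contour_mismatch(model_contour_3d, detected_contour_2d, R, t):
        """RMS distance between the projected model contour and the detected
        implant contour; a pose search would minimise a measure like this."""
        proj = project(model_contour_3d, R, t)
        d = np.linalg.norm(proj[:, None, :] - detected_contour_2d[None, :, :],
                           axis=2).min(axis=1)
        return float(np.sqrt(np.mean(d ** 2)))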
  • K. Van Brussel, J. Vander Sloten, R. Van Audekercke, and G. Fabry, “Internal fixation of the spine in traumatic and scoliotic cases. The potential of pedicle screws,” Technology and Health Care, vol. 4, iss. 4, pp. 365-384, 1996.
    [Bibtex]
    @ARTICLE{VanBrussel1996,
      author = {Van Brussel, K. and Vander Sloten, J. and Van Audekercke, R. and
      Fabry, G.},
      title = {Internal fixation of the spine in traumatic and scoliotic cases.
      The potential of pedicle screws},
      journal = {Technology and Health Care},
      year = {1996},
      volume = {4},
      pages = {365 - 384},
      number = {4},
      file = {VanBrussel1996.pdf:VanBrussel1996.pdf:PDF},
      issn = {0928-7329},
      owner = {Thomas},
      publisher = {IOS Press},
      timestamp = {2011.02.07}
    }
  • J. Van Cleynenbreugel, F. Schutyser, J. Goffin, K. Van Brussel, and P. Suetens, “Image-based planning and validation of C1–C2 transarticular screw fixation using personalized drill guides,” Computer Aided Surgery, vol. 7, iss. 1, pp. 41-48, 2002.
    [Bibtex]
    @ARTICLE{VanCleynenbreugel2002,
      author = {Van Cleynenbreugel, J. and Schutyser, F. and Goffin, J. and Van Brussel,
      K. and Suetens, P.},
      title = {Image-based planning and validation of C1--C2 transarticular screw
      fixation using personalized drill guides},
      journal = {Computer Aided Surgery},
      year = {2002},
      volume = {7},
      pages = {41 - 48},
      number = {1},
      file = {VanCleynenbreugel2002.pdf:VanCleynenbreugel2002.pdf:PDF},
      issn = {1097-0150},
      keywords = {TEC, OTS, TRM},
      owner = {Thomas},
      publisher = {Wiley Online Library},
      timestamp = {2011.02.07}
    }
  • S. Vanforeesttimp, “Iso-surface volume rendering for implant surgery,” International Congress Series, vol. 1230, pp. 733-738, 2001.
    [Bibtex]
    @ARTICLE{VanForeestTimp2001,
      author = {Vanforeesttimp, S},
      title = {Iso-surface volume rendering for implant surgery},
      journal = {International Congress Series},
      year = {2001},
      volume = {1230},
      pages = {733 - 738},
      month = {June},
      abstract = {Many clinical situations ask for the simultaneous visualization of
      anatomical surfaces and synthetic meshes. Common examples include
      hip replacement surgery, intra-operative visualization of surgical
      instruments or probes, visualization of planning information, or
      implant surgery. To be useful for treatment planning and surgery,
      the combined visualization of anatomical information with mesh-surfaces
      should meet some requirements. First, anatomical information should
      be as accurate as possible to give the clinician a realistic view
      of the different structures. Second, the clinician should be able
      to interactively position the synthetic mesh, for example, an implant
      or surgical instrument, judge the positions with respect to the anatomy,
      and make adjustments where necessary. We developed a technique that
      meets these requirements and displays anatomical surfaces together
      with synthetic meshes. The usefulness of this technique will be demonstrated
      by applying it to dental implant surgery. The technique enables clinicians
      to interactively visualize the jaw in conjunction with a constructed
      drilling template.},
      file = {VanForeestTimp2001.pdf:VanForeestTimp2001.pdf:PDF},
      issn = {05315131},
      keywords = {depth-buffer,iso-surface volume rendering,oral implant surgery,surgical
      planning, TEC, VOR},
      owner = {thomaskroes},
      timestamp = {2010.10.26}
    }
  • A. Varol and S. Basa, “The role of computer-aided 3D surgery and stereolithographic modelling for vector orientation in premaxillary and trans-sinusoidal maxillary distraction osteogenesis,” The International Journal of Medical Robotics and Computer Assisted Surgery, vol. 5, iss. 2, pp. 198-206, 2009.
    [Bibtex]
    @ARTICLE{Varol2009,
      author = {Varol, A. and Basa, S.},
      title = {The role of computer-aided 3D surgery and stereolithographic modelling
      for vector orientation in premaxillary and trans-sinusoidal maxillary
      distraction osteogenesis},
      journal = {The International Journal of Medical Robotics and Computer Assisted
      Surgery},
      year = {2009},
      volume = {5},
      pages = {198 - 206},
      number = {2},
      file = {Varol2009.pdf:Varol2009.pdf:PDF},
      issn = {1478-596X},
      keywords = {RPP, CMS, OCS, APP, PLA},
      publisher = {Wiley Online Library}
    }
  • M. T. Vesel, B. S. Olsen, J. O. Subjerg, P. Helmig, and O. Sneppen, “Humeral head size in shoulder arthroplasty: a kinematic study,” Journal of Shoulder and Elbow Surgery, vol. 6, iss. 6, pp. 549-555, 1997.
    [Bibtex]
    @ARTICLE{Vesel1997,
      author = {Vesel, M.T. and Olsen, B.S. and Subjerg, J.O. and Helmig, P. and
      Sneppen, O.},
      title = {Humeral head size in shoulder arthroplasty: a kinematic study},
      journal = {Journal of Shoulder and Elbow Surgery},
      year = {1997},
      volume = {6},
      pages = {549 - 555},
      number = {6},
      file = {Vesel1997.pdf:Vesel1997.pdf:PDF},
      owner = {thomaskroes},
      publisher = {Elsevier},
      timestamp = {2010.10.26}
    }
  • M. Vetter, I. Wolf, P. Hassenpflug, M. Hastenteufel, R. Ludwig, L. Grenacher, G. M. Richter, W. Uhl, M. W. Büchler, and H. P. Meinzer, “Navigation aids and real-time deformation modeling for open liver surgery,” in Proceedings of SPIE, vol. 5029, p. 58, 2003.
    [Bibtex]
    @CONFERENCE{Vetter2003,
      author = {Vetter, M. and Wolf, I. and Hassenpflug, P. and Hastenteufel, M.
      and Ludwig, R. and Grenacher, L. and Richter, G.M. and Uhl, W. and
      Büchler, M.W. and Meinzer, H.P.},
      title = {Navigation aids and real-time deformation modeling for open liver
      surgery},
      booktitle = {Proceedings of SPIE},
      year = {2003},
      volume = {5029},
      pages = {58},
      file = {Vetter2003.pdf:Vetter2003.pdf:PDF},
      keywords = {APP, GUI, HES, VOR, SLR, SUR},
      owner = {thomaskroes},
      timestamp = {2011.01.26}
    }
  • E. Vezzetti, F. Calignano, and S. Moos, “Computer-aided morphological analysis for maxillo-facial diagnostic: a preliminary study,” Journal of Plastic, Reconstructive & Aesthetic Surgery, vol. 63, iss. 2, pp. 218-226, 2010.
    [Bibtex]
    @ARTICLE{Vezzetti2010,
      author = {Enrico Vezzetti and Flaviana Calignano and Sandro Moos},
      title = {Computer-aided morphological analysis for maxillo-facial diagnostic:
      a preliminary study},
      journal = {Journal of Plastic, Reconstructive \& Aesthetic Surgery},
      year = {2010},
      volume = {63},
      pages = {218 - 226},
      number = {2},
      abstract = {This article compares most of the three-dimensional (3D) morphometric
      methods currently proposed by the technical literature to evaluate
      their morphological informative value, while applying them to a case
      study of five patients affected by the malocclusion pathology. The
      compared methods are: conventional cephalometric analysis (CCA),
      generalised Procrustes superimposition (GPS) with principal-components
      analysis (PCA), thin-plate spline analysis (TPS), multisectional
      spline (MS) and clearance vector mapping (CVM). The results show
      that MS provides more reliable and useful diagnostic information.},
      file = {Vezzetti2010.pdf:Vezzetti2010.pdf:PDF},
      issn = {1748-6815},
      keywords = {3D Scanner, REV},
      owner = {Thomas},
      timestamp = {2011.02.14}
    }
  • M. Viceconti, A. Chiarini, D. Testi, F. Taddei, B. Bordini, F. Traina, and A. Toni, “New aspects and approaches in pre-operative planning of hip reconstruction: a computer simulation,” Langenbeck’s Archives of Surgery, vol. 389, iss. 5, pp. 400-404, 2004.
    [Bibtex]
    @ARTICLE{Viceconti2004,
      author = {Viceconti, M. and Chiarini, A. and Testi, D. and Taddei, F. and Bordini,
      B. and Traina, F. and Toni, A.},
      title = {New aspects and approaches in pre-operative planning of hip reconstruction:
      a computer simulation},
      journal = {Langenbeck's Archives of Surgery},
      year = {2004},
      volume = {389},
      pages = {400 - 404},
      number = {5},
      file = {Viceconti2004.pdf:Viceconti2004.pdf:PDF},
      issn = {1435-2443},
      owner = {thomaskroes},
      publisher = {Springer},
      timestamp = {2011.01.12}
    }
  • M. Viceconti, R. Lattanzi, B. Antonietti, S. Paderni, R. Olmi, A. Sudanese, and A. Toni, “CT-based surgical planning software improves the accuracy of total hip replacement preoperative planning.,” Medical engineering & physics, vol. 25, iss. 5, p. 371, 2003.
    [Bibtex]
    @ARTICLE{Viceconti2003,
      author = {Viceconti, M. and Lattanzi, R. and Antonietti, B. and Paderni, S.
      and Olmi, R. and Sudanese, A. and Toni, A.},
      title = {CT-based surgical planning software improves the accuracy of total
      hip replacement preoperative planning.},
      journal = {Medical engineering \& physics},
      year = {2003},
      volume = {25},
      pages = {371},
      number = {5},
      owner = {Thomas},
      timestamp = {2011.02.03}
    }
  • M. Viceconti, R. Lattanzi, C. Zannoni, and A. Cappello, “Effect of display modality on spatial accuracy of orthopaedic surgery pre-operative planning applications,” Informatics for Health and Social Care, vol. 27, iss. 1, pp. 21-32, 2002.
    [Bibtex]
    @ARTICLE{Viceconti2002,
      author = {Viceconti, M. and Lattanzi, R. and Zannoni, C. and Cappello, A.},
      title = {Effect of display modality on spatial accuracy of orthopaedic surgery
      pre-operative planning applications},
      journal = {Informatics for Health and Social Care},
      year = {2002},
      volume = {27},
      pages = {21 - 32},
      number = {1},
      file = {Viceconti2002.pdf:Viceconti2002.pdf:PDF},
      issn = {1463-9238},
      keywords = {APP, OTS, PLA, SLR, SUR},
      owner = {Thomas},
      publisher = {Informa UK Ltd UK},
      timestamp = {2011.02.03}
    }
  • F. P. Vidal, F. Bello, K. W. Brodlie, N. W. John, D. Gould, R. Phillips, and N. J. Avis, “Principles and Applications of Computer Graphics in Medicine,” Computer Graphics Forum, vol. 25, iss. 1, pp. 113-137, 2006.
    [Bibtex]
    @ARTICLE{Vidal2006,
      author = {Vidal, F P and Bello, F and Brodlie, K W and John, N W and Gould,
      D and Phillips, R and Avis, N J},
      title = {Principles and Applications of Computer Graphics in Medicine},
      journal = {Computer Graphics Forum},
      year = {2006},
      volume = {25},
      pages = {113 - 137},
      number = {1},
      file = {Vidal2006.pdf:Vidal2006.pdf:PDF},
      keywords = {1 information interfaces and,2,3,5,5 computer graphics,acm ccs,and
      virtual realities,artificial,augmented,augmented and virtual realities,com-,computer
      graphics,graphics systems,h,health,i,medical sciences,physically-based
      modeling,presentation,remote systems,simulation,stand-alone systems,visualization,
      REV},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • F. P. Vidal, F. Bello, K. Brodlie, N. W. John, D. Gould, R. Phillips, and N. J. Avis, “Principles and Applications of Medical Virtual Environments,” World, 2004.
    [Bibtex]
    @ARTICLE{Idal2004,
      author = {Vidal, Franck P and Bello, Fernando and Brodlie, Ken and John,
      Nigel W and Gould, Derek and Phillips, Roger and Avis, Nick J},
      title = {Principles and Applications of Medical Virtual Environments},
      journal = {World},
      year = {2004},
      abstract = {The medical domain offers many excellent opportunities for the application
      of computer graphics, visualization, and virtual environments, offering
      the potential to help improve healthcare and bring benefits to patients.
      This report provides a comprehensive overview of the state-of-the-art
      in this exciting field. It has been written from the perspective
      of both computer scientists and practicing clinicians and documents
      past and current successes together with the challenges that lie
      ahead. The report begins with a description of the commonly used
      imaging modalities and then details the software algorithms and hardware
      that allows visualization of and interaction with this data. Example
      applications from research projects and commercially available products
      are listed, including educational tools; diagnostic aids; virtual
      endoscopy; planning aids; guidance aids; skills training; computer
      augmented reality; and robotics. The final section of the report
      summarises the current issues and looks ahead to future developments.},
      file = {Idal2004.pdf:Idal2004.pdf:PDF},
      keywords = {augmented and virtual realities,computer graphics,health,medical sciences,modelling,physically
      based,simulation,virtual device interfaces, REV},
      owner = {thomaskroes},
      timestamp = {2010.10.22}
    }
  • D. J. Vining and D. W. Gelfand, “Noninvasive colonoscopy using helical CT scanning, 3D reconstruction, and virtual reality,” in 23rd annual meeting and postgraduate course of the Society of Gastrointestinal Radiologists, Maui, Hawaii, 1994.
    [Bibtex]
    @CONFERENCE{Vining1994,
      author = {Vining, D.J. and Gelfand, D.W.},
      title = {Noninvasive colonoscopy using helical CT scanning, 3D reconstruction,
      and virtual reality},
      booktitle = {23rd annual meeting and postgraduate course of the Society of Gastrointestinal
      Radiologists, Maui, Hawaii},
      year = {1994},
      owner = {thomaskroes},
      timestamp = {2011.01.06}
    }
  • M. Vitrani, G. Morel, and T. Ortmaier, “Automatic Guidance of a Surgical Instrument with Ultrasound Based Visual Servoing,” System, iss. April, pp. 508-513, 2005.
    [Bibtex]
    @ARTICLE{Vitrani2005,
      author = {Vitrani, Marie-aude and Morel, Guillaume and Ortmaier, Tobias},
      title = {Automatic Guidance of a Surgical Instrument with Ultrasound Based
      Visual Servoing},
      journal = {System},
      year = {2005},
      pages = {508 - 513},
      number = {April},
      abstract = {Visual servoing is a possible solution to assist the surgeon in performing
      tasks under ultrasound (US) imaging. To this aim, a system was developed
      that allows the surgeon to select a desired instrument location on
      a US image. Then a robot is programmed to automatically move the
      instrument towards the selected location. This approach requires
      robust tracking of the instrument in the US image, together with
      modeling of the overall system and implementation of a visual servoing
      loop. This paper presents geometrical and kinematic models of the
      system, as well as the control loop design, which is validated through
      both numerical simulations, and results of in vitro experiments.},
      file = {Vitrani2005.pdf:Vitrani2005.pdf:PDF},
      keywords = {APP, GUI},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • D. A. Vorp, D. A. Steinman, and C. R. Ethier, “Computational modeling of arterial biomechanics,” Computing in Science & Engineering, vol. 3, iss. 5, pp. 51-64, 2001.
    [Bibtex]
    @ARTICLE{Vorp2001,
      author = {Vorp, D.A. and Steinman, D.A. and Ethier, C.R.},
      title = {Computational modeling of arterial biomechanics},
      journal = {Computing in Science \& Engineering},
      year = {2001},
      volume = {3},
      pages = {51--64},
      number = {5},
      issn = {0740-7475},
      publisher = {IEEE}
    }
  • F. Vos, I. Serlie, R. van Gelder, F. Post, R. Truyen, F. Gerritsen, J. Stoker, and A. Vossepoel, “A New Visualization Method for Virtual Colonoscopy,” in Medical Image Computing and Computer-Assisted Intervention – MICCAI 2001, W. Niessen and M. Viergever, Eds., Springer Berlin / Heidelberg, 2001, vol. 2208, pp. 645-654.
    [Bibtex]
    @INCOLLECTION{Vos2001,
      author = {Vos, F. and Serlie, I. and van Gelder, R. and Post, F. and Truyen,
      R. and Gerritsen, F. and Stoker, J. and Vossepoel, A.},
      title = {A New Visualization Method for Virtual Colonoscopy},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention – MICCAI
      2001},
      publisher = {Springer Berlin / Heidelberg},
      year = {2001},
      editor = {Niessen, Wiro and Viergever, Max},
      volume = {2208},
      series = {Lecture Notes in Computer Science},
      pages = {645-654},
      abstract = {Virtual colonoscopy or ‘colonography’ is a patient-friendly, modern
      screening technique for polyps. Automatic detection of polyps can
      serve to assist the radiologist. This paper presents a method based
      on clustering the principal curvatures. Via automatic polyp detection
      5/6 polyps (>5 mm) were detected at the expense of 9 false positive
      findings per case. For visualization, the bowel surface is presented
      to the physician in a ‘panoramic’ way as a sequence of unfolded cubes.
      Conventionally, only 93% of the colon surface is available for examination.
      In our approach the area in view is increased to 99.8%. The unfolded
      cube visualization is another step to optimize polyp detection by
      visual examination. Experiments show a sensitivity of 10/10 (on a
      per patient basis) for any polyp. The specificity was 7/10.},
      affiliation = {Pattern Recognition Group, Delft University of Technology, Lorentzweg
      1, 2628 CJ Delft, The Netherlands},
      file = {Vos2001.pdf:Vos2001.pdf:PDF},
      keywords = {TAS, TEC},
      owner = {thomaskroes},
      timestamp = {2010.12.15}
    }
  • J. Wadley, N. Dorward, N. Kitchen, and D. Thomas, “Pre-operative planning and intra-operative guidance in modern neurosurgery: a review of 300 cases.,” Annals of The Royal College of Surgeons of England, vol. 81, iss. 4, p. 217, 1999.
    [Bibtex]
    @ARTICLE{Wadley1999,
      author = {Wadley, J. and Dorward, N. and Kitchen, N. and Thomas, D.},
      title = {Pre-operative planning and intra-operative guidance in modern neurosurgery:
      a review of 300 cases.},
      journal = {Annals of The Royal College of Surgeons of England},
      year = {1999},
      volume = {81},
      pages = {217},
      number = {4},
      abstract = {Operative neurosurgery has recently entered an exciting era of image
      guided surgery or neuronavigation and application of this novel technology
      is beginning to have a significant impact in many ways in a variety
      of intracranial procedures. In order to fully assess the advantages
      of image guided techniques over conventional planning and surgery
      in selected cases, detailed prospective evaluation has been carried
      out during the advanced development of an optically tracked neuronavigation
      system. Over a 2-year period, 300 operative neurosurgical procedures
      have been performed with the assistance of interactive image guidance,
      as well as the development of new software applications and hardware
      tools. A broad range of intracranial neurosurgical procedures were
      seen to benefit from image guidance, including 163 craniotomies,
      53 interactive stereotactic
      biopsies, 7 tracked neuroendoscopies and 37 complex skull base procedures.
      The most common pathological diagnoses were cerebral glioma in 98
      cases, meningioma in 64 and metastasis in 23.
      
      Detailed analysis of a battery of postoperative questions revealed
      benefits in operative planning, appreciation of anatomy, lesion location,
      safety of surgery and greatly enhanced surgical confidence. The authors
      believe that image guided surgical technology, with new developments
      such as those described, has a significant role to play in contemporary
      neurosurgery and its widespread adoption in practice will be realised
      in the near future.},
      file = {Wadley1999.pdf:Wadley1999.pdf:PDF},
      keywords = {REV, NES},
      owner = {thomaskroes},
      publisher = {Royal College of Surgeons of England},
      timestamp = {2010.11.19}
    }
  • A. Wang, A. Parrent, S. Mirsattari, and T. Peters, “Computer Assisted Neurosurgery,” International Journal of Computer Assisted Radiology and Surgery, vol. 5, pp. 106-113, 2010.
    [Bibtex]
    @ARTICLE{Wang2010,
      author = {Wang, A. and Parrent, A. and Mirsattari, S. and Peters, T.},
      title = {Computer Assisted Neurosurgery},
      journal = {International Journal of Computer Assisted Radiology and Surgery},
      year = {2010},
      volume = {5},
      pages = {106--113},
      file = {Wang2010.pdf:Wang2010.pdf:PDF},
      issn = {1861-6410},
      keywords = {APP, NES},
      owner = {thomaskroes},
      publisher = {Springer},
      timestamp = {2011.01.11}
    }
  • S. Wang and J. Yang, “Efficient collision detection for soft tissue simulation in a surgical planning system,” in Computer-Aided Design and Computer Graphics, 2009. CAD/Graphics ’09. 11th IEEE International Conference on, 2009, pp. 49-53.
    [Bibtex]
    @INPROCEEDINGS{Wang2009,
      author = {Shengzheng Wang and Jie Yang},
      title = {Efficient collision detection for soft tissue simulation in a surgical
      planning system},
      booktitle = {Computer-Aided Design and Computer Graphics, 2009. CAD/Graphics '09.
      11th IEEE International Conference on},
      year = {2009},
      pages = {49 -53},
      month = {August},
      abstract = {In the field of cranio-maxillofacial surgery, there is a huge demand
      from surgeons to be able to automatically predict the post-operative
      face appearance in terms of a pre-specified bone-remodeling plan.
      Collision detection is a promising means to achieve this simulation.
      In this paper, therefore, an efficient collision detection method
      based on a new 3D signed distance field algorithm is proposed to
      accurately detect the contact positions and compute the penetration
      depth with the moving of the bones in the simulation, and thus the
      contact force between the bones and the soft tissues can be estimated
      using penalty methods. Thereafter, a nonlinear finite element model
      is employed to compute the deformation of the soft tissue model.
      The performance of the proposed collision detection algorithm has
      been improved in memory requirements and computational efficiency
      against the conventional methods. In addition, the proposed approach
      has the superior convergence characteristics against other methods.
      Therefore, the usage of the collision detection method can effectively
      assist surgeons in automatically predicting the post-operative face
      outline.},
      file = {Wang2009.pdf:Wang2009.pdf:PDF},
      keywords = {3D signed distance field algorithm;bone-remodeling planing;collision
      detection algorithm;convergence characteristics;cranio-maxillofacial
      surgery;nonlinear finite element model;penetration depth computation;soft
      tissue simulation;surgical planning system;biomechanics;bone;convergence;deformation;finite
      element analysis;medical computing;physiological models;surgery;,
      CMS, OCS, PLA, TEC},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
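The entry above detects bone/soft-tissue contact with a 3D signed distance field of the moving bone and estimates contact forces with a penalty method. Below is a minimal sketch of a generic penalty scheme driven by a caller-supplied signed distance function; the `sdf` callable, the stiffness value and the finite-difference gradient are assumptions, not the authors' algorithm.

    import numpy as np

    def penalty_contact_forces(nodes, sdf, stiffness=1e3, eps=1e-4):
        """For each soft-tissue node, evaluate the signed distance to the bone
        surface; nodes with negative distance are penetrating, and a penalty
        force proportional to the penetration depth is applied along the
        outward distance gradient."""
        forces = np.zeros_like(nodes, dtype=float)
        for i, x in enumerate(nodes):
            d = sdf(x)
            if d < 0.0:                                   # node is inside the bone
                # finite-difference gradient of the distance field
                g = np.array([(sdf(x + eps * e) - d) / eps for e in np.eye(3)])
                n = g / (np.linalg.norm(g) + 1e-12)
                forces[i] = -stiffness * d * n            # push the node back outside
        return forces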
  • C. Ware and G. Franck, “Evaluating stereo and motion cues for visualizing information nets in three dimensions,” ACM Trans. Graph., vol. 15, pp. 121-140, 1996.
    [Bibtex]
    @ARTICLE{Ware1996,
      author = {Ware, Colin and Franck, Glenn},
      title = {Evaluating stereo and motion cues for visualizing information nets
      in three dimensions},
      journal = {ACM Trans. Graph.},
      year = {1996},
      volume = {15},
      pages = {121 - 140},
      month = {April},
      acmid = {234975},
      address = {New York, NY, USA},
      file = {Ware1996.pdf:Ware1996.pdf:PDF},
      issn = {0730-0301},
      issue = {2},
      keywords = {head-coupled display, information visualization, network visualization,
      stereopsis, virtual reality},
      numpages = {20},
      owner = {Thomas},
      publisher = {ACM},
      timestamp = {2011.02.15}
    }
  • S. Warfield and R. Kikinis, “Nonlinear Registration and Template Driven Segmentation,” Brain, 1999.
    [Bibtex]
    @ARTICLE{Warfield1999,
      author = {Warfield, Simon and Kikinis, Ron},
      title = {Nonlinear Registration and Template Driven Segmentation},
      journal = {Brain},
      year = {1999},
      file = {:C\:\\Thomas\\PHD\\Literature\\Articles\\Warfield1999.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2010.10.26}
    }
  • S. Warfield, A. Nabavi, T. Butz, K. Tuncali, S. Silverman, P. Black, F. Jolesz, and R. Kikinis, “Intraoperative segmentation and nonrigid registration for image guided therapy,” , pp. 133-226, 2000.
    [Bibtex]
    @CONFERENCE{Warfield2000b,
      author = {Warfield, S. and Nabavi, A. and Butz, T. and Tuncali, K. and Silverman,
      S. and Black, P. and Jolesz, F. and Kikinis, R.},
      title = {Intraoperative segmentation and nonrigid registration for image guided
      therapy},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention--MICCAI
      2000},
      year = {2000},
      pages = {133 - 226},
      organization = {Springer},
      file = {Warfield2000b.pdf:Warfield2000b.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2011.01.10}
    }
  • S. K. Warfield, S. J. Haker, I. Talos, C. A. Kemper, N. Weisenfeld, A. U. J. Mewes, D. Goldberg-Zimring, K. H. Zou, C. Westin, W. M. Wells, C. M. C. Tempany, A. Golby, P. M. Black, F. A. Jolesz, and R. Kikinis, “Capturing intraoperative deformations: research experience at Brigham and Women’s Hospital.,” Medical image analysis, vol. 9, iss. 2, pp. 145-62, 2005.
    [Bibtex]
    @ARTICLE{Warfield2005,
      author = {Warfield, Simon K and Haker, Steven J and Talos, Ion-Florin and Kemper,
      Corey a and Weisenfeld, Neil and Mewes, Andrea U J and Goldberg-Zimring,
      Daniel and Zou, Kelly H and Westin, Carl-Fredrik and Wells, William
      M and Tempany, Clare M C and Golby, Alexandra and Black, Peter M
      and Jolesz, Ferenc a and Kikinis, Ron},
      title = {Capturing intraoperative deformations: research experience at Brigham
      and Women's Hospital.},
      journal = {Medical image analysis},
      year = {2005},
      volume = {9},
      pages = {145 - 62},
      number = {2},
      month = {April},
      abstract = {During neurosurgical procedures the objective of the neurosurgeon
      is to achieve the resection of as much diseased tissue as possible
      while achieving the preservation of healthy brain tissue. The restricted
      capacity of the conventional operating room to enable the surgeon
      to visualize critical healthy brain structures and tumor margin has
      lead, over the past decade, to the development of sophisticated intraoperative
      imaging techniques to enhance visualization. However, both rigid
      motion due to patient placement and nonrigid deformations occurring
      as a consequence of the surgical intervention disrupt the correspondence
      between preoperative data used to plan surgery and the intraoperative
      configuration of the patient's brain. Similar challenges are faced
      in other interventional therapies, such as in cryoablation of the
      liver, or biopsy of the prostate. We have developed algorithms to
      model the motion of key anatomical structures and system implementations
      that enable us to estimate the deformation of the critical anatomy
      from sequences of volumetric images and to prepare updated fused
      visualizations of preoperative and intraoperative images at a rate
      compatible with surgical decision making. This paper reviews the
      experience at Brigham and Women's Hospital through the process of
      developing and applying novel algorithms for capturing intraoperative
      deformations in support of image guided therapy.},
      file = {:H\:\\Thomas\\PHD\\Literature\\Articles\\Warfield2005.pdf:PDF},
      issn = {1361-8415},
      keywords = {Algorithms,Brain Neoplasms,Brain Neoplasms: pathology,Brain Neoplasms:
      surgery,Elasticity,Humans,Image Enhancement,Image Enhancement: methods,Image
      Interpretation, Computer-Assisted,Image Interpretation, Computer-Assisted:
      methods,Intraoperative Care,Intraoperative Care: methods,Movement,Neurosurgery,Neurosurgery:
      methods,Research,Research: methods,Subtraction Technique,Surgery,
      Computer-Assisted,Surgery, Computer-Assisted: methods,Utah},
      owner = {thomaskroes},
      pmid = {15721230},
      timestamp = {2010.10.25}
    }
  • S. K. Warfield, M. Kaus, F. a Jolesz, and R. Kikinis, “Adaptive, template moderated, spatially varying statistical classification.,” Medical image analysis, vol. 4, iss. 1, pp. 43-55, 2000.
    [Bibtex]
    @ARTICLE{Warfield2000a,
      author = {Warfield, S K and Kaus, M and Jolesz, F a and Kikinis, R},
      title = {Adaptive, template moderated, spatially varying statistical classification.},
      journal = {Medical image analysis},
      year = {2000},
      volume = {4},
      pages = {43 - 55},
      number = {1},
      month = {March},
      abstract = {A novel image segmentation algorithm was developed to allow the automatic
      segmentation of both normal and abnormal anatomy from medical images.
      The new algorithm is a form of spatially varying statistical classification,
      in which an explicit anatomical template is used to moderate the
      segmentation obtained by statistical classification. The algorithm
      consists of an iterated sequence of spatially varying classification
      and nonlinear registration, which forms an adaptive, template moderated
      (ATM), spatially varying statistical classification (SVC). Classification
      methods and nonlinear registration methods are often complementary,
      both in the tasks where they succeed and in the tasks where they
      fail. By integrating these approaches the new algorithm avoids many
      of the disadvantages of each approach alone while exploiting the
      combination. The ATM SVC algorithm was applied to several segmentation
      problems, involving different image contrast mechanisms and different
      locations in the body. Segmentation and validation experiments were
      carried out for problems involving the quantification of normal anatomy
      (MRI of brains of neonates) and pathology of various types (MRI of
      patients with multiple sclerosis, MRI of patients with brain tumors,
      MRI of patients with damaged knee cartilage). In each case, the ATM
      SVC algorithm provided a better segmentation than statistical classification
      or elastic matching alone.},
      file = {Warfield2000a.pdf:Warfield2000a.pdf:PDF},
      issn = {1361-8415},
      keywords = {Algorithms,Brain,Brain Neoplasms,Brain Neoplasms: pathology,Brain:
      anatomy \& histology,Brain: pathology,Cartilage, Articular,Cartilage,
      Articular: injuries,Cartilage, Articular: pathology,Humans,Image
      Processing, Computer-Assisted,Image Processing, Computer-Assisted:
      methods,Infant, Newborn,Knee Joint,Knee Joint: pathology,Magnetic
      Resonance Imaging,Multiple Sclerosis,Multiple Sclerosis: pathology},
      owner = {thomaskroes},
      pmid = {10972320},
      timestamp = {2010.10.25}
    }
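The ATM SVC algorithm described in the entry above alternates spatially varying statistical classification with nonlinear registration of an anatomical template. The skeleton below shows only that alternation; `classify` and `register` are caller-supplied callables and are assumptions, since the cited work defines the actual classifier and registration method.

    def atm_svc(image, template, classify, register, n_iterations=5):
        """Iterate template-moderated classification and template registration:
        classify(image, template) -> label map; register(template, labels) -> updated template."""
        labels = None
        for _ in range(n_iterations):
            labels = classify(image, template)        # spatially varying classification
            template = register(template, labels)     # nonlinear registration update
        return labels, template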
  • P. A. Webb, “A review of rapid prototyping (RP) techniques in the medical and biomedical sector,” Journal of Medical Engineering & Technology, vol. 24, iss. 4, pp. 149-153, 2000.
    [Bibtex]
    @ARTICLE{We2000,
      author = {Webb, P A},
      title = {A review of rapid prototyping (RP) techniques in the medical and
      biomedical sector},
      journal = {Journal of Medical Engineering \& Technology},
      year = {2000},
      volume = {24},
      pages = {149 - 153},
      number = {4},
      abstract = {The evolution of rapid prototyping (RP) technology is briefly discussed,
      and the application of RP technologies to the medical sector is reviewed.
      Although the use of RP technology has been slow arriving in the medical
      arena, the potential of the technique is seen to be widespread. Various
      uses of the technology within surgical planning, prosthesis development
      and bioengineering are discussed. Some possible drawbacks are noted
      in some applications, owing to the poor resolution of CT slice data
      in comparison with that available on RP machines, but overall, the
      methods are seen to be beneficial in all areas, with one early
      report suggesting large improvements in measurement and diagnostic
      accuracy as a result of using RP models.},
      file = {We2000.pdf:We2000.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • S. Weber and T. C. Lueth, “A simple system for navigation of bone alignment osteotomies of the tibia,” International Congress Series, vol. 1268, pp. 608-613, 2004.
    [Bibtex]
    @ARTICLE{Weber2004,
      author = {Stefan Weber and Tim C. Lueth},
      title = {A simple system for navigation of bone alignment osteotomies of the
      tibia},
      journal = {International Congress Series},
      year = {2004},
      volume = {1268},
      pages = {608 - 613},
      abstract = {This article describes a new bone segment navigation device for orthopaedic
      surgery. It allows for exact and CT-free alignment of bone segments
      according to a preset plan. The system guides the user through the
      initialisation process with only minor interaction. Once the bone
      segments are registered, an intuitive graphical model visualises
      current spatial position relative to a desired (initial or planned)
      position. Advantages of the system are the usability without CT data
      and a very simple user interface. We describe components of the system
      and an initial experiment to measure the overall repositioning error
      within a conventional intervention.},
      file = {:Weber2004.pdf:PDF},
      issn = {0531-5131},
      keywords = {Computer-aided orthopaedic surgery},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • Q. Wei, Y. Hu, J. H. Macgregor, and G. Gelfand, “Segmentation of lung lobes in volumetric CT images for surgical planning of treating lung cancer,” Proceedings of the 28th Annual International Conference of the IEEE Engineering in Medicine and Biology Society, New York, pp. 4869-4872, 2006.
    [Bibtex]
    @ARTICLE{Wei2006,
      author = {Wei, Q and Hu, Y and Macgregor, J H and Gelfand, G},
      title = {Segmentation of lung lobes in volumetric CT images for surgical planning
      of treating lung cancer},
      journal = {Proceedings of the 28th Annual International Conference of the IEEE
      Engineering in Medicine and Biology Society, New York},
      year = {2006},
      pages = {4869 - 4872},
      abstract = {Study has shown that three-dimensional (3D) visualization of lung
      cavities has distinct advantages over traditional computed tomographic
      (CT) images for surgical planning. A crucial step for achieving 3D
      visualization of lung cavities is the segmentation of lung lobes
      by identifying lobar fissures in volumetric CT images. Current segmentation
      algorithms for lung lobes rely on manually placed markers to identify
      the fissures. This paper presents an autonomous algorithm that effectively
      segments the lung lobes without user intervention. This algorithm
      applies a two-stage approach: (a) adaptive fissure sweeping to coarsely
      define fissure regions of lobar fissures; and (b) watershed transform
      to refine the location and curvature of fissures within the fissure
      regions. We have tested this algorithm on 4 CT data sets. Comparing
      with visual inspection, the algorithm provides an accuracy of 85.5
      – 95.0\% and 88.2 – 92.3\% for lobar fissures in the left and
      right lungs, respectively. This work proves the feasibility of developing
      an automatic algorithm for segmenting lung lobes.},
      file = {:H\:\\Thomas\\PHD\\Literature\\Articles\\Wei2006.pdf:PDF},
      keywords = {- segmentation,1,both men and women,death for,in north america,leading
      cause of cancer,lung lobar fissures,lung lobes is the,preferred choice,removal
      of the diseased,surgical,ung cancer is the,visualization},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
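The entry above refines the location of lobar fissures with a watershed transform after a coarse fissure-sweeping stage. As a rough, generic stand-in (not the authors' two-stage algorithm), the sketch below runs a marker-controlled watershed on a single CT slice, assuming hypothetical lobe seed coordinates and that fissures appear as bright ridges in the cost image.

    import numpy as np
    from skimage.segmentation import watershed

    def split_lobes(ct_slice, lung_mask, upper_seed, lower_seed):
        """Marker-controlled watershed separating two lobar regions: the seeded
        regions grow until they meet at the ridge formed by the fissure."""
        cost = ct_slice.astype(float)          # assumes fissures are bright ridges
        markers = np.zeros(ct_slice.shape, dtype=int)
        markers[upper_seed] = 1                # hypothetical seed inside the upper lobe
        markers[lower_seed] = 2                # hypothetical seed inside the lower lobe
        return watershed(cost, markers=markers, mask=lung_mask)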
  • Z. Wei, L. Gardi, C. Edirisinghe, D. Downey, and A. Fenster, “Three-Dimensional Ultrasound Guidance and Robot Assistance for Prostate Brachytherapy,” Image-guided interventions: technology and applications, p. 429, 2008.
    [Bibtex]
    @ARTICLE{Wei2008,
      author = {Wei, Z. and Gardi, L. and Edirisinghe, C. and Downey, D. and Fenster,
      A.},
      title = {Three-Dimensional Ultrasound Guidance and Robot Assistance for Prostate
      Brachytherapy},
      journal = {Image-guided interventions: technology and applications},
      year = {2008},
      pages = {429},
      note = {Chapter 15},
      file = {Wei2008.pdf:Wei2008.pdf:PDF},
      isbn = {0387738568},
      owner = {Thomas},
      publisher = {Springer Verlag},
      timestamp = {2011.04.12}
    }
  • M. Weidenbach, C. Wick, S. Pieper, K. J. Quast, T. Fox, G. Grunst, and D. a Redel, “Augmented reality simulator for training in two-dimensional echocardiography.,” Computers and biomedical research, an international journal, vol. 33, iss. 1, pp. 11-22, 2000.
    [Bibtex]
    @ARTICLE{Weidenbach2000,
      author = {Weidenbach, M and Wick, C and Pieper, S and Quast, K J and Fox, T
      and Grunst, G and Redel, D a},
      title = {Augmented reality simulator for training in two-dimensional echocardiography.},
      journal = {Computers and biomedical research, an international journal},
      year = {2000},
      volume = {33},
      pages = {11 - 22},
      number = {1},
      month = {February},
      abstract = {In two-dimensional echocardiography the sonographer must synthesize
      multiple tomographic slices into a mental three-dimensional (3D)
      model of the heart. Computer graphics and virtual reality environments
      are ideal to visualize complex 3D spatial relationships. In augmented
      reality (AR) applications, real and virtual image data are linked,
      to increase the information content. In the presented AR simulator
      a 3D surface model of the human heart is linked with echocardiographic
      volume data sets. The 3D echocardiographic data sets are registered
      with the heart model to establish spatial and temporal congruence.
      The heart model, together with an animated ultrasound sector represents
      a reference scenario, which displays the currently selected two-dimensional
      echocardiographic cutting plane calculated from the volume data set.
      Modifications of the cutting plane within the echocardiographic data
      are transferred and visualized simultaneously and in real time within
      the reference scenario. The trainee can interactively explore the
      3D heart model and the registered 3D echocardiographic data sets
      by an animated ultrasound probe, whose position is controlled by
      an electromagnetic tracking system. The tracking system is attached
      to a dummy transducer and placed on a plastic puppet to give a realistic
      impression of a two-dimensional echocardiographic examination.},
      file = {:H\:\\Thomas\\PHD\\Literature\\Articles\\Weidenbach2000.pdf:PDF},
      issn = {0010-4809},
      keywords = {Computer Graphics,Computer Simulation,Computer-Assisted Instruction,Echocardiography,Echocardiography:
      statistics & numerical data,Humans,Models, Anatomic,Models, Cardiovascular,User-Computer
      Interface},
      owner = {thomaskroes},
      pmid = {10772781},
      timestamp = {2010.10.25}
    }
  • Y. Weil, R. Mosheiff, L. Joskowicz, and M. Liebergall, “Principles of Computer-Aided Surgery in Trauma Surgery,” in Navigation and MIS in Orthopedic Surgery, J. B. Stiehl, W. H. Konermann, R. G. Haaker, and A. M. DiGioia, Eds., Springer Berlin Heidelberg, 2007, pp. 476-485.
    [Bibtex]
    @INCOLLECTION{Weil2007,
      author = {Weil, Y. and Mosheiff, R. and Joskowicz, L. and Liebergall, M.},
      title = {Principles of Computer-Aided Surgery in Trauma Surgery},
      booktitle = {Navigation and MIS in Orthopedic Surgery},
      publisher = {Springer Berlin Heidelberg},
      year = {2007},
      editor = {Stiehl, James B. and Konermann, Werner H. and Haaker, Rolf G. and
      DiGioia, Anthony M.},
      pages = {476 - 485},
      affiliation = {Hadassah-Hebrew University Medical School, Jerusalem Department of
      Orthopaedic Surgery POB 12000 Jerusalem 91120 Israel},
      file = {Weil2007.pdf:Weil2007.pdf:PDF},
      isbn = {978-3-540-36691-1},
      keyword = {Medicine \& Public Health},
      owner = {thomaskroes},
      timestamp = {2011.01.18}
    }
  • T. Weingärtner, U. Rembold, and R. Dillmann, “Simulation of jaw-movements for the musculoskeletal diagnoses,” , 1997.
    [Bibtex]
    @CONFERENCE{Weingartner1997,
      author = {Weing{\\"a}rtner, T. and Rembold, U. and Dillmann, R.},
      title = {Simulation of jaw-movements for the musculoskeletal diagnoses},
      booktitle = {Medicine Meets Virtual Reality 5},
      year = {1997},
      organization = {Citeseer},
      owner = {Thomas},
      timestamp = {2011.02.04}
    }
  • A. Wentzensen, G. Zheng, B. Vock, U. Langlotz, J. Korber, and P. A. Grutzner, “Image-based hip navigation,” International Orthopaedics, vol. 27, pp. 43-46, 2003.
    [Bibtex]
    @ARTICLE{Wentzensen2003,
      author = {Wentzensen, Andreas and Zheng, Guoyan and Vock, Bernd and Langlotz,
      Ulrich and Korber, Jurgen and Grutzner, Paul A},
      title = {Image-based hip navigation},
      journal = {International Orthopaedics},
      year = {2003},
      volume = {27},
      pages = {43 - 46},
      abstract = {After experimental and preclinical evaluation (HAP Paul Award 2001)
      of a CT-free image-guided surgical navigation system for acetabular
      cup placement, the system was introduced into clinical routine. The
      computation of the angular orientation of the cup is based on reference
      coordinates from the anterior pelvic plane (APP) concept. A hybrid
      strategy for pelvic landmark acquisition has been introduced involving
      percutaneous pointer-based digitization with the noninvasive biplanar
      landmark reconstruction using multiple registered fluoroscopy
      images. From January 2001 to May 2002, 118 consecutive patients (mean
      age 68 years, 82 male, 36 female, and 62 left and 56 right hip joints)
      were operated on with the hybrid CT-free navigation system. During
      each operation, the angular orientation of the inserted implant
      was recorded. To determine the placement accuracy of the acetabular
      components, the first 50 consecutive patients underwent a CT scan
      7–10 days postoperatively to analyze the cup position relative
      to the APP. This was done blinded with commercial planning software.
      There was no significant learning curve observed for the use
      of the system. Mean values for postoperative inclination read 43°
      (SD 3.0, range 37–49) and anteversion 19° (SD 3.9, range 10–28).
      The resulting system accuracy, i.e., the difference between intraoperatively
      calculated cup orientation and postoperatively measured implant
      position, shows a maximum error of 5° for the inclination (mean
      1.5°, SD 1.1) and 6° for the anteversion (mean 2.4°, SD 1.3).
      An accuracy of better than 5° inclination and 6° anteversion was
      achieved under clinical conditions, which implies that there is
      no significant difference in performance from the established CT-based
      navigation methods. Image-guided CT-free cup navigation provides
      a reliable solution for future total hip arthroplasty (THA).},
      file = {:H\:\\Thomas\\PHD\\Literature\\Articles\\Wentzensen2003.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • E. Westebring-van der Putten, R. Goossens, J. Jakimowicz, and J. Dankelman, “Haptics in minimally invasive surgery-a review,” Minimally Invasive Therapy & Allied Technologies, vol. 17, iss. 1, pp. 3-16, 2008.
    [Bibtex]
    @ARTICLE{Westebring2008,
      author = {Westebring-van der Putten, EP and Goossens, RHM and Jakimowicz, JJ
      and Dankelman, J.},
      title = {Haptics in minimally invasive surgery-a review},
      journal = {Minimally Invasive Therapy \& Allied Technologies},
      year = {2008},
      volume = {17},
      pages = {3--16},
      number = {1},
      file = {Westebring2008.pdf:Westebring2008.pdf:PDF},
      issn = {1364-5706},
      owner = {thomaskroes},
      publisher = {Informa UK Ltd UK},
      timestamp = {2010.12.22}
    }
  • A. Westermark, S. Zachow, and B. L. Eppley, “Three-dimensional osteotomy planning in maxillofacial surgery including soft tissue prediction,” Journal of Craniofacial Surgery, vol. 16, iss. 1, p. 100, 2005.
    [Bibtex]
    @ARTICLE{Westermark2005,
      author = {Westermark, A. and Zachow, S. and Eppley, B.L.},
      title = {Three-dimensional osteotomy planning in maxillofacial surgery including
      soft tissue prediction},
      journal = {Journal of Craniofacial Surgery},
      year = {2005},
      volume = {16},
      pages = {100},
      number = {1},
      abstract = {Preoperative planning of complex osteotomies in craniomaxillofacial
      surgery, in conjunction with a surgeon’s expertise, is essential
      for achieving an optimal result. However, the soft tissue changes
      that accompany facial bone movements cannot yet be accurately predicted.
      Bony tissue, because of its greater density, can be better predicted,
      but it alone does not account for the final aesthetic result. A new
      approach using not only three-dimensional (3-D) surface models of
      the patient’s anatomy, but also a corresponding volumetric model,
      is discussed. This 3D planning software was used in the treatment of 15
      patients and was found to provide a good correlation between simulation
      and postoperative outcome.},
      file = {Westermark2005.pdf:Westermark2005.pdf:PDF},
      issn = {1049-2275},
      owner = {thomaskroes},
      timestamp = {2011.01.03}
    }
  • D. White, K. Chelule, and B. Seedhom, “Accuracy of MRI vs CT imaging with particular reference to patient specific templates for total knee replacement surgery,” The International Journal of Medical Robotics and Computer Assisted Surgery, vol. 4, iss. 3, pp. 224-231, 2008.
    [Bibtex]
    @ARTICLE{White2008,
      author = {White, D. and Chelule, KL and Seedhom, BB},
      title = {Accuracy of MRI vs CT imaging with particular reference to patient
      specific templates for total knee replacement surgery},
      journal = {The International Journal of Medical Robotics and Computer Assisted
      Surgery},
      year = {2008},
      volume = {4},
      pages = {224 - 231},
      number = {3},
      file = {White2008.pdf:White2008.pdf:PDF},
      issn = {1478-596X},
      owner = {Thomas},
      publisher = {Wiley Online Library},
      timestamp = {2011.02.15}
    }
  • G. R. Williams, K. L. Wong, M. D. Pepe, V. Tan, D. Silverberg, M. L. Ramsey, A. Karduna, and J. P. Iannotti, “The effect of articular malposition after total shoulder arthroplasty on glenohumeral translations, range of motion, and subacromial impingement.,” Journal of shoulder and elbow surgery / American Shoulder and Elbow Surgeons … [et al.], vol. 10, iss. 5, pp. 399-409, 2001.
    [Bibtex]
    @ARTICLE{Williams2001,
      author = {Williams, G R and Wong, K L and Pepe, M D and Tan, V and Silverberg,
      D and Ramsey, M L and Karduna, A and Iannotti, J P},
      title = {The effect of articular malposition after total shoulder arthroplasty
      on glenohumeral translations, range of motion, and subacromial impingement.},
      journal = {Journal of shoulder and elbow surgery / American Shoulder and Elbow
      Surgeons ... [et al.]},
      year = {2001},
      volume = {10},
      pages = {399 - 409},
      number = {5},
      abstract = {The articular surface of the normal humeral head has a variable posterior
      and medial offset with respect to the central axis of the humeral
      shaft. Recreation of the normal humeral head shaft offset is postulated
      to be an important consideration during shoulder arthroplasty. However,
      the effect of humeral head malposition is unknown. The purpose of
      this study was to determine the effect of articular malposition after
      total shoulder arthroplasty on glenohumeral translation, range of
      motion, and subacromial impingement. Twenty-one human cadavers were
      dissected and tested with the use of an active or passive shoulder
      model. Range of motion and translation were recorded by means of
      an electromagnetic tracking device. The experiment was performed
      in 2 phases. For kinematics study, 11 cadaver shoulders were positioned
      both passively and actively from maximum internal rotation to maximum
      external rotation at 90 degrees of total elevation in the scapular
      plane. Three rotator cuff and 3 deltoid muscle lines of action were
      simulated for active joint positioning. Passive joint positioning
      was accomplished with the use of a torque wrench and a nominal centering
      force. The testing protocol was used for the natural joint as well
      as for 9 prosthetic head locations: centered and 2- and 4-mm offsets
      in the anterior, posterior, inferior, and superior directions. Repeated-measures
      analysis of variance was used to test for significant differences
      in the range of motion and translation between active and passive
      positioning of the natural joint as well as all prosthetic head positions.
      (2) For impingement study, 10 cadaver shoulders were used in a passive
      model, loading the tendons of the rotator cuff with a 30-N centering
      force. The humerus was passively rotated from maximum internal rotation
      (1500 Nmm) to maximum external rotation (1500 Nmm) by means of a
      continuous-recording digital torque wrench. Trials were performed
      with the use of centered, 4-, 6-, and 8-mm offset heads in the anterior,
      posterior, superior, and inferior positions before and after removal
      of the acromion and coracoacromial ligament. The relation between
      change in mean peak torque (with and without acromion), passive range
      of motion, and humeral head offset was analyzed by means of repeated-measures
      analysis of variance. In the kinematics study, total range of motion
      and all humeral translations were greater with passive joint positioning
      than with active positioning (P =.01) except for total superior-inferior
      translation and superior-inferior translation in external rotation.
      Anterior to posterior humeral head offset was associated with statistically
      significant changes in total range of motion (P =.02), range of internal
      rotation (P =.02), range of external rotation (P =.0001), and total
      anterior-posterior translation (P =.01). Superior to inferior humeral
      head offset resulted in statistically significant changes in total
      range of motion (P =.02), range of internal rotation (P =.0001),
      anterior-posterior translation during external rotation (P =.01),
      and total superior-inferior translation (P =.03). In the impingement
      study, there was a significant increase in torque from centered to
      4-mm inferior offset (P =.006), 6-mm inferior offset (P <.001), and
      8-mm inferior offset (P <.001). There was no significant increase
      in torque with superior, anterior, and posterior offsets. Glenohumeral
      motion significantly decreased from 129 degrees for centered head
      to 119 degrees for 8-mm superior (P =.002), 119 degrees for 8-mm
      anterior (P =.014), 118 degrees for 8-mm inferior (P <.001), and
      114 degrees for 8-mm posterior (P =.001). Humeral articular malposition
      of 4 mm or less during prosthetic arthroplasty of the glenohumeral
      joint may lead to small alterations in humeral translations and range
      of motion. Inferior malposition of greater than 4 mm can lead to
      increased subacromial contact; offset of 8 mm in any direction results
      in significant decreases in passive range of motion. Therefore if
      subacromial contact is to be minimized and glenohumeral motion maximized
      after shoulder replacement, anatomic reconstruction of the humeral
      head-humeral shaft offset to within 4 mm is desirable.},
      file = {Williams2001.pdf:Williams2001.pdf:PDF},
      issn = {1058-2746},
      keywords = {Adult,Aged,Aged, 80 and over,Arthroplasty, Replacement,Biomechanics,Humans,Joint
      Instability,Joint Instability: physiopathology,Middle Aged,Postoperative
      Period,Range of Motion, Articular,Rotation,Shoulder Joint,Shoulder
      Joint: physiopathology,Shoulder Joint: surgery},
      owner = {thomaskroes},
      pmid = {11641695},
      timestamp = {2010.10.25}
    }
  • M. J. Williams, “Segmentation in 3D virtual spine modeling for assistance in surgical planning and guidance,” PhD Thesis, 2005.
    [Bibtex]
    @PHDTHESIS{Williams2005,
      author = {Matthew James Williams},
      title = {Segmentation in 3D virtual spine modeling for assistance in surgical
      planning and guidance},
      school = {University of Florida},
      year = {2005},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • N. Wilson, K. Wang, R. Dutton, and C. Taylor, “A software framework for creating patient specific geometric models from medical imaging data for simulation based medical planning of vascular surgery,” , pp. 449-456, 2010.
    [Bibtex]
    @CONFERENCE{Wilson2010,
      author = {Wilson, N. and Wang, K. and Dutton, R. and Taylor, C.},
      title = {A software framework for creating patient specific geometric models
      from medical imaging data for simulation based medical planning of
      vascular surgery},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention--MICCAI
      2001},
      year = {2010},
      pages = {449 - 456},
      organization = {Springer},
      file = {Wilson2010.pdf:Wilson2010.pdf:PDF},
      owner = {Thomas},
      timestamp = {2011.02.23}
    }
  • J. Winder and R. Bibb, “Medical Rapid Prototyping Technologies: State of the Art and Current Limitations for Application in Oral and Maxillofacial Surgery,” Journal of Oral and Maxillofacial Surgery, vol. 63, iss. 7, pp. 1006-1015, 2005.
    [Bibtex]
    @ARTICLE{Winder2005,
      author = {John Winder and Richard Bibb},
      title = {Medical Rapid Prototyping Technologies: State of the Art and Current
      Limitations for Application in Oral and Maxillofacial Surgery},
      journal = {Journal of Oral and Maxillofacial Surgery},
      year = {2005},
      volume = {63},
      pages = {1006 - 1015},
      number = {7},
      abstract = {Purpose We describe state-of-the-art software and hardware requirements
      for the manufacture of high quality medical models manufactured using
      medical rapid prototyping. The limitations of medical models, the
      source of artefacts, and their physical appearance are illustrated
      along with remedies for their removal. Materials and Methods Medical
      models were built using predominantly stereolithography and fused
      deposition modeling at both institutions over a period of 6 years.
      A combined total of 350 models have been produced for a range of
      maxillofacial, neurosurgical, and orthopedic applications. Stereolithography,
      fused deposition modeling, computerized numerical milling, and other
      technologies are described along with computer software requirements. Results
      A range of unwanted artefacts that create distortions on medical
      models have been identified. These include data import, computed
      tomography gantry distortion, metal, motion, surface roughness due
      to support structure removal or surface modeling, and image data
      thresholding. The source of the artefact has been related to the
      patient, imaging modality performance, or the modeling technology.
      Discussion as to the significance of the artefacts on clinical use
      is provided. Conclusions It is recommended that models of human anatomy
      generated by medical rapid prototyping are subject to rigorous quality
      assurance at all stages of the manufacturing process. Clinicians
      should be aware of potential areas for inaccuracies within models
      and review the source images in cases where model integrity is in
      doubt.},
      file = {Winder2005.pdf:Winder2005.pdf:PDF},
      issn = {0278-2391},
      owner = {Thomas},
      timestamp = {2011.02.09}
    }
  • O. Wink, W. J. Niessen, and M. A. Viergever, “Fast delineation and visualization of vessels in 3-D angiographic images,” Medical Imaging, IEEE Transactions on, vol. 19, iss. 4, pp. 337-346, 2000.
    [Bibtex]
    @ARTICLE{Wink2000,
      author = {Wink, O. and Niessen, W.J. and Viergever, M.A.},
      title = {Fast delineation and visualization of vessels in 3-D angiographic
      images},
      journal = {Medical Imaging, IEEE Transactions on},
      year = {2000},
      volume = {19},
      pages = {337 - 346},
      number = {4},
      month = {April},
      abstract = {A method is presented which aids the clinician in obtaining quantitative
      measures and a three-dimensional (3-D) representation of vessels
      from 3-D angiographic data with a minimum of user interaction. Based
      on two user defined starting points, an iterative procedure tracks
      the central vessel axis. During the tracking process, the minimum
      diameter and a surface rendering of the vessels are computed, allowing
      for interactive inspection of the vasculature. Applications of the
      method to CTA, contrast enhanced (CE)-MRA and phase contrast (PC)-MRA
      images of the abdomen are shown. In all applications, a long stretch
      of vessels with varying width is tracked, delineated, and visualized,
      in less than 10 s on a standard clinical workstation.},
      file = {Wink2000.pdf:Wink2000.pdf:PDF},
      issn = {0278-0062},
      keywords = {10 s;abdomen;central vessel axis tracking;contrast enhanced MRA;iterative
      procedure;magnetic resonance angiography;medical diagnostic imaging;phase
      contrast MRA;standard clinical workstation;user defined starting
      points;varying width vessels;biomedical MRI;blood vessels;iterative
      methods;medical image processing;rendering (computer graphics);Algorithms;Angiography;Aortic
      Aneurysm, Abdominal;Artifacts;Blood Vessels;Humans;Image Processing,
      Computer-Assisted;Magnetic Resonance Angiography;Radiographic Image
      Enhancement;Tomography, X-Ray Computed;},
      owner = {thomaskroes},
      timestamp = {2011.01.05}
    }
  • A. B. Wolbarst and W. R. Hendee, “Evolving and Experimental Technologies in Medical Imaging,” Radiology, vol. 238, iss. 1, p. 16, 2006.
    [Bibtex]
    @ARTICLE{Wolbarst2006,
      author = {Wolbarst, A.B. and Hendee, W.R.},
      title = {Evolving and Experimental Technologies in Medical Imaging},
      journal = {Radiology},
      year = {2006},
      volume = {238},
      pages = {16},
      number = {1},
      file = {Wolbarst2006.pdf:Wolbarst2006.pdf:PDF},
      issn = {0033-8419},
      owner = {thomaskroes},
      publisher = {Radiological Society of North America},
      timestamp = {2011.01.06}
    }
  • I. Wolf, M. Vetter, I. Wegner, T. Böttger, M. Nolden, M. Schöbinger, M. Hastenteufel, T. Kunert, and H. Meinzer, “The Medical Imaging Interaction Toolkit,” Medical Image Analysis, vol. 9, iss. 6, pp. 594-604, 2005.
    [Bibtex]
    @ARTICLE{Wolf2005,
      author = {Ivo Wolf and Marcus Vetter and Ingmar Wegner and Thomas Böttger and
      Marco Nolden and Max Schöbinger and Mark Hastenteufel and Tobias
      Kunert and Hans-Peter Meinzer},
      title = {The Medical Imaging Interaction Toolkit},
      journal = {Medical Image Analysis},
      year = {2005},
      volume = {9},
      pages = {594 - 604},
      number = {6},
      abstract = {Thoroughly designed, open-source toolkits emerge to boost progress
      in medical imaging. The Insight Toolkit (ITK) provides this for the
      algorithmic scope of medical imaging, especially for segmentation
      and registration. But medical imaging algorithms have to be clinically
      applied to be useful, which additionally requires visualization and
      interaction. The Visualization Toolkit (VTK) has powerful visualization
      capabilities, but only low-level support for interaction. In this
      paper, we present the Medical Imaging Interaction Toolkit (MITK).
      The goal of MITK is to significantly reduce the effort required to
      construct specifically tailored, interactive applications for medical
      image analysis. MITK allows an easy combination of algorithms developed
      by ITK with visualizations created by VTK and extends these two toolkits
      with those features, which are outside the scope of both. MITK adds
      support for complex interactions with multiple states as well as
      undo-capabilities, a very important prerequisite for convenient user
      interfaces. Furthermore, MITK facilitates the realization of multiple,
      different views of the same data (as a multiplanar reconstruction
      and a 3D rendering) and supports the visualization of 3D+t data,
      whereas VTK is only designed to create one kind of view of 2D or
      3D data. MITK reuses virtually everything from ITK and VTK. Thus,
      it is not at all a competitor to ITK or VTK, but an extension, which
      eases the combination of both and adds the features required for
      interactive, convenient to use medical imaging software. MITK is
      an open-source project (www.mitk.org).},
      file = {:H\:\\Thomas\\PHD\\Literature\\Articles\\Wolf2005.pdf:PDF},
      issn = {1361-8415},
      keywords = {ITK},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • K. Wong, S. Kumta, K. Leung, K. Ng, E. Ng, and K. Lee, “Integration of CAD/CAM planning into computer assisted orthopaedic surgery,” Computer Aided Surgery, pp. 1-10, 2010.
    [Bibtex]
    @ARTICLE{Wong2010,
      author = {Wong, KC and Kumta, SM and Leung, KS and Ng, KW and Ng, EWK and Lee,
      KS},
      title = {Integration of CAD/CAM planning into computer assisted orthopaedic
      surgery},
      journal = {Computer Aided Surgery},
      year = {2010},
      pages = {1 - 10},
      number = {0},
      issn = {1092-9088},
      owner = {thomaskroes},
      publisher = {Informa UK Ltd UK},
      timestamp = {2011.01.12}
    }
  • K. H. Wong, “Imaging Modalities,” in Image-Guided Interventions, T. Peters and K. Cleary, Eds., Springer US, 2008, pp. 241-273.
    [Bibtex]
    @INCOLLECTION{Wong2008,
      author = {Wong, Kenneth H.},
      title = {Imaging Modalities},
      booktitle = {Image-Guided Interventions},
      publisher = {Springer US},
      year = {2008},
      editor = {Peters, Terry and Cleary, Kevin},
      pages = {241 - 273},
      note = {Chapter 9},
      abstract = {This chapter provides an overview of the different imaging modalities
      used for image-guided interventions, including x-ray computed tomography
      (CT) and fluoroscopy, nuclear medicine, magnetic resonance imaging
      (MRI), and ultrasound. The emphasis is on the distinguishing physical
      and engineering properties of each modality and how these characteristics
      translate into strengths and weaknesses for the end user. Because
      the imaging methods are very different, there is no single ideal
      modality for image-guided interventions; rather, they are largely
      complementary and can all provide valuable information about the
      patient. The chapter also covers current research topics in medical
      imaging relating to image-guided interventions and how these trends
      could potentially improve image-guided interventions in the future.},
      affiliation = {Georgetown University Washington DC USA},
      file = {Wong2008.pdf:Wong2008.pdf:PDF},
      isbn = {978-0-387-73858-1},
      keyword = {Engineering},
      owner = {Thomas},
      timestamp = {2011.02.24}
    }
  • R. K. Woo, D. A. Peterson, D. Le, M. E. Gertner, and T. Krummel, “Robot-Assisted Surgery: Technology and Current Clinical Status,” Surgery, pp. 2355-2371, 2008.
    [Bibtex]
    @ARTICLE{Woo2008,
      author = {Woo, R.K. and Peterson, D.A. and Le, D. and Gertner, M.E. and Krummel,
      T.},
      title = {Robot-Assisted Surgery: Technology and Current Clinical Status},
      journal = {Surgery},
      year = {2008},
      pages = {2355 - 2371},
      file = {Woo2008.pdf:Woo2008.pdf:PDF},
      owner = {thomaskroes},
      publisher = {Springer},
      timestamp = {2011.01.24}
    }
  • B. Wu, R. L. Klatzky, and G. Stetten, “Visualizing 3D objects from 2D cross sectional images displayed in-situ versus ex-situ.,” Journal of experimental psychology. Applied, vol. 16, iss. 1, pp. 45-59, 2010.
    [Bibtex]
    @ARTICLE{Wu2010a,
      author = {Wu, Bing and Klatzky, Roberta L and Stetten, George},
      title = {Visualizing 3D objects from 2D cross sectional images displayed in-situ
      versus ex-situ.},
      journal = {Journal of experimental psychology. Applied},
      year = {2010},
      volume = {16},
      pages = {45-59},
      number = {1},
      month = {March},
      abstract = {The present research investigates how mental visualization of a 3D
      object from 2D cross sectional images is influenced by displacing
      the images from the source object, as is customary in medical imaging.
      Three experiments were conducted to assess people's ability to integrate
      spatial information over a series of cross sectional images in order
      to visualize an object posed in 3D space. Participants used a hand-held
      tool to reveal a virtual rod as a sequence of cross-sectional images,
      which were displayed either directly in the space of exploration
      (in-situ) or displaced to a remote screen (ex-situ). They manipulated
      a response stylus to match the virtual rod's pitch (vertical slant),
      yaw (horizontal slant), or both. Consistent with the hypothesis that
      spatial colocation of image and source object facilitates mental
      visualization, we found that although single dimensions of slant
      were judged accurately with both displays, judging pitch and yaw
      simultaneously produced differences in systematic error between in-situ
      and ex-situ displays. Ex-situ imaging also exhibited errors such
      that the magnitude of the response was approximately correct but
      the direction was reversed. Regression analysis indicated that the
      in-situ judgments were primarily based on spatiotemporal visualization,
      while the ex-situ judgments relied on an ad hoc, screen-based heuristic.
      These findings suggest that in-situ displays may be useful in clinical
      practice by reducing error and facilitating the ability of radiologists
      to visualize 3D anatomy from cross sectional images.},
      file = {Wu2010a.pdf:Wu2010a.pdf:PDF},
      issn = {1939-2192},
      keywords = {Echocardiography, Doppler,Fixation, Ocular,Humans,Imaging, Three-Dimensional,Professional
      Competence,Radiology,Radiology: methods,Visual Perception},
      owner = {thomaskroes},
      pmid = {20350043},
      timestamp = {2010.10.25}
    }
  • J. Wu, R. Ma, X. Ma, F. Jia, and Q. Hu, “Curvature-dependent surface visualization of vascular structures.,” Computerized medical imaging and graphics : the official journal of the Computerized Medical Imaging Society, 2010.
    [Bibtex]
    @ARTICLE{Wu2010c,
      author = {Wu, Jianhuang and Ma, Renhui and Ma, Xin and Jia, Fucang and Hu,
      Qingmao},
      title = {Curvature-dependent surface visualization of vascular structures.},
      journal = {Computerized medical imaging and graphics : the official journal
      of the Computerized Medical Imaging Society},
      year = {2010},
      month = {August},
      abstract = {Efficient visualization of vascular structures is essential for therapy
      planning and medical education. Existing techniques achieve high-quality
      visualization of vascular surfaces at the cost of low rendering speed
      and large size of resulting surface. In this paper, we present an
      approach for visualizing vascular structures by exploiting the local
      curvature information of a given surface. To handle complex topology
      of loop and multiple parents and/or multiple children, bidirectional
      adaptive sampling and modified normal calculations at joints are
      proposed. The proposed method has been applied to cerebral vascular
      trees, liver vessel trees, and aortic vessel trees. The experimental
      results show that it can obtain a high-quality surface visualization
      with fewer polygons in the approximation.},
      file = {Wu2010c.pdf:Wu2010c.pdf:PDF},
      issn = {1879-0771},
      keywords = {vessel visualization},
      owner = {thomaskroes},
      pmid = {20732792},
      publisher = {Elsevier Ltd},
      timestamp = {2010.10.25}
    }
  • S. Wörz and K. Rohr, “Localization of anatomical point landmarks in 3D medical images by fitting 3D parametric intensity models,” Medical Image Analysis, vol. 10, iss. 1, pp. 41-58, 2006.
    [Bibtex]
    @ARTICLE{Worz2006,
      author = {Stefan Wörz and Karl Rohr},
      title = {Localization of anatomical point landmarks in 3D medical images by
      fitting 3D parametric intensity models},
      journal = {Medical Image Analysis},
      year = {2006},
      volume = {10},
      pages = {41 - 58},
      number = {1},
      abstract = {We introduce a new approach for the localization of 3D anatomical
      point landmarks. This approach is based on 3D parametric intensity
      models which are directly fitted to 3D images. To efficiently model
      tip-like, saddle-like, and sphere-like anatomical structures we introduce
      analytic intensity models based on the Gaussian error function in
      conjunction with 3D rigid transformations as well as deformations.
      To select a suitable size of the region-of-interest (ROI) where model
      fitting is performed, we also propose a new scheme for automatic
      selection of an optimal 3D ROI size based on the dominant gradient
      direction. In addition, to achieve a higher level of automation we
      present an algorithm for automatic initialization of the model parameters.
      Our approach has been successfully applied to accurately localize
      anatomical landmarks in 3D synthetic data as well as 3D MR and 3D
      CT image data. We have also compared the experimental results with
      the results of a previously proposed 3D differential approach. It
      turns out that the new approach significantly improves the localization
      accuracy.},
      file = {Worz2006.pdf:Worz2006.pdf:PDF},
      issn = {1361-8415},
      keywords = {3D anatomical point landmarks},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • J. Xia, N. Samman, R. W. Yeung, D. Wang, S. G. Shen, H. H. Ip, and H. Tideman, “Computer-assisted three-dimensional surgical planning and simulation. 3D soft tissue planning and prediction.,” International journal of oral and maxillofacial surgery, vol. 29, iss. 4, pp. 250-8, 2000.
    [Bibtex]
    @ARTICLE{Xia2000,
      author = {Xia, J and Samman, N and Yeung, R W and Wang, D and Shen, S G and
      Ip, H H and Tideman, H},
      title = {Computer-assisted three-dimensional surgical planning and simulation.
      3D soft tissue planning and prediction.},
      journal = {International journal of oral and maxillofacial surgery},
      year = {2000},
      volume = {29},
      pages = {250-8},
      number = {4},
      month = {August},
      abstract = {The purpose of this paper is to report a new technique for three-dimensional
      facial soft-tissue-change prediction after simulated orthognathic
      surgical planning. A scheme for soft tissue deformation, "Computer-assisted
      three-dimensional virtual reality soft tissue planning and prediction
      for orthognathic surgery (CASP)", is presented. The surgical planning
      was based on three-dimensional reconstructed CT visualization. Soft
      tissue changes were predicted by two newly devised algorithms: Surface
      Normal-based Model Deformation Algorithm and Ray Projection-based
      Model Deformation Algorithm. A three-dimensional color facial texture-mapping
      technique was also used for generating the color photo-realistic
      facial model. As a final result, a predicted and simulated patient's
      color facial model can be visualized from arbitrary viewing points.},
      file = {:H\:\\Thomas\\PHD\\Literature\\Articles\\Xia2000.pdf:PDF},
      issn = {0901-5027},
      keywords = {Algorithms,Color,Computer Simulation,Face,Face: anatomy \& histology,Face:
      surgery,Forecasting,Humans,Imaging, Three-Dimensional,Jaw,Jaw: surgery,Maxillofacial
      Abnormalities,Maxillofacial Abnormalities: surgery,Models, Anatomic,Oral
      Surgical Procedures,Oral Surgical Procedures: methods,Osteotomy,Osteotomy:
      methods,Patient Care Planning,Therapy, Computer-Assisted,Therapy,
      Computer-Assisted: methods,Tomography, X-Ray Computed,User-Computer
      Interface},
      owner = {thomaskroes},
      pmid = {11030394},
      timestamp = {2010.10.25}
    }
  • J. Xia, D. Wang, N. Samman, R. W. K. Yeung, and H. Tideman, “Computer-assisted three-dimensional surgical planning and simulation: 3D color facial model generation,” International Journal of Oral and Maxillofacial Surgery, vol. 29, iss. 1, pp. 2-10, 2000.
    [Bibtex]
    @ARTICLE{Xia20002,
      author = {James Xia and Dongfeng Wang and Nabil Samman and Richie Wai Kit Yeung
      and Henk Tideman},
      title = {Computer-assisted three-dimensional surgical planning and simulation:
      3D color facial model generation},
      journal = {International Journal of Oral and Maxillofacial Surgery},
      year = {2000},
      volume = {29},
      pages = {2 - 10},
      number = {1},
      abstract = {A scheme for texture mapping a 3D individualized color photo-realistic
      facial model from real color portraits and CT data is described.
      First, 3D CT images including both soft and hard tissues should be
      reconstructed from sequential CT slices, using a surface rendering
      technique. Facial features are extracted from 3D soft tissue. A generic
      mesh is individualized by correspondence matching and interpolation
      from those feature vertices. Three digitized color portraits with
      the "third" dimension from reconstructed soft tissue are blended
      and texture-mapped onto the 3D head model (mesh). A color simulated
      human head generated from frontal, right and left real color portraits
      can be viewed from an arbitrary angle in an inexpensive and user-friendly
      conventional personal computer. This scheme is the basic procedure
      in 3D computer-assisted simulation surgery.},
      file = {Xia20002.pdf:Xia20002.pdf:PDF},
      issn = {0901-5027},
      keywords = {computer graphics},
      owner = {thomaskroes},
      timestamp = {2011.01.25}
    }
  • J. J. Xia, J. Gateno, J. F. Teichgraeber, A. M. Christensen, R. E. Lasky, J. J. Lemoine, and M. A. K. Liebschner, “Accuracy of the computer-aided surgical simulation (CASS) system in the treatment of patients with complex craniomaxillofacial deformity: A pilot study.,” Journal of oral and maxillofacial surgery : official journal of the American Association of Oral and Maxillofacial Surgeons, vol. 65, iss. 2, pp. 248-54, 2007.
    [Bibtex]
    @ARTICLE{Xia2007,
      author = {Xia, James J and Gateno, Jaime and Teichgraeber, John F and Christensen,
      Andrew M and Lasky, Robert E and Lemoine, Jeremy J and Liebschner,
      Michael A K},
      title = {Accuracy of the computer-aided surgical simulation (CASS) system
      in the treatment of patients with complex craniomaxillofacial deformity:
      A pilot study.},
      journal = {Journal of oral and maxillofacial surgery : official journal of the
      American Association of Oral and Maxillofacial Surgeons},
      year = {2007},
      volume = {65},
      pages = {248-54},
      number = {2},
      month = {February},
      abstract = {PURPOSE: Current surgical planning methods are usually not adequate
      for the treatment of patients with complex craniomaxillofacial (CMF)
      deformities. To this end, we have developed a 3-dimensional (3D)
      computer-aided surgical simulation (CASS) planning method for the
      treatment of patients with complex CMF deformities. The purpose of
      this pilot study was to evaluate the accuracy of this technique in
      the treatment of patients with complex CMF deformities. PATIENTS
      AND METHODS: Five patients with complex CMF deformities were enrolled.
      Surgeries were planned with the CASS planning method. Surgical plans
      were transferred to patients at the time of surgery via computer-generated
      splints. After surgery, outcome evaluation was completed by first
      superimposing the postoperative computed tomography (CT) model onto
      the planned model, and then measuring the differences between planned
      and actual outcomes. The criteria used to determine the accuracy
      of the technique were as follows: a linear difference between planned
      and actual outcomes of less than 2 mm, and an angular difference
      of less than 4 degrees. RESULTS: All patients underwent surgery
      as planned. With the use of CASS planning, medians of the differences
      between planned and actual postoperative outcomes were limited to
      0.9 mm and 1.7 degrees. CONCLUSION: The results of this pilot study
      are promising. They will be used as the basis of calculations needed
      to determine the sample size for a larger and more comprehensive
      study that will be undertaken to assess the accuracy of CASS planning
      methods.},
      file = {:H\:\\Thomas\\PHD\\Literature\\Articles\\Xia2007.pdf:PDF},
      issn = {0278-2391},
      keywords = {Computer Simulation,Craniofacial Abnormalities,Craniofacial Abnormalities:
      surgery,Craniotomy,Craniotomy: methods,Head and Neck Neoplasms,Head
      and Neck Neoplasms: rehabilitation,Head and Neck Neoplasms: surgery,Humans,Models,
      Anatomic,Patient Care Planning,Pilot Projects,Surgery, Computer-Assisted,Surgery,
      Computer-Assisted: methods,Temporomandibular Joint Disorders,Temporomandibular
      Joint Disorders: surgery,Tomography, X-Ray Computed,Treatment Outcome},
      owner = {thomaskroes},
      pmid = {17236929},
      timestamp = {2010.10.25}
    }
  • J. J. Xia, C. V. Phillips, J. Gateno, J. F. Teichgraeber, A. M. Christensen, M. J. Gliddon, J. J. Lemoine, and M. A. K. Liebschner, “Cost-Effectiveness Analysis for Computer-Aided Surgical Simulation in Complex Cranio-Maxillofacial Surgery,” Journal of Oral and Maxillofacial Surgery, vol. 64, iss. 12, pp. 1780-1784, 2006.
    [Bibtex]
    @ARTICLE{Xia2006,
      author = {James J. Xia and Carl V. Phillips and Jaime Gateno and John F. Teichgraeber
      and Andrew M. Christensen and Michael J. Gliddon and Jeremy J. Lemoine
      and Michael A.K. Liebschner},
      title = {Cost-Effectiveness Analysis for Computer-Aided Surgical Simulation
      in Complex Cranio-Maxillofacial Surgery},
      journal = {Journal of Oral and Maxillofacial Surgery},
      year = {2006},
      volume = {64},
      pages = {1780 - 1784},
      number = {12},
      abstract = {Purpose The purpose of this study is to assess the costs and benefits
      of computer-aided surgical simulation (CASS) and to compare it with
      the current surgical planning methods for complex cranio-maxillofacial
      (CMF) surgery. Materials and Methods The comparison of methods applies
      to all CMF surgeries where the patient's condition is severe enough
      to undergo a computed tomography scan and a stereolithographic model
      is necessary for the surgical planning process. The costs for each
      method can be divided into time and other costs. The time was estimated
      based on the authors' experience as well as on a survey of a small
      group of 6 experienced CMF surgeons in the United States. The other
      costs were estimated based on the authors' experience. Results CASS
      has lower costs in terms of surgeon time, patient time, and material
      costs. Specifically, total surgeon hours spent in planning are 5.25
      hours compared with 9.75 for current standard methods. Material and
      scanning costs are $1,900 for CASS compared with about $3,510 for
      standard methods. Patient time for planning is reduced from 4.75
      hours to 2.25 hours with CASS. The reduction in both time and other
      costs remains when the fixed fee costs of CASS are added to the variable
      costs. Amortized across the 600 patients per year (1,800 for the
      assumed 3-year life of the training and software), this adds only
      a few dollars and a fraction of an hour per surgery. Even in the
      case of a small clinic when the cost is amortized for 6 patients
      per year (18 patients for the assumed 3-year life of the training
      and software), the per surgery costs (9.65 hours and $2,456) will
      still favor CASS. Conclusion Any great new design should consist of
      at least 2 of the 3 following features: faster, cheaper, and better
      outcome. This analysis demonstrates that CASS is faster and less
      costly than the current standard planning methods for complex CMF
      surgery. Previous studies have also shown that CASS results in better
      surgical outcomes. Thus, in all regards, CASS appears to be at least
      as good as the current methods of surgical planning.},
      file = {Xia2006.pdf:Xia2006.pdf:PDF},
      issn = {0278-2391},
      owner = {Thomas},
      timestamp = {2011.02.08}
    }
  • G. Xiong and C. Taylor, “Virtual Stent Grafting in Personalized Surgical Planning for Treatment of Aortic Aneurysms Using Image-Based Computational Fluid Dynamics,” in Medical Image Computing and Computer-Assisted Intervention – MICCAI 2010, T. Jiang, N. Navab, J. Pluim, and M. Viergever, Eds., Springer Berlin / Heidelberg, 2010, vol. 6363, pp. 375-382.
    [Bibtex]
    @INCOLLECTION{Xiong2010,
      author = {Xiong, Guanglei and Taylor, Charles},
      title = {Virtual Stent Grafting in Personalized Surgical Planning for Treatment
      of Aortic Aneurysms Using Image-Based Computational Fluid Dynamics},
      booktitle = {Medical Image Computing and Computer-Assisted Intervention – MICCAI
      2010},
      publisher = {Springer Berlin / Heidelberg},
      year = {2010},
      editor = {Jiang, Tianzi and Navab, Nassir and Pluim, Josien and Viergever,
      Max},
      volume = {6363},
      series = {Lecture Notes in Computer Science},
      pages = {375-382},
      abstract = {Image-based computational fluid dynamics provides great promise for
      evaluation of vascular devices and assessment of surgical procedures.
      However, many previous studies employ idealized arterial and device
      models or patient-specific models with a limited number of cases,
      since the model construction process is tedious and time-consuming.
      Moreover, in contrast to retrospective studies from existing image
      data, there is a pressing need of prospective analysis with the goal
      of surgical planning. Therefore, it is necessary to construct models
      with implanted devices in a fast, virtual and interactive fashion.
      The goal of this paper is to develop new geometric methods to deploy
      stent grafts virtually to patient-specific models constructed from
      direct 3D segmentation of medical images. A triangular surface representing
      vessel lumen boundary is extracted from the segmentation. The diseased
      portion is then clipped and replaced by the surface of a virtual
      stent graft following the centerline obtained from the clipped portion.
      A Y-shape stent graft is employed in case of bifurcated arteries.
      A method to map a 2D strut pattern on the stent graft is also presented.
      We demonstrate the application of our methods to quantify wall shear
      stresses and forces acting on stent grafts in personalized surgical
      planning for endovascular treatment of thoracic and abdominal aortic
      aneurysms. Our approach enables prospective model construction and
      may help to increase its throughput required by routine clinical
      uses in the future.},
      affiliation = {Biomedical Informatics Program, Stanford University, CA USA},
      file = {Xiong2010.pdf:Xiong2010.pdf:PDF},
      owner = {Thomas},
      timestamp = {2011.04.27},
      url = {http://dx.doi.org/10.1007/978-3-642-15711-0_47}
    }
  • J. Yamanaka, S. Saito, and J. Fujimoto, “Impact of preoperative planning using virtual segmental volumetry on liver resection for hepatocellular carcinoma,” World journal of surgery, vol. 31, iss. 6, pp. 1251-1257, 2007.
    [Bibtex]
    @ARTICLE{Yamanaka2007,
      author = {Yamanaka, J. and Saito, S. and Fujimoto, J.},
      title = {Impact of preoperative planning using virtual segmental volumetry
      on liver resection for hepatocellular carcinoma},
      journal = {World journal of surgery},
      year = {2007},
      volume = {31},
      pages = {1251 - 1257},
      number = {6},
      file = {Yamanaka2007.pdf:Yamanaka2007.pdf:PDF},
      issn = {0364-2313},
      owner = {Thomas},
      publisher = {Springer},
      timestamp = {2011.01.31}
    }
  • T. Yamazaki, K. Futai, T. Tomita, Y. Sato, H. Yoshikawa, S. Tamura, and K. Sugamoto, “Computer Assisted Orthopaedic Surgery,” International Journal of Computer Assisted Radiology and Surgery, vol. 5, iss. S1, pp. 131-136, 2010.
    [Bibtex]
    @ARTICLE{Yamazaki2010,
      author = {Yamazaki, T and Futai, K and Tomita, T and Sato, Y and Yoshikawa,
      H and Tamura, S and Sugamoto, K},
      title = {Computer Assisted Orthopaedic Surgery},
      journal = {International Journal of Computer Assisted Radiology and Surgery},
      year = {2010},
      volume = {5},
      pages = {131-136},
      number = {S1},
      month = {May},
      file = {Yamazaki2010.pdf:Yamazaki2010.pdf:PDF},
      issn = {1861-6410},
      keywords = {mobile-bearing insert ,total knee arthroplasty},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • Z. Yaniv, “Rigid Registration,” in Image-Guided Interventions, T. Peters and K. Cleary, Eds., Springer US, 2008, pp. 159-192.
    [Bibtex]
    @INCOLLECTION{Yaniv2008,
      author = {Yaniv, Ziv},
      title = {Rigid Registration},
      booktitle = {Image-Guided Interventions},
      publisher = {Springer US},
      year = {2008},
      editor = {Peters, Terry and Cleary, Kevin},
      pages = {159 - 192},
      note = {Chapter 6},
      abstract = {Rigid registration is a key component of all image-guided surgical
      applications, either as an end in itself or as a precursor to nonrigid
      registration. This chapter reviews common methods used for rigidly
      registering pairs of three-dimensional data sets (3D/3D registration),
      and three-dimensional data to two-dimensional data (2D/3D registration).
      The chapter defines five criteria that should be addressed when evaluating
      a registration algorithm. These include execution time, accuracy
      in the region of interest, breakdown point, automation, and reliability.
      On the basis of these criteria, one can assess whether an algorithm
      is applicable for a specific medical procedure, where acceptable
      bounds on algorithm performance are defined subjectively by physicians.
      Currently, the only registration algorithms that address these criteria
      analytically are the paired-point registration methods. All other
      algorithms have been evaluated empirically, usually using proprietary
      data sets whose transformations were estimated using paired-point
      registration. Future efforts should thus focus on addressing the
      evaluation criteria analytically, and on the establishment of publicly
      available data sets with known gold standard transformations, enabling
      objective empirical evaluations.},
      affiliation = {Georgetown University Washington DC USA},
      file = {Yaniv2008.pdf:Yaniv2008.pdf:PDF},
      isbn = {978-0-387-73858-1},
      keyword = {Engineering},
      owner = {Thomas},
      timestamp = {2011.02.24}
    }
  • L. Yanping, W. Xudong, C. Xiaojun, and W. Chengtao, “An Image-Guided Navigation System for Mandibular Angle Surgery,” in Bioinformatics and Biomedical Engineering (iCBBE), 2010 4th International Conference on, 2010, pp. 1-4.
    [Bibtex]
    @INPROCEEDINGS{Yanping2010,
      author = {Lin Yanping and Wang Xudong and Chen Xiaojun and Wang Chengtao},
      title = {An Image-Guided Navigation System for Mandibular Angle Surgery},
      booktitle = {Bioinformatics and Biomedical Engineering (iCBBE), 2010 4th International
      Conference on},
      year = {2010},
      pages = {1 - 4},
      month = {June},
      abstract = {In this paper, a computer-aided surgical 3D planning and real-time
      navigation system (Navi-CMFS) was developed to improve the accuracy
      and reliability of the mandibular angle restoration. This system
      integrates 3D medical modeling, 3D pre-operative surgical planning
      and intra-operative real-time navigation, which enabling the surgeon
      to locate the continually updated position of the instruments and
      the patient in the operating room, avoiding to hurt the inferior
      alveolar nerves, minimizing surgical risks and optimizing clinical
      results. Clinical studies were conducted on four patients with mandibular
      angles malformation. The average distance deviations between the
      planning and post-operation model (system precision) were 1.725mm.
      The results show that this system provided a more accurate and effective
      method than conventional surgery in mandibular angle surgery.},
      file = {:Yanping2010.pdf:PDF},
      issn = {2151-7614},
      keywords = {Navi-CMFS;accuracy;computer aided surgical 3D planning;image guided
      navigation system;mandibular angle surgery;post operation model;real
      time navigation system;reliability;biomedical imaging;computerised
      navigation;surgery;},
      owner = {thomaskroes},
      timestamp = {2010.11.02}
    }
  • T. Yasuda, Y. Hashimoto, S. Yokoi, and J. I. Toriwaki, “Computer system for craniofacial surgical planning based on CT images,” Medical Imaging, IEEE Transactions on, vol. 9, iss. 3, pp. 270-280, 2002.
    [Bibtex]
    @ARTICLE{Yasuda2002,
      author = {Yasuda, T. and Hashimoto, Y. and Yokoi, S. and Toriwaki, J.I.},
      title = {Computer system for craniofacial surgical planning based on CT images},
      journal = {Medical Imaging, IEEE Transactions on},
      year = {2002},
      volume = {9},
      pages = {270 - 280},
      number = {3},
      file = {Yasuda2002.pdf:Yasuda2002.pdf:PDF},
      issn = {0278-0062},
      owner = {Thomas},
      publisher = {IEEE},
      timestamp = {2011.02.08}
    }
  • Z. Yong-bo and D. X. B. Chen, “A Novel Method for 3D-Segmentation of Vascular images,” Science, vol. 3, iss. 2, pp. 55-61, 2010.
    [Bibtex]
    @ARTICLE{Yong-bo2010,
      author = {Yong-bo, Zhang and Chen, Daniel X B},
      title = {A Novel Method for 3D-Segmentation of Vascular images},
      journal = {Science},
      year = {2010},
      volume = {3},
      pages = {55-61},
      number = {2},
      abstract = {Constructing the accurate digital model of vessel networks is critical
      to vascular tissue engineering, in which the segmentation of vessel
      plays an important role. However, the existing segmentation methods
      are not able to achieve the goal of accurate segmentation of vessel
      networks. This paper presents the development of a method for vessel
      segmentation based on a data structure of octree and 3D region growing.
      Firstly, the volume data of vessel images are divided into different
      data groups according to the predetermined depth value of octree,
      and then the optimal slices sequence is defined by analyzing the
      octree’s nodes which contain the vessel region. Then, the vessel
      segmentation is conducted from the vessels images of octree nodes
      based on 3D region growing. Finally, the treated data blocks are
      reset and the segmentation results of the whole volume data are obtained.
      By applying this method to the volume data of vascular images from
      MRA, accurate vessel segmentation results are achieved. This work
      would represent a significant advance for digital modeling of vessel
      networks.},
      file = {:H\:\\Thomas\\PHD\\Literature\\Articles\\Yong-bo2010.pdf:PDF},
      keywords = {image segmentation,octree,region growing,vascular tissue engineering},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • S. Zachow, E. Gladilin, R. Sader, and H. F. Zeilhofer, “Draw and cut: intuitive 3D osteotomy planning on polygonal bone models,” , vol. 1256, pp. 362-369, 2003.
    [Bibtex]
    @CONFERENCE{Zachow2003,
      author = {Zachow, S. and Gladilin, E. and Sader, R. and Zeilhofer, H.F.},
      title = {Draw and cut: intuitive 3D osteotomy planning on polygonal bone models},
      booktitle = {International Congress Series},
      year = {2003},
      volume = {1256},
      pages = {362 - 369},
      organization = {Elsevier},
      file = {Zachow2003.pdf:Zachow2003.pdf:PDF},
      issn = {0531-5131},
      keywords = {APP, CMS, PLA, SUR},
      owner = {Thomas},
      timestamp = {2011.02.08}
    }
  • S. Zachow, E. Gladilin, H. Zeilhofer, and R. Sader, “Improved 3D Osteotomy Planning in Cranio-maxillofacial Surgery,” Computing, pp. 473-481, 2001.
    [Bibtex]
    @ARTICLE{Zachow2001,
      author = {Zachow, Stefan and Gladilin, Evgeny and Zeilhofer, Hans-florian and
      Sader, Robert},
      title = {Improved 3D Osteotomy Planning in Cranio-maxillofacial Surgery},
      journal = {Computing},
      year = {2001},
      pages = {473-481},
      abstract = {In this paper we present two clinical cases in maxillofacial surgery,
      where complex surgical interventions have been pre-operatively planned
      on 3D models of the patients’ heads. Our goal was to provide surgeons
with an additional planning criterion, i.e. the prediction of the
post-operative facial appearance. In our first study a two step
mandibular distraction has been planned, and in the second one a
bimaxillary operation with a high Le Fort I osteotomy of the maxilla
according to Bell, as well as a sagittal split osteotomy on both sides
      of the mandible, according to Obwegeser–Dal Pont. Within our study
      we did focus on the three dimensional soft tissue simulation using
      finite element methods. For the provision of such a planning aid,
      concepts for an integrated 3D surgery planning system are proposed
      that are partially implemented and demonstrated.},
      file = {Zachow2001.pdf:Zachow2001.pdf:PDF},
      keywords = {computer-assisted cranio-maxillofacial surgery,finite-element methods,osteodistraction,osteotomy,soft
      tissue prediction, APP, CMS, VOR, SUR, PLA},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
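    The soft tissue prediction mentioned in the abstract above rests on finite-element simulation. As a toy illustration only, the sketch below assembles the global stiffness matrix of a 1D linear bar; the real planning system uses 3D elements and tissue-specific material laws, none of which are reproduced here, and the element count, length, and stiffness are assumed values.

    # Toy sketch of 1D linear finite-element stiffness assembly, for
    # illustration of the FEM machinery only (not the paper's 3D model).
    import numpy as np

    def assemble_1d_bar(n_elems, length, stiffness):
        """Global stiffness matrix for a uniform 1D bar with linear elements."""
        h = length / n_elems
        K = np.zeros((n_elems + 1, n_elems + 1))
        ke = (stiffness / h) * np.array([[1.0, -1.0], [-1.0, 1.0]])  # element matrix
        for e in range(n_elems):
            K[e:e + 2, e:e + 2] += ke  # scatter into the global system
        return K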
  • S. Zachow, E. Gladiline, H. Hege, and P. Deuflhard, “Finite-element simulation of soft tissue deformation,” , pp. 23-28, 2000.
    [Bibtex]
    @CONFERENCE{Zachow2000,
      author = {Zachow, S. and Gladiline, E. and Hege, HC and Deuflhard, P.},
      title = {Finite-element simulation of soft tissue deformation},
      booktitle = {Proc. CARS},
      year = {2000},
      pages = {23 - 28},
      organization = {Citeseer},
      owner = {thomaskroes},
      timestamp = {2011.01.03}
    }
  • S. Zachow, H. C. Hege, and P. Deuflhard, “Computer assisted planning in cranio-maxillofacial surgery,” Journal of Computing and Information Technology, vol. 14, iss. 1, p. 53, 2004.
    [Bibtex]
    @ARTICLE{Zachow2004a,
      author = {Zachow, S. and Hege, H.C. and Deuflhard, P.},
      title = {Computer assisted planning in cranio-maxillofacial surgery},
      journal = {Journal of Computing and Information Technology},
      year = {2004},
      volume = {14},
      pages = {53},
      number = {1},
      file = {Zachow2004a.pdf:Zachow2004a.pdf:PDF},
      issn = {1846-3908},
      owner = {Thomas},
      timestamp = {2011.02.08}
    }
• S. Zachow, T. Hierl, and B. Erdmann, “A quantitative evaluation of 3D soft tissue prediction in maxillofacial surgery planning,” Proc. 3. Jahrestagung der Deutschen Gesellschaft für Computer- und Roboter-assistierte Chirurgie (curac), pp. 75-79, 2004.
    [Bibtex]
    @ARTICLE{Zachow2004b,
      author = {Zachow, S. and Hierl, T. and Erdmann, B.},
      title = {A quantitative evaluation of 3D soft tissue prediction in maxillofacial
      surgery planning},
journal = {Proc. 3. Jahrestagung der Deutschen Gesellschaft f{\"u}r Computer- und
Roboter-assistierte Chirurgie (curac)},
      year = {2004},
      pages = {75 - 79},
      file = {Zachow2004b.pdf:Zachow2004b.pdf:PDF},
      owner = {Thomas},
      timestamp = {2011.02.08}
    }
  • C. Zahlten, H. Jürgens, C. Evertsz, R. Leppek, H. O. Peitgen, and K. Klose, “Portal vein reconstruction based on topology,” European journal of radiology, vol. 19, iss. 2, pp. 96-100, 1995.
    [Bibtex]
    @ARTICLE{Zahlten1995,
author = {Zahlten, C. and J{\"u}rgens, H. and Evertsz, CJG and Leppek, R.
      and Peitgen, H.O. and Klose, KJ},
      title = {Portal vein reconstruction based on topology},
      journal = {European journal of radiology},
      year = {1995},
      volume = {19},
      pages = {96 - 100},
      number = {2},
      file = {Zahlten1995.pdf:Zahlten1995.pdf:PDF},
      issn = {0720-048X},
      owner = {Thomas},
      publisher = {Elsevier},
      timestamp = {2011.02.01}
    }
  • W. Zhai, Y. Zhao, and P. Jia, “A Navigation System for Minimally Invasive Abdominal Intervention Surgery Robot,” Surgery, pp. 819-823, 2008.
    [Bibtex]
    @ARTICLE{Zhai2008,
      author = {Zhai, Weiming and Zhao, Yannan and Jia, Peifa},
      title = {A Navigation System for Minimally Invasive Abdominal Intervention
      Surgery Robot},
      journal = {Surgery},
      year = {2008},
      pages = {819-823},
abstract = {This paper aims to present a navigation system design for
image guided minimal abdominal surgery robot that could compensate
for the patient respiratory movement. Currently computer-aided
surgery navigation technology has been broadly applied to such fields
as orthopedics and neurosurgery, but in the field of interventional
surgery, it is still rarely reported. As described in this paper, we
introduced a surgery navigation system which can be applied to the
interstitial treatment. Multiply technique are used to immobilizing
the patient body, making the route plan and tracking the surgery
instrument, real-time ultrasound feedback and image registration is
also used to enhance the precision of instrument positioning. The
goal of this research is to develop a computer-aided surgery
navigation system in minimally invasive surgery, and provides
solutions to urgent requirements in optical precision positioning,
planning and navigation in abdominal surgeries. So that it can fine
supporting completion of the surgery operation, improve accuracy and
efficiency of the traditional surgical methods, and reduce the
suffering of patients from pain.},
      file = {Zhai2008.pdf:Zhai2008.pdf:PDF},
      keywords = {minimally,surgery navigation,surgery robot},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
  • H. Zhang, J. E. Fritts, and S. A. Goldman, “Image segmentation evaluation: A survey of unsupervised methods,” Computer Vision and Image Understanding, vol. 110, iss. 2, pp. 260-280, 2008.
    [Bibtex]
    @ARTICLE{Zhang2008,
      author = {Hui Zhang and Jason E. Fritts and Sally A. Goldman},
      title = {Image segmentation evaluation: A survey of unsupervised methods},
      journal = {Computer Vision and Image Understanding},
      year = {2008},
      volume = {110},
      pages = {260 - 280},
      number = {2},
      abstract = {Image segmentation is an important processing step in many image,
      video and computer vision applications. Extensive research has been
      done in creating many different approaches and algorithms for image
      segmentation, but it is still difficult to assess whether one algorithm
      produces more accurate segmentations than another, whether it be
      for a particular image or set of images, or more generally, for a
      whole class of images. To date, the most common method for evaluating
      the effectiveness of a segmentation method is subjective evaluation,
      in which a human visually compares the image segmentation results
      for separate segmentation algorithms, which is a tedious process
      and inherently limits the depth of evaluation to a relatively small
      number of segmentation comparisons over a predetermined set of images.
      Another common evaluation alternative is supervised evaluation, in
      which a segmented image is compared against a manually-segmented
      or pre-processed reference image. Evaluation methods that require
      user assistance, such as subjective evaluation and supervised evaluation,
      are infeasible in many vision applications, so unsupervised methods
      are necessary. Unsupervised evaluation enables the objective comparison
      of both different segmentation methods and different parameterizations
      of a single method, without requiring human visual comparisons or
      comparison with a manually-segmented or pre-processed reference image.
      Additionally, unsupervised methods generate results for individual
      images and images whose characteristics may not be known until evaluation
      time. Unsupervised methods are crucial to real-time segmentation
      evaluation, and can furthermore enable self-tuning of algorithm parameters
      based on evaluation results. In this paper, we examine the unsupervised
      objective evaluation methods that have been proposed in the literature.
      An extensive evaluation of these methods are presented. The advantages
      and shortcomings of the underlying design mechanisms in these methods
      are discussed and analyzed through analytical evaluation and empirical
      evaluation. Finally, possible future directions for research in unsupervised
      evaluation are proposed.},
      file = {Zhang2008.pdf:Zhang2008.pdf:PDF},
      issn = {1077-3142},
      keywords = {Image segmentation},
      owner = {thomaskroes},
      timestamp = {2011.01.05}
    }
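    The survey above contrasts unsupervised evaluation with subjective and supervised evaluation. Purely as an assumed example of the simplest kind of unsupervised criterion (not a specific measure taken from the paper), the sketch below scores a segmentation by its size-weighted within-region intensity variance, where lower values indicate more homogeneous regions.

    # Sketch of a simple unsupervised segmentation quality score; the metric,
    # function name, and inputs are illustrative assumptions.
    import numpy as np

    def within_region_variance(image, labels):
        """Size-weighted average of per-segment intensity variance.
        `image` and `labels` are same-shaped NumPy arrays."""
        total = 0.0
        for lbl in np.unique(labels):
            region = image[labels == lbl]
            total += region.size * region.var()
        return total / image.size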
  • Q. Zhang, R. Eagleson, and T. Peters, “High-quality cardiac image dynamic visualization with feature enhancement and virtual surgical tool inclusion,” The Visual Computer, vol. 25, pp. 1019-1035, 2009.
    [Bibtex]
    @ARTICLE{Zhang2009b,
      author = {Zhang, Qi and Eagleson, Roy and Peters, Terry},
      title = {High-quality cardiac image dynamic visualization with feature enhancement
      and virtual surgical tool inclusion},
      journal = {The Visual Computer},
      year = {2009},
      volume = {25},
      pages = {1019 - 1035},
      abstract = {Traditional approaches for rendering segmented volumetric data sets
      usually deliver unsatisfactory results, such as insufficient frame
      rate, low image quality, and intermixing artifacts. In this paper,
      we introduce a novel color encoding technique, based on graphics
      processing unit (GPU) accelerated raycasting and post-color attenuated
      classification, to address this problem. The result is an algorithm
      that can generate artifact-free dynamic volumetric images in real
      time. Next, we present a pre-integrated volume shading algorithm
      to reduce graphics memory requirements and computational cost when
      compared to traditional shading methods. We also present a normal-adjustment
      technique to improve image quality at clipped planes. Furthermore,
      we propose a new algorithm for color and depth texture indexing that
      permits virtual solid objects, such as surgical tools, to be manipulated
      within the dynamically rendered volumetric cardiac images in real
      time. Finally, all these techniques are combined within an environment
      that permits real-time visualization, enhancement, and manipulation
      of dynamic cardiac data sets.},
      affiliation = {Imaging Research Laboratories, Robarts Research Institute, University
      of Western Ontario, London, ON Canada},
      issn = {0178-2789},
      issue = {11},
      keyword = {Computer Science},
      publisher = {Springer Berlin / Heidelberg},
      url = {http://dx.doi.org/10.1007/s00371-009-0364-y}
    }
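    The GPU raycaster described above ultimately relies on front-to-back compositing of classified samples along each viewing ray. A minimal CPU sketch of that accumulation step is given below; the transfer function and early-termination threshold are assumptions, and the paper's post-color attenuated classification and pre-integrated shading are not reproduced.

    # Sketch of front-to-back alpha compositing along one ray; the transfer
    # function callback and the 0.99 opacity cutoff are assumed values.
    import numpy as np

    def composite_ray(samples, transfer_function):
        """Accumulate colour and opacity front-to-back for a 1D array of
        scalar samples; `transfer_function(s)` returns (rgb, alpha)."""
        color = np.zeros(3)
        alpha = 0.0
        for s in samples:
            rgb, a = transfer_function(s)
            color += (1.0 - alpha) * a * np.asarray(rgb, dtype=float)
            alpha += (1.0 - alpha) * a
            if alpha > 0.99:          # early ray termination
                break
        return color, alpha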
  • Q. Zhang, R. Eagleson, and T. M. Peters, “Dynamic real-time 4D cardiac MDCT image display using GPU-accelerated volume rendering.,” Computerized medical imaging and graphics : the official journal of the Computerized Medical Imaging Society, vol. 33, iss. 6, pp. 461-76, 2009.
    [Bibtex]
    @ARTICLE{Zhang2009,
      author = {Zhang, Qi and Eagleson, Roy and Peters, Terry M},
      title = {Dynamic real-time 4D cardiac MDCT image display using GPU-accelerated
      volume rendering.},
      journal = {Computerized medical imaging and graphics : the official journal
      of the Computerized Medical Imaging Society},
      year = {2009},
      volume = {33},
      pages = {461-76},
      number = {6},
      month = {September},
      abstract = {Intraoperative cardiac monitoring, accurate preoperative diagnosis,
      and surgical planning are important components of minimally-invasive
      cardiac therapy. Retrospective, electrocardiographically (ECG) gated,
      multidetector computed tomographical (MDCT), four-dimensional (3D
      + time), real-time, cardiac image visualization is an important tool
      for the surgeon in such procedure, particularly if the dynamic volumetric
      image can be registered to, and fused with the actual patient anatomy.
      The addition of stereoscopic imaging provides a more intuitive environment
      by adding binocular vision and depth cues to structures within the
      beating heart. In this paper, we describe the design and implementation
      of a comprehensive stereoscopic 4D cardiac image visualization and
      manipulation platform, based on the opacity density radiation model,
      which exploits the power of modern graphics processing units (GPUs)
      in the rendering pipeline. In addition, we present a new algorithm
      to synchronize the phases of the dynamic heart to clinical ECG signals,
      and to calculate and compensate for latencies in the visualization
      pipeline. A dynamic multiresolution display is implemented to enable
      the interactive selection and emphasis of volume of interest (VOI)
      within the entire contextual cardiac volume and to enhance performance,
      and a novel color and opacity adjustment algorithm is designed to
      increase the uniformity of the rendered multiresolution image of
      heart. Our system provides a visualization environment superior to
      noninteractive software-based implementations, but with a rendering
      speed that is comparable to traditional, but inferior quality, volume
      rendering approaches based on texture mapping. This retrospective
      ECG-gated dynamic cardiac display system can provide real-time feedback
      regarding the suspected pathology, function, and structural defects,
      as well as anatomical information such as chamber volume and morphology.},
      file = {Zhang2009.pdf:Zhang2009.pdf:PDF},
      issn = {1879-0771},
      keywords = {Cardiac Surgical Procedures,Cardiac Surgical Procedures: methods,Cardiac-Gated
      Imaging Techniques,Computer Graphics,Echocardiography, Four-Dimensional,Humans,Monitoring,
      Intraoperative,Monitoring, Intraoperative: methods,Surgery, Computer-Assisted,Surgical
      Procedures, Minimally Invasive,Tomography, X-Ray Computed,Tomography,
      X-Ray Computed: instrumentation,Tomography, X-Ray Computed: methods,User-Computer
      Interface},
      owner = {thomaskroes},
      pmid = {19467840},
      timestamp = {2010.10.25}
    }
• G. Zheng, K. T. Rajamani, and L. Nolte, “Use of a Dense Surface Point Distribution Model in a Three-Stage Anatomical Shape Reconstruction from Sparse Information for Computer Assisted Orthopaedic Surgery: A Preliminary Study,” , pp. 52-60, 2006.
    [Bibtex]
    @ARTICLE{Zheng2006,
author = {Zheng, Guoyan and Rajamani, Kumar T and Nolte, Lutz-Peter},
      title = {Use of a Dense Surface Point Distribution Model in a Three-Stage
      Anatomical Shape Reconstruction from Sparse Information for Computer
Assisted Orthopaedic Surgery: A Preliminary Study},
      year = {2006},
      pages = {52 - 60},
      abstract = {Constructing anatomical shape from extremely sparse information is
      a challenging task. A priori information is often required to handle
      this otherwise ill-posed problem. In the present paper, we try to
      solve the problem in an accurate and robust way. At the heart of
      our approach lies the combination of a three-stage anatomical shape
      reconstruction technique and a dense surface point distribution model
(DS-PDM). The DS-PDM is constructed from an already-aligned sparse
      training shape set using Loop subdivision. Its application facilitates
      the setup of point correspondences for all three stages of surface
      reconstruction due to its dense description. The proposed approach
      is especially useful for accurate and stable surface reconstruction
      from sparse information when only a small number of a priori training
      shapes are available. It adapts gradually to use more information
      derived from the a priori model when larger number of training data
are available. The proposed approach has been successfully validated
      in a preliminary study on anatomical shape reconstruction of two
      femoral heads using only dozens of sparse points, yielding promising
      results.},
      file = {Zheng2006.pdf:Zheng2006.pdf:PDF},
      owner = {thomaskroes},
      timestamp = {2010.10.25}
    }
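    The reconstruction described above fits a dense point distribution model to a handful of digitised points. The sketch below shows only the core regularised least-squares fit of shape-mode weights, under the simplifying assumption that point correspondences are already known; the paper's three-stage scheme and Loop-subdivision densification are omitted, and the function name and regularisation weight are illustrative.

    # Sketch of fitting a statistical shape model (mean + modes @ b) to sparse
    # points with known correspondences; shapes are flattened (3N,) vectors.
    import numpy as np

    def fit_shape_model(mean_shape, modes, sparse_idx, sparse_points, reg=1e-2):
        """Estimate mode weights b so the model, restricted to the points in
        `sparse_idx`, matches `sparse_points` (array of shape (k, 3)) in a
        regularised least-squares sense; returns the dense reconstruction."""
        rows = np.concatenate([[3 * i, 3 * i + 1, 3 * i + 2] for i in sparse_idx])
        A = modes[rows, :]                        # restricted shape modes (3k, m)
        r = sparse_points.ravel() - mean_shape[rows]
        b = np.linalg.solve(A.T @ A + reg * np.eye(A.shape[1]), A.T @ r)
        return mean_shape + modes @ b             # dense reconstructed shape (3N,)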
  • S. Zidowitz, C. Hansen, S. Schlichting, M. Kleemann, and H. Peitgen, “Software Assistance for Intra-Operative Guidance in Liver Surgery,” Surgery, pp. 205-208, 2009.
    [Bibtex]