@article{5363,
  author   = {Iyer, Ramesh S. and Swanson, Jonathan O. and Otto, Randolph K. and Weinberger, Edward},
  title    = {Peer Review Comments Augment Diagnostic Error Characterization and Departmental Quality Assurance: 1-Year Experience from a Children's Hospital},
  journal  = {AJR Am J Roentgenol},
  year     = {2013},
  month    = jan,
  volume   = {200},
  pages    = {132--137},
  issn     = {1546-3141},
  doi      = {10.2214/AJR.12.9580},
  language = {eng},
  abstract = {OBJECTIVE: The objective of our study was to categorize radiologist peer review comments and evaluate their functions within the context of a comprehensive quality assurance (QA) program.
MATERIALS AND METHODS: All randomly entered radiology peer review comments at our institution were compiled over a 1-year period (January 1, 2011, through December 31, 2011). A Web-based commercially available software package was used to query the comments, which were then exported into a spreadsheet. Each comment was then placed into a single most appropriate category based on consensus decision of two board-certified pediatric radiologists. QA scores associated with each comment were recorded.
RESULTS: A total of 427 peer review comments were evaluated. The majority of comments (85.9%) were entered voluntarily with QA scores of 1. A classification system was devised that augments traditional error classification. Seven broad comment categories were identified: errors of observation (25.5%), errors of interpretation (5.6%), inadequate patient data gathering (3.7%), errors of communication (9.6%), interobserver variability (21.3%), informational and educational feedback (23.0%), and complimentary (11.2%).
CONCLUSION: Comment-enhanced peer review expands traditional diagnostic error classification, may identify errors that were underscored, provides continuous educational feedback for participants, and promotes a collegial environment.},
}