
@article{ref1,
  author   = {Ergai, Awatef and Cohen, Tara and Sharp, Julia and Wiegmann, Doug and Gramopadhye, Anand and Shappell, Scott},
  title    = {Assessment of the {Human Factors Analysis and Classification System} ({HFACS}): Intra-Rater and Inter-Rater Reliability},
  journal  = {Safety Science},
  year     = {2016},
  volume   = {82},
  pages    = {393--398},
  abstract = {The Human Factors Analysis and Classification System (HFACS) is a framework for classifying and analyzing human factors associated with accidents and incidents. The purpose of the present study was to examine the inter- and intra-rater reliability of the HFACS data classification process. Methods: A total of 125 safety professionals from a variety of industries were recruited from a series of two-day HFACS training workshops. Participants classified 95 real-world causal factors (five causal factors for each of the 19 HFACS categories) extracted from a variety of industrial accidents. Inter-rater reliability of the HFACS coding process was evaluated by comparing performance across participants immediately following training and intra-rater reliability was evaluated by having the same participants repeat the coding process following a two-week delay. Results: Krippendorff's Alpha was used to evaluate the reliability of the coding process across the various HFACS levels and categories. Results revealed the HFACS taxonomy to be reliable in terms of inter- and intra-rater reliability, with the latter producing slightly higher Alpha values. Conclusion: Results support the inter- and intra-rater reliability of the HFACS framework but also reveal additional opportunities for improving HFACS training and implementation.},
  language = {en},
  issn     = {0925-7535},
  doi      = {10.1016/j.ssci.2015.09.028},
}