@article{10.3389/fpsyg.2012.00137,
  author   = {Hoekstra, Rink and Kiers, Henk and Johnson, Addie},
  title    = {Are Assumptions of Well-Known Statistical Techniques Checked, and Why (Not)?},
  journal  = {Frontiers in Psychology},
  volume   = {3},
  pages    = {137},
  year     = {2012},
  issn     = {1664-1078},
  doi      = {10.3389/fpsyg.2012.00137},
  url      = {https://www.frontiersin.org/article/10.3389/fpsyg.2012.00137},
  abstract = {A valid interpretation of most statistical techniques requires that one or more assumptions be met. In published articles, however, little information tends to be reported on whether the data satisfy the assumptions underlying the statistical techniques used. This could be due to self-selection: Only manuscripts with data fulfilling the assumptions are submitted. Another explanation could be that violations of assumptions are rarely checked for in the first place. We studied whether and how 30 researchers checked fictitious data for violations of assumptions in their own working environment. Participants were asked to analyze the data as they would their own data, for which often used and well-known techniques such as the t-procedure, ANOVA and regression (or non-parametric alternatives) were required. It was found that the assumptions of the techniques were rarely checked, and that if they were, it was regularly by means of a statistical test. Interviews afterward revealed a general lack of knowledge about assumptions, the robustness of the techniques with regards to the assumptions, and how (or whether) assumptions should be checked. These data suggest that checking for violations of assumptions is not a well-considered choice, and that the use of statistics can be described as opportunistic.},
}