@EdWorkingPaper{ai26-1394,
  title = "Why Fadeout is (Probably) Worse Than We Think: Adjusting for Correlated Sampling Error in Meta-Analyses of Behavioral Interventions",
  author = "Joshua B. Gilbert, Zachary Himmelsbach",
  institution = "Annenberg Institute at Brown University",
  number = "1394",
  year = "2026",
  month = "January",
  URL = "http://www.edworkingpapers.com/ai26-1394",
  abstract = {The extent to which intervention effects persist or fade over time is an important question in the behavioral sciences. In meta-analysis, persistence is typically assessed by meta-regressing effect sizes at follow-up on effect sizes at endline. While common, the standard meta-regression does not adjust for the shared sampling error between effect sizes across time points. We show that, in general, estimated slopes from the standard meta-regression are inflated under mild assumptions about correlations between outcomes across time. We then show how to adjust for correlated sampling error using a sensitivity analysis approach, with meta-analytic data from a series of social-emotional learning interventions. Our results suggest that effect fadeout is likely more severe than current estimates suggest.},
}
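
As a stylized illustration of why correlated sampling error can inflate the naive persistence slope (this sketch and its notation are illustrative, not the paper's own derivation): suppose study $k$ contributes $\hat\theta_{1k} = \theta_{1k} + e_{1k}$ at endline and $\hat\theta_{2k} = \theta_{2k} + e_{2k}$ at follow-up, where the sampling errors are mean zero, independent of the true effects, and correlated within study, $\operatorname{Cov}(e_{1k}, e_{2k}) = \omega_k > 0$, because both estimates come from the same sample. For an unweighted meta-regression of $\hat\theta_{2}$ on $\hat\theta_{1}$, the slope then converges to

\[
\beta_{\text{naive}}
= \frac{\operatorname{Cov}(\hat\theta_1,\hat\theta_2)}{\operatorname{Var}(\hat\theta_1)}
= \frac{\operatorname{Cov}(\theta_1,\theta_2) + \bar\omega}{\operatorname{Var}(\theta_1) + \bar v_1},
\qquad
\beta_{\text{target}} = \frac{\operatorname{Cov}(\theta_1,\theta_2)}{\operatorname{Var}(\theta_1)},
\]

where $\bar\omega$ is the average within-study error covariance and $\bar v_1$ the average endline sampling variance. Whenever $\bar\omega / \bar v_1 > \beta_{\text{target}}$ (for example, substantial fadeout combined with a high within-study error correlation), the naive slope exceeds the target and overstates persistence, which is one way to read the abstract's claim that standard estimates are inflated under mild assumptions.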