# install 'devel' version of metafor package
install.packages("remotes")
remotes::install_github("wviechtb/metafor")
# load metafor package
library(metafor)
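The trial data listed below match the BCG vaccine dataset of Colditz et al. (1994) that ships with metafor as dat.bcg; assuming that source (only selected columns appear in the listing), the data would be loaded with something like:

# copy the BCG vaccine data into 'dat' (assumed source of the listing below)
dat <- dat.bcg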
# trial author year tpos tneg cpos cneg
# 1 Aronson 1948 4 119 11 128
# 2 Ferguson & Simes 1949 6 300 29 274
# 3 Rosenthal et al 1960 3 228 11 209
# 4 Hart & Sutherland 1977 62 13536 248 12619
# 5 Frimodt-Moller et al 1973 33 5036 47 5761
# 6 Stein & Aronson 1953 180 1361 372 1079
# 7 Vandiviere et al 1973 8 2537 10 619
# 8 TPT Madras 1980 505 87886 499 87892
# 9 Coetzee & Berjak 1968 29 7470 45 7232
# 10 Rosenthal et al 1961 17 1699 65 1600
# 11 Comstock et al 1974 186 50448 141 27197
# 12 Comstock & Webster 1969 5 2493 3 2338
# 13 Comstock et al 1976 27 16886 29 17825
# calculate log risk ratios and corresponding variances
dat <- escalc(measure="RR", ai=tpos, bi=tneg,
ci=cpos, di=cneg, data=dat)
dat
# trial author year tpos tneg cpos cneg yi vi
# 1 Aronson 1948 4 119 11 128 -0.8893 0.3256
# 2 Ferguson & Simes 1949 6 300 29 274 -1.5854 0.1946
# 3 Rosenthal et al 1960 3 228 11 209 -1.3481 0.4154
# 4 Hart & Sutherland 1977 62 13536 248 12619 -1.4416 0.0200
# 5 Frimodt-Moller et al 1973 33 5036 47 5761 -0.2175 0.0512
# 6 Stein & Aronson 1953 180 1361 372 1079 -0.7861 0.0069
# 7 Vandiviere et al 1973 8 2537 10 619 -1.6209 0.2230
# 8 TPT Madras 1980 505 87886 499 87892 0.0120 0.0040
# 9 Coetzee & Berjak 1968 29 7470 45 7232 -0.4694 0.0564
# 10 Rosenthal et al 1961 17 1699 65 1600 -1.3713 0.0730
# 11 Comstock et al 1974 186 50448 141 27197 -0.3394 0.0124
# 12 Comstock & Webster 1969 5 2493 3 2338 0.4459 0.5325
# 13 Comstock et al 1976 27 16886 29 17825 -0.0173 0.0714
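The code that produced the following random-effects model output is not shown above; assuming metafor's defaults (REML estimation), it would be:

# fit a random-effects model to the log risk ratios
res <- rma(yi, vi, data=dat)
res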
# Random-Effects Model (k = 13; tau^2 estimator: REML)
#
# tau^2 (estimated amount of total heterogeneity): 0.3132 (SE = 0.1664)
# tau (square root of estimated tau^2 value): 0.5597
# I^2 (total heterogeneity / total variability): 92.22%
# H^2 (total variability / sampling variability): 12.86
#
# Test for Heterogeneity:
# Q(df = 12) = 152.2330, p-val < .0001
#
# Model Results:
#
# estimate se zval pval ci.lb ci.ub
# -0.7145 0.1798 -3.9744 <.0001 -1.0669 -0.3622 ***
#
# ---
# Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
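The back-transformed summary that follows (average risk ratio with 95% confidence and prediction intervals) would be obtained with something like:

# estimated average risk ratio with 95% CI and 95% prediction interval
predict(res, transf=exp, digits=2)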
# pred ci.lb ci.ub pi.lb pi.ub
# 0.49 0.34 0.70 0.15 1.55
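The next example uses standardized mean differences from studies of writing-to-learn interventions (Bangert-Drowns et al., 2004). Assuming metafor's bundled copy of these data (only selected columns appear in the listing below), they would be loaded with:

# copy the writing-to-learn data into 'dat' (assumed source of the listing below)
dat <- dat.bangertdrowns2004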
# id author year ni yi vi
# 1 Ashworth 1992 60 0.650 0.070
# 2 Ayers 1993 34 -0.750 0.126
# 3 Baisch 1990 95 -0.210 0.042
# 4 Baker 1994 209 -0.040 0.019
# 5 Bauman 1992 182 0.230 0.022
# 6 Becker 1996 462 0.030 0.009
# 7 Bell & Bell 1985 38 0.260 0.106
# 8 Brodney 1994 542 0.060 0.007
# 9 Burton 1986 99 0.060 0.040
# 10 Davis, BH 1990 77 0.120 0.052
# 11 Davis, JJ 1996 40 0.770 0.107
# 12 Day 1994 190 0.000 0.021
# 13 Dipillo 1994 113 0.520 0.037
# 14 Ganguli 1989 50 0.540 0.083
# ...
# 46 Willey 1988 51 1.460 0.099
# 47 Willey 1988 46 0.040 0.087
# 48 Youngberg 1989 56 0.250 0.072
# illustrate the calculation of the SMD for study 14 (Ganguli, 1989)
escalc(measure="SMD", m1i=342, sd1i=68, n1i=27,
m2i=303, sd2i=75, n2i=23)
# yi vi
# 1 0.5384 0.0834
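Again, the call behind the model output below is not shown; under metafor defaults it would be:

# fit a random-effects model to the standardized mean differences
res <- rma(yi, vi, data=dat)
res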
# Random-Effects Model (k = 48; tau^2 estimator: REML)
#
# tau^2 (estimated amount of total heterogeneity): 0.0499 (SE = 0.0197)
# tau (square root of estimated tau^2 value): 0.2235
# I^2 (total heterogeneity / total variability): 58.37%
# H^2 (total variability / sampling variability): 2.40
#
# Test for Heterogeneity:
# Q(df = 47) = 107.1061, p-val < .0001
#
# Model Results:
#
# estimate se zval pval ci.lb ci.ub
# 0.2219 0.0460 4.8209 <.0001 0.1317 0.3122 ***
#
# ---
# Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
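The summary that follows (average SMD with 95% confidence and prediction intervals) would come from:

# estimated average standardized mean difference with 95% CI and 95% prediction interval
predict(res, digits=2)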
# pred se ci.lb ci.ub pi.lb pi.ub
# 0.22 0.05 0.13 0.31 -0.23 0.67
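The correlations below relate class attendance to grades and appear to come from the Credé et al. (2010) dataset included in metafor as dat.crede2010. Assuming that source and a restriction to the grade criterion (both assumptions; the loading code is not shown), the data could be prepared with:

# hypothetical: load the Crede et al. (2010) data and keep only the
# attendance-grade correlations (variable/level names are assumptions)
dat <- dat.crede2010
dat <- dat[dat$criterion == "grade", ]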
# studyid year source sampleid ni ri
# 1 2009 dissertation 1 76 0.8860
# 2 1975 journal 1 297 0.3000
# 4 1989 journal 1 265 0.4750
# 4 1989 journal 2 154 0.3340
# 5 2008 journal 1 162 0.6150
# 6 1999 journal 1 28 0.1450
# 6 1999 journal 2 33 0.2300
# 6 1999 journal 3 47 0.2700
# 6 1999 journal 4 25 -0.0228
# 6 1999 journal 5 48 0.4290
# 6 1999 journal 6 39 0.3490
# 6 1999 journal 7 41 0.2200
# 6 1999 journal 8 35 0.3390
# 6 1999 journal 9 46 0.4470
# ...
# 64 1980 journal 1 121 0.3500
# 65 2007 journal 1 100 0.2400
# 68 1986 journal 1 215 0.3090
# calculate r-to-z transformed correlations and corresponding variances
dat <- escalc(measure="ZCOR", ri=ri, ni=ni, data=dat)
dat
# studyid year source sampleid ni ri yi vi
# 1 2009 dissertation 1 76 0.8860 1.4030 0.0137
# 2 1975 journal 1 297 0.3000 0.3095 0.0034
# 4 1989 journal 1 265 0.4750 0.5165 0.0038
# 4 1989 journal 2 154 0.3340 0.3473 0.0066
# 5 2008 journal 1 162 0.6150 0.7169 0.0063
# 6 1999 journal 1 28 0.1450 0.1460 0.0400
# 6 1999 journal 2 33 0.2300 0.2342 0.0333
# 6 1999 journal 3 47 0.2700 0.2769 0.0227
# 6 1999 journal 4 25 -0.0228 -0.0228 0.0455
# 6 1999 journal 5 48 0.4290 0.4587 0.0222
# 6 1999 journal 6 39 0.3490 0.3643 0.0278
# 6 1999 journal 7 41 0.2200 0.2237 0.0263
# 6 1999 journal 8 35 0.3390 0.3530 0.0312
# 6 1999 journal 9 46 0.4470 0.4809 0.0233
# ...
# 64 1980 journal 1 121 0.3500 0.3654 0.0085
# 65 2007 journal 1 100 0.2400 0.2448 0.0103
# 68 1986 journal 1 215 0.3090 0.3194 0.0047
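Ignoring, for the moment, the dependency among estimates coming from the same study, the standard random-effects model shown below would be fitted with:

# fit a random-effects model to the r-to-z transformed correlations (ignoring dependency)
res <- rma(yi, vi, data=dat)
res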
# Random-Effects Model (k = 67; tau^2 estimator: REML)
#
# tau^2 (estimated amount of total heterogeneity): 0.0511 (SE = 0.0104)
# tau (square root of estimated tau^2 value): 0.2261
# I^2 (total heterogeneity / total variability): 93.83%
# H^2 (total variability / sampling variability): 16.21
#
# Test for Heterogeneity:
# Q(df = 66) = 1068.7213, p-val < .0001
#
# Model Results:
#
# estimate se zval pval ci.lb ci.ub
# 0.4547 0.0300 15.1343 <.0001 0.3958 0.5136 ***
#
# ---
# Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
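The back-transformed summary that follows would be obtained with:

# estimated average correlation with 95% CI and 95% prediction interval
predict(res, transf=transf.ztor, digits=2)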
# pred ci.lb ci.ub pi.lb pi.ub
# 0.43 0.38 0.47 0.01 0.72
A rough rule: the sampling errors of two estimates are dependent when at least some of the subjects contribute information to the computation of both estimates.
# fit multilevel random-effects model
res <- rma.mv(yi, vi, random = ~ 1 | studyid/sampleid, data=dat)
res
# Multivariate Meta-Analysis Model (k = 67; method: REML)
#
# Variance Components:
#
# estim sqrt nlvls fixed factor
# sigma^2.1 0.0376 0.1939 54 no studyid
# sigma^2.2 0.0159 0.1259 67 no studyid/sampleid
#
# Test for Heterogeneity:
# Q(df = 66) = 1068.7213, p-val < .0001
#
# Model Results:
#
# estimate se zval pval ci.lb ci.ub
# 0.4798 0.0331 14.5167 <.0001 0.4151 0.5446 ***
#
# ---
# Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
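As before, the back-transformed summary below would come from:

# estimated average correlation (back-transformed) with 95% CI and 95% prediction interval
predict(res, transf=transf.ztor, digits=2)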
# pred ci.lb ci.ub pi.lb pi.ub
# 0.45 0.39 0.50 0.02 0.73
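An alternative to the multilevel model is to aggregate the estimates within studies before the analysis. The listing below shows such aggregated data; one way to obtain them (an assumption, since the aggregation code is not shown) is metafor's aggregate() method for escalc objects, treating the within-study sampling errors as independent (the samples do not overlap):

# aggregate estimates within studies (rho=0: independent sampling errors within studies)
dat.agg <- aggregate(dat, cluster=studyid, rho=0)
dat.agg

For study 4, for example, this gives vi = 1/(1/0.0038 + 1/0.0066) ≈ 0.0024 and the corresponding inverse-variance weighted average of the two yi values (≈ 0.4547), consistent with the listing.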
# studyid year source sampleid ni ri yi vi
# 1 2009 dissertation 1.0 76.0 0.8860000 1.4030 0.0137
# 2 1975 journal 1.0 297.0 0.3000000 0.3095 0.0034
# 4 1989 journal 1.5 209.5 0.4045000 0.4547 0.0024
# 5 2008 journal 1.0 162.0 0.6150000 0.7169 0.0063
# 6 1999 journal 5.0 38.0 0.2673556 0.3066 0.0032
# ...
# 64 1980 journal 1.0 121.0 0.3500000 0.3654 0.0085
# 65 2007 journal 1.0 100.0 0.2400000 0.2448 0.0103
# 68 1986 journal 1.0 215.0 0.3090000 0.3194 0.0047
# Random-Effects Model (k = 54; tau^2 estimator: REML)
#
# tau^2 (estimated amount of total heterogeneity): 0.0528 (SE = 0.0115)
# tau (square root of estimated tau^2 value): 0.2298
# I^2 (total heterogeneity / total variability): 95.13%
# H^2 (total variability / sampling variability): 20.54
#
# Test for Heterogeneity:
# Q(df = 53) = 1034.5792, p-val < .0001
#
# Model Results:
#
# estimate se zval pval ci.lb ci.ub
# 0.4865 0.0332 14.6416 <.0001 0.4214 0.5517 ***
#
# ---
# Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# fit multilevel model assuming no within-study heterogeneity
res <- rma.mv(yi, vi, random = ~ 1 | studyid, data=dat)
res
# Multivariate Meta-Analysis Model (k = 67; method: REML)
#
# Variance Components:
#
# estim sqrt nlvls fixed factor
# sigma^2 0.0528 0.2298 54 no studyid
#
# Test for Heterogeneity:
# Q(df = 66) = 1068.7213, p-val < .0001
#
# Model Results:
#
# estimate se zval pval ci.lb ci.ub
# 0.4865 0.0332 14.6416 <.0001 0.4214 0.5517 ***
#
# ---
# Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
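The next example involves two outcomes, probing depth (PD) and attachment level (AL), measured in the same trials (Berkey et al., 1998). Assuming metafor's bundled copy of these data, they would be loaded with:

# copy the Berkey et al. (1998) data into 'dat' (assumed source of the listing below)
dat <- dat.berkey1998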
# trial author year ni outcome yi vi v1i v2i
# 1 1 Pihlstrom et al. 1983 14 PD 0.4700 0.0075 0.0075 0.0030
# 2 1 Pihlstrom et al. 1983 14 AL -0.3200 0.0077 0.0030 0.0077
# 3 2 Lindhe et al. 1982 15 PD 0.2000 0.0057 0.0057 0.0009
# 4 2 Lindhe et al. 1982 15 AL -0.6000 0.0008 0.0009 0.0008
# 5 3 Knowles et al. 1979 78 PD 0.4000 0.0021 0.0021 0.0007
# 6 3 Knowles et al. 1979 78 AL -0.1200 0.0014 0.0007 0.0014
# 7 4 Ramfjord et al. 1987 89 PD 0.2600 0.0029 0.0029 0.0009
# 8 4 Ramfjord et al. 1987 89 AL -0.3100 0.0015 0.0009 0.0015
# 9 5 Becker et al. 1988 16 PD 0.5600 0.0148 0.0148 0.0072
# 10 5 Becker et al. 1988 16 AL -0.3900 0.0304 0.0072 0.0304
# construct the block diagonal V matrix
V <- lapply(split(dat[c("v1i", "v2i")], dat$trial), as.matrix)
V <- bldiag(V)
V
# [,1] [,2] [,3] [,4] [,5] [,6] [,7] [,8] [,9] [,10]
# [1,] 0.0075 0.0030 . . . . . . . .
# [2,] 0.0030 0.0077 . . . . . . . .
# [3,] . . 0.0057 0.0009 . . . . . .
# [4,] . . 0.0009 0.0008 . . . . . .
# [5,] . . . . 0.0021 0.0007 . . . .
# [6,] . . . . 0.0007 0.0014 . . . .
# [7,] . . . . . . 0.0029 0.0009 . .
# [8,] . . . . . . 0.0009 0.0015 . .
# [9,] . . . . . . . . 0.0148 0.0072
# [10,] . . . . . . . . 0.0072 0.0304
# fit multivariate model
res <- rma.mv(yi, V, mods = ~ outcome - 1,
random = ~ outcome | trial, struct="UN", data=dat)
res
# Multivariate Meta-Analysis Model (k = 10; method: REML)
#
# Variance Components:
#
# outer factor: trial (nlvls = 5)
# inner factor: outcome (nlvls = 2)
#
# estim sqrt k.lvl fixed level
# tau^2.1 0.0327 0.1807 5 no AL
# tau^2.2 0.0117 0.1083 5 no PD
#
# rho.AL rho.PD AL PD
# AL 1 - 5
# PD 0.6088 1 no -
#
# Test for Residual Heterogeneity:
# QE(df = 8) = 128.2267, p-val < .0001
#
# Test of Moderators (coefficients 1:2):
# QM(df = 2) = 108.8607, p-val < .0001
#
# Model Results:
#
# estimate se zval pval ci.lb ci.ub
# outcomeAL -0.3392 0.0879 -3.8589 0.0001 -0.5115 -0.1669 ***
# outcomePD 0.3534 0.0588 6.0057 <.0001 0.2381 0.4688 ***
#
# ---
# Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
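The output that follows tests whether the two outcomes differ; it would be obtained by contrasting the two coefficients, for example with:

# test the difference between the outcome-specific estimates (AL minus PD)
anova(res, X=c(1,-1))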
# Hypothesis:
# 1: outcomeAL - outcomePD = 0
#
# Results:
# estimate se zval pval
# 1: -0.6926 0.0744 -9.3120 <.0001
#
# Test of Hypothesis:
# QM(df = 1) = 86.7134, p-val < .0001
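The longitudinal example below uses the data of Ishak et al. (2007) on deep-brain stimulation in Parkinson's disease, which metafor provides in wide format as dat.ishak2007 (one row per study, with yi/vi pairs for up to four time points). Assuming that source:

# copy the Ishak et al. (2007) data (wide format) into 'dat'
dat <- dat.ishak2007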
# restructure into long format
dat <- reshape(dat, direction="long", idvar="study", v.names=c("yi","vi"),
varying=list(c(2,4,6,8), c(3,5,7,9)))
dat <- dat[order(dat$study, dat$time),]
dat <- dat[!is.na(dat$yi),]
dat
# study mdur mbase time yi vi
# Alegret (2001) 16.1 53.6 1 -33.4 14.3
# Barichella (2003) 13.5 45.3 1 -20.0 7.3
# Barichella (2003) 13.5 45.3 3 -30.0 5.7
# Berney (2002) 13.6 45.6 1 -21.1 7.3
# Burchiel (1999) 13.6 48.0 1 -20.0 8.0
# Burchiel (1999) 13.6 48.0 2 -20.0 8.0
# Burchiel (1999) 13.6 48.0 3 -18.0 5.0
# Chen (2003) 12.1 65.7 2 -32.9 125.0
# ...
# Vingerhoets (2002) 16.0 48.8 1 -19.7 18.5
# Vingerhoets (2002) 16.0 48.8 2 -22.1 18.1
# Vingerhoets (2002) 16.0 48.8 3 -24.3 18.2
# Vingerhoets (2002) 16.0 48.8 4 -21.9 16.7
# Volkman (2001) 13.1 56.4 2 -37.8 20.9
# Volkman (2001) 13.1 56.4 3 -34.0 26.4
# Weselburger (2002) 14.0 50.3 1 -22.1 40.8
# construct the (block diagonal) V matrix with an AR(1) structure
V <- vcalc(vi, cluster=study, time1=time, data=dat, phi=0.8)
V[1:8,1:8]
# [,1] [,2] [,3] [,4] [,5] [,6] [,7] [,8]
# [1,] 14.300 . . . . . . .
# [2,] . 7.300 4.128 . . . . .
# [3,] . 4.128 5.700 . . . . .
# [4,] . . . 7.300 . . . .
# [5,] . . . . 8.000 6.400 4.048 .
# [6,] . . . . 6.400 8.000 5.060 .
# [7,] . . . . 4.048 5.060 5.000 .
# [8,] . . . . . . . 125.000
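The matrix printed next is the corresponding correlation matrix, which makes the assumed AR(1) structure visible (phi = 0.8 at lag 1 and 0.8^2 = 0.64 at lag 2); it could be obtained with something like:

# correlation matrix implied by V for the first 8 rows/columns
# (structural zeros are shown as dots in the printout below)
round(cov2cor(V[1:8,1:8]), 2)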
# [,1] [,2] [,3] [,4] [,5] [,6] [,7] [,8]
# [1,] 1.00 . . . . . . .
# [2,] . 1.00 0.64 . . . . .
# [3,] . 0.64 1.00 . . . . .
# [4,] . . . 1.00 . . . .
# [5,] . . . . 1.00 0.80 0.64 .
# [6,] . . . . 0.80 1.00 0.80 .
# [7,] . . . . 0.64 0.80 1.00 .
# [8,] . . . . . . . 1.00
# multivariate model with heteroscedastic AR(1) structure for the true effects
res <- rma.mv(yi, V, mods = ~ factor(time) - 1, random = ~ time | study,
struct="HAR", data=dat, digits=2)
res
# Multivariate Meta-Analysis Model (k = 82; method: REML)
#
# Variance Components:
#
# outer factor: study (nlvls = 46)
# inner factor: time (nlvls = 4)
#
# estim sqrt k.lvl fixed level
# tau^2.1 21.64 4.65 24 no 1
# tau^2.2 33.80 5.81 22 no 2
# tau^2.3 26.31 5.13 25 no 3
# tau^2.4 30.69 5.54 11 no 4
# rho 0.92 no
#
# Test for Residual Heterogeneity:
# QE(df = 78) = 287.97, p-val < .01
#
# Test of Moderators (coefficients 1:4):
# QM(df = 4) = 889.57, p-val < .01
#
# Model Results:
#
# estimate se zval pval ci.lb ci.ub
# factor(time)1 -25.84 1.01 -25.70 <.01 -27.81 -23.87 ***
# factor(time)2 -27.32 1.15 -23.66 <.01 -29.58 -25.06 ***
# factor(time)3 -28.70 1.04 -27.53 <.01 -30.74 -26.66 ***
# factor(time)4 -26.27 1.44 -18.24 <.01 -29.09 -23.45 ***
#
# ---
# Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# plot results
par(mar=c(4,4,2,2))
with(dat, interaction.plot(time, study, yi, type="b", pch=19, col="gray70",
lty="solid", xaxt="n", legend=FALSE, bty="l",
xlab="Time Point", ylab="Mean Difference"))
axis(side=1, at=1:4,
     labels=c("1 (3 months)", "2 (6 months)", "3 (12 months)", "4 (12+ months)"))
points(1:4, coef(res), type="o", pch=19, lwd=4, cex=2)
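The final example uses data from the meta-analysis by Assink et al. (2015) on risk factors for persistent delinquent behavior, as used in the Assink and Wibbelink (2016) tutorial and included in metafor as dat.assink2016. Assuming that source:

# copy the Assink et al. (2015) data into 'dat' (assumed source of the listing below)
dat <- dat.assink2016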
# study esid id yi vi pubstatus year deltype
# 1 1 1 0.9066 0.0740 1 4.5 general
# 1 2 2 0.4295 0.0398 1 4.5 general
# 1 3 3 0.2679 0.0481 1 4.5 general
# 1 4 4 0.2078 0.0239 1 4.5 general
# 1 5 5 0.0526 0.0331 1 4.5 general
# 1 6 6 -0.0507 0.0886 1 4.5 general
# 2 1 7 0.5117 0.0115 1 1.5 general
# 2 2 8 0.4738 0.0076 1 1.5 general
# 2 3 9 0.3544 0.0065 1 1.5 general
# ...
# 16 1 79 0.7156 0.0914 1 2.5 overt
# 16 2 80 0.7067 0.0875 1 2.5 covert
# 16 3 81 0.6475 0.0330 1 2.5 general
# 16 4 82 0.6428 0.0861 1 2.5 covert
# 16 5 83 0.6271 0.0400 1 2.5 general
# 16 6 84 0.6238 0.0680 1 2.5 general
# 16 7 85 0.6025 0.1287 1 2.5 overt
# 16 8 86 0.5763 0.0332 1 2.5 general
# 16 9 87 0.5171 0.0517 1 2.5 covert
# 16 10 88 -0.3797 0.0390 1 2.5 covert
# 16 11 89 -0.4228 0.0664 1 2.5 covert
# 16 12 90 -0.4245 0.0809 1 2.5 covert
# 16 13 91 -0.4671 0.0667 1 2.5 covert
# 16 14 92 -0.5230 0.0988 1 2.5 overt
# 16 15 93 -0.5675 0.0340 1 2.5 covert
# 16 16 94 -0.7586 0.0437 1 2.5 covert
# 17 1 95 0.3453 0.0340 1 5.5 general
# 17 2 96 0.1221 0.0158 1 5.5 general
# 17 3 97 0.0906 0.0107 1 5.5 general
# 17 4 98 0.0040 0.0208 1 5.5 general
# 17 5 99 -0.0207 0.0123 1 5.5 general
# 17 6 100 -0.0660 0.0100 1 5.5 general
# assume that the effect sizes within studies are correlated with rho=0.6
V <- vcalc(vi, cluster=study, obs=esid, data=dat, rho=0.6)
# examine the V matrix for studies 1 and 2
V[dat$study %in% c(1,2), dat$study %in% c(1,2)]
# [,1] [,2] [,3] [,4] [,5] [,6] [,7] [,8] [,9]
# [1,] 0.0740 0.0326 0.0358 0.0252 0.0297 0.0486 . . .
# [2,] 0.0326 0.0398 0.0263 0.0185 0.0218 0.0356 . . .
# [3,] 0.0358 0.0263 0.0481 0.0203 0.0239 0.0392 . . .
# [4,] 0.0252 0.0185 0.0203 0.0239 0.0169 0.0276 . . .
# [5,] 0.0297 0.0218 0.0239 0.0169 0.0331 0.0325 . . .
# [6,] 0.0486 0.0356 0.0392 0.0276 0.0325 0.0886 . . .
# [7,] . . . . . . 0.0115 0.0056 0.0052
# [8,] . . . . . . 0.0056 0.0076 0.0042
# [9,] . . . . . . 0.0052 0.0042 0.0065
# use a correlation of 0.7 for effect sizes corresponding to the same type of
# delinquent behavior and a correlation of 0.5 for effect sizes corresponding
# to different types of delinquent behavior
V <- vcalc(vi, cluster=study, type=deltype, obs=esid, data=dat, rho=c(0.7, 0.5))
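The excerpt printed below shows the implied correlation structure within a cluster: 0.70 between estimates of the same delinquency type and 0.50 between estimates of different types. One way to inspect these blocks (a sketch; the code behind the printout is not shown) is to split V by cluster and convert each block to a correlation matrix:

# implied within-study correlation matrices (one list element per study)
lapply(blsplit(V, cluster=dat$study), function(v) round(cov2cor(v), 2))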
# [,1] [,2] [,3] [,4] [,5] [,6] [,7]
# [1,] 1.00 0.50 0.50 0.50 0.50 0.50 .
# [2,] 0.50 1.00 0.50 0.70 0.50 0.50 .
# [3,] 0.50 0.50 1.00 0.50 0.70 0.70 .
# [4,] 0.50 0.70 0.50 1.00 0.50 0.50 .
# [5,] 0.50 0.50 0.70 0.50 1.00 0.70 .
# [6,] 0.50 0.50 0.70 0.50 0.70 1.00 .
# [7,] . . . . . . .
# fit multilevel model with random effects for studies, delinquency types
# within studies, and observed effects within delinquency types
dat$deltype <- factor(dat$deltype)
dat$deltype <- relevel(dat$deltype, ref="general")
res <- rma.mv(yi, V, mods = ~ deltype,
random = ~ 1 | study/deltype/esid, data=dat)
res
# Multivariate Meta-Analysis Model (k = 100; method: REML)
#
# Variance Components:
#
# estim sqrt nlvls fixed factor
# sigma^2.1 0.0747 0.2734 17 no study
# sigma^2.2 0.0000 0.0000 21 no study/deltype
# sigma^2.3 0.1387 0.3724 100 no study/deltype/esid
#
# Test for Residual Heterogeneity:
# QE(df = 97) = 783.0157, p-val < .0001
#
# Test of Moderators (coefficients 2:3):
# QM(df = 2) = 8.8461, p-val = 0.0120
#
# Model Results:
#
# estimate se zval pval ci.lb ci.ub
# intrcpt 0.4029 0.0960 4.1984 <.0001 0.2148 0.5909 ***
# deltypecovert -0.6948 0.2343 -2.9652 0.0030 -1.1541 -0.2355 **
# deltypeovert -0.1569 0.1679 -0.9343 0.3501 -0.4859 0.1722
#
# ---
# Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
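The output below reflects cluster-robust inference applied to the multilevel model. The footnote in the output points to metafor's robust() function with its default CR1-type adjustment and residual degrees of freedom, which (as an assumption about the exact call) would be:

# cluster-robust inference (CR1 adjustment, residual degrees of freedom)
robust(res, cluster=dat$study)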
# Multivariate Meta-Analysis Model (k = 100; method: REML)
#
# Variance Components:
#
# estim sqrt nlvls fixed factor
# sigma^2.1 0.0747 0.2734 17 no study
# sigma^2.2 0.0000 0.0000 21 no study/deltype
# sigma^2.3 0.1387 0.3724 100 no study/deltype/esid
#
# Test for Residual Heterogeneity:
# QE(df = 97) = 783.0157, p-val < .0001
#
# Number of estimates: 100
# Number of clusters: 17
# Estimates per cluster: 1-22 (mean: 5.88, median: 5)
#
# Test of Moderators (coefficients 2:3):¹
# F(df1 = 2, df2 = 14) = 423.4635, p-val < .0001
#
# Model Results:
#
# estimate se¹ tval¹ df¹ pval¹ ci.lb¹ ci.ub¹
# intrcpt 0.4029 0.1022 3.9407 14 0.0015 0.1836 0.6221 **
# deltypecovert -0.6948 0.0368 -18.8757 14 <.0001 -0.7738 -0.6159 ***
# deltypeovert -0.1569 0.0674 -2.3282 14 0.0354 -0.3013 -0.0124 *
#
# ---
# Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
#
# 1) results based on cluster-robust inference (var-cov estimator: CR1,
# approx t/F-tests and confidence intervals, df: residual method)
# cluster-robust inference with small-sample adjustments to the SEs and dfs
# computed based on a Satterthwaite approximation (CR2 estimator from the
# clubSandwich package)
library(clubSandwich)
coef_test(res, vcov="CR2", cluster=dat$study)
# Coef. Estimate SE t-stat d.f. (Satt) p-val (Satt) Sig.
# intrcpt 0.403 0.0961 4.19 14.56 <0.001 ***
# deltypecovert -0.695 0.0899 -7.73 1.91 0.0185 *
# deltypeovert -0.157 0.0684 -2.29 1.94 0.1526
Assink, M., Put, C. E. van der, Hoeve, M., Vries, S. L. A. de, Stams, G. J. J. M., & Oort, F. J. (2015). Risk factors for persistent delinquent behavior among juveniles: A meta-analytic review. Clinical Psychology Review, 42, 47–61. https://doi.org/10.1016/j.cpr.2015.08.002
Assink, M., & Wibbelink, C. J. M. (2016). Fitting three-level meta-analytic models in R: A step-by-step tutorial. The Quantitative Methods for Psychology, 12(3), 154–174. https://doi.org/10.20982/tqmp.12.3.p154
Bangert-Drowns, R. L., Hurley, M. M., & Wilkinson, B. (2004). The effects of school-based writing-to-learn interventions on academic achievement: A meta-analysis. Review of Educational Research, 74(1), 29–58. https://doi.org/10.3102/00346543074001029
Berkey, C. S., Hoaglin, D. C., Antczak-Bouckoms, A., Mosteller, F., & Colditz, G. A. (1998). Meta-analysis of multiple outcomes by regression with random effects. Statistics in Medicine, 17(22), 2537–2550. https://doi.org/10.1002/(sici)1097-0258(19981130)17:22<2537::aid-sim953>3.0.co;2-c
Colditz, G. A., Brewer, T. F., Berkey, C. S., Wilson, M. E., Burdick, E., Fineberg, H. V., & Mosteller, F. (1994). Efficacy of BCG vaccine in the prevention of tuberculosis: Meta-analysis of the published literature. Journal of the American Medical Association, 271(9), 698–702. https://doi.org/10.1001/jama.1994.03510330076038
Credé, M., Roch, S. G., & Kieszczynka, U. M. (2010). Class attendance in college: A meta-analytic review of the relationship of class attendance with grades and student characteristics. Review of Educational Research, 80(2), 272–295. https://doi.org/10.3102/0034654310362998
Gleser, L. J., & Olkin, I. (2009). Stochastically dependent effect sizes. In H. Cooper, L. V. Hedges, & J. C. Valentine (Eds.), The handbook of research synthesis and meta-analysis (2nd ed., pp. 357–376). New York: Russell Sage Foundation.
Hasselblad, V. (1998). Meta-analysis of multitreatment studies. Medical Decision Making, 18(1), 37–43. https://doi.org/10.1177/0272989X9801800110
Hedges, L. V., Tipton, E., & Johnson, M. C. (2010). Robust variance estimation in meta-regression with dependent effect size estimates. Research Synthesis Methods, 1(1), 39–65. https://doi.org/10.1002/jrsm.5
Ishak, K. J., Platt, R. W., Joseph, L., Hanley, J. A., & Caro, J. J. (2007). Meta-analysis of longitudinal studies. Clinical Trials, 4(5), 525–539. https://doi.org/10.1177/1740774507083567
Kearon, C., Julian, J. A., Math, M., Newman, T. E., & Ginsberg, J. S. (1998). Noninvasive diagnosis of deep venous thrombosis. Annals of Internal Medicine, 128(8), 663–677. https://doi.org/10.7326/0003-4819-128-8-199804150-00011
Lajeunesse, M. J. (2011). On the meta-analysis of response ratios for studies with correlated and multi-group designs. Ecology, 92(11), 2049–2055. https://doi.org/10.1890/11-0423.1
Olkin, I., & Finn, J. D. (1990). Testing correlated correlations. Psychological Bulletin, 108(2), 330–333. https://doi.org/10.1037/0033-2909.108.2.330
Pustejovsky, J. E., & Tipton, E. (2018). Small-sample methods for cluster-robust variance estimation and hypothesis testing in fixed effects models. Journal of Business & Economic Statistics, 36(4), 672–683. https://doi.org/10.1080/07350015.2016.1247004
Pustejovsky, J. E., & Tipton, E. (2021). Meta-analysis with robust variance estimation: Expanding the range of working models. Prevention Science. https://doi.org/10.1007/s11121-021-01246-3
Steiger, J. H. (1980). Tests for comparing elements of a correlation matrix. Psychological Bulletin, 87(2), 245–251. https://doi.org/10.1037/0033-2909.87.2.245
Tanner-Smith, E. E., Tipton, E., & Polanin, J. R. (2016). Handling complex meta-analytic data structures using robust variance estimates: A tutorial in R. Journal of Developmental and Life-Course Criminology, 2(1), 85–112. https://doi.org/10.1007/s40865-016-0026-5
Tipton, E. (2015). Small sample adjustments for robust variance estimation with meta-regression. Psychological Methods, 20(3), 375–393. https://doi.org/10.1037/met0000011
Tipton, E., & Pustejovsky, J. E. (2015). Small-sample adjustments for tests of moderators and model fit using robust variance estimation in meta-regression. Journal of Educational and Behavioral Statistics, 40(6), 604–634. https://doi.org/10.3102/1076998615606099
Wei, Y., & Higgins, J. P. (2013). Estimating within-study covariances in multivariate meta-analysis with multiple outcomes. Statistics in Medicine, 32(7), 1191–1205. https://doi.org/10.1002/sim.5679