# Read the data file. It is a CSV (comma-separated values) file and can be opened with Notepad or Excel.
dta <- read.csv("TwoModerators1.csv", header = TRUE)
# Load PROCESS. Make sure process.r can be read (in the working directory, or give a full path).
source('process.r')
##
## ********************* PROCESS for R Version 4.3.1 *********************
##
## Written by Andrew F. Hayes, Ph.D. www.afhayes.com
## Documentation available in Hayes (2022). www.guilford.com/p/hayes3
##
## ***********************************************************************
##
## PROCESS is now ready for use.
## Copyright 2020-2023 by Andrew F. Hayes ALL RIGHTS RESERVED
## Workshop schedule at http://haskayne.ucalgary.ca/CCRAM
##
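# (Optional) A quick check that the data were read correctly; the columns X, Z1, Z2, Y are the
# ones used in the PROCESS call below.
str(dta)             # should report 500 observations
colSums(is.na(dta))  # check for missing values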
# Run the analysis with PROCESS
# Variable names must be passed as quoted strings (the statistics are excellent; the programming interface is less polished)
process(data = dta, y = 'Y', x = 'X', w = 'Z1', z = 'Z2', model = 2,
        moments = 1, jn = 1, plot = 1, modelbt = 1, boot = 999)
##
## ********************* PROCESS for R Version 4.3.1 *********************
##
## Written by Andrew F. Hayes, Ph.D. www.afhayes.com
## Documentation available in Hayes (2022). www.guilford.com/p/hayes3
##
## ***********************************************************************
##
## Model : 2
## Y : Y
## X : X
## W : Z1
## Z : Z2
##
## Sample size: 500
##
## Random seed: 802496
##
##
## ***********************************************************************
## Outcome Variable: Y
##
## Model Summary:
## R R-sq MSE F df1 df2 p
## 0.5195 0.2698 1.0150 36.5135 5.0000 494.0000 0.0000
##
## Model:
## coeff se t p LLCI ULCI
## constant 0.0614 0.0452 1.3570 0.1754 -0.0275 0.1503
## X 0.4462 0.0466 9.5834 0.0000 0.3547 0.5376
## Z1 0.4306 0.0451 9.5366 0.0000 0.3419 0.5193
## Int_1 0.0234 0.0462 0.5063 0.6129 -0.0674 0.1142
## Z2 0.0314 0.0456 0.6891 0.4911 -0.0582 0.1210
## Int_2 0.1003 0.0499 2.0087 0.0451 0.0022 0.1984
##
## Product terms key:
## Int_1 : X x Z1
## Int_2 : X x Z2
##
## Test(s) of highest order unconditional interaction(s):
## R2-chng F df1 df2 p
## X*W 0.0004 0.2563 1.0000 494.0000 0.6129
## X*Z 0.0060 4.0347 1.0000 494.0000 0.0451
## BOTH 0.0062 2.1052 2.0000 494.0000 0.1229
## ----------
## Focal predictor: X (X)
## Moderator: Z1 (W)
## Moderator: Z2 (Z)
##
## Conditional effects of the focal predictor at values of the moderator(s):
##       Z1       Z2   effect       se        t        p     LLCI     ULCI
##  -1.0132  -0.9666   0.3255   0.0819   3.9734   0.0001   0.1646   0.4865
##  -1.0132   0.0262   0.4251   0.0646   6.5817   0.0000   0.2982   0.5520
##  -1.0132   1.0190   0.5247   0.0809   6.4846   0.0000   0.3657   0.6836
##  -0.0023  -0.9666   0.3492   0.0671   5.2013   0.0000   0.2173   0.4811
##  -0.0023   0.0262   0.4487   0.0466   9.6364   0.0000   0.3572   0.5402
##  -0.0023   1.0190   0.5483   0.0689   7.9599   0.0000   0.4130   0.6837
##   1.0086  -0.9666   0.3728   0.0817   4.5659   0.0000   0.2124   0.5333
##   1.0086   0.0262   0.4724   0.0673   7.0177   0.0000   0.3401   0.6047
##   1.0086   1.0190   0.5720   0.0855   6.6899   0.0000   0.4040   0.7399
##
## Data for visualizing the conditional effect of the focal predictor:
## X Z1 Z2 Y
## -0.9382 -1.0132 -0.9666 -0.7107
## 0.0346 -1.0132 -0.9666 -0.3940
## 1.0074 -1.0132 -0.9666 -0.0774
## -0.9382 -1.0132 0.0262 -0.7729
## 0.0346 -1.0132 0.0262 -0.3594
## 1.0074 -1.0132 0.0262 0.0542
## -0.9382 -1.0132 1.0190 -0.8351
## 0.0346 -1.0132 1.0190 -0.3247
## 1.0074 -1.0132 1.0190 0.1857
## -0.9382 -0.0023 -0.9666 -0.2976
## 0.0346 -0.0023 -0.9666 0.0421
## 1.0074 -0.0023 -0.9666 0.3817
## -0.9382 -0.0023 0.0262 -0.3598
## 0.0346 -0.0023 0.0262 0.0767
## 1.0074 -0.0023 0.0262 0.5132
## -0.9382 -0.0023 1.0190 -0.4220
## 0.0346 -0.0023 1.0190 0.1114
## 1.0074 -0.0023 1.0190 0.6448
## -0.9382 1.0086 -0.9666 0.1155
## 0.0346 1.0086 -0.9666 0.4782
## 1.0074 1.0086 -0.9666 0.8408
## -0.9382 1.0086 0.0262 0.0533
## 0.0346 1.0086 0.0262 0.5128
## 1.0074 1.0086 0.0262 0.9723
## -0.9382 1.0086 1.0190 -0.0089
## 0.0346 1.0086 1.0190 0.5475
## 1.0074 1.0086 1.0190 1.1038
##
## ***********************************************************************
## Bootstrapping progress:
##
##
## ********** BOOTSTRAP RESULTS FOR REGRESSION MODEL PARAMETERS **********
##
## Outcome variable: Y
##
## Coeff BootMean BootSE BootLLCI BootULCI
## constant 0.0614 0.0607 0.0452 -0.0302 0.1507
## X 0.4462 0.4460 0.0482 0.3535 0.5435
## Z1 0.4306 0.4289 0.0463 0.3365 0.5172
## Int_1 0.0234 0.0237 0.0457 -0.0650 0.1136
## Z2 0.0314 0.0312 0.0425 -0.0549 0.1151
## Int_2 0.1003 0.1010 0.0430 0.0147 0.1856
##
## ******************** ANALYSIS NOTES AND ERRORS ************************
##
## Level of confidence for all confidence intervals in output: 95
##
## Number of bootstraps for percentile bootstrap confidence intervals: 5000
##
## W values in conditional tables are the mean and +/- SD from the mean.
##
## Z values in conditional tables are the mean and +/- SD from the mean.
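# Each conditional effect above is just a combination of the model coefficients:
# effect of X = b_X + b_Int1 * Z1 + b_Int2 * Z2. For example, at Z1 = -1.0132 and Z2 = -0.9666:
0.4462 + 0.0234 * (-1.0132) + 0.1003 * (-0.9666)  # ~ 0.3255, the first row of the conditional-effects table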
# Plot the interaction
m3 <- lm(Y ~ X + Z1 + Z2 + X:Z1 + X:Z2, data = dta)
interactions::interact_plot(m3, pred = X, modx = Z1, mod2 = Z2, interval = TRUE,
                            int.type = "confidence", int.width = .8)
## Warning: X and Z1 and Z2 are not included in an interaction with one another in the
## model.
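# The simple slopes can also be requested from the same lm fit (a sketch using
# interactions::sim_slopes; the values should match the PROCESS conditional-effects table above).
interactions::sim_slopes(m3, pred = X, modx = Z1, mod2 = Z2)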
# Create the product terms by hand for the lavaan model
dta$int1 <- dta$X * dta$Z1
dta$int2 <- dta$X * dta$Z2
# Z1 values at the mean and +/- 1 SD
k11 <- mean(dta$Z1) - sd(dta$Z1)
k12 <- mean(dta$Z1)
k13 <- mean(dta$Z1) + sd(dta$Z1)
round(c(k11, k12, k13), 3)
## [1] -1.013 -0.002 1.009
# Z2 values at the mean and +/- 1 SD
k21 <- mean(dta$Z2) - sd(dta$Z2)
k22 <- mean(dta$Z2)
k23 <- mean(dta$Z2) + sd(dta$Z2)
round(c(k21, k22, k23), 3)
## [1] -0.967 0.026 1.019
model1 <-'
Y ~ b1*X + b2*Z1 + b3*Z2 + b4*int1 + b5*int2
sslope11 := b1+b4*(-1.013)+b5*(-0.967)
sslope12 := b1+b4*(-1.013)+b5*(0.026)
sslope13 := b1+b4*(-1.013)+b5*(1.019)
sslope21 := b1+b4*(-0.002)+b5*(-0.967)
sslope22 := b1+b4*(-0.002)+b5*(0.026)
sslope23 := b1+b4*(-0.002)+b5*(1.019)
sslope31 := b1+b4*(1.009)+b5*(-0.967)
sslope32 := b1+b4*(1.009)+b5*(0.026)
sslope33 := b1+b4*(1.009)+b5*(1.019)
'
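# The moderator values above are hard-coded. A sketch (model1_auto is a hypothetical name) that
# builds the same lavaan syntax from k11-k23 with sprintf(), so the values stay in sync with the data:
grid <- expand.grid(z = c(k21, k22, k23), w = c(k11, k12, k13))
slope_defs <- sprintf("sslope%d%d := b1 + b4*(%.3f) + b5*(%.3f)",
                      rep(1:3, each = 3), rep(1:3, times = 3), grid$w, grid$z)
model1_auto <- paste(c("Y ~ b1*X + b2*Z1 + b3*Z2 + b4*int1 + b5*int2", slope_defs),
                     collapse = "\n")
cat(model1_auto)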
# Path analysis output
fit <- lavaan::sem(model1, data = dta)
summary(fit)
## lavaan 0.6.15 ended normally after 1 iteration
##
## Estimator ML
## Optimization method NLMINB
## Number of model parameters 6
##
## Number of observations 500
##
## Model Test User Model:
##
## Test statistic 0.000
## Degrees of freedom 0
##
## Parameter Estimates:
##
## Standard errors Standard
## Information Expected
## Information saturated (h1) model Structured
##
## Regressions:
## Estimate Std.Err z-value P(>|z|)
## Y ~
## X (b1) 0.446 0.046 9.641 0.000
## Z1 (b2) 0.431 0.045 9.594 0.000
## Z2 (b3) 0.031 0.045 0.693 0.488
## int1 (b4) 0.023 0.046 0.509 0.610
## int2 (b5) 0.100 0.050 2.021 0.043
##
## Variances:
## Estimate Std.Err z-value P(>|z|)
## .Y 1.003 0.063 15.811 0.000
##
## Defined Parameters:
## Estimate Std.Err z-value P(>|z|)
## sslope11 0.325 0.081 3.997 0.000
## sslope12 0.425 0.064 6.622 0.000
## sslope13 0.525 0.080 6.525 0.000
## sslope21 0.349 0.067 5.231 0.000
## sslope22 0.449 0.046 9.694 0.000
## sslope23 0.548 0.068 8.008 0.000
## sslope31 0.373 0.081 4.592 0.000
## sslope32 0.472 0.067 7.059 0.000
## sslope33 0.572 0.085 6.730 0.000
# Bootstrap confidence intervals for the path coefficients and simple slopes
set.seed(1234)
fit <- lavaan::sem(model1, data = dta, test = "bootstrap", bootstrap = 501)
lavaan::parameterEstimates(fit, ci = TRUE, boot.ci.type = "bca.simple")
| lhs      | op | rhs                        | label    | est     | se     | z     | pvalue   | ci.lower | ci.upper |
|----------|----|----------------------------|----------|---------|--------|-------|----------|----------|----------|
| Y        | ~  | X                          | b1       | 0.446   | 0.0463 | 9.64  | 0        | 0.355    | 0.537    |
| Y        | ~  | Z1                         | b2       | 0.431   | 0.0449 | 9.59  | 0        | 0.343    | 0.519    |
| Y        | ~  | Z2                         | b3       | 0.0314  | 0.0453 | 0.693 | 0.488    | -0.0574  | 0.12     |
| Y        | ~  | int1                       | b4       | 0.0234  | 0.0459 | 0.509 | 0.61     | -0.0666  | 0.113    |
| Y        | ~  | int2                       | b5       | 0.1     | 0.0496 | 2.02  | 0.0433   | 0.00302  | 0.198    |
| Y        | ~~ | Y                          |          | 1       | 0.0634 | 15.8  | 0        | 0.879    | 1.13     |
| X        | ~~ | X                          |          | 0.944   | 0      |       |          | 0.944    | 0.944    |
| X        | ~~ | Z1                         |          | -0.051  | 0      |       |          | -0.051   | -0.051   |
| X        | ~~ | Z2                         |          | 0.0565  | 0      |       |          | 0.0565   | 0.0565   |
| X        | ~~ | int1                       |          | -0.0485 | 0      |       |          | -0.0485  | -0.0485  |
| X        | ~~ | int2                       |          | 0.00633 | 0      |       |          | 0.00633  | 0.00633  |
| Z1       | ~~ | Z1                         |          | 1.02    | 0      |       |          | 1.02     | 1.02     |
| Z1       | ~~ | Z2                         |          | 0.051   | 0      |       |          | 0.051    | 0.051    |
| Z1       | ~~ | int1                       |          | 0.134   | 0      |       |          | 0.134    | 0.134    |
| Z1       | ~~ | int2                       |          | -0.0204 | 0      |       |          | -0.0204  | -0.0204  |
| Z2       | ~~ | Z2                         |          | 0.984   | 0      |       |          | 0.984    | 0.984    |
| Z2       | ~~ | int1                       |          | -0.0192 | 0      |       |          | -0.0192  | -0.0192  |
| Z2       | ~~ | int2                       |          | 0.0288  | 0      |       |          | 0.0288   | 0.0288   |
| int1     | ~~ | int1                       |          | 0.972   | 0      |       |          | 0.972    | 0.972    |
| int1     | ~~ | int2                       |          | -0.042  | 0      |       |          | -0.042   | -0.042   |
| int2     | ~~ | int2                       |          | 0.817   | 0      |       |          | 0.817    | 0.817    |
| sslope11 | := | b1+b4*(-1.013)+b5*(-0.967) | sslope11 | 0.325   | 0.0814 | 4     | 6.42e-05 | 0.166    | 0.485    |
| sslope12 | := | b1+b4*(-1.013)+b5*(0.026)  | sslope12 | 0.425   | 0.0642 | 6.62  | 3.54e-11 | 0.299    | 0.551    |
| sslope13 | := | b1+b4*(-1.013)+b5*(1.019)  | sslope13 | 0.525   | 0.0804 | 6.52  | 6.82e-11 | 0.367    | 0.682    |
| sslope21 | := | b1+b4*(-0.002)+b5*(-0.967) | sslope21 | 0.349   | 0.0667 | 5.23  | 1.68e-07 | 0.218    | 0.48     |
| sslope22 | := | b1+b4*(-0.002)+b5*(0.026)  | sslope22 | 0.449   | 0.0463 | 9.69  | 0        | 0.358    | 0.539    |
| sslope23 | := | b1+b4*(-0.002)+b5*(1.019)  | sslope23 | 0.548   | 0.0685 | 8.01  | 1.11e-15 | 0.414    | 0.683    |
| sslope31 | := | b1+b4*(1.009)+b5*(-0.967)  | sslope31 | 0.373   | 0.0812 | 4.59  | 4.39e-06 | 0.214    | 0.532    |
| sslope32 | := | b1+b4*(1.009)+b5*(0.026)   | sslope32 | 0.472   | 0.0669 | 7.06  | 1.68e-12 | 0.341    | 0.604    |
| sslope33 | := | b1+b4*(1.009)+b5*(1.019)   | sslope33 | 0.572   | 0.085  | 6.73  | 1.7e-11  | 0.405    | 0.739    |
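# Note: boot.ci.type = "bca.simple" only yields bootstrap-based intervals when the model is
# fitted with bootstrap standard errors. A sketch (fit_boot is a hypothetical name; the
# resampling makes this noticeably slower, and the intervals will differ slightly from the table above):
set.seed(1234)
fit_boot <- lavaan::sem(model1, data = dta, se = "bootstrap", bootstrap = 501)
lavaan::parameterEstimates(fit_boot, ci = TRUE, boot.ci.type = "bca.simple")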
# Plot the model and its estimates
lavaanPlot::lavaanPlot(model = fit,
                       edge_options = list(color = "grey"),
                       coefs = TRUE,
                       stand = TRUE)