第 68 章 贝叶斯层级模型

library(tidyverse)
library(tidybayes)
library(rstan)
rstan_options(auto_write = TRUE)
options(mc.cores = parallel::detectCores())

68.1 明尼苏达州房屋中氡的存在

# Load the Minnesota radon measurements (pre-saved as an RDS file under
# demo_data/) and preview the first rows to check the column layout.
radon <- readr::read_rds(here::here('demo_data', "radon.rds")) 
head(radon)
##   floor county log_radon log_uranium
## 1     1 AITKIN   0.83291     -0.6890
## 2     0 AITKIN   0.83291     -0.6890
## 3     0 AITKIN   1.09861     -0.6890
## 4     0 AITKIN   0.09531     -0.6890
## 5     0  ANOKA   1.16315     -0.8473
## 6     0  ANOKA   0.95551     -0.8473

数据来源美国明尼苏达州85个县中房屋氡含量测量

  • log_radon 房屋氡含量 (log scale)
  • log_uranium 这个县放射性化学元素铀的等级 (log scale)
  • floor 房屋楼层 (0 = basement, 1 = first floor)
  • county 所在县 (factor)

68.2 任务

估计房屋中的氡含量。

68.2.1 可视化探索

# Number of radon measurements available in each of the 85 counties.
# count() is shorthand for group_by(county) %>% summarise(n = n()).
df_n_county <- radon %>%
  count(county)

df_n_county
## # A tibble: 85 × 2
##   county       n
##   <fct>    <int>
## 1 AITKIN       4
## 2 ANOKA       52
## 3 BECKER       3
## 4 BELTRAMI     7
## 5 BENTON       4
## 6 BIGSTONE     3
## # … with 79 more rows

统计每个县的样本量、氡含量的均值和标准差、铀等级的均值,以及氡含量均值的标准误

# Per-county summary statistics: mean and sd of log radon, mean uranium
# level, group size, and the standard error of the county mean.
# NOTE: a county with a single observation has sd(log_radon) = NA,
# so its log_radon_se is NA as well.
radon_county <- radon %>%
  group_by(county) %>%
  summarise(
    log_radon_mean = mean(log_radon),
    log_radon_sd   = sd(log_radon),
    log_uranium    = mean(log_uranium),
    n              = n()  # idiomatic dplyr group size (was length(county))
  ) %>%
  mutate(log_radon_se = log_radon_sd / sqrt(n))

radon_county
## # A tibble: 85 × 6
##   county  log_radon_mean log_radon_sd log_uranium     n
##   <fct>            <dbl>        <dbl>       <dbl> <int>
## 1 AITKIN           0.715        0.432      -0.689     4
## 2 ANOKA            0.891        0.718      -0.847    52
## 3 BECKER           1.09         0.717      -0.113     3
## 4 BELTRA…          1.19         0.894      -0.593     7
## 5 BENTON           1.28         0.415      -0.143     4
## 6 BIGSTO…          1.54         0.504       0.387     3
## # … with 79 more rows, and 1 more variable:
## #   log_radon_se <dbl>
# Exploratory plot: per-county boxplots of log radon (gray), the raw
# observations (gray points), and each county's mean (red points).
# Counties are ordered by their mean log radon; coord_flip() puts
# county names on the y axis so the 85 labels stay readable.
ggplot() +
  geom_boxplot(data = radon,
               mapping = aes(y = log_radon,
                             x = fct_reorder(county, log_radon, mean)),
               colour = "gray") +
  geom_point(data = radon,
             mapping = aes(y = log_radon,
                           x = fct_reorder(county, log_radon, mean)),
             colour = "gray") +
  geom_point(data = radon_county,
             mapping = aes(x = fct_reorder(county, log_radon_mean),
                           y = log_radon_mean),
             colour = "red") +
  coord_flip() +
  labs(y = "log(radon)", x = "")

68.2.2 pooling model

这是最简单的模型,该模型假定所有的房屋的氡含量来自同一个分布, 估计整体的均值和方差

\[ \begin{aligned}[t] y_i &\sim \operatorname{normal}(\mu, \sigma) \\ \mu &\sim \operatorname{normal}(0, 10) \\ \sigma &\sim \operatorname{exp}(1) \end{aligned} \] 这里我们给 \(\mu\) 和 \(\sigma\) 指定较弱的先验信息.

# Complete-pooling model: every house's log radon is drawn from a single
# normal(mu, sigma) distribution, ignoring counties entirely.
# Priors are weakly informative: mu ~ normal(0, 10), sigma ~ exponential(1).
stan_program <- "
data {
  int N;
  vector[N] y;
}
parameters {
  real mu;
  real<lower=0> sigma;
}
model {
  mu ~ normal(0, 10);
  sigma ~ exponential(1);
  
  y ~ normal(mu, sigma);
}
"

# Data passed to Stan: the N log-radon measurements.
stan_data <- list(
  N = nrow(radon),
  y = radon$log_radon
)

# Compile and sample (rstan defaults: 4 chains, 2000 iterations each).
fit_pooling <- stan(model_code = stan_program, data = stan_data)

模型估计了均值和方差两个参数。

summary(fit_pooling)$summary
##            mean   se_mean      sd      2.5%       25%
## mu       1.2647 0.0004602 0.02714    1.2119    1.2464
## sigma    0.8199 0.0003225 0.01924    0.7835    0.8066
## lp__  -277.9363 0.0229975 0.98640 -280.5274 -278.3265
##             50%       75%     97.5% n_eff   Rhat
## mu       1.2645    1.2828    1.3187  3477 0.9998
## sigma    0.8199    0.8326    0.8584  3560 0.9998
## lp__  -277.6325 -277.2187 -276.9533  1840 1.0023

68.2.3 no-pooling model

每个县都有独立的均值和方差,又叫 individual model

\[ \begin{aligned}[t] y_i &\sim \operatorname{normal}(\mu_{j[i]}, \sigma) \\ \mu_j &\sim \operatorname{normal}(0, 10) \\ \sigma &\sim \operatorname{exp}(1) \end{aligned} \] 其中, \(j[i]\) 表示观测\(i\)对应的所在县。

# No-pooling (individual) model: each of the J counties gets its own
# independent mean mu[j]; only sigma is shared. county[i] maps each
# observation to its county index, so mu[county[i]] picks the right mean.
stan_program <- "
data {
  int<lower=1> N;                            
  int<lower=2> J;                     
  int<lower=1, upper=J> county[N]; 
  vector[N] y; 
}
parameters {
  vector[J] mu;
  real<lower=0> sigma;
}
model {
  mu ~ normal(0, 10);
  sigma ~ exponential(1);
  
  for(i in 1:N) {
    y[i] ~ normal(mu[county[i]], sigma);
  }
}
"

# as.numeric() on the county factor yields 1..J indices whose order
# matches the factor levels (and hence df_n_county's row order).
stan_data <- list(
  N      = nrow(radon),
  J      = length(unique(radon$county)),
  county = as.numeric(radon$county),
  y      = radon$log_radon
)

fit_no_pooling <- stan(model_code = stan_program, data = stan_data)
summary(fit_no_pooling)$summary
##             mean   se_mean      sd       2.5%
## mu[1]     0.7168 0.0054455 0.38700   -0.04207
## mu[2]     0.8924 0.0016644 0.10692    0.68471
## mu[3]     1.0887 0.0059487 0.44336    0.22698
## mu[4]     1.1872 0.0041828 0.29079    0.60425
## mu[5]     1.2750 0.0052604 0.38080    0.54680
## mu[6]     1.5384 0.0062523 0.44297    0.66424
## mu[7]     1.9262 0.0029363 0.20725    1.52583
## mu[8]     1.6526 0.0055094 0.37942    0.92396
## mu[9]     0.9696 0.0031372 0.23842    0.51223
## mu[10]    1.2233 0.0040732 0.30774    0.61532
## mu[11]    1.4265 0.0046930 0.34480    0.73724
## mu[12]    1.7504 0.0055798 0.39208    1.00669
## mu[13]    1.0907 0.0044173 0.32000    0.44768
## mu[14]    1.8114 0.0029422 0.20181    1.42831
## mu[15]    1.0231 0.0051120 0.37980    0.26985
## mu[16]    0.7073 0.0075847 0.54833   -0.35882
## mu[17]    0.7547 0.0056802 0.38528   -0.01328
## mu[18]    0.9903 0.0031737 0.22140    0.56361
## mu[19]    1.3277 0.0013242 0.09860    1.12992
## mu[20]    1.8242 0.0063222 0.43835    0.98132
## mu[21]    1.6719 0.0036217 0.25959    1.15305
## mu[22]    0.6577 0.0046308 0.32498    0.01797
## mu[23]    1.0576 0.0074828 0.53540    0.02237
## mu[24]    1.9587 0.0038775 0.25331    1.45318
## mu[25]    1.8615 0.0032835 0.20189    1.47386
## mu[26]    1.3207 0.0010995 0.07561    1.16865
## mu[27]    1.5585 0.0042298 0.31554    0.93861
## mu[28]    0.8752 0.0047323 0.33055    0.21814
## mu[29]    1.0786 0.0070912 0.44182    0.22335
## mu[30]    0.9713 0.0033955 0.23010    0.51774
## mu[31]    2.0299 0.0048058 0.33486    1.36648
## mu[32]    1.2631 0.0051731 0.40397    0.46920
## mu[33]    2.0740 0.0054342 0.38731    1.30790
## mu[34]    1.1514 0.0064738 0.44477    0.26814
## mu[35]    0.4676 0.0044352 0.30061   -0.11437
## mu[36]    2.5792 0.0076299 0.54422    1.50331
## mu[37]    0.4079 0.0037247 0.25648   -0.08961
## mu[38]    1.5237 0.0053307 0.38005    0.78842
## mu[39]    1.6316 0.0046206 0.33349    0.97667
## mu[40]    2.1415 0.0054971 0.38269    1.40440
## mu[41]    1.8920 0.0041486 0.27014    1.36214
## mu[42]    1.3710 0.0104861 0.75118   -0.12403
## mu[43]    1.2550 0.0033698 0.25070    0.78064
## mu[44]    1.0024 0.0041242 0.29733    0.41841
## mu[45]    1.1125 0.0030224 0.21851    0.68144
## mu[46]    1.2454 0.0047640 0.33847    0.57995
## mu[47]    0.6190 0.0079385 0.55982   -0.47120
## mu[48]    1.1006 0.0036858 0.25701    0.60575
## mu[49]    1.6179 0.0028009 0.21082    1.19895
## mu[50]    2.4973 0.0107904 0.77863    0.99268
## mu[51]    2.1727 0.0052714 0.37310    1.43181
## mu[52]    1.9437 0.0070752 0.43777    1.09451
## mu[53]    1.0533 0.0057782 0.44293    0.18951
## mu[54]    1.2504 0.0023730 0.16328    0.93252
## mu[55]    1.3860 0.0036693 0.26610    0.86516
## mu[56]    0.7336 0.0060231 0.44198   -0.12766
## mu[57]    0.6951 0.0040099 0.30662    0.09622
## mu[58]    1.7051 0.0053314 0.37841    0.95622
## mu[59]    1.3937 0.0056523 0.38788    0.64246
## mu[60]    1.3056 0.0077868 0.53335    0.24385
## mu[61]    1.1310 0.0019814 0.13628    0.86741
## mu[62]    1.8572 0.0049217 0.34329    1.17904
## mu[63]    1.4533 0.0063077 0.43668    0.60984
## mu[64]    1.8041 0.0031468 0.23562    1.35346
## mu[65]    1.3352 0.0076752 0.54681    0.25582
## mu[66]    1.2869 0.0029355 0.21270    0.87182
## mu[67]    1.6101 0.0029348 0.21659    1.17477
## mu[68]    1.1228 0.0039487 0.27479    0.58987
## mu[69]    1.2735 0.0048785 0.37429    0.55545
## mu[70]    0.8293 0.0009309 0.07053    0.69110
## mu[71]    1.4035 0.0020400 0.15344    1.10039
## mu[72]    1.6071 0.0033365 0.24074    1.12823
## mu[73]    1.8061 0.0072602 0.53885    0.74867
## mu[74]    1.0189 0.0053831 0.38988    0.25734
## mu[75]    1.4995 0.0060762 0.44209    0.62176
## mu[76]    1.8489 0.0054889 0.37554    1.12761
## mu[77]    1.7369 0.0044189 0.29741    1.14510
## mu[78]    1.0438 0.0052664 0.33673    0.40323
## mu[79]    0.5245 0.0055840 0.38038   -0.22021
## mu[80]    1.2891 0.0015934 0.11150    1.07151
## mu[81]    2.2293 0.0060483 0.44342    1.36546
## mu[82]    2.2078 0.0100962 0.75148    0.73107
## mu[83]    1.4961 0.0029853 0.21362    1.07839
## mu[84]    1.6095 0.0032952 0.21905    1.19085
## mu[85]    1.2214 0.0083301 0.56006    0.11066
## sigma     0.7676 0.0003152 0.01868    0.73068
## lp__   -217.7147 0.1628433 6.75694 -231.84392
##              25%       50%       75%     97.5% n_eff
## mu[1]     0.4534    0.7183    0.9863    1.4528  5050
## mu[2]     0.8207    0.8933    0.9624    1.1043  4127
## mu[3]     0.7934    1.0892    1.3869    1.9673  5555
## mu[4]     0.9948    1.1878    1.3759    1.7567  4833
## mu[5]     1.0188    1.2763    1.5280    2.0167  5240
## mu[6]     1.2409    1.5377    1.8388    2.4106  5020
## mu[7]     1.7841    1.9264    2.0622    2.3402  4982
## mu[8]     1.3825    1.6587    1.9108    2.3864  4743
## mu[9]     0.8014    0.9699    1.1317    1.4329  5776
## mu[10]    1.0221    1.2183    1.4242    1.8546  5708
## mu[11]    1.1908    1.4313    1.6606    2.0847  5398
## mu[12]    1.4897    1.7490    2.0077    2.5294  4938
## mu[13]    0.8731    1.0879    1.3146    1.7195  5248
## mu[14]    1.6732    1.8080    1.9492    2.2043  4705
## mu[15]    0.7681    1.0167    1.2768    1.7723  5520
## mu[16]    0.3241    0.7182    1.0887    1.7673  5227
## mu[17]    0.4975    0.7548    1.0145    1.5285  4601
## mu[18]    0.8385    0.9912    1.1441    1.4186  4867
## mu[19]    1.2642    1.3279    1.3937    1.5209  5545
## mu[20]    1.5356    1.8245    2.1185    2.6948  4807
## mu[21]    1.5004    1.6728    1.8523    2.1720  5137
## mu[22]    0.4428    0.6612    0.8705    1.3038  4925
## mu[23]    0.6955    1.0475    1.4189    2.0958  5119
## mu[24]    1.7873    1.9617    2.1271    2.4540  4268
## mu[25]    1.7269    1.8585    1.9951    2.2641  3781
## mu[26]    1.2693    1.3213    1.3724    1.4682  4729
## mu[27]    1.3416    1.5588    1.7729    2.1860  5565
## mu[28]    0.6483    0.8840    1.0979    1.5177  4879
## mu[29]    0.7810    1.0849    1.3787    1.9433  3882
## mu[30]    0.8141    0.9697    1.1267    1.4235  4592
## mu[31]    1.8117    2.0322    2.2546    2.6796  4855
## mu[32]    0.9911    1.2684    1.5394    2.0432  6098
## mu[33]    1.8085    2.0746    2.3418    2.8350  5080
## mu[34]    0.8488    1.1575    1.4516    2.0231  4720
## mu[35]    0.2654    0.4741    0.6663    1.0564  4594
## mu[36]    2.2160    2.5774    2.9484    3.6607  5088
## mu[37]    0.2369    0.4095    0.5820    0.9142  4742
## mu[38]    1.2715    1.5189    1.7822    2.2758  5083
## mu[39]    1.4024    1.6317    1.8529    2.2807  5209
## mu[40]    1.8767    2.1416    2.4008    2.8744  4846
## mu[41]    1.7112    1.8925    2.0737    2.4299  4240
## mu[42]    0.8637    1.3701    1.8820    2.8334  5132
## mu[43]    1.0842    1.2528    1.4266    1.7467  5535
## mu[44]    0.8049    0.9975    1.1992    1.5882  5198
## mu[45]    0.9658    1.1147    1.2595    1.5342  5227
## mu[46]    1.0228    1.2408    1.4680    1.9102  5048
## mu[47]    0.2371    0.6269    0.9959    1.7064  4973
## mu[48]    0.9269    1.1002    1.2696    1.6183  4862
## mu[49]    1.4773    1.6166    1.7602    2.0178  5666
## mu[50]    1.9768    2.4955    3.0286    4.0266  5207
## mu[51]    1.9214    2.1726    2.4239    2.8955  5010
## mu[52]    1.6558    1.9404    2.2372    2.8093  3828
## mu[53]    0.7569    1.0573    1.3572    1.9140  5876
## mu[54]    1.1400    1.2475    1.3611    1.5705  4735
## mu[55]    1.2004    1.3845    1.5666    1.8942  5259
## mu[56]    0.4254    0.7288    1.0295    1.5876  5385
## mu[57]    0.4881    0.7000    0.9005    1.2820  5847
## mu[58]    1.4593    1.7029    1.9626    2.4212  5038
## mu[59]    1.1306    1.3943    1.6531    2.1566  4709
## mu[60]    0.9480    1.2969    1.6650    2.3711  4692
## mu[61]    1.0363    1.1295    1.2230    1.3994  4731
## mu[62]    1.6257    1.8565    2.0876    2.5453  4865
## mu[63]    1.1485    1.4520    1.7527    2.3219  4793
## mu[64]    1.6451    1.8029    1.9626    2.2499  5607
## mu[65]    0.9626    1.3329    1.7031    2.4018  5076
## mu[66]    1.1407    1.2870    1.4268    1.7142  5250
## mu[67]    1.4648    1.6097    1.7517    2.0356  5446
## mu[68]    0.9386    1.1171    1.3071    1.6616  4842
## mu[69]    1.0211    1.2713    1.5224    2.0188  5886
## mu[70]    0.7830    0.8289    0.8758    0.9690  5741
## mu[71]    1.3010    1.4043    1.5071    1.7080  5657
## mu[72]    1.4478    1.6086    1.7691    2.0711  5206
## mu[73]    1.4323    1.8033    2.1761    2.8423  5508
## mu[74]    0.7577    1.0229    1.2780    1.7768  5246
## mu[75]    1.1980    1.5010    1.7919    2.3618  5294
## mu[76]    1.5952    1.8515    2.0973    2.5686  4681
## mu[77]    1.5337    1.7380    1.9356    2.3249  4530
## mu[78]    0.8235    1.0398    1.2660    1.7074  4088
## mu[79]    0.2744    0.5171    0.7794    1.2786  4640
## mu[80]    1.2125    1.2900    1.3662    1.5081  4897
## mu[81]    1.9273    2.2222    2.5242    3.1373  5375
## mu[82]    1.6973    2.2171    2.7161    3.6749  5540
## mu[83]    1.3524    1.4984    1.6361    1.9162  5121
## mu[84]    1.4599    1.6086    1.7592    2.0349  4419
## mu[85]    0.8444    1.2185    1.5921    2.3305  4520
## sigma     0.7549    0.7676    0.7801    0.8045  3512
## lp__   -221.9378 -217.3673 -213.1234 -205.7069  1722
##          Rhat
## mu[1]  0.9991
## mu[2]  0.9995
## mu[3]  1.0002
## mu[4]  0.9998
## mu[5]  1.0006
## mu[6]  0.9998
## mu[7]  0.9994
## mu[8]  0.9998
## mu[9]  0.9993
## mu[10] 0.9993
## mu[11] 0.9993
## mu[12] 1.0003
## mu[13] 0.9997
## mu[14] 0.9997
## mu[15] 1.0001
## mu[16] 1.0001
## mu[17] 0.9996
## mu[18] 0.9997
## mu[19] 1.0000
## mu[20] 0.9993
## mu[21] 1.0001
## mu[22] 0.9999
## mu[23] 1.0003
## mu[24] 1.0006
## mu[25] 1.0019
## mu[26] 0.9994
## mu[27] 0.9992
## mu[28] 0.9999
## mu[29] 1.0008
## mu[30] 1.0004
## mu[31] 0.9997
## mu[32] 1.0003
## mu[33] 0.9998
## mu[34] 0.9995
## mu[35] 0.9997
## mu[36] 0.9992
## mu[37] 0.9998
## mu[38] 1.0003
## mu[39] 0.9997
## mu[40] 0.9994
## mu[41] 1.0001
## mu[42] 0.9998
## mu[43] 0.9994
## mu[44] 1.0002
## mu[45] 0.9993
## mu[46] 0.9996
## mu[47] 0.9994
## mu[48] 0.9994
## mu[49] 0.9994
## mu[50] 0.9996
## mu[51] 0.9997
## mu[52] 1.0004
## mu[53] 0.9995
## mu[54] 0.9999
## mu[55] 1.0001
## mu[56] 1.0004
## mu[57] 0.9997
## mu[58] 0.9991
## mu[59] 0.9994
## mu[60] 0.9993
## mu[61] 0.9999
## mu[62] 1.0000
## mu[63] 0.9994
## mu[64] 1.0000
## mu[65] 0.9993
## mu[66] 1.0004
## mu[67] 0.9998
## mu[68] 0.9998
## mu[69] 0.9993
## mu[70] 0.9999
## mu[71] 1.0002
## mu[72] 0.9997
## mu[73] 1.0000
## mu[74] 0.9995
## mu[75] 0.9995
## mu[76] 0.9993
## mu[77] 1.0013
## mu[78] 1.0006
## mu[79] 0.9998
## mu[80] 0.9996
## mu[81] 0.9998
## mu[82] 0.9996
## mu[83] 0.9997
## mu[84] 0.9998
## mu[85] 1.0001
## sigma  1.0004
## lp__   1.0018

有多少县,就有多少个模型,每个模型有一个 \(\mu\),参数\(\sigma\)是共同的。需要注意的是,每组之间是彼此独立的,没有共享信息。

68.2.4 partially pooled model

和 “no-pooling model” 模型一样,每个县都有自己的均值,但是,这些县彼此会分享信息,一个县获取的信息可以帮助我们估计其它县的均值。

  • 模型同时考虑各个类别数据中的信息以及整个群体中的信息
  • 怎么叫共享信息?参数来自同一个分布
  • 怎么做到的呢?通过先验

\[ \begin{aligned}[t] y_i &\sim \operatorname{normal}(\mu_{j[i]}, \sigma) \\ \mu_j &\sim \operatorname{normal}(\gamma, \tau) \\ \gamma &\sim \operatorname{normal}(0, 5) \\ \tau &\sim \operatorname{exp}(1) \end{aligned} \]

每个县的氡含量均值\(\mu_j\)都服从均值为 \(\gamma\)、标准差为 \(\tau\) 的正态分布。但先验分布中的参数 \(\gamma\) 和 \(\tau\) 都各自有自己的先验分布,即参数的参数, 通常称之为超参数,这就是多层模型中"层"的来历,\(\mu_j\) 是第一层参数,\(\gamma\) 是第二层参数。

  • \(\gamma\) 和 \(\tau\) 的先验称为超先验分布
  • 超参数是多层模型的标志。
# Partial-pooling (hierarchical) model: county means mu[j] share a common
# prior normal(mu_a, sigma_a), so counties borrow strength from each other.
# Naming vs. the math above: mu_a is the hyper-mean gamma, sigma_a is the
# hyper-sd tau, and sigma_y is the observation-level sd sigma.
stan_program <- "
data {
  int<lower=1> N;                            
  int<lower=2> J;                     
  int<lower=1, upper=J> county[N]; 
  vector[N] y; 
}
parameters {
  vector[J] mu;
  real mu_a;
  real<lower=0> sigma_y;
  real<lower=0> sigma_a;
}
model {
  mu_a ~ normal(0, 5);
  sigma_a ~ exponential(1);
  sigma_y ~ exponential(1);
  
  mu ~ normal(mu_a, sigma_a);
  
  for(i in 1:N) {
    y[i] ~ normal(mu[county[i]], sigma_y);
  }
}
"

# Same data list as the no-pooling model.
stan_data <- list(
  N      = nrow(radon),
  J      = length(unique(radon$county)),
  county = as.numeric(radon$county),
  y      = radon$log_radon
)

fit_partial_pooling <- stan(model_code = stan_program, data = stan_data)
summary(fit_partial_pooling)$summary
##              mean   se_mean      sd      2.5%
## mu[1]      1.1093 0.0031713 0.24174    0.6298
## mu[2]      0.9436 0.0013179 0.10377    0.7386
## mu[3]      1.2685 0.0030994 0.24559    0.7656
## mu[4]      1.2749 0.0025573 0.20499    0.8736
## mu[5]      1.3183 0.0029023 0.23189    0.8654
## mu[6]      1.4119 0.0032941 0.24907    0.9294
## mu[7]      1.7418 0.0025405 0.17701    1.4071
## mu[8]      1.4654 0.0032078 0.24930    0.9830
## mu[9]      1.1275 0.0024154 0.18861    0.7499
## mu[10]     1.2861 0.0025879 0.21241    0.8737
## mu[11]     1.3829 0.0027334 0.23269    0.9193
## mu[12]     1.5018 0.0032386 0.23839    1.0607
## mu[13]     1.2206 0.0024324 0.21489    0.7894
## mu[14]     1.6634 0.0023600 0.17037    1.3356
## mu[15]     1.2221 0.0029126 0.24776    0.7125
## mu[16]     1.2011 0.0033983 0.27122    0.6674
## mu[17]     1.1237 0.0033283 0.24321    0.6416
## mu[18]     1.1140 0.0023186 0.17506    0.7801
## mu[19]     1.3298 0.0012370 0.09389    1.1418
## mu[20]     1.4990 0.0032078 0.25174    1.0161
## mu[21]     1.5336 0.0024834 0.19964    1.1409
## mu[22]     1.0185 0.0032692 0.22511    0.5718
## mu[23]     1.2839 0.0032628 0.26744    0.7493
## mu[24]     1.7048 0.0028130 0.19938    1.3185
## mu[25]     1.6998 0.0024626 0.17495    1.3662
## mu[26]     1.3214 0.0008230 0.07157    1.1813
## mu[27]     1.4508 0.0027051 0.22446    1.0104
## mu[28]     1.1381 0.0031381 0.23044    0.6716
## mu[29]     1.2712 0.0026875 0.24964    0.7713
## mu[30]     1.1075 0.0024606 0.18702    0.7458
## mu[31]     1.6511 0.0035430 0.23824    1.2011
## mu[32]     1.3205 0.0029070 0.23359    0.8623
## mu[33]     1.6256 0.0034711 0.24699    1.1547
## mu[34]     1.2878 0.0029616 0.25073    0.8060
## mu[35]     0.8958 0.0032852 0.21978    0.4638
## mu[36]     1.6521 0.0042543 0.28407    1.1129
## mu[37]     0.8124 0.0031402 0.20641    0.3946
## mu[38]     1.4265 0.0030274 0.24426    0.9512
## mu[39]     1.4670 0.0028434 0.22458    1.0271
## mu[40]     1.6545 0.0034581 0.24758    1.1852
## mu[41]     1.6501 0.0028358 0.20921    1.2534
## mu[42]     1.3526 0.0035126 0.28886    0.7799
## mu[43]     1.2910 0.0024358 0.19635    0.9021
## mu[44]     1.1741 0.0027796 0.21165    0.7317
## mu[45]     1.1854 0.0021355 0.17729    0.8299
## mu[46]     1.3022 0.0026709 0.22309    0.8620
## mu[47]     1.1734 0.0036033 0.26842    0.6415
## mu[48]     1.2068 0.0023174 0.19715    0.8281
## mu[49]     1.5344 0.0022581 0.17180    1.2004
## mu[50]     1.5095 0.0037151 0.28975    0.9568
## mu[51]     1.6711 0.0035482 0.25159    1.1893
## mu[52]     1.5413 0.0032412 0.25452    1.0486
## mu[53]     1.2513 0.0030382 0.25365    0.7574
## mu[54]     1.2745 0.0016911 0.14196    0.9944
## mu[55]     1.3656 0.0024245 0.19883    0.9813
## mu[56]     1.1494 0.0030684 0.24804    0.6710
## mu[57]     1.0364 0.0031529 0.23054    0.5736
## mu[58]     1.4850 0.0031261 0.23831    1.0321
## mu[59]     1.3637 0.0029937 0.24141    0.8901
## mu[60]     1.3345 0.0034174 0.26731    0.7934
## mu[61]     1.1699 0.0016336 0.12746    0.9193
## mu[62]     1.5741 0.0029216 0.23019    1.1502
## mu[63]     1.3855 0.0030759 0.25166    0.9031
## mu[64]     1.6296 0.0024495 0.18589    1.2721
## mu[65]     1.3435 0.0033356 0.26382    0.8233
## mu[66]     1.3075 0.0020660 0.16772    0.9789
## mu[67]     1.5226 0.0022569 0.17754    1.1721
## mu[68]     1.2282 0.0025618 0.19663    0.8361
## mu[69]     1.3230 0.0027247 0.23029    0.8695
## mu[70]     0.8568 0.0009390 0.06926    0.7198
## mu[71]     1.3990 0.0015753 0.13438    1.1392
## mu[72]     1.5058 0.0021901 0.18940    1.1386
## mu[73]     1.4634 0.0032988 0.26623    0.9593
## mu[74]     1.2286 0.0028892 0.24166    0.7515
## mu[75]     1.4015 0.0034256 0.25448    0.8972
## mu[76]     1.5445 0.0033409 0.24350    1.0848
## mu[77]     1.5556 0.0028889 0.20881    1.1462
## mu[78]     1.2161 0.0027851 0.22846    0.7575
## mu[79]     1.0371 0.0035252 0.24053    0.5492
## mu[80]     1.2957 0.0013408 0.10493    1.0857
## mu[81]     1.6332 0.0035343 0.26446    1.1185
## mu[82]     1.4798 0.0034645 0.28414    0.9251
## mu[83]     1.4453 0.0021908 0.17289    1.1077
## mu[84]     1.5241 0.0021797 0.16956    1.1971
## mu[85]     1.3203 0.0033399 0.26884    0.7784
## mu_a       1.3502 0.0008127 0.04805    1.2558
## sigma_y    0.7671 0.0002655 0.01871    0.7323
## sigma_a    0.3032 0.0014546 0.04739    0.2193
## lp__    -157.4567 0.3488873 9.80038 -177.1914
##               25%       50%       75%     97.5%  n_eff
## mu[1]      0.9464    1.1157    1.2791    1.5625 5810.6
## mu[2]      0.8735    0.9444    1.0130    1.1454 6199.5
## mu[3]      1.1099    1.2726    1.4330    1.7445 6278.9
## mu[4]      1.1348    1.2743    1.4158    1.6754 6425.4
## mu[5]      1.1642    1.3193    1.4739    1.7723 6383.8
## mu[6]      1.2419    1.4094    1.5796    1.9102 5717.1
## mu[7]      1.6195    1.7386    1.8586    2.1027 4854.5
## mu[8]      1.2965    1.4620    1.6274    1.9755 6040.0
## mu[9]      1.0023    1.1284    1.2535    1.4923 6097.5
## mu[10]     1.1471    1.2849    1.4302    1.7077 6736.6
## mu[11]     1.2248    1.3822    1.5391    1.8423 7246.9
## mu[12]     1.3391    1.4978    1.6593    1.9720 5418.3
## mu[13]     1.0782    1.2207    1.3643    1.6480 7804.7
## mu[14]     1.5489    1.6604    1.7742    2.0010 5211.4
## mu[15]     1.0657    1.2205    1.3842    1.7038 7236.1
## mu[16]     1.0176    1.2006    1.3867    1.7265 6369.6
## mu[17]     0.9629    1.1305    1.2887    1.6069 5339.6
## mu[18]     0.9906    1.1134    1.2358    1.4483 5700.4
## mu[19]     1.2678    1.3300    1.3934    1.5088 5761.8
## mu[20]     1.3267    1.5010    1.6713    1.9918 6158.6
## mu[21]     1.4022    1.5344    1.6658    1.9141 6462.5
## mu[22]     0.8733    1.0209    1.1701    1.4534 4741.5
## mu[23]     1.1116    1.2849    1.4584    1.8053 6718.3
## mu[24]     1.5685    1.7036    1.8370    2.1050 5023.7
## mu[25]     1.5784    1.6985    1.8197    2.0465 5047.3
## mu[26]     1.2721    1.3216    1.3690    1.4642 7562.5
## mu[27]     1.2982    1.4469    1.6002    1.9016 6885.1
## mu[28]     0.9900    1.1404    1.2954    1.5829 5392.3
## mu[29]     1.1058    1.2744    1.4350    1.7681 8628.5
## mu[30]     0.9848    1.1075    1.2361    1.4694 5777.2
## mu[31]     1.4877    1.6453    1.8082    2.1237 4521.5
## mu[32]     1.1606    1.3237    1.4828    1.7655 6456.8
## mu[33]     1.4588    1.6174    1.7868    2.1247 5063.2
## mu[34]     1.1180    1.2871    1.4612    1.7825 7167.4
## mu[35]     0.7503    0.8976    1.0496    1.3141 4475.5
## mu[36]     1.4581    1.6471    1.8363    2.2333 4458.6
## mu[37]     0.6762    0.8207    0.9520    1.2057 4320.6
## mu[38]     1.2632    1.4289    1.5847    1.9097 6510.0
## mu[39]     1.3184    1.4684    1.6195    1.9107 6238.0
## mu[40]     1.4882    1.6466    1.8168    2.1537 5126.0
## mu[41]     1.5034    1.6474    1.7917    2.0679 5442.6
## mu[42]     1.1524    1.3503    1.5487    1.9242 6762.8
## mu[43]     1.1569    1.2954    1.4255    1.6678 6497.8
## mu[44]     1.0408    1.1753    1.3119    1.5924 5797.9
## mu[45]     1.0665    1.1866    1.3106    1.5207 6892.5
## mu[46]     1.1500    1.3035    1.4495    1.7492 6976.4
## mu[47]     0.9976    1.1740    1.3471    1.7029 5548.9
## mu[48]     1.0717    1.2070    1.3441    1.5865 7238.1
## mu[49]     1.4200    1.5321    1.6492    1.8753 5788.4
## mu[50]     1.3122    1.4987    1.6923    2.1097 6082.7
## mu[51]     1.5010    1.6640    1.8331    2.1789 5027.5
## mu[52]     1.3715    1.5394    1.7083    2.0527 6166.7
## mu[53]     1.0824    1.2526    1.4219    1.7426 6969.9
## mu[54]     1.1809    1.2759    1.3697    1.5542 7046.3
## mu[55]     1.2335    1.3607    1.4974    1.7611 6725.6
## mu[56]     0.9788    1.1525    1.3186    1.6307 6534.6
## mu[57]     0.8812    1.0394    1.1910    1.4796 5346.6
## mu[58]     1.3242    1.4868    1.6413    1.9569 5811.5
## mu[59]     1.2000    1.3643    1.5279    1.8438 6502.7
## mu[60]     1.1594    1.3393    1.5109    1.8638 6118.3
## mu[61]     1.0849    1.1695    1.2564    1.4181 6087.5
## mu[62]     1.4126    1.5642    1.7316    2.0470 6208.0
## mu[63]     1.2150    1.3834    1.5587    1.8876 6693.9
## mu[64]     1.5053    1.6250    1.7529    1.9968 5758.8
## mu[65]     1.1728    1.3420    1.5180    1.8637 6255.5
## mu[66]     1.1912    1.3090    1.4184    1.6334 6590.9
## mu[67]     1.4035    1.5209    1.6420    1.8758 6187.7
## mu[68]     1.0991    1.2274    1.3609    1.6095 5890.9
## mu[69]     1.1688    1.3215    1.4820    1.7672 7143.2
## mu[70]     0.8092    0.8563    0.9045    0.9927 5439.9
## mu[71]     1.3084    1.3983    1.4870    1.6660 7276.6
## mu[72]     1.3794    1.5047    1.6311    1.8800 7478.7
## mu[73]     1.2828    1.4579    1.6362    2.0095 6513.1
## mu[74]     1.0727    1.2289    1.3827    1.7160 6996.6
## mu[75]     1.2320    1.3999    1.5721    1.9027 5518.9
## mu[76]     1.3763    1.5416    1.7057    2.0284 5312.1
## mu[77]     1.4039    1.5574    1.7002    1.9614 5224.2
## mu[78]     1.0668    1.2207    1.3655    1.6561 6729.1
## mu[79]     0.8850    1.0457    1.1984    1.4983 4655.5
## mu[80]     1.2264    1.2965    1.3655    1.4975 6125.4
## mu[81]     1.4537    1.6245    1.8028    2.1754 5599.0
## mu[82]     1.2898    1.4746    1.6680    2.0460 6726.3
## mu[83]     1.3286    1.4439    1.5634    1.7809 6228.1
## mu[84]     1.4079    1.5253    1.6366    1.8644 6051.4
## mu[85]     1.1419    1.3208    1.4993    1.8553 6479.3
## mu_a       1.3184    1.3505    1.3824    1.4424 3495.6
## sigma_y    0.7541    0.7665    0.7796    0.8053 4964.1
## sigma_a    0.2698    0.3003    0.3342    0.4026 1061.3
## lp__    -163.7974 -157.3440 -150.9538 -138.4475  789.1
##           Rhat
## mu[1]   0.9996
## mu[2]   0.9992
## mu[3]   0.9998
## mu[4]   1.0001
## mu[5]   0.9997
## mu[6]   1.0001
## mu[7]   1.0003
## mu[8]   0.9996
## mu[9]   0.9995
## mu[10]  0.9995
## mu[11]  0.9995
## mu[12]  1.0001
## mu[13]  0.9997
## mu[14]  1.0000
## mu[15]  0.9994
## mu[16]  0.9993
## mu[17]  0.9991
## mu[18]  0.9996
## mu[19]  0.9993
## mu[20]  0.9998
## mu[21]  0.9993
## mu[22]  0.9997
## mu[23]  0.9997
## mu[24]  0.9999
## mu[25]  0.9999
## mu[26]  1.0000
## mu[27]  0.9997
## mu[28]  0.9995
## mu[29]  1.0000
## mu[30]  0.9995
## mu[31]  0.9992
## mu[32]  0.9992
## mu[33]  0.9996
## mu[34]  0.9993
## mu[35]  0.9999
## mu[36]  0.9998
## mu[37]  0.9994
## mu[38]  0.9995
## mu[39]  1.0000
## mu[40]  0.9996
## mu[41]  0.9998
## mu[42]  0.9999
## mu[43]  1.0001
## mu[44]  0.9996
## mu[45]  0.9993
## mu[46]  0.9997
## mu[47]  0.9997
## mu[48]  0.9997
## mu[49]  1.0000
## mu[50]  0.9998
## mu[51]  0.9996
## mu[52]  0.9992
## mu[53]  0.9992
## mu[54]  0.9993
## mu[55]  0.9996
## mu[56]  0.9996
## mu[57]  1.0002
## mu[58]  0.9996
## mu[59]  0.9998
## mu[60]  0.9992
## mu[61]  0.9997
## mu[62]  0.9997
## mu[63]  0.9997
## mu[64]  0.9993
## mu[65]  0.9995
## mu[66]  0.9994
## mu[67]  0.9997
## mu[68]  0.9993
## mu[69]  0.9995
## mu[70]  0.9997
## mu[71]  1.0001
## mu[72]  0.9995
## mu[73]  0.9999
## mu[74]  0.9996
## mu[75]  0.9997
## mu[76]  0.9998
## mu[77]  1.0003
## mu[78]  0.9993
## mu[79]  0.9993
## mu[80]  0.9996
## mu[81]  0.9993
## mu[82]  0.9999
## mu[83]  0.9998
## mu[84]  0.9998
## mu[85]  0.9995
## mu_a    1.0001
## sigma_y 1.0003
## sigma_a 1.0003
## lp__    1.0020

68.2.5 对比三个模型

对比三个模型的结果

# Posterior mean of the complete-pooling model: one overall mean, drawn
# as a reference line in the comparison plot below.
overall_mean <- broom.mixed::tidyMCMC(fit_pooling) %>% 
  filter(term == "mu") %>% 
  pull(estimate)

# Helper: pull the posterior mean of each county-level mu[i] out of a
# stanfit, tag it with a model label, and attach per-county sample sizes.
# Relies on mu[i] being ordered by i, which matches the factor-level
# order of df_n_county$county (county indices came from as.numeric(factor)).
county_mean_draws <- function(fit, label) {
  fit %>% 
    tidybayes::gather_draws(mu[i]) %>%
    tidybayes::mean_hdi() %>% 
    ungroup() %>% 
    mutate(type = label) %>% 
    select(type, .value) %>% 
    bind_cols(df_n_county)
}

df_no_pooling      <- county_mean_draws(fit_no_pooling, "no_pooling")
df_partial_pooling <- county_mean_draws(fit_partial_pooling, "partial_pooling")

# Shrinkage plot: county estimates against (log) sample size, with the
# pooled overall mean as a horizontal line. Partial-pooling estimates of
# small counties are pulled toward the line; large counties barely move.
bind_rows(df_no_pooling, df_partial_pooling) %>% 
  ggplot(
    aes(x = n, y = .value, color = type)
  ) +
  geom_point(size = 3) +
  geom_hline(yintercept = overall_mean) +
  scale_x_log10()
  • 层级模型可以实现不同分组之间的信息交换
  • 分组的均值向整体的均值靠拢(收缩)
  • 分组的样本量越小,收缩效应越明显

用我们的四川火锅记住它们。

68.3 增加预测变量

68.3.1 增加楼层floor作为预测变量

\[ \begin{aligned} y_i &\sim N(\mu_i, \sigma^2) \\ \mu_i &= \alpha_{j[i]} + \beta~\mathtt{floor}_i \\ \alpha_j &\sim \operatorname{normal}(\gamma, \tau) \\ \beta &\sim \operatorname{normal}(0, 2.5)\\ \gamma &\sim \operatorname{normal}(0, 10) \\ \tau &\sim \operatorname{exp}(1) \\ \end{aligned} \] 不同的县有不同的截距,但有共同的\(\beta\),因此被称为变化的截距

# Varying-intercept model with a predictor: each county j has its own
# intercept alpha[j] (partially pooled via normal(gamma, sigma_a)), while
# the floor effect beta is shared by all counties.
# Naming vs. the math above: gamma is the hyper-mean, sigma_a the
# hyper-sd tau, and sigma_y the observation-level sd.
stan_program <- "
data {
  int<lower=1> N;                            
  int<lower=2> J;                     
  int<lower=1, upper=J> county[N]; 
  vector[N] x; 
  vector[N] y; 
}
parameters {
  vector[J] alpha;
  real beta;
  real gamma;
  real<lower=0> sigma_y;
  real<lower=0> sigma_a;
}
model {
  vector[N] mu;
  for(i in 1:N) {
    mu[i] = alpha[county[i]] + beta * x[i];
  }
  
  for(i in 1:N) {
    y[i] ~ normal(mu[i], sigma_y);
  }
  
  alpha ~ normal(gamma, sigma_a);
  gamma ~ normal(0, 10);
  beta ~ normal(0, 2.5);
  sigma_a ~ exponential(1);
  sigma_y ~ exponential(1);

}
"

# x is the floor indicator (0 = basement, 1 = first floor).
stan_data <- list(
  N      = nrow(radon),
  J      = length(unique(radon$county)),
  county = as.numeric(radon$county),
  x      = radon$floor,
  y      = radon$log_radon
)

fit_intercept_partial <- stan(model_code = stan_program, data = stan_data)
summary(fit_intercept_partial)$summary
##                mean   se_mean      sd      2.5%
## alpha[1]     1.2296 0.0029164 0.23968    0.7515
## alpha[2]     0.9835 0.0013090 0.09705    0.7918
## alpha[3]     1.5089 0.0028585 0.25107    1.0300
## alpha[4]     1.5366 0.0025012 0.21131    1.1246
## alpha[5]     1.4720 0.0027505 0.24882    0.9839
## alpha[6]     1.5045 0.0027811 0.24791    1.0372
## alpha[7]     1.8753 0.0020425 0.16835    1.5606
## alpha[8]     1.7030 0.0030105 0.24654    1.2255
## alpha[9]     1.1953 0.0024709 0.18670    0.8244
## alpha[10]    1.5226 0.0024652 0.21380    1.1035
## alpha[11]    1.4621 0.0024432 0.22490    1.0217
## alpha[12]    1.6036 0.0029178 0.24749    1.1315
## alpha[13]    1.2738 0.0025323 0.21437    0.8563
## alpha[14]    1.8594 0.0023593 0.17070    1.5245
## alpha[15]    1.4278 0.0027330 0.24501    0.9459
## alpha[16]    1.2700 0.0032940 0.26893    0.7334
## alpha[17]    1.3840 0.0027113 0.24818    0.8972
## alpha[18]    1.2575 0.0019995 0.17401    0.9178
## alpha[19]    1.3788 0.0010885 0.08743    1.2077
## alpha[20]    1.6114 0.0029291 0.25388    1.1083
## alpha[21]    1.6523 0.0022651 0.18705    1.2797
## alpha[22]    1.1090 0.0028177 0.22378    0.6531
## alpha[23]    1.4645 0.0031218 0.27331    0.9283
## alpha[24]    1.8813 0.0025722 0.20177    1.4961
## alpha[25]    1.8352 0.0019106 0.16503    1.5185
## alpha[26]    1.3950 0.0008026 0.07018    1.2580
## alpha[27]    1.6428 0.0026969 0.22658    1.1962
## alpha[28]    1.3768 0.0026968 0.22993    0.9261
## alpha[29]    1.3425 0.0029093 0.25185    0.8418
## alpha[30]    1.1390 0.0024109 0.18348    0.7803
## alpha[31]    1.7578 0.0027869 0.22896    1.3094
## alpha[32]    1.3952 0.0026958 0.24170    0.9138
## alpha[33]    1.7451 0.0028009 0.23868    1.2898
## alpha[34]    1.5365 0.0027420 0.25001    1.0382
## alpha[35]    1.1291 0.0028459 0.21620    0.7098
## alpha[36]    1.8966 0.0037182 0.28957    1.3620
## alpha[37]    0.8599 0.0029447 0.20443    0.4497
## alpha[38]    1.6555 0.0028766 0.23927    1.2017
## alpha[39]    1.6188 0.0026863 0.22044    1.1927
## alpha[40]    1.8522 0.0031681 0.24665    1.3887
## alpha[41]    1.7805 0.0022107 0.20596    1.3786
## alpha[42]    1.4761 0.0033526 0.29030    0.9025
## alpha[43]    1.5781 0.0022064 0.20080    1.2012
## alpha[44]    1.2687 0.0024542 0.20293    0.8675
## alpha[45]    1.3642 0.0018308 0.16700    1.0392
## alpha[46]    1.3752 0.0024798 0.23376    0.8925
## alpha[47]    1.3434 0.0032937 0.27292    0.7903
## alpha[48]    1.2921 0.0022676 0.18877    0.9229
## alpha[49]    1.6564 0.0021659 0.17271    1.3256
## alpha[50]    1.6646 0.0039563 0.30446    1.0811
## alpha[51]    1.7949 0.0032040 0.25908    1.2948
## alpha[52]    1.6530 0.0029425 0.26280    1.1395
## alpha[53]    1.4131 0.0031011 0.25936    0.8956
## alpha[54]    1.3687 0.0015747 0.14537    1.0711
## alpha[55]    1.5769 0.0023194 0.19809    1.1892
## alpha[56]    1.3789 0.0029385 0.25946    0.8664
## alpha[57]    1.1305 0.0026448 0.21954    0.6989
## alpha[58]    1.6548 0.0029764 0.23632    1.2017
## alpha[59]    1.5942 0.0028074 0.23938    1.1328
## alpha[60]    1.4409 0.0031013 0.27860    0.8914
## alpha[61]    1.2386 0.0014526 0.11634    1.0142
## alpha[62]    1.7372 0.0027911 0.23258    1.2852
## alpha[63]    1.5609 0.0030293 0.25411    1.0746
## alpha[64]    1.7407 0.0021521 0.18428    1.3733
## alpha[65]    1.4467 0.0029545 0.27528    0.9027
## alpha[66]    1.6196 0.0021199 0.16751    1.2950
## alpha[67]    1.7194 0.0023055 0.17572    1.3750
## alpha[68]    1.2674 0.0022346 0.20315    0.8723
## alpha[69]    1.4008 0.0026241 0.23171    0.9568
## alpha[70]    0.9444 0.0008537 0.06856    0.8080
## alpha[71]    1.5071 0.0016076 0.13007    1.2512
## alpha[72]    1.5628 0.0020197 0.18448    1.1941
## alpha[73]    1.5810 0.0030991 0.27465    1.0376
## alpha[74]    1.2891 0.0027716 0.24230    0.7932
## alpha[75]    1.5803 0.0029429 0.26745    1.0438
## alpha[76]    1.7199 0.0028879 0.24332    1.2484
## alpha[77]    1.6882 0.0024069 0.20582    1.2836
## alpha[78]    1.4039 0.0027957 0.22665    0.9695
## alpha[79]    1.1455 0.0028660 0.25118    0.6299
## alpha[80]    1.3746 0.0012347 0.10350    1.1730
## alpha[81]    1.9315 0.0041013 0.26537    1.4245
## alpha[82]    1.6225 0.0035938 0.29828    1.0478
## alpha[83]    1.6023 0.0019508 0.17117    1.2684
## alpha[84]    1.6154 0.0018864 0.16986    1.2785
## alpha[85]    1.4176 0.0030439 0.27129    0.8784
## beta        -0.6625 0.0010165 0.06721   -0.7960
## gamma        1.4928 0.0007845 0.04925    1.3989
## sigma_y      0.7268 0.0002369 0.01744    0.6928
## sigma_a      0.3200 0.0012738 0.04440    0.2381
## lp__      -112.6209 0.2776696 8.65002 -129.9891
##                 25%       50%       75%    97.5%
## alpha[1]     1.0750    1.2288    1.3929   1.6820
## alpha[2]     0.9177    0.9843    1.0467   1.1770
## alpha[3]     1.3416    1.5056    1.6769   2.0016
## alpha[4]     1.3980    1.5376    1.6802   1.9530
## alpha[5]     1.3110    1.4729    1.6379   1.9791
## alpha[6]     1.3361    1.5043    1.6722   1.9812
## alpha[7]     1.7605    1.8729    1.9879   2.2064
## alpha[8]     1.5382    1.7005    1.8617   2.2086
## alpha[9]     1.0689    1.2000    1.3211   1.5511
## alpha[10]    1.3764    1.5245    1.6670   1.9470
## alpha[11]    1.3089    1.4630    1.6119   1.9035
## alpha[12]    1.4340    1.6033    1.7721   2.0976
## alpha[13]    1.1311    1.2725    1.4211   1.6925
## alpha[14]    1.7438    1.8570    1.9728   2.1967
## alpha[15]    1.2649    1.4260    1.5879   1.9032
## alpha[16]    1.0953    1.2776    1.4495   1.7756
## alpha[17]    1.2188    1.3853    1.5475   1.8726
## alpha[18]    1.1413    1.2590    1.3748   1.5928
## alpha[19]    1.3186    1.3803    1.4385   1.5460
## alpha[20]    1.4415    1.6147    1.7806   2.1129
## alpha[21]    1.5255    1.6524    1.7815   2.0183
## alpha[22]    0.9687    1.1111    1.2549   1.5538
## alpha[23]    1.2783    1.4606    1.6518   1.9946
## alpha[24]    1.7431    1.8814    2.0163   2.2829
## alpha[25]    1.7223    1.8296    1.9495   2.1565
## alpha[26]    1.3468    1.3950    1.4434   1.5316
## alpha[27]    1.4893    1.6412    1.7934   2.0900
## alpha[28]    1.2202    1.3768    1.5357   1.8276
## alpha[29]    1.1800    1.3453    1.5130   1.8301
## alpha[30]    1.0123    1.1368    1.2637   1.4891
## alpha[31]    1.6024    1.7579    1.9067   2.2088
## alpha[32]    1.2353    1.3973    1.5570   1.8686
## alpha[33]    1.5821    1.7439    1.8990   2.2301
## alpha[34]    1.3685    1.5377    1.7061   2.0288
## alpha[35]    0.9782    1.1314    1.2787   1.5469
## alpha[36]    1.6973    1.8867    2.0840   2.4794
## alpha[37]    0.7274    0.8614    0.9984   1.2614
## alpha[38]    1.4947    1.6525    1.8133   2.1289
## alpha[39]    1.4729    1.6149    1.7679   2.0454
## alpha[40]    1.6767    1.8491    2.0249   2.3416
## alpha[41]    1.6413    1.7805    1.9179   2.1835
## alpha[42]    1.2834    1.4774    1.6638   2.0585
## alpha[43]    1.4371    1.5795    1.7150   1.9675
## alpha[44]    1.1339    1.2664    1.4076   1.6772
## alpha[45]    1.2485    1.3659    1.4810   1.6882
## alpha[46]    1.2230    1.3769    1.5311   1.8263
## alpha[47]    1.1713    1.3430    1.5214   1.8646
## alpha[48]    1.1626    1.2922    1.4177   1.6708
## alpha[49]    1.5408    1.6536    1.7742   1.9965
## alpha[50]    1.4563    1.6575    1.8586   2.2886
## alpha[51]    1.6199    1.7853    1.9670   2.3092
## alpha[52]    1.4772    1.6502    1.8266   2.1780
## alpha[53]    1.2392    1.4121    1.5864   1.9344
## alpha[54]    1.2712    1.3681    1.4672   1.6527
## alpha[55]    1.4434    1.5796    1.7128   1.9589
## alpha[56]    1.2140    1.3802    1.5483   1.8915
## alpha[57]    0.9798    1.1336    1.2798   1.5558
## alpha[58]    1.4920    1.6503    1.8134   2.1161
## alpha[59]    1.4335    1.5910    1.7519   2.0692
## alpha[60]    1.2511    1.4387    1.6293   1.9793
## alpha[61]    1.1601    1.2394    1.3158   1.4626
## alpha[62]    1.5753    1.7351    1.8943   2.1902
## alpha[63]    1.3912    1.5600    1.7278   2.0582
## alpha[64]    1.6185    1.7403    1.8652   2.0913
## alpha[65]    1.2663    1.4454    1.6270   1.9841
## alpha[66]    1.5076    1.6163    1.7300   1.9445
## alpha[67]    1.6009    1.7199    1.8359   2.0614
## alpha[68]    1.1343    1.2682    1.4066   1.6642
## alpha[69]    1.2369    1.4019    1.5616   1.8442
## alpha[70]    0.8993    0.9444    0.9912   1.0790
## alpha[71]    1.4218    1.5069    1.5923   1.7626
## alpha[72]    1.4412    1.5639    1.6857   1.9250
## alpha[73]    1.4014    1.5764    1.7636   2.1246
## alpha[74]    1.1268    1.2938    1.4520   1.7637
## alpha[75]    1.4008    1.5807    1.7561   2.1119
## alpha[76]    1.5543    1.7205    1.8841   2.2030
## alpha[77]    1.5482    1.6897    1.8263   2.0926
## alpha[78]    1.2489    1.4052    1.5546   1.8426
## alpha[79]    0.9821    1.1474    1.3193   1.6329
## alpha[80]    1.3058    1.3731    1.4440   1.5837
## alpha[81]    1.7520    1.9260    2.1055   2.4787
## alpha[82]    1.4192    1.6205    1.8162   2.2180
## alpha[83]    1.4883    1.6000    1.7141   1.9364
## alpha[84]    1.5007    1.6144    1.7265   1.9560
## alpha[85]    1.2406    1.4163    1.5955   1.9491
## beta        -0.7082   -0.6613   -0.6173  -0.5322
## gamma        1.4598    1.4927    1.5248   1.5936
## sigma_y      0.7149    0.7263    0.7385   0.7626
## sigma_a      0.2889    0.3183    0.3488   0.4123
## lp__      -118.5215 -112.5025 -106.6465 -96.2346
##            n_eff   Rhat
## alpha[1]  6754.2 0.9993
## alpha[2]  5497.0 1.0003
## alpha[3]  7714.4 0.9994
## alpha[4]  7136.9 0.9996
## alpha[5]  8183.4 0.9995
## alpha[6]  7945.7 0.9994
## alpha[7]  6793.9 0.9992
## alpha[8]  6706.5 0.9995
## alpha[9]  5709.4 0.9996
## alpha[10] 7521.9 0.9995
## alpha[11] 8473.4 0.9995
## alpha[12] 7194.6 0.9993
## alpha[13] 7166.4 0.9993
## alpha[14] 5234.9 0.9997
## alpha[15] 8037.3 0.9993
## alpha[16] 6665.5 0.9993
## alpha[17] 8378.5 0.9993
## alpha[18] 7573.7 0.9995
## alpha[19] 6452.3 0.9995
## alpha[20] 7512.8 0.9998
## alpha[21] 6819.3 0.9994
## alpha[22] 6307.6 0.9999
## alpha[23] 7664.5 0.9996
## alpha[24] 6152.9 0.9998
## alpha[25] 7461.4 0.9995
## alpha[26] 7645.3 0.9993
## alpha[27] 7058.5 0.9995
## alpha[28] 7269.9 1.0000
## alpha[29] 7493.7 0.9996
## alpha[30] 5792.3 0.9997
## alpha[31] 6749.8 0.9996
## alpha[32] 8038.2 0.9995
## alpha[33] 7261.5 0.9995
## alpha[34] 8312.9 0.9997
## alpha[35] 5771.3 0.9997
## alpha[36] 6065.4 1.0005
## alpha[37] 4819.6 0.9999
## alpha[38] 6918.5 1.0002
## alpha[39] 6733.7 1.0006
## alpha[40] 6061.4 1.0000
## alpha[41] 8679.9 0.9997
## alpha[42] 7497.7 0.9991
## alpha[43] 8282.3 0.9994
## alpha[44] 6837.3 0.9992
## alpha[45] 8320.1 0.9996
## alpha[46] 8886.1 0.9991
## alpha[47] 6866.0 0.9994
## alpha[48] 6930.4 0.9992
## alpha[49] 6359.0 0.9995
## alpha[50] 5922.0 1.0004
## alpha[51] 6538.3 1.0007
## alpha[52] 7976.2 1.0003
## alpha[53] 6994.6 0.9995
## alpha[54] 8523.0 1.0002
## alpha[55] 7293.8 0.9996
## alpha[56] 7796.4 0.9996
## alpha[57] 6889.9 0.9994
## alpha[58] 6304.0 0.9996
## alpha[59] 7270.6 0.9994
## alpha[60] 8070.3 1.0000
## alpha[61] 6414.8 0.9996
## alpha[62] 6943.8 0.9995
## alpha[63] 7036.4 0.9992
## alpha[64] 7332.5 0.9999
## alpha[65] 8681.4 0.9994
## alpha[66] 6244.0 0.9995
## alpha[67] 5809.2 0.9994
## alpha[68] 8264.7 0.9992
## alpha[69] 7797.1 0.9995
## alpha[70] 6449.3 0.9997
## alpha[71] 6545.9 0.9997
## alpha[72] 8342.8 0.9995
## alpha[73] 7854.0 0.9995
## alpha[74] 7642.8 0.9992
## alpha[75] 8259.4 0.9993
## alpha[76] 7098.8 0.9996
## alpha[77] 7312.8 0.9997
## alpha[78] 6572.5 0.9997
## alpha[79] 7680.5 0.9995
## alpha[80] 7026.3 0.9998
## alpha[81] 4186.6 0.9999
## alpha[82] 6888.7 0.9994
## alpha[83] 7698.6 0.9993
## alpha[84] 8108.7 1.0006
## alpha[85] 7943.2 0.9992
## beta      4372.1 1.0007
## gamma     3941.9 1.0009
## sigma_y   5422.0 0.9994
## sigma_a   1215.0 1.0028
## lp__       970.5 1.0022

68.3.2 截距中增加预测因子

相当于在第二层参数中增加预测因子

\[ \begin{aligned} y_i &\sim N(\mu_i, ~\sigma) \\ \mu_i &= \alpha_{j[i]} + \beta~\mathtt{floor}_i \\ \alpha_j &\sim \operatorname{normal}(\gamma_0 + \gamma_1~u_j, ~\tau) \\ \beta &\sim \operatorname{normal}(0, 1)\\ \gamma_0 &\sim \operatorname{normal}(0, 2.5)\\ \gamma_1 &\sim \operatorname{normal}(0, 2.5)\\ \tau &\sim \operatorname{exp}(1) \\ \end{aligned} \]

# Stan model: varying intercepts by county with a county-level
# predictor u in the second level of the hierarchy:
#   y_i     ~ Normal(alpha_{county[i]} + beta * floor_i, sigma_y)
#   alpha_j ~ Normal(gamma0 + gamma1 * u_j, sigma_a)
# beta, gamma0, gamma1 get weakly-informative normal priors; both
# scale parameters get Exponential(1) priors.
# NOTE(review): `int x[N]` etc. is the pre-Stan-2.26 array syntax
# (newer Stan requires `array[N] int x`) — confirm the installed
# rstan version still accepts it.
stan_program <- "
data {
  int<lower=0> N;
  vector[N] y;             
  int<lower=0, upper=1> x[N];             
  int<lower=2> J;                     
  int<lower=1, upper=J> county[N]; 
  vector[J] u; 
}
parameters {
  vector[J] alpha;
  real beta;
  real gamma0;
  real gamma1;
  real<lower=0> sigma_a;
  real<lower=0> sigma_y;
}
model {
  vector[N] mu;

  for(i in 1:N) {
    mu[i] = alpha[county[i]] + x[i] * beta;
  }
  
  for(j in 1:J) {
    alpha[j] ~ normal(gamma0 + gamma1 * u[j], sigma_a);
  }
  
  y ~ normal(mu, sigma_y);

  beta ~ normal(0, 1);
  gamma0 ~ normal(0, 2.5);
  gamma1 ~ normal(0, 2.5);
  sigma_a ~ exponential(1);
  sigma_y ~ exponential(1);

}


"

# Data list for the varying-intercept model.
#
# BUG FIX: `u` must be the county-level uranium measure ordered so that
# u[j] lines up with county code j (= as.numeric(radon$county)).
# `unique(radon$log_uranium)` only happens to give that when the rows
# are sorted by county AND no two counties share the same uranium
# value; if either assumption fails, u is silently misaligned or of the
# wrong length. Aggregating by the county factor is robust: tapply()
# returns one value per factor level, in level order, which is exactly
# the integer coding used for `county`. (log_uranium is constant within
# a county, so the per-county mean reproduces that constant.)
stan_data <- list(
  N      = nrow(radon),
  J      = nlevels(radon$county),
  county = as.numeric(radon$county),
  x      = radon$floor,
  y      = radon$log_radon,
  u      = as.vector(tapply(radon$log_uranium, radon$county, mean))
)

# Fit with rstan's default 4 chains / 2000 iterations, then summarise
# the population-level parameters.
fit_intercept_partial_2 <- stan(model_code = stan_program, data = stan_data)
summary(fit_intercept_partial_2, c("beta", "gamma0", "gamma1", "sigma_y", "sigma_a"))$summary
##            mean   se_mean      sd    2.5%     25%
## beta    -0.6476 0.0009836 0.06729 -0.7792 -0.6910
## gamma0   1.4930 0.0008447 0.04440  1.4066  1.4635
## gamma1   0.5861 0.0018007 0.10816  0.3735  0.5117
## sigma_y  0.7250 0.0002041 0.01757  0.6919  0.7126
## sigma_a  0.2444 0.0013112 0.03921  0.1746  0.2177
##             50%     75%   97.5%  n_eff  Rhat
## beta    -0.6481 -0.6019 -0.5185 4679.6 1.000
## gamma0   1.4931  1.5231  1.5794 2763.0 1.000
## gamma1   0.5871  0.6600  0.7978 3608.0 1.000
## sigma_y  0.7246  0.7371  0.7596 7415.6 1.000
## sigma_a  0.2412  0.2694  0.3268  894.1 1.002

beta怎么解释? - 负号,说明楼上比楼下氡含量低

68.3.3 变化的截距和斜率

之前模型假定,不管哪个县,所有的房屋一楼和二楼的氡含量的差别是一样的(beta系数是不变的),现在,我们将模型进一步扩展,假定一楼和二楼的氡含量的差别不是固定不变的,而是随县变化的,也就说不同县的房屋,一二楼氡含量差别是不同的。

写出变化的截距和斜率模型的数学表达式

\[ \begin{aligned}[t] y_i &\sim \operatorname{Normal}(\mu_i, \sigma_y) \\ \mu_i &= \alpha_{j[i]} + \beta_{j[i]}~\mathtt{floor}_i \\ \begin{bmatrix} \alpha_j \\ \beta_j \end{bmatrix} & \sim \operatorname{MVNormal} \left( \begin{bmatrix} \gamma_0^{\alpha} + \gamma_1^{\alpha} ~ u_j \\ \gamma_0^{\beta} + \gamma_1^{\beta} ~ u_j \\ \end{bmatrix}, ~\mathbf S \right) \\ \mathbf S & = \begin{bmatrix} \sigma_\alpha & 0 \\ 0 & \sigma_\beta \end{bmatrix} \mathbf R \begin{bmatrix} \sigma_\alpha & 0 \\ 0 & \sigma_\beta \end{bmatrix} \\ & = \begin{bmatrix} \sigma_\alpha & 0 \\ 0 & \sigma_\beta \end{bmatrix} \begin{bmatrix} 1 & \rho \\ \rho & 1 \end{bmatrix} \begin{bmatrix} \sigma_\alpha & 0 \\ 0 & \sigma_\beta \end{bmatrix} \\ \gamma_a & \sim \operatorname{Normal}(0, 2) \\ \gamma_b & \sim \operatorname{Normal}(0, 2) \\ \sigma & \sim \operatorname{Exponential}(1) \\ \sigma_\alpha & \sim \operatorname{Exponential}(1) \\ \sigma_\beta & \sim \operatorname{Exponential}(1) \\ \mathbf R & \sim \operatorname{LKJcorr}(2) \end{aligned} \]

  • 模型表达式中 \(\alpha_j\)\(\beta_j\) 不是直接给先验,而是给的层级先验。

  • \(\alpha_j\)\(\beta_j\) 也可能存在关联,常见的有,多元正态分布(Multivariate Gaussian Distribution)

\[ \begin{aligned}[t] \begin{bmatrix} \alpha_j \\ \beta_j \end{bmatrix} &\sim \operatorname{MVNormal}\left(\begin{bmatrix}\gamma_{\alpha} \\ \gamma_{\beta} \end{bmatrix}, \mathbf S\right) \\ \end{aligned} \]

68.3.4 协方差矩阵(covariance matrix)

MASS::mvrnorm(n, mu, Sigma)产生多元高斯分布的随机数,每组随机变量高度相关。 比如,人的身高服从正态分布,人的体重也服从正态分布,同时身高和体重又存在强烈的关联。

  • n: 随机样本的大小
  • mu: 多元高斯分布的均值向量
  • Sigma: 协方差矩阵,主要这里是大写的S (Sigma),提醒我们它是一个矩阵,不是一个数值
# Parameters of a bivariate normal: the two means, the two marginal
# standard deviations, and the correlation between the dimensions.
a       <- 3.5
b       <- -1
sigma_a <- 1
sigma_b <- 0.5
rho     <- -0.7
mu      <- c(a, b)
# Off-diagonal covariance, then the symmetric 2x2 covariance matrix
# assembled row by row.
cov_ab  <- sigma_a * sigma_b * rho
sigma   <- rbind(
  c(sigma_a^2, cov_ab),
  c(cov_ab,    sigma_b^2)
)
sigma
##       [,1]  [,2]
## [1,]  1.00 -0.35
## [2,] -0.35  0.25
# Draw 1000 correlated pairs from the bivariate normal defined by `mu`
# and `sigma`, then label the two columns for plotting below.
d <- MASS::mvrnorm(1000, mu = mu, Sigma = sigma) %>%
  as.data.frame() %>%
  set_names("group_a", "group_b")
head(d)
##   group_a group_b
## 1   2.256 -0.7507
## 2   2.516 -0.5077
## 3   2.756 -1.1689
## 4   3.402 -1.2349
## 5   3.930 -1.0511
## 6   4.542 -1.3728
# Marginal density of group_a, with the theoretical Normal(3.5, 1)
# density overlaid as a dashed curve for comparison.
ggplot(d, aes(x = group_a)) +
  geom_density(
    color = "transparent",
    fill = "dodgerblue3",
    alpha = 1 / 2
  ) +
  stat_function(
    fun = dnorm,
    args = list(mean = 3.5, sd = 1),
    linetype = 2
  )
# Marginal density of group_b against its theoretical Normal(-1, 0.5)
# density.
ggplot(d, aes(x = group_b)) +
  geom_density(
    color = "transparent",
    fill = "dodgerblue3",
    alpha = 1 / 2
  ) +
  stat_function(
    fun = dnorm,
    args = list(mean = -1, sd = 0.5),
    linetype = 2
  )
# Joint scatter of the two variables with a 95% normal-theory ellipse,
# showing the negative correlation (rho = -0.7).
ggplot(d, aes(x = group_a, y = group_b)) +
  geom_point() +
  stat_ellipse(type = "norm", level = 0.95)

68.3.5 回到模型

在stan中要给协方差矩阵指定一个先验,Priors for Covariances

# Stan model: varying intercepts AND varying slopes per county, with
# the county-level predictor u entering the mean of both, and a
# correlated (alpha_j, beta_j) prior:
#   (alpha_j, beta_j) ~ MVNormal(MU_j, quad_form_diag(Rho, tau))
# where MU_j = (gamma_a[1] + gamma_a[2]*u_j, gamma_b[1] + gamma_b[2]*u_j)
# and Rho is a 2x2 LKJ(2) correlation matrix with scale vector tau.
# NOTE(review): this is the *centered* parameterization of the
# multivariate hierarchical prior; the printed summaries below show
# small n_eff (down to ~61) and Rhat up to ~1.07 for several alpha/beta
# entries, the usual symptom — a non-centered (Cholesky-factor)
# parameterization would likely sample more efficiently.
# NOTE(review): `vector[2] YY[J]` / `int x[N]` is pre-Stan-2.26 array
# syntax; confirm the installed rstan version still accepts it.
stan_program <- "
data {
  int<lower=0> N;
  vector[N] y;             
  int<lower=0, upper=1> x[N];             
  int<lower=2> J;                     
  int<lower=1, upper=J> county[N]; 
  vector[J] u; 
}
parameters {
  vector[J] alpha;
  vector[J] beta;
  vector[2] gamma_a;
  vector[2] gamma_b;
  
  real<lower=0> sigma;
  vector<lower=0>[2] tau;
  corr_matrix[2] Rho;
}
transformed parameters {
  vector[2] YY[J];

  for (j in 1:J) {
    YY[j] = [alpha[j], beta[j]]';
  }
}
model {
  vector[N] mu;
  vector[2] MU[J];
  
  sigma ~ exponential(1);
  tau ~ exponential(1);
  Rho ~ lkj_corr(2);
  gamma_a ~ normal(0, 2);
  gamma_b ~ normal(0, 2);
  
  for(i in 1:N) {
    mu[i] = alpha[county[i]] + beta[county[i]] * x[i];  
  }
  
  for(j in 1:J) {
    MU[j, 1] = gamma_a[1] + gamma_a[2] * u[j];
    MU[j, 2] = gamma_b[1] + gamma_b[2] * u[j];
  }
 

  target += multi_normal_lpdf(YY | MU, quad_form_diag(Rho, tau));
  
  y ~ normal(mu, sigma); 
}
"


# Data list for the varying-intercept-and-slope model.
#
# BUG FIX: `u` must be the county-level uranium measure ordered so that
# u[j] lines up with county code j (= as.numeric(radon$county)).
# `unique(radon$log_uranium)` only happens to give that when the rows
# are sorted by county AND no two counties share the same uranium
# value; otherwise u is silently misaligned or of the wrong length.
# tapply() over the county factor returns one value per level, in level
# order — exactly the integer coding used for `county`. (log_uranium is
# constant within a county, so the per-county mean reproduces it.)
stan_data <- list(
  N      = nrow(radon),
  J      = nlevels(radon$county),
  county = as.numeric(radon$county),
  x      = radon$floor,
  y      = radon$log_radon,
  u      = as.vector(tapply(radon$log_uranium, radon$county, mean))
)

# Fit, then summarise the county-specific intercepts alpha[j].
fit_slope_partial <- stan(model_code = stan_program, data = stan_data)
summary(fit_slope_partial, c("alpha"))$summary
##             mean  se_mean      sd   2.5%    25%    50%
## alpha[1]  0.9448 0.003809 0.21559 0.5206 0.8024 0.9450
## alpha[2]  0.9152 0.002968 0.09328 0.7329 0.8508 0.9131
## alpha[3]  1.4201 0.013716 0.23353 0.9383 1.2730 1.4252
## alpha[4]  1.2363 0.004205 0.19946 0.8743 1.1048 1.2260
## alpha[5]  1.4109 0.008634 0.21018 0.9911 1.2734 1.4098
## alpha[6]  1.6910 0.006269 0.22315 1.2500 1.5399 1.6917
## alpha[7]  1.8776 0.005531 0.15520 1.5682 1.7729 1.8776
## alpha[8]  1.8099 0.014159 0.22728 1.3843 1.6595 1.7963
## alpha[9]  1.1306 0.004694 0.17577 0.7825 1.0142 1.1308
## alpha[10] 1.5822 0.003775 0.20466 1.1743 1.4504 1.5719
## alpha[11] 1.2140 0.009673 0.20385 0.8271 1.0721 1.2116
## alpha[12] 1.7066 0.007357 0.21344 1.2946 1.5554 1.7024
## alpha[13] 1.0357 0.003693 0.19397 0.6460 0.9120 1.0382
## alpha[14] 1.9283 0.005345 0.16384 1.6106 1.8153 1.9248
## alpha[15] 1.4050 0.005654 0.21361 1.0052 1.2526 1.4012
## alpha[16] 1.0614 0.009702 0.23819 0.5998 0.8987 1.0583
## alpha[17] 1.6448 0.004725 0.21797 1.2222 1.5014 1.6426
## alpha[18] 1.0428 0.009360 0.17466 0.6953 0.9282 1.0473
## alpha[19] 1.3905 0.001461 0.08620 1.2175 1.3330 1.3900
## alpha[20] 1.7087 0.003554 0.21629 1.2883 1.5645 1.7032
## alpha[21] 1.6755 0.002974 0.17184 1.3378 1.5644 1.6750
## alpha[22] 1.3240 0.003737 0.20745 0.8995 1.1938 1.3295
## alpha[23] 1.7227 0.004355 0.22652 1.2754 1.5787 1.7193
## alpha[24] 1.8924 0.003329 0.17812 1.5557 1.7714 1.8891
## alpha[25] 1.8132 0.003325 0.16090 1.4968 1.7078 1.8113
## alpha[26] 1.3974 0.001261 0.06984 1.2612 1.3495 1.3997
## alpha[27] 1.8254 0.007278 0.20284 1.4474 1.6862 1.8307
## alpha[28] 1.1895 0.003871 0.20538 0.7878 1.0569 1.1946
## alpha[29] 1.0097 0.004195 0.21752 0.5850 0.8585 1.0096
## alpha[30] 0.9941 0.003285 0.16341 0.6629 0.8890 0.9957
## alpha[31] 1.8370 0.005390 0.19711 1.4486 1.7039 1.8356
## alpha[32] 1.3906 0.004238 0.20841 0.9676 1.2541 1.3953
## alpha[33] 1.7357 0.004168 0.21342 1.3159 1.5975 1.7381
## alpha[34] 1.5261 0.006119 0.22786 1.0790 1.3735 1.5239
## alpha[35] 0.7678 0.006850 0.21425 0.3320 0.6306 0.7708
## alpha[36] 1.9551 0.021126 0.25870 1.4951 1.7782 1.9385
## alpha[37] 0.7282 0.003683 0.18333 0.3529 0.6065 0.7313
## alpha[38] 1.2349 0.005673 0.22550 0.7982 1.0822 1.2328
## alpha[39] 1.6806 0.004081 0.19987 1.2893 1.5475 1.6828
## alpha[40] 1.9808 0.004462 0.21053 1.5810 1.8383 1.9777
## alpha[41] 1.8627 0.003818 0.17855 1.5051 1.7471 1.8631
## alpha[42] 1.5702 0.005826 0.24940 1.0637 1.4114 1.5681
## alpha[43] 1.7298 0.008850 0.20908 1.3436 1.5892 1.7224
## alpha[44] 1.3593 0.011746 0.20375 0.9492 1.2293 1.3540
## alpha[45] 1.4522 0.012914 0.17512 1.0817 1.3439 1.4554
## alpha[46] 1.4057 0.003481 0.19449 1.0129 1.2776 1.4102
## alpha[47] 1.2853 0.005450 0.23037 0.8337 1.1401 1.2806
## alpha[48] 1.2929 0.003168 0.17108 0.9526 1.1834 1.2905
## alpha[49] 1.7252 0.007369 0.16726 1.4051 1.6139 1.7267
## alpha[50] 1.8367 0.012944 0.24345 1.3810 1.6700 1.8371
## alpha[51] 1.8327 0.005088 0.20982 1.4103 1.6944 1.8337
## alpha[52] 1.8182 0.003398 0.20961 1.4137 1.6824 1.8147
## alpha[53] 1.5835 0.003730 0.21571 1.1481 1.4465 1.5826
## alpha[54] 1.4499 0.002969 0.13632 1.1808 1.3599 1.4497
## alpha[55] 1.4468 0.004216 0.18369 1.0958 1.3241 1.4414
## alpha[56] 1.3890 0.005149 0.22535 0.9551 1.2379 1.3844
## alpha[57] 1.1377 0.004185 0.20041 0.7378 1.0069 1.1433
## alpha[58] 1.8474 0.003931 0.20928 1.4372 1.7100 1.8510
## alpha[59] 1.7302 0.007669 0.21871 1.2885 1.5874 1.7327
## alpha[60] 1.6124 0.004453 0.21642 1.1959 1.4667 1.6206
## alpha[61] 1.1665 0.003636 0.11653 0.9425 1.0836 1.1660
## alpha[62] 1.8279 0.006175 0.21219 1.4147 1.6852 1.8290
## alpha[63] 1.7102 0.017761 0.24775 1.1367 1.5582 1.7249
## alpha[64] 1.7622 0.002751 0.16615 1.4413 1.6478 1.7573
## alpha[65] 1.7717 0.010642 0.24620 1.2844 1.6085 1.7713
## alpha[66] 1.4866 0.003301 0.16821 1.1627 1.3728 1.4893
## alpha[67] 1.4694 0.004692 0.16966 1.1313 1.3594 1.4735
## alpha[68] 1.3175 0.012752 0.19384 0.8840 1.1993 1.3242
## alpha[69] 1.1124 0.008920 0.22298 0.6732 0.9633 1.1091
## alpha[70] 0.9719 0.001562 0.07152 0.8343 0.9243 0.9691
## alpha[71] 1.5469 0.002086 0.12443 1.3034 1.4629 1.5474
## alpha[72] 1.6364 0.003011 0.17015 1.3032 1.5231 1.6376
## alpha[73] 1.7998 0.016630 0.24557 1.3152 1.6432 1.8021
## alpha[74] 1.4971 0.004611 0.20762 1.0668 1.3623 1.4995
## alpha[75] 1.5016 0.009817 0.22235 1.0850 1.3552 1.5057
## alpha[76] 1.9047 0.005912 0.21322 1.4976 1.7536 1.9022
## alpha[77] 1.7208 0.004864 0.18264 1.3806 1.5920 1.7133
## alpha[78] 1.0925 0.007402 0.21690 0.6850 0.9448 1.0861
## alpha[79] 1.3446 0.012634 0.22950 0.8934 1.1999 1.3616
## alpha[80] 1.3762 0.001577 0.09704 1.1821 1.3151 1.3743
## alpha[81] 1.8717 0.004864 0.23243 1.4287 1.7151 1.8738
## alpha[82] 1.7375 0.014592 0.25071 1.2571 1.5684 1.7231
## alpha[83] 1.7825 0.008546 0.17447 1.4465 1.6690 1.7784
## alpha[84] 1.5641 0.004317 0.15486 1.2742 1.4529 1.5602
## alpha[85] 1.6390 0.006809 0.23918 1.1827 1.4804 1.6295
##              75% 97.5%  n_eff   Rhat
## alpha[1]  1.0905 1.361 3204.3 1.0003
## alpha[2]  0.9779 1.105  988.1 1.0072
## alpha[3]  1.5757 1.867  289.9 1.0137
## alpha[4]  1.3634 1.647 2249.8 1.0018
## alpha[5]  1.5565 1.800  592.6 1.0096
## alpha[6]  1.8398 2.128 1267.2 1.0039
## alpha[7]  1.9851 2.183  787.5 1.0087
## alpha[8]  1.9532 2.282  257.7 1.0141
## alpha[9]  1.2565 1.454 1401.9 1.0045
## alpha[10] 1.7175 1.986 2938.8 1.0010
## alpha[11] 1.3586 1.600  444.2 1.0128
## alpha[12] 1.8469 2.130  841.7 1.0052
## alpha[13] 1.1595 1.414 2758.9 1.0039
## alpha[14] 2.0442 2.243  939.6 1.0048
## alpha[15] 1.5487 1.841 1427.4 1.0023
## alpha[16] 1.2238 1.543  602.7 1.0027
## alpha[17] 1.7856 2.093 2128.2 1.0031
## alpha[18] 1.1617 1.377  348.2 1.0108
## alpha[19] 1.4463 1.564 3481.1 1.0007
## alpha[20] 1.8514 2.146 3703.2 1.0000
## alpha[21] 1.7842 2.026 3339.3 0.9999
## alpha[22] 1.4618 1.722 3082.1 1.0004
## alpha[23] 1.8720 2.160 2705.0 1.0023
## alpha[24] 2.0108 2.245 2862.4 1.0009
## alpha[25] 1.9161 2.135 2341.9 1.0008
## alpha[26] 1.4432 1.533 3066.0 1.0010
## alpha[27] 1.9620 2.224  776.8 1.0092
## alpha[28] 1.3220 1.594 2814.8 1.0008
## alpha[29] 1.1515 1.432 2689.0 0.9999
## alpha[30] 1.0975 1.313 2474.1 1.0006
## alpha[31] 1.9781 2.227 1337.5 1.0046
## alpha[32] 1.5243 1.804 2418.9 0.9996
## alpha[33] 1.8696 2.153 2621.4 0.9999
## alpha[34] 1.6846 1.992 1386.7 1.0013
## alpha[35] 0.9193 1.178  978.3 1.0054
## alpha[36] 2.1119 2.602  150.0 1.0269
## alpha[37] 0.8484 1.083 2477.9 1.0036
## alpha[38] 1.3800 1.687 1580.2 1.0048
## alpha[39] 1.8166 2.072 2399.2 1.0052
## alpha[40] 2.1191 2.402 2226.5 1.0000
## alpha[41] 1.9800 2.208 2187.2 1.0004
## alpha[42] 1.7351 2.078 1832.5 1.0007
## alpha[43] 1.8618 2.175  558.2 1.0125
## alpha[44] 1.4859 1.770  300.9 1.0118
## alpha[45] 1.5722 1.789  183.9 1.0293
## alpha[46] 1.5396 1.772 3121.9 1.0003
## alpha[47] 1.4441 1.730 1787.1 1.0025
## alpha[48] 1.4101 1.629 2916.7 1.0008
## alpha[49] 1.8352 2.055  515.1 1.0082
## alpha[50] 1.9988 2.320  353.7 1.0145
## alpha[51] 1.9686 2.247 1700.9 1.0034
## alpha[52] 1.9479 2.249 3804.2 1.0000
## alpha[53] 1.7245 2.007 3345.1 0.9995
## alpha[54] 1.5405 1.715 2108.1 1.0044
## alpha[55] 1.5744 1.809 1898.5 1.0003
## alpha[56] 1.5435 1.834 1915.4 1.0059
## alpha[57] 1.2721 1.528 2292.8 1.0000
## alpha[58] 1.9921 2.256 2834.8 1.0004
## alpha[59] 1.8823 2.140  813.3 1.0008
## alpha[60] 1.7525 2.041 2362.3 0.9997
## alpha[61] 1.2465 1.395 1027.2 1.0025
## alpha[62] 1.9628 2.265 1180.8 1.0058
## alpha[63] 1.8772 2.163  194.6 1.0195
## alpha[64] 1.8730 2.088 3646.6 1.0013
## alpha[65] 1.9427 2.235  535.2 1.0039
## alpha[66] 1.5957 1.820 2596.6 0.9999
## alpha[67] 1.5815 1.803 1307.5 1.0053
## alpha[68] 1.4499 1.679  231.1 1.0135
## alpha[69] 1.2621 1.559  624.9 1.0068
## alpha[70] 1.0200 1.119 2095.7 1.0017
## alpha[71] 1.6302 1.792 3560.0 1.0014
## alpha[72] 1.7520 1.970 3192.7 1.0014
## alpha[73] 1.9606 2.286  218.1 1.0197
## alpha[74] 1.6419 1.885 2027.6 1.0022
## alpha[75] 1.6446 1.938  513.0 1.0031
## alpha[76] 2.0465 2.325 1300.7 1.0054
## alpha[77] 1.8463 2.091 1410.2 1.0072
## alpha[78] 1.2414 1.521  858.7 1.0054
## alpha[79] 1.5026 1.760  329.9 1.0142
## alpha[80] 1.4397 1.564 3788.2 1.0017
## alpha[81] 2.0194 2.339 2283.6 1.0022
## alpha[82] 1.9023 2.244  295.2 1.0161
## alpha[83] 1.8926 2.139  416.8 1.0150
## alpha[84] 1.6722 1.871 1286.7 1.0017
## alpha[85] 1.7938 2.108 1233.8 1.0037
# Posterior summary of the county-specific floor effects beta[j].
summary(fit_slope_partial, c("beta"))$summary
##             mean  se_mean     sd    2.5%     25%
## beta[1]  -0.2993 0.006124 0.2960 -0.8728 -0.4740
## beta[2]  -0.4689 0.022751 0.2825 -1.0889 -0.6474
## beta[3]  -0.5910 0.005225 0.2503 -1.1167 -0.7235
## beta[4]  -0.4161 0.005070 0.2431 -0.9264 -0.5571
## beta[5]  -0.5488 0.005148 0.2673 -1.0759 -0.6910
## beta[6]  -0.8343 0.007315 0.3210 -1.4928 -1.0095
## beta[7]  -0.5565 0.031385 0.3034 -1.0140 -0.7796
## beta[8]  -0.7505 0.005891 0.2630 -1.2902 -0.8993
## beta[9]  -0.3475 0.015843 0.2937 -0.8285 -0.5344
## beta[10] -0.7628 0.006111 0.2414 -1.2912 -0.8978
## beta[11] -0.3848 0.006276 0.3154 -1.0207 -0.5636
## beta[12] -0.7789 0.006405 0.2960 -1.3761 -0.9383
## beta[13] -0.3223 0.007259 0.3256 -1.0324 -0.4979
## beta[14] -0.8481 0.006098 0.2362 -1.3526 -0.9886
## beta[15] -0.6258 0.004479 0.2540 -1.1551 -0.7743
## beta[16] -0.4077 0.006035 0.3050 -1.0369 -0.5731
## beta[17] -0.9444 0.017257 0.2682 -1.5780 -1.0888
## beta[18] -0.2775 0.008531 0.2571 -0.7468 -0.4438
## beta[19] -0.7241 0.006295 0.2154 -1.2038 -0.8475
## beta[20] -0.7773 0.005863 0.2997 -1.4025 -0.9433
## beta[21] -0.6820 0.006321 0.2726 -1.2075 -0.8407
## beta[22] -0.7827 0.006022 0.2851 -1.3384 -0.9421
## beta[23] -0.8356 0.005909 0.2789 -1.3909 -0.9959
## beta[24] -0.7397 0.004979 0.2526 -1.2549 -0.8851
## beta[25] -0.5404 0.026189 0.2986 -1.0199 -0.7457
## beta[26] -0.7034 0.007604 0.1694 -1.0742 -0.8084
## beta[27] -0.8285 0.006825 0.2684 -1.3462 -0.9873
## beta[28] -0.4519 0.004579 0.2440 -0.9368 -0.5974
## beta[29] -0.3115 0.007033 0.3353 -1.0002 -0.5075
## beta[30] -0.3444 0.006406 0.3163 -1.0172 -0.5194
## beta[31] -0.8057 0.007320 0.3065 -1.4616 -0.9691
## beta[32] -0.6148 0.006048 0.2883 -1.2386 -0.7637
## beta[33] -0.7166 0.006616 0.3098 -1.3956 -0.8707
## beta[34] -0.6747 0.004518 0.2577 -1.2365 -0.8108
## beta[35] -0.2210 0.006601 0.2639 -0.7286 -0.3881
## beta[36] -0.6494 0.016560 0.3124 -1.1781 -0.8474
## beta[37] -0.3172 0.006889 0.2985 -0.9458 -0.4908
## beta[38] -0.2721 0.007788 0.2895 -0.8173 -0.4609
## beta[39] -0.6792 0.006726 0.2811 -1.2372 -0.8380
## beta[40] -0.8466 0.006331 0.2814 -1.4324 -1.0052
## beta[41] -0.7345 0.012222 0.2888 -1.2400 -0.9116
## beta[42] -0.7059 0.005982 0.3040 -1.3217 -0.8702
## beta[43] -1.0281 0.039272 0.3077 -1.7314 -1.2090
## beta[44] -0.7994 0.020356 0.2951 -1.4939 -0.9460
## beta[45] -0.8296 0.008884 0.2336 -1.3356 -0.9667
## beta[46] -0.6514 0.005976 0.2879 -1.2411 -0.8014
## beta[47] -0.7092 0.018523 0.2968 -1.4314 -0.8559
## beta[48] -0.5528 0.006392 0.2680 -1.0686 -0.7050
## beta[49] -0.9211 0.017522 0.2630 -1.5263 -1.0724
## beta[50] -0.8445 0.007052 0.2976 -1.4755 -1.0076
## beta[51] -0.7835 0.006337 0.3083 -1.4257 -0.9431
## beta[52] -0.8406 0.007678 0.3251 -1.5111 -1.0079
## beta[53] -0.7961 0.005297 0.2783 -1.3782 -0.9452
## beta[54] -0.9026 0.015614 0.2484 -1.4700 -1.0432
## beta[55] -0.4809 0.007597 0.2426 -0.9289 -0.6278
## beta[56] -0.7441 0.014111 0.2647 -1.3516 -0.8889
## beta[57] -0.5967 0.005129 0.2727 -1.1828 -0.7443
## beta[58] -0.8065 0.006646 0.2833 -1.3437 -0.9744
## beta[59] -0.8396 0.008244 0.2693 -1.4413 -0.9833
## beta[60] -0.7711 0.006475 0.2929 -1.3747 -0.9330
## beta[61] -0.2989 0.021065 0.2601 -0.7345 -0.4759
## beta[62] -0.5691 0.033782 0.3521 -1.0945 -0.8155
## beta[63] -0.7089 0.012711 0.2916 -1.2228 -0.8846
## beta[64] -0.7644 0.005207 0.2780 -1.3598 -0.9086
## beta[65] -0.8828 0.007817 0.3221 -1.5458 -1.0619
## beta[66] -0.5088 0.004588 0.2009 -0.8979 -0.6291
## beta[67] -0.2408 0.024296 0.2643 -0.6889 -0.4336
## beta[68] -0.6535 0.006579 0.3103 -1.2855 -0.8169
## beta[69] -0.3446 0.006928 0.3358 -1.0537 -0.5280
## beta[70] -0.6522 0.011480 0.1759 -0.9824 -0.7746
## beta[71] -0.7837 0.010604 0.2172 -1.2692 -0.9127
## beta[72] -0.7659 0.006238 0.2942 -1.3529 -0.9277
## beta[73] -0.8660 0.007271 0.3173 -1.5155 -1.0353
## beta[74] -0.7798 0.006246 0.3106 -1.4215 -0.9464
## beta[75] -0.5214 0.010900 0.2796 -1.0267 -0.6916
## beta[76] -0.8758 0.006633 0.2900 -1.4741 -1.0495
## beta[77] -0.8337 0.012663 0.2742 -1.4685 -0.9825
## beta[78] -0.4072 0.006460 0.2753 -1.0004 -0.5645
## beta[79] -0.7990 0.007388 0.2808 -1.4056 -0.9515
## beta[80] -0.7277 0.015684 0.2181 -1.2357 -0.8512
## beta[81] -0.5645 0.020701 0.2908 -1.0484 -0.7600
## beta[82] -0.7660 0.006397 0.3004 -1.3851 -0.9331
## beta[83] -1.1245 0.035025 0.3076 -1.8196 -1.3036
## beta[84] -0.6309 0.004800 0.2664 -1.2019 -0.7691
## beta[85] -0.8026 0.006223 0.3116 -1.4534 -0.9604
##              50%      75%     97.5%   n_eff   Rhat
## beta[1]  -0.3061 -0.13032  0.332761 2336.80 1.0011
## beta[2]  -0.4347 -0.27517  0.024214  154.21 1.0323
## beta[3]  -0.5965 -0.45760 -0.065524 2295.33 0.9997
## beta[4]  -0.4131 -0.26661  0.049563 2299.26 1.0010
## beta[5]  -0.5670 -0.40913  0.051606 2695.84 1.0012
## beta[6]  -0.8310 -0.66376 -0.193837 1925.03 1.0015
## beta[7]  -0.6112 -0.38076  0.146837   93.47 1.0455
## beta[8]  -0.7576 -0.60474 -0.184617 1992.72 1.0009
## beta[9]  -0.4002 -0.18684  0.352153  343.58 1.0167
## beta[10] -0.7435 -0.62027 -0.306527 1560.15 1.0055
## beta[11] -0.3848 -0.21089  0.265530 2526.05 1.0001
## beta[12] -0.7887 -0.62767 -0.120147 2135.38 0.9998
## beta[13] -0.3174 -0.13443  0.337376 2011.79 0.9996
## beta[14] -0.8367 -0.70695 -0.380259 1500.69 1.0039
## beta[15] -0.6215 -0.48305 -0.101656 3217.04 1.0000
## beta[16] -0.4176 -0.24191  0.236593 2553.73 1.0000
## beta[17] -0.9113 -0.77235 -0.484649  241.51 1.0223
## beta[18] -0.2948 -0.12996  0.280308  908.15 1.0070
## beta[19] -0.7029 -0.58728 -0.338508 1170.64 1.0083
## beta[20] -0.7776 -0.61946 -0.142828 2614.09 1.0002
## beta[21] -0.7042 -0.53545 -0.060459 1859.96 1.0019
## beta[22] -0.7836 -0.62851 -0.180226 2241.06 1.0020
## beta[23] -0.8403 -0.67584 -0.249971 2228.31 1.0004
## beta[24] -0.7438 -0.59295 -0.226362 2572.73 1.0011
## beta[25] -0.5837 -0.38593  0.199748  130.01 1.0319
## beta[26] -0.6890 -0.59289 -0.396894  496.55 1.0125
## beta[27] -0.8474 -0.68141 -0.221428 1547.07 1.0031
## beta[28] -0.4646 -0.31015  0.076737 2839.21 1.0002
## beta[29] -0.3110 -0.11950  0.377378 2272.53 0.9999
## beta[30] -0.3458 -0.16830  0.313734 2438.11 1.0003
## beta[31] -0.8011 -0.63590 -0.182717 1752.74 0.9999
## beta[32] -0.6177 -0.46762  0.002925 2273.35 1.0003
## beta[33] -0.7144 -0.55339 -0.064067 2192.57 1.0012
## beta[34] -0.6751 -0.53008 -0.153440 3254.99 0.9999
## beta[35] -0.2283 -0.05385  0.333843 1598.34 1.0008
## beta[36] -0.6949 -0.48209  0.093874  355.79 1.0139
## beta[37] -0.3208 -0.14269  0.299151 1878.28 1.0003
## beta[38] -0.2853 -0.10449  0.348998 1382.16 1.0057
## beta[39] -0.6962 -0.52724 -0.063401 1747.22 1.0057
## beta[40] -0.8471 -0.69100 -0.257290 1976.02 1.0007
## beta[41] -0.7659 -0.57347 -0.083943  558.16 1.0108
## beta[42] -0.7132 -0.54868 -0.041003 2582.84 1.0010
## beta[43] -0.9799 -0.78806 -0.580515   61.38 1.0706
## beta[44] -0.7484 -0.61506 -0.308222  210.22 1.0269
## beta[45] -0.8059 -0.67945 -0.396133  691.41 1.0131
## beta[46] -0.6631 -0.50747 -0.000283 2320.13 1.0013
## beta[47] -0.6646 -0.52328 -0.228591  256.82 1.0213
## beta[48] -0.5768 -0.41355  0.072035 1757.75 1.0030
## beta[49] -0.8829 -0.74900 -0.483152  225.35 1.0242
## beta[50] -0.8470 -0.68133 -0.224134 1781.47 1.0010
## beta[51] -0.7796 -0.61675 -0.155584 2366.41 1.0016
## beta[52] -0.8354 -0.67620 -0.143081 1792.64 1.0029
## beta[53] -0.7913 -0.64928 -0.229146 2760.38 1.0008
## beta[54] -0.8705 -0.74239 -0.472199  253.19 1.0224
## beta[55] -0.5009 -0.35154  0.070935 1020.09 1.0074
## beta[56] -0.7135 -0.58158 -0.261874  351.99 1.0188
## beta[57] -0.5925 -0.44799 -0.037115 2826.87 0.9998
## beta[58] -0.8209 -0.65138 -0.189894 1817.34 1.0041
## beta[59] -0.8182 -0.68018 -0.325017 1066.98 1.0094
## beta[60] -0.7784 -0.60895 -0.143893 2046.49 0.9994
## beta[61] -0.3374 -0.15442  0.293728  152.40 1.0293
## beta[62] -0.6351 -0.37532  0.286147  108.61 1.0372
## beta[63] -0.7415 -0.55242 -0.028426  526.10 1.0126
## beta[64] -0.7635 -0.61097 -0.184571 2849.62 1.0007
## beta[65] -0.9009 -0.70747 -0.158222 1698.54 1.0004
## beta[66] -0.5243 -0.38808 -0.090460 1918.23 1.0026
## beta[67] -0.2779 -0.07869  0.361893  118.38 1.0410
## beta[68] -0.6700 -0.49985  0.062678 2224.87 1.0009
## beta[69] -0.3377 -0.15905  0.323426 2349.18 1.0000
## beta[70] -0.6589 -0.53708 -0.285879  234.74 1.0188
## beta[71] -0.7619 -0.64673 -0.381557  419.44 1.0163
## beta[72] -0.7703 -0.61446 -0.128527 2223.85 0.9997
## beta[73] -0.8678 -0.69465 -0.199582 1904.94 0.9998
## beta[74] -0.7920 -0.62092 -0.124534 2473.75 1.0002
## beta[75] -0.5564 -0.37498  0.109474  658.15 1.0142
## beta[76] -0.8781 -0.70533 -0.276051 1911.98 1.0006
## beta[77] -0.8000 -0.66070 -0.346094  468.80 1.0157
## beta[78] -0.3948 -0.23555  0.117353 1816.32 1.0036
## beta[79] -0.7783 -0.63684 -0.253118 1444.83 1.0040
## beta[80] -0.6984 -0.58338 -0.347865  193.31 1.0257
## beta[81] -0.6071 -0.40165  0.128262  197.28 1.0251
## beta[82] -0.7655 -0.59331 -0.148378 2205.65 1.0014
## beta[83] -1.0758 -0.89990 -0.636220   77.13 1.0574
## beta[84] -0.6307 -0.48539 -0.074515 3079.33 1.0001
## beta[85] -0.8140 -0.64010 -0.119824 2506.25 1.0024
# Posterior summary (mean, quantiles, n_eff, Rhat) for the residual SD `sigma`
summary(fit_slope_partial, pars = c("sigma"))$summary
##         mean   se_mean      sd   2.5%    25%    50%
## sigma 0.7181 0.0008687 0.01791 0.6832 0.7058 0.7177
##          75%  97.5% n_eff Rhat
## sigma 0.7299 0.7545 425.2 1.01
# Posterior summaries for the group-level regression coefficients:
# `gamma_a` (intercept hyper-parameters) and `gamma_b` (slope hyper-parameters)
summary(fit_slope_partial, pars = c("gamma_a", "gamma_b"))$summary
##               mean  se_mean      sd    2.5%     25%
## gamma_a[1]  1.4927 0.001535 0.04357  1.4112  1.4624
## gamma_a[2]  0.6883 0.003885 0.11651  0.4555  0.6077
## gamma_b[1] -0.6510 0.003419 0.08049 -0.8095 -0.7033
## gamma_b[2] -0.4569 0.007436 0.21253 -0.8740 -0.6002
##                50%     75%    97.5% n_eff  Rhat
## gamma_a[1]  1.4918  1.5210  1.58160 805.4 1.007
## gamma_a[2]  0.6901  0.7718  0.90418 899.4 1.002
## gamma_b[1] -0.6518 -0.5987 -0.49104 554.4 1.004
## gamma_b[2] -0.4498 -0.3239 -0.03779 817.0 1.002
# Visual MCMC diagnostic: trace plot of the chains for `sigma`
rstan::traceplot(object = fit_slope_partial, pars = "sigma")