Introduction to gglasso

Install the package

# on CRAN
install.packages("gglasso")

# dev version from GitHub (requires the pacman package)
pacman::p_load_gh('emeryyi/gglasso')

Least squares regression

library(gglasso)

# load bardet data set
data(bardet)

# define group membership: 20 groups of 5 consecutive predictors
group1 <- rep(1:20, each = 5)

# fit the group lasso penalized least squares regression
fit_ls <- gglasso(x = bardet$x, y = bardet$y, group = group1, loss = "ls")

plot(fit_ls)
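
The fitted object stores the entire solution path, which can be inspected directly. A minimal sketch, assuming the fit keeps the penalty sequence in the lambda component and the coefficient matrix in the beta component:

# first few values of the automatically generated lambda sequence
head(fit_ls$lambda)

# how many groups have at least one nonzero coefficient at each lambda
b <- as.matrix(fit_ls$beta)   # p x nlambda coefficient matrix
n_active <- apply(b != 0, 2, function(nz) length(unique(group1[nz])))
head(n_active)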

coef(fit_ls)[1:5, 90:100]
##                      s89          s90          s91          s92          s93
## (Intercept)  8.099354325  8.098922472  8.098531366  8.098175719  8.097849146
## V1          -0.119580203 -0.120877799 -0.122079683 -0.123223779 -0.124310183
## V2          -0.113742329 -0.114834411 -0.115853997 -0.116837630 -0.117782854
## V3          -0.002584792 -0.003487571 -0.004328519 -0.005134215 -0.005904892
## V4          -0.084771705 -0.088304073 -0.091674509 -0.094978960 -0.098212775
##                      s94          s95          s96          s97          s98
## (Intercept)  8.097574095  8.097295166  8.097058895  8.096833259  8.096637676
## V1          -0.125274109 -0.126284595 -0.127173301 -0.128011016 -0.128738414
## V2          -0.118630121 -0.119526679 -0.120326451 -0.121086672 -0.121754134
## V3          -0.006593702 -0.007323107 -0.007970047 -0.008583011 -0.009116809
## V4          -0.101161988 -0.104349829 -0.107241330 -0.110045942 -0.112543755
##                      s99
## (Intercept)  8.096455264
## V1          -0.129453437
## V2          -0.122415680
## V3          -0.009645386
## V4          -0.115058449
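
Predictions along the path are available through the predict method. A minimal sketch, reusing the first rows of bardet$x as stand-in new data and assuming type = "link" returns fitted responses for least squares fits:

# fitted responses for 5 observations at the 100th lambda of the path
predict(fit_ls, newx = bardet$x[1:5, ], s = fit_ls$lambda[100], type = "link")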

Cross-Validation

# cross-validate the group lasso fit over the lambda path
cvfit_ls <- cv.gglasso(x = bardet$x, y = bardet$y, group = group1, loss = "ls")

plot(cvfit_ls)

coef(cvfit_ls, s = "lambda.min")
##                         1
## (Intercept)  8.226716e+00
## V1          -9.635740e-03
## V2          -5.084061e-02
## V3           3.291428e-02
## V4           8.459574e-03
## V5          -8.452885e-02
## V6           2.706252e-04
## V7           9.201146e-04
## V8           6.286221e-04
## V9           4.431794e-05
## V10         -1.692116e-03
## V11          1.691817e-02
## V12         -1.733717e-02
## V13          5.052269e-05
## V14          1.467610e-03
## V15         -3.487459e-02
## V16          6.840565e-03
## V17          4.161445e-02
## V18         -2.115620e-02
## V19         -5.998927e-03
## V20         -7.223187e-02
## V21          5.674403e-02
## V22          7.860444e-02
## V23         -4.982707e-02
## V24         -1.515584e-02
## V25         -1.908265e-01
## V26          1.015941e-01
## V27          3.579452e-02
## V28         -2.235126e-02
## V29          5.299329e-03
## V30         -1.783018e-01
## V31          0.000000e+00
## V32          0.000000e+00
## V33          0.000000e+00
## V34          0.000000e+00
## V35          0.000000e+00
## V36          3.454886e-02
## V37         -2.116777e-02
## V38          8.384875e-03
## V39          1.167973e-02
## V40         -6.184521e-02
## V41          0.000000e+00
## V42          0.000000e+00
## V43          0.000000e+00
## V44          0.000000e+00
## V45          0.000000e+00
## V46         -4.970931e-02
## V47          3.588603e-02
## V48          6.571630e-02
## V49          1.073318e-02
## V50          1.256063e-02
## V51         -1.310646e-02
## V52         -6.181994e-04
## V53          8.971670e-02
## V54          1.462995e-01
## V55          4.174255e-02
## V56          0.000000e+00
## V57          0.000000e+00
## V58          0.000000e+00
## V59          0.000000e+00
## V60          0.000000e+00
## V61         -4.671701e-02
## V62          2.198836e-02
## V63          1.783872e-03
## V64          7.292415e-02
## V65          1.600456e-01
## V66         -1.927752e-02
## V67          2.996560e-03
## V68          2.244526e-02
## V69          2.550181e-02
## V70          2.543310e-03
## V71         -9.518966e-03
## V72         -4.846444e-03
## V73          3.046000e-02
## V74         -2.054210e-03
## V75          1.499520e-02
## V76         -1.614234e-02
## V77          1.387057e-02
## V78          4.126647e-02
## V79         -6.838469e-04
## V80          1.649933e-02
## V81          5.027452e-03
## V82         -1.109285e-02
## V83          5.654746e-03
## V84          5.367879e-03
## V85         -1.945426e-02
## V86          1.094152e-02
## V87         -5.227341e-02
## V88          1.192871e-02
## V89          3.113514e-02
## V90         -7.470814e-02
## V91          1.268292e-03
## V92         -2.220488e-04
## V93         -4.511828e-04
## V94          9.765719e-04
## V95         -1.962050e-03
## V96          0.000000e+00
## V97          0.000000e+00
## V98          0.000000e+00
## V99          0.000000e+00
## V100         0.000000e+00
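
The cross-validated object also records the selected penalty values, and predictions can be made from it directly. A minimal sketch, assuming predict accepts s = "lambda.min" in the same way coef does above:

# penalty minimizing the CV error, and the 1-SE-rule penalty
cvfit_ls$lambda.min
cvfit_ls$lambda.1se

# predictions at lambda.min, reusing training rows as stand-in new data
predict(cvfit_ls, newx = bardet$x[1:5, ], s = "lambda.min")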

Weighted least squares regression

We can also perform weighted least squares regression by specifying loss='wls' and supplying an n × n observation weight matrix through the weight argument, where n is the number of observations. Note that cross-validation is not implemented for loss='wls'.

# generate an AR(1)-style covariance matrix to use as the observation weight matrix
times <- seq_along(bardet$y)
rho <- 0.5
sigma <- 1
H <- abs(outer(times, times, "-"))   # |i - j| for every pair of observations
V <- sigma * rho^H                   # correlations decay geometrically with lag
n <- nrow(V)
V[cbind(1:n, 1:n)] <- V[cbind(1:n, 1:n)] * sigma   # scale the diagonal to sigma^2

# loosen the convergence tolerance (eps) from its default to speed up the vignette build
fit_wls <- gglasso(x = bardet$x, y = bardet$y, group = group1, loss = "wls", 
                   weight = V, eps = 1e-4)

plot(fit_wls)

coef(fit_wls)[1:5, 90:100]
##                     s89         s90         s91         s92         s93
## (Intercept)  8.09429262  8.09340481  8.09254573  8.09170743  8.09089247
## V1          -0.13922372 -0.14077803 -0.14222609 -0.14359110 -0.14487482
## V2          -0.15966042 -0.16117772 -0.16261019 -0.16397683 -0.16527730
## V3           0.03917529  0.03880296  0.03847035  0.03816594  0.03788642
## V4          -0.16548208 -0.17057112 -0.17546237 -0.18021267 -0.18481370
##                     s94         s95         s96         s97         s98
## (Intercept)  8.09011527  8.08935394  8.08862146  8.08793054  8.08727098
## V1          -0.14606257 -0.14719352 -0.14824987 -0.14921624 -0.15011520
## V2          -0.16649386 -0.16766459 -0.16877053 -0.16979410 -0.17075592
## V3           0.03763241  0.03739356  0.03717453  0.03697953  0.03680099
## V4          -0.18919074 -0.19347289 -0.19758499 -0.20145156 -0.20513769
##                     s99
## (Intercept)  8.08664325
## V1          -0.15094837
## V2          -0.17165664
## V3           0.03663899
## V4          -0.20863833
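
Because the penalty acts on whole groups, it can be useful to see which groups remain in the model at the end of the path. A minimal sketch in base R, assuming the coefficient matrix is laid out as in the printouts above (intercept in the first row, one column per lambda, with lambda decreasing from left to right):

beta_wls <- as.matrix(coef(fit_wls))         # (intercept + 100 coefficients) x 100 lambdas
beta_last <- beta_wls[-1, ncol(beta_wls)]    # coefficients at the smallest lambda
sort(unique(group1[beta_last != 0]))         # indices of the groups still in the model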