Introduction to gglasso

Install the package

# stable release from CRAN
install.packages("gglasso")

# development version from GitHub; guard so the snippet also works
# on machines where pacman is not yet installed
if (!requireNamespace("pacman", quietly = TRUE)) {
  install.packages("pacman")
}
pacman::p_load_gh("emeryyi/gglasso")

Least squares regression

library(gglasso)

# load the bardet data set shipped with gglasso
data(bardet)

# assign predictors to groups of 5 consecutive columns; deriving the
# group count from ncol(x) (rather than hard-coding 20) keeps the group
# vector valid if the predictor matrix ever changes size
group1 <- rep(seq_len(ncol(bardet$x) / 5), each = 5)

# group-lasso fit with least-squares loss over the full lambda path
fit_ls <- gglasso(x = bardet$x, y = bardet$y, group = group1, loss = "ls")

# coefficient paths as a function of the penalty
plot(fit_ls)

# first five coefficients at the last 11 lambda values on the path
coef(fit_ls)[1:5, 90:100]
##                      s89          s90          s91          s92          s93
## (Intercept)  8.099354325  8.098922472  8.098531366  8.098175719  8.097849146
## V1          -0.119580203 -0.120877799 -0.122079683 -0.123223779 -0.124310183
## V2          -0.113742329 -0.114834411 -0.115853997 -0.116837630 -0.117782854
## V3          -0.002584792 -0.003487571 -0.004328519 -0.005134215 -0.005904892
## V4          -0.084771705 -0.088304073 -0.091674509 -0.094978960 -0.098212775
##                      s94          s95          s96          s97          s98
## (Intercept)  8.097574095  8.097295166  8.097058895  8.096833259  8.096637676
## V1          -0.125274109 -0.126284595 -0.127173301 -0.128011016 -0.128738414
## V2          -0.118630121 -0.119526679 -0.120326451 -0.121086672 -0.121754134
## V3          -0.006593702 -0.007323107 -0.007970047 -0.008583011 -0.009116809
## V4          -0.101161988 -0.104349829 -0.107241330 -0.110045942 -0.112543755
##                      s99
## (Intercept)  8.096455264
## V1          -0.129453437
## V2          -0.122415680
## V3          -0.009645386
## V4          -0.115058449

Cross-Validation

# fix the RNG state so the random fold assignment (and hence the
# selected lambda and the printed coefficients) is reproducible
set.seed(1)

cvfit_ls <- cv.gglasso(x = bardet$x, y = bardet$y, group = group1, loss = "ls")

# cross-validation error curve across the lambda path
plot(cvfit_ls)

# coefficients at the lambda minimizing the CV error
coef(cvfit_ls, s = "lambda.min")
##                         1
## (Intercept)  8.3220961522
## V1           0.0000000000
## V2           0.0000000000
## V3           0.0000000000
## V4           0.0000000000
## V5           0.0000000000
## V6           0.0000000000
## V7           0.0000000000
## V8           0.0000000000
## V9           0.0000000000
## V10          0.0000000000
## V11          0.0066279801
## V12          0.0064097672
## V13         -0.0023768851
## V14         -0.0022990388
## V15         -0.0158384056
## V16          0.0126304590
## V17          0.0151688970
## V18         -0.0092429314
## V19         -0.0028038245
## V20         -0.0308986650
## V21          0.1022361141
## V22          0.0843942797
## V23         -0.0731020157
## V24         -0.0149126702
## V25         -0.2063623510
## V26          0.0090735649
## V27          0.0137016965
## V28         -0.0011520470
## V29         -0.0009051471
## V30         -0.0240539339
## V31          0.0000000000
## V32          0.0000000000
## V33          0.0000000000
## V34          0.0000000000
## V35          0.0000000000
## V36          0.0000000000
## V37          0.0000000000
## V38          0.0000000000
## V39          0.0000000000
## V40          0.0000000000
## V41          0.0000000000
## V42          0.0000000000
## V43          0.0000000000
## V44          0.0000000000
## V45          0.0000000000
## V46          0.0000000000
## V47          0.0000000000
## V48          0.0000000000
## V49          0.0000000000
## V50          0.0000000000
## V51         -0.0093100046
## V52         -0.0237302593
## V53          0.0422546319
## V54          0.0545519902
## V55          0.0194190745
## V56          0.0000000000
## V57          0.0000000000
## V58          0.0000000000
## V59          0.0000000000
## V60          0.0000000000
## V61          0.0000000000
## V62          0.0000000000
## V63          0.0000000000
## V64          0.0000000000
## V65          0.0000000000
## V66          0.0000000000
## V67          0.0000000000
## V68          0.0000000000
## V69          0.0000000000
## V70          0.0000000000
## V71          0.0000000000
## V72          0.0000000000
## V73          0.0000000000
## V74          0.0000000000
## V75          0.0000000000
## V76          0.0000000000
## V77          0.0000000000
## V78          0.0000000000
## V79          0.0000000000
## V80          0.0000000000
## V81          0.0000000000
## V82          0.0000000000
## V83          0.0000000000
## V84          0.0000000000
## V85          0.0000000000
## V86          0.0000000000
## V87          0.0000000000
## V88          0.0000000000
## V89          0.0000000000
## V90          0.0000000000
## V91          0.0000000000
## V92          0.0000000000
## V93          0.0000000000
## V94          0.0000000000
## V95          0.0000000000
## V96          0.0000000000
## V97          0.0000000000
## V98          0.0000000000
## V99          0.0000000000
## V100         0.0000000000

Weighted least-squares regression

We can also perform weighted least-squares regression by specifying loss='wls' and providing an n × n weight matrix via the weight argument, where n is the number of observations. Note that cross-validation is NOT IMPLEMENTED for loss='wls'.

# build an AR(1)-style weight matrix: entry (i, j) decays as rho^|i - j|
obs <- seq_along(bardet$y)
rho <- 0.5
sigma <- 1
H <- abs(outer(obs, obs, "-"))
V <- sigma * rho^H
# scale the diagonal by sigma (a no-op here since sigma = 1)
diag(V) <- diag(V) * sigma

# a looser convergence tolerance (eps) keeps the vignette build fast
fit_wls <- gglasso(
  x = bardet$x,
  y = bardet$y,
  group = group1,
  loss = "wls",
  weight = V,
  eps = 1e-4
)

# coefficient paths for the weighted fit
plot(fit_wls)

# inspect a slice of the solution path (first five rows, last 11 lambdas)
coef(fit_wls)[1:5, 90:100]
##                     s89         s90         s91         s92         s93
## (Intercept)  8.09429262  8.09340481  8.09254573  8.09170743  8.09089247
## V1          -0.13922372 -0.14077803 -0.14222609 -0.14359110 -0.14487482
## V2          -0.15966042 -0.16117772 -0.16261019 -0.16397683 -0.16527730
## V3           0.03917529  0.03880296  0.03847035  0.03816594  0.03788642
## V4          -0.16548208 -0.17057112 -0.17546237 -0.18021267 -0.18481370
##                     s94         s95         s96         s97         s98
## (Intercept)  8.09011527  8.08935394  8.08862146  8.08793054  8.08727098
## V1          -0.14606257 -0.14719352 -0.14824987 -0.14921624 -0.15011520
## V2          -0.16649386 -0.16766459 -0.16877053 -0.16979410 -0.17075592
## V3           0.03763241  0.03739356  0.03717453  0.03697953  0.03680099
## V4          -0.18919074 -0.19347289 -0.19758499 -0.20145156 -0.20513769
##                     s99
## (Intercept)  8.08664325
## V1          -0.15094837
## V2          -0.17165664
## V3           0.03663899
## V4          -0.20863833