Introduction to gglasso

Install the package

# Stable release from CRAN
install.packages("gglasso")

# Development version from GitHub (requires the pacman package)
pacman::p_load_gh('emeryyi/gglasso')

Least squares regression

library(gglasso)

# Attach the bardet example data set bundled with gglasso
data(bardet)

# Assign the predictors to 20 groups of 5 consecutive columns each
group1 <- rep(seq_len(20), each = 5)

# Group-lasso fit with least-squares loss over the full lambda path
fit_ls <- gglasso(x = bardet$x, y = bardet$y, group = group1, loss = "ls")

# Coefficient paths for the least-squares fit
plot(fit_ls)

# Inspect the first five coefficient rows at lambda indices 90 through 100
coef(fit_ls)[seq_len(5), 90:100]
##                      s89          s90          s91          s92          s93
## (Intercept)  8.099354325  8.098922472  8.098531366  8.098175719  8.097849146
## V1          -0.119580203 -0.120877799 -0.122079683 -0.123223779 -0.124310183
## V2          -0.113742329 -0.114834411 -0.115853997 -0.116837630 -0.117782854
## V3          -0.002584792 -0.003487571 -0.004328519 -0.005134215 -0.005904892
## V4          -0.084771705 -0.088304073 -0.091674509 -0.094978960 -0.098212775
##                      s94          s95          s96          s97          s98
## (Intercept)  8.097574095  8.097295166  8.097058895  8.096833259  8.096637676
## V1          -0.125274109 -0.126284595 -0.127173301 -0.128011016 -0.128738414
## V2          -0.118630121 -0.119526679 -0.120326451 -0.121086672 -0.121754134
## V3          -0.006593702 -0.007323107 -0.007970047 -0.008583011 -0.009116809
## V4          -0.101161988 -0.104349829 -0.107241330 -0.110045942 -0.112543755
##                      s99
## (Intercept)  8.096455264
## V1          -0.129453437
## V2          -0.122415680
## V3          -0.009645386
## V4          -0.115058449

Cross-Validation

# Cross-validate the group-lasso fit to choose lambda
cvfit_ls <- cv.gglasso(x = bardet$x, y = bardet$y, group = group1, loss = "ls")

# Cross-validated error curve across the lambda path
plot(cvfit_ls)

# Coefficients at the lambda minimizing the cross-validation error
coef(cvfit_ls, s = "lambda.min")
##                         1
## (Intercept)  8.2691967049
## V1           0.0098651704
## V2          -0.0347026239
## V3           0.0194973048
## V4           0.0056263366
## V5          -0.0658836756
## V6           0.0000000000
## V7           0.0000000000
## V8           0.0000000000
## V9           0.0000000000
## V10          0.0000000000
## V11          0.0000000000
## V12          0.0000000000
## V13          0.0000000000
## V14          0.0000000000
## V15          0.0000000000
## V16          0.0161916042
## V17          0.0334440842
## V18         -0.0159434170
## V19         -0.0052039269
## V20         -0.0745805132
## V21          0.0848188145
## V22          0.0727636287
## V23         -0.0570561461
## V24         -0.0075842476
## V25         -0.2414791825
## V26          0.0581134259
## V27          0.0422292816
## V28         -0.0024258734
## V29          0.0016724096
## V30         -0.1328795205
## V31          0.0000000000
## V32          0.0000000000
## V33          0.0000000000
## V34          0.0000000000
## V35          0.0000000000
## V36          0.0273042625
## V37         -0.0208736738
## V38          0.0068224660
## V39          0.0088068719
## V40         -0.0552005931
## V41          0.0000000000
## V42          0.0000000000
## V43          0.0000000000
## V44          0.0000000000
## V45          0.0000000000
## V46         -0.0085841251
## V47          0.0022014471
## V48          0.0166933932
## V49          0.0054600500
## V50          0.0015450213
## V51         -0.0150636572
## V52         -0.0183732351
## V53          0.0804745351
## V54          0.1197794237
## V55          0.0364945211
## V56          0.0000000000
## V57          0.0000000000
## V58          0.0000000000
## V59          0.0000000000
## V60          0.0000000000
## V61         -0.0132269844
## V62         -0.0018888114
## V63          0.0063121102
## V64          0.0232656988
## V65          0.0365704810
## V66         -0.0125006459
## V67         -0.0010445458
## V68          0.0250793929
## V69          0.0186547430
## V70          0.0013999899
## V71         -0.0042615550
## V72         -0.0043336289
## V73          0.0143064001
## V74          0.0012174606
## V75          0.0088968546
## V76         -0.0036245947
## V77          0.0011942561
## V78          0.0097608919
## V79          0.0011364566
## V80          0.0037925615
## V81          0.0017492770
## V82         -0.0020633464
## V83          0.0005003865
## V84          0.0008036183
## V85         -0.0047434207
## V86          0.0075824503
## V87         -0.0161820733
## V88          0.0054111298
## V89          0.0074636614
## V90         -0.0277389859
## V91          0.0000000000
## V92          0.0000000000
## V93          0.0000000000
## V94          0.0000000000
## V95          0.0000000000
## V96          0.0000000000
## V97          0.0000000000
## V98          0.0000000000
## V99          0.0000000000
## V100         0.0000000000

Weighted least squares regression

We can also perform weighted least-squares regression by specifying loss='wls' and providing an n × n weight matrix via the weight argument, where n is the number of observations. Note that cross-validation is NOT IMPLEMENTED for loss='wls'.

# Build the weight matrix: entry (i, j) is sigma * rho^|i - j|,
# with the diagonal then scaled by a further factor of sigma
obs_index <- seq_along(bardet$y)
rho <- 0.5
sigma <- 1
lag <- abs(outer(obs_index, obs_index, "-"))
V <- sigma * rho^lag
diag(V) <- diag(V) * sigma

# Loosen the convergence tolerance so the vignette builds quickly
fit_wls <- gglasso(x = bardet$x, y = bardet$y, group = group1, loss = "wls", 
                   weight = V, eps = 1e-4)

# Coefficient paths for the weighted least-squares fit
plot(fit_wls)

# First five coefficient rows at lambda indices 90 through 100
coef(fit_wls)[seq_len(5), 90:100]
##                     s89         s90         s91         s92         s93
## (Intercept)  8.09429262  8.09340481  8.09254573  8.09170743  8.09089247
## V1          -0.13922372 -0.14077803 -0.14222609 -0.14359110 -0.14487482
## V2          -0.15966042 -0.16117772 -0.16261019 -0.16397683 -0.16527730
## V3           0.03917529  0.03880296  0.03847035  0.03816594  0.03788642
## V4          -0.16548208 -0.17057112 -0.17546237 -0.18021267 -0.18481370
##                     s94         s95         s96         s97         s98
## (Intercept)  8.09011527  8.08935394  8.08862146  8.08793054  8.08727098
## V1          -0.14606257 -0.14719352 -0.14824987 -0.14921624 -0.15011520
## V2          -0.16649386 -0.16766459 -0.16877053 -0.16979410 -0.17075592
## V3           0.03763241  0.03739356  0.03717453  0.03697953  0.03680099
## V4          -0.18919074 -0.19347289 -0.19758499 -0.20145156 -0.20513769
##                     s99
## (Intercept)  8.08664325
## V1          -0.15094837
## V2          -0.17165664
## V3           0.03663899
## V4          -0.20863833