Computes forecast combination weights using GLMNET (elastic net regularized) regression.

comb_GLMNET(x, custom_error = NULL)

Arguments

x

An object of class 'foreccomb'. Contains training set (actual values + matrix of model forecasts) and optionally a test set.

Value

Returns an object of class 'foreccomb_res' with the following components:

Method

Returns the best-fit forecast combination method.

Models

Returns the individual input models that were used for the forecast combinations.

Weights

Returns the combination weights obtained by applying the combination method to the training set.

Intercept

Returns the intercept of the linear regression.

Fitted

Returns the fitted values of the combination method for the training set.

Accuracy_Train

Returns range of summary measures of the forecast accuracy for the training set.

Forecasts_Test

Returns forecasts produced by the combination method for the test set. Only returned if input included a forecast matrix for the test set.

Accuracy_Test

Returns range of summary measures of the forecast accuracy for the test set. Only returned if input included a forecast matrix and a vector of actual values for the test set.

Input_Data

Returns the data forwarded to the method.

Details

The function integrates the GLMNET regression forecast combination implementation of the ForecastCombinations package into ForecastComb. The model is fitted via cross-validated elastic net regression (glmnet::cv.glmnet).

The results are stored in an object of class 'foreccomb_res', for which separate plot and summary functions are provided.

See also

Forecast_comb, foreccomb, plot.foreccomb_res, summary.foreccomb_res, accuracy

Examples

obs <- rnorm(100)
preds <- matrix(rnorm(1000, 1), 100, 10)
train_o<-obs[1:80]
train_p<-preds[1:80,]
test_o<-obs[81:100]
test_p<-preds[81:100,]

data<-ForecastComb::foreccomb(train_o, train_p, test_o, test_p)
#> Registered S3 methods overwritten by 'ForecastComb':
#>   method                      from 
#>   plot.foreccomb_res          ahead
#>   predict.foreccomb_res       ahead
#>   print.foreccomb_res_summary ahead
#>   summary.foreccomb_res       ahead
ahead::comb_GLMNET(data)
#> $Method
#> [1] "GLMNET Regression Regression"
#> 
#> $Models
#>  [1] "Series 1"  "Series 2"  "Series 3"  "Series 4"  "Series 5"  "Series 6" 
#>  [7] "Series 7"  "Series 8"  "Series 9"  "Series 10"
#> 
#> $Fitted
#> Time Series:
#> Start = 1 
#> End = 80 
#> Frequency = 1 
#>  [1] -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191
#>  [7] -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191
#> [13] -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191
#> [19] -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191
#> [25] -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191
#> [31] -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191
#> [37] -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191
#> [43] -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191
#> [49] -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191
#> [55] -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191
#> [61] -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191
#> [67] -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191
#> [73] -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191
#> [79] -0.1117191 -0.1117191
#> 
#> $Accuracy_Train
#>                    ME      RMSE       MAE           MPE    MAPE
#> Test set 8.152523e-18 0.9850397 0.8206731 -1.722997e-14 734.586
#> 
#> $Input_Data
#> $Input_Data$Actual_Train
#> Time Series:
#> Start = 1 
#> End = 80 
#> Frequency = 1 
#>  [1]  0.21246298  0.34654149  0.39365925  1.47734294  0.78351580  1.30828925
#>  [7] -1.23421588 -1.94728232 -0.52346389  0.45521213 -1.29385864 -0.13065036
#> [13] -1.90498277  1.03092252 -0.41481013  1.07306729 -0.45833007  0.52027485
#> [19]  0.04826655 -0.97844830  1.80817185  1.28166798 -0.98606533  0.51245947
#> [25] -0.90940181 -1.26700565 -1.22679206  0.02029513 -1.01276743 -0.35192885
#> [31] -0.76421131 -0.23535934 -0.08883041 -0.99738217  0.74651348  0.31164607
#> [37] -0.13175415  1.46999303  0.85676897  0.77175650 -0.53605677 -0.55875190
#> [43] -0.87708205  0.28935714 -0.07299547 -1.25676105 -0.56292608 -0.22946602
#> [49] -0.88345122 -1.69699786  0.12526704 -0.53319322  1.31590195  1.71816602
#> [55] -0.56764815 -0.01546457 -0.68217052  1.97025092  0.42827299 -1.80160435
#> [61]  1.04800029  0.50220940 -0.21879221 -1.07714866  0.76300604  1.43507860
#> [67] -0.36209006 -1.51169957 -0.41839249 -0.97105126  1.06908434 -1.20466020
#> [73]  0.04351017 -1.37623561  0.82465795  1.54098071 -1.17290347 -1.96460923
#> [79]  0.25631701 -0.28672556
#> 
#> $Input_Data$Forecasts_Train
#> Time Series:
#> Start = 1 
#> End = 80 
#> Frequency = 1 
#>       Series 1    Series 2    Series 3    Series 4     Series 5   Series 6
#>  1 -0.51224649 -0.45001047 -0.04356552  0.49672575  1.342560929  0.2492455
#>  2  0.14814473 -0.13973760  1.68982356  2.16069663  0.585023352  3.2096813
#>  3  0.42116313 -0.22086192  0.38283045  0.60330473  0.995723574  2.0879933
#>  4  2.03205051  0.60917693  0.34857490  1.33375854  0.696594835  2.4126182
#>  5  0.92876173  0.26736926  0.09022575  0.69213004  1.115802475  1.3819462
#>  6  3.04869307  0.46667814 -0.47023098  0.67510049 -0.168167855  1.3798269
#>  7 -0.01670488  0.77739539  1.57705557  0.24686180  1.422663130  0.1313116
#>  8  1.19229407  0.42456934 -0.13961902  2.44035834  1.643716141  1.1473217
#>  9  3.83560789  0.14381731  0.62390798  0.33472402  0.274511877  1.3820358
#> 10  2.94256044  0.90928543  0.05242335  1.43965059  1.395673155 -0.6108412
#> 11  2.00662228  0.41871639 -0.21915767  1.01330597  1.631793097  1.9098183
#> 12 -0.05893329  0.58376612  0.31378307 -0.28071021  1.294720804  1.6208598
#> 13  1.87258096  0.63975141  0.41333250  3.34476027  2.149954872  0.5742539
#> 14  1.31942950  0.41168193 -1.43781667 -0.01120514  1.191696444 -0.2641612
#> 15 -1.42745569  0.38535309  0.43032424  2.34857465 -0.577006210 -0.3302695
#> 16  0.68097355  0.08607968  1.01535604  3.92565860  1.042551502  0.7973448
#> 17 -0.54929210  3.39886338  0.96615881  1.41079006  0.909058278  1.8142556
#> 18  0.26903532  0.63863997  0.47705673  0.90237294 -0.052101051  0.4475389
#> 19 -0.59096489  0.82739111  0.16045587  1.46501919  0.559512999  0.5807107
#> 20  1.82751821  1.97747728 -1.31002416  0.39021709  1.504383799  2.7818016
#> 21 -0.30409444  2.11577397 -0.31194987 -0.25575659  0.277187872  1.2074286
#> 22  0.76423056 -0.40926319  2.02014971  0.18586756  0.671832400  0.5648789
#> 23 -0.46327023  1.31880313  0.10677493  2.19776858  1.024232743 -0.2410406
#> 24  0.82644537  0.03593709  0.74705950  0.71658948  2.076289908  0.2175615
#> 25  2.49880853  0.16122984  1.76182756 -0.15979461  1.698898737  1.8615229
#> 26  1.77395286  0.24876900  1.55341466  1.29237882  0.006720057  2.2411506
#> 27  1.88703392  1.07596251  0.94970087  2.25341448  0.468120975  2.9744071
#> 28  1.80868715 -0.72699460 -0.08043250 -0.57670050  0.403469569  0.7127342
#> 29  0.71406128  0.37437843  1.45171502  2.20283294 -0.944111080 -0.4423329
#> 30  0.50719857  3.96043557  0.22430368  2.90066688  0.122209729  0.5007546
#> 31  0.78745499  2.74338931  3.82509354  0.75670216  1.523595920 -0.6317171
#> 32  2.13824044  0.26162004  2.40620074  1.16933969  1.825201055  0.9777294
#> 33  0.12591135 -0.02482892  2.98256826  0.97612728 -0.316131496  1.2851729
#> 34  0.66262513  1.37844030  2.74654722  1.89161902  1.664840362  2.1255083
#> 35  0.95746703  1.66636889  0.92091355  2.15866270  1.018087791 -0.0436378
#> 36  1.83593021  1.51711481  0.03097920  1.10636697  1.318381403  2.8453819
#> 37  0.42166429  1.54798361  0.48859001  0.61440349  1.093942984  2.1036600
#> 38  2.60748334  0.55658815  1.57352288 -0.38283928  0.913020535 -0.5784783
#> 39 -0.44537763  1.35422746  2.16659217  2.01950238  0.927497339  0.2199252
#> 40 -2.10980677 -0.67440983  1.35864979  0.65165778  0.634377381 -1.1616889
#> 41 -0.02792231  1.02039476  1.39661174  0.51686901  1.316231629  0.8831710
#> 42  2.20162463  4.08873241  2.50387667  0.52400042  0.941787020 -0.3954883
#> 43  0.61120890  0.65401508  0.80915630  0.97833324 -0.833536750  1.6868389
#> 44  1.40713129  0.86971144  1.69736083  3.66848160 -0.253171507  1.8546134
#> 45  0.43506876 -0.29878593  0.93056425  0.65650617  2.300710302  0.6608462
#> 46  3.11252635  0.63837911  0.59845364  0.30094140  1.214454547  2.2317328
#> 47  0.98258419  0.84476315  1.56909694  3.09784693  2.460530774 -0.2449140
#> 48 -0.51533987  2.73615505  0.99833890  1.92780288  1.347533884  0.5277143
#> 49  1.16338170  1.70453907 -1.02503484  1.73945658 -0.433631732  0.5907109
#> 50  0.76983392  2.65794680 -1.55430294  1.02250781  2.027774838  2.6780763
#> 51 -0.40979524  0.78997336  1.68389429 -0.75173271  1.540976733  1.4424340
#> 52  1.80953679  1.46116922  1.87989210  0.86840735  1.797279028  0.8689834
#> 53  0.59782884  0.63721382  1.41579092 -0.09235797  0.916971700  0.9957288
#> 54  1.94697832  0.09576814  0.15830173  1.98862535  0.281072454  1.2991383
#> 55 -0.40579690  1.69824676  0.66247189  0.91479221 -0.957200924  1.6059357
#> 56  0.55531492  0.61687922  1.05125017  1.17692228  0.481208446  1.3023292
#> 57  0.12598998  0.39096982  0.47040466  0.74115120  2.465877355 -0.0382106
#> 58 -0.15048134  1.71117562  1.07097509  1.28114802  1.833655053  1.0553410
#> 59  0.66754210  1.22792817  3.13065557  2.04042540  2.462956363  2.8339819
#> 60 -0.21583264 -0.01856450  1.23656450  0.27879727  0.207064875  0.9775166
#> 61  1.43357904  1.98220029  0.44150324 -0.79417752  2.131229790  1.7001937
#> 62  3.13069262  1.12397608 -0.21197306  0.16399455  0.378987821  0.3021621
#> 63  0.68319517  1.46362652  1.52183245  1.63723724  0.960678951  0.5280580
#> 64  1.11416687 -0.31074769  1.23143860  1.41895388  1.258464201  1.8987766
#> 65  2.21020731  2.26253768 -0.78612947  0.98557603  0.724862751  0.4468699
#> 66  2.16259285  1.65767369  0.48160400  1.11889294  0.677505053  1.9215916
#> 67  1.59844782  0.01272479  1.54234252  0.48803958  2.025800420  0.8587595
#> 68  2.06611816  0.91532957 -0.05236183  2.38681306  1.447539521  0.4213179
#> 69  1.19023111 -0.39491782  2.07930898  0.44763516  1.608121466  1.4486015
#> 70 -0.38259156 -0.43063410 -0.18871693  0.45736696  1.852043700 -0.1758736
#> 71  0.84524063 -0.11834815  2.80312720  0.13296658 -1.056098198  1.9357264
#> 72  1.29021470  0.11247585  2.48328669  1.53909984  1.090880899  1.4386815
#> 73  1.57515604  0.79982783  2.16271231  1.25422663 -0.757503495  1.7147743
#> 74  1.04664277  0.24716669  0.65289625  1.53735052  1.187252226 -0.1225274
#> 75  0.78898272  2.59866017  1.09260415 -1.30108159  1.315960755  2.6014269
#> 76 -0.60101254  1.71181570  1.72949898  1.17933120  0.693265698  2.3862434
#> 77  1.06245486  1.21842761  0.67362644  0.73108285  1.685010922 -1.1961236
#> 78  1.56749571 -0.06248774  0.39340724 -1.02425130  1.055290209  1.1946924
#> 79 -0.48691479  0.41800826  1.50648186  0.31245124  1.128377607  1.1531974
#> 80  1.08417137  2.19576815  3.22508406  0.06058075  1.420187430  0.7934068
#>        Series 7    Series 8    Series 9   Series 10
#>  1  0.534359711  0.74887140  0.64574652  1.37583877
#>  2  1.371595568  2.47629322 -0.24925701  1.18046433
#>  3 -0.441438064  0.80158270  1.88718009  1.80332034
#>  4  1.008632285  1.52850293  1.86668223 -0.35926430
#>  5  0.259740507 -0.95646632  0.09794211  1.46548216
#>  6  0.389195859 -0.25078512 -0.78418046  1.70874956
#>  7  1.280357020 -0.26062242 -0.16608485 -1.00703705
#>  8  0.327951391  0.33925115  0.04657371  1.59477064
#>  9  1.517053713  1.86498688  1.14684347  0.40758604
#> 10  0.991379599 -0.18112342  0.56246376  0.47963096
#> 11  1.021423230  2.19889063  0.71695426  1.80648613
#> 12  0.018164608  0.65444163  0.83360122  0.98052640
#> 13  0.139422095  2.56768853 -0.60576800  1.86986134
#> 14 -0.538718146  0.87430709  1.00075437  0.51726970
#> 15 -0.052349224  1.36422736  0.05070988  2.47587353
#> 16  1.382667603  0.55790668 -0.09003089  2.37762385
#> 17  1.101454248  1.27283355  1.48150253  0.12618098
#> 18  0.170870826  2.15943489 -0.08080197  0.99608920
#> 19  1.724917369  0.85920274  1.13293460 -0.23419222
#> 20  1.359129923  2.68359758  1.27218816  3.14462344
#> 21  0.700195965  1.44084795 -2.07589617  1.69508233
#> 22  1.279544354  3.06475591  2.10154030  1.31512170
#> 23 -0.209346327  2.72700367  1.77477927 -1.24898746
#> 24 -1.057795950  1.16803017  1.58597557  2.76054532
#> 25 -0.292499229 -0.51710571  2.34136160  1.73887327
#> 26  0.760523698  0.61671879  2.10740958  0.96725450
#> 27  0.661594327  1.14215636  2.51533419  1.10704267
#> 28  2.232646859  1.04128671  0.59165601 -1.29676974
#> 29  1.440331160  2.44206310  0.84074769  0.25927842
#> 30  0.684750407  2.04126708  1.69573114  1.30380583
#> 31 -1.300531576  2.02122596  0.80822946  0.80993112
#> 32  2.976385773  0.91095263  1.29556760  1.58412320
#> 33  1.500590831  0.65332199  0.98785289  1.99008659
#> 34  1.404122773  2.48396032  1.80188504  1.01949267
#> 35  0.524800828  1.45825602  0.66321085  2.85306627
#> 36  0.636179629  0.87476161  2.42695779  0.26315595
#> 37  2.195751511  1.28988906 -0.08615995  0.34269923
#> 38 -0.268129170  1.70093990  1.13890944  0.04705632
#> 39  0.045722993 -0.95971392 -1.81916410  1.48908031
#> 40  0.228396281 -0.55364809  0.33179865  1.13193926
#> 41  0.687065879  1.34295259  1.34606255  2.37530412
#> 42  1.089041596  2.18933870  2.44764296  0.48789165
#> 43  1.734033739  0.81383923  1.86118161  0.13340987
#> 44  0.708164413  1.52141987  0.51995361  2.43640337
#> 45  2.444023898  3.02275798  1.69754353 -0.07223254
#> 46  0.210728104  1.34521234 -0.41117217  0.78563105
#> 47  0.320065208  2.02728689 -0.90164159  1.94952539
#> 48  0.009636034  2.12041789  1.81355612  1.22988595
#> 49  1.819377971  1.54446368  2.85103405  3.02495768
#> 50 -0.033395062  2.38702327  1.34559175  0.19901856
#> 51  0.672423876  2.02977629  0.41420119  1.04139377
#> 52  0.097538780 -0.11982175  0.47375606  1.49967296
#> 53  1.158148010  1.56407313  2.16872676  0.39667389
#> 54  1.553856358  1.03879134  1.85567685  1.84445523
#> 55 -0.844843975  1.33528302  0.14291118  2.56586898
#> 56  1.218139750  0.99876620  1.18136005 -1.03862401
#> 57  0.421953087  0.59012602  1.26702064  0.12510429
#> 58  3.342368705  1.16990820  1.83690933  1.63603292
#> 59  0.303235944  2.03293303  1.38583408  1.30552568
#> 60  1.507077064  1.38209644  1.18668288  1.80020114
#> 61  1.971718081  1.20554709  1.12827341  1.77163883
#> 62  0.704109529  2.73418927  0.49565900 -0.08137332
#> 63  2.194146112  1.69154016 -1.51837926  0.82416586
#> 64  2.339869106  0.84938961  0.65899934  0.21906926
#> 65  1.272863046  2.01305264  1.99752775  0.79881519
#> 66  0.049069937  1.14524107  0.86911021  1.09320597
#> 67  0.004454819 -0.37299377  1.17644299  0.64982987
#> 68  1.718113139  3.84010622  0.38278262  0.37053627
#> 69  1.363283467  0.91010819 -0.06164738  1.75330348
#> 70 -0.008814099  1.23566493 -0.49907385  0.20440954
#> 71  1.711324686  0.08029131  1.13859542 -0.16266154
#> 72  1.500176042 -0.30282799  2.04868627  1.41499266
#> 73  1.117285273 -0.46876571  1.70568032  1.57744578
#> 74  0.603528940  2.28714201  0.51927861  0.53989841
#> 75  1.123615066  3.26651575  1.92081024  2.60172926
#> 76  2.153517176  2.16707695  3.47491405  0.96899095
#> 77  1.766724722  2.05378115 -0.05763120  0.05441693
#> 78  1.440432366  2.30349739  0.93678985  0.97606516
#> 79  2.289475857  0.52777302  0.93118401  1.40147659
#> 80 -0.097664625  0.46232917  1.29727867  2.82405475
#> 
#> $Input_Data$Actual_Test
#>  [1] -0.71176453 -0.66882473  0.26514018  0.35736087  0.55969324 -0.01791227
#>  [7]  0.09512255  0.72057255 -0.71998649  1.95075783  1.41910457  0.30752598
#> [13] -0.20342352  0.31717526 -1.97646603  2.04850319  1.73158497  0.95569161
#> [19]  0.36688851 -0.34733207
#> 
#> $Input_Data$Forecasts_Test
#>          Series 1    Series 2    Series 3    Series 4   Series 5  Series 6
#>  [1,]  1.98970524 -0.06972209 -0.17519993 -0.08963597 1.22618483 1.5403178
#>  [2,]  1.66048361  1.22527583  1.35372425  1.16801798 0.38702843 1.5205716
#>  [3,]  0.28245723  1.34256765  0.34182397  0.53509625 1.07642826 1.0031732
#>  [4,]  1.93228135  2.02601541  1.12347825  1.02674694 1.99041518 2.1090700
#>  [5,]  0.08904543  0.29750028 -0.52106977  0.36232223 0.98951065 1.3469187
#>  [6,]  1.36706793  2.56054273 -1.27246299  1.80267154 1.06257711 0.9342006
#>  [7,]  3.47968498 -0.51718665  1.23956303  0.92918661 0.30635499 2.6519710
#>  [8,]  0.07663246  0.48109044  0.83246791  2.54976272 2.23440153 1.0185490
#>  [9,]  1.09983687  1.56688617 -0.20368886  0.84867498 0.03988159 2.7567787
#> [10,]  1.15594362  0.84174747  0.32226386  1.42581574 1.44727570 0.4626972
#> [11,]  0.06586154 -0.02944939  1.15600750  1.37561822 0.11327062 2.9681009
#> [12,]  0.69742196  1.07340678  0.88186745  1.17956681 0.17182912 0.7194141
#> [13,]  2.37278458  1.10256911  0.92147587 -0.10224850 2.36715898 0.7558930
#> [14,]  0.81636799  1.53859621  1.37124201 -0.17638867 2.15828150 1.9460339
#> [15,]  0.06002841  2.20379306  1.11591549  1.49210755 2.86901934 1.0844945
#> [16,]  0.53668684 -1.12019934  2.63506201  1.75204176 0.70924834 1.1935694
#> [17,]  0.66510522  0.92435555  1.83352894  0.60180039 0.79545659 2.7160679
#> [18,] -0.90602233  1.51026127  0.08054463  1.03499972 0.73362708 2.5395535
#> [19,]  0.68598503  1.85971785  1.52776827 -0.49063801 0.58653191 0.1254105
#> [20,]  0.54961692  1.19632347  1.42148372  1.37872081 0.84802381 2.6125470
#>          Series 7   Series 8    Series 9   Series 10
#>  [1,] -0.08757916  0.4012063  1.41012752  1.82241453
#>  [2,]  1.90335746  1.8022300  0.08178475 -0.01909239
#>  [3,]  1.79836738  3.3541273  0.60705341  0.14660848
#>  [4,] -0.09816279 -0.4829956  0.81605490  0.50704747
#>  [5,]  2.68509112 -0.4986725  0.86204635  0.85613497
#>  [6,]  2.23086575  1.3069169  0.61660671  2.47550628
#>  [7,]  1.19627115  0.4889441  2.45131269  1.77218197
#>  [8,] -0.17320187  0.6827512  0.72990977  0.11548316
#>  [9,] -0.89307008  0.6477640  1.33531778  0.33230064
#> [10,]  0.29767395 -0.1863128  3.20890880  1.94706955
#> [11,]  1.89857609  0.3088158  0.73655855  0.83200103
#> [12,]  0.08074218  0.1690462 -0.63592871  0.92244004
#> [13,]  0.78792222  1.0439330  0.13271685  0.99419208
#> [14,]  2.18802367  2.5170329  1.86139163  0.77281665
#> [15,] -0.70798314  1.2062636  0.75068746  1.06544539
#> [16,]  2.48165453  1.3915295  0.37799969 -0.45246111
#> [17,]  1.14801899  0.3124118  1.17968231 -0.82559605
#> [18,]  0.80737181  0.7450269  3.19665927  1.46048753
#> [19,]  2.22461045  2.4529380  0.03733679  0.29764980
#> [20,]  0.57535708  0.1300686  0.75063812  1.21451091
#> 
#> 
#> $Predict
#> function (object, newpreds) 
#> {
#>     return(drop(predict(object$lin_model, newpreds, s = "lambda.min")))
#> }
#> <bytecode: 0x7f995ffd9320>
#> <environment: namespace:ahead>
#> 
#> $Intercept
#> [1] -0.1117191
#> 
#> $Weights
#>  [1] 0 0 0 0 0 0 0 0 0 0
#> 
#> $Forecasts_Test
#>  [1] -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191
#>  [7] -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191
#> [13] -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191 -0.1117191
#> [19] -0.1117191 -0.1117191
#> 
#> $Accuracy_Test
#>                  ME     RMSE      MAE     MPE    MAPE
#> Test set -0.4341897 1.061331 0.829938 388.644 742.879
#> 
#> $lin_model
#> 
#> Call:  glmnet::cv.glmnet(x = as.matrix(prediction_matrix), y = as.numeric(observed_vector)) 
#> 
#> Measure: Mean-Squared Error 
#> 
#>     Lambda Index Measure     SE Nonzero
#> min 0.1553     1   1.055 0.1147       0
#> 1se 0.1553     1   1.055 0.1147       0
#> 
#> attr(,"class")
#> [1] "foreccomb_res" "comb_GLMNET"