Default of Credit Card Clients (data from the UCI Machine Learning Repository)

https://archive.ics.uci.edu/ml/datasets/default+of+credit+card+clients

Yeh, I. C., & Lien, C. H. (2009). The comparisons of data mining techniques for the predictive accuracy of probability of default of credit card clients. Expert Systems with Applications, 36(2), 2473-2480.

dcc<-read.csv("C:/Course19/BDE/data/UCI_Credit_Card.csv")
dim(dcc)  # 30000 obs. of 25 variables
## [1] 30000    25

Redefine factor variables

# SEX: 1=male, 2=female
dcc$SEX<-as.factor(dcc$SEX) 
# EDUCATION: 1=graduate school, 2=university, 3=high school, 0,4,5,6=others
dcc$EDUCATION<-ifelse(dcc$EDUCATION>3,0,dcc$EDUCATION)
dcc$EDUCATION<-as.factor(dcc$EDUCATION) 
# MARRIAGE: 1=married, 2=single, 3=divorced, 0=others
dcc$MARRIAGE<-as.factor(dcc$MARRIAGE)
# payment status: -2=no consumption, -1=paid duly, n>=1=payment delayed by n months
dcc$PAY_0<-as.factor(dcc$PAY_0) # payment status as of 9/2005
dcc$PAY_2<-as.factor(dcc$PAY_2) # payment status as of 8/2005
dcc$PAY_3<-as.factor(dcc$PAY_3) # payment status as of 7/2005
dcc$PAY_4<-as.factor(dcc$PAY_4) # payment status as of 6/2005
dcc$PAY_5<-as.factor(dcc$PAY_5) # payment status as of 5/2005
dcc$PAY_6<-as.factor(dcc$PAY_6) # payment status as of 4/2005
dcc$default.payment.next.month<-as.factor(dcc$default.payment.next.month)
# numeric data: LIMIT_BAL, BILL_AMT1, ..., BILL_AMT6; PAY_AMT1, ..., PAY_AMT6, AGE
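
As a quick sanity check (not part of the original run), the recoded levels can be tabulated with base R's table():

# confirm EDUCATION now carries levels 0-3 only (4, 5, 6 folded into 0)
table(dcc$EDUCATION)
# confirm MARRIAGE carries levels 0-3
table(dcc$MARRIAGE)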

Using the h2o package (the intent was to limit H2O to 2 CPUs; see the note after h2o.init() below)

library(h2o)
## 
## ----------------------------------------------------------------------
## 
## Your next step is to start H2O:
##     > h2o.init()
## 
## For H2O package documentation, ask for help:
##     > ??h2o
## 
## After starting H2O, you can use the Web UI at http://localhost:54321
## For more information visit http://docs.h2o.ai
## 
## ----------------------------------------------------------------------
## 
## Attaching package: 'h2o'
## The following objects are masked from 'package:stats':
## 
##     cor, sd, var
## The following objects are masked from 'package:base':
## 
##     %*%, %in%, &&, ||, apply, as.factor, as.numeric, colnames,
##     colnames<-, ifelse, is.character, is.factor, is.numeric, log,
##     log10, log1p, log2, round, signif, trunc
h2o.init()
## 
## H2O is not running yet, starting it now...
## 
## Note:  In case of errors look at the following log files:
##     C:\Users\link_000\AppData\Local\Temp\Rtmp08YpKn/h2o_link_000_started_from_r.out
##     C:\Users\link_000\AppData\Local\Temp\Rtmp08YpKn/h2o_link_000_started_from_r.err
## 
## 
## Starting H2O JVM and connecting: . Connection successful!
## 
## R is connected to the H2O cluster: 
##     H2O cluster uptime:         7 seconds 611 milliseconds 
##     H2O cluster timezone:       America/Los_Angeles 
##     H2O data parsing timezone:  UTC 
##     H2O cluster version:        3.24.0.1 
##     H2O cluster version age:    27 days  
##     H2O cluster name:           H2O_started_from_R_link_000_llu778 
##     H2O cluster total nodes:    1 
##     H2O cluster total memory:   1.74 GB 
##     H2O cluster total cores:    4 
##     H2O cluster allowed cores:  4 
##     H2O cluster healthy:        TRUE 
##     H2O Connection ip:          localhost 
##     H2O Connection port:        54321 
##     H2O Connection proxy:       NA 
##     H2O Internal Security:      FALSE 
##     H2O API Extensions:         Amazon S3, Algos, AutoML, Core V3, Core V4 
##     R Version:                  R version 3.5.3 (2019-03-11)
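
The cluster above reports 4 allowed cores because h2o.init() was called with no arguments and so uses every available core. To actually restrict the cluster to 2 CPUs, start it with the nthreads argument (nthreads is a standard h2o.init parameter):

# start the local H2O cluster with at most 2 CPU threads
h2o.init(nthreads=2)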

Convert the base R data frame to an H2OFrame

h2o.dcc1<-as.h2o(dcc)
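
To confirm the conversion kept the factor columns as enums, the frame can be inspected with h2o.describe (a standard h2o function; this check was not part of the original run):

# per-column type, cardinality, and missing-value summary of the H2OFrame
h2o.describe(h2o.dcc1)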

Add pairwise interaction variables

h2o.dcc2<-h2o.interaction(h2o.dcc1,
                factors=c("SEX","EDUCATION","MARRIAGE"),
                pairwise=TRUE,max_factors=10,min_occurrence=3)
h2o.dcc<-h2o.cbind(h2o.dcc1,h2o.dcc2)
summary(h2o.dcc,exact_quantiles=TRUE)
##  ID              LIMIT_BAL         SEX      EDUCATION MARRIAGE
##  Min.   :    1   Min.   :  10000   2:18112  2:14030   2:15964 
##  1st Qu.: 7501   1st Qu.:  50000   1:11888  1:10585   1:13659 
##  Median :15000   Median : 140000            3: 4917   3:  323 
##  Mean   :15000   Mean   : 167484            0:  468   0:   54 
##  3rd Qu.:22500   3rd Qu.: 240000                              
##  Max.   :30000   Max.   :1000000                              
##  AGE             PAY_0     PAY_2     PAY_3     PAY_4     PAY_5    
##  Min.   :21.00   0 :14737  0 :15730  0 :15764  0 :16455  0 :16947 
##  1st Qu.:28.00   -1: 5686  -1: 6050  -1: 5938  -1: 5687  -1: 5539 
##  Median :34.00   1 : 3688  2 : 3927  -2: 4085  -2: 4348  -2: 4546 
##  Mean   :35.49   -2: 2759  -2: 3782  2 : 3819  2 : 3159  2 : 2626 
##  3rd Qu.:41.00   2 : 2667  3 :  326  3 :  240  3 :  180  3 :  178 
##  Max.   :79.00   3 :  322  4 :   99  4 :   76  4 :   69  4 :   84 
##  PAY_6     BILL_AMT1         BILL_AMT2        BILL_AMT3        
##  0 :16286  Min.   :-165580   Min.   :-69777   Min.   :-157264  
##  -1: 5740  1st Qu.:   3559   1st Qu.:  2985   1st Qu.:   2666  
##  -2: 4895  Median :  22382   Median : 21200   Median :  20089  
##  2 : 2766  Mean   :  51223   Mean   : 49179   Mean   :  47013  
##  3 :  184  3rd Qu.:  67091   3rd Qu.: 64006   3rd Qu.:  60165  
##  4 :   49  Max.   : 964511   Max.   :983931   Max.   :1664089  
##  BILL_AMT4         BILL_AMT5        BILL_AMT6         PAY_AMT1        
##  Min.   :-170000   Min.   :-81334   Min.   :-339603   Min.   :     0  
##  1st Qu.:   2327   1st Qu.:  1763   1st Qu.:   1256   1st Qu.:  1000  
##  Median :  19052   Median : 18105   Median :  17071   Median :  2100  
##  Mean   :  43263   Mean   : 40311   Mean   :  38872   Mean   :  5664  
##  3rd Qu.:  54506   3rd Qu.: 50191   3rd Qu.:  49198   3rd Qu.:  5006  
##  Max.   : 891586   Max.   :927171   Max.   : 961664   Max.   :873552  
##  PAY_AMT2          PAY_AMT3         PAY_AMT4         PAY_AMT5          
##  Min.   :      0   Min.   :     0   Min.   :     0   Min.   :     0.0  
##  1st Qu.:    833   1st Qu.:   390   1st Qu.:   296   1st Qu.:   252.5  
##  Median :   2009   Median :  1800   Median :  1500   Median :  1500.0  
##  Mean   :   5921   Mean   :  5226   Mean   :  4826   Mean   :  4799.4  
##  3rd Qu.:   5000   3rd Qu.:  4505   3rd Qu.:  4013   3rd Qu.:  4031.5  
##  Max.   :1684259   Max.   :896040   Max.   :621000   Max.   :426529.0  
##  PAY_AMT6           default.payment.next.month SEX_EDUCATION SEX_MARRIAGE
##  Min.   :     0.0   0:23364                    2_2:8656      2_2:9411    
##  1st Qu.:   117.8   1: 6636                    2_1:6231      2_1:8469    
##  Median :  1500.0                              1_2:5374      1_2:6553    
##  Mean   :  5215.5                              1_1:4354      1_1:5190    
##  3rd Qu.:  4000.0                              2_3:2927      2_3: 192    
##  Max.   :528666.0                              1_3:1990      1_3: 131    
##  EDUCATION_MARRIAGE
##  2_2:7020          
##  2_1:6842          
##  1_2:6809          
##  1_1:3722          
##  3_1:2861          
##  3_2:1909
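
The interaction frame itself contains only the three new pairwise columns, which can be confirmed directly:

# list the generated interaction columns: SEX_EDUCATION, SEX_MARRIAGE, EDUCATION_MARRIAGE
colnames(h2o.dcc2)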

Split data for training and testing

dcc.split<-h2o.splitFrame(h2o.dcc,0.5)
train<-dcc.split[[1]]
test<-dcc.split[[2]]
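
Note that h2o.splitFrame draws a random, approximate split (the halves need not be exactly 15000 rows each; the confusion matrices below show 14854 training and 15146 test rows). For a reproducible split, one could pass the standard seed argument (a sketch; the original run did not set one):

# reproducible, approximate 50/50 split
dcc.split<-h2o.splitFrame(h2o.dcc,ratios=0.5,seed=2019)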

Set up the x and y variables

yvar<-"default.payment.next.month"
xvar<-c("LIMIT_BAL","SEX","EDUCATION","MARRIAGE","AGE",
        "SEX_EDUCATION","SEX_MARRIAGE","EDUCATION_MARRIAGE",
        "PAY_0","PAY_2","PAY_3","PAY_4","PAY_5","PAY_6",
        "BILL_AMT1","BILL_AMT2","BILL_AMT3","BILL_AMT4","BILL_AMT5","BILL_AMT6",
        "PAY_AMT1","PAY_AMT2","PAY_AMT3","PAY_AMT4","PAY_AMT5","PAY_AMT6")

Run H2O AutoML

auto1<-h2o.automl(xvar,yvar,seed=2019,nfolds=10,
                  training_frame=train,
                  validation_frame=test,
                  stopping_metric = "misclassification",
                  stopping_tolerance = 0.001,
                  stopping_rounds = 5,
                  max_runtime_secs = 7200)
auto1
## An object of class "H2OAutoML"
## Slot "project_name":
## [1] "automl_RTMP_sid_8fc5_50"
## 
## Slot "leader":
## Model Details:
## ==============
## 
## H2OBinomialModel: stackedensemble
## Model ID:  StackedEnsemble_AllModels_AutoML_20190428_005854 
## NULL
## 
## 
## H2OBinomialMetrics: stackedensemble
## ** Reported on training data. **
## 
## MSE:  0.1053517
## RMSE:  0.3245792
## LogLoss:  0.3482742
## Mean Per-Class Error:  0.1778003
## AUC:  0.9081976
## pr_auc:  0.75394
## Gini:  0.8163952
## 
## Confusion Matrix (vertical: actual; across: predicted) for F1-optimal threshold:
##            0    1    Error         Rate
## 0       9618 1968 0.169860  =1968/11586
## 1        607 2661 0.185741    =607/3268
## Totals 10225 4629 0.173354  =2575/14854
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                         metric threshold    value idx
## 1                       max f1  0.187986 0.673927 269
## 2                       max f2  0.147765 0.785463 302
## 3                 max f0point5  0.462813 0.692887 148
## 4                 max accuracy  0.459556 0.855931 149
## 5                max precision  0.913605 1.000000   0
## 6                   max recall  0.088053 1.000000 378
## 7              max specificity  0.913605 1.000000   0
## 8             max absolute_mcc  0.187986 0.576352 269
## 9   max min_per_class_accuracy  0.183812 0.822286 272
## 10 max mean_per_class_accuracy  0.160231 0.830676 290
## 
## Gains/Lift Table: Extract with `h2o.gainsLift(<model>, <data>)` or `h2o.gainsLift(<model>, valid=<T/F>, xval=<T/F>)`
## H2OBinomialMetrics: stackedensemble
## ** Reported on validation data. **
## 
## MSE:  0.1349173
## RMSE:  0.3673109
## LogLoss:  0.4309416
## Mean Per-Class Error:  0.2920477
## AUC:  0.7823999
## pr_auc:  0.5626649
## Gini:  0.5647999
## 
## Confusion Matrix (vertical: actual; across: predicted) for F1-optimal threshold:
##            0    1    Error         Rate
## 0      10228 1550 0.131601  =1550/11778
## 1       1524 1844 0.452494   =1524/3368
## Totals 11752 3394 0.202958  =3074/15146
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                         metric threshold    value idx
## 1                       max f1  0.249397 0.545401 230
## 2                       max f2  0.120743 0.639063 335
## 3                 max f0point5  0.454195 0.586558 149
## 4                 max accuracy  0.454195 0.821471 149
## 5                max precision  0.914741 1.000000   0
## 6                   max recall  0.072489 1.000000 397
## 7              max specificity  0.914741 1.000000   0
## 8             max absolute_mcc  0.346371 0.424427 190
## 9   max min_per_class_accuracy  0.158365 0.706651 292
## 10 max mean_per_class_accuracy  0.196029 0.714946 262
## 
## Gains/Lift Table: Extract with `h2o.gainsLift(<model>, <data>)` or `h2o.gainsLift(<model>, valid=<T/F>, xval=<T/F>)`
## H2OBinomialMetrics: stackedensemble
## ** Reported on cross-validation data. **
## ** 10-fold cross-validation on training data (Metrics computed for combined holdout predictions) **
## 
## MSE:  0.1340485
## RMSE:  0.3661263
## LogLoss:  0.4284002
## Mean Per-Class Error:  0.2872486
## AUC:  0.7845892
## pr_auc:  0.5556414
## Gini:  0.5691785
## 
## Confusion Matrix (vertical: actual; across: predicted) for F1-optimal threshold:
##            0    1    Error         Rate
## 0       9989 1597 0.137839  =1597/11586
## 1       1427 1841 0.436659   =1427/3268
## Totals 11416 3438 0.203582  =3024/14854
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                         metric threshold    value idx
## 1                       max f1  0.242904 0.549061 240
## 2                       max f2  0.114446 0.639919 346
## 3                 max f0point5  0.437463 0.581375 159
## 4                 max accuracy  0.450709 0.821732 155
## 5                max precision  0.912955 1.000000   0
## 6                   max recall  0.072006 1.000000 397
## 7              max specificity  0.912955 1.000000   0
## 8             max absolute_mcc  0.317563 0.421324 206
## 9   max min_per_class_accuracy  0.159890 0.704557 298
## 10 max mean_per_class_accuracy  0.176587 0.714879 283
## 
## Gains/Lift Table: Extract with `h2o.gainsLift(<model>, <data>)` or `h2o.gainsLift(<model>, valid=<T/F>, xval=<T/F>)`
## 
## Slot "leaderboard":
##                                              model_id       auc   logloss
## 1    StackedEnsemble_AllModels_AutoML_20190428_005854 0.7845892 0.4284002
## 2          GBM_grid_1_AutoML_20190428_005854_model_86 0.7840491 0.4262033
## 3         GBM_grid_1_AutoML_20190428_005854_model_137 0.7837912 0.4268730
## 4          GBM_grid_1_AutoML_20190428_005854_model_43 0.7837242 0.4267778
## 5          GBM_grid_1_AutoML_20190428_005854_model_12 0.7837062 0.4264270
## 6 StackedEnsemble_BestOfFamily_AutoML_20190428_005854 0.7835455 0.4286595
##   mean_per_class_error      rmse       mse
## 1            0.2872486 0.3661263 0.1340485
## 2            0.2927212 0.3655607 0.1336346
## 3            0.2923522 0.3659971 0.1339539
## 4            0.2877632 0.3660347 0.1339814
## 5            0.2880333 0.3655293 0.1336117
## 6            0.2914736 0.3661293 0.1340507
## 
## [160 rows x 6 columns]

Present the final model

auto1@leader
## Model Details:
## ==============
## 
## H2OBinomialModel: stackedensemble
## Model ID:  StackedEnsemble_AllModels_AutoML_20190428_005854 
## NULL
## 
## 
## H2OBinomialMetrics: stackedensemble
## ** Reported on training data. **
## 
## MSE:  0.1053517
## RMSE:  0.3245792
## LogLoss:  0.3482742
## Mean Per-Class Error:  0.1778003
## AUC:  0.9081976
## pr_auc:  0.75394
## Gini:  0.8163952
## 
## Confusion Matrix (vertical: actual; across: predicted) for F1-optimal threshold:
##            0    1    Error         Rate
## 0       9618 1968 0.169860  =1968/11586
## 1        607 2661 0.185741    =607/3268
## Totals 10225 4629 0.173354  =2575/14854
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                         metric threshold    value idx
## 1                       max f1  0.187986 0.673927 269
## 2                       max f2  0.147765 0.785463 302
## 3                 max f0point5  0.462813 0.692887 148
## 4                 max accuracy  0.459556 0.855931 149
## 5                max precision  0.913605 1.000000   0
## 6                   max recall  0.088053 1.000000 378
## 7              max specificity  0.913605 1.000000   0
## 8             max absolute_mcc  0.187986 0.576352 269
## 9   max min_per_class_accuracy  0.183812 0.822286 272
## 10 max mean_per_class_accuracy  0.160231 0.830676 290
## 
## Gains/Lift Table: Extract with `h2o.gainsLift(<model>, <data>)` or `h2o.gainsLift(<model>, valid=<T/F>, xval=<T/F>)`
## H2OBinomialMetrics: stackedensemble
## ** Reported on validation data. **
## 
## MSE:  0.1349173
## RMSE:  0.3673109
## LogLoss:  0.4309416
## Mean Per-Class Error:  0.2920477
## AUC:  0.7823999
## pr_auc:  0.5626649
## Gini:  0.5647999
## 
## Confusion Matrix (vertical: actual; across: predicted) for F1-optimal threshold:
##            0    1    Error         Rate
## 0      10228 1550 0.131601  =1550/11778
## 1       1524 1844 0.452494   =1524/3368
## Totals 11752 3394 0.202958  =3074/15146
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                         metric threshold    value idx
## 1                       max f1  0.249397 0.545401 230
## 2                       max f2  0.120743 0.639063 335
## 3                 max f0point5  0.454195 0.586558 149
## 4                 max accuracy  0.454195 0.821471 149
## 5                max precision  0.914741 1.000000   0
## 6                   max recall  0.072489 1.000000 397
## 7              max specificity  0.914741 1.000000   0
## 8             max absolute_mcc  0.346371 0.424427 190
## 9   max min_per_class_accuracy  0.158365 0.706651 292
## 10 max mean_per_class_accuracy  0.196029 0.714946 262
## 
## Gains/Lift Table: Extract with `h2o.gainsLift(<model>, <data>)` or `h2o.gainsLift(<model>, valid=<T/F>, xval=<T/F>)`
## H2OBinomialMetrics: stackedensemble
## ** Reported on cross-validation data. **
## ** 10-fold cross-validation on training data (Metrics computed for combined holdout predictions) **
## 
## MSE:  0.1340485
## RMSE:  0.3661263
## LogLoss:  0.4284002
## Mean Per-Class Error:  0.2872486
## AUC:  0.7845892
## pr_auc:  0.5556414
## Gini:  0.5691785
## 
## Confusion Matrix (vertical: actual; across: predicted) for F1-optimal threshold:
##            0    1    Error         Rate
## 0       9989 1597 0.137839  =1597/11586
## 1       1427 1841 0.436659   =1427/3268
## Totals 11416 3438 0.203582  =3024/14854
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                         metric threshold    value idx
## 1                       max f1  0.242904 0.549061 240
## 2                       max f2  0.114446 0.639919 346
## 3                 max f0point5  0.437463 0.581375 159
## 4                 max accuracy  0.450709 0.821732 155
## 5                max precision  0.912955 1.000000   0
## 6                   max recall  0.072006 1.000000 397
## 7              max specificity  0.912955 1.000000   0
## 8             max absolute_mcc  0.317563 0.421324 206
## 9   max min_per_class_accuracy  0.159890 0.704557 298
## 10 max mean_per_class_accuracy  0.176587 0.714879 283
## 
## Gains/Lift Table: Extract with `h2o.gainsLift(<model>, <data>)` or `h2o.gainsLift(<model>, valid=<T/F>, xval=<T/F>)`
lb1<-auto1@leaderboard
print(lb1,n=nrow(lb1))
##                                                model_id       auc
## 1      StackedEnsemble_AllModels_AutoML_20190428_005854 0.7845892
## 2            GBM_grid_1_AutoML_20190428_005854_model_86 0.7840491
## 3           GBM_grid_1_AutoML_20190428_005854_model_137 0.7837912
## 4            GBM_grid_1_AutoML_20190428_005854_model_43 0.7837242
## 5            GBM_grid_1_AutoML_20190428_005854_model_12 0.7837062
## 6   StackedEnsemble_BestOfFamily_AutoML_20190428_005854 0.7835455
## 7           GBM_grid_1_AutoML_20190428_005854_model_138 0.7833724
## 8           GBM_grid_1_AutoML_20190428_005854_model_132 0.7827903
## 9            GBM_grid_1_AutoML_20190428_005854_model_33 0.7819841
## 10           GBM_grid_1_AutoML_20190428_005854_model_15 0.7819047
## 11          GBM_grid_1_AutoML_20190428_005854_model_123 0.7817980
## 12           GBM_grid_1_AutoML_20190428_005854_model_68 0.7814285
## 13           GBM_grid_1_AutoML_20190428_005854_model_88 0.7813277
## 14          GBM_grid_1_AutoML_20190428_005854_model_122 0.7813205
## 15          GBM_grid_1_AutoML_20190428_005854_model_104 0.7811074
## 16          GBM_grid_1_AutoML_20190428_005854_model_106 0.7811068
## 17           GBM_grid_1_AutoML_20190428_005854_model_40 0.7805322
## 18          GBM_grid_1_AutoML_20190428_005854_model_121 0.7804938
## 19          GBM_grid_1_AutoML_20190428_005854_model_116 0.7803873
## 20           GBM_grid_1_AutoML_20190428_005854_model_96 0.7801368
## 21           GBM_grid_1_AutoML_20190428_005854_model_75 0.7800807
## 22           GBM_grid_1_AutoML_20190428_005854_model_27 0.7796575
## 23           GBM_grid_1_AutoML_20190428_005854_model_21 0.7794936
## 24          GBM_grid_1_AutoML_20190428_005854_model_115 0.7790884
## 25            GBM_grid_1_AutoML_20190428_005854_model_3 0.7788960
## 26           GBM_grid_1_AutoML_20190428_005854_model_80 0.7788785
## 27           GBM_grid_1_AutoML_20190428_005854_model_30 0.7787529
## 28           GBM_grid_1_AutoML_20190428_005854_model_51 0.7786226
## 29           GBM_grid_1_AutoML_20190428_005854_model_89 0.7783652
## 30           GBM_grid_1_AutoML_20190428_005854_model_91 0.7781623
## 31           GBM_grid_1_AutoML_20190428_005854_model_50 0.7781395
## 32                         GBM_2_AutoML_20190428_005854 0.7779896
## 33          GBM_grid_1_AutoML_20190428_005854_model_117 0.7779796
## 34           GBM_grid_1_AutoML_20190428_005854_model_48 0.7774461
## 35           GBM_grid_1_AutoML_20190428_005854_model_28 0.7773412
## 36           GBM_grid_1_AutoML_20190428_005854_model_66 0.7773358
## 37                         GBM_1_AutoML_20190428_005854 0.7772953
## 38          GBM_grid_1_AutoML_20190428_005854_model_108 0.7770097
## 39           GBM_grid_1_AutoML_20190428_005854_model_10 0.7769689
## 40          GBM_grid_1_AutoML_20190428_005854_model_102 0.7769343
## 41           GBM_grid_1_AutoML_20190428_005854_model_87 0.7768000
## 42          GBM_grid_1_AutoML_20190428_005854_model_131 0.7767256
## 43          GBM_grid_1_AutoML_20190428_005854_model_134 0.7767231
## 44           GBM_grid_1_AutoML_20190428_005854_model_59 0.7767036
## 45                         GBM_5_AutoML_20190428_005854 0.7765987
## 46           GBM_grid_1_AutoML_20190428_005854_model_42 0.7765920
## 47          GBM_grid_1_AutoML_20190428_005854_model_139 0.7764079
## 48           GBM_grid_1_AutoML_20190428_005854_model_36 0.7763376
## 49          GBM_grid_1_AutoML_20190428_005854_model_110 0.7761657
## 50          GBM_grid_1_AutoML_20190428_005854_model_119 0.7761364
## 51           GBM_grid_1_AutoML_20190428_005854_model_94 0.7760367
## 52           GBM_grid_1_AutoML_20190428_005854_model_29 0.7760307
## 53                         GBM_3_AutoML_20190428_005854 0.7758530
## 54           GBM_grid_1_AutoML_20190428_005854_model_63 0.7756729
## 55           GBM_grid_1_AutoML_20190428_005854_model_24 0.7755545
## 56           GBM_grid_1_AutoML_20190428_005854_model_73 0.7752902
## 57            GBM_grid_1_AutoML_20190428_005854_model_9 0.7752092
## 58           GBM_grid_1_AutoML_20190428_005854_model_52 0.7751698
## 59           GBM_grid_1_AutoML_20190428_005854_model_55 0.7750734
## 60          GBM_grid_1_AutoML_20190428_005854_model_114 0.7749047
## 61          GBM_grid_1_AutoML_20190428_005854_model_125 0.7746972
## 62           GBM_grid_1_AutoML_20190428_005854_model_95 0.7745450
## 63          GBM_grid_1_AutoML_20190428_005854_model_100 0.7743763
## 64           GBM_grid_1_AutoML_20190428_005854_model_45 0.7741453
## 65           GBM_grid_1_AutoML_20190428_005854_model_90 0.7737913
## 66          GBM_grid_1_AutoML_20190428_005854_model_118 0.7735334
## 67           GBM_grid_1_AutoML_20190428_005854_model_79 0.7734547
## 68          GBM_grid_1_AutoML_20190428_005854_model_124 0.7733690
## 69          GBM_grid_1_AutoML_20190428_005854_model_105 0.7726803
## 70           GBM_grid_1_AutoML_20190428_005854_model_53 0.7726691
## 71          GBM_grid_1_AutoML_20190428_005854_model_107 0.7724272
## 72           GBM_grid_1_AutoML_20190428_005854_model_69 0.7721979
## 73           GBM_grid_1_AutoML_20190428_005854_model_81 0.7719644
## 74           GBM_grid_1_AutoML_20190428_005854_model_46 0.7719499
## 75           GBM_grid_1_AutoML_20190428_005854_model_32 0.7719390
## 76          GBM_grid_1_AutoML_20190428_005854_model_109 0.7715930
## 77           GBM_grid_1_AutoML_20190428_005854_model_72 0.7714022
## 78            GBM_grid_1_AutoML_20190428_005854_model_2 0.7711870
## 79            GBM_grid_1_AutoML_20190428_005854_model_6 0.7711349
## 80          GBM_grid_1_AutoML_20190428_005854_model_128 0.7709016
## 81          GBM_grid_1_AutoML_20190428_005854_model_129 0.7708949
## 82           GBM_grid_1_AutoML_20190428_005854_model_76 0.7706791
## 83          GBM_grid_1_AutoML_20190428_005854_model_120 0.7700999
## 84           GBM_grid_1_AutoML_20190428_005854_model_44 0.7700745
## 85           GBM_grid_1_AutoML_20190428_005854_model_58 0.7700577
## 86           GBM_grid_1_AutoML_20190428_005854_model_98 0.7691363
## 87           GBM_grid_1_AutoML_20190428_005854_model_47 0.7690418
## 88           GBM_grid_1_AutoML_20190428_005854_model_41 0.7688779
## 89          GBM_grid_1_AutoML_20190428_005854_model_103 0.7686714
## 90           GBM_grid_1_AutoML_20190428_005854_model_82 0.7686220
## 91           GBM_grid_1_AutoML_20190428_005854_model_37 0.7685128
## 92                         GBM_4_AutoML_20190428_005854 0.7684412
## 93           GBM_grid_1_AutoML_20190428_005854_model_19 0.7683668
## 94            GLM_grid_1_AutoML_20190428_005854_model_1 0.7680722
## 95           GBM_grid_1_AutoML_20190428_005854_model_65 0.7672923
## 96          GBM_grid_1_AutoML_20190428_005854_model_101 0.7672629
## 97           GBM_grid_1_AutoML_20190428_005854_model_17 0.7671498
## 98           GBM_grid_1_AutoML_20190428_005854_model_31 0.7670723
## 99          GBM_grid_1_AutoML_20190428_005854_model_136 0.7667271
## 100          GBM_grid_1_AutoML_20190428_005854_model_97 0.7666519
## 101          GBM_grid_1_AutoML_20190428_005854_model_25 0.7663417
## 102               DeepLearning_1_AutoML_20190428_005854 0.7662049
## 103          GBM_grid_1_AutoML_20190428_005854_model_14 0.7660972
## 104          GBM_grid_1_AutoML_20190428_005854_model_67 0.7657508
## 105          GBM_grid_1_AutoML_20190428_005854_model_99 0.7653334
## 106         GBM_grid_1_AutoML_20190428_005854_model_135 0.7651210
## 107          GBM_grid_1_AutoML_20190428_005854_model_71 0.7646667
## 108           GBM_grid_1_AutoML_20190428_005854_model_7 0.7642704
## 109  DeepLearning_grid_1_AutoML_20190428_005854_model_3 0.7641383
## 110         GBM_grid_1_AutoML_20190428_005854_model_127 0.7627426
## 111          GBM_grid_1_AutoML_20190428_005854_model_38 0.7627360
## 112          GBM_grid_1_AutoML_20190428_005854_model_77 0.7626592
## 113          GBM_grid_1_AutoML_20190428_005854_model_62 0.7626420
## 114          GBM_grid_1_AutoML_20190428_005854_model_64 0.7614975
## 115          GBM_grid_1_AutoML_20190428_005854_model_83 0.7614574
## 116         GBM_grid_1_AutoML_20190428_005854_model_130 0.7609762
## 117  DeepLearning_grid_1_AutoML_20190428_005854_model_8 0.7609042
## 118         GBM_grid_1_AutoML_20190428_005854_model_126 0.7607253
## 119          GBM_grid_1_AutoML_20190428_005854_model_57 0.7603355
## 120          GBM_grid_1_AutoML_20190428_005854_model_92 0.7603051
## 121                        DRF_1_AutoML_20190428_005854 0.7591447
## 122                        XRT_1_AutoML_20190428_005854 0.7590349
## 123           GBM_grid_1_AutoML_20190428_005854_model_5 0.7579557
## 124          GBM_grid_1_AutoML_20190428_005854_model_13 0.7576833
## 125           GBM_grid_1_AutoML_20190428_005854_model_8 0.7575414
## 126          GBM_grid_1_AutoML_20190428_005854_model_78 0.7572563
## 127  DeepLearning_grid_1_AutoML_20190428_005854_model_4 0.7561972
## 128          GBM_grid_1_AutoML_20190428_005854_model_93 0.7557890
## 129 DeepLearning_grid_1_AutoML_20190428_005854_model_10 0.7530193
## 130          GBM_grid_1_AutoML_20190428_005854_model_22 0.7515422
## 131  DeepLearning_grid_1_AutoML_20190428_005854_model_7 0.7491566
## 132  DeepLearning_grid_1_AutoML_20190428_005854_model_2 0.7490283
## 133          GBM_grid_1_AutoML_20190428_005854_model_34 0.7479599
## 134  DeepLearning_grid_1_AutoML_20190428_005854_model_6 0.7473500
## 135          GBM_grid_1_AutoML_20190428_005854_model_23 0.7400360
## 136  DeepLearning_grid_1_AutoML_20190428_005854_model_5 0.7393223
## 137  DeepLearning_grid_1_AutoML_20190428_005854_model_1 0.7368333
## 138          GBM_grid_1_AutoML_20190428_005854_model_74 0.7359017
## 139         GBM_grid_1_AutoML_20190428_005854_model_113 0.7348553
## 140  DeepLearning_grid_1_AutoML_20190428_005854_model_9 0.7324831
## 141          GBM_grid_1_AutoML_20190428_005854_model_20 0.7305836
## 142          GBM_grid_1_AutoML_20190428_005854_model_70 0.7300917
## 143         GBM_grid_1_AutoML_20190428_005854_model_111 0.7294821
## 144          GBM_grid_1_AutoML_20190428_005854_model_60 0.7259162
## 145          GBM_grid_1_AutoML_20190428_005854_model_26 0.7251253
## 146         GBM_grid_1_AutoML_20190428_005854_model_133 0.7248563
## 147          GBM_grid_1_AutoML_20190428_005854_model_85 0.7222474
## 148           GBM_grid_1_AutoML_20190428_005854_model_1 0.7219988
## 149          GBM_grid_1_AutoML_20190428_005854_model_54 0.7208174
## 150          GBM_grid_1_AutoML_20190428_005854_model_11 0.7163489
## 151          GBM_grid_1_AutoML_20190428_005854_model_39 0.7104816
## 152          GBM_grid_1_AutoML_20190428_005854_model_16 0.7082462
## 153          GBM_grid_1_AutoML_20190428_005854_model_35 0.7065265
## 154          GBM_grid_1_AutoML_20190428_005854_model_61 0.7021981
## 155         GBM_grid_1_AutoML_20190428_005854_model_112 0.6889614
## 156          GBM_grid_1_AutoML_20190428_005854_model_84 0.6866473
## 157          GBM_grid_1_AutoML_20190428_005854_model_56 0.6519863
## 158          GBM_grid_1_AutoML_20190428_005854_model_49 0.6387311
## 159           GBM_grid_1_AutoML_20190428_005854_model_4 0.6383451
## 160          GBM_grid_1_AutoML_20190428_005854_model_18 0.6330776
##       logloss mean_per_class_error      rmse       mse
## 1   0.4284002            0.2872486 0.3661263 0.1340485
## 2   0.4262033            0.2927212 0.3655607 0.1336346
## 3   0.4268730            0.2923522 0.3659971 0.1339539
## 4   0.4267778            0.2877632 0.3660347 0.1339814
## 5   0.4264270            0.2880333 0.3655293 0.1336117
## 6   0.4286595            0.2914736 0.3661293 0.1340507
## 7   0.4260761            0.2860017 0.3654629 0.1335632
## 8   0.4263360            0.2831976 0.3655786 0.1336477
## 9   0.4270237            0.2880176 0.3656863 0.1337265
## 10  0.4277595            0.2867979 0.3661563 0.1340704
## 11  0.4274629            0.2857936 0.3656537 0.1337026
## 12  0.4843599            0.2899205 0.3947729 0.1558456
## 13  0.4311983            0.2905053 0.3684324 0.1357424
## 14  0.4286863            0.2916543 0.3668331 0.1345665
## 15  0.4283423            0.2943806 0.3665691 0.1343729
## 16  0.4659483            0.2884730 0.3854304 0.1485566
## 17  0.4289828            0.2889990 0.3670843 0.1347509
## 18  0.4283582            0.2874573 0.3665965 0.1343930
## 19  0.4283206            0.2869748 0.3665333 0.1343467
## 20  0.4303216            0.2873394 0.3674178 0.1349959
## 21  0.4303546            0.2896575 0.3677584 0.1352462
## 22  0.4695490            0.2834517 0.3869988 0.1497680
## 23  0.4781541            0.2885278 0.3916056 0.1533550
## 24  0.4540697            0.2926858 0.3790474 0.1436769
## 25  0.4505416            0.2873234 0.3774279 0.1424518
## 26  0.4304204            0.2903527 0.3676727 0.1351832
## 27  0.4314620            0.2872971 0.3685747 0.1358473
## 28  0.4320193            0.2935844 0.3687253 0.1359584
## 29  0.4960161            0.2879438 0.4004804 0.1603845
## 30  0.4744780            0.2913406 0.3897613 0.1519139
## 31  0.4825716            0.2901443 0.3939431 0.1551912
## 32  0.4317314            0.2933806 0.3684093 0.1357254
## 33  0.4701317            0.2882963 0.3874525 0.1501194
## 34  0.4830982            0.2908347 0.3941348 0.1553423
## 35  0.4311799            0.2916114 0.3677632 0.1352498
## 36  0.4750331            0.2899673 0.3901293 0.1522008
## 37  0.4320265            0.2940399 0.3685252 0.1358108
## 38  0.4615672            0.2893870 0.3828591 0.1465811
## 39  0.4910977            0.2895793 0.3980942 0.1584790
## 40  0.4930738            0.2899518 0.3989748 0.1591809
## 41  0.4753310            0.2876139 0.3903356 0.1523619
## 42  0.4784486            0.2920112 0.3916231 0.1533686
## 43  0.4612904            0.2918270 0.3829241 0.1466309
## 44  0.4675723            0.2933568 0.3863909 0.1492979
## 45  0.4317284            0.2951380 0.3679787 0.1354083
## 46  0.4537563            0.2904580 0.3789615 0.1436118
## 47  0.4319114            0.2942984 0.3683019 0.1356463
## 48  0.4876252            0.2871784 0.3961975 0.1569725
## 49  0.4503137            0.2885004 0.3772528 0.1423197
## 50  0.4327359            0.2995432 0.3688053 0.1360173
## 51  0.4322812            0.2913646 0.3680934 0.1354928
## 52  0.4333637            0.2948284 0.3695462 0.1365644
## 53  0.4340248            0.2970568 0.3693277 0.1364029
## 54  0.4337004            0.2969270 0.3696418 0.1366351
## 55  0.4675889            0.2902970 0.3865182 0.1493963
## 56  0.4530423            0.2929097 0.3786770 0.1433962
## 57  0.4911489            0.2873592 0.3979085 0.1583311
## 58  0.4545779            0.2942555 0.3792965 0.1438658
## 59  0.4932649            0.2913446 0.3991566 0.1593260
## 60  0.4344768            0.2942330 0.3694114 0.1364648
## 61  0.4751989            0.2976092 0.3900383 0.1521299
## 62  0.4341119            0.2951587 0.3696780 0.1366618
## 63  0.4497458            0.2945809 0.3769315 0.1420774
## 64  0.4599202            0.2938947 0.3821276 0.1460215
## 65  0.4627455            0.2932629 0.3834949 0.1470683
## 66  0.4572856            0.2907209 0.3806026 0.1448584
## 67  0.4529622            0.2987432 0.3784496 0.1432241
## 68  0.4700808            0.2916235 0.3877168 0.1503243
## 69  0.4884361            0.2896541 0.3966042 0.1572949
## 70  0.4923340            0.2901597 0.3985913 0.1588750
## 71  0.4677913            0.2883473 0.3861760 0.1491319
## 72  0.4733197            0.2896304 0.3889586 0.1512888
## 73  0.4358502            0.2984842 0.3693202 0.1363974
## 74  0.4769119            0.2910502 0.3908753 0.1527835
## 75  0.4661891            0.2894465 0.3858440 0.1488756
## 76  0.4367715            0.2986377 0.3705518 0.1373087
## 77  0.4604037            0.2905245 0.3820455 0.1459588
## 78  0.4501936            0.3014776 0.3725252 0.1387750
## 79  0.4730065            0.2888381 0.3887527 0.1511287
## 80  0.4933798            0.2941972 0.3992311 0.1593855
## 81  0.5005806            0.2963264 0.4024865 0.1619954
## 82  0.4739128            0.2882380 0.3892560 0.1515203
## 83  0.4364086            0.2988925 0.3705209 0.1372858
## 84  0.4772777            0.2935617 0.3910371 0.1529100
## 85  0.5129678            0.2943570 0.4082407 0.1666605
## 86  0.4546796            0.2959385 0.3796543 0.1441374
## 87  0.4637362            0.2876218 0.3837708 0.1472800
## 88  0.4637437            0.2936945 0.3846569 0.1479609
## 89  0.4433791            0.2988773 0.3726034 0.1388333
## 90  0.5154418            0.2954123 0.4093333 0.1675538
## 91  0.4668409            0.3017128 0.3861105 0.1490813
## 92  0.4425970            0.3056319 0.3730853 0.1391926
## 93  0.5190939            0.2987862 0.4109438 0.1688748
## 94  0.4358793            0.2989349 0.3690961 0.1362319
## 95  0.4585502            0.3005169 0.3815893 0.1456104
## 96  0.5131790            0.2945143 0.4083398 0.1667414
## 97  0.4399649            0.3031136 0.3712422 0.1378207
## 98  0.4781054            0.2921878 0.3911881 0.1530281
## 99  0.5170991            0.2965621 0.4100708 0.1681581
## 100 0.5168394            0.2982413 0.4099583 0.1680658
## 101 0.4815235            0.2883672 0.3930034 0.1544517
## 102 0.4365411            0.3017555 0.3695319 0.1365538
## 103 0.5189850            0.2982767 0.4108814 0.1688235
## 104 0.4620905            0.2981870 0.3833270 0.1469396
## 105 0.4409865            0.3031018 0.3717364 0.1381880
## 106 0.4459996            0.3071310 0.3731274 0.1392240
## 107 0.5170745            0.3009867 0.4100544 0.1681446
## 108 0.4705000            0.2905407 0.3870457 0.1498044
## 109 0.4571048            0.2986487 0.3721580 0.1385016
## 110 0.4503568            0.3009296 0.3752562 0.1408172
## 111 0.4442860            0.3029886 0.3738151 0.1397377
## 112 0.4695588            0.2901288 0.3864010 0.1493057
## 113 0.4489343            0.3068844 0.3752505 0.1408130
## 114 0.4713675            0.2981556 0.3880469 0.1505804
## 115 0.4819785            0.2935888 0.3930762 0.1545089
## 116 0.4559917            0.3078926 0.3779459 0.1428431
## 117 0.4453867            0.3037643 0.3722309 0.1385558
## 118 0.4740617            0.3077122 0.3796372 0.1441244
## 119 0.4599051            0.3066131 0.3772222 0.1422966
## 120 0.5142888            0.2976993 0.4088149 0.1671296
## 121 0.4632517            0.3029419 0.3751042 0.1407032
## 122 0.4562921            0.3031449 0.3793194 0.1438832
## 123 0.4557542            0.3061939 0.3775884 0.1425730
## 124 0.4614494            0.3040641 0.3794072 0.1439498
## 125 0.4537262            0.3118658 0.3763356 0.1416285
## 126 0.4962348            0.3094700 0.3841466 0.1475686
## 127 0.4721467            0.3004267 0.3748069 0.1404802
## 128 0.4666974            0.3087720 0.3787182 0.1434274
## 129 0.4630081            0.3087081 0.3813158 0.1454017
## 130 0.5227086            0.3113414 0.3908929 0.1527973
## 131 0.4784283            0.3105604 0.3835717 0.1471273
## 132 0.5060012            0.3053271 0.3845050 0.1478441
## 133 0.4795254            0.3235097 0.3854712 0.1485881
## 134 0.4878850            0.3053418 0.3783231 0.1431284
## 135 0.4889822            0.3207370 0.3871948 0.1499198
## 136 0.4959732            0.3163084 0.3876955 0.1503078
## 137 0.5521662            0.3214344 0.3892846 0.1515425
## 138 0.5424465            0.3271158 0.3962363 0.1570032
## 139 0.5034755            0.3175767 0.3914070 0.1531994
## 140 0.5036029            0.3163857 0.3826506 0.1464215
## 141 0.5083815            0.3266378 0.3939311 0.1551817
## 142 0.5242688            0.3289282 0.3969087 0.1575365
## 143 0.5372104            0.3302987 0.3997837 0.1598270
## 144 0.5362080            0.3311097 0.3996869 0.1597496
## 145 0.8335872            0.3297575 0.4187944 0.1753887
## 146 0.5563548            0.3282537 0.4010129 0.1608113
## 147 0.5587426            0.3350213 0.4054468 0.1643871
## 148 0.6379194            0.3392567 0.4156068 0.1727290
## 149 0.6009640            0.3350690 0.4103699 0.1684034
## 150 0.9561122            0.3359945 0.4308475 0.1856296
## 151 1.0950784            0.3373331 0.4341023 0.1884448
## 152 0.6099999            0.3486897 0.4152002 0.1723912
## 153 0.6352662            0.3450763 0.4178202 0.1745737
## 154 0.8965287            0.3520247 0.4383498 0.1921506
## 155 2.0080321            0.3492056 0.4523629 0.2046322
## 156 2.2874785            0.3443834 0.4548583 0.2068961
## 157 2.1016984            0.3713236 0.4695432 0.2204708
## 158 3.6877740            0.3823314 0.4964431 0.2464558
## 159 2.6333580            0.3756893 0.4771497 0.2276718
## 160 3.9845565            0.3806477 0.4853394 0.2355544
## 
## [160 rows x 6 columns]
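
With the leader identified, the usual next steps are to score the test frame and persist the model. A minimal sketch using standard h2o functions (the save path is hypothetical, not from the original run):

# class probabilities and predicted labels on the held-out test frame
pred<-h2o.predict(auto1@leader,test)
# full binomial metrics on the test frame
perf<-h2o.performance(auto1@leader,newdata=test)
h2o.auc(perf)
# save the leader model to disk (path is illustrative)
h2o.saveModel(auto1@leader,path="C:/Course19/BDE/models",force=TRUE)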

End of H2O

Make sure to shut down H2O

h2o.shutdown(prompt=FALSE)
## [1] TRUE