1 Background

This analysis draws on chapter 7 of the Machine Learning with R book, the caret package manual, and the neuralnet package vignette.

2 Data

2.1 Prepared data

Data preparation is documented in file 00.Rmd.

The Index of Education and Occupation (IEO) is used as the outcome. More information about the index is available from the ABS.

wide_all_p <- read_rds("data/wide_all_p.Rds") %>% 
  sf::st_drop_geometry() %>% 
  # as_tibble() %>% 
  select(IEO, IEO_d, 
         akita:last_col()) %>% 
  rename(outcome = IEO,
         outcome_d = IEO_d)

# tail(names(wide_all_p), n = 1)
# skimr::skim(wide_all_p)
# report::report(wide_all_p)

2.2 Near-zero variance

nzv_metrics <- nearZeroVar(wide_all_p %>% 
                             select(-outcome, -outcome_d), 
                           saveMetrics = TRUE)
nzv <- nearZeroVar(wide_all_p %>% 
                     select(-outcome, -outcome_d))
# offset the indices by 2: the first two columns of wide_all_p are the
# outcome variables, which were excluded when computing nzv above
wide_all_p <- wide_all_p[, -(nzv + 2)]
rm(nzv)
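
The metrics table saved above (nzv_metrics) can be used to see which breeds are flagged; a quick look, assuming dplyr and tibble are available:

# breeds flagged as near-zero variance, with their frequency ratios
nzv_metrics %>% 
  tibble::rownames_to_column("breed") %>% 
  dplyr::filter(nzv) %>% 
  dplyr::arrange(dplyr::desc(freqRatio))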

2.3 Train split

A 75/25 train/test split, stratified on the decile distribution of the outcome:

inTrain <- createDataPartition(
  y = wide_all_p$outcome_d,
  p = .75,
  list = FALSE
)
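
As a quick sanity check on the stratification, the decile proportions among the training indices should mirror the full data set; a sketch, assuming every decile appears on both sides of the split:

# decile proportions in the full data versus the training indices
round(cbind(
  all   = prop.table(table(wide_all_p$outcome_d)),
  train = prop.table(table(wide_all_p$outcome_d[inTrain]))
), 2)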

2.4 Normalize

normalize <- function(x) {
  return((x - min(x)) / (max(x) - min(x)))
}

# apply normalization to entire data frame
wide_all_p <- as.data.frame(lapply(wide_all_p, normalize))
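
The training and testing sets used in the next section are the two halves of this partition; a minimal sketch of how they would be created, assuming the decile column (used only for stratification) is dropped so it does not enter the models as a predictor:

# apply the partition created above and drop the stratification column
training <- wide_all_p[ inTrain, ] %>% select(-outcome_d)
testing  <- wide_all_p[-inTrain, ] %>% select(-outcome_d)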

Outcome distribution checks:

3 Analysis with neuralnet

3.1 Training

set.seed(12345)
m_nn <- neuralnet(formula = outcome ~ ., 
                  data = training, 
                  hidden = c(5, 5))
                    Length Class      Mode    
call                    4  -none-     call    
response              143  -none-     numeric 
covariate           17160  -none-     numeric 
model.list              2  -none-     list    
err.fct                 1  -none-     function
act.fct                 1  -none-     function
linear.output           1  -none-     logical 
data                  121  data.frame list    
exclude                 0  -none-     NULL    
net.result              1  -none-     list    
weights                 1  -none-     list    
generalized.weights     1  -none-     list    
startweights            1  -none-     list    
result.matrix         644  -none-     numeric 
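
This summary is consistent with a 120-5-5-1 network: 143 training observations and 120 breed covariates (143 × 120 = 17,160 covariate values), and a result.matrix of length 644, i.e. (120 + 1) × 5 + (5 + 1) × 5 + (5 + 1) × 1 = 641 weights plus the error, reached.threshold and steps entries.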

3.2 Prediction

p_nn <- predict(m_nn, testing[-1])

Correlation:

cor(p_nn, testing$outcome)
          [,1]
[1,] 0.7536739

MAE between predicted and actual values:

MAE(p_nn, testing$outcome)
[1] 0.106147
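
The error can also be computed by hand (a sketch; MAE() above is assumed to be caret's mean absolute error helper):

# mean absolute error computed directly
mean(abs(p_nn - testing$outcome))
# root mean squared error, for comparison
sqrt(mean((p_nn - testing$outcome)^2))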

Distribution of predicted (red) and actual (green) values:
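
A sketch of how such an overlay could be drawn (assuming ggplot2 is available):

library(ggplot2)

# overlay the densities of predicted (red) and actual (green) values
ggplot() +
  geom_density(aes(x = as.numeric(p_nn)), colour = "red") +
  geom_density(aes(x = testing$outcome), colour = "green") +
  labs(x = "outcome (normalised)", y = "density")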

3.3 Tuning

getModelInfo("neuralnet")
$neuralnet
$neuralnet$label
[1] "Neural Network"

$neuralnet$library
[1] "neuralnet"

$neuralnet$loop
NULL

$neuralnet$type
[1] "Regression"

$neuralnet$parameters
  parameter   class                    label
1    layer1 numeric #Hidden Units in Layer 1
2    layer2 numeric #Hidden Units in Layer 2
3    layer3 numeric #Hidden Units in Layer 3

$neuralnet$grid
function (x, y, len = NULL, search = "grid") 
{
    if (search == "grid") {
        out <- expand.grid(layer1 = ((1:len) * 2) - 1, layer2 = 0, 
            layer3 = 0)
    }
    else {
        out <- data.frame(layer1 = sample(2:20, replace = TRUE, 
            size = len), layer2 = sample(c(0, 2:20), replace = TRUE, 
            size = len), layer3 = sample(c(0, 2:20), replace = TRUE, 
            size = len))
    }
    out
}

$neuralnet$fit
function (x, y, wts, param, lev, last, classProbs, ...) 
{
    colNames <- colnames(x)
    dat <- if (is.data.frame(x)) 
        x
    else as.data.frame(x, stringsAsFactors = TRUE)
    dat$.outcome <- y
    form <- as.formula(paste(".outcome ~", paste(colNames, collapse = "+")))
    if (param$layer1 == 0) 
        stop("the first layer must have at least one hidden unit")
    if (param$layer2 == 0 & param$layer2 > 0) 
        stop("the second layer must have at least one hidden unit if a third layer is specified")
    nodes <- c(param$layer1)
    if (param$layer2 > 0) {
        nodes <- c(nodes, param$layer2)
        if (param$layer3 > 0) 
            nodes <- c(nodes, param$layer3)
    }
    neuralnet::neuralnet(form, data = dat, hidden = nodes, ...)
}

$neuralnet$predict
function (modelFit, newdata, submodels = NULL) 
{
    newdata <- newdata[, modelFit$model.list$variables, drop = FALSE]
    neuralnet::compute(modelFit, covariate = newdata)$net.result[, 
        1]
}

$neuralnet$prob
NULL

$neuralnet$tags
[1] "Neural Network"

$neuralnet$sort
function (x) 
x[order(x$layer1, x$layer2, x$layer3), ]

modelLookup("neuralnet")
      model parameter                    label forReg forClass probModel
1 neuralnet    layer1 #Hidden Units in Layer 1   TRUE    FALSE     FALSE
2 neuralnet    layer2 #Hidden Units in Layer 2   TRUE    FALSE     FALSE
3 neuralnet    layer3 #Hidden Units in Layer 3   TRUE    FALSE     FALSE

Using a grid of tuning parameters to explore:

grid_tu <-  expand.grid(layer1  = seq(5, 20, 1),
                        layer2  = seq(5, 20, 1),
                        layer3  = (1))

nrow(grid_tu)
[1] 256

p_load(doParallel)

cl <- makePSOCKcluster(parallel::detectCores())
registerDoParallel(cl)

set.seed(12345)
m_nn_tu <- train(outcome ~ .,
                 method = "neuralnet",
                 data = training,
                 # weights = training_weights, 
                 metric = "RMSE",
                 # metric = "Rsquared",
                 tuneGrid = grid_tu)

stopCluster(cl)
p_unload(doParallel)
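
The tuning outcome can be summarised compactly with standard caret accessors before looking at the full final model:

# best combination of hidden-layer sizes found during tuning
m_nn_tu$bestTune

# resampled RMSE across the tuning grid
plot(m_nn_tu)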

m_nn_tu$finalModel
$call
neuralnet::neuralnet(formula = form, data = dat, hidden = nodes)

$response
      .outcome
X2   0.7692308
X4   0.4813896
X5   0.7741935
X7   0.2655087
X8   0.8461538
X9   0.8660050
X10  0.6129032
X11  0.9007444
X12  0.4094293
X13  0.8263027
X16  0.7667494
X18  0.4863524
X19  0.7593052
X20  0.4789082
X21  0.7543424
X22  0.5657568
X23  0.8188586
X24  0.9057072
X25  0.8734491
X26  0.5682382
X27  0.6823821
X28  0.6228288
X29  0.8238213
X30  0.6674938
X31  0.6848635
X32  0.6650124
X33  0.7146402
X34  0.6476427
X36  0.8933002
X37  0.9255583
X38  0.5583127
X39  0.6203474
X40  0.5359801
X41  0.7866005
X42  0.5359801
X43  0.8114144
X49  0.5607940
X50  0.3200993
X51  0.8188586
X52  0.7965261
X53  0.6550868
X54  0.1116625
X55  0.7493797
X57  0.8064516
X61  0.5310174
X62  0.7568238
X63  0.7022333
X64  0.5781638
X65  0.7642680
X66  0.8908189
X67  0.8511166
X68  0.7791563
X69  0.7568238
X71  0.8734491
X73  0.3920596
X74  0.8064516
X75  0.8883375
X76  0.8089330
X77  0.7270471
X78  0.7617866
X79  0.0000000
X80  0.8684864
X81  0.6302730
X82  0.7096774
X84  0.7816377
X85  0.6799007
X86  0.7171216
X87  0.8908189
X88  0.8610422
X89  0.8808933
X90  0.5682382
X91  0.7022333
X92  0.5682382
X93  0.6277916
X94  0.3821340
 [ reached getOption("max.print") -- omitted 68 rows ]

$covariate
          akita alaskan_malamute american_staffordshire_terrier
     australian_cattle_dog australian_kelpie australian_koolie
     australian_shepherd australian_silky_terrier australian_terrier
          beagle belgian_shepherd bichon_frise border_collie boston_terrier
           boxer british_bulldog   bull_arab bull_terrier bullmastiff
     cairn_terrier cavalier_king_charles_spaniel  chihuahua cocker_spaniel
     curly_coated_retriever  dachshund  dalmatian   dobermann dogue_de_bordeaux
     fox_terrier french_bulldog german_shepherd golden_retriever great_dane
     hungarian_vizsla irish_wolfhound jack_russell_terrier japanese_spitz
       keeshond king_charles_spaniel labrador_retriever lhasa_apso   maltese
     maremma_sheepdog    mastiff   papillon pomeranian     poodle poodle_toy
            pug rhodesian_ridgeback rottweiler    samoyed  schnauzer   shar_pei
      shiba_inu   shih_tzu siberian_husky staffordshire_bull_terrier
     tenterfield_terrier tibetan_spaniel tibetan_terrier welsh_corgi
     west_highland_white_terrier    whippet yorkshire_terrier airedale_terrier
     bearded_collie english_springer_spaniel english_toy_terrier
     german_shorthaired_pointer   greyhound irish_setter italian_greyhound
     old_english_sheepdog  pekingese rough_collie afghan_hound
     australian_stumpy_tail_cattle_dog    basenji basset_hound
     bedlington_terrier border_terrier      borzoi   brittany
     chinese_crested_dog  chow_chow finnish_lapphund german_spitz
     griffon_bruxellois irish_terrier italian_corso_dog    lowchen
     neapolitan_mastiff poodle_miniature     saluki scottish_terrier
     soft_coated_wheaten_terrier weimaraner poodle_standard
     chihuahua_smooth_coat  foxhound pharaoh_hound    pointer lagotto_romagnolo
          puli smooth_collie stag_hound affenpinscher flat_coated_retriever
     cane_corso english_setter german_pinscher   harrier
     irish_red_and_white_setter lakeland_terrier bloodhound
     chesapeake_bay_retriever glen_of_imaal_terrier eurasier swedish_lapphund
 [ reached getOption("max.print") -- omitted 143 rows ]

$model.list
$model.list$response
[1] ".outcome"

$model.list$variables
 [1] "akita"                          "alaskan_malamute"              
 [3] "american_staffordshire_terrier" "australian_cattle_dog"         
 [5] "australian_kelpie"              "australian_koolie"             
 [7] "australian_shepherd"            "australian_silky_terrier"      
 [9] "australian_terrier"             "beagle"                        
[11] "belgian_shepherd"               "bichon_frise"                  
[13] "border_collie"                  "boston_terrier"                
[15] "boxer"                          "british_bulldog"               
[17] "bull_arab"                      "bull_terrier"                  
[19] "bullmastiff"                    "cairn_terrier"                 
[21] "cavalier_king_charles_spaniel"  "chihuahua"                     
[23] "cocker_spaniel"                 "curly_coated_retriever"        
[25] "dachshund"                      "dalmatian"                     
[27] "dobermann"                      "dogue_de_bordeaux"             
[29] "fox_terrier"                    "french_bulldog"                
[31] "german_shepherd"                "golden_retriever"              
[33] "great_dane"                     "hungarian_vizsla"              
[35] "irish_wolfhound"                "jack_russell_terrier"          
[37] "japanese_spitz"                 "keeshond"                      
[39] "king_charles_spaniel"           "labrador_retriever"            
[41] "lhasa_apso"                     "maltese"                       
[43] "maremma_sheepdog"               "mastiff"                       
[45] "papillon"                       "pomeranian"                    
[47] "poodle"                         "poodle_toy"                    
[49] "pug"                            "rhodesian_ridgeback"           
[51] "rottweiler"                     "samoyed"                       
[53] "schnauzer"                      "shar_pei"                      
[55] "shiba_inu"                      "shih_tzu"                      
[57] "siberian_husky"                 "staffordshire_bull_terrier"    
[59] "tenterfield_terrier"            "tibetan_spaniel"               
[61] "tibetan_terrier"                "welsh_corgi"                   
[63] "west_highland_white_terrier"    "whippet"                       
[65] "yorkshire_terrier"              "airedale_terrier"              
[67] "bearded_collie"                 "english_springer_spaniel"      
[69] "english_toy_terrier"            "german_shorthaired_pointer"    
[71] "greyhound"                      "irish_setter"                  
[73] "italian_greyhound"              "old_english_sheepdog"          
[75] "pekingese"                     
 [ reached getOption("max.print") -- omitted 45 entries ]


$err.fct
function (x, y) 
{
    1/2 * (y - x)^2
}
<bytecode: 0x000000002be873c0>
<environment: 0x000000002b585338>
attr(,"type")
[1] "sse"

$act.fct
function (x) 
{
    1/(1 + exp(-x))
}
<bytecode: 0x000000002cd0e5a8>
<environment: 0x000000002b584ea0>
attr(,"type")
[1] "logistic"

$linear.output
[1] TRUE

$data
     akita alaskan_malamute american_staffordshire_terrier
     australian_cattle_dog australian_kelpie australian_koolie
     australian_shepherd australian_silky_terrier australian_terrier beagle
     belgian_shepherd bichon_frise border_collie boston_terrier boxer
     british_bulldog bull_arab bull_terrier bullmastiff cairn_terrier
     cavalier_king_charles_spaniel chihuahua cocker_spaniel
     curly_coated_retriever dachshund dalmatian dobermann dogue_de_bordeaux
     fox_terrier french_bulldog german_shepherd golden_retriever great_dane
     hungarian_vizsla irish_wolfhound jack_russell_terrier japanese_spitz
     keeshond king_charles_spaniel labrador_retriever lhasa_apso maltese
     maremma_sheepdog mastiff papillon pomeranian poodle poodle_toy pug
     rhodesian_ridgeback rottweiler samoyed schnauzer shar_pei shiba_inu
     shih_tzu siberian_husky staffordshire_bull_terrier tenterfield_terrier
     tibetan_spaniel tibetan_terrier welsh_corgi west_highland_white_terrier
     whippet yorkshire_terrier airedale_terrier bearded_collie
     english_springer_spaniel english_toy_terrier german_shorthaired_pointer
     greyhound irish_setter italian_greyhound old_english_sheepdog pekingese
     rough_collie afghan_hound australian_stumpy_tail_cattle_dog basenji
     basset_hound bedlington_terrier border_terrier borzoi brittany
     chinese_crested_dog chow_chow finnish_lapphund german_spitz
     griffon_bruxellois irish_terrier italian_corso_dog lowchen
     neapolitan_mastiff poodle_miniature saluki scottish_terrier
     soft_coated_wheaten_terrier weimaraner poodle_standard
     chihuahua_smooth_coat foxhound pharaoh_hound pointer lagotto_romagnolo
     puli smooth_collie stag_hound affenpinscher flat_coated_retriever
     cane_corso english_setter german_pinscher harrier
     irish_red_and_white_setter lakeland_terrier bloodhound
     chesapeake_bay_retriever glen_of_imaal_terrier eurasier swedish_lapphund
     .outcome
 [ reached 'max' / getOption("max.print") -- omitted 143 rows ]

$exclude
NULL

$net.result
$net.result[[1]]
           [,1]
X2   0.76935290
X4   0.48041765
X5   0.77226314
X7   0.26297447
X8   0.84599645
X9   0.86629020
X10  0.61254075
X11  0.90267586
X12  0.41007109
X13  0.82669373
X16  0.76598785
X18  0.48659652
X19  0.75955938
X20  0.47846816
X21  0.75404847
X22  0.56532409
X23  0.81886588
X24  0.90425618
X25  0.87328440
X26  0.56818358
X27  0.68204789
X28  0.62228059
X29  0.82288753
X30  0.66772684
X31  0.68525237
X32  0.66545222
X33  0.71356228
X34  0.64829924
X36  0.89335016
X37  0.92597047
X38  0.55833723
X39  0.61960897
X40  0.53572351
X41  0.78658394
X42  0.53569120
X43  0.81185121
X49  0.56029708
X50  0.31998121
X51  0.81768768
X52  0.79663725
X53  0.65537236
X54  0.10736259
X55  0.74931657
X57  0.80609599
X61  0.53016602
X62  0.75696000
X63  0.70224230
X64  0.57797744
X65  0.76475953
X66  0.89243212
X67  0.85232025
X68  0.77866213
X69  0.75697656
X71  0.87381500
X73  0.39182837
X74  0.80642300
X75  0.88864966
X76  0.81006150
X77  0.72656244
X78  0.76177380
X79  0.04359355
X80  0.86825562
X81  0.63021914
X82  0.70973688
X84  0.78218618
X85  0.67973830
X86  0.71733444
X87  0.88983401
X88  0.86318132
X89  0.88083832
X90  0.56739578
X91  0.70265244
X92  0.54802000
X93  0.62708528
X94  0.38132649
 [ reached getOption("max.print") -- omitted 68 rows ]


$weights
$weights[[1]]
$weights[[1]][[1]]
                 [,1]         [,2]          [,3]          [,4]         [,5]
  [1,]  -1.2741675193   1.42007473   0.494706615   0.007111912   0.55515344
  [2,]   0.9987764906   2.70552810  -0.204567247  -0.941671023  -0.96210599
  [3,]  -0.0868822089  -2.63833956  -0.645305685  -1.530518031  -0.17898264
               [,6]          [,7]         [,8]         [,9]       [,10]
  [1,] -0.902619612  -0.966716127  -1.43036305  -0.67126390 -0.84270759
  [2,]  1.719598845  -0.944053362  -0.76335838   0.32655013 -0.47380641
  [3,]  0.896008596  -1.590313168   4.66007176   0.01619270 -2.79550350
             [,11]        [,12]        [,13]         [,14]          [,15]
  [1,] -0.30508334  -0.09290497  -1.59118951  -1.129300733  1.10301479623
  [2,]  1.25505611   0.32796637  -1.28610655   0.914586433 -0.35987319117
  [3,]  4.26769209   0.90872086  -0.23539913   0.176734402 -0.88565272912
             [,16]        [,17]        [,18]        [,19]
  [1,] -0.61628710  0.536522411 -0.762182358   0.68183099
  [2,]  2.12284954 -0.399235461  0.589891326  -0.40985990
  [3,] -1.52635570  1.564294889  1.372110481  -0.32420972
 [ reached getOption("max.print") -- omitted 118 rows ]

$weights[[1]][[2]]
             [,1]        [,2]       [,3]        [,4]       [,5]       [,6]
 [1,]  1.27762880 -0.71452088  0.7091203 -0.09922091 -0.0443591 -0.7133035
 [2,]  0.14285715  2.09315510  0.6322392  1.63027994 -0.2379245 -1.2635440
 [3,]  0.38736038  0.70106856  1.5257967 -0.07315349 -0.1563183  0.2808511
 [4,] -1.23428780  2.09277467  2.2638332 -0.40769557  0.1913231 -0.6772170
 [5,]  0.37523540 -0.92206304 -1.9827721 -0.39534056  2.0338784  2.1412629
 [6,] -0.77291606 -0.70976293 -0.1430154 -1.29532105 -1.1680000  0.5395942
 [7,] -0.89716454  0.68911385 -0.1028997 -0.98622558  0.3322301  1.0370008
 [8,] -0.04968544 -0.30555251  0.2638954  2.72169602 -0.5708127 -4.2855313
 [9,]  0.84727263  1.41951090 -0.2839979  0.16553011 -1.0625410  0.7542633
              [,7]         [,8]
 [1,]  0.047597799 -0.285050636
 [2,] -0.233946440 -0.978143607
 [3,]  2.465531540  1.890811609
 [4,] -1.055799149  0.485839303
 [5,]  0.337313911  0.239623108
 [6,] -0.922341336 -0.192171148
 [7,] -1.326534521  1.373297366
 [8,]  1.380001559 -1.725396906
 [9,]  0.526595992  0.719395205
 [ reached getOption("max.print") -- omitted 11 rows ]

$weights[[1]][[3]]
            [,1]
 [1,] -1.0362525
 [2,]  0.9167287
 [3,]  0.3278804
 [4,]  0.8368347
 [5,] -0.5865973
 [6,] -0.8823048
 [7,] -1.7243568
 [8,]  1.0922883
 [9,] -1.5369880

$weights[[1]][[4]]
          [,1]
[1,] 0.0267439
[2,] 1.1505758



$generalized.weights
$generalized.weights[[1]]
             [,1]         [,2]        [,3]         [,4]          [,5]
              [,6]         [,7]           [,8]         [,9]        [,10]
             [,11]        [,12]         [,13]       [,14]        [,15]
            [,16]        [,17]        [,18]       [,19]        [,20]
            [,21]         [,22]        [,23]         [,24]        [,25]
           [,26]        [,27]        [,28]        [,29]        [,30]
             [,31]         [,32]        [,33]      [,34]        [,35]
            [,36]        [,37]        [,38]        [,39]        [,40]
            [,41]        [,42]        [,43]        [,44]        [,45]
            [,46]        [,47]        [,48]        [,49]        [,50]
            [,51]        [,52]        [,53]        [,54]        [,55]
              [,56]        [,57]        [,58]        [,59]         [,60]
            [,61]        [,62]         [,63]           [,64]        [,65]
             [,66]          [,67]        [,68]        [,69]        [,70]
             [,71]        [,72]        [,73]        [,74]        [,75]
             [,76]        [,77]         [,78]        [,79]        [,80]
            [,81]        [,82]       [,83]        [,84]        [,85]
            [,86]        [,87]       [,88]       [,89]        [,90]
            [,91]        [,92]         [,93]        [,94]        [,95]
            [,96]         [,97]       [,98]        [,99]        [,100]
             [,101]      [,102]      [,103]       [,104]       [,105]
          [,106]        [,107]        [,108]        [,109]       [,110]
          [,111]       [,112]        [,113]       [,114]       [,115]
           [,116]      [,117]        [,118]       [,119]       [,120]
 [ reached getOption("max.print") -- omitted 143 rows ]


$startweights
$startweights[[1]]
$startweights[[1]][[1]]
              [,1]        [,2]         [,3]         [,4]         [,5]
  [1,] -1.05785321  1.43957748  0.526903567 -0.101179773  0.402021627
  [2,]  1.23425999  0.69095292 -0.033672064 -0.775396343 -1.581566635
  [3,]  0.41468242 -0.80063762 -0.731241918  0.978394660 -0.470024336
               [,6]         [,7]        [,8]        [,9]       [,10]
  [1,] -0.815920609 -0.727747087 -1.58281801 -0.56796011 -0.70581866
  [2,]  1.075967367 -0.739765937 -0.06152844 -1.80168875 -0.27843906
  [3,]  0.669688163 -0.832924847 -0.04109916 -0.94806523 -2.61081314
             [,11]        [,12]       [,13]        [,14]        [,15]
  [1,] -0.17065720  0.012234332 -1.60095696 -1.092405590  1.804923882
  [2,]  0.77369165  0.272011712 -0.52573199  0.841168181 -0.477936977
  [3,]  0.33732652  0.648830301 -0.16141108  0.116882588 -0.758861265
              [,16]        [,17]       [,18]        [,19]
  [1,] -0.608959198  0.427403835 -0.63241579  1.004217589
  [2,]  2.129842889 -0.541306039  0.67724819 -0.723025803
  [3,] -1.809164270  1.043674590  0.65152301 -0.237578699
 [ reached getOption("max.print") -- omitted 118 rows ]

$startweights[[1]][[2]]
              [,1]       [,2]        [,3]        [,4]        [,5]        [,6]
 [1,]  1.385681795 -0.4781930  0.72752598 -0.12856411  0.30263966 -0.74612449
 [2,]  0.337695657  1.4417479  0.78519480  1.48246854 -0.33197413 -0.94749677
 [3,]  0.154326240  0.8794019  0.88928970  0.55309981  0.19736072  0.23086537
 [4,] -1.057863366  2.3478798  2.56082676 -0.55088600  0.32065227  0.14211516
 [5,]  0.567183386 -0.6132702 -1.40693483 -0.58000795  2.07865508  0.21785198
 [6,] -0.595910996 -0.4097499 -0.04051478 -1.40447435 -0.85010207  0.42434673
 [7,] -0.678725188  1.1217317  0.08134582 -1.16992164  0.45117245  0.85313473
 [8,]  0.140069575 -0.3436495  0.25523346  2.71456665 -0.13170251 -0.40303210
 [9,]  1.031700355  1.7288969 -0.17522022 -0.01488518 -0.89131986  0.61595211
              [,7]        [,8]
 [1,] -0.298356951 -0.39289665
 [2,] -0.615882688 -1.09896342
 [3,]  2.101207086  1.78365184
 [4,] -0.949717129  0.31955619
 [5,]  0.182486843  0.05609069
 [6,] -0.877411743 -0.33660385
 [7,] -1.436145324  1.05954016
 [8,]  0.479641295 -1.83136391
 [9,]  0.342813793  0.53203670
 [ reached getOption("max.print") -- omitted 11 rows ]

$startweights[[1]][[3]]
            [,1]
 [1,] -1.0067554
 [2,]  0.9462164
 [3,]  0.2251476
 [4,]  0.6069886
 [5,] -0.5570635
 [6,] -0.7752098
 [7,] -1.6029378
 [8,]  0.4029518
 [9,] -1.3996858

$startweights[[1]][[4]]
           [,1]
[1,]  0.1924474
[2,] -0.2389220



$result.matrix
                                                          [,1]
error                                            0.00122331533
reached.threshold                                0.00971819045
steps                                          945.00000000000
Intercept.to.1layhid1                           -1.27416751930
akita.to.1layhid1                                0.99877649058
alaskan_malamute.to.1layhid1                    -0.08688220887
american_staffordshire_terrier.to.1layhid1       0.63322348242
australian_cattle_dog.to.1layhid1                1.17450986711
australian_kelpie.to.1layhid1                    2.17293229473
australian_koolie.to.1layhid1                    0.72381249535
australian_shepherd.to.1layhid1                 -1.17585665688
australian_silky_terrier.to.1layhid1            -0.48475568623
australian_terrier.to.1layhid1                   0.98334875760
beagle.to.1layhid1                              -1.67395654925
belgian_shepherd.to.1layhid1                    -0.20247782419
bichon_frise.to.1layhid1                         0.98113267833
border_collie.to.1layhid1                       -0.49839268092
boston_terrier.to.1layhid1                      -0.00711430146
boxer.to.1layhid1                                1.70045982991
british_bulldog.to.1layhid1                      0.09227696784
bull_arab.to.1layhid1                            4.04889707241
bull_terrier.to.1layhid1                        -2.58626871526
bullmastiff.to.1layhid1                          0.46342294350
cairn_terrier.to.1layhid1                        0.89587762109
cavalier_king_charles_spaniel.to.1layhid1        0.08452187669
chihuahua.to.1layhid1                            0.44562423429
cocker_spaniel.to.1layhid1                       0.17735275355
curly_coated_retriever.to.1layhid1               1.32771032755
dachshund.to.1layhid1                           -1.51639513376
dalmatian.to.1layhid1                            0.14258156088
dobermann.to.1layhid1                            2.76938168261
dogue_de_bordeaux.to.1layhid1                    0.61584819482
fox_terrier.to.1layhid1                         -1.47100158996
french_bulldog.to.1layhid1                      -1.65795929639
german_shepherd.to.1layhid1                     -0.90860114718
golden_retriever.to.1layhid1                    -1.13807531404
great_dane.to.1layhid1                           0.17376357128
hungarian_vizsla.to.1layhid1                     2.97120318249
irish_wolfhound.to.1layhid1                      0.19342753245
jack_russell_terrier.to.1layhid1                 0.29940108525
japanese_spitz.to.1layhid1                       0.94305445358
keeshond.to.1layhid1                             0.32388550773
king_charles_spaniel.to.1layhid1                 1.26178811421
labrador_retriever.to.1layhid1                  -1.22104311986
lhasa_apso.to.1layhid1                          -1.38065594011
maltese.to.1layhid1                             -1.01253193376
maremma_sheepdog.to.1layhid1                    -0.02799558176
mastiff.to.1layhid1                             -0.59343954357
papillon.to.1layhid1                            -0.25867462270
pomeranian.to.1layhid1                          -0.07400329094
poodle.to.1layhid1                              -0.31996064333
poodle_toy.to.1layhid1                          -2.78962346274
pug.to.1layhid1                                  2.36721710791
rhodesian_ridgeback.to.1layhid1                 -1.06779180711
rottweiler.to.1layhid1                           1.39356939432
samoyed.to.1layhid1                             -0.76026490829
schnauzer.to.1layhid1                           -0.99484204918
shar_pei.to.1layhid1                            -1.00974518920
shiba_inu.to.1layhid1                           -1.16967636291
shih_tzu.to.1layhid1                            -0.58470207475
siberian_husky.to.1layhid1                       1.84533563297
staffordshire_bull_terrier.to.1layhid1          -0.53113752210
tenterfield_terrier.to.1layhid1                  0.26374302949
tibetan_spaniel.to.1layhid1                      0.47688934509
tibetan_terrier.to.1layhid1                      1.37054623002
welsh_corgi.to.1layhid1                         -0.84508488343
west_highland_white_terrier.to.1layhid1         -0.13521436973
whippet.to.1layhid1                             -1.13902925069
yorkshire_terrier.to.1layhid1                   -0.09471116372
airedale_terrier.to.1layhid1                     0.51144725123
bearded_collie.to.1layhid1                      -5.07904427150
english_springer_spaniel.to.1layhid1             1.91470186729
english_toy_terrier.to.1layhid1                 -0.41626045280
german_shorthaired_pointer.to.1layhid1          -0.17804979983
greyhound.to.1layhid1                           -1.20900218367
 [ reached getOption("max.print") -- omitted 2398 rows ]

$xNames
 [1] "akita"                          "alaskan_malamute"              
 [3] "american_staffordshire_terrier" "australian_cattle_dog"         
 [5] "australian_kelpie"              "australian_koolie"             
 [7] "australian_shepherd"            "australian_silky_terrier"      
 [9] "australian_terrier"             "beagle"                        
[11] "belgian_shepherd"               "bichon_frise"                  
[13] "border_collie"                  "boston_terrier"                
[15] "boxer"                          "british_bulldog"               
[17] "bull_arab"                      "bull_terrier"                  
[19] "bullmastiff"                    "cairn_terrier"                 
[21] "cavalier_king_charles_spaniel"  "chihuahua"                     
[23] "cocker_spaniel"                 "curly_coated_retriever"        
[25] "dachshund"                      "dalmatian"                     
[27] "dobermann"                      "dogue_de_bordeaux"             
[29] "fox_terrier"                    "french_bulldog"                
[31] "german_shepherd"                "golden_retriever"              
[33] "great_dane"                     "hungarian_vizsla"              
[35] "irish_wolfhound"                "jack_russell_terrier"          
[37] "japanese_spitz"                 "keeshond"                      
[39] "king_charles_spaniel"           "labrador_retriever"            
[41] "lhasa_apso"                     "maltese"                       
[43] "maremma_sheepdog"               "mastiff"                       
[45] "papillon"                       "pomeranian"                    
[47] "poodle"                         "poodle_toy"                    
[49] "pug"                            "rhodesian_ridgeback"           
[51] "rottweiler"                     "samoyed"                       
[53] "schnauzer"                      "shar_pei"                      
[55] "shiba_inu"                      "shih_tzu"                      
[57] "siberian_husky"                 "staffordshire_bull_terrier"    
[59] "tenterfield_terrier"            "tibetan_spaniel"               
[61] "tibetan_terrier"                "welsh_corgi"                   
[63] "west_highland_white_terrier"    "whippet"                       
[65] "yorkshire_terrier"              "airedale_terrier"              
[67] "bearded_collie"                 "english_springer_spaniel"      
[69] "english_toy_terrier"            "german_shorthaired_pointer"    
[71] "greyhound"                      "irish_setter"                  
[73] "italian_greyhound"              "old_english_sheepdog"          
[75] "pekingese"                     
 [ reached getOption("max.print") -- omitted 45 entries ]

$problemType
[1] "Regression"

$tuneValue
    layer1 layer2 layer3
228     19      8      1

$obsLevels
[1] NA

$param
list()

attr(,"class")
[1] "nn"

p_nn_tu <- predict(m_nn_tu, testing[-1])

Correlation:

cor(p_nn_tu, testing$outcome)
[1] 0.6215838

MAE between predicted and actual values:

MAE(p_nn_tu, testing$outcome)
[1] 0.127233

Distribution of predicted (red) and actual (green) values: