Please note: if the code does not work, it is sometimes because the future_map() function is unstable. If this happens, please use map() instead and run the (otherwise parallel) computation serially inside map().
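For example, a call of the following form can be made serial without changing the per-element computation (response_names and fit_one() are hypothetical stand-ins for the objects used later in this analysis):

# parallel version, which may be unstable on some setups:
# results <- furrr::future_map(response_names, ~ fit_one(.x))
# serial fallback with identical semantics:
# results <- purrr::map(response_names, ~ fit_one(.x))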

rm(list = ls(all.names = TRUE)) # clear all objects, including hidden objects
gc() # free up memory and report the memory usage
##          used (Mb) gc trigger (Mb) max used (Mb)
## Ncells 514944 27.6    1149345 61.4   643845 34.4
## Vcells 971892  7.5    8388608 64.0  1649067 12.6

1 Data Preparation

1.1 Loading libraries

The following libraries and default settings were used during the analysis:

options(scipen = 999)

# if (!requireNamespace("BiocManager", quietly = TRUE))
#     install.packages("BiocManager")
# BiocManager::install("survcomp")

library(tidyverse)
library(tidymodels)
library("cowplot")
library("vip")
library(ggdist)
library(ggplot2)
##parallel map
library("eNetXplorer")
library(purrr)

#library(parallel)  
library("furrr")
# multiprocess is deprecated in recent versions of the future package;
# multisession is the stable replacement
future::plan(future::multisession, workers = 16)
theme_set(theme_bw() + theme(panel.grid = element_blank()))

1.2 Loading the data from ABCD 2.01

We first loaded all of the relevant data files (not shown here as they refer to local directories):

MRFINDINGS01 <-read.csv(paste0(dataFold, "ABCD_MRFINDINGS01_DATA_TABLE.csv")) %>% 
  filter(EVENTNAME =="baseline_year_1_arm_1") 
MRIQCRP102 <-read.csv(paste0(dataFold, "MRIQCRP102_DATA_TABLE.csv")) %>% 
  filter(EVENTNAME =="baseline_year_1_arm_1") 
MRIQCRP202 <-read.csv(paste0(dataFold, "MRIQCRP202_DATA_TABLE.csv")) %>% 
  filter(EVENTNAME =="baseline_year_1_arm_1") 
MRIQCRP302 <-read.csv(paste0(dataFold, "MRIQCRP302_DATA_TABLE.csv")) %>% 
  filter(EVENTNAME =="baseline_year_1_arm_1") 
FREESQC01 <-read.csv(paste0(dataFold, "FREESQC01_DATA_TABLE.csv")) %>% 
  filter(EVENTNAME =="baseline_year_1_arm_1") 
DMRIQC01 <-read.csv(paste0(dataFold, "DMRIQC01_DATA_TABLE.csv")) %>% 
  filter(EVENTNAME =="baseline_year_1_arm_1") 
NBackBeh <-read.csv(paste0(dataFold, "ABCD_MRINBACK02_DATA_TABLE.csv")) %>% 
  filter(EVENTNAME =="baseline_year_1_arm_1") 
NBackAparc <-read.csv(paste0(dataFold, "NBACK_BWROI02_DATA_TABLE.csv")) %>% 
  filter(EVENTNAME =="baseline_year_1_arm_1") 
NbackAsegDest <-read.csv(paste0(manipuFold, "NbackDestAsegReadableGgseg3d.csv")) 
# NbackAsegDestR1 <-read.csv(paste0(manipuFold, "NbackDestAsegReadableGgseg3dRunOne.csv"))
# NbackAsegDestR2 <-read.csv(paste0(manipuFold, "NbackDestAsegReadableGgseg3dRunTwo.csv")) 
# tbl_df() is deprecated in current dplyr; as_tibble() is the drop-in replacement
MRIinfo <- as_tibble(read.csv(paste0(dataFold, "ABCD_MRI01_DATA_TABLE.csv"))) %>% 
  filter(EVENTNAME =="baseline_year_1_arm_1") 
Siteinfo <- as_tibble(read.csv(paste0(dataFold, "ABCD_LT01_DATA_TABLE.csv"))) %>% 
  filter(EVENTNAME =="baseline_year_1_arm_1") 
NIH_TB <- as_tibble(read.csv(paste0(dataFold, "ABCD_TBSS01_DATA_TABLE.csv"))) %>% 
  filter(EVENTNAME =="baseline_year_1_arm_1") 
LittleMan <- as_tibble(read.csv(paste0(dataFold, "LMTP201_DATA_TABLE.csv"))) %>% 
  filter(EVENTNAME =="baseline_year_1_arm_1") 
Pearson <- as_tibble(read.csv(paste0(dataFold, "ABCD_PS01_DATA_TABLE.csv"))) %>% 
  filter(EVENTNAME =="baseline_year_1_arm_1")

short_names <- as_tibble(read.csv(paste0(anotherFold, "ShortNames_all.csv")))

short_names_two_lines <- as_tibble(read_csv(paste0(anotherFold, "ShortNames_all_two_lines_1_dec_2021_2.csv")))

MRIQcAll <- plyr::join_all(list(MRFINDINGS01,MRIQCRP102,
                          MRIQCRP202,MRIQCRP302,FREESQC01,DMRIQC01,
                          NBackBeh,NBackAparc,NbackAsegDest,MRIinfo,Siteinfo,NIH_TB,LittleMan,Pearson), 
                          by='SUBJECTKEY', type='full')

MRIQcAll <- MRIQcAll[,!duplicated(colnames(MRIQcAll))]

1.3 Quality control (QC)

Next, we included only the participants that passed the following QC:

MRIQcAll$NoIncidental <- ifelse((MRIQcAll$MRIF_SCORE== 3 | 
                                   MRIQcAll$MRIF_SCORE== 4 |
                                   MRIQcAll$MRIF_HYDROCEPHALUS == "yes"|         
                                   MRIQcAll$MRIF_HERNIATION == "yes"), 0, 1)
MRIQcAll %>% count(NoIncidental)
##   NoIncidental     n
## 1            0   451
## 2            1 11359
## 3           NA    65
MRIQcAll %>% count(IQC_T1_OK_SER)
##   IQC_T1_OK_SER     n
## 1             0    63
## 2             1 10553
## 3             2   870
## 4             3    97
## 5            NA   292
MRIQcAll %>% count(FSQC_QC)
##   FSQC_QC     n
## 1       0   462
## 2       1 11076
## 3      NA   337
MRIQcAll$T1FreeSurferQCOk <- ifelse((MRIQcAll$IQC_T1_OK_SER > 0 & 
                                       MRIQcAll$FSQC_QC == 1), 1, 0)

count(MRIQcAll,T1FreeSurferQCOk)
##   T1FreeSurferQCOk     n
## 1                0   524
## 2                1 11004
## 3               NA   347
MRIQcAll %>% count(IQC_NBACK_OK_SER>0)
##   IQC_NBACK_OK_SER > 0     n
## 1                FALSE   143
## 2                 TRUE 10045
## 3                   NA  1687
MRIQcAll %>% count(TFMRI_NBACK_BEH_PERFORMFLAG==1)
##   TFMRI_NBACK_BEH_PERFORMFLAG == 1    n
## 1                            FALSE 1464
## 2                             TRUE 8004
## 3                               NA 2407
MRIQcAll %>% count(TFMRI_NBACK_ALL_BETA_DOF>200)
##   TFMRI_NBACK_ALL_BETA_DOF > 200    n
## 1                          FALSE   33
## 2                           TRUE 8821
## 3                             NA 3021
MRIQcAll$NbackBehDofOk <- ifelse((MRIQcAll$IQC_NBACK_OK_SER>0 &
                                    MRIQcAll$TFMRI_NBACK_BEH_PERFORMFLAG ==1 &
                                    MRIQcAll$TFMRI_NBACK_ALL_BETA_DOF>200), 1, 0)
count(MRIQcAll,NbackBehDofOk)
##   NbackBehDofOk    n
## 1             0 1602
## 2             1 7439
## 3            NA 2834
MRIQcAll$AllNbackQc <- ifelse((MRIQcAll$NoIncidental == 1 & 
                                 MRIQcAll$T1FreeSurferQCOk == 1 & 
                                 MRIQcAll$NbackBehDofOk == 1), 1, 0)
count(MRIQcAll,AllNbackQc)
##   AllNbackQc    n
## 1          0 2399
## 2          1 6947
## 3         NA 2529
Nback.QCed <- MRIQcAll %>% filter(AllNbackQc == 1)

1.3.1 Remove Philips

An issue was reported with the Philips scanners, and it was recommended that the data from these scanners be dropped. We did so:

# remove Philips and keep behavioral performance during fMRI
Nback.QCedNoPhil <- Nback.QCed %>%
  filter(MRI_INFO_MANUFACTURER != 'Philips Medical Systems') 

# check how many variables are in X2backVS0back and list the ROIs
Nback.2backVS0back <- Nback.QCedNoPhil %>% select(starts_with("X2backvs0back"))
# colnames(Nback.2backVS0back)

1.4 Create names for plotting

Here is a list of the response variable names used in the later analysis. Both short and long names are used in plotting.

Resp_Var <- c('TFMRI_NB_ALL_BEH_C2B_RATE',
              "NIHTBX_PICVOCAB_UNCORRECTED", 
              "NIHTBX_FLANKER_UNCORRECTED",
              "NIHTBX_LIST_UNCORRECTED",
              "NIHTBX_CARDSORT_UNCORRECTED",
              "NIHTBX_PATTERN_UNCORRECTED",
              "NIHTBX_PICTURE_UNCORRECTED",
              "NIHTBX_READING_UNCORRECTED",
              "LMT_SCR_PERC_CORRECT",
              "PEA_RAVLT_LD_TRIAL_VII_TC",
              "PEA_WISCV_TRS")
resp_var_plotting_long <- c("2-back working memory",
  "Picture vocabulary test",
  "Flanker test",
  "List sorting working memory",
  "Dimensional change card sort test",
  "Pattern comparison processing speed test",
  "Picture sequence memory test",
  "Oral reading recognition test",
  "Little man task correct percentage",
  "RAVLT long delay trial VII total correct",
  "WISC-V matrix reasoning total raw score"
)
resp_var_plotting_short <- c("2-back Work Mem",
                             "Pic Vocab",
                             "Flanker",
                             "List Work Mem",
                             "Card Sort", # "Cog Flex",
                             "Pattern Speed",
                             "Seq Memory",
                             "Reading Recog",
                             "Little Man",
                             "Audi Verbal",
                             "Matrix Reason")

resp_var_plotting <- tibble("response" = Resp_Var,
                            "longer_name" = resp_var_plotting_long,
                            "short_name" = resp_var_plotting_short)

subj_info <- c('SUBJECTKEY', 'MRI_INFO_DEVICESERIALNUMBER', 'SITE_ID_L')

data_all_average <- Nback.QCedNoPhil %>%
  select(SUBJECTKEY, 
         all_of(subj_info), 
         all_of(Resp_Var),
         starts_with('X2backvs0back')) %>%
  rename_at(vars(-all_of(subj_info),-all_of(Resp_Var)),
            ~ str_replace(., 'X2backvs0back_ROI_', 'roi_'))

### checking whether the short names and ROI names in the data are the same
name_check <- which(short_names$roi != str_remove(names(select(data_all_average,starts_with("roi_"))),"roi_"))
print(name_check)
## integer(0)
new_shorter_names <- short_names 
new_shorter_names$roiShort[97] <- "R Subcentral"
new_shorter_names$roiShort <-  str_squish(string = new_shorter_names$roiShort)

### new_shorter_names_two_lines is used for plotting ROI names on two lines to prevent labels from being cut off
new_shorter_names_two_lines <- short_names_two_lines
new_shorter_names_two_lines$roiShortTwoLines <- map_chr(short_names_two_lines$roiShortAddLines,
                                                        ~ str_replace(.x, "55", "\n"))
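For example (a hypothetical label, not one from the CSV), the literal token "55" apparently marks where the line break should go:

str_replace("L Superior55Frontal", "55", "\n")  # yields "L Superior\nFrontal", which ggplot renders on two lines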

1.4.1 Selecting data

We first dropped participants with an NA in any of the variables of interest.

data_all_listwise <- data_all_average %>%
  drop_na() 

We then performed listwise deletion, dropping all participants for whom either behavioral performance or brain activation in any region fell more than 3 × IQR (interquartile range) beyond the first or third quartile.

Outliers are removed separately within the training and testing data sets, so no participant counts are displayed here. The IQR rule is applied to the brain features (the variables starting with "roi_") and the response variables.

The data are standardized after the outliers are removed.

## this IQR function is used in the recipe
IQR_remove <- function(data_split, resp_vec){
  data_split %>%
    # quantile(.x, na.rm = TRUE)[4] is Q3 (75th percentile); [2] is Q1 (25th)
    mutate_at(vars(starts_with("roi_"), all_of(resp_vec)), ~ ifelse(
      .x > quantile(.x, na.rm = TRUE)[4] + 3 * IQR(.x, na.rm = TRUE) |
      .x < quantile(.x, na.rm = TRUE)[2] - 3 * IQR(.x, na.rm = TRUE),
      NA, .x)) %>%
    drop_na() %>%
    ## scaling the data set is done here, not in the recipe
    mutate_if(is.numeric, ~ (.x - mean(.x)) / sd(.x))
}
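As a usage sketch (hypothetical object names; in the analysis the function is applied within the recipe for each split), it would be called separately on the training and testing splits defined in Section 2.1, with the response variables as the second argument:

# data_train_cleaned <- IQR_remove(split_train, Resp_Var)
# data_test_cleaned  <- IQR_remove(split_test, Resp_Var)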

1.4.2 Check participant numbers across sites and scanners

Next, we checked the number of participants across sites and scanners (a sketch of the check follows below). Note that site22 and site08 had fewer than 100 participants once the IQR rules were applied.
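A minimal sketch of that check (output omitted); SITE_ID_L and MRI_INFO_DEVICESERIALNUMBER are the subj_info columns selected above:

data_all_listwise %>% count(SITE_ID_L, sort = TRUE)
data_all_listwise %>% count(MRI_INFO_DEVICESERIALNUMBER, sort = TRUE)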

# Remove site 22 and 8
data_all_listwise <- data_all_listwise %>%
  filter(SITE_ID_L != 'site22' & SITE_ID_L != 'site08')

1.4.3 Check the distribution of all of the cognitive tasks

Most are approximately normally distributed.

# look at the distribution of each cognitive performance variable

# keep track of all the variable names for later use:
resp_names <- data_all_listwise %>% select(all_of(Resp_Var)) %>%
  names() %>%
  set_names()

feature_names <- data_all_listwise %>% select(starts_with("roi_")) %>%
  colnames() %>%
  set_names()


density_plot_grid <- resp_names %>%
  map(~ ggplot(data_all_listwise, aes(x = .data[[.]])) +
        stat_function(fun = dnorm,
                      color = "skyblue", size = 1.5,
                      # index the column by name; `data_all_listwise$.` would
                      # look for a column literally named "."
                      args = list(mean = mean(data_all_listwise[[.]]),
                                  sd = sd(data_all_listwise[[.]]))) +
        geom_density() +
        labs(x = NULL, y = NULL,
             title = resp_var_plotting$short_name[[which(resp_var_plotting$response == .)]])
  )
title_density_plot <- ggdraw() + 
  draw_label(
    "Density plots of all the Cognitive Performance Variables",
    fontface = 'bold',
    x = 0,
    hjust = 0
  ) +
  theme(
    # add margin on the left of the drawing canvas,
    # so title is aligned with left edge of first plot
    plot.margin = margin(0, 0, 0, 7)
  )
plot_grid(title_density_plot,plot_grid(plotlist = density_plot_grid),nrow = 2 , rel_heights = c(0.1, 1))

2 Modeling

2.1 Make data splits

set.seed(123456)
data_split <- initial_split(data_all_listwise)

split_train <- training(data_split)
split_test <- testing(data_split)
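By default, initial_split() allocates 75% of the rows to training and 25% to testing; a quick sanity check of the resulting sizes (output omitted):

nrow(split_train)
nrow(split_test)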

2.2 Parameter tuning functions for each ML algorithm (and functions for mass univariate analyses)

2.2.1 Elastic net tuning functions

enet_tuning <- function(recipe_input, formula_input){

  set.seed(123)
  train_input <- recipe_input %>% bake(new_data = NULL)
  tuning_cv_folds <- train_input %>%
    vfold_cv(v = 10)

  ## penalty is the total amount of regularization;
  ## mixture is the proportion of lasso (L1) penalty (0 = ridge, 1 = lasso)
  model_spec <- linear_reg(penalty = tune(),
                           mixture = tune()) %>%
    set_mode("regression") %>%
    set_engine("glmnet")

  tune_wf <- workflow() %>%
    add_recipe(recipe_input) %>%
    add_model(model_spec)

  ## automatically generate the grid of hyperparameters
  model_grid <-
    model_spec %>%
    parameters(penalty(range = c(-10, 1),
                       trans = log10_trans()),
               mixture()) %>%
    grid_regular(levels = c(200, 11))

  tune_ctrl <- control_grid(save_pred = TRUE, verbose = TRUE,
                            parallel_over = "everything")

  library(doFuture)
  registerDoFuture()
  plan(multisession(workers = 30))

  tune_res <- tune_grid(
    tune_wf,
    resamples = tuning_cv_folds,
    metrics = metric_set(rmse),
    grid = model_grid,
    control = tune_ctrl
  )

  best_tune <- select_best(tune_res, metric = "rmse")

  best_tuned_param <- show_best(tune_res, metric = "rmse")

  enet_final_wf <- tune_wf %>% finalize_workflow(best_tune)

  return(list(enet_wf_final = enet_final_wf,
              best_enet_model = best_tune,
              best_enet_forest_param = best_tuned_param))
}
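As a usage sketch (the recipes actually used are defined later in the analysis, so the recipe below is a placeholder, not the one used here); the random forest and XGBoost tuners defined next follow the same calling pattern:

# example_recipe <- recipe(TFMRI_NB_ALL_BEH_C2B_RATE ~ .,
#                          data = select(split_train, TFMRI_NB_ALL_BEH_C2B_RATE,
#                                        starts_with("roi_"))) %>%
#   prep()   # bake(new_data = NULL) inside the function requires a prepped recipe
# enet_results <- enet_tuning(example_recipe, NULL)
# enet_results$best_enet_model   # the tuned penalty and mixture values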

2.2.1.1 The grid of elastic net

enet_grid <- parameters(penalty(range = c(-10,1), 
                                trans = log10_trans()), 
                        mixture()) %>% 
  grid_regular(levels = c(200,11))
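Since grid_regular() fully crosses the 200 penalty levels with the 11 mixture levels, the grid contains 200 × 11 = 2,200 candidate combinations:

nrow(enet_grid)  # 200 penalty values x 11 mixture values = 2200 rows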

range(enet_grid$penalty)
## [1]  0.0000000001 10.0000000000
range(enet_grid$mixture)
## [1] 0 1
sort(unique(enet_grid$penalty)) 
##   [1]  0.0000000001000000  0.0000000001135733
##   [3]  0.0000000001289890  0.0000000001464971
##   [5]  0.0000000001663817  0.0000000001889652
##   [7]  0.0000000002146141  0.0000000002437444
##   [9]  0.0000000002768287  0.0000000003144035
##  [11]  0.0000000003570786  0.0000000004055461
##  [13]  0.0000000004605922  0.0000000005231099
##  [15]  0.0000000005941134  0.0000000006747544
##  [17]  0.0000000007663411  0.0000000008703591
##  [19]  0.0000000009884959  0.0000000011226678
##  [21]  0.0000000012750512  0.0000000014481182
##  [23]  0.0000000016446762  0.0000000018679136
##  [25]  0.0000000021214518  0.0000000024094036
##  [27]  0.0000000027364400  0.0000000031078662
##  [29]  0.0000000035297073  0.0000000040088063
##  [31]  0.0000000045529351  0.0000000051709202
##  [33]  0.0000000058727866  0.0000000066699197
##  [35]  0.0000000075752503  0.0000000086034644
##  [37]  0.0000000097712415  0.0000000110975250
##  [39]  0.0000000126038293  0.0000000143145894
##  [41]  0.0000000162575567  0.0000000184642494
##  [43]  0.0000000209704640  0.0000000238168555
##  [45]  0.0000000270495973  0.0000000307211300
##  [47]  0.0000000348910121  0.0000000396268864
##  [49]  0.0000000450055768  0.0000000511143348
##  [51]  0.0000000580522552  0.0000000659318827
##  [53]  0.0000000748810386  0.0000000850448934
##  [55]  0.0000000965883224  0.0000001096985798
##  [57]  0.0000001245883364  0.0000001414991297
##  [59]  0.0000001607052818  0.0000001825183494
##  [61]  0.0000002072921780  0.0000002354286414
##  [63]  0.0000002673841616  0.0000003036771118
##  [65]  0.0000003448962260  0.0000003917101491
##  [67]  0.0000004448782831  0.0000005052631065
##  [69]  0.0000005738441648  0.0000006517339605
##  [71]  0.0000007401959997  0.0000008406652886
##  [73]  0.0000009547716114  0.0000010843659687
##  [75]  0.0000012315506033  0.0000013987131026
##  [77]  0.0000015885651294  0.0000018041864094
##  [79]  0.0000020490746898  0.0000023272024790
##  [81]  0.0000026430814870  0.0000030018358136
##  [83]  0.0000034092850697  0.0000038720387818
##  [85]  0.0000043976036093  0.0000049945051159
##  [87]  0.0000056724260685  0.0000064423635087
##  [89]  0.0000073168071434  0.0000083099419494
##  [91]  0.0000094378782778  0.0000107189131921
##  [93]  0.0000121738272774  0.0000138262217376
##  [95]  0.0000157029012473  0.0000178343087693
##  [97]  0.0000202550193923  0.0000230043011977
##  [99]  0.0000261267522556  0.0000296730240819
## [101]  0.0000337006432927  0.0000382749447852
## [103]  0.0000434701315813  0.0000493704785284
## [105]  0.0000560716993821  0.0000636824994472
## [107]  0.0000723263389648  0.0000821434358492
## [109]  0.0000932930402628  0.0001059560179278
## [111]  0.0001203377840778  0.0001366716356462
## [113]  0.0001552225357427  0.0001762914118096
## [115]  0.0002002200371816  0.0002273965752358
## [117]  0.0002582618760683  0.0002933166278390
## [119]  0.0003331294787935  0.0003783462617132
## [121]  0.0004297004704321  0.0004880251583654
## [123]  0.0005542664520663  0.0006294988990222
## [125]  0.0007149428986598  0.0008119844993184
## [127]  0.0009221978823334  0.0010473708979595
## [129]  0.0011895340673703  0.0013509935211980
## [131]  0.0015343684089300  0.0017426333860097
## [133]  0.0019791668678536  0.0022478058335487
## [135]  0.0025529080682395  0.0028994228538829
## [137]  0.0032929712550972  0.0037399373024788
## [139]  0.0042475715525369  0.0048241087041654
## [141]  0.0054789011795939  0.0062225708367302
## [143]  0.0070671812739275  0.0080264335222572
## [145]  0.0091158882997508  0.0103532184329566
## [147]  0.0117584955405216  0.0133545156292990
## [149]  0.0151671688847092  0.0172258596539879
## [151]  0.0195639834351706  0.0222194686093953
## [153]  0.0252353917043477  0.0286606761694826
## [155]  0.0325508859983506  0.0369691270719503
## [157]  0.0419870708444392  0.0476861169771447
## [159]  0.0541587137807949  0.0615098578858050
## [161]  0.0698587974678526  0.0793409666579749
## [163]  0.0901101825166504  0.1023411402105453
## [165]  0.1162322468679854  0.1320088400831422
## [167]  0.1499268432786047  0.1702769172225905
## [169]  0.1933891750455232  0.2196385372416551
## [171]  0.2494508135230317  0.2833096101839330
## [173]  0.3217641750250735  0.3654383070957262
## [175]  0.4150404757850489  0.4713753134116729
## [177]  0.5353566677410740  0.6080224261649427
## [179]  0.6905513520162345  0.7842822061337682
## [181]  0.8907354638610459  1.0116379797662070
## [183]  1.1489510001873109  1.3049019780144069
## [185]  1.4820207057988601  1.6831803533309617
## [187]  1.9116440753857036  2.1711179456945096
## [189]  2.4658110758226037  2.8005038941836369
## [191]  3.1806256927941190  3.6123426997094379
## [193]  4.1026581058271905  4.6595256686646866
## [195]  5.2919787359584580  6.0102767820703882
## [197]  6.8260718342724065  7.7525974886294646
## [199]  8.8048835816434821 10.0000000000000000
sort(unique(enet_grid$mixture))
##  [1] 0.0 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 1.0

2.2.2 Random forest tuning function

random_forest_tuning <- function(recipe_input, formula_input){

  set.seed(123)
  train_input <- recipe_input %>% bake(new_data = NULL)
  tuning_cv_folds <- train_input %>%
    vfold_cv(v = 10)

  ## mtry is the number of predictors to sample at each split;
  ## min_n is the number of observations needed to keep splitting nodes
  tune_spec <- rand_forest(mtry = tune(),
                           trees = 500,
                           min_n = tune()) %>%
    set_mode("regression") %>%
    set_engine("ranger")

  tune_wf <- workflow() %>%
    add_recipe(recipe_input) %>%
    add_model(tune_spec)

  ## automatically generate the grid of hyperparameters
  rf_grid <- grid_latin_hypercube(
    min_n(range = c(2, 2000)),
    mtry(range = c(1, 167)),
    size = 3000
  )

  rf_ctrl <- control_grid(save_pred = TRUE,
                          verbose = TRUE,
                          parallel_over = "everything")

  library(doFuture)
  registerDoFuture()
  plan(multisession(workers = 30))

  tune_res <- tune_grid(
    tune_wf,
    resamples = tuning_cv_folds,
    metrics = metric_set(rmse),
    grid = rf_grid,
    control = rf_ctrl
  )

  best_tune <- select_best(tune_res, metric = "rmse")

  best_tuned_param <- show_best(tune_res, metric = "rmse")

  rf_final_wf <- tune_wf %>% finalize_workflow(best_tune)

  return(list(random_forest_wf_final = rf_final_wf,
              best_random_forest_model = best_tune,
              best_random_forest_param = best_tuned_param))
}

2.2.2.1 The grid of random forest

set.seed(123)

random_forest_grid <- grid_latin_hypercube(
 min_n(range = c(2,2000)), 
 mtry(range = c(1, 167)), 
  size = 3000
)

##range of the grid
range(random_forest_grid$min_n)
## [1]    2 2000
range(random_forest_grid$mtry)
## [1]   1 167
##unique elements of the grid
sort(unique(random_forest_grid$min_n))
##    [1]    2    3    4    5    6    7    8    9   10   11
##   [11]   12   13   14   15   16   17   18   19   20   21
##   [21]   22   23   24   25   26   27   28   29   30   31
##   [31]   32   33   34   35   36   37   38   39   40   41
##   [41]   42   43   44   45   46   47   48   49   50   51
##   [51]   52   53   54   55   56   57   58   59   60   61
##   [61]   62   63   64   65   66   67   68   69   70   71
##   [71]   73   74   75   76   77   78   79   80   81   82
##   [81]   83   84   85   86   87   88   89   90   91   92
##   [91]   93   94   95   96   97   98   99  100  101  102
##  [101]  103  104  105  106  107  108  109  110  111  112
##  [111]  113  114  115  116  117  118  119  120  121  122
##  [121]  123  124  125  126  127  128  129  130  131  132
##  [131]  133  134  135  136  137  139  140  141  142  143
##  [141]  144  145  146  147  148  149  150  151  152  153
##  [151]  154  155  156  157  158  159  160  161  162  163
##  [161]  164  165  166  167  168  169  170  171  172  173
##  [171]  174  175  176  177  178  179  180  181  182  183
##  [181]  184  185  186  187  188  189  190  191  192  193
##  [191]  194  195  196  197  198  199  200  201  202  203
##  [201]  204  205  206  207  208  209  210  211  212  213
##  [211]  214  215  216  217  218  219  220  221  222  223
##  [221]  224  225  226  227  228  229  230  231  232  233
##  [231]  234  236  237  238  239  240  241  242  244  245
##  [241]  246  247  248  249  250  251  252  253  254  255
##  [251]  256  257  258  259  260  261  262  263  264  265
##  [261]  266  267  268  270  271  272  273  274  275  276
##  [271]  277  278  279  280  281  282  283  284  285  286
##  [281]  287  288  289  290  291  292  293  294  295  296
##  [291]  297  298  299  300  301  302  303  304  305  306
##  [301]  307  308  309  310  311  312  313  314  315  316
##  [311]  317  318  319  320  321  322  323  324  325  326
##  [321]  327  328  329  330  331  332  333  334  335  336
##  [331]  337  338  339  340  341  342  343  344  345  346
##  [341]  347  348  350  351  352  353  354  355  356  358
##  [351]  359  360  361  362  363  364  365  366  367  368
##  [361]  369  370  371  372  373  374  375  376  377  378
##  [371]  379  380  381  382  383  384  385  386  387  388
##  [381]  389  390  391  392  393  394  395  396  397  398
##  [391]  400  401  402  403  404  405  406  407  408  409
##  [401]  410  411  412  413  414  415  416  417  418  419
##  [411]  420  421  422  423  424  425  426  427  428  429
##  [421]  430  431  432  433  434  435  436  437  438  439
##  [431]  440  441  442  443  444  445  446  447  448  449
##  [441]  450  451  452  453  454  455  456  457  458  459
##  [451]  460  461  462  463  464  465  466  467  468  469
##  [461]  470  471  472  473  474  475  476  477  478  479
##  [471]  480  481  482  483  484  485  486  487  488  489
##  [481]  490  491  492  493  494  495  496  497  498  499
##  [491]  500  501  502  503  504  505  506  507  508  509
##  [501]  510  511  512  513  514  515  516  517  518  519
##  [511]  520  521  522  523  524  525  526  527  528  529
##  [521]  530  531  532  533  534  535  536  537  538  539
##  [531]  540  541  542  543  544  545  546  547  548  549
##  [541]  550  551  552  553  554  555  556  557  558  559
##  [551]  560  561  562  563  564  565  566  567  568  569
##  [561]  570  571  572  573  574  575  576  577  578  579
##  [571]  580  581  582  583  584  585  586  587  588  589
##  [581]  590  591  592  593  594  595  596  597  598  599
##  [591]  600  601  602  603  604  605  606  607  608  609
##  [601]  610  611  612  613  614  615  616  617  618  619
##  [611]  620  621  622  623  624  625  626  627  628  629
##  [621]  630  631  632  633  634  635  636  637  638  639
##  [631]  640  641  642  643  644  645  647  648  649  650
##  [641]  651  652  653  654  655  656  657  658  659  660
##  [651]  661  662  663  665  666  667  668  669  670  671
##  [661]  672  673  674  675  676  677  678  679  680  681
##  [671]  682  683  684  685  686  687  688  689  690  691
##  [681]  692  693  694  695  696  697  698  699  700  701
##  [691]  702  703  704  705  706  707  708  709  710  711
##  [701]  712  713  715  716  717  718  719  720  721  722
##  [711]  723  725  726  727  728  729  730  731  732  733
##  [721]  735  736  737  738  739  740  741  742  743  744
##  [731]  745  746  747  748  749  750  751  752  753  755
##  [741]  756  757  758  759  760  761  762  763  764  765
##  [751]  766  767  768  769  770  771  772  773  774  775
##  [761]  776  777  778  779  780  781  782  783  784  785
##  [771]  786  787  788  789  790  791  792  793  794  795
##  [781]  796  797  798  799  800  801  802  803  804  805
##  [791]  806  807  808  809  810  811  812  813  814  815
##  [801]  816  817  818  819  820  821  822  823  824  825
##  [811]  826  827  828  829  830  831  832  833  834  835
##  [821]  836  837  838  839  840  841  842  843  844  845
##  [831]  846  847  848  849  850  851  852  853  854  855
##  [841]  856  857  858  859  860  861  862  863  864  865
##  [851]  866  867  868  869  870  871  872  873  874  875
##  [861]  876  877  878  880  881  882  883  884  885  886
##  [871]  887  888  889  890  891  892  893  894  895  896
##  [881]  897  898  899  900  901  902  903  904  905  906
##  [891]  907  908  909  910  911  912  913  914  915  916
##  [901]  917  918  919  920  921  922  923  924  925  926
##  [911]  927  928  929  930  931  932  933  934  935  936
##  [921]  937  938  939  940  941  942  943  944  945  946
##  [931]  947  948  949  950  951  952  953  954  955  956
##  [941]  957  958  959  960  961  962  963  964  965  966
##  [951]  967  968  969  970  971  972  973  974  975  976
##  [961]  978  979  980  981  982  983  984  985  986  987
##  [971]  988  989  990  991  992  993  994  995  996  997
##  [981]  998  999 1000 1002 1003 1004 1005 1006 1007 1008
##  [991] 1009 1010 1011 1012 1013 1014 1015 1016 1017 1018
## [1001] 1019 1020 1021 1022 1023 1024 1025 1026 1027 1028
## [1011] 1029 1030 1031 1032 1033 1034 1035 1036 1037 1038
## [1021] 1039 1040 1041 1042 1043 1044 1045 1046 1047 1048
## [1031] 1049 1050 1051 1052 1053 1054 1055 1056 1057 1058
## [1041] 1059 1060 1061 1062 1063 1064 1065 1066 1067 1068
## [1051] 1069 1070 1071 1072 1073 1074 1075 1076 1077 1078
## [1061] 1079 1080 1081 1082 1083 1084 1085 1086 1087 1088
## [1071] 1089 1090 1091 1092 1093 1094 1095 1096 1097 1098
## [1081] 1099 1100 1101 1102 1103 1104 1105 1106 1107 1108
## [1091] 1109 1110 1111 1112 1113 1114 1116 1117 1118 1119
## [1101] 1120 1121 1122 1123 1124 1125 1126 1127 1128 1129
## [1111] 1130 1131 1132 1133 1134 1135 1136 1137 1138 1139
## [1121] 1140 1141 1142 1143 1144 1145 1146 1147 1148 1149
## [1131] 1150 1151 1152 1153 1154 1155 1156 1157 1158 1159
## [1141] 1160 1161 1162 1163 1164 1165 1166 1167 1168 1169
## [1151] 1170 1171 1172 1173 1174 1175 1176 1177 1178 1179
## [1161] 1180 1181 1182 1183 1184 1185 1186 1187 1188 1189
## [1171] 1190 1191 1192 1193 1194 1195 1196 1197 1198 1199
## [1181] 1200 1201 1202 1203 1204 1205 1206 1207 1208 1209
## [1191] 1210 1211 1212 1213 1214 1215 1217 1218 1219 1220
## [1201] 1221 1222 1223 1224 1225 1226 1227 1228 1229 1230
## [1211] 1231 1232 1233 1234 1235 1236 1237 1238 1239 1240
## [1221] 1241 1242 1243 1244 1245 1246 1247 1248 1249 1250
## [1231] 1251 1252 1253 1254 1255 1256 1257 1258 1259 1260
## [1241] 1261 1262 1263 1264 1265 1266 1267 1268 1269 1270
## [1251] 1271 1272 1273 1274 1275 1276 1277 1279 1280 1281
## [1261] 1282 1283 1284 1285 1286 1287 1288 1289 1290 1291
## [1271] 1292 1293 1294 1295 1297 1298 1299 1300 1301 1302
## [1281] 1303 1304 1305 1306 1307 1309 1310 1311 1312 1313
## [1291] 1314 1315 1316 1317 1318 1319 1320 1321 1322 1323
## [1301] 1324 1325 1326 1327 1328 1329 1330 1331 1332 1333
## [1311] 1334 1335 1336 1337 1338 1339 1340 1341 1342 1343
## [1321] 1345 1346 1347 1348 1349 1350 1351 1352 1353 1354
## [1331] 1355 1357 1358 1359 1360 1361 1362 1363 1364 1365
## [1341] 1366 1367 1368 1369 1370 1371 1372 1373 1374 1375
## [1351] 1376 1377 1378 1379 1380 1381 1382 1383 1384 1385
## [1361] 1386 1387 1388 1389 1390 1391 1392 1393 1394 1395
## [1371] 1396 1397 1398 1399 1400 1401 1402 1403 1404 1405
## [1381] 1406 1407 1408 1409 1410 1411 1412 1413 1414 1415
## [1391] 1416 1417 1418 1419 1420 1421 1422 1423 1424 1425
## [1401] 1426 1427 1428 1429 1430 1431 1432 1433 1434 1435
## [1411] 1436 1437 1438 1439 1440 1441 1442 1443 1444 1445
## [1421] 1446 1447 1448 1449 1450 1451 1452 1453 1454 1455
## [1431] 1456 1457 1458 1459 1460 1461 1462 1463 1464 1465
## [1441] 1466 1467 1468 1469 1470 1471 1472 1473 1474 1475
## [1451] 1476 1477 1478 1479 1480 1481 1482 1483 1484 1485
## [1461] 1486 1487 1488 1489 1490 1491 1492 1493 1494 1495
## [1471] 1496 1497 1498 1499 1500 1501 1502 1503 1504 1505
## [1481] 1506 1507 1508 1509 1510 1511 1512 1513 1514 1515
## [1491] 1516 1517 1518 1519 1520 1521 1522 1523 1524 1525
## [1501] 1526 1527 1528 1529 1530 1531 1532 1533 1534 1535
## [1511] 1536 1537 1538 1539 1540 1541 1542 1543 1544 1545
## [1521] 1546 1547 1548 1549 1550 1551 1552 1553 1554 1555
## [1531] 1556 1557 1558 1559 1560 1561 1562 1564 1565 1566
## [1541] 1567 1568 1569 1570 1571 1572 1573 1574 1575 1576
## [1551] 1577 1578 1579 1580 1581 1582 1583 1584 1585 1586
## [1561] 1587 1588 1589 1590 1591 1592 1593 1594 1595 1596
## [1571] 1597 1598 1599 1600 1601 1602 1603 1604 1605 1606
## [1581] 1607 1608 1609 1610 1611 1612 1613 1614 1615 1616
## [1591] 1617 1618 1619 1620 1621 1622 1624 1625 1626 1627
## [1601] 1628 1629 1630 1631 1632 1633 1634 1635 1636 1637
## [1611] 1638 1639 1640 1641 1642 1643 1644 1645 1646 1647
## [1621] 1648 1649 1650 1651 1652 1653 1654 1655 1656 1658
## [1631] 1659 1660 1661 1662 1663 1664 1665 1666 1667 1668
## [1641] 1669 1670 1671 1672 1673 1674 1675 1676 1677 1678
## [1651] 1679 1680 1681 1682 1683 1684 1686 1687 1688 1689
## [1661] 1690 1691 1692 1693 1694 1695 1696 1697 1698 1699
## [1671] 1700 1701 1702 1703 1704 1705 1706 1707 1708 1709
## [1681] 1710 1711 1712 1713 1714 1715 1716 1717 1718 1719
## [1691] 1720 1721 1722 1723 1724 1725 1726 1727 1728 1730
## [1701] 1732 1733 1734 1735 1736 1737 1738 1739 1740 1741
## [1711] 1742 1744 1746 1748 1749 1750 1751 1752 1753 1754
## [1721] 1755 1756 1757 1758 1759 1760 1761 1762 1763 1764
## [1731] 1765 1766 1767 1768 1769 1770 1771 1772 1773 1774
## [1741] 1775 1776 1777 1778 1779 1780 1781 1782 1783 1784
## [1751] 1785 1786 1787 1788 1789 1790 1791 1792 1793 1794
## [1761] 1795 1796 1797 1798 1799 1800 1801 1802 1803 1804
## [1771] 1805 1806 1807 1808 1809 1810 1811 1812 1813 1814
## [1781] 1815 1816 1817 1818 1819 1820 1821 1822 1823 1824
## [1791] 1825 1826 1827 1828 1829 1830 1831 1832 1833 1834
## [1801] 1835 1836 1837 1838 1839 1840 1841 1842 1843 1844
## [1811] 1845 1846 1847 1848 1849 1850 1851 1852 1853 1854
## [1821] 1855 1856 1857 1858 1859 1860 1861 1862 1863 1864
## [1831] 1865 1866 1867 1868 1869 1870 1871 1872 1873 1874
## [1841] 1875 1876 1877 1878 1879 1880 1881 1882 1883 1884
## [1851] 1885 1886 1887 1888 1889 1890 1891 1892 1893 1894
## [1861] 1895 1896 1897 1898 1899 1900 1901 1902 1903 1904
## [1871] 1905 1906 1907 1908 1909 1910 1911 1912 1913 1914
## [1881] 1915 1916 1917 1918 1919 1920 1921 1922 1923 1924
## [1891] 1925 1926 1927 1928 1929 1930 1931 1932 1933 1935
## [1901] 1936 1937 1938 1939 1940 1941 1942 1943 1944 1945
## [1911] 1946 1947 1948 1949 1950 1951 1952 1953 1954 1955
## [1921] 1957 1958 1959 1960 1961 1962 1963 1964 1965 1966
## [1931] 1967 1968 1969 1970 1971 1972 1973 1974 1975 1976
## [1941] 1977 1978 1979 1980 1981 1982 1983 1984 1985 1986
## [1951] 1987 1988 1989 1990 1991 1992 1993 1994 1995 1996
## [1961] 1997 1998 1999 2000
sort(unique(random_forest_grid$mtry))
##   [1]   1   2   3   4   5   6   7   8   9  10  11  12  13
##  [14]  14  15  16  17  18  19  20  21  22  23  24  25  26
##  [27]  27  28  29  30  31  32  33  34  35  36  37  38  39
##  [40]  40  41  42  43  44  45  46  47  48  49  50  51  52
##  [53]  53  54  55  56  57  58  59  60  61  62  63  64  65
##  [66]  66  67  68  69  70  71  72  73  74  75  76  77  78
##  [79]  79  80  81  82  83  84  85  86  87  88  89  90  91
##  [92]  92  93  94  95  96  97  98  99 100 101 102 103 104
## [105] 105 106 107 108 109 110 111 112 113 114 115 116 117
## [118] 118 119 120 121 122 123 124 125 126 127 128 129 130
## [131] 131 132 133 134 135 136 137 138 139 140 141 142 143
## [144] 144 145 146 147 148 149 150 151 152 153 154 155 156
## [157] 157 158 159 160 161 162 163 164 165 166 167

2.2.3 XGBoost tuning functions

xgboost_tuning <- function(recipe_input, formula_input){

  set.seed(123)
  train_input <- recipe_input %>% bake(new_data = NULL)
  tuning_cv_folds <- train_input %>%
    vfold_cv(v = 10)

  ## mtry is the number of predictors to sample at each split;
  ## min_n is the number of observations needed to keep splitting nodes
  tune_spec <- boost_tree(mtry = tune(),
                          trees = 500,
                          min_n = tune(),
                          tree_depth = tune(),     ## tree_depth, min_n, loss_reduction: model complexity
                          loss_reduction = tune(),
                          sample_size = tune(),    ## randomness
                          learn_rate = tune()) %>% ## step size
    set_mode("regression") %>%
    set_engine("xgboost")

  tune_wf <- workflow() %>%
    add_recipe(recipe_input) %>%
    add_model(tune_spec)

  ## automatically generate the grid of hyperparameters
  xgb_grid <- grid_latin_hypercube(
    tree_depth(),
    min_n(range = c(2, 1000)),
    loss_reduction(),
    sample_size = sample_prop(),
    mtry(range = c(1, 167)),
    learn_rate(),
    size = 3000
  )

  xgb_ctrl <- control_grid(save_pred = TRUE,
                           verbose = TRUE,
                           parallel_over = "everything")

  library(doFuture)
  registerDoFuture()
  plan(multisession(workers = 30))

  tune_res <- tune_grid(
    tune_wf,
    resamples = tuning_cv_folds,
    metrics = metric_set(rmse),
    grid = xgb_grid,
    control = xgb_ctrl
  )

  best_tune <- select_best(tune_res, metric = "rmse")

  best_tuned_param <- show_best(tune_res, metric = "rmse")

  xgboost_final_wf <- tune_wf %>% finalize_workflow(best_tune)

  return(list(xgboost_wf_final = xgboost_final_wf,
              best_xgboost_model = best_tune,
              best_xgboost_param = best_tuned_param))
}

2.2.3.1 The grid of XGBoost

set.seed(123)
xgboost_grid <- grid_latin_hypercube(
  tree_depth(),
  min_n(range = c(2,1000)),
  loss_reduction(),
  sample_size = sample_prop(),
  mtry(range = c(1, 167)),
  learn_rate(),
  size = 3000
)
##range of the grid
range(xgboost_grid$tree_depth)
## [1]  1 15
range(xgboost_grid$min_n)
## [1]    2 1000
range(xgboost_grid$loss_reduction)
## [1]  0.0000000001003709 31.4091533457374190
range(xgboost_grid$sample_size)
## [1] 0.1001774 0.9998824
range(xgboost_grid$mtry)
## [1]   1 167
range(xgboost_grid$learn_rate)
## [1] 0.000000000100421 0.099600824160661
## unique grid elements
sort(unique(xgboost_grid$tree_depth))
##  [1]  1  2  3  4  5  6  7  8  9 10 11 12 13 14 15
sort(unique(xgboost_grid$min_n))
##   [1]    2    3    4    5    6    7    8    9   10   11   12
##  [12]   13   14   15   16   17   18   19   20   21   22   23
##  [23]   24   25   26   27   28   29   30   31   32   33   34
##  [34]   35   36   37   38   39   40   41   42   43   44   45
##  [45]   46   47   48   49   50   51   52   53   54   55   56
##  [56]   57   58   59   60   61   62   63   64   65   66   67
##  [67]   68   69   70   71   72   73   74   75   76   77   78
##  [78]   79   80   81   82   83   84   85   86   87   88   89
##  [89]   90   91   92   93   94   95   96   97   98   99  100
## [100]  101  102  103  104  105  106  107  108  109  110  111
## [111]  112  113  114  115  116  117  118  119  120  121  122
## [122]  123  124  125  126  127  128  129  130  131  132  133
## [133]  134  135  136  137  138  139  140  141  142  143  144
## [144]  145  146  147  148  149  150  151  152  153  154  155
## [155]  156  157  158  159  160  161  162  163  164  165  166
## [166]  167  168  169  170  171  172  173  174  175  176  177
## [177]  178  179  180  181  182  183  184  185  186  187  188
## [188]  189  190  191  192  193  194  195  196  197  198  199
## [199]  200  201  202  203  204  205  206  207  208  209  210
## [210]  211  212  213  214  215  216  217  218  219  220  221
## [221]  222  223  224  225  226  227  228  229  230  231  232
## [232]  233  234  235  236  237  238  239  240  241  242  243
## [243]  244  245  246  247  248  249  250  251  252  253  254
## [254]  255  256  257  258  259  260  261  262  263  264  265
## [265]  266  267  268  269  270  271  272  273  274  275  276
## [276]  277  278  279  280  281  282  283  284  285  286  287
## [287]  288  289  290  291  292  293  294  295  296  297  298
## [298]  299  300  301  302  303  304  305  306  307  308  309
## [309]  310  311  312  313  314  315  316  317  318  319  320
## [320]  321  322  323  324  325  326  327  328  329  330  331
## [331]  332  333  334  335  336  337  338  339  340  341  342
## [342]  343  344  345  346  347  348  349  350  351  352  353
## [353]  354  355  356  357  358  359  360  361  362  363  364
## [364]  365  366  367  368  369  370  371  372  373  374  375
## [375]  376  377  378  379  380  381  382  383  384  385  386
## [386]  387  388  389  390  391  392  393  394  395  396  397
## [397]  398  399  400  401  402  403  404  405  406  407  408
## [408]  409  410  411  412  413  414  415  416  417  418  419
## [419]  420  421  422  423  424  425  426  427  428  429  430
## [430]  431  432  433  434  435  436  437  438  439  440  441
## [441]  442  443  444  445  446  447  448  449  450  451  452
## [452]  453  454  455  456  457  458  459  460  461  462  463
## [463]  464  465  466  467  468  469  470  471  472  473  474
## [474]  475  476  477  478  479  480  481  482  483  484  485
## [485]  486  487  488  489  490  491  492  493  494  495  496
## [496]  497  498  499  500  501  502  503  504  505  506  507
## [507]  508  509  510  511  512  513  514  515  516  517  518
## [518]  519  520  521  522  523  524  525  526  527  528  529
## [529]  530  531  532  533  534  535  536  537  538  539  540
## [540]  541  542  543  544  545  546  547  548  549  550  551
## [551]  552  553  554  555  556  557  558  559  560  561  562
## [562]  563  564  565  566  567  568  569  570  571  572  573
## [573]  574  575  576  577  578  579  580  581  582  583  584
## [584]  585  586  587  588  589  590  591  592  593  594  595
## [595]  596  597  598  599  600  601  602  603  604  605  606
## [606]  607  608  609  610  611  612  613  614  615  616  617
## [617]  618  619  620  621  622  623  624  625  626  627  628
## [628]  629  630  631  632  633  634  635  636  637  638  639
## [639]  640  641  642  643  644  645  646  647  648  649  650
## [650]  651  652  653  654  655  656  657  658  659  660  661
## [661]  662  663  664  665  666  667  668  669  670  671  672
## [672]  673  674  675  676  677  678  679  680  681  682  683
## [683]  684  685  686  687  688  689  690  691  692  693  694
## [694]  695  696  697  698  699  700  701  702  703  704  705
## [705]  706  707  708  709  710  711  712  713  714  715  716
## [716]  717  718  719  720  721  722  723  724  725  726  727
## [727]  728  729  730  731  732  733  734  735  736  737  738
## [738]  739  740  741  742  743  744  745  746  747  748  749
## [749]  750  751  752  753  754  755  756  757  758  759  760
## [760]  761  762  763  764  765  766  767  768  769  770  771
## [771]  772  773  774  775  776  777  778  779  780  781  782
## [782]  783  784  785  786  787  788  789  790  791  792  793
## [793]  794  795  796  797  798  799  800  801  802  803  804
## [804]  805  806  807  808  809  810  811  812  813  814  815
## [815]  816  817  818  819  820  821  822  823  824  825  826
## [826]  827  828  829  830  831  832  833  834  835  836  837
## [837]  838  839  840  841  842  843  844  845  846  847  848
## [848]  849  850  851  852  853  854  855  856  857  858  859
## [859]  860  861  862  863  864  865  866  867  868  869  870
## [870]  871  872  873  874  875  876  877  878  879  880  881
## [881]  882  883  884  885  886  887  888  889  890  891  892
## [892]  893  894  895  896  897  898  899  900  901  902  903
## [903]  904  905  906  907  908  909  910  911  912  913  914
## [914]  915  916  917  918  919  920  921  922  923  924  925
## [925]  926  927  928  929  930  931  932  933  934  935  936
## [936]  937  938  939  940  941  942  943  944  945  946  947
## [947]  948  949  950  951  952  953  954  955  956  957  958
## [958]  959  960  961  962  963  964  965  966  967  968  969
## [969]  970  971  972  973  974  975  976  977  978  979  980
## [980]  981  982  983  984  985  986  987  988  989  990  991
## [991]  992  993  994  995  996  997  998  999 1000
sort(unique(xgboost_grid$loss_reduction))
##    [1]  0.0000000001003709  0.0000000001014648
##    [3]  0.0000000001025765  0.0000000001035544
##    [5]  0.0000000001038801  0.0000000001045954
##    [7]  0.0000000001063629  0.0000000001066730
##    [9]  0.0000000001078699  0.0000000001089197
##   [11]  0.0000000001092603  0.0000000001104807
##   [13]  0.0000000001121521  0.0000000001126890
##   [15]  0.0000000001133438  0.0000000001148394
##   [17]  0.0000000001154959  0.0000000001171594
##   [19]  0.0000000001179795  0.0000000001192678
##   [21]  0.0000000001196835  0.0000000001207433
##   [23]  0.0000000001216881  0.0000000001228511
##   [25]  0.0000000001238820  0.0000000001252990
##   [27]  0.0000000001260908  0.0000000001277042
##   [29]  0.0000000001284100  0.0000000001295687
##   [31]  0.0000000001311005  0.0000000001318439
##   [33]  0.0000000001331375  0.0000000001339335
##   [35]  0.0000000001356624  0.0000000001367244
##   [37]  0.0000000001379233  0.0000000001393855
##   [39]  0.0000000001408460  0.0000000001412759
##   [41]  0.0000000001435611  0.0000000001443726
##   [43]  0.0000000001458603  0.0000000001466234
##   [45]  0.0000000001486726  0.0000000001493173
##   [47]  0.0000000001509915  0.0000000001525379
##   [49]  0.0000000001540898  0.0000000001547454
##   [51]  0.0000000001557071  0.0000000001571261
##   [53]  0.0000000001582991  0.0000000001604477
##   [55]  0.0000000001615407  0.0000000001628972
##   [57]  0.0000000001647074  0.0000000001661858
##   [59]  0.0000000001680070  0.0000000001685386
##   [61]  0.0000000001701420  0.0000000001726516
##   [63]  0.0000000001737167  0.0000000001757222
##   [65]  0.0000000001770643  0.0000000001781239
##   [67]  0.0000000001792314  0.0000000001821227
##   [69]  0.0000000001835196  0.0000000001847342
##   [71]  0.0000000001865264  0.0000000001882795
##   [73]  0.0000000001897200  0.0000000001905422
##   [75]  0.0000000001930229  0.0000000001951634
##   [77]  0.0000000001972079  0.0000000001988609
##   [79]  0.0000000002007496  0.0000000002024739
##   [81]  0.0000000002035344  0.0000000002049404
##   [83]  0.0000000002062438  0.0000000002097738
##   [85]  0.0000000002103063  0.0000000002129699
##   [87]  0.0000000002149780  0.0000000002167321
##   [89]  0.0000000002185156  0.0000000002210458
##   [91]  0.0000000002217390  0.0000000002233548
##   [93]  0.0000000002263985  0.0000000002290897
##   [95]  0.0000000002295921  0.0000000002314112
##   [97]  0.0000000002347983  0.0000000002367576
##   [99]  0.0000000002391506  0.0000000002398878
##  [101]  0.0000000002438596  0.0000000002439187
##  [103]  0.0000000002480901  0.0000000002484491
##  [105]  0.0000000002508618  0.0000000002540447
##  [107]  0.0000000002564714  0.0000000002574851
##  [109]  0.0000000002603062  0.0000000002627540
##  [111]  0.0000000002641965  0.0000000002677384
##  [113]  0.0000000002701331  0.0000000002714398
##  [115]  0.0000000002748826  0.0000000002771204
##  [117]  0.0000000002800553  0.0000000002833555
##  [119]  0.0000000002848882  0.0000000002875765
##  [121]  0.0000000002909301  0.0000000002919212
##  [123]  0.0000000002951856  0.0000000002983716
##  [125]  0.0000000003013838  0.0000000003040598
##  [127]  0.0000000003067816  0.0000000003069430
##  [129]  0.0000000003111697  0.0000000003125791
##  [131]  0.0000000003173626  0.0000000003200645
##  [133]  0.0000000003218570  0.0000000003246659
##  [135]  0.0000000003265657  0.0000000003293624
##  [137]  0.0000000003328895  0.0000000003356776
##  [139]  0.0000000003390307  0.0000000003420016
##  [141]  0.0000000003454855  0.0000000003495657
##  [143]  0.0000000003523194  0.0000000003534027
##  [145]  0.0000000003576600  0.0000000003607887
##  [147]  0.0000000003640129  0.0000000003690858
##  [149]  0.0000000003711206  0.0000000003751834
##  [151]  0.0000000003780132  0.0000000003804694
##  [153]  0.0000000003826794  0.0000000003880635
##  [155]  0.0000000003911118  0.0000000003953616
##  [157]  0.0000000003975046  0.0000000004014764
##  [159]  0.0000000004044716  0.0000000004076938
##  [161]  0.0000000004119207  0.0000000004158168
##  [163]  0.0000000004190678  0.0000000004245930
##  [165]  0.0000000004267389  0.0000000004321219
##  [167]  0.0000000004341373  0.0000000004402721
##  [169]  0.0000000004427206  0.0000000004446161
##  [171]  0.0000000004492024  0.0000000004548342
##  [173]  0.0000000004601422  0.0000000004620914
##  [175]  0.0000000004654938  0.0000000004702624
##  [177]  0.0000000004732523  0.0000000004784224
##  [179]  0.0000000004845458  0.0000000004888636
##  [181]  0.0000000004929833  0.0000000004944690
##  [183]  0.0000000004992975  0.0000000005052292
##  [185]  0.0000000005098693  0.0000000005155720
##  [187]  0.0000000005178103  0.0000000005219234
##  [189]  0.0000000005277594  0.0000000005329706
##  [191]  0.0000000005378760  0.0000000005408062
##  [193]  0.0000000005445148  0.0000000005507425
##  [195]  0.0000000005578093  0.0000000005626494
##  [197]  0.0000000005665605  0.0000000005717076
##  [199]  0.0000000005755540  0.0000000005796055
##  [201]  0.0000000005884645  0.0000000005945527
##  [203]  0.0000000005961731  0.0000000006022185
##  [205]  0.0000000006080350  0.0000000006144901
##  [207]  0.0000000006200043  0.0000000006268108
##  [209]  0.0000000006296368  0.0000000006333905
##  [211]  0.0000000006429706  0.0000000006477309
##  [213]  0.0000000006550570  0.0000000006579538
##  [215]  0.0000000006658135  0.0000000006673629
##  [217]  0.0000000006734972  0.0000000006820054
##  [219]  0.0000000006875172  0.0000000006926682
##  [221]  0.0000000006978567  0.0000000007043171
##  [223]  0.0000000007138086  0.0000000007167989
##  [225]  0.0000000007274587  0.0000000007318686
##  [227]  0.0000000007352103  0.0000000007453166
##  [229]  0.0000000007527914  0.0000000007590418
##  [231]  0.0000000007674815  0.0000000007685822
##  [233]  0.0000000007788184  0.0000000007865982
##  [235]  0.0000000007923630  0.0000000007980967
##  [237]  0.0000000008061168  0.0000000008158619
##  [239]  0.0000000008201865  0.0000000008246283
##  [241]  0.0000000008388073  0.0000000008445916
##  [243]  0.0000000008533189  0.0000000008613139
##  [245]  0.0000000008625997  0.0000000008703833
##  [247]  0.0000000008829669  0.0000000008875670
##  [249]  0.0000000008948176  0.0000000009080757
##  [251]  0.0000000009134040  0.0000000009214252
##  [253]  0.0000000009284878  0.0000000009345257
##  [255]  0.0000000009487694  0.0000000009515643
##  [257]  0.0000000009630496  0.0000000009747607
##  [259]  0.0000000009792246  0.0000000009886517
##  [261]  0.0000000009948462  0.0000000010059343
##  [263]  0.0000000010127802  0.0000000010253772
##  [265]  0.0000000010341749  0.0000000010390196
##  [267]  0.0000000010525169  0.0000000010625551
##  [269]  0.0000000010714441  0.0000000010786506
##  [271]  0.0000000010919791  0.0000000011011275
##  [273]  0.0000000011068612  0.0000000011195219
##  [275]  0.0000000011230143  0.0000000011395801
##  [277]  0.0000000011521554  0.0000000011610215
##  [279]  0.0000000011661978  0.0000000011785901
##  [281]  0.0000000011842392  0.0000000011965624
##  [283]  0.0000000012051091  0.0000000012229887
##  [285]  0.0000000012274823  0.0000000012475030
##  [287]  0.0000000012530763  0.0000000012620880
##  [289]  0.0000000012708267  0.0000000012860888
##  [291]  0.0000000013039974  0.0000000013146481
##  [293]  0.0000000013178177  0.0000000013296943
##  [295]  0.0000000013462592  0.0000000013586161
##  [297]  0.0000000013732847  0.0000000013854430
##  [299]  0.0000000013986867  0.0000000014039399
##  [301]  0.0000000014177925  0.0000000014253032
##  [303]  0.0000000014487474  0.0000000014604292
##  [305]  0.0000000014737018  0.0000000014775358
##  [307]  0.0000000015025486  0.0000000015079696
##  [309]  0.0000000015239615  0.0000000015357840
##  [311]  0.0000000015525701  0.0000000015614993
##  [313]  0.0000000015754764  0.0000000015945415
##  [315]  0.0000000016080452  0.0000000016148925
##  [317]  0.0000000016332554  0.0000000016461158
##  [319]  0.0000000016648314  0.0000000016844979
##  [321]  0.0000000016893330  0.0000000017036599
##  [323]  0.0000000017302323  0.0000000017352116
##  [325]  0.0000000017523589  0.0000000017706071
##  [327]  0.0000000017855664  0.0000000017991892
##  [329]  0.0000000018092366  0.0000000018298699
##  [331]  0.0000000018522530  0.0000000018717860
##  [333]  0.0000000018744210  0.0000000018935384
##  [335]  0.0000000019114027  0.0000000019407838
##  [337]  0.0000000019484629  0.0000000019586570
##  [339]  0.0000000019787767  0.0000000019996210
##  [341]  0.0000000020263684  0.0000000020407407
##  [343]  0.0000000020594295  0.0000000020705162
##  [345]  0.0000000020987880  0.0000000021150481
##  [347]  0.0000000021285646  0.0000000021457154
##  [349]  0.0000000021639629  0.0000000021772524
##  [351]  0.0000000022002176  0.0000000022225471
##  [353]  0.0000000022445151  0.0000000022603759
##  [355]  0.0000000022799459  0.0000000022992798
##  [357]  0.0000000023327105  0.0000000023374394
##  [359]  0.0000000023747534  0.0000000023835482
##  [361]  0.0000000024153455  0.0000000024276727
##  [363]  0.0000000024427747  0.0000000024710277
##  [365]  0.0000000024890270  0.0000000025164386
##  [367]  0.0000000025385250  0.0000000025676346
##  [369]  0.0000000025969001  0.0000000026069931
##  [371]  0.0000000026403492  0.0000000026524972
##  [373]  0.0000000026847836  0.0000000027136310
##  [375]  0.0000000027331327  0.0000000027483870
##  [377]  0.0000000027740061  0.0000000027943273
##  [379]  0.0000000028147653  0.0000000028490361
##  [381]  0.0000000028682554  0.0000000029063280
##  [383]  0.0000000029243187  0.0000000029489218
##  [385]  0.0000000029678327  0.0000000030037047
##  [387]  0.0000000030298024  0.0000000030524154
##  [389]  0.0000000030869619  0.0000000031166995
##  [391]  0.0000000031411990  0.0000000031544417
##  [393]  0.0000000032077637  0.0000000032377681
##  [395]  0.0000000032412487  0.0000000032950493
##  [397]  0.0000000033199493  0.0000000033497554
##  [399]  0.0000000033610238  0.0000000033907415
##  [401]  0.0000000034319272  0.0000000034626106
##  [403]  0.0000000034947559  0.0000000035337046
##  [405]  0.0000000035566639  0.0000000035888589
##  [407]  0.0000000036195803  0.0000000036606974
##  [409]  0.0000000036811344  0.0000000037127308
##  [411]  0.0000000037534106  0.0000000037932310
##  [413]  0.0000000038192240  0.0000000038441458
##  [415]  0.0000000038837562  0.0000000039017826
##  [417]  0.0000000039619906  0.0000000039779068
##  [419]  0.0000000040356788  0.0000000040480412
##  [421]  0.0000000041036043  0.0000000041105382
##  [423]  0.0000000041748078  0.0000000042107908
##  [425]  0.0000000042416698  0.0000000042742461
##  [427]  0.0000000043323482  0.0000000043440533
##  [429]  0.0000000043997003  0.0000000044345968
##  [431]  0.0000000044721965  0.0000000045237276
##  [433]  0.0000000045599217  0.0000000045843188
##  [435]  0.0000000046111420  0.0000000046575346
##  [437]  0.0000000046967440  0.0000000047615082
##  [439]  0.0000000047824389  0.0000000048328207
##  [441]  0.0000000048645238  0.0000000049262940
##  [443]  0.0000000049747435  0.0000000050243492
##  [445]  0.0000000050373224  0.0000000050921286
##  [447]  0.0000000051269283  0.0000000051703065
##  [449]  0.0000000052494734  0.0000000053070869
##  [451]  0.0000000053397809  0.0000000053864569
##  [453]  0.0000000054298499  0.0000000054966118
##  [455]  0.0000000055202197  0.0000000055855413
##  [457]  0.0000000056304940  0.0000000056824647
##  [459]  0.0000000056998518  0.0000000057916201
##  [461]  0.0000000058423708  0.0000000058740602
##  [463]  0.0000000059218193  0.0000000059830115
##  [465]  0.0000000060496727  0.0000000061018275
##  [467]  0.0000000061311563  0.0000000061947707
##  [469]  0.0000000062319693  0.0000000063122360
##  [471]  0.0000000063361838  0.0000000064043857
##  [473]  0.0000000064495924  0.0000000065254637
##  [475]  0.0000000065832834  0.0000000066750745
##  [477]  0.0000000067235678  0.0000000067906583
##  [479]  0.0000000068547868  0.0000000068978600
##  [481]  0.0000000069524610  0.0000000070209618
##  [483]  0.0000000070972569  0.0000000071634334
##  [485]  0.0000000071722649  0.0000000072940815
##  [487]  0.0000000072964485  0.0000000073622763
##  [489]  0.0000000074509010  0.0000000075142968
##  [491]  0.0000000075577165  0.0000000076341712
##  [493]  0.0000000077143451  0.0000000078101597
##  [495]  0.0000000078339848  0.0000000078992421
##  [497]  0.0000000080070447  0.0000000080889758
##  [499]  0.0000000081498025  0.0000000082509793
##  [501]  0.0000000083268655  0.0000000083286610
##  [503]  0.0000000084155737  0.0000000085210006
##  [505]  0.0000000085905853  0.0000000086761148
##  [507]  0.0000000087719366  0.0000000088564907
##  [509]  0.0000000089090618  0.0000000089457520
##  [511]  0.0000000090800372  0.0000000091662261
##  [513]  0.0000000092430593  0.0000000093362518
##  [515]  0.0000000094213616  0.0000000094738391
##  [517]  0.0000000095320833  0.0000000095942820
##  [519]  0.0000000097001421  0.0000000097663763
##  [521]  0.0000000098925443  0.0000000099922934
##  [523]  0.0000000100778708  0.0000000101881674
##  [525]  0.0000000102392786  0.0000000103092826
##  [527]  0.0000000104206982  0.0000000105454186
##  [529]  0.0000000106124017  0.0000000106621542
##  [531]  0.0000000108029379  0.0000000108532315
##  [533]  0.0000000109688433  0.0000000110608545
##  [535]  0.0000000111546611  0.0000000112609695
##  [537]  0.0000000114360023  0.0000000114520100
##  [539]  0.0000000116222062  0.0000000116633355
##  [541]  0.0000000118490121  0.0000000119235215
##  [543]  0.0000000119896111  0.0000000121059633
##  [545]  0.0000000122395138  0.0000000122821765
##  [547]  0.0000000124295209  0.0000000125336003
##  [549]  0.0000000126116374  0.0000000127714303
##  [551]  0.0000000128730468  0.0000000129694245
##  [553]  0.0000000131209613  0.0000000132718204
##  [555]  0.0000000133599242  0.0000000134661809
##  [557]  0.0000000136122149  0.0000000137386265
##  [559]  0.0000000137799745  0.0000000139392424
##  [561]  0.0000000140638390  0.0000000142092253
##  [563]  0.0000000143263265  0.0000000144873771
##  [565]  0.0000000145607112  0.0000000147144331
##  [567]  0.0000000148515336  0.0000000149343607
##  [569]  0.0000000150846031  0.0000000152813095
##  [571]  0.0000000154319719  0.0000000154835929
##  [573]  0.0000000155980167  0.0000000158583719
##  [575]  0.0000000159730314  0.0000000160493906
##  [577]  0.0000000161635413  0.0000000163378209
##  [579]  0.0000000165001285  0.0000000166955412
##  [581]  0.0000000167377599  0.0000000169274485
##  [583]  0.0000000170579379  0.0000000173009896
##  [585]  0.0000000174541093  0.0000000175495996
##  [587]  0.0000000177589268  0.0000000178255859
##  [589]  0.0000000179783974  0.0000000181220389
##  [591]  0.0000000184179657  0.0000000184548390
##  [593]  0.0000000186215377  0.0000000188122575
##  [595]  0.0000000190007834  0.0000000191769061
##  [597]  0.0000000194259987  0.0000000195114863
##  [599]  0.0000000197287908  0.0000000198752076
##  [601]  0.0000000199847445  0.0000000202892794
##  [603]  0.0000000204285491  0.0000000205388327
##  [605]  0.0000000207967616  0.0000000210026634
##  [607]  0.0000000212184303  0.0000000212247782
##  [609]  0.0000000214301067  0.0000000216412088
##  [611]  0.0000000219644461  0.0000000220843147
##  [613]  0.0000000223482690  0.0000000225589592
##  [615]  0.0000000227162777  0.0000000228496405
##  [617]  0.0000000230563408  0.0000000233042930
##  [619]  0.0000000235596094  0.0000000236752894
##  [621]  0.0000000238832458  0.0000000241140481
##  [623]  0.0000000243001951  0.0000000245067755
##  [625]  0.0000000247351156  0.0000000250024865
##  [627]  0.0000000252489032  0.0000000255116188
##  [629]  0.0000000256926097  0.0000000258147166
##  [631]  0.0000000260427296  0.0000000264465943
##  [633]  0.0000000265254770  0.0000000268449820
##  [635]  0.0000000271681947  0.0000000272606195
##  [637]  0.0000000276346199  0.0000000276987654
##  [639]  0.0000000280989198  0.0000000283352672
##  [641]  0.0000000285809606  0.0000000287529633
##  [643]  0.0000000290665930  0.0000000292683443
##  [645]  0.0000000294997135  0.0000000297584502
##  [647]  0.0000000299652536  0.0000000303004139
##  [649]  0.0000000307327430  0.0000000309163641
##  [651]  0.0000000312488733  0.0000000315004919
##  [653]  0.0000000316816390  0.0000000321177207
##  [655]  0.0000000324018740  0.0000000326013850
##  [657]  0.0000000328704849  0.0000000332049659
##  [659]  0.0000000333580498  0.0000000335987839
##  [661]  0.0000000339070697  0.0000000342968243
##  [663]  0.0000000345029064  0.0000000349064051
##  [665]  0.0000000351107940  0.0000000356491600
##  [667]  0.0000000360181206  0.0000000360851824
##  [669]  0.0000000365997248  0.0000000368670189
##  [671]  0.0000000371531029  0.0000000373766705
##  [673]  0.0000000377409160  0.0000000380735309
##  [675]  0.0000000385068953  0.0000000388607377
##  [677]  0.0000000392574690  0.0000000394761294
##  [679]  0.0000000398729137  0.0000000401650282
##  [681]  0.0000000405262046  0.0000000410777517
##  [683]  0.0000000414059821  0.0000000415830842
##  [685]  0.0000000419845544  0.0000000426080818
##  [687]  0.0000000429876798  0.0000000433037567
##  [689]  0.0000000434912736  0.0000000441062801
##  [691]  0.0000000443784294  0.0000000449401045
##  [693]  0.0000000450247606  0.0000000454207967
##  [695]  0.0000000459769056  0.0000000465087546
##  [697]  0.0000000466945496  0.0000000473354476
##  [699]  0.0000000475142690  0.0000000479066424
##  [701]  0.0000000485465828  0.0000000489759177
##  [703]  0.0000000495141910  0.0000000497737728
##  [705]  0.0000000502902599  0.0000000508040200
##  [707]  0.0000000510560085  0.0000000513888796
##  [709]  0.0000000521122270  0.0000000525278394
##  [711]  0.0000000529206425  0.0000000534308399
##  [713]  0.0000000539026434  0.0000000545497710
##  [715]  0.0000000546881228  0.0000000554716336
##  [717]  0.0000000557707431  0.0000000561640168
##  [719]  0.0000000565771149  0.0000000575210406
##  [721]  0.0000000580424617  0.0000000583105128
##  [723]  0.0000000587839251  0.0000000591897239
##  [725]  0.0000000597266458  0.0000000605874442
##  [727]  0.0000000607101326  0.0000000616145901
##  [729]  0.0000000622905325  0.0000000623591497
##  [731]  0.0000000632769334  0.0000000636590650
##  [733]  0.0000000642618731  0.0000000649524314
##  [735]  0.0000000651328409  0.0000000661332957
##  [737]  0.0000000664318817  0.0000000671620139
##  [739]  0.0000000675987309  0.0000000682966866
##  [741]  0.0000000686690380  0.0000000694021906
##  [743]  0.0000000700752572  0.0000000707189168
##  [745]  0.0000000713244308  0.0000000718273310
##  [747]  0.0000000725243292  0.0000000736067728
##  [749]  0.0000000741745656  0.0000000746115240
##  [751]  0.0000000751322751  0.0000000761524794
##  [753]  0.0000000766667210  0.0000000771199204
##  [755]  0.0000000780774706  0.0000000785879308
##  [757]  0.0000000795950817  0.0000000800755026
##  [759]  0.0000000807448368  0.0000000816215577
##  [761]  0.0000000823891250  0.0000000826463802
##  [763]  0.0000000840628207  0.0000000847965548
##  [765]  0.0000000855836681  0.0000000856120517
##  [767]  0.0000000867071652  0.0000000875101924
##  [769]  0.0000000883762478  0.0000000887094942
##  [771]  0.0000000896861942  0.0000000903130124
##  [773]  0.0000000911216312  0.0000000923942835
##  [775]  0.0000000929362526  0.0000000940550205
##  [777]  0.0000000946726750  0.0000000955543896
##  [779]  0.0000000962181775  0.0000000971864406
##  [781]  0.0000000985785134  0.0000000991094748
##  [783]  0.0000000996254179  0.0000001009874628
##  [785]  0.0000001013727969  0.0000001023344756
##  [787]  0.0000001036006490  0.0000001043524360
##  [789]  0.0000001049475468  0.0000001065979328
##  [791]  0.0000001069505701  0.0000001081727305
##  [793]  0.0000001091087866  0.0000001098468706
##  [795]  0.0000001106049201  0.0000001124471944
##  [797]  0.0000001133387008  0.0000001144104474
##  [799]  0.0000001150598415  0.0000001157057637
##  [801]  0.0000001168768865  0.0000001179852213
##  [803]  0.0000001188818874  0.0000001204478789
##  [805]  0.0000001209241398  0.0000001218993194
##  [807]  0.0000001234519339  0.0000001247830230
##  [809]  0.0000001258043225  0.0000001266796088
##  [811]  0.0000001274234800  0.0000001290090389
##  [813]  0.0000001304894771  0.0000001319193521
##  [815]  0.0000001328077093  0.0000001342076440
##  [817]  0.0000001346248477  0.0000001365245151
##  [819]  0.0000001373644162  0.0000001380809157
##  [821]  0.0000001391046006  0.0000001411061918
##  [823]  0.0000001420147225  0.0000001440426794
##  [825]  0.0000001445536464  0.0000001456649089
##  [827]  0.0000001472570424  0.0000001487904364
##  [829]  0.0000001503076597  0.0000001506718796
##  [831]  0.0000001528740675  0.0000001533390716
##  [833]  0.0000001556854160  0.0000001561315431
##  [835]  0.0000001576099182  0.0000001590154277
##  [837]  0.0000001605711237  0.0000001625282249
##  [839]  0.0000001634604483  0.0000001657762249
##  [841]  0.0000001671530290  0.0000001686877416
##  [843]  0.0000001692764257  0.0000001715380854
##  [845]  0.0000001725927919  0.0000001743823636
##  [847]  0.0000001764068590  0.0000001778130698
##  [849]  0.0000001790478949  0.0000001797453911
##  [851]  0.0000001820262989  0.0000001834238689
##  [853]  0.0000001847424383  0.0000001861643254
##  [855]  0.0000001884312157  0.0000001906125503
##  [857]  0.0000001918359968  0.0000001934463142
##  [859]  0.0000001951339787  0.0000001978836907
##  [861]  0.0000001989630917  0.0000001998908189
##  [863]  0.0000002023737332  0.0000002047209334
##  [865]  0.0000002054117696  0.0000002075411702
##  [867]  0.0000002095526270  0.0000002108597207
##  [869]  0.0000002132523467  0.0000002153585725
##  [871]  0.0000002166586454  0.0000002192165118
##  [873]  0.0000002212455333  0.0000002222613169
##  [875]  0.0000002250752202  0.0000002261692490
##  [877]  0.0000002291907919  0.0000002311792846
##  [879]  0.0000002337023339  0.0000002359206379
##  [881]  0.0000002365782084  0.0000002399740563
##  [883]  0.0000002422744867  0.0000002433781539
##  [885]  0.0000002450938129  0.0000002470946438
##  [887]  0.0000002504831655  0.0000002523054840
##  [889]  0.0000002549803546  0.0000002572813876
##  [891]  0.0000002591179570  0.0000002620863294
##  [893]  0.0000002648508050  0.0000002652901625
##  [895]  0.0000002691310062  0.0000002717134399
##  [897]  0.0000002735729698  0.0000002765075513
##  [899]  0.0000002786044651  0.0000002805160543
##  [901]  0.0000002820895637  0.0000002848682626
##  [903]  0.0000002877156760  0.0000002902702534
##  [905]  0.0000002943670425  0.0000002959762086
##  [907]  0.0000002982745032  0.0000003015915548
##  [909]  0.0000003027039920  0.0000003063187162
##  [911]  0.0000003091082580  0.0000003107587181
##  [913]  0.0000003135081125  0.0000003185537042
##  [915]  0.0000003209209371  0.0000003229028648
##  [917]  0.0000003267486049  0.0000003276759135
##  [919]  0.0000003309123593  0.0000003350698217
##  [921]  0.0000003368983206  0.0000003412137557
##  [923]  0.0000003440920916  0.0000003478432843
##  [925]  0.0000003503075312  0.0000003524265114
##  [927]  0.0000003556659048  0.0000003600456741
##  [929]  0.0000003634291169  0.0000003654787513
##  [931]  0.0000003704464334  0.0000003707269377
##  [933]  0.0000003747452483  0.0000003788759621
##  [935]  0.0000003833014011  0.0000003841806985
##  [937]  0.0000003906735142  0.0000003914034676
##  [939]  0.0000003951310878  0.0000003996532372
##  [941]  0.0000004035151639  0.0000004058753825
##  [943]  0.0000004113822305  0.0000004143055524
##  [945]  0.0000004160245992  0.0000004195909841
##  [947]  0.0000004265161377  0.0000004269194536
##  [949]  0.0000004314076951  0.0000004377281545
##  [951]  0.0000004413841373  0.0000004455119510
##  [953]  0.0000004493126395  0.0000004514759172
##  [955]  0.0000004574705795  0.0000004618498753
##  [957]  0.0000004636301676  0.0000004674031244
##  [959]  0.0000004728320220  0.0000004772644602
##  [961]  0.0000004794540331  0.0000004845912877
##  [963]  0.0000004893510295  0.0000004927220643
##  [965]  0.0000004991253794  0.0000005032816344
##  [967]  0.0000005061636921  0.0000005099903609
##  [969]  0.0000005175332506  0.0000005199762698
##  [971]  0.0000005260627337  0.0000005313329095
##  [973]  0.0000005359645853  0.0000005407393026
##  [975]  0.0000005455038465  0.0000005470935578
##  [977]  0.0000005534499586  0.0000005609190977
##  [979]  0.0000005629418214  0.0000005664604324
##  [981]  0.0000005742414517  0.0000005799311929
##  [983]  0.0000005834285996  0.0000005879137261
##  [985]  0.0000005947588811  0.0000006005592097
##  [987]  0.0000006072330202  0.0000006092224900
##  [989]  0.0000006145588193  0.0000006204186130
##  [991]  0.0000006279365082  0.0000006336515708
##  [993]  0.0000006382205727  0.0000006407663880
##  [995]  0.0000006486358399  0.0000006562360530
##  [997]  0.0000006603580373  0.0000006646887284
##  [999]  0.0000006695563087  0.0000006777199897
## [1001]  0.0000006855395277  0.0000006895054816
## [1003]  0.0000006940750642  0.0000007003839828
## [1005]  0.0000007089085530  0.0000007165033042
## [1007]  0.0000007188781663  0.0000007248460836
## [1009]  0.0000007323280581  0.0000007400240557
## [1011]  0.0000007492112680  0.0000007524426365
## [1013]  0.0000007588931728  0.0000007651646638
## [1015]  0.0000007741034429  0.0000007811457187
## [1017]  0.0000007879336411  0.0000007974552195
## [1019]  0.0000008039829784  0.0000008076566018
## [1021]  0.0000008189078211  0.0000008257050821
## [1023]  0.0000008333591958  0.0000008356680176
## [1025]  0.0000008453135724  0.0000008529170132
## [1027]  0.0000008615055772  0.0000008684830338
## [1029]  0.0000008791851550  0.0000008811576032
## [1031]  0.0000008946655963  0.0000009006475597
## [1033]  0.0000009080540751  0.0000009155544156
## [1035]  0.0000009257099094  0.0000009334976981
## [1037]  0.0000009373791713  0.0000009470425845
## [1039]  0.0000009573039836  0.0000009647489135
## [1041]  0.0000009728906337  0.0000009815977722
## [1043]  0.0000009916539725  0.0000010041118504
## [1045]  0.0000010096453613  0.0000010197120536
## [1047]  0.0000010285577241  0.0000010397475308
## [1049]  0.0000010430264082  0.0000010507632835
## [1051]  0.0000010594832127  0.0000010697790734
## [1053]  0.0000010817839742  0.0000010897598279
## [1055]  0.0000011059073080  0.0000011122034871
## [1057]  0.0000011200150781  0.0000011310296785
## [1059]  0.0000011447723196  0.0000011532041815
## [1061]  0.0000011615938494  0.0000011734944610
## [1063]  0.0000011791843758  0.0000011943436322
## [1065]  0.0000012077568868  0.0000012158105469
## [1067]  0.0000012281265302  0.0000012368493096
## [1069]  0.0000012498297153  0.0000012528609186
## [1071]  0.0000012708305366  0.0000012818477474
## [1073]  0.0000012973444247  0.0000013066109336
## [1075]  0.0000013198371995  0.0000013295082808
## [1077]  0.0000013440154997  0.0000013551794327
## [1079]  0.0000013596323736  0.0000013801241042
## [1081]  0.0000013804898675  0.0000014038550865
## [1083]  0.0000014144648746  0.0000014259823715
## [1085]  0.0000014311943124  0.0000014460429640
## [1087]  0.0000014680664980  0.0000014776064961
## [1089]  0.0000014895566743  0.0000014970937115
## [1091]  0.0000015084747375  0.0000015242263555
## [1093]  0.0000015352349438  0.0000015608683003
## [1095]  0.0000015632188315  0.0000015798153339
## [1097]  0.0000015928657314  0.0000016162893958
## [1099]  0.0000016192823885  0.0000016332502048
## [1101]  0.0000016571095980  0.0000016723016885
## [1103]  0.0000016786501121  0.0000016973527416
## [1105]  0.0000017119199512  0.0000017219508983
## [1107]  0.0000017430617288  0.0000017629460880
## [1109]  0.0000017814143816  0.0000017832222312
## [1111]  0.0000018089328706  0.0000018213207734
## [1113]  0.0000018384637568  0.0000018504216984
## [1115]  0.0000018677363307  0.0000018808023162
## [1117]  0.0000019055142712  0.0000019245542543
## [1119]  0.0000019440485807  0.0000019607329261
## [1121]  0.0000019689221001  0.0000019849661803
## [1123]  0.0000020024066637  0.0000020303729981
## [1125]  0.0000020399676085  0.0000020678109692
## [1127]  0.0000020752696089  0.0000020927771077
## [1129]  0.0000021137474743  0.0000021356245161
## [1131]  0.0000021586536828  0.0000021783592894
## [1133]  0.0000022019065730  0.0000022090083680
## [1135]  0.0000022267388333  0.0000022589303830
## [1137]  0.0000022652292480  0.0000022869654899
## [1139]  0.0000023112864879  0.0000023298275599
## [1141]  0.0000023479314218  0.0000023710780159
## [1143]  0.0000023907433868  0.0000024279741718
## [1145]  0.0000024416072021  0.0000024515626180
## [1147]  0.0000024800820216  0.0000025142610653
## [1149]  0.0000025201664718  0.0000025510867861
## [1151]  0.0000025801250324  0.0000026060039404
## [1153]  0.0000026079170252  0.0000026337628353
## [1155]  0.0000026529533153  0.0000026955884815
## [1157]  0.0000027030277235  0.0000027443967607
## [1159]  0.0000027591786655  0.0000027896688435
## [1161]  0.0000028019693297  0.0000028255490013
## [1163]  0.0000028633065415  0.0000028783831666
## [1165]  0.0000029014604895  0.0000029343708848
## [1167]  0.0000029575190224  0.0000029995051005
## [1169]  0.0000030250832323  0.0000030337003641
## [1171]  0.0000030715246146  0.0000031033063302
## [1173]  0.0000031252570111  0.0000031421649053
## [1175]  0.0000031906104293  0.0000032180339932
## [1177]  0.0000032250193771  0.0000032606041458
## [1179]  0.0000032966899413  0.0000033092669600
## [1181]  0.0000033534114134  0.0000033740161072
## [1183]  0.0000034078284727  0.0000034446635388
## [1185]  0.0000034782021934  0.0000035025306776
## [1187]  0.0000035282176586  0.0000035573459936
## [1189]  0.0000035890383565  0.0000036391351188
## [1191]  0.0000036623258739  0.0000037053227229
## [1193]  0.0000037164193506  0.0000037519006483
## [1195]  0.0000037993950583  0.0000038100007241
## [1197]  0.0000038493924947  0.0000038863912768
## [1199]  0.0000039457556244  0.0000039479763018
## [1201]  0.0000040129986098  0.0000040250808690
## [1203]  0.0000040847764848  0.0000041220116188
## [1205]  0.0000041390067311  0.0000041630033573
## [1207]  0.0000042250488788  0.0000042661300619
## [1209]  0.0000042901956561  0.0000043325199287
## [1211]  0.0000043589198768  0.0000043928060362
## [1213]  0.0000044635386630  0.0000044948434299
## [1215]  0.0000045248644106  0.0000045515692973
## [1217]  0.0000046123941342  0.0000046536104407
## [1219]  0.0000047036923536  0.0000047147678212
## [1221]  0.0000047719559450  0.0000048337516529
## [1223]  0.0000048441260716  0.0000049060231441
## [1225]  0.0000049221807479  0.0000050000888296
## [1227]  0.0000050390462638  0.0000050761997042
## [1229]  0.0000051109650317  0.0000051621437933
## [1231]  0.0000052300975241  0.0000052507909917
## [1233]  0.0000052972511251  0.0000053722209171
## [1235]  0.0000053853418019  0.0000054662531054
## [1237]  0.0000054814007271  0.0000055629962925
## [1239]  0.0000056018275457  0.0000056610626965
## [1241]  0.0000056851256184  0.0000057240036267
## [1243]  0.0000057843530216  0.0000058631538177
## [1245]  0.0000058961784121  0.0000059546111117
## [1247]  0.0000059956287500  0.0000060492124594
## [1249]  0.0000060882775853  0.0000061659908386
## [1251]  0.0000062148426234  0.0000062807682255
## [1253]  0.0000063292154456  0.0000063933638428
## [1255]  0.0000064645950721  0.0000064906534856
## [1257]  0.0000065471421343  0.0000066035527627
## [1259]  0.0000066984931758  0.0000067600300364
## [1261]  0.0000068154952077  0.0000068788464210
## [1263]  0.0000069393989094  0.0000069946826208
## [1265]  0.0000070566484240  0.0000071157021021
## [1267]  0.0000071585208660  0.0000072404294545
## [1269]  0.0000072636755073  0.0000073441681565
## [1271]  0.0000074015332522  0.0000074935510013
## [1273]  0.0000075818789740  0.0000075993894150
## [1275]  0.0000077086493613  0.0000077236009657
## [1277]  0.0000078346117998  0.0000079103070543
## [1279]  0.0000079714121865  0.0000080073654908
## [1281]  0.0000081182983736  0.0000081598456893
## [1283]  0.0000082197291365  0.0000083118750434
## [1285]  0.0000084095419196  0.0000084486404620
## [1287]  0.0000085498638394  0.0000086383788103
## [1289]  0.0000086946944935  0.0000088020046443
## [1291]  0.0000088525697461  0.0000089597969842
## [1293]  0.0000089852202508  0.0000091239862904
## [1295]  0.0000091936297174  0.0000092187343462
## [1297]  0.0000093621344574  0.0000094156409765
## [1299]  0.0000095381760804  0.0000095633371501
## [1301]  0.0000096935913835  0.0000097804072003
## [1303]  0.0000098352515943  0.0000099408565054
## [1305]  0.0000100136440434  0.0000101034438052
## [1307]  0.0000101627184497  0.0000102703421223
## [1309]  0.0000103706264530  0.0000104753134481
## [1311]  0.0000105420367720  0.0000106869758845
## [1313]  0.0000107865579045  0.0000108025697269
## [1315]  0.0000109157053028  0.0000110115249407
## [1317]  0.0000110869866718  0.0000112695108451
## [1319]  0.0000112848406342  0.0000114025103319
## [1321]  0.0000115795252045  0.0000116010625279
## [1323]  0.0000116890888531  0.0000118687197596
## [1325]  0.0000119236775327  0.0000120791155467
## [1327]  0.0000121253739610  0.0000122980255705
## [1329]  0.0000123719735741  0.0000124877624421
## [1331]  0.0000125634332724  0.0000127319327647
## [1333]  0.0000127938286933  0.0000129610724839
## [1335]  0.0000130008976439  0.0000131715671750
## [1337]  0.0000133218949393  0.0000133757316911
## [1339]  0.0000134764674721  0.0000136979926862
## [1341]  0.0000137607624487  0.0000138788044445
## [1343]  0.0000140493646916  0.0000141200151484
## [1345]  0.0000141977432789  0.0000143260424149
## [1347]  0.0000144996069305  0.0000146557615248
## [1349]  0.0000147090039883  0.0000149489451472
## [1351]  0.0000150290712714  0.0000152086174315
## [1353]  0.0000153625657951  0.0000153694456251
## [1355]  0.0000156187543014  0.0000157117891665
## [1357]  0.0000158650349536  0.0000159485284140
## [1359]  0.0000161590202412  0.0000162912318848
## [1361]  0.0000163980010560  0.0000165269401462
## [1363]  0.0000166416376477  0.0000168866238537
## [1365]  0.0000170179027448  0.0000171673870381
## [1367]  0.0000172812185635  0.0000174140180669
## [1369]  0.0000176534234013  0.0000177688653804
## [1371]  0.0000179278668249  0.0000181102720251
## [1373]  0.0000181696639314  0.0000184377205868
## [1375]  0.0000185183929088  0.0000188064535869
## [1377]  0.0000189180964061  0.0000190105023007
## [1379]  0.0000191711259282  0.0000193356667755
## [1381]  0.0000195232694664  0.0000197827991075
## [1383]  0.0000199244835895  0.0000200653400327
## [1385]  0.0000203523878954  0.0000204052009357
## [1387]  0.0000206415881696  0.0000208439874043
## [1389]  0.0000209881521547  0.0000211950342238
## [1391]  0.0000213328027735  0.0000216608978868
## [1393]  0.0000217037468438  0.0000219866325650
## [1395]  0.0000221262079791  0.0000224144054872
## [1397]  0.0000225475248914  0.0000227273774526
## [1399]  0.0000230173378356  0.0000232092254139
## [1401]  0.0000232754963344  0.0000235854400476
## [1403]  0.0000237041837738  0.0000240311691562
## [1405]  0.0000241703775741  0.0000244327226833
## [1407]  0.0000245812200907  0.0000248390959937
## [1409]  0.0000250476549088  0.0000253781753096
## [1411]  0.0000254426022739  0.0000256622509421
## [1413]  0.0000260441823166  0.0000261883741131
## [1415]  0.0000263309283238  0.0000267892799393
## [1417]  0.0000268198134197  0.0000272512004008
## [1419]  0.0000272932057147  0.0000276027224574
## [1421]  0.0000279042619761  0.0000281180517859
## [1423]  0.0000284005555976  0.0000287426113873
## [1425]  0.0000287843985731  0.0000291909121870
## [1427]  0.0000293558211349  0.0000295499414828
## [1429]  0.0000299955092427  0.0000303080643537
## [1431]  0.0000304705664081  0.0000305928323216
## [1433]  0.0000309179521134  0.0000311497687752
## [1435]  0.0000315953782056  0.0000317368564153
## [1437]  0.0000321483277946  0.0000325117596195
## [1439]  0.0000326460694700  0.0000329365437298
## [1441]  0.0000331685783201  0.0000337004161576
## [1443]  0.0000337636246614  0.0000342123326951
## [1445]  0.0000344718208947  0.0000346650707226
## [1447]  0.0000351787927476  0.0000355156284514
## [1449]  0.0000357165617194  0.0000359194266341
## [1451]  0.0000363870905593  0.0000366247397335
## [1453]  0.0000369547146934  0.0000371857083230
## [1455]  0.0000375013870308  0.0000379945279610
## [1457]  0.0000381817037507  0.0000387853270079
## [1459]  0.0000390414992486  0.0000393358582204
## [1461]  0.0000396864090898  0.0000399539215960
## [1463]  0.0000403608735349  0.0000407430937540
## [1465]  0.0000409888821528  0.0000413120144963
## [1467]  0.0000419054185240  0.0000421923346392
## [1469]  0.0000424689042993  0.0000429843036374
## [1471]  0.0000432154991151  0.0000437008040990
## [1473]  0.0000442348781939  0.0000446435538212
## [1475]  0.0000448089439179  0.0000453958167756
## [1477]  0.0000458188955496  0.0000461314873299
## [1479]  0.0000465882311846  0.0000469177712471
## [1481]  0.0000471479623739  0.0000478383631612
## [1483]  0.0000481549837808  0.0000487841853616
## [1485]  0.0000491147807881  0.0000496465437711
## [1487]  0.0000500718227082  0.0000504319799910
## [1489]  0.0000510122195823  0.0000514511102043
## [1491]  0.0000514906451146  0.0000521248961459
## [1493]  0.0000526740588313  0.0000532029642228
## [1495]  0.0000536930538356  0.0000542688655236
## [1497]  0.0000543153976429  0.0000549895743420
## [1499]  0.0000555052246880  0.0000558495850329
## [1501]  0.0000564751726354  0.0000570870722813
## [1503]  0.0000573008156620  0.0000579001227575
## [1505]  0.0000584638724389  0.0000590009685881
## [1507]  0.0000594616221527  0.0000599973151494
## [1509]  0.0000606417273865  0.0000612282625246
## [1511]  0.0000619251162829  0.0000620063740419
## [1513]  0.0000625281271167  0.0000632190109912
## [1515]  0.0000641922243649  0.0000642403934130
## [1517]  0.0000652547828609  0.0000655243211231
## [1519]  0.0000659720271730  0.0000667858620763
## [1521]  0.0000671127325971  0.0000678658590002
## [1523]  0.0000685905417654  0.0000693366865272
## [1525]  0.0000697208422070  0.0000707220768965
## [1527]  0.0000712689797520  0.0000716934477930
## [1529]  0.0000724556308013  0.0000728539923736
## [1531]  0.0000734792981098  0.0000739889701174
## [1533]  0.0000751623862309  0.0000752571263804
## [1535]  0.0000760805054720  0.0000768344691296
## [1537]  0.0000779231612655  0.0000783194078739
## [1539]  0.0000786558376615  0.0000794645886793
## [1541]  0.0000806091917458  0.0000809812841374
## [1543]  0.0000815921553627  0.0000824108540210
## [1545]  0.0000831887835905  0.0000840059884098
## [1547]  0.0000848168662353  0.0000854892425773
## [1549]  0.0000862845903217  0.0000870663570704
## [1551]  0.0000879581680345  0.0000882386506373
## [1553]  0.0000892447161055  0.0000901083028361
## [1555]  0.0000905892825286  0.0000916243301256
## [1557]  0.0000922372579657  0.0000934872748942
## [1559]  0.0000940899919969  0.0000948060443903
## [1561]  0.0000955610466271  0.0000967312024119
## [1563]  0.0000976723153877  0.0000988132047986
## [1565]  0.0000995896373299  0.0001000770983239
## [1567]  0.0001007439800296  0.0001016921136279
## [1569]  0.0001033327329884  0.0001041401817819
## [1571]  0.0001046729508917  0.0001055308996343
## [1573]  0.0001063286303187  0.0001077153854333
## [1575]  0.0001086637466223  0.0001094742811651
## [1577]  0.0001104632627363  0.0001112186136262
## [1579]  0.0001125377952118  0.0001136116196696
## [1581]  0.0001140217632439  0.0001155125847815
## [1583]  0.0001160405010798  0.0001179050836634
## [1585]  0.0001183237888410  0.0001198391812164
## [1587]  0.0001207854070154  0.0001217172787972
## [1589]  0.0001225458757065  0.0001243598907455
## [1591]  0.0001251644535683  0.0001260051019853
## [1593]  0.0001274929395783  0.0001288248545967
## [1595]  0.0001298184395964  0.0001310939438734
## [1597]  0.0001322207370810  0.0001331338835732
## [1599]  0.0001339491196691  0.0001354840187412
## [1601]  0.0001368406934958  0.0001380483239604
## [1603]  0.0001387837036241  0.0001400425932347
## [1605]  0.0001410325602940  0.0001424618310574
## [1607]  0.0001434863784034  0.0001453864475710
## [1609]  0.0001459423819577  0.0001479032722828
## [1611]  0.0001491201313532  0.0001510035066888
## [1613]  0.0001522406438688  0.0001534388103897
## [1615]  0.0001539502510936  0.0001564144585613
## [1617]  0.0001566340034531  0.0001592631931559
## [1619]  0.0001594454072655  0.0001617040467407
## [1621]  0.0001627401575870  0.0001642913303135
## [1623]  0.0001650722229208  0.0001675917341405
## [1625]  0.0001686294267726  0.0001703937695091
## [1627]  0.0001724365966923  0.0001738752811917
## [1629]  0.0001753388539392  0.0001764683564800
## [1631]  0.0001771600461505  0.0001796655171060
## [1633]  0.0001806699367891  0.0001832856792698
## [1635]  0.0001850010575364  0.0001852250915544
## [1637]  0.0001874719999069  0.0001893268973998
## [1639]  0.0001912932002953  0.0001933078853006
## [1641]  0.0001935225761380  0.0001961228242194
## [1643]  0.0001986586896313  0.0001995099627570
## [1645]  0.0002011558117000  0.0002022787633384
## [1647]  0.0002049268395054  0.0002070030673461
## [1649]  0.0002089471655954  0.0002104317662281
## [1651]  0.0002118965844711  0.0002142294002782
## [1653]  0.0002162063689814  0.0002177202007818
## [1655]  0.0002199702980434  0.0002212588603656
## [1657]  0.0002234346822789  0.0002256230365109
## [1659]  0.0002272254926530  0.0002289066497396
## [1661]  0.0002319853101695  0.0002342468940327
## [1663]  0.0002352930370982  0.0002384526237792
## [1665]  0.0002397894909146  0.0002426391014033
## [1667]  0.0002443404860039  0.0002458363099048
## [1669]  0.0002478580033817  0.0002505934207655
## [1671]  0.0002535838094686  0.0002549429716660
## [1673]  0.0002580322453423  0.0002591375771985
## [1675]  0.0002620663818024  0.0002639683875356
## [1677]  0.0002666142140784  0.0002698271277458
## [1679]  0.0002724064901202  0.0002745853201759
## [1681]  0.0002766857841025  0.0002800733017601
## [1683]  0.0002804222747410  0.0002836707664747
## [1685]  0.0002871578183850  0.0002884930280640
## [1687]  0.0002906800000839  0.0002955522602292
## [1689]  0.0002957090263610  0.0002991716077438
## [1691]  0.0003020410167657  0.0003050249473651
## [1693]  0.0003066363490475  0.0003114799660149
## [1695]  0.0003132999901534  0.0003152908548669
## [1697]  0.0003195855622748  0.0003202275798978
## [1699]  0.0003233226767435  0.0003264787577877
## [1701]  0.0003308578749671  0.0003315821647501
## [1703]  0.0003366509153568  0.0003385039921332
## [1705]  0.0003407147950279  0.0003439219284699
## [1707]  0.0003480546198920  0.0003500475379894
## [1709]  0.0003551333020777  0.0003566526424559
## [1711]  0.0003617670339889  0.0003641654640215
## [1713]  0.0003677849007663  0.0003692284745659
## [1715]  0.0003742443616711  0.0003770736130611
## [1717]  0.0003800002101261  0.0003823478029250
## [1719]  0.0003856569046936  0.0003906744367358
## [1721]  0.0003933246492473  0.0003982630871693
## [1723]  0.0003991362801083  0.0004058567155562
## [1725]  0.0004061799039677  0.0004100190684262
## [1727]  0.0004142411055033  0.0004175224514632
## [1729]  0.0004236425251131  0.0004258130509335
## [1731]  0.0004314388928217  0.0004357807920225
## [1733]  0.0004371260964370  0.0004412275364657
## [1735]  0.0004462216842688  0.0004505034915304
## [1737]  0.0004553778860698  0.0004588730213978
## [1739]  0.0004623249658516  0.0004657793454797
## [1741]  0.0004680452653767  0.0004755261294577
## [1743]  0.0004787614019810  0.0004831555509013
## [1745]  0.0004860600584501  0.0004918366971974
## [1747]  0.0004947821444697  0.0004991782487981
## [1749]  0.0005053755702777  0.0005065493763389
## [1751]  0.0005152058303298  0.0005183756436158
## [1753]  0.0005234248703910  0.0005279532415223
## [1755]  0.0005326901193526  0.0005362102845721
## [1757]  0.0005412320929590  0.0005470555821034
## [1759]  0.0005517279710305  0.0005572594512256
## [1761]  0.0005596959240702  0.0005666347569187
## [1763]  0.0005707908940847  0.0005749562860136
## [1765]  0.0005781578145093  0.0005856278754810
## [1767]  0.0005919697160331  0.0005959006167422
## [1769]  0.0006012416148569  0.0006056788731117
## [1771]  0.0006135333551772  0.0006184442592879
## [1773]  0.0006204994905207  0.0006300426026914
## [1775]  0.0006332027741938  0.0006395587746896
## [1777]  0.0006435412349358  0.0006485977488621
## [1779]  0.0006551880833638  0.0006616780202495
## [1781]  0.0006707503114725  0.0006763257484576
## [1783]  0.0006825050249275  0.0006893013484941
## [1785]  0.0006954599925563  0.0006995507903963
## [1787]  0.0007068198659843  0.0007120773466187
## [1789]  0.0007168631057478  0.0007224008872330
## [1791]  0.0007320534912497  0.0007344810240790
## [1793]  0.0007404451097999  0.0007487074767074
## [1795]  0.0007567449283437  0.0007607932711028
## [1797]  0.0007687041008437  0.0007756558084744
## [1799]  0.0007847283316719  0.0007922987141083
## [1801]  0.0007947247874686  0.0008040354507793
## [1803]  0.0008090377467076  0.0008225200303472
## [1805]  0.0008237549649020  0.0008342208870697
## [1807]  0.0008445289103502  0.0008523808408570
## [1809]  0.0008590312870036  0.0008600438982363
## [1811]  0.0008676459679250  0.0008764360135030
## [1813]  0.0008903193848885  0.0008930764316047
## [1815]  0.0009049045228708  0.0009130851183178
## [1817]  0.0009165790114164  0.0009277488663407
## [1819]  0.0009385134752209  0.0009442969913795
## [1821]  0.0009538270769384  0.0009592249554692
## [1823]  0.0009666592804404  0.0009741339143414
## [1825]  0.0009830268340796  0.0009960471242734
## [1827]  0.0010020292743922  0.0010145392479679
## [1829]  0.0010231145064038  0.0010305128684459
## [1831]  0.0010441850154814  0.0010491668598042
## [1833]  0.0010580752064373  0.0010672532899739
## [1835]  0.0010803409703970  0.0010885700574567
## [1837]  0.0010974008735421  0.0011012743122085
## [1839]  0.0011191160205202  0.0011261144698460
## [1841]  0.0011340221595341  0.0011429680275363
## [1843]  0.0011598232967737  0.0011693612109794
## [1845]  0.0011775397990484  0.0011873890001775
## [1847]  0.0011996235340287  0.0012095803485381
## [1849]  0.0012233310039213  0.0012265873333456
## [1851]  0.0012381301538104  0.0012496358777814
## [1853]  0.0012667328013074  0.0012780184121256
## [1855]  0.0012832718644770  0.0013006162678986
## [1857]  0.0013053956356843  0.0013197901702007
## [1859]  0.0013367771203542  0.0013406242461564
## [1861]  0.0013565333669136  0.0013695972658792
## [1863]  0.0013737742043344  0.0013922522276864
## [1865]  0.0014001091904843  0.0014202345213681
## [1867]  0.0014309653213101  0.0014354083681341
## [1869]  0.0014570733773161  0.0014656921103622
## [1871]  0.0014749113071435  0.0014946606945635
## [1873]  0.0015041616314423  0.0015227786620431
## [1875]  0.0015282423969482  0.0015432713116946
## [1877]  0.0015647611993401  0.0015730905158939
## [1879]  0.0015885355416262  0.0015968900902874
## [1881]  0.0016140245140918  0.0016267257341304
## [1883]  0.0016480911615655  0.0016602786241458
## [1885]  0.0016688306231794  0.0016867687619192
## [1887]  0.0016984525038197  0.0017194878899600
## [1889]  0.0017346213427141  0.0017516424342522
## [1891]  0.0017588882209403  0.0017818305869015
## [1893]  0.0017971726004055  0.0018165792720651
## [1895]  0.0018212527846394  0.0018383158239219
## [1897]  0.0018586432228983  0.0018823782011229
## [1899]  0.0018885461386006  0.0019114203000341
## [1901]  0.0019334323511646  0.0019512235791986
## [1903]  0.0019635871053645  0.0019832138392821
## [1905]  0.0019934662592249  0.0020161295020648
## [1907]  0.0020255447560141  0.0020588894915178
## [1909]  0.0020740282771066  0.0020793504476130
## [1911]  0.0021098037899582  0.0021329991133562
## [1913]  0.0021375480988929  0.0021687883319262
## [1915]  0.0021837308334790  0.0022029916512151
## [1917]  0.0022305908042598  0.0022405610339422
## [1919]  0.0022540383803140  0.0022819921818135
## [1921]  0.0023094645110486  0.0023146257692363
## [1923]  0.0023336595589986  0.0023527776705438
## [1925]  0.0023742728008781  0.0023942573188586
## [1927]  0.0024180395416107  0.0024400445562841
## [1929]  0.0024774655192747  0.0024948725681560
## [1931]  0.0025221797235103  0.0025254459476224
## [1933]  0.0025631788663938  0.0025903552436296
## [1935]  0.0026026998098119  0.0026262226835298
## [1937]  0.0026421674201308  0.0026822969012231
## [1939]  0.0027036305021564  0.0027194700661184
## [1941]  0.0027369823336233  0.0027590549233998
## [1943]  0.0028064857669007  0.0028153913418537
## [1945]  0.0028418249279632  0.0028789587002845
## [1947]  0.0028965258336709  0.0029128295009950
## [1949]  0.0029555738097357  0.0029730304003465
## [1951]  0.0030074050525353  0.0030268107560653
## [1953]  0.0030482019811324  0.0030862235704343
## [1955]  0.0031189223026904  0.0031305816250039
## [1957]  0.0031570172428751  0.0031830132707298
## [1959]  0.0032264732561025  0.0032589953216980
## [1961]  0.0032651963564833  0.0033131996488188
## [1963]  0.0033248813977464  0.0033650130781625
## [1965]  0.0033833985274594  0.0034105038281141
## [1967]  0.0034533167338398  0.0034971707272103
## [1969]  0.0035196422545881  0.0035562129227683
## [1971]  0.0035723256764001  0.0036143075582686
## [1973]  0.0036292763039536  0.0036650564630012
## [1975]  0.0037117044165842  0.0037387447326166
## [1977]  0.0037592537517192  0.0038143563540336
## [1979]  0.0038562276692378  0.0038799446454217
## [1981]  0.0038914338624386  0.0039369780714554
## [1983]  0.0039723627253115  0.0040018829385872
## [1985]  0.0040370874418277  0.0040678963232399
## [1987]  0.0041061442886439  0.0041428785249451
## [1989]  0.0041980027276949  0.0042448530903950
## [1991]  0.0042505730018866  0.0043050853194386
## [1993]  0.0043358968745595  0.0043796169860893
## [1995]  0.0044112853257709  0.0044805093336060
## [1997]  0.0044933483848922  0.0045387998401028
## [1999]  0.0045920555976425  0.0046406293477531
## [2001]  0.0046745696667156  0.0047223745745260
## [2003]  0.0047466533037074  0.0047991883546150
## [2005]  0.0048386083644589  0.0048696346720973
## [2007]  0.0048943861779146  0.0049755099257207
## [2009]  0.0049976149653831  0.0050344920209274
## [2011]  0.0050988397013098  0.0051209538178042
## [2013]  0.0051832853251467  0.0052303914956443
## [2015]  0.0052778771264183  0.0053351038950020
## [2017]  0.0053502056327937  0.0054260767027094
## [2019]  0.0054485601256965  0.0054947226798094
## [2021]  0.0055731048031882  0.0056167195291372
## [2023]  0.0056820331248275  0.0057353056175387
## [2025]  0.0057646993878253  0.0058157486244080
## [2027]  0.0058634526293537  0.0058937865976233
## [2029]  0.0059827448538248  0.0060157787495804
## [2031]  0.0060584307524860  0.0061146225624935
## [2033]  0.0061980457633881  0.0062122341488560
## [2035]  0.0062889328286486  0.0063454715001741
## [2037]  0.0064283700301717  0.0064530765325813
## [2039]  0.0065130570838154  0.0065559491088631
## [2041]  0.0066366954442119  0.0066905897119201
## [2043]  0.0067533357743183  0.0067845530269523
## [2045]  0.0069017469215093  0.0069518747130684
## [2047]  0.0070270769801064  0.0070562136442686
## [2049]  0.0070927163865409  0.0072147594264929
## [2051]  0.0072490101044814  0.0073415918950496
## [2053]  0.0073957456672911  0.0074173865170568
## [2055]  0.0074880462739777  0.0075706686653770
## [2057]  0.0076330020047291  0.0077150154054020
## [2059]  0.0077889166542643  0.0078551183723196
## [2061]  0.0079136194577228  0.0080133628039661
## [2063]  0.0080848242704475  0.0081417268644172
## [2065]  0.0082000470972326  0.0082516511654591
## [2067]  0.0083320308959295  0.0084519869045414
## [2069]  0.0085137697165264  0.0085422797405713
## [2071]  0.0086348352990589  0.0087080217314450
## [2073]  0.0087695055370073  0.0088893504668397
## [2075]  0.0089375501265384  0.0090427172995411
## [2077]  0.0090965198889992  0.0092299708068568
## [2079]  0.0092538045149624  0.0093823264075212
## [2081]  0.0094772198026846  0.0095414594628477
## [2083]  0.0096387005303313  0.0097077621675174
## [2085]  0.0097616165652313  0.0098846576825291
## [2087]  0.0099393913067481  0.0100810658148537
## [2089]  0.0100940598205848  0.0102513361235323
## [2091]  0.0103542578697022  0.0104235036351813
## [2093]  0.0104947271873430  0.0105583952183981
## [2095]  0.0106522904093205  0.0107869782925308
## [2097]  0.0108731337128998  0.0109946651671287
## [2099]  0.0110925443112506  0.0112178808446680
## [2101]  0.0112293452411876  0.0114184809958570
## [2103]  0.0115076432920040  0.0115488848527425
## [2105]  0.0116338875162573  0.0117929569993813
## [2107]  0.0118745472803033  0.0119616567274134
## [2109]  0.0121380117488416  0.0121634675956130
## [2111]  0.0122800966744388  0.0123873505965913
## [2113]  0.0125016415356579  0.0126739975358230
## [2115]  0.0127970859065967  0.0128895372599376
## [2117]  0.0130203087326888  0.0130538014622816
## [2119]  0.0131630700180779  0.0133809943511270
## [2121]  0.0133945716783550  0.0135332459424873
## [2123]  0.0137012378595739  0.0138005448053235
## [2125]  0.0139901991389208  0.0140254042785405
## [2127]  0.0141301188439485  0.0143268641944811
## [2129]  0.0143680195725116  0.0145364815232774
## [2131]  0.0147386973521324  0.0148411682664509
## [2133]  0.0149937086278075  0.0150473594919857
## [2135]  0.0152087872205568  0.0153183350617188
## [2137]  0.0155368537117579  0.0156623358795761
## [2139]  0.0158043414737586  0.0159430766545128
## [2141]  0.0160642038472681  0.0162245535264473
## [2143]  0.0163692231169518  0.0164359626923138
## [2145]  0.0166472359464827  0.0167214661380716
## [2147]  0.0169054775401423  0.0171162131980791
## [2149]  0.0172875008095656  0.0173045387313278
## [2151]  0.0175179808491575  0.0177446838084163
## [2153]  0.0178235629667238  0.0180089575690917
## [2155]  0.0181703018004801  0.0183608663840738
## [2157]  0.0184624437801987  0.0186816822464265
## [2159]  0.0187366574649017  0.0189755266934624
## [2161]  0.0190876719168947  0.0193879585933526
## [2163]  0.0195184461155453  0.0196675821803129
## [2165]  0.0198465769950151  0.0199787656169547
## [2167]  0.0201290850271841  0.0202750514283311
## [2169]  0.0205761730404462  0.0206351569593559
## [2171]  0.0208340348494903  0.0210683742128772
## [2173]  0.0212074426182833  0.0214269765012985
## [2175]  0.0216270526089761  0.0217896732779255
## [2177]  0.0221027073037800  0.0222445665298796
## [2179]  0.0224119747919033  0.0226806102885660
## [2181]  0.0228462234580169  0.0230220245765358
## [2183]  0.0232691599201816  0.0235025621608198
## [2185]  0.0237432549902311  0.0238784857999368
## [2187]  0.0240651108621287  0.0242885066195202
## [2189]  0.0245976860473630  0.0247093396933382
## [2191]  0.0249352498643368  0.0251525106598681
## [2193]  0.0253053207438215  0.0256589695110369
## [2195]  0.0259245061448767  0.0260743862657211
## [2197]  0.0262973569476185  0.0265608318802435
## [2199]  0.0267931829562861  0.0270947764097482
## [2201]  0.0271347677617786  0.0274683648871782
## [2203]  0.0277519096655098  0.0279497591051630
## [2205]  0.0281919141942659  0.0284128505577479
## [2207]  0.0286338348652397  0.0290139108143602
## [2209]  0.0292292153510155  0.0294394508938241
## [2211]  0.0298022932714797  0.0299994180143458
## [2213]  0.0302489192376182  0.0304801196815110
## [2215]  0.0307905185493600  0.0309749196103937
## [2217]  0.0314072690704337  0.0317049080630592
## [2219]  0.0319966924079404  0.0321287536857364
## [2221]  0.0323805665456175  0.0326817663716620
## [2223]  0.0330966907738080  0.0332959409441674
## [2225]  0.0335445423781663  0.0340837141384211
## [2227]  0.0344035382351231  0.0345754952278875
## [2229]  0.0347533069947767  0.0352078183667079
## [2231]  0.0355237044717520  0.0357994674060406
## [2233]  0.0361267559877693  0.0364002679778295
## [2235]  0.0367678054355211  0.0371985910106314
## [2237]  0.0373860075452863  0.0378015263422808
## [2239]  0.0381101047910428  0.0383731417984007
## [2241]  0.0387590738717706  0.0392011246048470
## [2243]  0.0395012822124655  0.0399352172660083
## [2245]  0.0402870688886458  0.0403495338895091
## [2247]  0.0408550027828637  0.0412055362768347
## [2249]  0.0416908042714736  0.0420441461514640
## [2251]  0.0423633427493191  0.0425728716404998
## [2253]  0.0432884818757161  0.0435625590854828
## [2255]  0.0439621290687222  0.0442277455286685
## [2257]  0.0447869728869722  0.0451190800173307
## [2259]  0.0454087789629599  0.0457620933173591
## [2261]  0.0463472126052932  0.0466672328640011
## [2263]  0.0471959608476050  0.0473589720915592
## [2265]  0.0479936269371747  0.0484567787408335
## [2267]  0.0485699316452690  0.0492656227759697
## [2269]  0.0495397450341902  0.0501140253269859
## [2271]  0.0505091613774945  0.0508859508638430
## [2273]  0.0516040850177740  0.0518503994219515
## [2275]  0.0523701582407412  0.0529951265866365
## [2277]  0.0530958055877111  0.0537113100800809
## [2279]  0.0540075831658465  0.0546675784700290
## [2281]  0.0552359164683343  0.0554565962061999
## [2283]  0.0561405645948534  0.0566788932896043
## [2285]  0.0571794565264733  0.0577446435838127
## [2287]  0.0584423461870018  0.0585540083291946
## [2289]  0.0594175635400998  0.0596330080058317
## [2291]  0.0601441137810018  0.0606213300903281
## [2293]  0.0612240904998228  0.0621611481954532
## [2295]  0.0625529314795937  0.0628957831857518
## [2297]  0.0633230395101129  0.0642329179149700
## [2299]  0.0647954752074508  0.0653762008091707
## [2301]  0.0659158568347947  0.0662982060103182
## [2303]  0.0670405965445323  0.0674560391496305
## [2305]  0.0679426386126732  0.0691011094447686
## [2307]  0.0695862593994218  0.0700348235666996
## [2309]  0.0709719822076361  0.0710709636564212
## [2311]  0.0720425384370208  0.0726604235518440
## [2313]  0.0731140412467432  0.0737033039447861
## [2315]  0.0743167446008720  0.0748813913398054
## [2317]  0.0757431631342879  0.0761852351820910
## [2319]  0.0771230512973290  0.0781575968027379
## [2321]  0.0785096859539053  0.0789736425579738
## [2323]  0.0796717053863266  0.0804181100070821
## [2325]  0.0814905237757490  0.0820035084338874
## [2327]  0.0826387041735839  0.0834437330248827
## [2329]  0.0844353449308884  0.0849444617560706
## [2331]  0.0857098768556448  0.0863477688816819
## [2333]  0.0876944666416904  0.0880837355967353
## [2335]  0.0892237903852143  0.0894172923241331
## [2337]  0.0905260574107198  0.0915667178197063
## [2339]  0.0920102254890910  0.0926446188680056
## [2341]  0.0935945268960530  0.0947561638916551
## [2343]  0.0957695624491581  0.0961067268513308
## [2345]  0.0970800786915295  0.0975578122467073
## [2347]  0.0988234539568269  0.0996799681015118
## [2349]  0.1004456458254461  0.1015233420787839
## [2351]  0.1020820933014093  0.1034777287302212
## [2353]  0.1037848817924425  0.1050134473980267
## [2355]  0.1059461481494701  0.1071242965489497
## [2357]  0.1082866023404190  0.1092168441025322
## [2359]  0.1099716674100552  0.1112813309366483
## [2361]  0.1116737445825471  0.1124727699073136
## [2363]  0.1133924119463966  0.1149180429969463
## [2365]  0.1154832570078077  0.1167321210888793
## [2367]  0.1183209337638792  0.1184510292185230
## [2369]  0.1200808138372073  0.1206217797229798
## [2371]  0.1220566200295363  0.1231822507516343
## [2373]  0.1242079728476080  0.1255157129751723
## [2375]  0.1268725720530800  0.1280127309943789
## [2377]  0.1287194743970066  0.1302736321404318
## [2379]  0.1312582164282244  0.1316925355990486
## [2381]  0.1336137304340368  0.1341301658151073
## [2383]  0.1361290515632851  0.1369266135886104
## [2385]  0.1383753641147875  0.1392215213171716
## [2387]  0.1411353988527879  0.1413358911364674
## [2389]  0.1432844563954992  0.1441140866151098
## [2391]  0.1452624467116103  0.1470556788513906
## [2393]  0.1482135051931777  0.1499341575123891
## [2395]  0.1505962967076281  0.1526375990077739
## [2397]  0.1531396774846516  0.1547101413472804
## [2399]  0.1560846686616034  0.1580062504502047
## [2401]  0.1589716216270878  0.1612354181874350
## [2403]  0.1615884078069837  0.1628319383099704
## [2405]  0.1655516627463616  0.1659777700433266
## [2407]  0.1678641722405305  0.1692190517944808
## [2409]  0.1714803735517457  0.1726850389084674
## [2411]  0.1735208598949071  0.1746837874408715
## [2413]  0.1763062266383296  0.1788747382130932
## [2415]  0.1801755343829847  0.1820233748111747
## [2417]  0.1838667709729679  0.1852669859594969
## [2419]  0.1872257512275678  0.1880706025412250
## [2421]  0.1904937713570423  0.1908327908336385
## [2423]  0.1935512621077075  0.1947530417493721
## [2425]  0.1972447135014306  0.1991394193211364
## [2427]  0.2005800086378969  0.2028687531852697
## [2429]  0.2040188631372455  0.2062400846663737
## [2431]  0.2065935435856658  0.2096578092141072
## [2433]  0.2107950870719580  0.2134964600937606
## [2435]  0.2145285497470010  0.2170944390262398
## [2437]  0.2194292925956881  0.2210676059953280
## [2439]  0.2221987444451440  0.2239213852389969
## [2441]  0.2264934075540360  0.2278262296559298
## [2443]  0.2302599206080763  0.2332858388875157
## [2445]  0.2342411135640197  0.2358336577967809
## [2447]  0.2394148929878203  0.2402766388093142
## [2449]  0.2441385042678969  0.2456507446110571
## [2451]  0.2465728631480242  0.2492042787197882
## [2453]  0.2519431463411913  0.2542977462619647
## [2455]  0.2569607179687109  0.2593169912278235
## [2457]  0.2605372190159673  0.2639262633065218
## [2459]  0.2662161500842395  0.2679728011273860
## [2461]  0.2710931089995698  0.2728747270331937
## [2463]  0.2757674008454600  0.2780240753996407
## [2465]  0.2803005750813294  0.2834715840877180
## [2467]  0.2856580440696984  0.2886844604501793
## [2469]  0.2895061875099996  0.2919594357709673
## [2471]  0.2945741276775497  0.2967951737162096
## [2473]  0.3001096300567537  0.3036116895938538
## [2475]  0.3062777718837845  0.3093237755286243
## [2477]  0.3113839566340761  0.3130913136042018
## [2479]  0.3164513470558318  0.3184586868945961
## [2481]  0.3220919704781496  0.3266157779726614
## [2483]  0.3295411018384976  0.3310036070220642
## [2485]  0.3330167335788876  0.3367855153062288
## [2487]  0.3392230534185386  0.3436427717073477
## [2489]  0.3467933098033615  0.3486988898688836
## [2491]  0.3527531235071877  0.3541718437870099
## [2493]  0.3592513733597582  0.3611850069957965
## [2495]  0.3656497139414300  0.3684563317210495
## [2497]  0.3705326485693404  0.3738678695348455
## [2499]  0.3783736586178837  0.3816350575363487
## [2501]  0.3850865673161579  0.3897577875246090
## [2503]  0.3926492778592067  0.3955149710145359
## [2505]  0.3989088314473441  0.4010111414621948
## [2507]  0.4042428193765622  0.4086566383149753
## [2509]  0.4146629277532566  0.4184338559389979
## [2511]  0.4204772753093061  0.4249946765271400
## [2513]  0.4274126766642263  0.4298985086056437
## [2515]  0.4365736968475039  0.4391706040343296
## [2517]  0.4451431724130488  0.4453014848944360
## [2519]  0.4500118769846187  0.4535834553305648
## [2521]  0.4587698135608699  0.4636767951183875
## [2523]  0.4677046983571329  0.4704708151458587
## [2525]  0.4757605146740546  0.4802725670600540
## [2527]  0.4839723076273363  0.4882990149344322
## [2529]  0.4941153347281752  0.4972222500970619
## [2531]  0.5017221538368645  0.5045750133096636
## [2533]  0.5085397661400403  0.5156864030720936
## [2535]  0.5202580266362539  0.5235080734235280
## [2537]  0.5276697729151744  0.5346385766331104
## [2539]  0.5361808547486796  0.5441390148311248
## [2541]  0.5484599902452032  0.5504575258894692
## [2543]  0.5580492835341822  0.5630945690271607
## [2545]  0.5689815962846504  0.5704231089712614
## [2547]  0.5757658378124723  0.5811724698539750
## [2549]  0.5865010267950127  0.5908297609492791
## [2551]  0.5956625257386342  0.6035334565719274
## [2553]  0.6075130024345116  0.6148007686362331
## [2555]  0.6208355821283462  0.6240870793468091
## [2557]  0.6313482245256035  0.6342424401161010
## [2559]  0.6416139080168631  0.6450101055409397
## [2561]  0.6540437824008529  0.6583390967499183
## [2563]  0.6646775282899869  0.6730576575259563
## [2565]  0.6759866977324515  0.6845223818514137
## [2567]  0.6912048229994370  0.6957137975029442
## [2569]  0.6982609032548323  0.7055198463495512
## [2571]  0.7137084988403872  0.7230213997996791
## [2573]  0.7280281822190297  0.7332717205722392
## [2575]  0.7376079759285514  0.7441159056520871
## [2577]  0.7511510302158765  0.7581815622257894
## [2579]  0.7656341403394616  0.7710572596069472
## [2581]  0.7804197130065130  0.7899063259338654
## [2583]  0.7908590206609583  0.8015072018533350
## [2585]  0.8075881063761102  0.8130293164086660
## [2587]  0.8253696030963937  0.8284163098725372
## [2589]  0.8359668070077944  0.8434472146225291
## [2591]  0.8552780603726226  0.8562588048577028
## [2593]  0.8702168760780750  0.8769980710064065
## [2595]  0.8826574094911446  0.8923592567613793
## [2597]  0.9019023642790793  0.9052738459194060
## [2599]  0.9163863676592875  0.9183354142667504
## [2601]  0.9281122679491540  0.9394390507901467
## [2603]  0.9446797369529798  0.9566881729372262
## [2605]  0.9637699953283403  0.9693191709020114
## [2607]  0.9806767580209838  0.9882020365855957
## [2609]  1.0011188694813764  1.0071298461826386
## [2611]  1.0142161021340805  1.0263651880766171
## [2613]  1.0370583668711324  1.0417667643745787
## [2615]  1.0494435665373236  1.0661270748605269
## [2617]  1.0708932350268972  1.0802152954453490
## [2619]  1.0883188151465426  1.0974767261676721
## [2621]  1.1112048304777016  1.1203375731413572
## [2623]  1.1278453924721601  1.1441999643119856
## [2625]  1.1518283271857244  1.1602753615719832
## [2627]  1.1694454009402251  1.1769545080297226
## [2629]  1.1896347961197129  1.2004057747429036
## [2631]  1.2168878564841721  1.2189802778629169
## [2633]  1.2372874658902380  1.2496030261273414
## [2635]  1.2597050180170963  1.2719874827520052
## [2637]  1.2812979562337996  1.2860269632802650
## [2639]  1.2956292556977209  1.3069071722793848
## [2641]  1.3299287575237386  1.3317191205533294
## [2643]  1.3509617836173777  1.3635284712806166
## [2645]  1.3751831545344388  1.3801843093430723
## [2647]  1.3990642318661346  1.4094266587053876
## [2649]  1.4249235603191295  1.4369049362450386
## [2651]  1.4489871341687655  1.4646191111470515
## [2653]  1.4767129204922458  1.4857064277767360
## [2655]  1.4983772858402820  1.5112256798680590
## [2657]  1.5203588500777063  1.5328859790767533
## [2659]  1.5517402844689885  1.5694357234113496
## [2661]  1.5739790562561928  1.5954208674102039
## [2663]  1.6021631908305478  1.6280951531658372
## [2665]  1.6335843797168987  1.6476634308692963
## [2667]  1.6706245668019197  1.6809857188657233
## [2669]  1.6893898803259848  1.7088905987973879
## [2671]  1.7206834173991390  1.7340396006649152
## [2673]  1.7599904290624218  1.7779825443418116
## [2675]  1.7941990256886986  1.8038870954154986
## [2677]  1.8204479805011371  1.8430377037363466
## [2679]  1.8443505159694318  1.8633230879545695
## [2681]  1.8851131997472681  1.8950185851392809
## [2683]  1.9138173087027095  1.9432989486473384
## [2685]  1.9578162619646955  1.9690760346591341
## [2687]  1.9855633278480191  1.9980032564976513
## [2689]  2.0209154850518254  2.0466079325769528
## [2691]  2.0577554774236355  2.0756606699176379
## [2693]  2.1037787878253562  2.1116133106509158
## [2695]  2.1293088415417145  2.1602867918154409
## [2697]  2.1800513670713801  2.1891028174090246
## [2699]  2.2013473192199524  2.2331229616064645
## [2701]  2.2403815011399684  2.2606802738227256
## [2703]  2.2845555933573496  2.3049407482686912
## [2705]  2.3320832057029861  2.3585057731855237
## [2707]  2.3664730011422876  2.3956523956167470
## [2709]  2.4112975301011867  2.4266056800285671
## [2711]  2.4517462815332660  2.4754128589057416
## [2713]  2.4951113176986781  2.5117739017076439
## [2715]  2.5347878306537512  2.5737433405464714
## [2717]  2.5988766954361191  2.6029689222541230
## [2719]  2.6428240775000411  2.6498844063538818
## [2721]  2.6804719330766451  2.7040298941903322
## [2723]  2.7421666120321353  2.7457939183615681
## [2725]  2.7875485222823913  2.8094232413676812
## [2727]  2.8270967031132170  2.8628982653590533
## [2729]  2.8720804902807018  2.8984434381571411
## [2731]  2.9296924271094396  2.9601996377237687
## ... (sorted values omitted for brevity) ...
## [2997] 30.5716047345665345 30.9734764995187568
## [2999] 31.3257702166249139 31.4091533457374190
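Printing every sampled value is unwieldy; a more compact way to inspect the grid is to summarise each tuned hyper-parameter at once. Below is a minimal sketch, assuming xgboost_grid is the tibble of candidate values dumped above:

# hedged sketch: number of distinct values and range of each tuned hyper-parameter
xgboost_grid %>% 
  summarise(across(everything(),
                   list(n_unique = ~dplyr::n_distinct(.x),
                        min = ~min(.x),
                        max = ~max(.x))))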
sort(unique(xgboost_grid$sample_size))
##    [1] 0.1001774 0.1005786 0.1006769 0.1011920 0.1014580
##    [6] 0.1017791 0.1019064 0.1021682 0.1024619 0.1028093
## ... (sorted values omitted for brevity) ...
## [2991] 0.9971796 0.9975407 0.9977076 0.9981953 0.9983924
## [2996] 0.9985625 0.9988872 0.9992080 0.9994646 0.9998824
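The sample_size values are roughly 3,000 draws spread over (0.1, 1), which matches the default range of dials::sample_prop(), and the values in the previous dump topping out near 31.6 are consistent with the default loss_reduction() range (10^-10 to 10^1.5). The grid itself was built upstream; the following is only a hedged reconstruction of how a Latin-hypercube grid with these ranges could be drawn. The size of 3,000, the seed, and the train_predictors object are assumptions, not the document's actual code:

# hypothetical reconstruction, not the original grid-building code
set.seed(123)                              # assumed seed, for illustration only
xgb_params <- parameters(
  loss_reduction(),                        # default log10 range: 10^-10 to 10^1.5
  sample_size = sample_prop(),             # proportion in (0.1, 1)
  finalize(mtry(), train_predictors),      # upper bound = number of predictor columns
  learn_rate()                             # default log10 range: 10^-10 to 10^-1
)
xgboost_grid_sketch <- grid_latin_hypercube(xgb_params, size = 3000)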
sort(unique(xgboost_grid$mtry))
##   [1]   1   2   3   4   5   6   7   8   9  10  11  12  13
##  [14]  14  15  16  17  18  19  20  21  22  23  24  25  26
##  [27]  27  28  29  30  31  32  33  34  35  36  37  38  39
##  [40]  40  41  42  43  44  45  46  47  48  49  50  51  52
##  [53]  53  54  55  56  57  58  59  60  61  62  63  64  65
##  [66]  66  67  68  69  70  71  72  73  74  75  76  77  78
##  [79]  79  80  81  82  83  84  85  86  87  88  89  90  91
##  [92]  92  93  94  95  96  97  98  99 100 101 102 103 104
## [105] 105 106 107 108 109 110 111 112 113 114 115 116 117
## [118] 118 119 120 121 122 123 124 125 126 127 128 129 130
## [131] 131 132 133 134 135 136 137 138 139 140 141 142 143
## [144] 144 145 146 147 148 149 150 151 152 153 154 155 156
## [157] 157 158 159 160 161 162 163 164 165 166 167
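mtry runs over the integers 1 to 167, suggesting it was finalized against a predictor set with 167 columns. Note also that because scipen = 999 was set at the top of the document, the learn_rate values printed next appear in fixed notation (0.0000000001...), which obscures their magnitudes; taking log10() makes them much easier to read. A small hedged check, assuming the same xgboost_grid object:

# view the sampled learning rates on the log10 scale instead of fixed notation
range(log10(xgboost_grid$learn_rate))   # dials' default learn_rate() spans roughly -10 to -1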
sort(unique(xgboost_grid$learn_rate))
##    [1] 0.0000000001004210 0.0000000001010100
##    [3] 0.0000000001016635 0.0000000001025669
## ... (sorted values omitted for brevity) ...
##  [847] 0.0000000347150453 0.0000000349656926
##  [849] 0.0000000350480740 0.0000000353202781
##  [851] 0.0000000355819076 0.0000000358854930
##  [853] 0.0000000362103294 0.0000000364664900
##  [855] 0.0000000366318612 0.0000000368522392
##  [857] 0.0000000370214712 0.0000000373012114
##  [859] 0.0000000375597273 0.0000000378168763
##  [861] 0.0000000382569023 0.0000000383292417
##  [863] 0.0000000385881889 0.0000000388246102
##  [865] 0.0000000393064801 0.0000000394598216
##  [867] 0.0000000398069288 0.0000000400333216
##  [869] 0.0000000403612668 0.0000000407066464
##  [871] 0.0000000408949630 0.0000000412809540
##  [873] 0.0000000415778255 0.0000000416447991
##  [875] 0.0000000419954071 0.0000000422983326
##  [877] 0.0000000426784010 0.0000000427972721
##  [879] 0.0000000433189628 0.0000000436086569
##  [881] 0.0000000437892734 0.0000000439888220
##  [883] 0.0000000443861883 0.0000000446692442
##  [885] 0.0000000450180533 0.0000000452998461
##  [887] 0.0000000455618975 0.0000000458325079
##  [889] 0.0000000464317454 0.0000000465699427
##  [891] 0.0000000468090347 0.0000000471463713
##  [893] 0.0000000475244760 0.0000000479358047
##  [895] 0.0000000482737121 0.0000000485844461
##  [897] 0.0000000488311111 0.0000000493394795
##  [899] 0.0000000494387634 0.0000000498046875
##  [901] 0.0000000502591104 0.0000000506622038
##  [903] 0.0000000508722811 0.0000000515134055
##  [905] 0.0000000517183005 0.0000000521702822
##  [907] 0.0000000525134703 0.0000000527248062
##  [909] 0.0000000530425093 0.0000000536646699
##  [911] 0.0000000538543487 0.0000000542580992
##  [913] 0.0000000544909426 0.0000000550722710
##  [915] 0.0000000555540000 0.0000000557648072
##  [917] 0.0000000560629048 0.0000000566388218
##  [919] 0.0000000569727058 0.0000000573860799
##  [921] 0.0000000577082603 0.0000000580130734
##  [923] 0.0000000584525081 0.0000000590641244
##  [925] 0.0000000595487549 0.0000000598516305
##  [927] 0.0000000600158477 0.0000000607024465
##  [929] 0.0000000610189350 0.0000000615551092
##  [931] 0.0000000616914607 0.0000000621327238
##  [933] 0.0000000625832572 0.0000000629948597
##  [935] 0.0000000637420935 0.0000000641986017
##  [937] 0.0000000644673389 0.0000000650384154
##  [939] 0.0000000653930209 0.0000000658554204
##  [941] 0.0000000661517034 0.0000000666287537
##  [943] 0.0000000673196106 0.0000000675195373
##  [945] 0.0000000682004674 0.0000000684076948
##  [947] 0.0000000690521385 0.0000000698196088
##  [949] 0.0000000700932961 0.0000000705767217
##  [951] 0.0000000708915667 0.0000000716662203
##  [953] 0.0000000720467123 0.0000000724659024
##  [955] 0.0000000729295901 0.0000000733823707
##  [957] 0.0000000738172217 0.0000000744730802
##  [959] 0.0000000752359724 0.0000000755092362
##  [961] 0.0000000760954393 0.0000000768178357
##  [963] 0.0000000772813113 0.0000000774748353
##  [965] 0.0000000782341750 0.0000000786688672
##  [967] 0.0000000795412490 0.0000000798522342
##  [969] 0.0000000804386770 0.0000000812546850
##  [971] 0.0000000815823765 0.0000000822430395
##  [973] 0.0000000824919870 0.0000000832415361
##  [975] 0.0000000839451335 0.0000000845376591
##  [977] 0.0000000848196948 0.0000000853932425
##  [979] 0.0000000860423219 0.0000000867781725
##  [981] 0.0000000875407036 0.0000000877679083
##  [983] 0.0000000889184772 0.0000000895197106
##  [985] 0.0000000897720954 0.0000000907077234
##  [987] 0.0000000913451925 0.0000000917118245
##  [989] 0.0000000926185967 0.0000000927966019
##  [991] 0.0000000935234737 0.0000000944357840
##  [993] 0.0000000948207108 0.0000000957177045
##  [995] 0.0000000962115060 0.0000000969948821
##  [997] 0.0000000973737698 0.0000000984864573
##  [999] 0.0000000990067797 0.0000000993961332
## [1001] 0.0000001000761624 0.0000001010483385
## [1003] 0.0000001017950636 0.0000001026398641
## [1005] 0.0000001033252546 0.0000001041861904
## [1007] 0.0000001047406199 0.0000001054034814
## [1009] 0.0000001058701914 0.0000001066196460
## [1011] 0.0000001077203607 0.0000001081050024
## [1013] 0.0000001092859247 0.0000001100975708
## [1015] 0.0000001102338704 0.0000001109715380
## [1017] 0.0000001123623487 0.0000001129009345
## [1019] 0.0000001135505095 0.0000001147581842
## [1021] 0.0000001155846536 0.0000001162300107
## [1023] 0.0000001169888627 0.0000001172593783
## [1025] 0.0000001186091018 0.0000001189578141
## [1027] 0.0000001197495344 0.0000001205180795
## [1029] 0.0000001215262259 0.0000001228783101
## [1031] 0.0000001233766169 0.0000001242615861
## [1033] 0.0000001252023055 0.0000001262795284
## [1035] 0.0000001267900152 0.0000001278274515
## [1037] 0.0000001283503590 0.0000001294868298
## [1039] 0.0000001305902824 0.0000001317049534
## [1041] 0.0000001322994880 0.0000001332852036
## [1043] 0.0000001344603154 0.0000001352990039
## [1045] 0.0000001361423007 0.0000001371421195
## [1047] 0.0000001381538572 0.0000001387094537
## [1049] 0.0000001402628903 0.0000001407698343
## [1051] 0.0000001414514499 0.0000001423655412
## [1053] 0.0000001435805659 0.0000001442787343
## [1055] 0.0000001453465130 0.0000001471097657
## [1057] 0.0000001475378538 0.0000001486496607
## [1059] 0.0000001492888593 0.0000001511183088
## [1061] 0.0000001518860831 0.0000001529365908
## [1063] 0.0000001539659146 0.0000001555284785
## [1065] 0.0000001562965202 0.0000001576279280
## [1067] 0.0000001586067766 0.0000001596759685
## [1069] 0.0000001602715483 0.0000001616485913
## [1071] 0.0000001626523705 0.0000001636663832
## [1073] 0.0000001652141186 0.0000001657446258
## [1075] 0.0000001668197499 0.0000001679601644
## [1077] 0.0000001700310152 0.0000001703425886
## [1079] 0.0000001717740118 0.0000001727656848
## [1081] 0.0000001746087518 0.0000001759032259
## [1083] 0.0000001763719154 0.0000001781652595
## [1085] 0.0000001796363801 0.0000001810899235
## [1087] 0.0000001815303602 0.0000001826749426
## [1089] 0.0000001841903474 0.0000001857708599
## [1091] 0.0000001862968481 0.0000001877513351
## [1093] 0.0000001892357106 0.0000001905980970
## [1095] 0.0000001919048556 0.0000001929766858
## [1097] 0.0000001945277315 0.0000001959150954
## [1099] 0.0000001980152704 0.0000001990707062
## [1101] 0.0000001998386794 0.0000002010264844
## [1103] 0.0000002032454645 0.0000002045734242
## [1105] 0.0000002058723596 0.0000002067691619
## [1107] 0.0000002085953173 0.0000002094287001
## [1109] 0.0000002121339373 0.0000002135868716
## [1111] 0.0000002143124230 0.0000002155433592
## [1113] 0.0000002179985625 0.0000002188326339
## [1115] 0.0000002209045788 0.0000002225079385
## [1117] 0.0000002236650382 0.0000002244505990
## [1119] 0.0000002272611343 0.0000002285112091
## [1121] 0.0000002291492557 0.0000002322369356
## [1123] 0.0000002328757168 0.0000002346367830
## [1125] 0.0000002367086710 0.0000002374541826
## [1127] 0.0000002392109061 0.0000002412829508
## [1129] 0.0000002434505109 0.0000002449241121
## [1131] 0.0000002465204802 0.0000002480485767
## [1133] 0.0000002493320266 0.0000002514488486
## [1135] 0.0000002528569815 0.0000002544412582
## [1137] 0.0000002560239959 0.0000002586475750
## [1139] 0.0000002612010731 0.0000002613813280
## [1141] 0.0000002636831614 0.0000002648621432
## [1143] 0.0000002680694478 0.0000002698124696
## [1145] 0.0000002708860493 0.0000002724892915
## [1147] 0.0000002752568658 0.0000002762771180
## [1149] 0.0000002793280939 0.0000002808010353
## [1151] 0.0000002819070646 0.0000002847236096
## [1153] 0.0000002876610052 0.0000002884997542
## [1155] 0.0000002915564059 0.0000002936784690
## [1157] 0.0000002940039685 0.0000002973878594
## [1159] 0.0000002987844647 0.0000003016905814
## [1161] 0.0000003037632924 0.0000003054738921
## [1163] 0.0000003065925918 0.0000003095076911
## [1165] 0.0000003105774390 0.0000003128610505
## [1167] 0.0000003159761892 0.0000003183896987
## [1169] 0.0000003202131484 0.0000003224419922
## [1171] 0.0000003250632736 0.0000003259516917
## [1173] 0.0000003292246232 0.0000003306712578
## [1175] 0.0000003338568728 0.0000003367873718
## [1177] 0.0000003385489808 0.0000003401521927
## [1179] 0.0000003430805126 0.0000003460987460
## [1181] 0.0000003482161095 0.0000003502266910
## [1183] 0.0000003538291991 0.0000003554505963
## [1185] 0.0000003584662847 0.0000003606548698
## [1187] 0.0000003628993132 0.0000003651940898
## [1189] 0.0000003673669885 0.0000003693781949
## [1191] 0.0000003738309170 0.0000003746823356
## [1193] 0.0000003773344635 0.0000003814483649
## [1195] 0.0000003842770765 0.0000003848318440
## [1197] 0.0000003883792398 0.0000003923490435
## [1199] 0.0000003927690509 0.0000003965337216
## [1201] 0.0000004002648129 0.0000004014184891
## [1203] 0.0000004053162362 0.0000004074876624
## [1205] 0.0000004112594942 0.0000004135035932
## [1207] 0.0000004176814203 0.0000004205765965
## [1209] 0.0000004230432699 0.0000004251399933
## [1211] 0.0000004269080487 0.0000004324361160
## [1213] 0.0000004337978904 0.0000004356442764
## [1215] 0.0000004415604424 0.0000004421249543
## [1217] 0.0000004468422362 0.0000004490486883
## [1219] 0.0000004529886999 0.0000004550093831
## [1221] 0.0000004596907560 0.0000004629127044
## [1223] 0.0000004648320241 0.0000004690629669
## [1225] 0.0000004703844411 0.0000004735034923
## [1227] 0.0000004772082145 0.0000004810374697
## [1229] 0.0000004851379783 0.0000004882942586
## [1231] 0.0000004927047784 0.0000004952917026
## [1233] 0.0000004994190025 0.0000005017267570
## [1235] 0.0000005055883013 0.0000005075786582
## [1237] 0.0000005137394361 0.0000005154403964
## [1239] 0.0000005186274616 0.0000005234527209
## [1241] 0.0000005273489414 0.0000005319117129
## [1243] 0.0000005331425815 0.0000005381632190
## [1245] 0.0000005428094517 0.0000005448812236
## [1247] 0.0000005501636273 0.0000005532689059
## [1249] 0.0000005577982505 0.0000005585095642
## [1251] 0.0000005646318134 0.0000005663162072
## [1253] 0.0000005710789039 0.0000005764991871
## [1255] 0.0000005815491093 0.0000005840136493
## [1257] 0.0000005877329353 0.0000005904647944
## [1259] 0.0000005944806213 0.0000006015769819
## [1261] 0.0000006031615565 0.0000006088184965
## [1263] 0.0000006140777016 0.0000006160618648
## [1265] 0.0000006232019456 0.0000006260032507
## [1267] 0.0000006303648825 0.0000006337648982
## [1269] 0.0000006408696313 0.0000006432994939
## [1271] 0.0000006471981484 0.0000006539772706
## [1273] 0.0000006587713494 0.0000006614506870
## [1275] 0.0000006668593569 0.0000006702733732
## [1277] 0.0000006752649656 0.0000006819741628
## [1279] 0.0000006829148191 0.0000006886018348
## [1281] 0.0000006939776640 0.0000007004921905
## [1283] 0.0000007052643572 0.0000007072064888
## [1285] 0.0000007117592859 0.0000007172654225
## [1287] 0.0000007237144004 0.0000007269635169
## [1289] 0.0000007351309451 0.0000007367973030
## [1291] 0.0000007427237314 0.0000007494723956
## [1293] 0.0000007557257756 0.0000007599043215
## [1295] 0.0000007669167522 0.0000007689022154
## [1297] 0.0000007750129074 0.0000007833315568
## [1299] 0.0000007864827446 0.0000007939178743
## [1301] 0.0000007992855605 0.0000008013370548
## [1303] 0.0000008084827505 0.0000008165326376
## [1305] 0.0000008219921760 0.0000008233783430
## [1307] 0.0000008296134234 0.0000008347283040
## [1309] 0.0000008429731811 0.0000008496142848
## [1311] 0.0000008535387346 0.0000008605570929
## [1313] 0.0000008646631454 0.0000008713489513
## [1315] 0.0000008756667442 0.0000008813250360
## [1317] 0.0000008913312668 0.0000008965322113
## [1319] 0.0000009006897861 0.0000009106478279
## [1321] 0.0000009125568098 0.0000009207404215
## [1323] 0.0000009257048866 0.0000009367309597
## [1325] 0.0000009402836488 0.0000009453503239
## [1327] 0.0000009568815185 0.0000009630304606
## [1329] 0.0000009683112904 0.0000009741931205
## [1331] 0.0000009792050061 0.0000009874219426
## [1333] 0.0000009916622420 0.0000009982585821
## [1335] 0.0000010056536940 0.0000010141386394
## [1337] 0.0000010205327640 0.0000010270272050
## [1339] 0.0000010328346968 0.0000010449986868
## [1341] 0.0000010530093592 0.0000010591490740
## [1343] 0.0000010680390672 0.0000010743182154
## [1345] 0.0000010824385339 0.0000010861001658
## [1347] 0.0000010977069298 0.0000010993534831
## [1349] 0.0000011068019318 0.0000011203605944
## [1351] 0.0000011244592911 0.0000011322747046
## [1353] 0.0000011403710511 0.0000011490888221
## [1355] 0.0000011595995210 0.0000011631908913
## [1357] 0.0000011744347230 0.0000011776341413
## [1359] 0.0000011936157589 0.0000011949113175
## [1361] 0.0000012088335010 0.0000012184643306
## [1363] 0.0000012205128231 0.0000012305922950
## [1365] 0.0000012423700564 0.0000012492021068
## [1367] 0.0000012556438482 0.0000012634400586
## [1369] 0.0000012757100173 0.0000012808877532
## [1371] 0.0000012942764855 0.0000013055302922
## [1373] 0.0000013122214181 0.0000013233920634
## [1375] 0.0000013257580922 0.0000013340172520
## [1377] 0.0000013497078505 0.0000013555088603
## [1379] 0.0000013621480496 0.0000013717917600
## [1381] 0.0000013841049503 0.0000013954996063
## [1383] 0.0000013997906401 0.0000014124333449
## [1385] 0.0000014228260960 0.0000014336754742
## [1387] 0.0000014392133332 0.0000014525025424
## [1389] 0.0000014618938113 0.0000014706570373
## [1391] 0.0000014803046970 0.0000014985267576
## [1393] 0.0000015045558985 0.0000015157282562
## [1395] 0.0000015289402136 0.0000015311355474
## [1397] 0.0000015455664075 0.0000015557325232
## [1399] 0.0000015655586681 0.0000015756219347
## [1401] 0.0000015887523964 0.0000016055016298
## [1403] 0.0000016170139564 0.0000016242132912
## [1405] 0.0000016300833285 0.0000016450081810
## [1407] 0.0000016539111017 0.0000016703496194
## [1409] 0.0000016823272385 0.0000016920544457
## [1411] 0.0000017034379770 0.0000017112455040
## [1413] 0.0000017303215293 0.0000017434992544
## [1415] 0.0000017476910497 0.0000017586183054
## [1417] 0.0000017745231862 0.0000017827735022
## [1419] 0.0000018049232779 0.0000018094160886
## [1421] 0.0000018234054862 0.0000018325742619
## [1423] 0.0000018509479501 0.0000018696952151
## [1425] 0.0000018714698153 0.0000018850104739
## [1427] 0.0000019096159591 0.0000019113411987
## [1429] 0.0000019304156541 0.0000019367210301
## [1431] 0.0000019548201567 0.0000019730181933
## [1433] 0.0000019792827593 0.0000019985122100
## [1435] 0.0000020116811374 0.0000020231438253
## [1437] 0.0000020350984034 0.0000020556829636
## [1439] 0.0000020659837395 0.0000020796804096
## [1441] 0.0000020965425611 0.0000021111781691
## [1443] 0.0000021291211251 0.0000021447564005
## [1445] 0.0000021534756257 0.0000021734650673
## [1447] 0.0000021921531069 0.0000021943965888
## [1449] 0.0000022174068671 0.0000022308955133
## [1451] 0.0000022534927538 0.0000022569514599
## [1453] 0.0000022717066745 0.0000023003450166
## [1455] 0.0000023098894253 0.0000023184700105
## [1457] 0.0000023432449156 0.0000023592573841
## [1459] 0.0000023802328058 0.0000023906496751
## [1461] 0.0000023994327780 0.0000024214467614
## [1463] 0.0000024475765826 0.0000024605851447
## [1465] 0.0000024730871380 0.0000024898370773
## [1467] 0.0000025138906602 0.0000025268239572
## [1469] 0.0000025429673902 0.0000025612436699
## [1471] 0.0000025870219404 0.0000025963036002
## [1473] 0.0000026194059232 0.0000026267320777
## [1475] 0.0000026496142569 0.0000026782585202
## [1477] 0.0000026944923387 0.0000027090913256
## [1479] 0.0000027304923087 0.0000027384177880
## [1481] 0.0000027620743125 0.0000027843815606
## [1483] 0.0000028053431265 0.0000028311310752
## [1485] 0.0000028419865938 0.0000028514975178
## [1487] 0.0000028842171094 0.0000028950414425
## [1489] 0.0000029194895375 0.0000029325447699
## [1491] 0.0000029566660463 0.0000029903617290
## [1493] 0.0000030032148168 0.0000030171909586
## [1495] 0.0000030410989584 0.0000030598492171
## [1497] 0.0000030833874025 0.0000031004191627
## [1499] 0.0000031396893597 0.0000031552144127
## [1501] 0.0000031671140014 0.0000031994490894
## [1503] 0.0000032179186569 0.0000032314861025
## [1505] 0.0000032561054388 0.0000032821249315
## [1507] 0.0000033181960119 0.0000033404843880
## [1509] 0.0000033538094379 0.0000033830195898
## [1511] 0.0000033941591589 0.0000034254618833
## [1513] 0.0000034591666836 0.0000034736454346
## [1515] 0.0000034928923117 0.0000035176227453
## [1517] 0.0000035557004599 0.0000035804135924
## [1519] 0.0000035978490881 0.0000036249466980
## [1521] 0.0000036357952657 0.0000036676584144
## [1523] 0.0000036927693806 0.0000037297186906
## [1525] 0.0000037502286638 0.0000037616231993
## [1527] 0.0000038083825062 0.0000038189082531
## [1529] 0.0000038496662156 0.0000038658964331
## [1531] 0.0000039160640444 0.0000039399185552
## [1533] 0.0000039618044230 0.0000039785416680
## [1535] 0.0000040066153436 0.0000040375533121
## [1537] 0.0000040765568834 0.0000041084895500
## [1539] 0.0000041360560958 0.0000041423419003
## [1541] 0.0000041972833729 0.0000042177755596
## [1543] 0.0000042537284593 0.0000042759626683
## [1545] 0.0000043073585701 0.0000043219739227
## [1547] 0.0000043626674852 0.0000043784802912
## [1549] 0.0000044165509366 0.0000044459532045
## [1551] 0.0000044833951733 0.0000045126675717
## [1553] 0.0000045429836082 0.0000045676746394
## [1555] 0.0000046010121286 0.0000046303935069
## [1557] 0.0000046750980873 0.0000047196615158
## [1559] 0.0000047491330069 0.0000047840021604
## [1561] 0.0000048112089229 0.0000048390211919
## [1563] 0.0000048686325827 0.0000049100271264
## [1565] 0.0000049428338584 0.0000049882725583
## [1567] 0.0000049901026924 0.0000050320479216
## [1569] 0.0000050732031729 0.0000050972951792
## [1571] 0.0000051413887161 0.0000051766943041
## [1573] 0.0000052005746018 0.0000052506328318
## [1575] 0.0000052968883827 0.0000053238710070
## [1577] 0.0000053502867734 0.0000053889434926
## [1579] 0.0000054391185266 0.0000054809756881
## [1581] 0.0000055048370505 0.0000055539033557
## [1583] 0.0000055734953591 0.0000056487540580
## [1585] 0.0000056712261801 0.0000057146240745
## [1587] 0.0000057553647010 0.0000057731297879
## [1589] 0.0000058095374068 0.0000058746552948
## [1591] 0.0000058896831232 0.0000059294442956
## [1593] 0.0000059995520297 0.0000060165769583
## [1595] 0.0000060573297163 0.0000061285402692
## [1597] 0.0000061490220967 0.0000062065618080
## [1599] 0.0000062351714828 0.0000062755093278
## [1601] 0.0000063530528401 0.0000063651847748
## [1603] 0.0000064165067111 0.0000064775031876
## [1605] 0.0000065154362015 0.0000065706647267
## [1607] 0.0000065975339195 0.0000066376734913
## [1609] 0.0000067054148937 0.0000067594500514
## [1611] 0.0000068055170889 0.0000068402084391
## [1613] 0.0000068575417447 0.0000069191367985
## [1615] 0.0000069568822099 0.0000070152323435
## [1617] 0.0000070866675150 0.0000071170433788
## [1619] 0.0000071702394084 0.0000072159785866
## [1621] 0.0000072871175661 0.0000073259346648
## [1623] 0.0000073676756669 0.0000074112077746
## [1625] 0.0000074473250155 0.0000075351396110
## [1627] 0.0000075865652134 0.0000076445409673
## [1629] 0.0000077000271243 0.0000077204535015
## [1631] 0.0000077969841322 0.0000078415004803
## [1633] 0.0000079240893994 0.0000079702723172
## [1635] 0.0000080210306610 0.0000080843745896
## [1637] 0.0000081288982570 0.0000081484330588
## [1639] 0.0000082493290883 0.0000083054348741
## [1641] 0.0000083262782406 0.0000084141260878
## [1643] 0.0000084373911014 0.0000085032119486
## [1645] 0.0000085575388446 0.0000086284597591
## [1647] 0.0000087196981976 0.0000087611373674
## [1649] 0.0000088216868722 0.0000088936770722
## [1651] 0.0000089430271357 0.0000090327113824
## [1653] 0.0000090686648371 0.0000091370235452
## [1655] 0.0000091724404016 0.0000092604780564
## [1657] 0.0000093257797088 0.0000094064752873
## [1659] 0.0000094322144323 0.0000094846927028
## [1661] 0.0000095622385267 0.0000096518734418
## [1663] 0.0000096903545930 0.0000097579319672
## [1665] 0.0000098640338717 0.0000099357550852
## [1667] 0.0000100099227412 0.0000100368660832
## [1669] 0.0000101108170270 0.0000102315945508
## [1671] 0.0000102948934513 0.0000103692712830
## [1673] 0.0000103874837259 0.0000105028392779
## [1675] 0.0000105244581640 0.0000106122653406
## [1677] 0.0000106850369887 0.0000108019635008
## [1679] 0.0000108834735178 0.0000109181497863
## [1681] 0.0000109711117609 0.0000110873239226
## [1683] 0.0000111869504686 0.0000111951941430
## [1685] 0.0000112909339357 0.0000113819853352
## [1687] 0.0000114824859079 0.0000115485151484
## [1689] 0.0000116656543471 0.0000116767329889
## [1691] 0.0000117527600933 0.0000118612022844
## [1693] 0.0000119322406101 0.0000120072534446
## [1695] 0.0000121321953282 0.0000122061820077
## [1697] 0.0000123302171453 0.0000124120298969
## [1699] 0.0000124542668219 0.0000125848236530
## [1701] 0.0000126475648537 0.0000127428746086
## [1703] 0.0000128036162136 0.0000128731750543
## [1705] 0.0000130239331336 0.0000130772862398
## [1707] 0.0000131408576757 0.0000132491919871
## [1709] 0.0000133894186784 0.0000134837576973
## [1711] 0.0000135829995603 0.0000136422504154
## [1713] 0.0000137630542539 0.0000138479794606
## [1715] 0.0000139012802250 0.0000139891929082
## [1717] 0.0000141298446831 0.0000142415760396
## [1719] 0.0000143247702593 0.0000143806834814
## [1721] 0.0000145314657569 0.0000145585459680
## [1723] 0.0000146602077875 0.0000147998395495
## [1725] 0.0000149605238283 0.0000149961300816
## [1727] 0.0000151137266283 0.0000152158728166
## [1729] 0.0000153733294923 0.0000154554693630
## [1731] 0.0000155642257750 0.0000156329301719
## [1733] 0.0000157221736859 0.0000158611909228
## [1735] 0.0000159241997863 0.0000160694195651
## [1737] 0.0000161828623029 0.0000163258848530
## [1739] 0.0000164584823445 0.0000165268831440
## [1741] 0.0000166270404938 0.0000167639701228
## [1743] 0.0000168816586840 0.0000170605042599
## [1745] 0.0000170785443365 0.0000172168741055
## [1747] 0.0000173459269006 0.0000174656303673
## [1749] 0.0000176250762272 0.0000177308367362
## [1751] 0.0000178784257979 0.0000179996833019
## [1753] 0.0000181115972681 0.0000182049533334
## [1755] 0.0000183254915200 0.0000184357169021
## [1757] 0.0000185719142693 0.0000187720971476
## [1759] 0.0000187983655579 0.0000190110259025
## [1761] 0.0000191236412899 0.0000192432759530
## [1763] 0.0000193851152286 0.0000195061916814
## [1765] 0.0000196429052195 0.0000197355495490
## [1767] 0.0000198623671415 0.0000200479862020
## [1769] 0.0000202027387964 0.0000202950427801
## [1771] 0.0000204542807417 0.0000206479097659
## [1773] 0.0000207386263695 0.0000208619935869
## [1775] 0.0000210768590087 0.0000211719058241
## [1777] 0.0000213295567652 0.0000215529764195
## [1779] 0.0000216329236341 0.0000217586544012
## [1781] 0.0000219459073652 0.0000220847092661
## [1783] 0.0000222065348772 0.0000224322056696
## [1785] 0.0000225144435108 0.0000226997800143
## [1787] 0.0000228336834966 0.0000231043082533
## [1789] 0.0000232511304798 0.0000233539508761
## [1791] 0.0000235347121212 0.0000236951129217
## [1793] 0.0000238450874080 0.0000240076628468
## [1795] 0.0000241194056828 0.0000242869304569
## [1797] 0.0000245345650242 0.0000247242150896
## [1799] 0.0000249070957217 0.0000249966175711
## [1801] 0.0000252837810829 0.0000254131399708
## [1803] 0.0000254874341160 0.0000258065693646
## [1805] 0.0000259156856270 0.0000260406502331
## [1807] 0.0000263567011489 0.0000265291875721
## [1809] 0.0000266086472678 0.0000268621654609
## [1811] 0.0000269279556763 0.0000272247499679
## [1813] 0.0000274356136243 0.0000275707779772
## [1815] 0.0000278594088897 0.0000279749214604
## [1817] 0.0000280960835733 0.0000283050119871
## [1819] 0.0000285953674038 0.0000286724915415
## [1821] 0.0000288782192629 0.0000290764784819
## [1823] 0.0000294368740605 0.0000296356832337
## [1825] 0.0000297467940761 0.0000300465713368
## [1827] 0.0000301971114288 0.0000303890982273
## [1829] 0.0000305366099922 0.0000307374792423
## [1831] 0.0000310188391548 0.0000313285493250
## [1833] 0.0000313620828955 0.0000317126954208
## [1835] 0.0000318120730855 0.0000321207769097
## [1837] 0.0000323137460339 0.0000324980806474
## [1839] 0.0000328092397164 0.0000331073430741
## [1841] 0.0000331899756459 0.0000335512766760
## [1843] 0.0000336126151021 0.0000338602745558
## [1845] 0.0000342328044013 0.0000345141588443
## [1847] 0.0000346282874767 0.0000349586707278
## [1849] 0.0000351416864513 0.0000352988534046
## [1851] 0.0000356843364829 0.0000357786607999
## [1853] 0.0000360405585591 0.0000364403863808
## [1855] 0.0000365172655956 0.0000368838465126
## [1857] 0.0000372101743827 0.0000374874754999
## [1859] 0.0000377554096140 0.0000378532291949
## [1861] 0.0000382647679920 0.0000383564398625
## [1863] 0.0000385979212177 0.0000388473380325
## [1865] 0.0000391285454021 0.0000394363832582
## [1867] 0.0000398828962522 0.0000401311199555
## [1869] 0.0000404369504243 0.0000406803928047
## [1871] 0.0000410090678542 0.0000412918219751
## [1873] 0.0000413702800505 0.0000417899533374
## [1875] 0.0000418998698238 0.0000422751494453
## [1877] 0.0000427083126109 0.0000429344605278
## [1879] 0.0000431829985526 0.0000434925772204
## [1881] 0.0000438040153571 0.0000439572564909
## [1883] 0.0000443170754508 0.0000446936376173
## [1885] 0.0000450065008866 0.0000454696766564
## [1887] 0.0000457920315643 0.0000459131754679
## [1889] 0.0000463455042018 0.0000465620892418
## [1891] 0.0000468584107136 0.0000472541652931
## [1893] 0.0000474825618128 0.0000479164092421
## [1895] 0.0000484102283391 0.0000487243442197
## [1897] 0.0000489588441844 0.0000491387063239
## [1899] 0.0000496749874168 0.0000500645961080
## [1901] 0.0000504193195674 0.0000504744222082
## [1903] 0.0000509006571800 0.0000512189479201
## [1905] 0.0000518600451373 0.0000522234694014
## [1907] 0.0000523076974778 0.0000526632245917
## [1909] 0.0000530634437910 0.0000536362742603
## [1911] 0.0000540442102692 0.0000543125302946
## [1913] 0.0000544930040837 0.0000549828739140
## [1915] 0.0000554081352777 0.0000558948158815
## [1917] 0.0000561711929917 0.0000566220012830
## [1919] 0.0000568577776257 0.0000571881853409
## [1921] 0.0000577962272603 0.0000582519429699
## [1923] 0.0000586233882585 0.0000587910766309
## [1925] 0.0000593586193729 0.0000596300471862
## [1927] 0.0000603104378334 0.0000606910177356
## [1929] 0.0000611894967614 0.0000614480794771
## [1931] 0.0000618073757511 0.0000621104562214
## [1933] 0.0000629006514927 0.0000632725181972
## [1935] 0.0000636350403347 0.0000639747496307
## [1937] 0.0000643570282629 0.0000650423137444
## [1939] 0.0000652101895024 0.0000660423120446
## [1941] 0.0000664279973674 0.0000669188994489
## [1943] 0.0000674331120861 0.0000677673178377
## [1945] 0.0000681236973060 0.0000686407729594
## [1947] 0.0000689764056850 0.0000698201144555
## [1949] 0.0000702422654374 0.0000704742041120
## [1951] 0.0000711427377387 0.0000715455425352
## [1953] 0.0000720446375343 0.0000723451812393
## [1955] 0.0000731796098990 0.0000733210687431
## [1957] 0.0000738222422892 0.0000745017006427
## [1959] 0.0000750720105260 0.0000758068712922
## [1961] 0.0000760059816795 0.0000766978666170
## [1963] 0.0000769842106741 0.0000774562506999
## [1965] 0.0000780232544344 0.0000788139532735
## [1967] 0.0000795988096381 0.0000799572356941
## [1969] 0.0000805132873534 0.0000810715179532
## [1971] 0.0000818450078157 0.0000820060342501
## [1973] 0.0000826704043721 0.0000831518729003
## [1975] 0.0000835958906474 0.0000843260977986
## [1977] 0.0000851075716740 0.0000855648615624
## [1979] 0.0000860713322575 0.0000869962938378
## [1981] 0.0000875949621169 0.0000878537687871
## [1983] 0.0000886260907812 0.0000893803811107
## [1985] 0.0000898811396121 0.0000905883963363
## [1987] 0.0000912397510794 0.0000916676489766
## [1989] 0.0000920977883619 0.0000930721605218
## [1991] 0.0000939567892871 0.0000945354335284
## [1993] 0.0000951645969842 0.0000954514574423
## [1995] 0.0000961282244345 0.0000970466518414
## [1997] 0.0000978316255759 0.0000983806009917
## [1999] 0.0000989576180424 0.0000994542663634
## [2001] 0.0001000095213370 0.0001012845900470
## [2003] 0.0001014784336300 0.0001022867578086
## [2005] 0.0001029934556976 0.0001041476505228
## [2007] 0.0001049076520853 0.0001056699491747
## [2009] 0.0001058143997153 0.0001065767403144
## [2011] 0.0001074835348715 0.0001081511698932
## [2013] 0.0001089227832399 0.0001097893552086
## [2015] 0.0001105355063269 0.0001110434540443
## [2017] 0.0001124449275147 0.0001129917654657
## [2019] 0.0001134080851685 0.0001145499927227
## [2021] 0.0001151342925153 0.0001162190000735
## [2023] 0.0001166076085444 0.0001178021821070
## [2025] 0.0001183496263296 0.0001188840782276
## [2027] 0.0001201346048204 0.0001212568871007
## [2029] 0.0001221260689993 0.0001225715131084
## [2031] 0.0001236776536585 0.0001244478842242
## [2033] 0.0001248909903427 0.0001258650223248
## [2035] 0.0001272853500443 0.0001278755573177
## [2037] 0.0001283089482132 0.0001292591033642
## [2039] 0.0001302786999104 0.0001311356664129
## [2041] 0.0001326287150825 0.0001332548086903
## [2043] 0.0001338253036515 0.0001353555304594
## [2045] 0.0001355819901992 0.0001372197689758
## [2047] 0.0001376882753868 0.0001388059220277
## [2049] 0.0001399940802847 0.0001404504008710
## [2051] 0.0001418244062890 0.0001429855974673
## [2053] 0.0001440174951944 0.0001446512486298
## [2055] 0.0001460342473049 0.0001467845307698
## [2057] 0.0001473197261132 0.0001492091832143
## [2059] 0.0001495412242817 0.0001508818370882
## [2061] 0.0001522115825439 0.0001529655177506
## [2063] 0.0001544000822474 0.0001547167378480
## [2065] 0.0001558276946921 0.0001572461214097
## [2067] 0.0001584719203128 0.0001589571540819
## [2069] 0.0001606874657211 0.0001617926851783
## [2071] 0.0001623351775205 0.0001634726455878
## [2073] 0.0001650963075787 0.0001664369517245
## [2075] 0.0001671906985913 0.0001689673302740
## [2077] 0.0001696451315319 0.0001713163613233
## [2079] 0.0001719177430323 0.0001735552785623
## [2081] 0.0001738641159106 0.0001756094192975
## [2083] 0.0001769423398825 0.0001783827217211
## [2085] 0.0001788904849274 0.0001803904914806
## [2087] 0.0001821770299516 0.0001836435052622
## [2089] 0.0001839150638081 0.0001858562267136
## [2091] 0.0001863387455459 0.0001883984009745
## [2093] 0.0001893209631983 0.0001913147390422
## [2095] 0.0001927333359854 0.0001939997176140
## [2097] 0.0001944409487484 0.0001960049625491
## [2099] 0.0001976521773683 0.0001982620458598
## [2101] 0.0001999107717514 0.0002013909780717
## [2103] 0.0002025114349074 0.0002044843443154
## [2105] 0.0002056693610802 0.0002071902996853
## [2107] 0.0002093667011257 0.0002099510307277
## [2109] 0.0002110851032874 0.0002126734743502
## [2111] 0.0002144123234928 0.0002164439926407
## [2113] 0.0002175003777435 0.0002197339104503
## [2115] 0.0002209864650082 0.0002218079359736
## [2117] 0.0002230595635136 0.0002247154522719
## [2119] 0.0002273556005330 0.0002277488323079
## [2121] 0.0002293284855769 0.0002322428127485
## [2123] 0.0002326335625945 0.0002340914541085
## [2125] 0.0002365164727766 0.0002387073164478
## [2127] 0.0002399760290286 0.0002407308413792
## [2129] 0.0002430745226706 0.0002447850476664
## [2131] 0.0002455745746763 0.0002486288597324
## [2133] 0.0002494475334776 0.0002513160465729
## [2135] 0.0002536750004288 0.0002551158832362
## [2137] 0.0002563262126866 0.0002582171880480
## [2139] 0.0002597861278604 0.0002622912584321
## [2141] 0.0002640542719134 0.0002655200872366
## [2143] 0.0002684013859058 0.0002687731204028
## [2145] 0.0002707976006072 0.0002740764344431
## [2147] 0.0002746255673902 0.0002765832295951
## [2149] 0.0002782878149274 0.0002807391596762
## [2151] 0.0002818942024650 0.0002838029730394
## [2153] 0.0002862129904407 0.0002884773916470
## [2155] 0.0002899757585958 0.0002935489643072
## [2157] 0.0002949925104430 0.0002974824591062
## [2159] 0.0002991257373906 0.0003003760310268
## [2161] 0.0003025184037751 0.0003043260328037
## [2163] 0.0003069977752458 0.0003102386543376
## [2165] 0.0003123603627353 0.0003142499090450
## [2167] 0.0003158449389070 0.0003188533587021
## [2169] 0.0003206943958179 0.0003234758609814
## [2171] 0.0003253756159516 0.0003272635572348
## [2173] 0.0003298649036963 0.0003317107984023
## [2175] 0.0003330816894990 0.0003367844189880
## [2177] 0.0003392921345434 0.0003417792619179
## [2179] 0.0003419865268279 0.0003458085151587
## [2181] 0.0003467388311907 0.0003510421497840
## [2183] 0.0003529507123482 0.0003547117831314
## [2185] 0.0003567690292505 0.0003598813378831
## [2187] 0.0003633460797440 0.0003640142114481
## [2189] 0.0003683049034251 0.0003706105396737
## [2191] 0.0003726791997081 0.0003755703157336
## [2193] 0.0003789675764742 0.0003795141126594
## [2195] 0.0003842927260044 0.0003871290240013
## [2197] 0.0003873974614929 0.0003908651542092
## [2199] 0.0003944104564203 0.0003954679302046
## [2201] 0.0003999125606375 0.0004031181318081
## [2203] 0.0004044882401176 0.0004077279590702
## [2205] 0.0004096462176729 0.0004140472747751
## [2207] 0.0004177996662621 0.0004188090275683
## [2209] 0.0004225199256676 0.0004263718127968
## [2211] 0.0004276954487364 0.0004318675160526
## [2213] 0.0004335083965350 0.0004374756212203
## [2215] 0.0004405728026211 0.0004441466827329
## [2217] 0.0004458416437651 0.0004477660612126
## [2219] 0.0004518421914992 0.0004545384990814
## [2221] 0.0004586057851647 0.0004615987924044
## [2223] 0.0004647774139050 0.0004698518346925
## [2225] 0.0004710586688113 0.0004763894429484
## [2227] 0.0004797091157098 0.0004807835245344
## [2229] 0.0004851036620368 0.0004865612786157
## [2231] 0.0004898562323927 0.0004935672216965
## [2233] 0.0004998476633415 0.0005008442955632
## [2235] 0.0005062808117920 0.0005100505444500
## [2237] 0.0005128901188107 0.0005141238876522
## [2239] 0.0005199446616482 0.0005236103419478
## [2241] 0.0005249731737837 0.0005285310012989
## [2243] 0.0005339395758125 0.0005383475931154
## [2245] 0.0005416820401129 0.0005434296674863
## [2247] 0.0005501793731489 0.0005509657069908
## [2249] 0.0005547004416021 0.0005590578783994
## [2251] 0.0005626868010242 0.0005691997390986
## [2253] 0.0005711454154336 0.0005779167532304
## [2255] 0.0005801554417609 0.0005822455088226
## [2257] 0.0005892169895808 0.0005912212197614
## [2259] 0.0005954019655164 0.0006004052519446
## [2261] 0.0006052421349066 0.0006073364738672
## [2263] 0.0006125453782645 0.0006162028445506
## [2265] 0.0006208291837459 0.0006248636267735
## [2267] 0.0006296219575712 0.0006342583211329
## [2269] 0.0006408584419261 0.0006418386623439
## [2271] 0.0006499349253924 0.0006507153792593
## [2273] 0.0006554107429997 0.0006603784926659
## [2275] 0.0006669048514045 0.0006684230402938
## [2277] 0.0006734460787900 0.0006781075992242
## [2279] 0.0006832701946856 0.0006879476996591
## [2281] 0.0006959901756046 0.0007006115982660
## [2283] 0.0007062379964523 0.0007105341892460
## [2285] 0.0007135033787019 0.0007191556256722
## [2287] 0.0007260623331316 0.0007304754436726
## [2289] 0.0007348808793005 0.0007377602613133
## [2291] 0.0007455024396313 0.0007486369517505
## [2293] 0.0007548157325379 0.0007614236799782
## [2295] 0.0007668436834646 0.0007680995894410
## [2297] 0.0007731219913457 0.0007789268315391
## [2299] 0.0007861076854647 0.0007931102438740
## [2301] 0.0007975475863435 0.0008047740770902
## [2303] 0.0008101211147686 0.0008155907745509
## [2305] 0.0008189466374611 0.0008231986472409
## [2307] 0.0008288625291555 0.0008383981475022
## [2309] 0.0008396526094477 0.0008488871535693
## [2311] 0.0008546664123358 0.0008587323306101
## [2313] 0.0008684038503956 0.0008732927706713
## [2315] 0.0008752231550647 0.0008818073182167
## [2317] 0.0008903337352451 0.0008978123493120
## [2319] 0.0009014216363555 0.0009092955089433
## [2321] 0.0009122372658588 0.0009196068429670
## [2323] 0.0009249736281399 0.0009311804585188
## [2325] 0.0009383776898921 0.0009489672195210
## [2327] 0.0009534750376136 0.0009629455595382
## [2329] 0.0009704672269030 0.0009746448826106
## [2331] 0.0009789356259190 0.0009865379461475
## [2333] 0.0009920106680993 0.0010019311342350
## [2335] 0.0010070224282469 0.0010133025167351
## [2337] 0.0010200877240686 0.0010317546851451
## [2339] 0.0010381294553024 0.0010455928184252
## [2341] 0.0010505750842924 0.0010584681473948
## [2343] 0.0010661656382351 0.0010721139099353
## [2345] 0.0010827405736444 0.0010866173220387
## [2347] 0.0010957611768139 0.0011049785465995
## [2349] 0.0011108667595596 0.0011156127963484
## [2351] 0.0011264320888940 0.0011309110137794
## [2353] 0.0011381164697660 0.0011530485852093
## [2355] 0.0011595349395199 0.0011626563279145
## [2357] 0.0011760116650919 0.0011826762495979
## [2359] 0.0011925353779939 0.0011975468622878
## [2361] 0.0012029162337837 0.0012117570995692
## [2363] 0.0012232438627205 0.0012290459892798
## [2365] 0.0012389310133747 0.0012468532623927
## [2367] 0.0012607982413880 0.0012673740360998
## [2369] 0.0012723415382227 0.0012798938910840
## [2371] 0.0012938941989488 0.0013061429758806
## [2373] 0.0013150562940431 0.0013174282499449
## [2375] 0.0013282879072083 0.0013352134035512
## [2377] 0.0013432807101442 0.0013578300642745
## [2379] 0.0013687141317359 0.0013767192805953
## [2381] 0.0013881919857566 0.0013994144786082
## [2383] 0.0014055032103549 0.0014110458155700
## [2385] 0.0014218188404541 0.0014343519111902
## [2387] 0.0014475995084611 0.0014586473517851
## [2389] 0.0014617465267404 0.0014771478116960
## [2391] 0.0014826638876900 0.0014945852479616
## [2393] 0.0015015990474017 0.0015166489453663
## [2395] 0.0015222487066042 0.0015407106318134
## [2397] 0.0015506397048378 0.0015557592718326
## [2399] 0.0015715895474431 0.0015834930270882
## [2401] 0.0015849099738554 0.0015990361272551
## [2403] 0.0016073496574335 0.0016248775974045
## [2405] 0.0016380143800998 0.0016461069792089
## [2407] 0.0016532835397900 0.0016693222406258
## [2409] 0.0016798549104481 0.0016876622949892
## [2411] 0.0017069825494711 0.0017184497517352
## [2413] 0.0017252594117437 0.0017391519994879
## [2415] 0.0017558585338932 0.0017589471959443
## [2417] 0.0017782586099363 0.0017868388290766
## [2419] 0.0017974231749292 0.0018179621267683
## [2421] 0.0018272520673345 0.0018389670199168
## [2423] 0.0018473211654395 0.0018701087634126
## [2425] 0.0018754881532998 0.0018937718795388
## [2427] 0.0018999696486968 0.0019128554460841
## [2429] 0.0019270099607776 0.0019479875488449
## [2431] 0.0019516477921318 0.0019634073067720
## [2433] 0.0019825113587213 0.0020008604511357
## [2435] 0.0020182839532188 0.0020249103686042
## [2437] 0.0020417207579129 0.0020537304087494
## [2439] 0.0020702825701823 0.0020890819100719
## [2441] 0.0020894680604148 0.0021160670782401
## [2443] 0.0021236464930987 0.0021391895269128
## [2445] 0.0021570033889826 0.0021732965726774
## [2447] 0.0021812608200847 0.0021975911178065
## [2449] 0.0022162300748098 0.0022355694277485
## [2451] 0.0022453308186913 0.0022565698881318
## [2453] 0.0022730816071121 0.0022911099608319
## [2455] 0.0023095115769939 0.0023317515823191
## [2457] 0.0023456766133098 0.0023614338177089
## [2459] 0.0023721736813412 0.0023843804103150
## [2461] 0.0024130086977124 0.0024242145620490
## [2463] 0.0024477144880317 0.0024517856438489
## [2465] 0.0024737415105293 0.0024892516261443
## [2467] 0.0025041565261524 0.0025298284664628
## [2469] 0.0025388134839222 0.0025588797043240
## [2471] 0.0025799143846052 0.0026013688686472
## [2473] 0.0026181069810845 0.0026405529302048
## [2475] 0.0026519596878624 0.0026745121152625
## [2477] 0.0026869214231765 0.0027069519786978
## [2479] 0.0027189837280535 0.0027537676614595
## [2481] 0.0027542870755708 0.0027812921390002
## [2483] 0.0027969456662015 0.0028129741216680
## [2485] 0.0028471546226217 0.0028683683973229
## [2487] 0.0028822081736031 0.0029099400121701
## [2489] 0.0029123483712333 0.0029460326324740
## [2491] 0.0029676332239434 0.0029743582304480
## [2493] 0.0030099876390907 0.0030322258510717
## [2495] 0.0030469435996947 0.0030747986923962
## [2497] 0.0030831008456248 0.0031121415695849
## [2499] 0.0031310781322550 0.0031571075482515
## [2501] 0.0031689974617702 0.0031963206655335
## [2503] 0.0032171506121569 0.0032325436703330
## [2505] 0.0032618103065773 0.0032929455634006
## [2507] 0.0033154070599695 0.0033391654355902
## [2509] 0.0033590860806782 0.0033874408758485
## [2511] 0.0033900520918764 0.0034336942624240
## [2513] 0.0034579220349616 0.0034682649747821
## [2515] 0.0034878242055448 0.0035236723921697
## [2517] 0.0035439333404568 0.0035654511130086
## [2519] 0.0035864483970013 0.0036203555912952
## [2521] 0.0036310987766853 0.0036793906851250
## [2523] 0.0036834228103807 0.0037098466373879
## [2525] 0.0037547958982267 0.0037721708783559
## [2527] 0.0037931011006147 0.0038353739183307
## [2529] 0.0038536878355911 0.0038739696938328
## [2531] 0.0039087632736585 0.0039421976433201
## [2533] 0.0039570420865094 0.0039892183027045
## [2535] 0.0040189009378485 0.0040434958006110
## [2537] 0.0040676190167871 0.0040936441696772
## [2539] 0.0041286580003043 0.0041653730895446
## [2541] 0.0041772911586078 0.0042220949148391
## [2543] 0.0042437564369295 0.0042641036880978
## [2545] 0.0043087508641616 0.0043158878940688
## [2547] 0.0043516720439298 0.0043800295458333
## [2549] 0.0044072656730174 0.0044524006173174
## [2551] 0.0044801105194418 0.0045097875308406
## [2553] 0.0045529366652809 0.0045787733804143
## [2555] 0.0046077415582888 0.0046485562918867
## [2557] 0.0046705135713669 0.0047084607208109
## [2559] 0.0047498289831482 0.0047708452495665
## [2561] 0.0047996210442970 0.0048518448756346
## [2563] 0.0048595859565286 0.0048967536197185
## [2565] 0.0049517505437246 0.0049635291783555
## [2567] 0.0049990513708705 0.0050446585671815
## [2569] 0.0050831569151932 0.0051145440439407
## [2571] 0.0051533656456393 0.0051906765550906
## [2573] 0.0052245986462528 0.0052411162805962
## [2575] 0.0052953891969044 0.0053286193440728
## [2577] 0.0053784782235758 0.0053861105246711
## [2579] 0.0054556533348381 0.0054823416854762
## [2581] 0.0055263126726394 0.0055623331015298
## [2583] 0.0055994021878603 0.0056452175426889
## [2585] 0.0056586287843876 0.0056895105766719
## [2587] 0.0057594038593206 0.0058074847927954
## [2589] 0.0058310114200434 0.0058571527473189
## [2591] 0.0059128427262315 0.0059547610446114
## [2593] 0.0059832501920464 0.0060188130906599
## [2595] 0.0060734314179864 0.0060976024087820
## [2597] 0.0061435360957459 0.0061818851461541
## [2599] 0.0062298361919496 0.0062996635657228
## [2601] 0.0063434292113226 0.0063553528668523
## [2603] 0.0064144326096832 0.0064502482951910
## [2605] 0.0065029259473700 0.0065688237813453
## [2607] 0.0066052195174855 0.0066305334899453
## [2609] 0.0066785314989440 0.0067190522310370
## [2611] 0.0068045492218020 0.0068395285999485
## [2613] 0.0068977968539552 0.0069048004364709
## [2615] 0.0069978075331976 0.0070146992862511
## [2617] 0.0070947699299209 0.0071429586043974
## [2619] 0.0071870770143110 0.0072412280794704
## [2621] 0.0072540084171687 0.0073347167903043
## [2623] 0.0073891913445490 0.0074268461043423
## [2625] 0.0074539396048533 0.0075347932912656
## [2627] 0.0075877838739714 0.0076143703907539
## [2629] 0.0076687293915982 0.0077548333840360
## [2631] 0.0078104030006091 0.0078439821134220
## [2633] 0.0078840501404379 0.0079285243319833
## [2635] 0.0080213428215594 0.0080406737443885
## [2637] 0.0081200490251396 0.0081850744412395
## [2639] 0.0082495344492450 0.0082732623453705
## [2641] 0.0083481036194546 0.0084041951699627
## [2643] 0.0084715397624118 0.0084985679273356
## [2645] 0.0086052758434812 0.0086175389824049
## [2647] 0.0086708443607258 0.0087628075123075
## [2649] 0.0088213455879548 0.0088642281508764
## [2651] 0.0089385389818641 0.0090358270416354
## [2653] 0.0090414575453536 0.0091515575764083
## [2655] 0.0092136426756035 0.0092562925988394
## [2657] 0.0093512821452391 0.0093649650416404
## [2659] 0.0094758365791768 0.0095479018569252
## [2661] 0.0095707219623846 0.0096176891855853
## [2663] 0.0097291110708509 0.0098016512519164
## [2665] 0.0098480855727102 0.0098952985861702
## [2667] 0.0099953709530137 0.0100796138451235
## [2669] 0.0101588444248226 0.0101949858336313
## [2671] 0.0102336292666985 0.0103192607767334
## [2673] 0.0104329925748082 0.0104921279561438
## [2675] 0.0105704610627361 0.0106038161795000
## [2677] 0.0106824035363443 0.0107567250743972
## [2679] 0.0108520296632428 0.0109513751134313
## [2681] 0.0109955388772301 0.0110514678690251
## [2683] 0.0111354707178757 0.0112280254764249
## [2685] 0.0113168649825169 0.0114046747097134
## [2687] 0.0114688874736672 0.0115563951771506
## [2689] 0.0116379879392189 0.0117302989891067
## [2691] 0.0118210251071852 0.0118693872130753
## [2693] 0.0119414228051547 0.0120393474207929
## [2695] 0.0120795947685612 0.0121699809785562
## [2697] 0.0122600625565185 0.0123444643010790
## [2699] 0.0125017813319704 0.0125557372079100
## [2701] 0.0126581404953665 0.0127433917242551
## [2703] 0.0128171751854285 0.0128780812736144
## [2705] 0.0130208151535801 0.0130415989017687
## [2707] 0.0131988390176270 0.0132989116110071
## [2709] 0.0133499595907250 0.0134263978697318
## [2711] 0.0135774467290802 0.0136314427183348
## [2713] 0.0137713934417063 0.0137907501008384
## [2715] 0.0139518557539015 0.0140141504406500
## [2717] 0.0141574954782476 0.0142044779424874
## [2719] 0.0142767132812002 0.0144047527961205
## [2721] 0.0145270357006815 0.0146070987097806
## [2723] 0.0147223786742563 0.0147934569325555
## [2725] 0.0149570398647108 0.0150202153777071
## [2727] 0.0151652973262498 0.0152488310326844
## [2729] 0.0153764924457665 0.0154054272865397
## [2731] 0.0155761820701071 0.0156967424078435
## [2733] 0.0157629351809823 0.0158792304193556
## [2735] 0.0159879399086913 0.0160490037422852
## [2737] 0.0161832475456197 0.0162667823528996
## [2739] 0.0164474443372589 0.0164997972913970
## [2741] 0.0166127816223652 0.0168077761535880
## [2743] 0.0168895052015288 0.0169541575266798
## [2745] 0.0171270008395147 0.0172805840018739
## [2747] 0.0173663816300391 0.0175357012689333
## [2749] 0.0175449357108041 0.0177340269288143
## [2751] 0.0178709260367600 0.0179207187580778
## [2753] 0.0181443868020146 0.0182299122425733
## [2755] 0.0183905401549078 0.0185252756261412
## [2757] 0.0186078873600662 0.0186866769354766
## [2759] 0.0189121915400330 0.0189958670213379
## [2761] 0.0191086726530158 0.0192251583552893
## [2763] 0.0193280175763627 0.0195577258349261
## [2765] 0.0196956591069541 0.0198576996279538
## [2767] 0.0199372869428529 0.0200425929987754
## [2769] 0.0201929693525944 0.0203126733080283
## [2771] 0.0205477496853392 0.0205837265245349
## [2773] 0.0207072244296820 0.0209075970238145
## [2775] 0.0210470210220641 0.0212601524641258
## [2777] 0.0213772203493786 0.0214398104415445
## [2779] 0.0216554097268924 0.0218035385571137
## [2781] 0.0220276688481856 0.0220754510041383
## [2783] 0.0223221316078052 0.0224441167036630
## [2785] 0.0225240367179469 0.0227486205835481
## [2787] 0.0228895588157658 0.0230018084086757
## [2789] 0.0231321112489759 0.0233460446506322
## [2791] 0.0235876008935284 0.0236527643163391
## [2793] 0.0238198870483242 0.0239412603681800
## [2795] 0.0241143300386791 0.0243955619601570
## [2797] 0.0244696679698304 0.0247219230860790
## [2799] 0.0247947830791493 0.0250046825938914
## [2801] 0.0251771226873007 0.0254089660134140
## [2803] 0.0255543369429094 0.0258040105774170
## [2805] 0.0259411459267530 0.0261504178885988
## [2807] 0.0262673529706736 0.0265212936117591
## [2809] 0.0267269188398365 0.0268526679531941
## [2811] 0.0269425180169176 0.0271726757384120
## [2813] 0.0273544944747952 0.0275910950228823
## [2815] 0.0278053940164229 0.0279261395363249
## [2817] 0.0280796682609666 0.0283600257232624
## [2819] 0.0285517932678256 0.0287378231018993
## [2821] 0.0289054571910659 0.0291427862429871
## [2823] 0.0293219840441997 0.0296355852743563
## [2825] 0.0297363581073951 0.0299051459902397
## [2827] 0.0301299063728341 0.0303236353327946
## [2829] 0.0304828644394498 0.0308352060489911
## [2831] 0.0310298438963459 0.0313062233167581
## [2833] 0.0313531949918650 0.0317614740692237
## [2835] 0.0319156501905865 0.0320700468279433
## [2837] 0.0323257889269791 0.0326226810606084
## [2839] 0.0327514839081434 0.0330301607186866
## [2841] 0.0333300402340952 0.0333648302905679
## [2843] 0.0336490723435793 0.0338892541461575
## [2845] 0.0341602809726382 0.0343098057209711
## [2847] 0.0345835405579228 0.0348290957990861
## [2849] 0.0349967906628475 0.0354457509590115
## [2851] 0.0357013250962964 0.0357394275537877
## [2853] 0.0361515373630166 0.0364096067934135
## [2855] 0.0366118374612288 0.0368357413479370
## [2857] 0.0371131606147412 0.0373712521018001
## [2859] 0.0375380228120807 0.0379436455127064
## [2861] 0.0381973460065844 0.0383668975001057
## [2863] 0.0385735519914388 0.0389259303483152
## [2865] 0.0390955354855345 0.0395230104922489
## [2867] 0.0397884554491592 0.0399386682315595
## [2869] 0.0402125568722912 0.0405383114660221
## [2871] 0.0408001232158259 0.0410568380142756
## [2873] 0.0415439656007119 0.0418448584810924
## [2875] 0.0421223642780368 0.0424293080001344
## [2877] 0.0426393117015187 0.0430493138448321
## [2879] 0.0433081121358519 0.0433652020360853
## [2881] 0.0439404201243622 0.0441146283599082
## [2883] 0.0445259623129993 0.0446280960787023
## [2885] 0.0448790578246882 0.0452511896097713
## [2887] 0.0456792419418999 0.0460310568002234
## [2889] 0.0461915924556843 0.0466539660197400
## [2891] 0.0470970321575573 0.0474120169326610
## [2893] 0.0474812643818562 0.0479844178686005
## [2895] 0.0482666095052829 0.0486652260832376
## [2897] 0.0488553193573252 0.0492410258113716
## [2899] 0.0496024480245209 0.0498946075613057
## [2901] 0.0504644414324685 0.0506968734894051
## [2903] 0.0511149835405979 0.0514536118878320
## [2905] 0.0518186650819740 0.0518915711310878
## [2907] 0.0522596529200791 0.0529472787456109
## [2909] 0.0529711075464806 0.0533680923654248
## [2911] 0.0537451898506536 0.0541034292202705
## [2913] 0.0547756299408147 0.0551722999629324
## [2915] 0.0552982990116554 0.0558583797792500
## [2917] 0.0562828119971154 0.0567097363078847
## [2919] 0.0567617424280318 0.0573803549507847
## [2921] 0.0579071930468982 0.0582506242664334
## [2923] 0.0585253465072210 0.0591490168092297
## [2925] 0.0593454528078920 0.0596232419213127
## [2927] 0.0600491518565379 0.0607871654198329
## [2929] 0.0611386491158217 0.0615147519740324
## [2931] 0.0616891277356807 0.0625032332730642
## [2933] 0.0629487820042915 0.0630870983746782
## [2935] 0.0635704450981739 0.0640832946413871
## [2937] 0.0643110854959684 0.0650523454696805
## [2939] 0.0654090582255517 0.0660643112675873
## [2941] 0.0662291204606336 0.0669575860204024
## [2943] 0.0670112844014255 0.0678133419277412
## [2945] 0.0682789924250402 0.0687981106500250
## [2947] 0.0690394854627525 0.0695308050414753
## [2949] 0.0700387013754897 0.0707183429617258
## [2951] 0.0708561841182710 0.0716181127308208
## [2953] 0.0718537524979923 0.0726086187361399
## [2955] 0.0729603598833098 0.0733075094819504
## [2957] 0.0741121740503763 0.0747294214470281
## [2959] 0.0749436388992182 0.0757833828583001
## [2961] 0.0761173943776465 0.0765669792168543
## [2963] 0.0772150660752770 0.0775484334130549
## [2965] 0.0781789090494731 0.0788763222535755
## [2967] 0.0795272101454024 0.0796534004273761
## [2969] 0.0807215527995615 0.0809227789224822
## [2971] 0.0814545276307172 0.0821407243927735
## [2973] 0.0825411518924844 0.0833191227778792
## [2975] 0.0837111952088566 0.0842234564274593
## [2977] 0.0849588817959609 0.0859009265413118
## [2979] 0.0861413672867757 0.0869907892144707
## [2981] 0.0876159140616268 0.0877059487949378
## [2983] 0.0887125779030905 0.0891976361044804
## [2985] 0.0898464431154523 0.0905415558807215
## [2987] 0.0911916371657396 0.0914674026357545
## [2989] 0.0925838861605995 0.0927088716228035
## [2991] 0.0937489141215411 0.0943775091750541
## [2993] 0.0951045892214518 0.0954335413330671
## [2995] 0.0962825156953287 0.0968994543947177
## [2997] 0.0973867181943762 0.0985774165578666
## [2999] 0.0990088747881357 0.0996008241606606

2.2.4 Linear SVM tuning function

SVM_linear_tuning <- function(recipe_input, formula_input){

  set.seed(123)
  train_input <- recipe_input %>% bake(new_data = NULL)
  tuning_cv_folds <- train_input %>%
    vfold_cv(v = 10)

  ## cost is the penalty for margin violations
  ## margin is the epsilon in the epsilon-insensitive loss
  ## degree = 1 makes the polynomial kernel linear
  model_spec <- svm_poly(cost = tune(), margin = tune(), degree = 1) %>%
    set_mode("regression") %>%
    set_engine("kernlab")

  tune_wf <- workflow() %>%
    add_recipe(recipe_input) %>%
    add_model(model_spec)

  ## automatically generate a regular grid for the hyperparameters
  model_grid <-
    model_spec %>%
    parameters() %>%
    grid_regular(levels = c(15, 30))

  tune_ctrl <- control_grid(save_pred = TRUE,
                            verbose = TRUE,
                            parallel_over = "everything")

  library(doFuture)
  registerDoFuture()
  plan(multisession(workers = 30))

  tune_res <- tune_grid(
    tune_wf,
    resamples = tuning_cv_folds,
    metrics = metric_set(rmse),
    grid = model_grid,
    control = tune_ctrl
  )

  best_tune <- select_best(tune_res, metric = "rmse")

  best_tuned_param <- show_best(tune_res, metric = "rmse")

  svm_linear_final_wf <- tune_wf %>% finalize_workflow(best_tune)

  return(list(svm_linear_wf_final = svm_linear_final_wf,
              best_svm_linear_model = best_tune,
              best_svm_linear_forest_param = best_tuned_param))
}
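
A minimal usage sketch, assuming svm_recipe is a recipe that has already been prepped on the training data and train_data is that training set (both names are illustrative placeholders, not objects defined above):

## hypothetical call; svm_recipe and train_data are assumed placeholders
svm_linear_results <- SVM_linear_tuning(svm_recipe)

## inspect the best cost/margin combination chosen by 10-fold CV
svm_linear_results$best_svm_linear_model

## refit the finalized workflow on the full training set
svm_linear_final_fit <- svm_linear_results$svm_linear_wf_final %>%
  fit(data = train_data)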

2.2.4.1 The grid of the linear SVM

svm_linear_model_spec <- svm_poly(cost = tune(), margin = tune(), degree = 1) %>%
  set_mode("regression") %>%
  set_engine("kernlab")

svm_linear_grid <-
  svm_linear_model_spec %>%
  parameters() %>%
  grid_regular(levels = c(15, 30))
## range of the grid
range(svm_linear_grid$cost)
## [1]  0.0009765625 32.0000000000
range(svm_linear_grid$margin)
## [1] 0.0 0.2
## unique elements of the grid
unique(svm_linear_grid$cost)
##  [1]  0.0009765625  0.0020522591  0.0043128497  0.0090635108
##  [5]  0.0190470883  0.0400277091  0.0841187620  0.1767766953
##  [9]  0.3714985723  0.7807091822  1.6406707120  3.4478912850
## [13]  7.2457893141 15.2271224482 32.0000000000
unique(svm_linear_grid$margin)
##  [1] 0.000000000 0.006896552 0.013793103 0.020689655
##  [5] 0.027586207 0.034482759 0.041379310 0.048275862
##  [9] 0.055172414 0.062068966 0.068965517 0.075862069
## [13] 0.082758621 0.089655172 0.096551724 0.103448276
## [17] 0.110344828 0.117241379 0.124137931 0.131034483
## [21] 0.137931034 0.144827586 0.151724138 0.158620690
## [25] 0.165517241 0.172413793 0.179310345 0.186206897
## [29] 0.193103448 0.200000000
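
Since grid_regular crosses every level of each parameter, the grid size is the product of the per-parameter level counts; a quick check (not part of the original output):

nrow(svm_linear_grid)  ## 15 cost values x 30 margin values = 450 candidates

By the same product rule, the RBF grid below crosses 15 x 10 x 30 = 4,500 candidates, and the polynomial grid is larger still, which is why those two models took much longer to tune.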

2.2.5 RBF SVM tuning function

SVM_RBF_tuning <- function(recipe_input, formula_input){

  set.seed(123) 
  train_input <- recipe_input %>% bake(new_data = NULL)
  tuning_cv_folds <- train_input %>%
    vfold_cv(v = 10)

  ## cost is the penalty for margin violations,
  ## rbf_sigma is the RBF kernel width,
  ## and margin is the epsilon of the epsilon-insensitive loss
  model_spec <- svm_rbf(cost = tune(), rbf_sigma = tune(), margin = tune()) %>%
    set_mode("regression") %>%
    set_engine("kernlab")

  tune_wf <- workflow() %>%
    add_recipe(recipe_input) %>%
    add_model(model_spec)

  ## automatically generate the grid for the hyperparameters
  model_grid <- 
    model_spec %>% 
    parameters() %>% 
    grid_regular(levels = c(15, 10, 30))

  tune_ctrl <- control_grid(save_pred = TRUE, verbose = TRUE, parallel_over = "everything")

  library(doFuture)
  registerDoFuture()
  plan(multisession(workers = 55))

  tune_res <- tune_grid(
    tune_wf,
    resamples = tuning_cv_folds,
    metrics = metric_set(rmse),
    grid = model_grid,
    control = tune_ctrl
  )

  best_tune <- select_best(tune_res, metric = "rmse")

  best_tuned_param <- show_best(tune_res, metric = "rmse")

  svm_rbf_final_wf <- tune_wf %>% finalize_workflow(best_tune)

  return(list(svm_rbf_wf_final = svm_rbf_final_wf, 
              best_svm_rbf_model = best_tune,
              best_svm_rbf_forest_param = best_tuned_param))
}

2.2.5.1 the grid of RBF SVM

Grid that tunes the margin, cost, and RBF sigma.

svm_rbf_spec <- svm_rbf(cost = tune(), rbf_sigma = tune(), margin = tune()) %>%
  set_mode("regression") %>%
  set_engine("kernlab")

svm_rbf_grid <-  
  svm_rbf_spec %>% 
  parameters() %>% 
  grid_regular(levels = c(15, 10, 30))
## range of the grid
range(svm_rbf_grid$cost)
## [1]  0.0009765625 32.0000000000
range(svm_rbf_grid$rbf_sigma)
## [1] 0.0000000001 1.0000000000
range(svm_rbf_grid$margin)
## [1] 0.0 0.2
##unique elements of the grid
unique(svm_rbf_grid$cost)
##  [1]  0.0009765625  0.0020522591  0.0043128497  0.0090635108
##  [5]  0.0190470883  0.0400277091  0.0841187620  0.1767766953
##  [9]  0.3714985723  0.7807091822  1.6406707120  3.4478912850
## [13]  7.2457893141 15.2271224482 32.0000000000
unique(svm_rbf_grid$rbf_sigma)
##  [1] 0.00000000010000 0.00000000129155 0.00000001668101
##  [4] 0.00000021544347 0.00000278255940 0.00003593813664
##  [7] 0.00046415888336 0.00599484250319 0.07742636826811
## [10] 1.00000000000000
unique(svm_rbf_grid$margin)
##  [1] 0.000000000 0.006896552 0.013793103 0.020689655
##  [5] 0.027586207 0.034482759 0.041379310 0.048275862
##  [9] 0.055172414 0.062068966 0.068965517 0.075862069
## [13] 0.082758621 0.089655172 0.096551724 0.103448276
## [17] 0.110344828 0.117241379 0.124137931 0.131034483
## [21] 0.137931034 0.144827586 0.151724138 0.158620690
## [25] 0.165517241 0.172413793 0.179310345 0.186206897
## [29] 0.193103448 0.200000000

2.2.6 polynomial SVM tuning function

SVM_poly_tuning <- function(recipe_input, formula_input){

  set.seed(123) 
  train_input <- recipe_input %>% bake(new_data = NULL)
  tuning_cv_folds <- train_input %>%
    vfold_cv(v = 10)

  model_spec <- svm_poly(cost = tune(),
                         degree = tune(),
                         scale_factor = tune(),
                         margin = tune()) %>%
    set_mode("regression") %>%
    set_engine("kernlab")

  tune_wf <- workflow() %>%
    add_recipe(recipe_input) %>%
    add_model(model_spec)

  ## automatically generate the grid for the hyperparameters
  model_grid <- 
    model_spec %>% 
    parameters() %>% 
    grid_regular(levels = c(15, 4, 10, 30))

  tune_ctrl <- control_grid(save_pred = TRUE, verbose = TRUE, parallel_over = "everything")

  library(doFuture)
  registerDoFuture()
  plan(multisession(workers = 30))

  tune_res <- tune_grid(
    tune_wf,
    resamples = tuning_cv_folds,
    metrics = metric_set(rmse),
    grid = model_grid,
    control = tune_ctrl
  )
  #saveRDS(tune_res, paste0(anotherFold,'working_memory_tasks/svm_poly_twoback_grid', '.RData'))
  best_tune <- select_best(tune_res, metric = "rmse")

  best_tuned_param <- show_best(tune_res, metric = "rmse")

  svm_poly_final_wf <- tune_wf %>% finalize_workflow(best_tune)

  return(list(svm_poly_wf_final = svm_poly_final_wf, 
              best_svm_poly_model = best_tune,
              best_svm_poly_forest_param = best_tuned_param))
}

2.2.6.1 the grid of polynomial SVM

SVM_poly_model_spec <- svm_poly(cost = tune(), degree = tune(), scale_factor = tune(), margin = tune()) %>%
  set_mode("regression") %>%
  set_engine("kernlab")

SVM_poly_grid <- 
  SVM_poly_model_spec %>% 
  parameters() %>% 
  grid_regular(levels = c(15, 4, 10, 30))

## get the grid range
range(SVM_poly_grid$cost)
## [1]  0.0009765625 32.0000000000
range(SVM_poly_grid$degree)
## [1] 1 3
range(SVM_poly_grid$scale_factor)
## [1] 0.0000000001 0.1000000000
range(SVM_poly_grid$margin)
## [1] 0.0 0.2
## get the unique elements of the grid
unique(SVM_poly_grid$cost)
##  [1]  0.0009765625  0.0020522591  0.0043128497  0.0090635108
##  [5]  0.0190470883  0.0400277091  0.0841187620  0.1767766953
##  [9]  0.3714985723  0.7807091822  1.6406707120  3.4478912850
## [13]  7.2457893141 15.2271224482 32.0000000000
unique(SVM_poly_grid$degree)
## [1] 1 2 3
unique(SVM_poly_grid$scale_factor)
##  [1] 0.0000000001 0.0000000010 0.0000000100 0.0000001000
##  [5] 0.0000010000 0.0000100000 0.0001000000 0.0010000000
##  [9] 0.0100000000 0.1000000000
unique(SVM_poly_grid$margin)
##  [1] 0.000000000 0.006896552 0.013793103 0.020689655
##  [5] 0.027586207 0.034482759 0.041379310 0.048275862
##  [9] 0.055172414 0.062068966 0.068965517 0.075862069
## [13] 0.082758621 0.089655172 0.096551724 0.103448276
## [17] 0.110344828 0.117241379 0.124137931 0.131034483
## [21] 0.137931034 0.144827586 0.151724138 0.158620690
## [25] 0.165517241 0.172413793 0.179310345 0.186206897
## [29] 0.193103448 0.200000000

2.2.7 mass univariate functions

The mass univariate fit functions.

holdout_results is a function that takes one ROI and fits a regression on it. Its outputs are the model's slope estimate and its predictions. resp_result returns the slope estimates and predictions for all ROIs.

median_extract extracts the median of the predictions across all ROIs that remain significant.

holdout_results <- function(.x, training_data, testing_data, ...) {
  # Fit the model on the training (75%) portion of the data
  mod <- lm(..., data = training_data)
  slope <- mod %>% broom::tidy() %>% 
    filter(term != '(Intercept)') %>%
    rename(roi = term)
  preds <- predict(mod, newdata = testing_data) %>%
    tibble::as_tibble()
  names(preds) <- slope$roi[1]
  return(list(model_spec = slope, model_pred = preds))
}

resp_result <- function(.x, test_input, recipe_input){
  x <- .x
  testing_data <- bake(prep(recipe_input), new_data = test_input)
  training_data <- bake(prep(recipe_input), new_data = NULL)
  ## note: data_all_listwise is taken from the global environment
  formulas <- paste0(x, ' ~ ', colnames(select(data_all_listwise, starts_with("roi_"))))
  results_test_simple <- map(formulas, 
                             function(f) holdout_results(.x = x,
                                                         training_data = training_data,
                                                         testing_data = testing_data,
                                                         f)) 
  model_broom <- map(results_test_simple, "model_spec") %>%
    do.call(rbind, .)
  model_pred <- map(results_test_simple, "model_pred") %>% 
    do.call(cbind, .) %>%
    mutate(response = testing_data[[x]])
  return(list(model_broom = model_broom, model_pred = model_pred))
}

### extract median

median_extract <- function(resp_input, model_input, pred_input){
  roi_left <- model_input[["roi"]] 
  pred_selected <- pred_input %>% select(all_of(roi_left), all_of(resp_input))
  ## take the row-wise median over the surviving ROI predictions only
  ## (the original indexed pred_selected[,-1], which dropped the first ROI
  ## and included the observed response in the median)
  pred_median <- apply(pred_selected %>% select(all_of(roi_left)), 1, median)
  pred_tibble <- pred_selected %>% mutate(model_pred = pred_median)
  return(pred_tibble)
}
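
A toy illustration of the row-wise median used by median_extract (made-up prediction columns, not real ROI values):

toy_pred <- tibble::tibble(roi_a = c(1, 2), roi_b = c(3, 4), roi_c = c(5, 6))
apply(toy_pred, 1, median) ## one median prediction per participant: 3 4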

2.3 model fitting function

The model fitting function for the following algorithms: elastic net, linear SVM, RBF SVM, polynomial SVM, and random forest. The output is a list: the element ending with _final_fit is the fitted model object, and the element ending with _predict contains the model predictions (a usage sketch follows the function below).

model_final_fit <- function(recipe_input,
                            wf_input,
                            formula_input,
                            model_name,
                            test_data){
  library(doFuture)
  registerDoFuture()
  plan(multisession(workers = 30))

  train_input <- recipe_input %>% 
    bake(new_data = NULL)
  
  ## baking with the trained recipe scales the test data
  ## using the means and SDs from the training data
  test_input <- bake(recipe_input,
                     new_data = test_data)
  
  model_final_fit <- 
    wf_input %>%
    parsnip::extract_spec_parsnip() %>%
    parsnip::fit(data = train_input, formula = formula_input)
    
  model_predict <- predict(model_final_fit, 
                           new_data = test_input %>% 
                             drop_na()) %>%
    rename(model_predict = .pred) %>% 
    bind_cols(test_input %>% drop_na())  

  ## processing output
  output_list <- vector("list", length = 2)
  names(output_list) <- c(paste0(model_name, "_final_fit"),
                          paste0(model_name, "_predict"))
  
  output_list[[paste0(model_name, "_final_fit")]] <- model_final_fit
  output_list[[paste0(model_name, "_predict")]] <- model_predict
  
  return(output_list)
}
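
A usage sketch of this naming convention (a hypothetical, commented-out call; the real calls appear in the sections below):

# enet_results <- model_final_fit(recipe_input  = recipe_list[[1]],
#                                 wf_input      = enet_wfl_final_list[[1]],
#                                 formula_input = formula_list[[1]],
#                                 model_name    = "enet",
#                                 test_data     = data_test)
# enet_results$enet_final_fit  ## the fitted parsnip model object
# enet_results$enet_predict    ## predictions bound to the baked test data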

2.3.1 xgboost fit function

The xgboost fit function is special in that it returns more types of output: 1. the fitted xgboost model object; 2. model predictions for the test data; 3. Shapley variable importance for the training dataset; 4. a summary plot of all the Shapley values.

xgboost_model_pred <- function(resp_input,
                               param_input, recipe_input, 
                               train_input = gfactor_train_all,
                               test_input = gfactor_test_all){

training_data <- recipe_input%>%
  prep(training = train_input) %>%
  bake(new_data = NULL)   

training_matrix <-    training_data  %>% 
               select(starts_with("roi_"))%>%
               as.matrix()

training_label <- training_data[[resp_input]] ## pull out the response variable as the training label

testing_data <- recipe_input %>%
                bake(new_data = test_input)%>% drop_na() 

testing_matrix <- testing_data %>%
  select(starts_with("roi_"))%>%
  as.matrix()

testing_label <- testing_data[[resp_input]] ## pull out the response variable as the test label

dTrain <- xgboost::xgb.DMatrix(data = training_matrix,
                               label=training_label)

dtest <-xgboost::xgb.DMatrix(data = testing_matrix,
                             label=testing_label)

xgboost_fit <- xgboost::xgboost(data=dTrain, 
                                eta=param_input$learn_rate,
                                gamma=param_input$loss_reduction,
                                max_depth=param_input$tree_depth,
                                min_child_weight=param_input$min_n,
                                subsample=param_input$sample_size,
                                colsample_bynode = param_input$mtry/ncol(training_matrix), ## mtry as a fraction of the number of predictors (the original divided by the row count)
                                nrounds = 500,
                                objective="reg:squarederror",verbose = 0)

model_predict <- predict(xgboost_fit,dtest)%>%
                 tibble::as_tibble()%>%
                 rename(model_predict = value)%>%
                 bind_cols(recipe_input %>%
                             bake(new_data = test_input)%>% 
                             drop_na()) 

model_predict_train <- predict(xgboost_fit,
                               dTrain,predcontrib = TRUE)

## the Shapley values must be aligned to the same ROIs (columns) as the training matrix
shapley_plot <- xgboost::xgb.ggplot.shap.summary(training_matrix, 
                                                 model_predict_train, 
                                                 model = xgboost_fit, 
                                                 top_n = 30)

output_list <- vector("list",length=4)
names(output_list) <- c(paste0("xgboost","_final_fit"),
                        "xgboost_predict",
                        "xgboost_predict_train",
                        "xgboost_shap_plot")

output_list[[paste0("xgboost","_final_fit")]] <- xgboost_fit
output_list[[paste0("xgboost","_predict")]] <- model_predict
output_list[[paste0("xgboost","_predict_train")]] <- model_predict_train
output_list[[paste0("xgboost","_shap_plot")]] <- shapley_plot

return(output_list)
}

3 Model fitting for all response variables except for G-Factor

3.1 formulas and recipes

Get the formulas and the recipe.

Scaling was done by this recipe.

The outlier-removal procedure, however, was done in the IQR_remove function, because the recipe did not work well when rows were removed from the data set. A minimal sketch of such a filter is given below.
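
For reference, a minimal sketch of what such an IQR-based row filter can look like (illustrative only: the actual IQR_remove function is defined earlier in this document, and the cutoff k = 3 here is an assumption):

iqr_filter <- function(data, resp_vec, k = 3) {
  for (v in resp_vec) {
    q   <- stats::quantile(data[[v]], c(0.25, 0.75), na.rm = TRUE)
    iqr <- q[[2]] - q[[1]]
    ## keep only the rows whose value for v lies within k * IQR of the quartiles
    keep <- data[[v]] >= q[[1]] - k * iqr & data[[v]] <= q[[2]] + k * iqr
    data <- data[which(keep), ]
  }
  data
}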

data_train <- IQR_remove(data_split = split_train, resp_vec = resp_names)
data_test <- IQR_remove(data_split = split_test, resp_vec = resp_names)



formula_list <- resp_names %>% 
  map(.,~as.formula(paste(.,paste(feature_names,collapse = "+"),sep="~")))
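## e.g., each element of formula_list has (schematically) the form
##   <response> ~ roi_1 + roi_2 + ... + roi_k
## where roi_1 ... roi_k stand in for the actual ROI column names in feature_names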


recipe_prep <- function(resp_var,formula_input,train_input=data_train){
 norm_recipe <- recipe( formula_input, data = train_input) %>%
  update_role(starts_with("roi_"), new_role = "predictor")%>%
  update_role(resp_var, new_role = "outcome" )%>%
  step_dummy(all_nominal()) %>%
  prep(training = train_input, retain = TRUE)
  return(norm_recipe)
}

recipe_list <-  map2(.x = resp_names,
                     .y = formula_list,
                     ~recipe_prep(resp_var = .x,
                                  formula_input =.y))

3.1.1 number of participants in train and test data

## select the subject-information columns from the data

subj_info_all <- Nback.QCedNoPhil%>%
                 select(all_of(c('SUBJECTKEY', 'MRI_INFO_DEVICESERIALNUMBER', 'SITE_ID_L','SEX')))


## print the size of the training data after IQR removal
dim(data_train)
## [1] 2982  181
data_train_subj <- left_join(data_train, subj_info_all, by = subj_info)
## males
sum(data_train_subj$SEX =="M")
## [1] 1511
## females
sum(data_train_subj$SEX =="F")
## [1] 1470
## print the size of the test data after IQR removal
dim(data_test)
## [1] 1007  181
data_test_subj <- left_join(data_test, subj_info_all, by = subj_info)
## males
sum(data_test_subj$SEX =="M")
## [1] 509
## females
sum(data_test_subj$SEX =="F")
## [1] 498

3.2 Mass Univariate

These are the function calls for the mass univariate analysis, which cannot be run the same way as the other algorithms. Here we get the mass univariate model fits and predicted values, and select the ROIs that survived the FDR and Bonferroni corrections (a toy illustration of the two corrections follows).
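
Toy p-values (made up, not from this analysis) showing how the two corrections applied below differ:

p <- c(0.001, 0.01, 0.03, 0.2)
p.adjust(p, method = "fdr")        ## 0.004 0.020 0.040 0.200
p.adjust(p, method = "bonferroni") ## 0.004 0.040 0.120 0.800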

simple_all_IQR <- map2(.x=resp_names,
                       .y = recipe_list,
                       ~resp_result(.x,
                                    recipe_input = .y, 
                                    test_input = data_test))

univariate_model_broom <- map(simple_all_IQR , 
                              "model_broom")

univariate_model_pred <- map(simple_all_IQR , 
                             "model_pred")

univariate_model_pred <- map2(.x = univariate_model_pred,
                              .y = resp_names, 
                              function(pred_input, resp_input){
                                names_vec <- c(names(pred_input)[1:167],
                                               resp_input)
                                names(pred_input) <- names_vec
                                return(pred_input)})


univariate_model_broom <- univariate_model_broom %>% 
  map(., ~ mutate(.,
                  FDR = p.adjust(p.value, method = 'fdr'),
                  bonferroni= p.adjust(p.value, method = 'bonferroni')))

univariate_model_fdr <- 
  univariate_model_broom %>% 
  map(., ~ filter(.,FDR  <= 0.05))

univariate_model_bonferroni <- 
  univariate_model_broom %>% 
  map(., ~ filter(.,bonferroni  <= 0.05))

median_univar_fdr_pred <- pmap(list(resp_names,
                                    univariate_model_fdr,
                                    univariate_model_pred),
                               ~median_extract(resp_input=..1, 
                                               model_input=..2, 
                                               pred_input=..3) )

median_univar_bonferroni_pred <- pmap(list(resp_names,
                                           univariate_model_bonferroni,
                                           univariate_model_pred),
                               ~median_extract(resp_input=..1, 
                                               model_input=..2, 
                                               pred_input=..3) )

3.3 OLS

Fit the OLS model.

OLS_fit <-  map2(.x=formula_list,
                 .y=recipe_list ,
                 ~lm(.x,
                     data = .y %>%  
                       bake(new_data= NULL)))

3.4 prediction of OLS

OLS_predict_list <- map2(.x = OLS_fit,
                         .y = recipe_list,
                         ~predict(.x, newdata = bake(prep(.y), new_data = data_test)) %>%
                           tibble::as_tibble() %>% 
                           rename(model_pred = value) %>%
                           bind_cols(bake(prep(.y), new_data = data_test)))
yardstick::rsq_trad(data = OLS_predict_list$TFMRI_NB_ALL_BEH_C2B_RATE, 
                    truth =.data$TFMRI_NB_ALL_BEH_C2B_RATE, 
                    estimate =.data$model_pred)
## # A tibble: 1 x 3
##   .metric  .estimator .estimate
##   <chr>    <chr>          <dbl>
## 1 rsq_trad standard       0.240
yardstick::mae(data = OLS_predict_list$TFMRI_NB_ALL_BEH_C2B_RATE, 
               truth =.data$TFMRI_NB_ALL_BEH_C2B_RATE, 
               estimate =.data$model_pred  )
## # A tibble: 1 x 3
##   .metric .estimator .estimate
##   <chr>   <chr>          <dbl>
## 1 mae     standard       0.705

3.4.1 plot OLS coefficients and SEs for regions that passed p < .05

tidy_fit_ols_all <- OLS_fit %>% map(., ~broom::tidy(.))
tidy_fit_ols_all <- tidy_fit_ols_all %>% 
  map(., ~filter(., term != '(Intercept)' & p.value < 0.05) %>%
        mutate(., roi = str_remove(term, 'roi_')) %>%
        left_join(., new_shorter_names, by = "roi") %>%
        mutate(., direction = ifelse(estimate >= median(estimate), "big", "small")))

resp_names %>% map(~ggplot(tidy_fit_ols_all[[.]],aes(fct_reorder(roiShort, estimate), estimate, 
             ymin = estimate - 2 * std.error, 
             ymax = estimate + 2 * std.error)) +
  geom_hline(yintercept = 0, linetype = 'dashed', col = 'grey60') +
  geom_pointrange(fatten = 1.5, col = 'grey60') +
  coord_flip() +
  labs(x = 'Explanatory variables (Brain Regions)', y = 'Coefficients (± 2 std. errors)',
       title = paste0(resp_var_plotting$longer_name[[which(resp_var_plotting$response==.)]],
       '\nOLS Coefficients (p < .05)')) + 
     facet_wrap(~ direction, scales = 'free_y') +
                     theme(
                       axis.title.x = element_text(size = 15),
                       axis.text.x = element_text(size = 12),
                       axis.title.y = element_text(size = 15),
                       axis.text.y = element_text(size = 12),
                       legend.text = element_text(size = 10),
                       plot.title = element_text(size=16)) + 
     theme(
    strip.background = element_blank(),
    strip.text.x = element_blank()
))
## (coefficient plots rendered here, one per response variable:
## $TFMRI_NB_ALL_BEH_C2B_RATE, $NIHTBX_PICVOCAB_UNCORRECTED, $NIHTBX_FLANKER_UNCORRECTED,
## $NIHTBX_LIST_UNCORRECTED, $NIHTBX_CARDSORT_UNCORRECTED, $NIHTBX_PATTERN_UNCORRECTED,
## $NIHTBX_PICTURE_UNCORRECTED, $NIHTBX_READING_UNCORRECTED, $LMT_SCR_PERC_CORRECT,
## $PEA_RAVLT_LD_TRIAL_VII_TC, $PEA_WISCV_TRS)

3.5 Tune elastic net, RBF SVM, linear SVM, polynomial SVM, random forest, and XGBoost.

library(doFuture)
registerDoFuture()
plan(multisession(workers = 30))


start_time <- Sys.time() 

enet_tune <- map2(recipe_list,formula_list, 
                  ~enet_tuning(recipe_input = .x,
                               formula_input = .y))


saveRDS(enet_tune, paste0(anotherFold,'working_memory_tasks/windows/enet_tune_results_Dec_30_2021', '.RData'))


stop_time <- Sys.time() 

start_time <- Sys.time() 

random_forest_tune <- map2(recipe_list,formula_list, 
                           ~random_forest_tuning(recipe_input = .x,
                                                 formula_input = .y))


saveRDS(random_forest_tune, paste0(anotherFold,'working_memory_tasks/windows/random_forest_tune_results_Dec_13_2021', '.RData'))

stop_time <- Sys.time() 

start_time <- Sys.time() 

svm_linear_tune <- map2(recipe_list,formula_list, 
                        ~SVM_linear_tuning(recipe_input = .x,
                                           formula_input = .y))




saveRDS(svm_linear_tune, paste0(anotherFold,'working_memory_tasks/windows/SVM_linear_tune_results_Dec_13_2021', '.RData'))

stop_time <- Sys.time()

start_time <- Sys.time() 

xgboost_tune <- map2(recipe_list,formula_list, 
                     ~xgboost_tuning(recipe_input = .x,
                                     formula_input = .y))


saveRDS(xgboost_tune, paste0(anotherFold,'working_memory_tasks/xgboost_tune_gfactor_results_Dec_13_2021', '.RData'))

stop_time <- Sys.time() 

start_time <- Sys.time() 

#svm_rbf_tune <-map2(recipe_list,formula_list, 
#                    ~SVM_RBF_tuning(recipe_input = .x,
#                                    formula_input = .y))



for(i in 4:length(resp_names)){
  svm_rbf_recipe_input <- recipe_list[[resp_names[i]]]
  svm_rbf_formula_input <- formula_list[[resp_names[i]]]
  svm_rbf_tune <- SVM_RBF_tuning(recipe_input = svm_rbf_recipe_input, 
                                   formula_input = svm_rbf_formula_input)
  saveRDS(svm_rbf_tune, paste0(anotherFold,'working_memory_tasks/windows/svm_rbf_',resp_names[i],'_tune_results_Mar_16_2022','.RData'))
}


#saveRDS(svm_rbf_tune, paste0(anotherFold,'working_memory_tasks/windows/SVM_RBF_tune_results_Mar_16_2021', '.RData'))

stop_time <- Sys.time() 

start_time <- Sys.time() 

#svm_poly_tune <- map2(recipe_list,formula_list,
#                      ~SVM_poly_tuning(recipe_input = .x,
#                                       formula_input = .y))



for(i in 7:8){
  svm_poly_recipe_input <- recipe_list[[resp_names[i]]]
  svm_poly_formula_input <- formula_list[[resp_names[i]]]
  svm_poly_tune <- SVM_poly_tuning(recipe_input = svm_poly_recipe_input, 
                                   formula_input = svm_poly_formula_input)
  saveRDS(svm_poly_tune, paste0(anotherFold,'working_memory_tasks/windows/svm_poly_',resp_names[i],'_tune_results_Dec_16_2021','.RData'))
}



stop_time <- Sys.time() 

3.5.1 load the tuned outputs

enet_tune <- readRDS(file = paste0(anotherFold,'working_memory_tasks/windows/enet_tune_results_Dec_30_2021', '.RData'))

random_forest_tune <- readRDS(file = paste0(anotherFold,'working_memory_tasks/windows/random_forest_tune_results_Dec_13_2021', '.RData'))

xgboost_tune <- readRDS(file = paste0(anotherFold,'working_memory_tasks/windows/xgboost_tune_results_Dec_13_2021', '.RData'))

svm_linear_tune <- readRDS(file = paste0(anotherFold,'working_memory_tasks/windows/SVM_linear_tune_results_Dec_13_2021', '.RData'))

# SVM_RBF_tune <- readRDS(file = paste0(anotherFold, 'working_memory_tasks/windows/SVM_RBF_tune_results_Dec_13_2021', '.RData'))



SVM_RBF_tune <- vector(mode = "list", length = length(resp_names) )
names(SVM_RBF_tune) <- resp_names

### load the tuned results individually
for( i in 1:length(resp_names)){
 SVM_RBF_tune[[resp_names[i]]] <-  
   readRDS(file = paste0(anotherFold,'working_memory_tasks/windows/svm_rbf_', resp_names[i],'_tune_results_Mar_16_2022', '.RData'))
}


### polynomial SVM took the longest, so we save and load the results separately for each response variable

svm_poly_tune <- vector(mode = "list", length = length(resp_names) )
names(svm_poly_tune) <- resp_names

### load the tuned results individually
for( i in 1:length(resp_names)){
 svm_poly_tune[[resp_names[i]]] <-  
   readRDS(file = paste0(anotherFold,'working_memory_tasks/windows/SVM_poly_', resp_names[i],'_tune_results_Dec_16_2021', '.RData'))
}

3.6 Elastic net

3.6.1 Extract final workflow and best tuned parameters from elastic net

enet_wfl_final_list <- map(enet_tune, "enet_wf_final")
best_enet_model_list <- map(enet_tune, "best_enet_model")

Note: using future_map here would cause the following error message, which is why plain map/pmap is used for these steps:
Error in UseMethod("extract_spec_parsnip") :
no applicable method for 'extract_spec_parsnip' applied to an object of class "workflow"

3.6.2 Extract the elastic net model fit

enet_final_fit <-pmap(list(recipe_list,enet_wfl_final_list, formula_list),
                                ~model_final_fit(test_data = data_test, 
                                                 recipe_input = ..1,
                                                 wf_input = ..2,
                                                 formula_input = ..3,
                                                 model_name = "enet")) 

3.6.3 Extract the elastic net output and prediction

enet_final_fit_list <- map(enet_final_fit, "enet_final_fit")
enet_predicted_list <- map(enet_final_fit, "enet_predict")

yardstick::mae(data = enet_predicted_list$TFMRI_NB_ALL_BEH_C2B_RATE, 
               truth =.data$TFMRI_NB_ALL_BEH_C2B_RATE, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric .estimator .estimate
##   <chr>   <chr>          <dbl>
## 1 mae     standard       0.699
yardstick::rsq_trad(data = enet_predicted_list$TFMRI_NB_ALL_BEH_C2B_RATE, 
               truth =.data$TFMRI_NB_ALL_BEH_C2B_RATE, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric  .estimator .estimate
##   <chr>    <chr>          <dbl>
## 1 rsq_trad standard       0.261

3.6.4 prep data for eNetXplorer

matrix_train <-bake(recipe_list[[resp_names[1]]], 
                    new_data = NULL)%>%
               select(starts_with("roi_"))%>%
               as.matrix()

resp_train <- resp_names %>% 
  map(.,
      ~bake(recipe_list[[.]], 
            new_data = NULL)%>%
        select(-starts_with("roi_"))%>%
        as.vector())

3.7 eNetXplorer

fit_explorer_all <-resp_names %>% 
  future_map(.,~eNetXplorer(x = matrix_train ,
                            y = resp_train[[.]][[.]],
                            alpha = best_enet_model_list[[.]][["mixture"]], 
                            n_fold = 10,
                            nlambda.ext = 1000, 
                            nlambda = 1000, 
                            scaled = TRUE,
                            QF_gaussian = "mse" ,
                            seed = 123456)) 

saveRDS(fit_explorer_all, 
        paste0(anotherFold,'working_memory_tasks/windows/fit_explorer_all_Dec_30_2021_rmse', '.RData'))

3.7.1 Extract the lambda values from the eNetXplorer output

lambdas_all <- vector("list", length = length(resp_names))
names(lambdas_all)<- resp_names

lambdas_all_best <- vector("list", length = length(resp_names))
names(lambdas_all_best)<- resp_names

summary_enet_all <- vector("list", length = length(resp_names))
names(summary_enet_all)<- resp_names

for(i in 1:length(resp_names)){
  lambdas_all[[resp_names[i]]] <- fit_explorer_all[[resp_names[i]]][["lambda_values"]]
  lambdas_all_best[[resp_names[i]]] <- fit_explorer_all[[resp_names[i]]][["best_lambda"]]
  summary_enet_all[[resp_names[i]]]<- as_tibble(summary(fit_explorer_all[[resp_names[i]]])[[2]]) %>%
    slice(1)
}

summary_enet_all %>% bind_rows() %>% 
  mutate(response = resp_var_plotting$short_name) %>%
  rename(., Alpha = alpha, 
         `Best-tune lambda` = lambda.max, 
         `MSE` = QF.est, 
         `P-value` = model.vs.null.pval) %>%
  pander::pander(split.cell = 80, 
                 split.table = Inf, 
                 justify = 'left')

Alpha   Best-tune lambda   MSE       P-value     response
-----   ----------------   -------   ---------   ---------------
0.05    0.1063             -0.7615   0.0003998   2-back Work Mem
0.905   0.01376            -0.8848   0.0003998   Pic Vocab
0.05    0.2432             -0.9687   0.0003998   Flanker
0.05    0.2268             -0.9144   0.0003998   List Work Mem
0.145   0.1468             -0.9526   0.0003998   Card Sort
0.05    0.4371             -0.9816   0.0003998   Pattern Speed
0.05    0.3214             -0.9648   0.0003998   Seq Memory
0.05    0.1788             -0.912    0.0003998   Reading Recog
0.145   0.1003             -0.939    0.0003998   Little Man
0.05    0.2321             -0.9684   0.0003998   Audi Verbal
0.05    0.2041             -0.9153   0.0003998   Matrix Reason

3.7.2 plot the predictive performance with various values of lambda

alpha_vals <- best_enet_model_list %>% 
  map(.,~paste0("a",.[["mixture"]])) 

enet_lambda_grid <-resp_names%>% 
  map(.,~qplot(fit_explorer_all[[.]][["lambda_values"]][[alpha_vals[[.]]]],
               fit_explorer_all[[.]][["lambda_QF_est"]][[alpha_vals[[.]]]], 
               geom = 'line') + 
        scale_x_log10() +
        geom_vline(xintercept = lambdas_all_best[[.]], 
                   col = 'red', 
                   linetype = 'dashed') +
        labs(x = NULL, 
             y = NULL, 
             title =resp_var_plotting$short_name[[which(resp_var_plotting$response==.)]] ))

title_enet_lambda <- ggdraw() + 
  draw_label(
    "Elastic Net Lambda (Penalty) Parameter Tuning",
    fontface = 'bold',
    x = 0,
    hjust = 0
  ) +
  theme(
    # add margin on the left of the drawing canvas,
    # so title is aligned with left edge of first plot
    plot.margin = margin(0.1, 0.1, 0.1, 7)
  )

enet_lambda_all_figure<- plot_grid(title_enet_lambda,
                                   plot_grid(plotlist = enet_lambda_grid,nrow=4,ncol=3),
                                   nrow = 2 , 
                                   rel_heights = c(0.1, 1))

ggpubr::annotate_figure(enet_lambda_all_figure,
                        left= ggpubr::text_grob("Cross Validated Predictive Ability\n(MSE)",
                                                size=15,
                                                rot=90),
                        bottom = ggpubr::text_grob("Lambda",size=15))

3.7.3 extract the permuted parameter estimate and plot against the null permuted values

extract_tibble <- function(elastic_mod, alpha_index) {
   variable <- elastic_mod$feature_coef_wmean[, alpha_index] %>% names()
    wmean <- elastic_mod$feature_coef_wmean[, alpha_index]
    wsd <- elastic_mod$feature_coef_wsd[, alpha_index]
    null_wmean <- elastic_mod$null_feature_coef_wmean[, alpha_index]
    null_wsd <- elastic_mod$null_feature_coef_wsd[, alpha_index]
    pvalue <- elastic_mod$feature_coef_model_vs_null_pval[, alpha_index]
    
    tib <- tibble(variable, wmean, wsd, null_wmean, null_wsd, pvalue)
   
    tib <- tib %>%
      gather(key = 'placeholder', 
             value = 'value', 
             wmean, 
             wsd, 
             null_wmean, 
             null_wsd) %>%
      mutate(type = ifelse(str_detect(placeholder, 'null'), 
                           'null', 
                           'target'),
             placeholder = (str_remove(placeholder, 'null_'))) %>%
      mutate(type = factor(type, 
                           labels = c('Null', 'Target'))) %>%
      spread(placeholder, value)
    
    tib
}

## the length of each element of coefs_enet_all is doubled because it contains
## both the null (permuted) models and the target models
coefs_enet_all <- resp_names %>%  
  map(., ~extract_tibble(fit_explorer_all[[.]], 
                         alpha_index = paste0("a", best_enet_model_list[[.]]$mixture)))
coefs_enet_all <- coefs_enet_all %>% 
  map(., ~filter(., pvalue < 0.05) %>%
        mutate(., type = ifelse(type == 'Null', 
                                'Null permuted models', 
                                'Target models'),
               roi = str_remove(variable, 'roi_')) %>%
        left_join(., new_shorter_names, by = "roi"))
    
roi_num_enet <- coefs_enet_all %>% map(.,~dim(.)[1])
max_roi_enet <- max(as.numeric(roi_num_enet))

## The trick used here to divide the ROIs into two facet columns: when the number
## of significant ROIs is large enough, ROIs whose estimate falls below the median
## go into the "small" group; when the number of ROIs is not large enough, all of
## them stay in the single "big" group.

coefs_enet_test <- coefs_enet_all[[resp_names[1]]] %>% 
  group_by(type)
coefs_enet_test <- coefs_enet_test %>% 
  nest(-type)
coefs_enet_test[[2]][[1]] <- coefs_enet_test[[2]][[1]] %>% 
  mutate(direction1 = ifelse(coefs_enet_test[[2]][[1]]$wmean >= median(coefs_enet_test[[2]][[1]]$wmean) |
                               roi_num_enet[[resp_names[1]]] <= floor(max_roi_enet/2),
                             "big", "small"))
coefs_enet_test[[2]][[2]] <- coefs_enet_test[[2]][[2]] %>% 
  mutate(direction1 = coefs_enet_test[[2]][[1]]$direction1)
coefs_enet_test <- coefs_enet_test %>% 
  unnest()

coefs_enet_all <- coefs_enet_all %>% map(.,~group_by(.,type)) 
coefs_enet_all <- coefs_enet_all %>% map(.,~nest(.,-type)) 

for(i in 1:length(resp_names)){
  coefs_enet_all[[resp_names[i]]][["data"]][[2]]<-
    coefs_enet_all[[resp_names[i]]][["data"]][[2]] %>% 
    mutate(direction = ifelse(coefs_enet_all[[resp_names[i]]][["data"]][[2]]$wmean >= median(coefs_enet_all[[resp_names[i]]][["data"]][[2]]$wmean)|roi_num_enet[[resp_names[i]]] <= floor(max_roi_enet/2), "big","small"))
  
coefs_enet_all[[resp_names[i]]][["data"]][[1]] <- 
  coefs_enet_all[[resp_names[i]]][["data"]][[1]] %>%
  mutate(direction=coefs_enet_all[[resp_names[i]]][["data"]][[2]]$direction)
}

coefs_enet_all <- coefs_enet_all %>%map(.,~unnest(.)) 

resp_names %>% map(.,
                   ~ggplot(coefs_enet_all[[.]],
                           aes(x = fct_reorder(roiShort, wmean),
                               y = wmean, 
                               ymax = wmean + 2 * wsd, 
                               ymin = wmean - 2 * wsd,
                               col = type)) +
                     geom_pointrange(fatten = 0.5, key_glyph = 'point') +
                     scale_y_continuous(labels = numform::ff_num(zero = 0, digits = 2)) +
                     scale_color_grey(start = 0.7, end = 0.5) +
                     coord_flip() +
                     guides(colour = guide_legend(override.aes = list(size = 2.5)))+
                     labs(x = 'Explanatory Variables (Brain Regions)', 
                          y = 'Averaged Coefficient Across Models (±2 Std. dev)', 
                          col = 'Model type',
                          title = paste0(resp_var_plotting$longer_name[[which(resp_var_plotting$response==.)]],
                      "\nElastic Net Coefficients (p < .05)")) +
                     facet_wrap(~ direction, scales = 'free_y') +
                     scale_color_manual(values = c("#56B4E9", "black"),
                                        labels = c("Permuted Null", "Target")) +     
                     theme_bw() +  
                     theme(legend.title=element_blank()) +  
                     theme(legend.position = "top") + 
                     theme(
                       axis.title.x = element_text(size = 15),
                       axis.text.x = element_text(size = 12),
                       axis.title.y = element_text(size = 15),
                       axis.text.y = element_text(size = 12),
                       legend.text = element_text(size = 15),
                       plot.title = element_text(size=15)) +
                     theme(
                       strip.background = element_blank(),
                       strip.text.x = element_blank()) + 
                     theme(panel.grid.major = element_blank(), 
                           panel.grid.minor = element_blank())
                   )
## (elastic net coefficient plots rendered here, one per response variable,
## in the same order as the OLS plots above: $TFMRI_NB_ALL_BEH_C2B_RATE
## through $PEA_WISCV_TRS)

3.8 Random Forest

3.8.1 extract the final workflow and best tuned parameters from random forest

random_forest_wfl_final_list <- map(random_forest_tune, "random_forest_wf_final")
best_random_forest_model_list <- map(random_forest_tune, "best_random_forest_model")

3.8.2 fit the random forest model with the best grid search parameters

random_forest_final_fit <-pmap(list(recipe_list,
                                    random_forest_wfl_final_list,
                                    formula_list),
                          ~model_final_fit(test_data = data_test,
                                           recipe_input = ..1,
                                           wf_input = ..2,
                                           formula_input = ..3,
                                           model_name = "random_forest")) 

3.8.3 extract the output of model prediction and model fit

random_forest_final_fit_list <- map(random_forest_final_fit, "random_forest_final_fit")
random_forest_predicted_list <- map(random_forest_final_fit, "random_forest_predict")


yardstick::mae(data = random_forest_predicted_list$TFMRI_NB_ALL_BEH_C2B_RATE, 
               truth =.data$TFMRI_NB_ALL_BEH_C2B_RATE, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric .estimator .estimate
##   <chr>   <chr>          <dbl>
## 1 mae     standard       0.723
yardstick::rsq_trad(data = random_forest_predicted_list$TFMRI_NB_ALL_BEH_C2B_RATE, 
               truth =.data$TFMRI_NB_ALL_BEH_C2B_RATE, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric  .estimator .estimate
##   <chr>    <chr>          <dbl>
## 1 rsq_trad standard       0.217

3.9 XgBoost

3.9.1 Get the final workflow and best tuned parameters from xgboost

xgboost_wfl_final_list <- map(xgboost_tune, "xgboost_wf_final")
best_xgboost_model_list <- map(xgboost_tune, "best_xgboost_model")

3.9.2 fit the model with the best grid parameter

xgboost_final_fit <-future_pmap(list(recipe_list,
                                     best_xgboost_model_list,
                                     resp_names),
                          ~xgboost_model_pred(
                            recipe_input=..1, 
                            param_input=..2,
                            resp_input=..3,
                            train_input = data_train,
                            test_input = data_test),
                          .options = furrr::furrr_options(seed = 123456)) 

3.9.3 extract the output

https://www.rdocumentation.org/packages/xgboost/versions/0.71.2/topics/predict.xgb.Booster

Setting predcontrib = TRUE allows calculating the contribution of each feature to individual predictions. For the "gbtree" booster, feature contributions are SHAP values (Lundberg 2017) that sum to the difference between the expected output of the model and the current prediction (where the Hessian weights are used to compute the expectations).

"BIAS" is the base value (the expected model output), appended as the last column of the contribution matrix; the per-feature columns are the Shapley values themselves.
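
A minimal, self-contained sketch of the sum-to-prediction property (simulated data, not the ABCD data; all object names here are hypothetical):

set.seed(1)
x <- matrix(rnorm(200), ncol = 4, dimnames = list(NULL, paste0("roi_", 1:4)))
y <- as.numeric(x %*% c(0.5, -0.3, 0.2, 0)) + rnorm(50, sd = 0.1)
d <- xgboost::xgb.DMatrix(data = x, label = y)
fit <- xgboost::xgboost(data = d, nrounds = 20,
                        objective = "reg:squarederror", verbose = 0)
contrib <- predict(fit, d, predcontrib = TRUE) ## columns roi_1..roi_4 plus "BIAS"
all.equal(rowSums(contrib), predict(fit, d))   ## TRUE: SHAP values + BIAS = prediction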

xgboost_final_fit_list <- map(xgboost_final_fit, "xgboost_final_fit")
xgboost_predicted_list <- map(xgboost_final_fit, "xgboost_predict")
xgboost_predicted_train_list <- map(xgboost_final_fit, "xgboost_predict_train")
xgboost_shap_list <- map(xgboost_final_fit, "xgboost_shap_plot")


yardstick::mae(data = xgboost_predicted_list$TFMRI_NB_ALL_BEH_C2B_RATE, 
               truth =.data$TFMRI_NB_ALL_BEH_C2B_RATE, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric .estimator .estimate
##   <chr>   <chr>          <dbl>
## 1 mae     standard       0.726
yardstick::rsq_trad(data = xgboost_predicted_list$TFMRI_NB_ALL_BEH_C2B_RATE, 
               truth =.data$TFMRI_NB_ALL_BEH_C2B_RATE, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric  .estimator .estimate
##   <chr>    <chr>          <dbl>
## 1 rsq_trad standard       0.208

3.10 Linear SVM

3.10.1 Get the final workflow and best tuned parameters from linear SVM

svm_linear_wfl_final_list <- map(svm_linear_tune, "svm_linear_wf_final")
best_svm_linear_model_list <- map(svm_linear_tune, "best_svm_linear_model")

3.10.2 fit the model with the best grid parameter for linear SVM

svm_linear_final_fit <-pmap(list(recipe_list,
                                 svm_linear_wfl_final_list, 
                                 formula_list),
                          ~model_final_fit(test_data = data_test, 
                                           recipe_input = ..1,
                                           wf_input = ..2,
                                           formula_input = ..3,
                                           model_name = "svm_linear")) 

3.10.3 extract the output of model fit and prediction

svm_linear_final_fit_list <- map(svm_linear_final_fit, "svm_linear_final_fit")
svm_linear_predicted_list <- map(svm_linear_final_fit, "svm_linear_predict")

yardstick::mae(data = svm_linear_predicted_list$TFMRI_NB_ALL_BEH_C2B_RATE, 
               truth =.data$TFMRI_NB_ALL_BEH_C2B_RATE, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric .estimator .estimate
##   <chr>   <chr>          <dbl>
## 1 mae     standard       0.703
yardstick::rsq_trad(data = svm_linear_predicted_list$TFMRI_NB_ALL_BEH_C2B_RATE, 
               truth =.data$TFMRI_NB_ALL_BEH_C2B_RATE, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric  .estimator .estimate
##   <chr>    <chr>          <dbl>
## 1 rsq_trad standard       0.247
yardstick::mae(data = svm_linear_predicted_list$NIHTBX_READING_UNCORRECTED, 
               truth =.data$NIHTBX_READING_UNCORRECTED, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric .estimator .estimate
##   <chr>   <chr>          <dbl>
## 1 mae     standard       0.711
yardstick::rsq_trad(data = svm_linear_predicted_list$NIHTBX_READING_UNCORRECTED, 
               truth =.data$NIHTBX_READING_UNCORRECTED, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric  .estimator .estimate
##   <chr>    <chr>          <dbl>
## 1 rsq_trad standard      0.0847

3.11 SVM RBF

3.11.1 Extract the final workflow and best tuned parameters from RBF SVM

SVM_RBF_wfl_final_list <- map(SVM_RBF_tune, "svm_rbf_wf_final")
best_SVM_RBF_model_list <- map(SVM_RBF_tune, "best_svm_rbf_model")

3.11.2 fit the model with the best grid search results

SVM_RBF_final_fit <-pmap(list(recipe_list,
                              SVM_RBF_wfl_final_list, 
                              formula_list),
                                ~model_final_fit(test_data = data_test,
                                                 recipe_input = ..1,
                                                 wf_input = ..2,
                                                 formula_input = ..3,
                                                 model_name = "SVM_RBF")) 

3.11.3 extract the output of model fit and prediction for RBF SVM

SVM_RBF_final_fit_list <- map(SVM_RBF_final_fit, "SVM_RBF_final_fit")
SVM_RBF_predicted_list <- map(SVM_RBF_final_fit, "SVM_RBF_predict")


yardstick::mae(data = SVM_RBF_predicted_list$TFMRI_NB_ALL_BEH_C2B_RATE, 
               truth =.data$TFMRI_NB_ALL_BEH_C2B_RATE, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric .estimator .estimate
##   <chr>   <chr>          <dbl>
## 1 mae     standard       0.696
yardstick::rsq_trad(data = SVM_RBF_predicted_list$TFMRI_NB_ALL_BEH_C2B_RATE, 
               truth =.data$TFMRI_NB_ALL_BEH_C2B_RATE, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric  .estimator .estimate
##   <chr>    <chr>          <dbl>
## 1 rsq_trad standard       0.258

3.12 Polynomial SVM

3.12.1 Get the final workflow and best tuned parameters from polynomial SVM

svm_poly_wfl_final_list <- map(svm_poly_tune, "svm_poly_wf_final")
best_svm_poly_model_list <- map(svm_poly_tune, "best_svm_poly_model")

3.12.2 fit the model with best grid parameters

svm_poly_final_fit <-pmap(list(recipe_list,
                               svm_poly_wfl_final_list, 
                               formula_list),
                          ~model_final_fit(test_data = data_test, 
                                           recipe_input = ..1,
                                           wf_input = ..2,
                                           formula_input = ..3,
                                           model_name = "svm_poly")) 

3.12.3 extract the output of prediction and model fit

svm_poly_final_fit_list <- map(svm_poly_final_fit, "svm_poly_final_fit")
svm_poly_predicted_list <- map(svm_poly_final_fit, "svm_poly_predict")


yardstick::mae(data = svm_poly_predicted_list$TFMRI_NB_ALL_BEH_C2B_RATE, 
               truth =.data$TFMRI_NB_ALL_BEH_C2B_RATE, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric .estimator .estimate
##   <chr>   <chr>          <dbl>
## 1 mae     standard       0.702
yardstick::rsq_trad(data = svm_poly_predicted_list$TFMRI_NB_ALL_BEH_C2B_RATE, 
               truth =.data$TFMRI_NB_ALL_BEH_C2B_RATE, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric  .estimator .estimate
##   <chr>    <chr>          <dbl>
## 1 rsq_trad standard       0.247

4 G-factor

4.1 Compute the G-factor CFA and prep it for predictive modeling

4.1.1 CFA of the gfactor based on 10 tasks

Remove the outliers from the dataset and fit the bifactor model of the g-factor.

TaskDVs1Batch = c("NIHTBX_FLANKER_UNCORRECTED", 
"NIHTBX_CARDSORT_UNCORRECTED", 
"NIHTBX_PATTERN_UNCORRECTED", 
"NIHTBX_PICVOCAB_UNCORRECTED", 
"NIHTBX_READING_UNCORRECTED", 
"NIHTBX_PICTURE_UNCORRECTED", 
"PEA_RAVLT_LD_TRIAL_VII_TC", 
"NIHTBX_LIST_UNCORRECTED", 
"LMT_SCR_PERC_CORRECT", 
"PEA_WISCV_TRS"
)

processed_split_train <- split_train %>%
              select(all_of(subj_info), all_of(TaskDVs1Batch))%>%
              drop_na()%>% 
              IQR_remove(resp_vec = all_of(TaskDVs1Batch))

processed_split_test <- split_test %>%
              select(all_of(subj_info),all_of(TaskDVs1Batch))%>%
              drop_na()%>%
              IQR_remove(resp_vec = all_of(TaskDVs1Batch))

NeuroCogBiFac <-'
Language_Reasoning =~ NIHTBX_PICVOCAB_UNCORRECTED + NIHTBX_READING_UNCORRECTED + NIHTBX_LIST_UNCORRECTED + PEA_WISCV_TRS
Cognitive_Flexibility =~ NIHTBX_FLANKER_UNCORRECTED + NIHTBX_CARDSORT_UNCORRECTED + NIHTBX_PATTERN_UNCORRECTED 
Memory_Recall =~ NIHTBX_PICTURE_UNCORRECTED + PEA_RAVLT_LD_TRIAL_VII_TC
g =~ NIHTBX_PICVOCAB_UNCORRECTED + NIHTBX_READING_UNCORRECTED + NIHTBX_LIST_UNCORRECTED + PEA_WISCV_TRS + NIHTBX_FLANKER_UNCORRECTED + NIHTBX_CARDSORT_UNCORRECTED + NIHTBX_PATTERN_UNCORRECTED + NIHTBX_PICTURE_UNCORRECTED + PEA_RAVLT_LD_TRIAL_VII_TC
#orthogonalize everything
Language_Reasoning ~~ 0*Cognitive_Flexibility
Language_Reasoning ~~ 0*Memory_Recall
Cognitive_Flexibility ~~ 0*Memory_Recall
g ~~ 0*Language_Reasoning
g ~~ 0*Cognitive_Flexibility
g ~~ 0*Memory_Recall
'
NeuroCogBiFac_fit <- lavaan::sem(model = NeuroCogBiFac,
                                 data = processed_split_train,
                                 estimator="MLR")

lavaan::summary(NeuroCogBiFac_fit, standardized = TRUE, rsquare = TRUE, fit.measures = TRUE)
## lavaan 0.6-10 ended normally after 61 iterations
## 
##   Estimator                                         ML
##   Optimization method                           NLMINB
##   Number of model parameters                        27
##                                                       
##   Number of observations                          4105
##                                                       
## Model Test User Model:
##                                                Standard      Robust
##   Test Statistic                                 75.999      70.005
##   Degrees of freedom                                 18          18
##   P-value (Chi-square)                            0.000       0.000
##   Scaling correction factor                                   1.086
##        Yuan-Bentler correction (Mplus variant)                     
## 
## Model Test Baseline Model:
## 
##   Test statistic                              6726.755    6315.254
##   Degrees of freedom                                36          36
##   P-value                                        0.000       0.000
##   Scaling correction factor                                  1.065
## 
## User Model versus Baseline Model:
## 
##   Comparative Fit Index (CFI)                    0.991       0.992
##   Tucker-Lewis Index (TLI)                       0.983       0.983
##                                                                   
##   Robust Comparative Fit Index (CFI)                         0.992
##   Robust Tucker-Lewis Index (TLI)                            0.983
## 
## Loglikelihood and Information Criteria:
## 
##   Loglikelihood user model (H0)             -49092.806  -49092.806
##   Scaling correction factor                                  1.107
##       for the MLR correction                                      
##   Loglikelihood unrestricted model (H1)     -49054.806  -49054.806
##   Scaling correction factor                                  1.099
##       for the MLR correction                                      
##                                                                   
##   Akaike (AIC)                               98239.611   98239.611
##   Bayesian (BIC)                             98410.250   98410.250
##   Sample-size adjusted Bayesian (BIC)        98324.456   98324.456
## 
## Root Mean Square Error of Approximation:
## 
##   RMSEA                                          0.028       0.027
##   90 Percent confidence interval - lower         0.022       0.020
##   90 Percent confidence interval - upper         0.035       0.033
##   P-value RMSEA <= 0.05                          1.000       1.000
##                                                                   
##   Robust RMSEA                                               0.028
##   90 Percent confidence interval - lower                     0.021
##   90 Percent confidence interval - upper                     0.035
## 
## Standardized Root Mean Square Residual:
## 
##   SRMR                                           0.017       0.017
## 
## Parameter Estimates:
## 
##   Standard errors                             Sandwich
##   Information bread                           Observed
##   Observed information based on                Hessian
## 
## Latent Variables:
##                            Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   Language_Reasoning =~
##     NIHTBX_PICVOCA            1.000                                0.553    0.553
##     NIHTBX_READING            0.829    0.167    4.972    0.000     0.459    0.459
##     NIHTBX_LIST_UN            0.101    0.066    1.527    0.127     0.056    0.056
##     PEA_WISCV_TRS             0.313    0.060    5.202    0.000     0.173    0.173
##   Cognitive_Flexibility =~
##     NIHTBX_FLANKER            1.000                                0.457    0.457
##     NIHTBX_CARDSOR            1.308    0.083   15.669    0.000     0.598    0.598
##     NIHTBX_PATTERN            1.156    0.072   16.125    0.000     0.528    0.528
##   Memory_Recall =~
##     NIHTBX_PICTURE            1.000                                0.610    0.610
##     PEA_RAVLT_LD_T            0.426    0.021   20.129    0.000     0.260    0.260
##   g =~
##     NIHTBX_PICVOCA            1.000                                0.510    0.510
##     NIHTBX_READING            0.981    0.043   22.791    0.000     0.501    0.501
##     NIHTBX_LIST_UN            1.207    0.060   20.107    0.000     0.616    0.616
##     PEA_WISCV_TRS             1.018    0.054   18.842    0.000     0.520    0.520
##     NIHTBX_FLANKER            0.700    0.054   13.073    0.000     0.357    0.357
##     NIHTBX_CARDSOR            0.781    0.057   13.719    0.000     0.399    0.399
##     NIHTBX_PATTERN            0.588    0.049   12.080    0.000     0.300    0.300
##     NIHTBX_PICTURE            0.874    0.064   13.647    0.000     0.446    0.446
##     PEA_RAVLT_LD_T            0.936    0.059   15.887    0.000     0.477    0.478
## 
## Covariances:
##                            Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   Language_Reasoning ~~
##     Cogntv_Flxblty            0.000                                0.000    0.000
##     Memory_Recall             0.000                                0.000    0.000
##   Cognitive_Flexibility ~~
##     Memory_Recall             0.000                                0.000    0.000
##   Language_Reasoning ~~
##     g                         0.000                                0.000    0.000
##   Cognitive_Flexibility ~~
##     g                         0.000                                0.000    0.000
##   Memory_Recall ~~
##     g                         0.000                                0.000    0.000
## 
## Variances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##    .NIHTBX_PICVOCA    0.434    0.058    7.499    0.000    0.434    0.434
##    .NIHTBX_READING    0.539    0.041   13.267    0.000    0.539    0.539
##    .NIHTBX_LIST_UN    0.617    0.024   26.104    0.000    0.617    0.617
##    .PEA_WISCV_TRS     0.700    0.019   37.374    0.000    0.700    0.700
##    .NIHTBX_FLANKER    0.663    0.022   29.645    0.000    0.663    0.664
##    .NIHTBX_CARDSOR    0.483    0.027   17.707    0.000    0.483    0.484
##    .NIHTBX_PATTERN    0.631    0.024   26.486    0.000    0.631    0.631
##    .NIHTBX_PICTURE    0.428    0.021   20.479    0.000    0.428    0.429
##    .PEA_RAVLT_LD_T    0.704    0.019   36.235    0.000    0.704    0.704
##     Language_Rsnng    0.306    0.061    4.993    0.000    1.000    1.000
##     Cogntv_Flxblty    0.209    0.020   10.679    0.000    1.000    1.000
##     Memory_Recall     0.372    0.021   17.814    0.000    1.000    1.000
##     g                 0.260    0.024   10.967    0.000    1.000    1.000
## 
## R-Square:
##                    Estimate
##     NIHTBX_PICVOCA    0.566
##     NIHTBX_READING    0.461
##     NIHTBX_LIST_UN    0.383
##     PEA_WISCV_TRS     0.300
##     NIHTBX_FLANKER    0.336
##     NIHTBX_CARDSOR    0.516
##     NIHTBX_PATTERN    0.369
##     NIHTBX_PICTURE    0.571
##     PEA_RAVLT_LD_T    0.296

4.1.2 plot the bifactor model for G-Factor

labels<-c("PIC\nVOCAB","READING\nRECOG","LIST\nWORKING\nMEMORY","MATRIX\nREASON",
          "FLANKER","CARD\nSORT","PATTERN\nSPEED",
          "SEQUENCE\nMEMORY","AUDI\nVERBAL\nDELAY",
          "Language\nReasoning", "Cognitive\nFlexibility", "Memory\nRecall",
          "g")

semPlot::semPaths(NeuroCogBiFac_fit, 
                  "model", 
                  whatLabels = "std", 
                  bifactor = "g", 
                  layout = "tree2", 
                  nodeLabels=labels,
                  residuals = FALSE, 
                  exoCov = FALSE,
                  edge.label.cex = 1,
                  sizeMan = 10,
                  sizeLat = 20,
                  edge.color="black"
#                  rotation=2
                  )

4.1.3 Obtain the prediction results from the CFA model

gfactor_train_output <- lavaan::lavPredict(NeuroCogBiFac_fit, 
                                           newdata = processed_split_train)%>%
                        tibble::as_tibble()%>%
                        mutate_all(as.double)%>% 
                        rename(gfactor=g)%>%
                        mutate(SUBJECTKEY= processed_split_train$SUBJECTKEY)

gfactor_train_all <-  left_join(split_train,gfactor_train_output,
                                by ="SUBJECTKEY") %>% 
                        drop_na()%>%
                        IQR_remove(resp_vec = "gfactor") 


gfactor_test_output <- lavaan::lavPredict(NeuroCogBiFac_fit, 
                                          newdata = processed_split_test)%>%
                        tibble::as_tibble()%>%
                        mutate_all(as.double)%>%
                        rename(gfactor=g)%>%
                        mutate(SUBJECTKEY= processed_split_test$SUBJECTKEY)

gfactor_test_all <-  left_join(split_test,gfactor_test_output,
                               by ="SUBJECTKEY") %>% 
                     drop_na()%>%
                     IQR_remove(resp_vec = "gfactor")

4.1.4 number of participants in the training and test data

## print the size of the training data after IQR removal
dim(gfactor_train_all)
## [1] 2979  185
gfactor_train_all_subj <- left_join(gfactor_train_all, subj_info_all, by = subj_info)
## males
sum(gfactor_train_all_subj$SEX =="M")
## [1] 1510
## females
sum(gfactor_train_all_subj$SEX =="F")
## [1] 1468
## print the size of the test data after IQR removal
dim(gfactor_test_all)
## [1] 1006  185
gfactor_test_all_subj <- left_join(gfactor_test_all, subj_info_all, by = subj_info)
## males
sum(gfactor_test_all_subj$SEX =="M")
## [1] 509
## females
sum(gfactor_test_all_subj$SEX =="F")
## [1] 497

4.1.5 formulas and recipes

The data preprocessing consists of two steps: (1) IQR-based outlier removal, done by the IQR_remove function, and (2) scaling, done in the recipe function below. The reason for splitting this two-step procedure across two functions is an issue with the recipe function: some models fail if the columns of the data frame change at prediction time. The scaling step is nevertheless kept in the recipe, because scaling already-scaled data changes nothing.
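For reference, a minimal sketch of what an IQR-based filter in the spirit of IQR_remove could look like (the real function is defined earlier in this document; the 3 x IQR cut-off here is an assumption chosen for illustration):

IQR_remove_sketch <- function(data_input, resp_vec){
  # drop rows whose response lies outside [Q1 - 3*IQR, Q3 + 3*IQR]
  q <- stats::quantile(data_input[[resp_vec]], probs = c(.25, .75), na.rm = TRUE)
  iqr_val <- q[[2]] - q[[1]]
  dplyr::filter(data_input,
                .data[[resp_vec]] >= q[[1]] - 3 * iqr_val,
                .data[[resp_vec]] <= q[[2]] + 3 * iqr_val)
}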

cfa_resp_names <- c('gfactor')%>%
                   set_names()

formula_gfactor <- cfa_resp_names %>%
  map(.,~as.formula(paste(.,paste(feature_names,collapse = "+"),sep="~")))

recipe_prep_gfactor <- function(resp_var,
                                formula_input,
                                train_input=data_train){
 norm_recipe <- recipe( formula_input, 
                        data = train_input) %>%
   update_role(starts_with("roi_"), 
               new_role = "predictor")%>%
   step_dummy(all_nominal()) %>%
  prep(training = train_input, retain = TRUE)
  return(norm_recipe)
}

recipe_gfactor <-  map2(.x= cfa_resp_names,
                        .y = formula_gfactor,
                        ~recipe_prep_gfactor(resp_var = .x, 
                                             formula_input = .y, 
                                             train_input = gfactor_train_all))
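Because each recipe is prepped with retain = TRUE, the processed training set can be recovered straight from the recipe with the same bake(new_data = NULL) idiom the model-fitting code below relies on:

# illustrative: pull the retained, processed training data back out of the recipe
recipe_gfactor[["gfactor"]] %>% bake(new_data = NULL) %>% dim()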

4.2 Model fitting for the g Factor

4.2.1 Mass univariate

simple_all_IQR_gfactor <- map2(.x=cfa_resp_names,
                               .y = recipe_gfactor,
                               ~resp_result(.x,
                                            recipe_input = .y, 
                                            test_input = gfactor_test_all))

univariate_model_broom_gfactor <- map(simple_all_IQR_gfactor , "model_broom")

univariate_model_pred_gfactor <- map(simple_all_IQR_gfactor , "model_pred")

univariate_model_pred_gfactor <- map2(.x=univariate_model_pred_gfactor,
                                      .y=cfa_resp_names, 
                                      function(pred_input=.x, 
                                               resp_input){
                                        names_vec <-  c(names(pred_input)[1:167],
                                                        resp_input) 
                                        names(pred_input) <- names_vec
                                        return(pred_input)})


univariate_model_broom_gfactor <- univariate_model_broom_gfactor %>% 
  map(., ~ mutate(.,
                  FDR = p.adjust(p.value,
                                 method = 'fdr'),
                  bonferroni= p.adjust(p.value, 
                                       method = 'bonferroni')))
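As a reminder of what the two corrections do (illustrative p-values only):

p_toy <- c(0.001, 0.01, 0.02, 0.04)
p.adjust(p_toy, method = "fdr")         # Benjamini-Hochberg step-up adjustment
p.adjust(p_toy, method = "bonferroni")  # pmin(1, p * number of tests)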

univariate_model_fdr_gfactor <- univariate_model_broom_gfactor %>% 
  map(., ~ filter(.,FDR  <= 0.05))

univariate_model_bonferroni_gfactor <- univariate_model_broom_gfactor %>% 
  map(., ~ filter(.,bonferroni  <= 0.05))

median_univar_fdr_pred_gfactor <- pmap(list(cfa_resp_names,
                                            univariate_model_fdr_gfactor,
                                            univariate_model_pred_gfactor),
                               ~median_extract(resp_input=..1, 
                                               model_input=..2, 
                                               pred_input=..3) )

median_univar_bonferroni_pred_gfactor <- pmap(list(cfa_resp_names,
                                                   univariate_model_bonferroni_gfactor,
                                                   univariate_model_pred_gfactor),
                               ~median_extract(resp_input=..1, 
                                               model_input=..2, 
                                               pred_input=..3) )

4.2.2 OLS

OLS_fit_gfactor <-  map2(.x=formula_gfactor,
                         .y=recipe_gfactor ,
                         ~lm(.x,data = .y %>%  
                               bake(new_data= NULL)))

OLS_predict_list_gfactor  <- map2(.x=OLS_fit_gfactor,
                                  .y=recipe_gfactor,
                                  ~predict(.x,
                                           newdata = bake(prep(.y),
                                                          new_data = gfactor_test_all) )%>% 
                                    tibble::as_tibble() %>%
                                    rename(model_pred = value)%>%
                                    bind_cols(bake(prep(.y), 
                                                   new_data = gfactor_test_all) ))

yardstick::rsq_trad(data = OLS_predict_list_gfactor$gfactor, 
                    truth =.data$gfactor, 
                    estimate =.data$model_pred)
## # A tibble: 1 x 3
##   .metric  .estimator .estimate
##   <chr>    <chr>          <dbl>
## 1 rsq_trad standard       0.159
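yardstick::rsq_trad is the traditional coefficient of determination, 1 - SS_residual / SS_total; a hand-rolled equivalent (illustrative sketch only) would be:

# traditional R-squared: 1 - sum of squared errors / total sum of squares
rsq_trad_manual <- function(truth, estimate){
  1 - sum((truth - estimate)^2) / sum((truth - mean(truth))^2)
}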

4.2.3 plot OLS coefficients and standard errors for regions that passed p < .05

tidy_fit_ols_gfactor <-OLS_fit_gfactor %>%  map(., ~broom::tidy(.))
tidy_fit_ols_gfactor <- tidy_fit_ols_gfactor %>%   map(.,~filter(.,term != '(Intercept)' & p.value < 0.05 )%>%
                                             mutate(.,roi = str_remove(term, 'roi_'))%>%
                                         left_join( .,new_shorter_names,by="roi")%>%
                                         mutate(.,direction = ifelse(estimate >= median(estimate), "big","small")))



cfa_resp_names %>% map(~ggplot(tidy_fit_ols_gfactor[[.]],aes(fct_reorder(roiShort, estimate), estimate, 
             ymin = estimate - 2 * std.error, 
             ymax = estimate + 2 * std.error)) +
  geom_hline(yintercept = 0, linetype = 'dashed', col = 'grey60') +
  geom_pointrange(fatten = 1.5, col = 'grey60') +
  coord_flip() +
  labs(x = 'Explanatory variables (Brain Regions)', y = 'Coefficients (± 2 std. errors)',
       title = paste0('G-Factor\nOLS Coefficients (p < .05)')) + 
     facet_wrap(~ direction, scales = 'free_y') +
                     theme(
                       axis.title.x = element_text(size = 15),
                       axis.text.x = element_text(size = 12),
                       axis.title.y = element_text(size = 15),
                       axis.text.y = element_text(size = 12),
                       legend.text = element_text(size = 10),
                       plot.title = element_text(size=16)) + 
     theme(
    strip.background = element_blank(),
    strip.text.x = element_blank()
))
## $gfactor

4.2.4 tuning elastic net, random forest, RBF SVM, linear SVM, polynomial SVM and XGBoost

library(doFuture)
registerDoFuture()
plan(multisession(workers = 45))


start_time <- Sys.time() 

enet_tune_gfactor <- map2(recipe_gfactor,
                          formula_gfactor, 
                          ~enet_tuning(recipe_input = .x,
                                       formula_input = .y))

saveRDS(enet_tune_gfactor, 
        paste0(anotherFold,'working_memory_tasks/windows/enet_tune_gfactor_Dec_03_2021_rmse', '.RData'))

stop_time_gfactor <- Sys.time() 

start_time <- Sys.time() 

svm_rbf_tune_gfactor <-map2(recipe_gfactor,
                            formula_gfactor, 
                            ~SVM_RBF_tuning(recipe_input = .x,
                                            formula_input = .y))

saveRDS(svm_rbf_tune_gfactor, 
        paste0(anotherFold,'working_memory_tasks/windows/SVM_RBF_tune_gfactor_Mar_21_2022', '.RData'))


stop_time <- Sys.time() 

start_time <- Sys.time() 

random_forest_tune_gfactor <- map2(recipe_gfactor,
                                   formula_gfactor, 
                                   ~random_forest_tuning(recipe_input = .x,
                                                        formula_input = .y))
saveRDS(random_forest_tune_gfactor,
        paste0(anotherFold,'working_memory_tasks/windows/random_forest_tune_gfactor_Nov_04_2021', '.RData'))

stop_time <- Sys.time() 

start_time <- Sys.time() 

svm_linear_tune_gfactor <- map2(recipe_gfactor,
                                formula_gfactor, 
                                ~SVM_linear_tuning(recipe_input = .x,
                                                   formula_input = .y))

saveRDS(svm_linear_tune_gfactor, 
        paste0(anotherFold,'working_memory_tasks/windows/SVM_linear_tune_gfactor_Nov_04_2021', '.RData'))

stop_time <- Sys.time()

start_time <- Sys.time() 

svm_poly_tune_gfactor <- map2(recipe_gfactor,
                              formula_gfactor, 
                              ~SVM_poly_tuning(recipe_input = .x,
                                               formula_input = .y))

saveRDS(svm_poly_tune_gfactor, 
        paste0(anotherFold,'working_memory_tasks/windows/svm_poly_tune_gfactor_Nov_04_2021', '.RData'))

stop_time <- Sys.time() 

start_time <- Sys.time() 

xgboost_tune_gfactor <- map2(recipe_gfactor,
                             formula_gfactor, 
                             ~xgboost_tuning(recipe_input = .x,
                                             formula_input = .y))

saveRDS(xgboost_tune_gfactor, 
        paste0(anotherFold,'working_memory_tasks/windows/xgboost_tune_gfactor_Nov_04_2021', '.RData'))

stop_time <- Sys.time() 

4.2.5 processing the outputs

Get the best parameters from the grid search, and then get the workflow, model fit and prediction with the best grid parameters.

enet_wfl_final_list_gfactor <- map(enet_tune_gfactor, "enet_wf_final")
best_enet_model_list_gfactor <- map(enet_tune_gfactor, "best_enet_model")
enet_final_fit_gfactor <-pmap(list(recipe_gfactor,
                                   enet_wfl_final_list_gfactor, 
                                   formula_gfactor),
                                ~model_final_fit(recipe_input  = ..1,
                                                 wf_input = ..2,
                                                 formula_input = ..3,
                                                 model_name = "enet",
                                                 test_data = gfactor_test_all)) 

4.2.6 extract the output of enet

enet_final_fit_list_gfactor <- map(enet_final_fit_gfactor, 
                                   "enet_final_fit")
enet_predicted_list_gfactor <- map(enet_final_fit_gfactor, 
                                   "enet_predict")

yardstick::rmse(data = enet_predicted_list_gfactor$gfactor, 
               truth =.data$gfactor, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric .estimator .estimate
##   <chr>   <chr>          <dbl>
## 1 rmse    standard       0.901
yardstick::mae(data = enet_predicted_list_gfactor$gfactor, 
               truth =.data$gfactor, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric .estimator .estimate
##   <chr>   <chr>          <dbl>
## 1 mae     standard       0.720
yardstick::rsq_trad(data = enet_predicted_list_gfactor$gfactor, 
                    truth =.data$gfactor, 
                    estimate =.data$model_predict)
## # A tibble: 1 x 3
##   .metric  .estimator .estimate
##   <chr>    <chr>          <dbl>
## 1 rsq_trad standard       0.187

4.2.7 prep data for eNetXplorer

matrix_train_gfactor <-bake(recipe_gfactor[[cfa_resp_names[1]]], 
                            new_data = NULL)%>%
               select(starts_with("roi_"))%>%
               as.matrix()

resp_train_gfactor <- cfa_resp_names %>% map(.,~bake(recipe_gfactor[[.]], 
                                                     new_data = NULL)%>%
                                   select(-starts_with("roi_"))%>%
                                   as.vector())

4.2.8 eNetXplorer

fit_explorer_gfactor <-cfa_resp_names %>% 
  future_map(.,
             ~eNetXplorer(x = matrix_train_gfactor ,
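                          # resp_train_gfactor[[.]] is a one-column tibble, so the
                          # second [[.]] below extracts the bare numeric vector for y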
                          y = resp_train_gfactor[[.]][[.]],
                          alpha = best_enet_model_list_gfactor[[.]][["mixture"]], 
                          n_fold = 10,
                          nlambda.ext = 1000, 
                          nlambda = 1000,
                          scaled = TRUE,
                          QF_gaussian = "mse",
                          seed = 123456)) 

saveRDS(fit_explorer_gfactor, paste0(anotherFold,'working_memory_tasks/windows/fit_explorer_gfactor_Dec_03_2021_rmse', '.RData'))

4.2.9 Extract the lambda value from the enetXplorer output

lambdas_gfactor <- vector("list", 
                          length = length(cfa_resp_names))
names(lambdas_gfactor)<- cfa_resp_names
lambdas_gfactor_best <- vector("list", 
                               length = length(cfa_resp_names))
names(lambdas_gfactor_best)<- cfa_resp_names
summary_enet_gfactor <- vector("list", 
                               length = length(cfa_resp_names))
names(summary_enet_gfactor)<- cfa_resp_names

for(i in 1:length(cfa_resp_names)){
  lambdas_gfactor[[cfa_resp_names[i]]] <- fit_explorer_gfactor[[cfa_resp_names[i]]][["lambda_values"]]
  lambdas_gfactor_best[[cfa_resp_names[i]]] <- fit_explorer_gfactor[[cfa_resp_names[i]]][["best_lambda"]]
  summary_enet_gfactor[[cfa_resp_names[i]]]<-
  as_tibble(summary(fit_explorer_gfactor[[cfa_resp_names[i]]])[[2]]) %>% slice(1)
}

summary_enet_gfactor %>% bind_rows() %>% 
  rename(.,
         Alpha = alpha, 
         `Best-tune lambda` = lambda.max, 
         `MSE` = QF.est, 
         `P-value` = model.vs.null.pval) %>%
pander::pander(split.cell = 80, split.table = Inf, justify = 'left')
Alpha   Best-tune lambda   MSE       P-value
------  -----------------  --------  ----------
0.05    0.127              -0.8105   0.0003998

4.2.10 plot the predictive performance with various values of lambda

alpha_vals_gfactor <- best_enet_model_list_gfactor %>% 
  map(.,~paste0("a",.[["mixture"]])) 

cfa_resp_names%>% 
  map(.,
      ~qplot(fit_explorer_gfactor[[.]][["lambda_values"]][[alpha_vals_gfactor[[.]]]],
             fit_explorer_gfactor[[.]][["lambda_QF_est"]][[alpha_vals_gfactor[[.]]]], 
             geom = 'line') + 
        scale_x_log10() + 
        geom_vline(xintercept = lambdas_gfactor_best[[.]], col = 'red', linetype = 'dashed') +
        labs(x = NULL, y = NULL, title =. )
  )  
## $gfactor

4.2.11 extract the permuted parameter estimate and plot against the null permuted values

coefs_enet_gfactor <- cfa_resp_names %>%  
  map(.,~extract_tibble(fit_explorer_gfactor[[.]],
                        alpha_index = paste0("a",best_enet_model_list_gfactor[[.]]$mixture)))

coefs_enet_gfactor <- coefs_enet_gfactor  %>% 
  map(.,~filter(.,pvalue < 0.05) %>%
  mutate(.,
         type = ifelse(type == 'Null', 
                       'Null permuted models', 
                       'Target models'),
         roi = str_remove(variable, 'roi_'))%>%
    left_join( .,new_shorter_names,by="roi"))

roi_num_enet_gfactor <- coefs_enet_gfactor %>% 
  map(.,~dim(.)[1])
max_roi_enet_gfactor <- max(as.numeric(roi_num_enet_gfactor))

coefs_enet_gfactor <- coefs_enet_gfactor%>%map(.,~group_by(.,type)) 
coefs_enet_gfactor <- coefs_enet_gfactor%>%map(.,~nest(.,-type)) 

for(i in 1:length(cfa_resp_names)){
  # assign each ROI to a "big" or "small" facet using the median weighted-mean
  # coefficient; when only a few ROIs survive, keep them all in one facet
  tbl_two <- coefs_enet_gfactor[[cfa_resp_names[i]]][["data"]][[2]]
  tbl_two <- tbl_two %>% 
    mutate(direction = ifelse(wmean >= median(wmean) |
                                roi_num_enet_gfactor[[cfa_resp_names[i]]] <= floor(max_roi_enet_gfactor/2),
                              "big", "small"))
  coefs_enet_gfactor[[cfa_resp_names[i]]][["data"]][[2]] <- tbl_two
  
  # give the other nested group the same facet labels so both share the layout
  coefs_enet_gfactor[[cfa_resp_names[i]]][["data"]][[1]] <-
    coefs_enet_gfactor[[cfa_resp_names[i]]][["data"]][[1]] %>%
    mutate(direction = tbl_two$direction)
}

coefs_enet_gfactor <- coefs_enet_gfactor %>%map(.,~unnest(.)) 

cfa_resp_names %>% map(.,~ggplot(coefs_enet_gfactor[[.]], aes(x = fct_reorder(roiShort, wmean), 
             y = wmean, 
             ymax = wmean + 2 * wsd, 
             ymin = wmean - 2 * wsd,
             col = type)) +
  geom_pointrange(fatten = 0.5, key_glyph = 'point') +
  scale_y_continuous(labels = numform::ff_num(zero = 0, digits = 2)) + 
  scale_color_grey(start = 0.7, end = 0.5) +
  coord_flip() +
  guides(colour = guide_legend(override.aes = list(size = 2.5)))+
  labs(x = 'Explanatory Variables (Brain Regions)', 
       y = 'Averaged Coefficient Across Models (±2 Std. dev)', col = 'Model type',
       title = paste0("G-Factor","\nElastic Net Coefficients (p < .05)")) +
  facet_wrap(~ direction, scales = 'free_y') +
  scale_color_manual(values = c("#56B4E9", "black"),labels = c("Permuted Null", "Target")) +     
  theme_bw() +  
  theme(legend.title=element_blank()) +  
  theme(legend.position = "top") + 
  theme(
    axis.title.x = element_text(size = 15),
    axis.text.x = element_text(size = 12),
    axis.title.y = element_text(size = 15),
    axis.text.y = element_text(size = 12),
    legend.text = element_text(size = 15),
    plot.title = element_text(size=15)) +
  theme(
    strip.background = element_blank(),
    strip.text.x = element_blank()
) + theme(panel.grid.major = element_blank(), 
          panel.grid.minor = element_blank())
)
## $gfactor

4.3 Random Forest

4.3.1 extract the final workflow and best tuned parameters from random forest

random_forest_wfl_final_list_gfactor <- map(random_forest_tune_gfactor, 
                                            "random_forest_wf_final")

best_random_forest_model_list_gfactor <- map(random_forest_tune_gfactor, 
                                             "best_random_forest_model")

4.3.2 fit the random forest model with the best grid search parameters

random_forest_final_fit_gfactor <-pmap(list(recipe_gfactor,
                                            random_forest_wfl_final_list_gfactor, 
                                            formula_gfactor),
                          ~model_final_fit(test_data = gfactor_test_all,
                                           recipe_input = ..1,
                                           wf_input = ..2,
                                           formula_input = ..3,
                                           model_name = "random_forest")) 

4.3.3 extract the output of model prediction and model fit

random_forest_final_fit_list_gfactor <- map(random_forest_final_fit_gfactor, "random_forest_final_fit")
random_forest_predicted_list_gfactor <- map(random_forest_final_fit_gfactor, "random_forest_predict")

yardstick::mae(data = random_forest_predicted_list_gfactor$gfactor, 
               truth =.data$gfactor, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric .estimator .estimate
##   <chr>   <chr>          <dbl>
## 1 mae     standard       0.724
yardstick::rsq_trad(data = random_forest_predicted_list_gfactor$gfactor,
                    truth =.data$gfactor, 
                    estimate =.data$model_predict)
## # A tibble: 1 x 3
##   .metric  .estimator .estimate
##   <chr>    <chr>          <dbl>
## 1 rsq_trad standard       0.171

4.4 XgBoost

4.4.1 Get the final workflow and best tuned parameters from xgboost

xgboost_wfl_final_list_gfactor <- map(xgboost_tune_gfactor, "xgboost_wf_final")
best_xgboost_model_list_gfactor <- map(xgboost_tune_gfactor, "best_xgboost_model")

4.4.2 fit the model with the best grid parameter

xgboost_final_fit_gfactor <-future_pmap(list(recipe_gfactor,best_xgboost_model_list_gfactor, cfa_resp_names),
                          ~xgboost_model_pred(
                            recipe_input=..1,
                            param_input=..2,
                            resp_input=..3),
                          .options = furrr::furrr_options(seed = 123456)) 

4.4.3 extract the output

https://www.rdocumentation.org/packages/xgboost/versions/0.71.2/topics/predict.xgb.Booster

Setting predcontrib = TRUE allows to calculate contributions of each feature to individual predictions. For “gbtree” booster, feature contributions are SHAP values (Lundberg 2017) that sum to the difference between the expected output of the model and the current prediction (where the hessian weights are used to compute the expectations).

The “BIAS” column is the model’s baseline (expected) output, to which the per-feature SHAP values are added.
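A minimal, self-contained illustration of this additivity (using the classic xgboost R interface and its bundled example data, not the document's model):

library(xgboost)
# per-row SHAP contributions (last column is "BIAS") sum to the margin prediction
data(agaricus.train, package = "xgboost")
bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
               nrounds = 5, objective = "binary:logistic", verbose = 0)
contrib <- predict(bst, agaricus.train$data, predcontrib = TRUE)
margin  <- predict(bst, agaricus.train$data, outputmargin = TRUE)
all.equal(unname(rowSums(contrib)), margin)
## [1] TRUE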

xgboost_final_fit_list_gfactor <- map(xgboost_final_fit_gfactor, "xgboost_final_fit")
xgboost_predicted_list_gfactor <- map(xgboost_final_fit_gfactor, "xgboost_predict")
xgboost_predicted_train_list_gfactor <- map(xgboost_final_fit_gfactor, "xgboost_predict_train")
xgboost_shap_list_gfactor <- map(xgboost_final_fit_gfactor, "xgboost_shap_plot")

yardstick::mae(data = xgboost_predicted_list_gfactor$gfactor, 
               truth =.data$gfactor, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric .estimator .estimate
##   <chr>   <chr>          <dbl>
## 1 mae     standard       0.729
yardstick::rsq_trad(data = xgboost_predicted_list_gfactor$gfactor, 
                    truth =.data$gfactor, 
                    estimate =.data$model_predict)
## # A tibble: 1 x 3
##   .metric  .estimator .estimate
##   <chr>    <chr>          <dbl>
## 1 rsq_trad standard       0.166

4.5 Linear SVM

4.5.1 Get the final workflow and best tuned parameters from linear SVM

svm_linear_wfl_final_list_gfactor <- map(svm_linear_tune_gfactor, "svm_linear_wf_final")
best_svm_linear_model_list_gfactor <- map(svm_linear_tune_gfactor, "best_svm_linear_model")

4.5.2 fit the model with the best grid parameter for linear SVM

svm_linear_final_fit_gfactor <-pmap(list(recipe_gfactor,svm_linear_wfl_final_list_gfactor, formula_gfactor),
                          ~model_final_fit(test_data = gfactor_test_all, 
                                           recipe_input = ..1,
                                           wf_input = ..2,
                                           formula_input = ..3,
                                           model_name = "svm_linear")) 

4.5.3 extract the output of model fit and prediction

svm_linear_final_fit_list_gfactor <- map(svm_linear_final_fit_gfactor, "svm_linear_final_fit")
svm_linear_predicted_list_gfactor <- map(svm_linear_final_fit_gfactor, "svm_linear_predict")


yardstick::mae(data = svm_linear_predicted_list_gfactor$gfactor, 
               truth =.data$gfactor, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric .estimator .estimate
##   <chr>   <chr>          <dbl>
## 1 mae     standard       0.720
yardstick::rsq_trad(data = svm_linear_predicted_list_gfactor$gfactor, 
                    truth =.data$gfactor, 
                    estimate =.data$model_predict)
## # A tibble: 1 x 3
##   .metric  .estimator .estimate
##   <chr>    <chr>          <dbl>
## 1 rsq_trad standard       0.182

4.6 RBF SVM

4.6.1 Extract the final workflow and best tuned parameters from RBF SVM

SVM_RBF_wfl_final_list_gfactor <- map(svm_rbf_tune_gfactor, "svm_rbf_wf_final")
best_SVM_RBF_model_list_gfactor <- map(svm_rbf_tune_gfactor, "best_svm_rbf_model")%>% print()
## $gfactor
## # A tibble: 1 x 4
##    cost rbf_sigma margin .config                
##   <dbl>     <dbl>  <dbl> <chr>                  
## 1  1.64  0.000464  0.193 Preprocessor1_Model4301

4.6.2 fit the model with the best grid search results

SVM_RBF_final_fit_gfactor <-pmap(list(recipe_gfactor,SVM_RBF_wfl_final_list_gfactor,formula_gfactor),
                                ~model_final_fit(test_data = gfactor_test_all,
                                                 recipe_input = ..1,
                                                 wf_input = ..2,
                                                 formula_input = ..3,
                                                 model_name = "SVM_RBF")) 

4.6.3 extract the output of model fit and prediction for RBF SVM

SVM_RBF_final_fit_list_gfactor <- map(SVM_RBF_final_fit_gfactor, "SVM_RBF_final_fit")
SVM_RBF_predicted_list_gfactor <- map(SVM_RBF_final_fit_gfactor, "SVM_RBF_predict")

yardstick::mae(data = SVM_RBF_predicted_list_gfactor$gfactor, 
               truth =.data$gfactor, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric .estimator .estimate
##   <chr>   <chr>          <dbl>
## 1 mae     standard       0.717
yardstick::rsq_trad(data = SVM_RBF_predicted_list_gfactor$gfactor, 
                    truth =.data$gfactor, 
                    estimate =.data$model_predict)
## # A tibble: 1 x 3
##   .metric  .estimator .estimate
##   <chr>    <chr>          <dbl>
## 1 rsq_trad standard       0.188

4.7 Polynomial SVM

4.7.1 Get the final workflow and best tuned parameters from polynomial svm

svm_poly_wfl_final_list_gfactor <- map(svm_poly_tune_gfactor, "svm_poly_wf_final")
best_svm_poly_model_list_gfactor <- map(svm_poly_tune_gfactor, "best_svm_poly_model")

4.7.2 fit the model with best grid parameters

svm_poly_final_fit_gfactor <-pmap(list(recipe_gfactor,svm_poly_wfl_final_list_gfactor, formula_gfactor),
                          ~model_final_fit(test_data = gfactor_test_all,
                                           recipe_input = ..1,
                                           wf_input = ..2,
                                           formula_input = ..3,
                                           model_name = "svm_poly")) 

4.7.3 extract the output of prediction and model fit

svm_poly_final_fit_list_gfactor <- map(svm_poly_final_fit_gfactor, "svm_poly_final_fit")
svm_poly_predicted_list_gfactor <- map(svm_poly_final_fit_gfactor, "svm_poly_predict")



yardstick::mae(data = svm_poly_predicted_list_gfactor$gfactor, 
               truth =.data$gfactor, 
               estimate =.data$model_predict  )
## # A tibble: 1 x 3
##   .metric .estimator .estimate
##   <chr>   <chr>          <dbl>
## 1 mae     standard       0.720
yardstick::rsq_trad(data = svm_poly_predicted_list_gfactor$gfactor, 
                    truth =.data$gfactor, 
                    estimate =.data$model_predict)
## # A tibble: 1 x 3
##   .metric  .estimator .estimate
##   <chr>    <chr>          <dbl>
## 1 rsq_trad standard       0.183

5 Bootstrapping performance metrics for each algorithm

5.1 functions for the bootstrapped performance metrics

The following function computes the performance statistics for a single algorithm.

perfmatrics <-function(data,i){
  # boot() passes the resampled row indices in i, so subset the data first
  d <- data[i, ]
  
  cor_model <- cor(d$model_pred,
                   d$model_resp,
                   use = "pairwise.complete.obs")
  
  tradrsq_model <- yardstick::rsq_trad(data = d, 
                                       truth = .data$model_resp, 
                                       estimate = .data$model_pred)
  
  mae_model <- yardstick::mae(data = d, 
                              truth = .data$model_resp, 
                              estimate = .data$model_pred)
  
  rmse_model <- yardstick::rmse(data = d, 
                                truth = .data$model_resp, 
                                estimate = .data$model_pred)
  
  return(c(cor_model,  tradrsq_model$.estimate , mae_model$.estimate, rmse_model$.estimate))
}

5.2 Bootstrap the real values and the predicted values

Resample the observed and predicted values 5,000 times and compute the performance statistics with the function above.

set.seed(123456)

boot_result_list <- function(model_pred, 
                             resp_var){
  
  model_results <- select(model_pred,
                          -starts_with("roi_")) 

  names(model_results) <- c("model_pred","model_resp")
  
#  library(doSNOW)
#cl <- makeCluster(c("localhost","localhost"),
#                  type = "SOCK")
#registerDoSNOW(cl=cl)
  
  
  DiffResults <- boot::boot(data = model_results,
           statistic = perfmatrics,
           R = 5000,
          # parallel="snow",
          # ncpus=20,
           #cl=cl
          )

return(DiffResults)
}
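The object returned by boot() keeps the bootstrap replicates in $t as an R x 4 matrix, one column per statistic in the order perfmatrics returns them (correlation, traditional R-squared, MAE, RMSE); the $t[, k] indexing used below relies on this. A quick illustrative check on synthetic data:

# illustrative only: random data in the model_pred / model_resp layout
toy <- tibble::tibble(model_pred = rnorm(200), model_resp = rnorm(200))
toy_boot <- boot::boot(data = toy, statistic = perfmatrics, R = 100)
dim(toy_boot$t)
## [1] 100   4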

5.3 Bootstrap metrics for all algorithms

bootstrap function for the mass univariate models

The procedure of this function is:
1. select the significant ROIs (those that passed the FDR or Bonferroni correction)
2. within each ROI, bootstrap 5,000 times
3. compute the performance statistics from each ROI's bootstrap
4. group the performance statistics of all significant ROIs together

set.seed(123456)

boot_results_process <- function(boot_input,metric_idx){
    boot_results <- boot_input[,metric_idx]
    return(boot_results)}

uni_performance_all <- function(resp_input, 
                                model_input, 
                                pred_input,
                                univar_input){
  
  roi_left <- model_input[["roi"]] 
  
  pred_selected <- pred_input %>% 
    select(all_of(roi_left), 
           all_of(resp_input))
  
  pred_list <- roi_left %>% 
    map(.,~select(pred_input, ., resp_input))
  
  pred_list <- pred_list%>% 
    map(.,function(pred_input=.){
    names(pred_input)= c("model_pred", resp_input)
    return(pred_input)
  })
  
  boot_list <- pred_list %>% 
    furrr::future_map(.,~boot_result_list(model_pred = .,
                                 resp_var =resp_input),
                             .options = furrr::furrr_options(seed = 123456)) 
  
  boot_results <- map(boot_list,"t")
  
  boot_corr <- map(boot_results,
                   ~boot_results_process(boot_input = ., 
                                         metric_idx = 1))%>%
    do.call(cbind,.) %>%
              as.vector()%>%
              tibble::as_tibble()%>%
              mutate(modality = rep(univar_input,5000*length(roi_left)))
  
  boot_tradrsq <- map(boot_results,
                      ~boot_results_process(boot_input = ., 
                                            metric_idx = 2))%>%
              do.call(rbind,.)%>%
              as.vector()%>%
              tibble::as_tibble()%>%
              mutate(modality = rep(univar_input,5000*length(roi_left)))
  
  boot_mae <- map(boot_results,
                  ~boot_results_process(boot_input = ., 
                                        metric_idx = 3))%>%
              do.call(rbind,.)%>%
              as.vector()%>%
              tibble::as_tibble()%>%
              mutate(modality = rep(univar_input,5000*length(roi_left)))
  
  boot_rmse <- map(boot_results,
                   ~boot_results_process(boot_input = ., 
                                         metric_idx = 4))%>%
              do.call(rbind,.)%>%
              as.vector()%>%
              tibble::as_tibble()%>%
              mutate(modality = rep(univar_input,5000*length(roi_left)))
  
 return( list (corr = boot_corr, 
               tradrsq = boot_tradrsq, 
               mae= boot_mae, 
               rmse= boot_rmse))
 
}

5.4 run the bootstrap for the mass univariate models

univar_fdr_boot <-furrr::future_pmap(list(resp_names,
                             univariate_model_fdr,
                             univariate_model_pred),
                               ~uni_performance_all(resp_input=..1, 
                                                    model_input=..2, 
                                                    pred_input=..3,
                                                    univar_input = "fdr"),
                             .options = furrr::furrr_options(seed = 123456) )

univar_bonferroni_boot <- furrr::future_pmap(list(resp_names,
                                    univariate_model_bonferroni,
                                    univariate_model_pred),
                               ~uni_performance_all(resp_input=..1, 
                                                    model_input=..2, 
                                                    pred_input=..3, 
                                                    univar_input = "bonferroni") ,
                             .options = furrr::furrr_options(seed = 123456))


#stopCluster(cl)


uni_boot <- list(fdr= univar_fdr_boot, 
                 bonferroni = univar_bonferroni_boot)

5.5 bootstrap for OLS, elastic net, linear SVM, RBF SVM, polynomial SVM, random forest and xgboost

#library(doSNOW)
#cl <- makeCluster(c("localhost","localhost"),
#                  type = "SOCK")
#registerDoSNOW(cl=cl)


boot_ols <- furrr::future_pmap(list(resp_names,
                             OLS_predict_list),
                        ~boot_result_list(
                          resp_var  = ..1,
                          model_pred  = ..2),
                        .options = furrr::furrr_options(seed = 123456)) 


boot_enet <-furrr::future_pmap(list(resp_names,
                              enet_predicted_list),
                                ~boot_result_list(
                                  resp_var  = ..1,
                                  model_pred  = ..2),
                             .options = furrr::furrr_options(seed = 123456)) 

boot_svm_linear <- furrr::future_pmap(list(resp_names,
                                    svm_linear_predicted_list),
                               ~boot_result_list(
                                 resp_var  = ..1,
                                 model_pred  = ..2),
                             .options = furrr::furrr_options(seed = 123456)) 


boot_random_forest <- furrr::future_pmap(list(resp_names,
                                       random_forest_predicted_list),
                                ~boot_result_list(
                                  resp_var  = ..1,
                                  model_pred  = ..2),
                             .options = furrr::furrr_options(seed = 123456)) 

boot_xgboost <- furrr::future_pmap(list(resp_names,xgboost_predicted_list),
                                ~boot_result_list(
                                  resp_var  = ..1,
                                  model_pred  = ..2),
                             .options = furrr::furrr_options(seed = 123456)) 


boot_svm_RBF <- furrr::future_pmap(list(resp_names,SVM_RBF_predicted_list),
                                ~boot_result_list(
                                  resp_var  = ..1,
                                  model_pred  = ..2),
                             .options = furrr::furrr_options(seed = 123456)) 


boot_svm_poly <- furrr::future_pmap(list(resp_names,svm_poly_predicted_list),
                                ~boot_result_list(
                                  resp_var  = ..1,
                                  model_pred  = ..2),
                             .options = furrr::furrr_options(seed = 123456)) 

bootstrapping_list <- list(OLS = boot_ols, 
                           enet = boot_enet, 
                           svm_linear = boot_svm_linear,
                           random_forest = boot_random_forest, 
                           xgboost = boot_xgboost, 
                           svm_RBF = boot_svm_RBF, 
                           svm_poly= boot_svm_poly)

saveRDS(bootstrapping_list, paste0(anotherFold,'working_memory_tasks/bootstrapping_results_all_April_22_2022_rmse', '.RData'))


#stopCluster(cl)

5.6 plotting the performance metrics

Group the bootstrapped performance statistics and plot them across all of the working memory tasks.

algorithm_vec <- names(bootstrapping_list)

algorithm_names <- tibble(vec_names = algorithm_vec, 
                          plotting_names = c("OLS", 
                                             "Elastic\nNet", 
                                             "Linear\nSVM", 
                                             "Random\nForest",
                                             "Xgboost",
                                             "RBF\nSVM",
                                             "Polynomial\nSVM"))

boot_across_algorithms <- function(resp_input, boot_input){
  
  one_resp_cor <- map(.x= algorithm_vec, 
                      function(algorithm_input=.x){
                        one_algotithm_cor <- boot_input[[algorithm_input]][[resp_input]]$t[,1]%>%
                          tibble::as_tibble()%>%
                          mutate(modality = rep(algorithm_names$plotting_names[[which(algorithm_names$vec_names==algorithm_input)]],5000))
  })%>%
    do.call(rbind,.)
  
  one_resp_tradrsq <- map(.x= algorithm_vec,
                          function(algorithm_input){
                            one_algotithm_tradrsq <- boot_input[[algorithm_input]][[resp_input]]$t[,2]%>%
                              tibble::as_tibble()%>%
                              mutate(modality = rep(algorithm_names$plotting_names[[which(algorithm_names$vec_names==algorithm_input)]],5000))
  })%>%
    do.call(rbind,.)
  
  one_resp_mae <- map(.x= algorithm_vec, 
                      function(algorithm_input){
                        
    one_algotithm_mae <- 
      boot_input[[algorithm_input]][[resp_input]]$t[,3]%>%
      tibble::as_tibble()%>%
      mutate(modality = rep(algorithm_names$plotting_names[[which(algorithm_names$vec_names==algorithm_input)]], 5000))
  })%>%
    do.call(rbind,.)
  
  one_resp_rmse <- map(.x= algorithm_vec, function(algorithm_input){
    one_algotithm_tradrsq <- boot_input[[algorithm_input]][[resp_input]]$t[,4]%>%
      tibble::as_tibble()%>%
      mutate(modality = rep(algorithm_names$plotting_names[[which(algorithm_names$vec_names==algorithm_input)]], 5000))
  })%>%do.call(rbind,.)
  
  return(list(correlation = one_resp_cor, 
              tradrsq = one_resp_tradrsq, 
              mae= one_resp_mae, 
              rmse = one_resp_rmse))
}


bootstrapping_resp_list <- map(.x= resp_names ,
                               ~boot_across_algorithms(resp_input = .x, 
                                                       boot_input = bootstrapping_list) )

boot_cbind <- function(index_input_1, 
                       index_input_2,
                       boot_uni_list,
                       boot_resp_input){
  
boot_output_other <- map(boot_resp_input,
                           index_input_1)
boot_output_fdr <- map(boot_uni_list[["fdr"]], 
                       index_input_2)
boot_output_bonf <- map(boot_uni_list[["bonferroni"]], 
                        index_input_2)
boot_output_all <- pmap(list(boot_output_other,
                             boot_output_fdr, 
                             boot_output_bonf), 
                        ~ rbind(boot_output_other=..1, 
                                boot_output_fdr=..2, 
                                boot_output_bonf=..3))
return(boot_output_all)
}

uni_recode <- function(data_input){
  data_input$modality <-  recode(data_input$modality, 
                                 fdr= "FDR", 
                                 bonferroni = "Bonferroni")
  return(data_input)
  }
boot_cor <- boot_cbind(index_input_1 = "correlation", 
                       index_input_2 = "corr",
                       boot_uni_list = uni_boot,
                       boot_resp_input = bootstrapping_resp_list)

boot_cor <- boot_cor %>% 
  map(.,~uni_recode(data_input = .))

boot_tradrsq <- boot_cbind(index_input_1 = "tradrsq", 
                           index_input_2 = "tradrsq", 
                           boot_uni_list = uni_boot,
                           boot_resp_input = bootstrapping_resp_list)

boot_tradrsq <- boot_tradrsq %>%
  map(.,~uni_recode(data_input = .))

boot_rmse <- boot_cbind(index_input_1 = "rmse", 
                        index_input_2 = "rmse",
                        boot_uni_list = uni_boot,
                        boot_resp_input = bootstrapping_resp_list)

boot_rmse <- boot_rmse %>% map(.,~uni_recode(data_input = .))

boot_mae <- boot_cbind(index_input_1 = "mae", 
                       index_input_2 = "mae",
                       boot_uni_list = uni_boot,
                       boot_resp_input = bootstrapping_resp_list)

boot_mae <- boot_mae %>% 
  map(.,~uni_recode(data_input = .))

change_factor <- function(data_input){
  data_input <- data_input %>%
   mutate(modality = factor(modality,
                            levels =c ("FDR","Bonferroni","OLS","Elastic\nNet",
                                       "Random\nForest","Xgboost", "Linear\nSVM",
                                       "Polynomial\nSVM" ,"RBF\nSVM")))
  return(data_input)
}

density_plot <- function(data_input, metric_input){
  data_input %>%
  ggplot(aes(y = modality, x = value)) +
  stat_halfeye(aes(fill = stat(cut_cdf_qi(cdf)))) +
  scale_fill_brewer(direction = -1) +
  labs(x = NULL, 
       y = NULL,
       title = paste0(metric_input))+ 
  theme(legend.position = "none")
}


interval_plot <- function(data_input, metric_input){
  data_input %>%
  ggplot(aes(y = modality, x = value))  +
  stat_interval() +
  scale_color_brewer() +
  labs(x = NULL, 
       y = NULL,
       title = paste0(metric_input))+ 
  theme(legend.position = "none")
}
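For example, one interval plot for a single metric and task (an illustrative call using the objects built above):

# one interval plot for the first task's bootstrapped correlations
interval_plot(data_input = boot_cor[[1]], metric_input = "Pearson's correlation")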

boot_cor <- boot_cor %>% map(.,~change_factor(data_input = .))


boot_tradrsq <- boot_tradrsq %>% map(.,~change_factor(data_input = .))


boot_mae <- boot_mae %>% map(.,~change_factor(data_input = .))


boot_rmse <- boot_rmse %>% map(.,~change_factor(data_input = .))


metric_vec <- c("Pearson's correlation","Traditional r squared", 
                "Mean Absolute Error","Root Mean Square Error")

metrics_plot_all <- function(resp_input){
  metric_list <- list(corr=boot_cor[[resp_input]], 
                      tradrsq = boot_tradrsq[[resp_input]],
                      mae=boot_mae[[resp_input]], 
                      rmse=boot_rmse[[resp_input]] ) 
 
plot_list <- map2(.x= metric_vec, 
                  .y = metric_list, 
                  ~interval_plot(data_input = .y, 
                                 metric_input = .x))

plot_list[[1]] <- plot_list[[1]]+
  theme(axis.title.y=element_text(size=10),
        axis.text.y=element_text(size=10))

plot_list[[3]] <- plot_list[[3]]+
  theme(axis.title.y=element_text(size=10),
        axis.text.y=element_text(size=10))

plot_list[[2]] <- plot_list[[2]]+
  theme(axis.title.y=element_blank(),
        axis.text.y=element_blank(),
        axis.ticks.y=element_blank())

plot_list[[4]] <- plot_list[[4]]+
  theme(axis.title.y=element_blank(),
        axis.text.y=element_blank(),
        axis.ticks.y=element_blank())
title_plot <- ggdraw() + 
  draw_label(
   paste0("Bootstrapped performance metrics of ", resp_var_plotting$longer_name[which(resp_var_plotting$response==resp_input)] ) ,
    fontface = 'bold',
    x = 0,
    hjust = 0
  ) +
  theme(
    # add margin on the left of the drawing canvas,
    # so title is aligned with left edge of first plot
    plot.margin = margin(0, 0, 0, 7)
  )

metric_figure<- plot_grid(title_plot,
                          plot_grid(plotlist = plot_list),
                          nrow = 2 , 
                          rel_heights = c(0.1, 1)) 
return(metric_figure)
}

resp_names %>% map(.,
                   ~metrics_plot_all(resp_input = .))%>%
  print()
## (one bootstrapped-performance figure per response)
## $TFMRI_NB_ALL_BEH_C2B_RATE     $NIHTBX_PICVOCAB_UNCORRECTED   $NIHTBX_FLANKER_UNCORRECTED
## $NIHTBX_LIST_UNCORRECTED       $NIHTBX_CARDSORT_UNCORRECTED   $NIHTBX_PATTERN_UNCORRECTED
## $NIHTBX_PICTURE_UNCORRECTED    $NIHTBX_READING_UNCORRECTED    $LMT_SCR_PERC_CORRECT
## $PEA_RAVLT_LD_TRIAL_VII_TC     $PEA_WISCV_TRS

5.7 bootstrapping results for the gfactor

The same procedure as for all of the other working memory tasks.

# future_map got stuck here, so these bootstraps are run sequentially with pmap
univar_fdr_boot_gfactor <- pmap(list(cfa_resp_names,
                                     univariate_model_fdr_gfactor,
                                     univariate_model_pred_gfactor),
                               ~uni_performance_all(resp_input=..1, 
                                                    model_input=..2, 
                                                    pred_input=..3,
                                                    univar_input = "fdr") )

univar_bonferroni_boot_gfactor <- pmap(list(cfa_resp_names,
                                            univariate_model_bonferroni_gfactor,
                                            univariate_model_pred_gfactor),
                               ~uni_performance_all(resp_input=..1, 
                                                    model_input=..2, 
                                                    pred_input=..3, 
                                                    univar_input = "bonferroni") )

uni_boot_gfactor <- list(fdr= univar_fdr_boot_gfactor, 
                         bonferroni = univar_bonferroni_boot_gfactor)

saveRDS(uni_boot_gfactor, paste0(anotherFold,'working_memory_tasks/windows/uni_boot_gfactor_Nov_12_2021', '.RData'))
#  library(doSNOW)
#cl <- makeCluster(c("localhost","localhost"),
#                  type = "SOCK")
#registerDoSNOW(cl=cl)


boot_ols_gfactor <- furrr::future_pmap(list(cfa_resp_names,OLS_predict_list_gfactor),
                                ~boot_result_list(resp_var  = ..1,
                                                  model_pred  = ..2),
                             .options = furrr::furrr_options(seed = 123456)) 


boot_enet_gfactor <- furrr::future_pmap(list(cfa_resp_names,enet_predicted_list_gfactor),
                                ~boot_result_list(resp_var  = ..1,
                                                  model_pred  = ..2),
                             .options = furrr::furrr_options(seed = 123456)) 


boot_svm_linear_gfactor <- furrr::future_pmap(list(cfa_resp_names,svm_linear_predicted_list_gfactor),
                                ~boot_result_list(resp_var  = ..1,
                                                  model_pred  = ..2),
                             .options = furrr::furrr_options(seed = 123456)) 


boot_random_forest_gfactor <- furrr::future_pmap(list(cfa_resp_names,random_forest_predicted_list_gfactor),
                                ~boot_result_list(resp_var  = ..1,
                                                  model_pred  = ..2),
                             .options = furrr::furrr_options(seed = 123456)) 

boot_xgboost_gfactor <- furrr::future_pmap(list(cfa_resp_names,xgboost_predicted_list_gfactor),
                                ~boot_result_list(resp_var  = ..1,
                                                  model_pred  = ..2),
                             .options = furrr::furrr_options(seed = 123456)) 


boot_svm_RBF_gfactor <- furrr::future_pmap(list(cfa_resp_names,SVM_RBF_predicted_list_gfactor),
                                ~boot_result_list(resp_var  = ..1,
                                                  model_pred  = ..2),
                             .options = furrr::furrr_options(seed = 123456)) 


boot_svm_poly_gfactor <- furrr::future_pmap(list(cfa_resp_names,svm_poly_predicted_list_gfactor),
                                ~boot_result_list(resp_var  = ..1,
                                                  model_pred  = ..2),
                             .options = furrr::furrr_options(seed = 123456)) 

#stopCluster(cl)



bootstrapping_list_gfactor <- list(OLS = boot_ols_gfactor, 
                                   enet = boot_enet_gfactor, 
                                   svm_linear = boot_svm_linear_gfactor,
                           random_forest = boot_random_forest_gfactor, 
                           xgboost = boot_xgboost_gfactor, 
                           svm_RBF = boot_svm_RBF_gfactor, 
                           svm_poly= boot_svm_poly_gfactor)



saveRDS(bootstrapping_list_gfactor, paste0(anotherFold,'working_memory_tasks/windows/bootstrapping_list_gfactor_Mar_22_2022_rmse', '.RData'))
bootstrapping_resp_list_gfactor <- map(.x= cfa_resp_names ,
                               ~boot_across_algorithms(resp_input = .x, 
                                                       boot_input = bootstrapping_list_gfactor) )


boot_cor_gfactor <- boot_cbind(index_input_1 = "correlation", 
                               index_input_2 = "corr",
                       boot_uni_list = uni_boot_gfactor,
                       boot_resp_input = bootstrapping_resp_list_gfactor)

boot_cor_gfactor <- boot_cor_gfactor %>% map(.,~uni_recode(data_input = .))

boot_tradrsq_gfactor <- boot_cbind(index_input_1 = "tradrsq", 
                                   index_input_2 = "tradrsq", 
                                   boot_uni_list = uni_boot_gfactor,
                                   boot_resp_input = bootstrapping_resp_list_gfactor)

boot_tradrsq_gfactor <- boot_tradrsq_gfactor %>% map(.,~uni_recode(data_input = .))

boot_rmse_gfactor <- boot_cbind(index_input_1 = "rmse", 
                                index_input_2 = "rmse",
                                boot_uni_list = uni_boot_gfactor,
                                boot_resp_input = bootstrapping_resp_list_gfactor)
boot_rmse_gfactor <- boot_rmse_gfactor %>% map(.,~uni_recode(data_input = .))

boot_mae_gfactor <- boot_cbind(index_input_1 = "mae", 
                               index_input_2 = "mae",
                               boot_uni_list = uni_boot_gfactor,
                               boot_resp_input = bootstrapping_resp_list_gfactor)
boot_mae_gfactor <- boot_mae_gfactor %>% map(.,~uni_recode(data_input = .))


boot_cor_gfactor_tibble <- boot_cor_gfactor[["gfactor"]]%>% 
  tibble::as_tibble()%>%
  mutate(response = "gfactor")

boot_tradrsq_gfactor_tibble <- boot_tradrsq_gfactor[["gfactor"]]%>% 
  tibble::as_tibble()%>%
  mutate(response = "gfactor")

boot_rmse_gfactor_tibble <- boot_rmse_gfactor[["gfactor"]]%>% 
  tibble::as_tibble()%>%
  mutate(response = "gfactor")

boot_mae_gfactor_tibble <- boot_mae_gfactor[["gfactor"]]%>% 
  tibble::as_tibble()%>%
  mutate(response = "gfactor")

5.8 combine the response plots into one

Combine the g-factor and all of the other working memory tasks and plot them in a single figure.

boot_list_processing <- function(resp_input ,data_input  ){
  resp_short_name <-  resp_var_plotting$short_name[which(resp_var_plotting$response==resp_input)]
  output_tibble <-data_input %>% 
    mutate(response = resp_short_name) 
  return(output_tibble)
}


boot_cor_tibble <- map2(.x=resp_names,
                        .y= boot_cor,
                        ~boot_list_processing(resp_input=.x,
                                              data_input=.y))%>%
  do.call(rbind,.)
boot_tradrsq_tibble <- map2(.x=resp_names,
                            .y= boot_tradrsq,
                            ~boot_list_processing(resp_input=.x,
                                                  data_input=.y))%>%
  do.call(rbind,.)
boot_mae_tibble <- map2(.x=resp_names,
                        .y= boot_mae,
                        ~boot_list_processing(resp_input=.x,
                                              data_input=.y))%>%
  do.call(rbind,.)
boot_rmse_tibble <- map2(.x=resp_names,
                         .y= boot_rmse,
                         ~boot_list_processing(resp_input=.x,
                                               data_input=.y))%>%
  do.call(rbind,.)


boot_cor_all <- bind_rows(boot_cor_tibble, 
                          boot_cor_gfactor_tibble)%>%
  mutate(resp_factor= as.factor(response))%>%
  mutate(resp_factor = factor(response,
                              levels =c("Pattern Speed", "Audi Verbal",
                                        "Flanker","Seq Memory",
                                        "Card Sort", # "Cog Flex",
                                        "Little Man",
                                        "List Work Mem","Matrix Reason",
                                        "Reading Recog","Pic Vocab",
                                        "2-back Work Mem","gfactor" )))%>% 
  mutate(algorithm = modality)%>%
  mutate(algorithm = factor(algorithm,levels =c ("FDR","Bonferroni",
                                                 "OLS","Elastic\nNet",
                                                "Random\nForest","Xgboost", 
                                                "Linear\nSVM",
                                                "Polynomial\nSVM" ,"RBF\nSVM")))

boot_tradrsq_all <- bind_rows(boot_tradrsq_tibble, boot_tradrsq_gfactor_tibble)%>%
  mutate(resp_factor= as.factor(response))%>%
  mutate(resp_factor = factor(response,levels =c("Pattern Speed", "Audi Verbal",
                                                 "Flanker","Seq Memory",
                                                 "Card Sort", # "Cog Flex",
                                                 "Little Man",
                                                 "List Work Mem","Matrix Reason",
                                                 "Reading Recog","Pic Vocab", 
                                                 "2-back Work Mem","gfactor" )))%>%
  mutate(algorithm = modality)%>%
  mutate(algorithm = factor(algorithm,levels =c ("FDR","Bonferroni","OLS","Elastic\nNet",
                                                "Random\nForest","Xgboost", "Linear\nSVM",
                                                "Polynomial\nSVM" ,"RBF\nSVM")))

boot_rmse_all <- bind_rows(boot_rmse_tibble, boot_rmse_gfactor_tibble)%>%
  mutate(resp_factor= as.factor(response))%>%
  mutate(resp_factor = factor(response,levels =c("Pattern Speed", "Audi Verbal",
                                                 "Flanker","Seq Memory",
                                                 "Card Sort", # "Cog Flex",
                                                 "Little Man",
                                                 "List Work Mem","Matrix Reason",
                                                 "Reading Recog","Pic Vocab", 
                                                 "2-back Work Mem","gfactor" )))%>%
  mutate(algorithm = modality)%>%
   mutate(algorithm = factor(algorithm,levels =c ("FDR","Bonferroni",
                                                  "OLS","Elastic\nNet",
                                                "Random\nForest","Xgboost", 
                                                "Linear\nSVM",
                                                "Polynomial\nSVM" ,"RBF\nSVM")))

boot_mae_all <- bind_rows(boot_mae_tibble, boot_mae_gfactor_tibble)%>%
  mutate(resp_factor= as.factor(response))%>%
  mutate(resp_factor = factor(response,levels =c("Pattern Speed", "Audi Verbal",
                                                 "Flanker","Seq Memory",
                                                 "Card Sort", # "Cog Flex",
                                                 "Little Man",
                                                 "List Work Mem","Matrix Reason",
                                                 "Reading Recog","Pic Vocab", 
                                                 "2-back Work Mem","gfactor" )))%>%
  mutate(algorithm = modality)%>%
   mutate(algorithm = factor(algorithm,levels =c ("FDR","Bonferroni",
                                                  "OLS","Elastic\nNet",
                                                "Random\nForest","Xgboost", 
                                                "Linear\nSVM",
                                                "Polynomial\nSVM" ,"RBF\nSVM")))


boot_all_metrix <- list(Correlation= boot_cor_all, 
                        Traditional_Rsquare= boot_tradrsq_all, 
                        MAE = boot_mae_all, 
                        RMSE= boot_rmse_all)



color_boot_plot <- c(RColorBrewer::brewer.pal(n = 8, name = "Dark2"),
                     RColorBrewer::brewer.pal(n = 8, name = "Reds")[8])




boot_plot_list <-  map2(.x=boot_all_metrix,
                        .y = metric_vec, 
                        ~ggplot(data=.x,
                                aes(y = value, 
                                    x = resp_factor, 
                                    color= algorithm)) +
                          ggtitle(.y)+
                          stat_pointinterval(position = position_dodge(width = 2, 
                                                                       preserve = "single"))+
                          geom_hline(yintercept = 0, color = "grey55", linetype = "dashed",size=1.5) +
                          coord_flip()+
                          theme(plot.title = element_text(size=15),
                                axis.title.x = element_blank(),
                                axis.title.y = element_blank(), 
                                axis.text.x = element_text(size = 12),
                                axis.text.y = element_text(size=12),
                                legend.position = "bottom",
                                legend.text=element_text(size=12),
                                legend.title=element_text(size=15))+
                          guides(color = guide_legend(override.aes = list(size = 10)))+
                               scale_color_manual(values=color_boot_plot,
                               labels = c ("FDR","Bonferroni",
                                                 "OLS","Elastic\nNet",
                                                 "Random\nForest","Xgboost", 
                                                 "Linear\nSVM",
                                                 "Polynomial\nSVM" ,"RBF\nSVM"))
                                                  )

# Redraw the MAE and RMSE panels without the zero reference line:
# zero is a meaningful baseline for correlation and R-squared, but not
# for error magnitudes.
boot_plot_list[3:4] <-  map2(.x=boot_all_metrix[3:4],
                        .y = metric_vec[3:4], 
                        ~ggplot(data=.x,
                                aes(y = value, 
                                    x = resp_factor, 
                                    color= algorithm)) +
                          ggtitle(.y)+
                          stat_pointinterval(position = position_dodge(width = 2, 
                                                                       preserve = "single"))+
                          coord_flip()+
                          theme(plot.title = element_text(size=15),
                                axis.title.x = element_blank(),
                                axis.title.y = element_blank(), 
                                axis.text.x = element_text(size = 12),
                                axis.text.y = element_text(size=12),
                                legend.position = "bottom",
                                legend.text=element_text(size=12),
                                legend.title=element_text(size=15))+
                          guides(color = guide_legend(override.aes = list(size = 10)))+
                               scale_color_manual(values=color_boot_plot,
                               labels = c ("FDR","Bonferroni",
                                                 "OLS","Elastic\nNet",
                                                 "Random\nForest","Xgboost", 
                                                 "Linear\nSVM",
                                                 "Polynomial\nSVM" ,"RBF\nSVM"))
                                                  )

# Extract the legend grob from a ggplot so that a single shared legend
# can be placed under all four panels.
get_legend <- function(a.gplot){
  tmp <- ggplot_gtable(ggplot_build(a.gplot))
  leg <- which(sapply(tmp$grobs, 
                      function(x) x$name) == "guide-box")
  legend <- tmp$grobs[[leg]]
  return(legend)}
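
Note that cowplot, loaded earlier, bundles an equivalent legend extractor, so the call below could also be written with it (shown commented out to avoid a duplicate assignment):

# equivalent one-liner using cowplot's bundled helper:
# boot_plot_legend <- cowplot::get_legend(boot_plot_list[[1]])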

boot_plot_legend <- get_legend(boot_plot_list[[1]])


title_boot_plot <- ggdraw() + 
  draw_label(
    "Bootstrapped Distribution of Predictive Performance",
    fontface = 'bold',
    x = 0,
    hjust = 0,
    size=21
  ) +
  theme(
    # add margin on the left of the drawing canvas,
    # so title is aligned with left edge of first plot
    plot.margin = margin(0, 0, 0, 3)
  )
plot_grid(title_boot_plot,ggpubr::ggarrange(plotlist =boot_plot_list, 
                  ncol = 2,
                  nrow = 2, 
                  common.legend = TRUE, 
                  legend = "bottom",
                  legend.grob = boot_plot_legend),nrow = 2 , rel_heights = c(0.2, 1))

Plot the bootstrapped intervals without the univariate (FDR and Bonferroni) algorithms:

boot_all_metrixnouni <- map(boot_all_metrix, ~filter(.x,.data[["algorithm"]]!="FDR")%>%
                            filter(.data[["algorithm"]]!="Bonferroni"))


boot_plot_listnouni <-  map2(.x=boot_all_metrixnouni,
                        .y = metric_vec, 
                        ~ggplot(data=.x,
                                aes(y = value, 
                                    x = resp_factor, 
                                    color= algorithm)) +
                          ggtitle(.y)+
                          stat_pointinterval(position = position_dodge(width = 2, 
                                                                       preserve = "single"))+
                          geom_hline(yintercept = 0, color = "grey55", linetype = "dashed",size=1.5) +
                          coord_flip()+
                          theme(plot.title = element_text(size=15),
                                axis.title.x = element_blank(),
                                axis.title.y = element_blank(), 
                                axis.text.x = element_text(size = 12),
                                axis.text.y = element_text(size=12),
                                legend.position = "bottom",
                                legend.text=element_text(size=12),
                                legend.title=element_text(size=15))+
                          guides(color = guide_legend(override.aes = list(size = 10)))+
                               scale_color_manual(values=color_boot_plot[-c(1,2)],
                               labels = c ("OLS","Elastic\nNet",
                                                 "Random\nForest","Xgboost", 
                                                 "Linear\nSVM",
                                                 "Polynomial\nSVM" ,"RBF\nSVM"))
                                                  )

# As above, redraw the MAE and RMSE panels without the zero reference line.
boot_plot_listnouni[3:4] <-  map2(.x=boot_all_metrixnouni[3:4],
                        .y = metric_vec[3:4], 
                        ~ggplot(data=.x,
                                aes(y = value, 
                                    x = resp_factor, 
                                    color= algorithm)) +
                          ggtitle(.y)+
                          stat_pointinterval(position = position_dodge(width = 2, 
                                                                       preserve = "single"))+
                          coord_flip()+
                          theme(plot.title = element_text(size=15),
                                axis.title.x = element_blank(),
                                axis.title.y = element_blank(), 
                                axis.text.x = element_text(size = 12),
                                axis.text.y = element_text(size=12),
                                legend.position = "bottom",
                                legend.text=element_text(size=12),
                                legend.title=element_text(size=15))+
                          guides(color = guide_legend(override.aes = list(size = 10)))+
                               scale_color_manual(values=color_boot_plot[-c(1,2)],
                               labels = c ("OLS","Elastic\nNet",
                                                 "Random\nForest","Xgboost", 
                                                 "Linear\nSVM",
                                                 "Polynomial\nSVM" ,"RBF\nSVM"))
                                                  )

boot_plot_legendnouni <- get_legend(boot_plot_listnouni[[1]])



plot_grid(title_boot_plot,ggpubr::ggarrange(plotlist =boot_plot_listnouni, 
                  ncol = 2,
                  nrow = 2, 
                  common.legend = TRUE, 
                  legend = "bottom",
                  legend.grob = boot_plot_legendnouni),nrow = 2 , rel_heights = c(0.2, 1))

5.8.1 Getting the summary quantiles of the bootstrapped metrics

resp_vec <- unique(boot_all_metrix[["Correlation"]][["response"]])
modality_vec <- unique(boot_all_metrix[["Correlation"]][["modality"]])

boot_one_resp_processing <- function(resp_input,data_input){
     one_resp <- filter(data_input, 
                        response == resp_input)
     
     quantile_one_resp <- one_resp%>% 
       group_by(modality)%>%
       summarise(quantile = c(0.025, 0.5, 0.975),
                 value = quantile(value, c(0.025,0.5,0.975)))%>% 
       ungroup()%>%
       pivot_wider(names_from = quantile,
                  values_from = value)
  
      mean_one_resp <- one_resp %>% 
        group_by(modality)%>%
        summarise(mean = mean(value))%>% 
        ungroup()
  
      metric_one_resp <- left_join(quantile_one_resp, 
                                   mean_one_resp, 
                                   by = "modality")%>% 
                         mutate(response = resp_input)
  return(metric_one_resp)
}   

boot_quantile_processing <- function(data_input){
     all_resp <- resp_vec %>% map(.,
                                  ~ boot_one_resp_processing(resp_input = .,
                                                             data_input))%>%
       do.call(rbind,.)
  return(all_resp)
}
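
The multi-row summarise() above relies on dplyr (>= 1.0) allowing each group to return several rows, one per quantile, which pivot_wider() then spreads into the 0.025, 0.5, and 0.975 columns. A minimal sketch with toy data (the toy tibble is ours):

# toy illustration of the multi-row summarise + pivot_wider pattern used above
toy <- tibble(modality = rep(c("A", "B"), each = 100),
              value = rnorm(200))
toy %>%
  group_by(modality) %>%
  summarise(quantile = c(0.025, 0.5, 0.975),
            value = quantile(value, c(0.025, 0.5, 0.975))) %>%
  ungroup() %>%
  pivot_wider(names_from = quantile, values_from = value)
# -> one row per modality, with columns `0.025`, `0.5`, and `0.975`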

metric_names <- names(boot_all_metrix)


kable_boot_metric <- boot_all_metrix %>% 
  map(.,
      ~boot_quantile_processing(data_input = .)%>%
        mutate(algorithm = modality)%>%
        mutate(algorithm = factor(algorithm,
                                  levels =c ("FDR","Bonferroni","OLS","Elastic\nNet",
                                                "Random\nForest","Xgboost", "Linear\nSVM",
                                                "Polynomial\nSVM" ,"RBF\nSVM"))))

kable_metric_vars <- colnames(kable_boot_metric[[1]])[-1]

kable_boot_metric_vars <- kable_boot_metric %>%
  map(.,~select(.,all_of(kable_metric_vars))) %>% 
  map(.,~arrange(.,match(algorithm, c("FDR","Bonferroni","OLS","Elastic\nNet",
                                                "Random\nForest","Xgboost", "Linear\nSVM",
                                                "Polynomial\nSVM" ,"RBF\nSVM"))) %>%
        arrange(desc(match(response,
                      c("Pattern Speed", 
                        "Audi Verbal",
                        "Flanker",
                        "Seq Memory",
                        "Card Sort", # "Cog Flex",
                        "Little Man",
                        "List Work Mem",
                        "Matrix Reason",
                        "Reading Recog",
                        "Pic Vocab", 
                        "2-back Work Mem",
                        "gfactor" )))) %>%
        mutate_if(is.numeric, round, 3) %>%
        relocate(response,algorithm))
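
The four metric tables are rendered one chunk at a time below. Equivalently, a single walk over the list would produce all of them; this is only a sketch (when knitted, the chunk would need results = "asis" so that each table's HTML is emitted verbatim):

# sketch: render all four metric tables in one loop
purrr::walk2(kable_boot_metric_vars, metric_names,
             ~print(kableExtra::kable_classic(kableExtra::kbl(.x, caption = .y),
                                              full_width = F,
                                              html_font = "Cambria")))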


  kable_boot_metric_vars[[1]] %>% 
    kableExtra::kbl(caption = paste0(metric_names[1])) %>%
    kableExtra::kable_classic(full_width = F, 
                             html_font = "Cambria")
Correlation
response algorithm 0.025 0.5 0.975 mean
gfactor FDR -0.006 0.092 0.237 0.099
gfactor Bonferroni 0.014 0.111 0.245 0.118
gfactor OLS 0.358 0.410 0.458 0.409
gfactor Elastic Net 0.381 0.432 0.480 0.432
gfactor Random Forest 0.363 0.416 0.467 0.416
gfactor Xgboost 0.356 0.408 0.458 0.408
gfactor Linear SVM 0.377 0.428 0.478 0.428
gfactor Polynomial SVM 0.377 0.428 0.478 0.428
gfactor RBF SVM 0.382 0.434 0.484 0.434
2-back Work Mem FDR -0.016 0.092 0.239 0.097
2-back Work Mem Bonferroni 0.011 0.114 0.249 0.119
2-back Work Mem OLS 0.447 0.493 0.536 0.493
2-back Work Mem Elastic Net 0.468 0.514 0.556 0.513
2-back Work Mem Random Forest 0.423 0.473 0.520 0.473
2-back Work Mem Xgboost 0.409 0.459 0.507 0.459
2-back Work Mem Linear SVM 0.454 0.500 0.543 0.499
2-back Work Mem Polynomial SVM 0.454 0.500 0.543 0.499
2-back Work Mem RBF SVM 0.465 0.511 0.554 0.510
Pic Vocab FDR -0.035 0.077 0.169 0.073
Pic Vocab Bonferroni -0.016 0.089 0.175 0.086
Pic Vocab OLS 0.257 0.311 0.363 0.311
Pic Vocab Elastic Net 0.298 0.351 0.402 0.350
Pic Vocab Random Forest 0.273 0.327 0.380 0.327
Pic Vocab Xgboost 0.279 0.333 0.385 0.332
Pic Vocab Linear SVM 0.273 0.329 0.383 0.328
Pic Vocab Polynomial SVM 0.277 0.332 0.384 0.331
Pic Vocab RBF SVM 0.276 0.331 0.384 0.331
Reading Recog FDR -0.017 0.087 0.196 0.088
Reading Recog Bonferroni 0.026 0.115 0.207 0.116
Reading Recog OLS 0.211 0.268 0.322 0.268
Reading Recog Elastic Net 0.268 0.323 0.374 0.323
Reading Recog Random Forest 0.247 0.306 0.360 0.306
Reading Recog Xgboost 0.243 0.300 0.353 0.300
Reading Recog Linear SVM 0.244 0.299 0.351 0.299
Reading Recog Polynomial SVM 0.255 0.309 0.360 0.309
Reading Recog RBF SVM 0.258 0.312 0.364 0.312
Matrix Reason FDR -0.026 0.072 0.189 0.076
Matrix Reason Bonferroni -0.014 0.095 0.200 0.095
Matrix Reason OLS 0.218 0.278 0.332 0.277
Matrix Reason Elastic Net 0.226 0.286 0.342 0.286
Matrix Reason Random Forest 0.211 0.270 0.328 0.270
Matrix Reason Xgboost 0.201 0.260 0.318 0.259
Matrix Reason Linear SVM 0.227 0.288 0.346 0.288
Matrix Reason Polynomial SVM 0.217 0.276 0.335 0.277
Matrix Reason RBF SVM 0.227 0.287 0.346 0.287
List Work Mem FDR -0.017 0.070 0.176 0.073
List Work Mem Bonferroni -0.008 0.085 0.185 0.087
List Work Mem OLS 0.200 0.257 0.312 0.257
List Work Mem Elastic Net 0.238 0.291 0.344 0.292
List Work Mem Random Forest 0.203 0.259 0.313 0.259
List Work Mem Xgboost 0.205 0.260 0.313 0.260
List Work Mem Linear SVM 0.216 0.270 0.322 0.270
List Work Mem Polynomial SVM 0.221 0.276 0.328 0.276
List Work Mem RBF SVM 0.221 0.275 0.328 0.276
Little Man FDR -0.015 0.072 0.162 0.073
Little Man Bonferroni -0.001 0.087 0.170 0.086
Little Man OLS 0.182 0.242 0.298 0.241
Little Man Elastic Net 0.207 0.266 0.321 0.265
Little Man Random Forest 0.184 0.242 0.301 0.242
Little Man Xgboost 0.192 0.251 0.309 0.251
Little Man Linear SVM 0.205 0.261 0.317 0.261
Little Man Polynomial SVM 0.190 0.248 0.305 0.248
Little Man RBF SVM 0.210 0.268 0.325 0.268
Card Sort FDR -0.015 0.066 0.150 0.067
Card Sort Bonferroni -0.004 0.080 0.158 0.079
Card Sort OLS 0.141 0.201 0.260 0.201
Card Sort Elastic Net 0.163 0.226 0.286 0.225
Card Sort Random Forest 0.153 0.215 0.276 0.215
Card Sort Xgboost 0.152 0.213 0.276 0.214
Card Sort Linear SVM 0.132 0.196 0.256 0.196
Card Sort Polynomial SVM 0.166 0.228 0.288 0.227
Card Sort RBF SVM 0.171 0.230 0.290 0.230
Seq Memory FDR -0.038 0.057 0.151 0.057
Seq Memory Bonferroni -0.019 0.086 0.160 0.081
Seq Memory OLS 0.051 0.118 0.180 0.118
Seq Memory Elastic Net 0.098 0.163 0.223 0.163
Seq Memory Random Forest 0.097 0.157 0.217 0.157
Seq Memory Xgboost 0.114 0.176 0.234 0.175
Seq Memory Linear SVM 0.094 0.158 0.219 0.158
Seq Memory Polynomial SVM 0.104 0.168 0.229 0.168
Seq Memory RBF SVM 0.104 0.168 0.230 0.168
Flanker FDR -0.039 0.074 0.180 0.073
Flanker Bonferroni -0.003 0.111 0.193 0.106
Flanker OLS 0.077 0.141 0.205 0.141
Flanker Elastic Net 0.146 0.212 0.273 0.212
Flanker Random Forest 0.132 0.194 0.253 0.194
Flanker Xgboost 0.123 0.185 0.243 0.184
Flanker Linear SVM 0.125 0.190 0.252 0.190
Flanker Polynomial SVM 0.139 0.205 0.266 0.205
Flanker RBF SVM 0.091 0.158 0.222 0.158
Audi Verbal FDR -0.032 0.054 0.140 0.054
Audi Verbal Bonferroni 0.002 0.075 0.153 0.076
Audi Verbal OLS 0.025 0.090 0.157 0.090
Audi Verbal Elastic Net 0.074 0.138 0.201 0.138
Audi Verbal Random Forest 0.090 0.154 0.215 0.154
Audi Verbal Xgboost 0.075 0.139 0.199 0.138
Audi Verbal Linear SVM 0.058 0.122 0.186 0.123
Audi Verbal Polynomial SVM 0.081 0.146 0.208 0.146
Audi Verbal RBF SVM 0.082 0.146 0.207 0.146
Pattern Speed FDR -0.025 0.065 0.144 0.064
Pattern Speed Bonferroni -0.013 0.079 0.153 0.077
Pattern Speed OLS 0.068 0.129 0.188 0.129
Pattern Speed Elastic Net 0.093 0.156 0.218 0.156
Pattern Speed Random Forest 0.109 0.169 0.227 0.169
Pattern Speed Xgboost 0.100 0.163 0.224 0.162
Pattern Speed Linear SVM 0.083 0.147 0.206 0.146
Pattern Speed Polynomial SVM 0.095 0.157 0.217 0.156
Pattern Speed RBF SVM 0.097 0.159 0.220 0.158
  kable_boot_metric_vars[[2]] %>% 
    kableExtra::kbl(caption = paste0(metric_names[2])) %>%
    kableExtra::kable_classic(full_width = F, 
                             html_font = "Cambria")
Traditional_Rsquare
response algorithm 0.025 0.5 0.975 mean
gfactor FDR -0.007 0.007 0.054 0.011
gfactor Bonferroni -0.007 0.011 0.058 0.015
gfactor OLS 0.103 0.158 0.206 0.157
gfactor Elastic Net 0.143 0.185 0.226 0.185
gfactor Random Forest 0.131 0.167 0.203 0.167
gfactor Xgboost 0.125 0.164 0.201 0.164
gfactor Linear SVM 0.141 0.180 0.218 0.180
gfactor Polynomial SVM 0.141 0.181 0.219 0.181
gfactor RBF SVM 0.144 0.185 0.225 0.185
2-back Work Mem FDR -0.007 0.006 0.055 0.011
2-back Work Mem Bonferroni -0.006 0.011 0.060 0.015
2-back Work Mem OLS 0.190 0.239 0.287 0.239
2-back Work Mem Elastic Net 0.218 0.260 0.300 0.260
2-back Work Mem Random Forest 0.176 0.216 0.254 0.216
2-back Work Mem Xgboost 0.166 0.207 0.246 0.207
2-back Work Mem Linear SVM 0.202 0.247 0.288 0.246
2-back Work Mem Polynomial SVM 0.202 0.247 0.288 0.246
2-back Work Mem RBF SVM 0.210 0.257 0.301 0.257
Pic Vocab FDR -0.012 0.004 0.023 0.004
Pic Vocab Bonferroni -0.014 0.006 0.026 0.006
Pic Vocab OLS 0.035 0.083 0.127 0.082
Pic Vocab Elastic Net 0.086 0.122 0.155 0.121
Pic Vocab Random Forest 0.072 0.102 0.131 0.102
Pic Vocab Xgboost 0.076 0.108 0.140 0.108
Pic Vocab Linear SVM 0.058 0.099 0.139 0.099
Pic Vocab Polynomial SVM 0.064 0.104 0.143 0.104
Pic Vocab RBF SVM 0.062 0.104 0.143 0.103
Reading Recog FDR -0.007 0.006 0.035 0.008
Reading Recog Bonferroni -0.005 0.011 0.039 0.013
Reading Recog OLS 0.013 0.058 0.100 0.057
Reading Recog Elastic Net 0.071 0.101 0.128 0.100
Reading Recog Random Forest 0.060 0.090 0.117 0.089
Reading Recog Xgboost 0.057 0.085 0.110 0.084
Reading Recog Linear SVM 0.052 0.084 0.114 0.084
Reading Recog Polynomial SVM 0.058 0.088 0.115 0.088
Reading Recog RBF SVM 0.059 0.090 0.118 0.090
Matrix Reason FDR -0.009 0.004 0.033 0.006
Matrix Reason Bonferroni -0.010 0.008 0.036 0.009
Matrix Reason OLS 0.016 0.064 0.107 0.063
Matrix Reason Elastic Net 0.048 0.081 0.111 0.081
Matrix Reason Random Forest 0.041 0.072 0.101 0.071
Matrix Reason Xgboost 0.039 0.063 0.087 0.063
Matrix Reason Linear SVM 0.048 0.081 0.112 0.081
Matrix Reason Polynomial SVM 0.036 0.074 0.109 0.073
Matrix Reason RBF SVM 0.049 0.080 0.111 0.080
List Work Mem FDR -0.009 0.003 0.028 0.005
List Work Mem Bonferroni -0.010 0.006 0.030 0.007
List Work Mem OLS 0.002 0.047 0.090 0.047
List Work Mem Elastic Net 0.055 0.083 0.111 0.083
List Work Mem Random Forest 0.038 0.066 0.093 0.066
List Work Mem Xgboost 0.034 0.066 0.096 0.065
List Work Mem Linear SVM 0.039 0.070 0.099 0.070
List Work Mem Polynomial SVM 0.045 0.072 0.098 0.072
List Work Mem RBF SVM 0.045 0.072 0.099 0.072
Little Man FDR -0.008 0.004 0.022 0.005
Little Man Bonferroni -0.009 0.006 0.024 0.007
Little Man OLS 0.000 0.043 0.084 0.042
Little Man Elastic Net 0.041 0.069 0.096 0.069
Little Man Random Forest 0.032 0.056 0.081 0.056
Little Man Xgboost 0.035 0.060 0.086 0.060
Little Man Linear SVM 0.021 0.060 0.097 0.059
Little Man Polynomial SVM 0.017 0.054 0.089 0.054
Little Man RBF SVM 0.033 0.068 0.100 0.068
Card Sort FDR -0.008 0.003 0.019 0.004
Card Sort Bonferroni -0.008 0.005 0.021 0.005
Card Sort OLS -0.026 0.018 0.059 0.017
Card Sort Elastic Net 0.025 0.049 0.072 0.049
Card Sort Random Forest 0.019 0.045 0.071 0.045
Card Sort Xgboost 0.021 0.043 0.067 0.044
Card Sort Linear SVM 0.003 0.035 0.064 0.034
Card Sort Polynomial SVM 0.026 0.048 0.070 0.048
Card Sort RBF SVM 0.026 0.051 0.074 0.051
Seq Memory FDR -0.009 0.002 0.019 0.003
Seq Memory Bonferroni -0.010 0.006 0.021 0.006
Seq Memory OLS -0.076 -0.031 0.009 -0.032
Seq Memory Elastic Net 0.004 0.026 0.046 0.025
Seq Memory Random Forest 0.000 0.023 0.045 0.023
Seq Memory Xgboost 0.009 0.029 0.048 0.029
Seq Memory Linear SVM -0.024 0.011 0.042 0.010
Seq Memory Polynomial SVM -0.002 0.024 0.048 0.024
Seq Memory RBF SVM -0.002 0.024 0.048 0.024
Flanker FDR -0.009 0.004 0.024 0.005
Flanker Bonferroni -0.007 0.010 0.027 0.010
Flanker OLS -0.063 -0.017 0.026 -0.017
Flanker Elastic Net 0.018 0.044 0.066 0.043
Flanker Random Forest 0.015 0.036 0.055 0.036
Flanker Xgboost 0.013 0.032 0.050 0.032
Flanker Linear SVM -0.003 0.025 0.050 0.024
Flanker Polynomial SVM 0.003 0.027 0.049 0.027
Flanker RBF SVM -0.013 0.013 0.038 0.013
Audi Verbal FDR -0.009 0.002 0.015 0.002
Audi Verbal Bonferroni -0.007 0.005 0.018 0.005
Audi Verbal OLS -0.100 -0.054 -0.008 -0.054
Audi Verbal Elastic Net -0.011 0.015 0.040 0.014
Audi Verbal Random Forest 0.000 0.023 0.044 0.022
Audi Verbal Xgboost 0.004 0.014 0.023 0.014
Audi Verbal Linear SVM -0.031 0.000 0.029 0.000
Audi Verbal Polynomial SVM -0.005 0.017 0.039 0.017
Audi Verbal RBF SVM -0.009 0.016 0.040 0.016
Pattern Speed FDR -0.008 0.003 0.015 0.003
Pattern Speed Bonferroni -0.008 0.005 0.017 0.005
Pattern Speed OLS -0.048 -0.011 0.023 -0.011
Pattern Speed Elastic Net 0.007 0.021 0.035 0.021
Pattern Speed Random Forest 0.009 0.027 0.045 0.027
Pattern Speed Xgboost 0.008 0.024 0.040 0.024
Pattern Speed Linear SVM -0.010 0.017 0.041 0.016
Pattern Speed Polynomial SVM 0.006 0.022 0.037 0.022
Pattern Speed RBF SVM 0.007 0.022 0.037 0.022
  kable_boot_metric_vars[[3]] %>% 
    kableExtra::kbl(caption = paste0(metric_names[3])) %>%
    kableExtra::kable_classic(full_width = F, 
                             html_font = "Cambria")
MAE
response algorithm 0.025 0.5 0.975 mean
gfactor FDR 0.751 0.789 0.828 0.789
gfactor Bonferroni 0.750 0.788 0.827 0.788
gfactor OLS 0.700 0.733 0.768 0.733
gfactor Elastic Net 0.687 0.720 0.755 0.721
gfactor Random Forest 0.691 0.725 0.760 0.725
gfactor Xgboost 0.695 0.729 0.764 0.730
gfactor Linear SVM 0.688 0.721 0.755 0.721
gfactor Polynomial SVM 0.687 0.720 0.754 0.721
gfactor RBF SVM 0.682 0.715 0.750 0.716
2-back Work Mem FDR 0.787 0.824 0.860 0.824
2-back Work Mem Bonferroni 0.785 0.823 0.859 0.823
2-back Work Mem OLS 0.674 0.705 0.737 0.705
2-back Work Mem Elastic Net 0.669 0.699 0.731 0.700
2-back Work Mem Random Forest 0.691 0.722 0.754 0.723
2-back Work Mem Xgboost 0.695 0.726 0.757 0.726
2-back Work Mem Linear SVM 0.672 0.703 0.735 0.703
2-back Work Mem Polynomial SVM 0.671 0.702 0.734 0.703
2-back Work Mem RBF SVM 0.665 0.696 0.727 0.696
Pic Vocab FDR 0.756 0.793 0.831 0.793
Pic Vocab Bonferroni 0.755 0.792 0.831 0.793
Pic Vocab OLS 0.724 0.759 0.796 0.760
Pic Vocab Elastic Net 0.707 0.741 0.778 0.742
Pic Vocab Random Forest 0.723 0.757 0.794 0.758
Pic Vocab Xgboost 0.717 0.752 0.788 0.752
Pic Vocab Linear SVM 0.710 0.746 0.783 0.746
Pic Vocab Polynomial SVM 0.709 0.746 0.782 0.746
Pic Vocab RBF SVM 0.710 0.746 0.782 0.746
Reading Recog FDR 0.703 0.743 0.785 0.744
Reading Recog Bonferroni 0.702 0.742 0.784 0.743
Reading Recog OLS 0.692 0.731 0.771 0.731
Reading Recog Elastic Net 0.672 0.710 0.749 0.710
Reading Recog Random Forest 0.676 0.714 0.754 0.714
Reading Recog Xgboost 0.681 0.718 0.758 0.719
Reading Recog Linear SVM 0.672 0.711 0.751 0.711
Reading Recog Polynomial SVM 0.670 0.709 0.750 0.710
Reading Recog RBF SVM 0.670 0.709 0.750 0.709
Matrix Reason FDR 0.736 0.775 0.815 0.775
Matrix Reason Bonferroni 0.734 0.773 0.813 0.773
Matrix Reason OLS 0.719 0.757 0.794 0.757
Matrix Reason Elastic Net 0.706 0.743 0.780 0.743
Matrix Reason Random Forest 0.710 0.746 0.783 0.746
Matrix Reason Xgboost 0.711 0.748 0.786 0.749
Matrix Reason Linear SVM 0.706 0.743 0.781 0.743
Matrix Reason Polynomial SVM 0.711 0.748 0.785 0.748
Matrix Reason RBF SVM 0.705 0.742 0.779 0.742
List Work Mem FDR 0.762 0.799 0.837 0.799
List Work Mem Bonferroni 0.761 0.798 0.836 0.798
List Work Mem OLS 0.746 0.782 0.818 0.782
List Work Mem Elastic Net 0.735 0.770 0.805 0.770
List Work Mem Random Forest 0.740 0.775 0.810 0.775
List Work Mem Xgboost 0.739 0.774 0.809 0.774
List Work Mem Linear SVM 0.740 0.774 0.809 0.775
List Work Mem Polynomial SVM 0.739 0.774 0.809 0.774
List Work Mem RBF SVM 0.739 0.774 0.809 0.774
Little Man FDR 0.769 0.805 0.843 0.805
Little Man Bonferroni 0.767 0.804 0.841 0.804
Little Man OLS 0.744 0.780 0.817 0.780
Little Man Elastic Net 0.738 0.773 0.809 0.773
Little Man Random Forest 0.743 0.779 0.815 0.779
Little Man Xgboost 0.744 0.779 0.815 0.779
Little Man Linear SVM 0.738 0.774 0.810 0.774
Little Man Polynomial SVM 0.740 0.776 0.812 0.776
Little Man RBF SVM 0.736 0.771 0.808 0.771
Card Sort FDR 0.728 0.767 0.807 0.767
Card Sort Bonferroni 0.728 0.766 0.806 0.766
Card Sort OLS 0.731 0.769 0.808 0.769
Card Sort Elastic Net 0.713 0.751 0.791 0.751
Card Sort Random Forest 0.712 0.750 0.790 0.750
Card Sort Xgboost 0.714 0.752 0.793 0.752
Card Sort Linear SVM 0.718 0.756 0.796 0.756
Card Sort Polynomial SVM 0.712 0.749 0.789 0.750
Card Sort RBF SVM 0.713 0.750 0.790 0.750
Seq Memory FDR 0.781 0.816 0.853 0.817
Seq Memory Bonferroni 0.779 0.815 0.851 0.815
Seq Memory OLS 0.780 0.816 0.854 0.816
Seq Memory Elastic Net 0.765 0.800 0.837 0.801
Seq Memory Random Forest 0.766 0.801 0.838 0.801
Seq Memory Xgboost 0.764 0.799 0.835 0.799
Seq Memory Linear SVM 0.769 0.805 0.841 0.805
Seq Memory Polynomial SVM 0.765 0.801 0.836 0.801
Seq Memory RBF SVM 0.765 0.800 0.836 0.801
Flanker FDR 0.730 0.769 0.809 0.769
Flanker Bonferroni 0.727 0.766 0.807 0.767
Flanker OLS 0.734 0.773 0.812 0.774
Flanker Elastic Net 0.710 0.749 0.787 0.749
Flanker Random Forest 0.715 0.754 0.793 0.753
Flanker Xgboost 0.715 0.754 0.793 0.754
Flanker Linear SVM 0.709 0.748 0.788 0.748
Flanker Polynomial SVM 0.708 0.746 0.786 0.746
Flanker RBF SVM 0.711 0.750 0.791 0.750
Audi Verbal FDR 0.765 0.801 0.839 0.801
Audi Verbal Bonferroni 0.764 0.800 0.838 0.800
Audi Verbal OLS 0.780 0.817 0.857 0.817
Audi Verbal Elastic Net 0.760 0.796 0.833 0.796
Audi Verbal Random Forest 0.757 0.793 0.829 0.793
Audi Verbal Xgboost 0.762 0.796 0.833 0.797
Audi Verbal Linear SVM 0.761 0.797 0.836 0.798
Audi Verbal Polynomial SVM 0.757 0.792 0.830 0.793
Audi Verbal RBF SVM 0.758 0.793 0.831 0.794
Pattern Speed FDR 0.743 0.781 0.820 0.781
Pattern Speed Bonferroni 0.743 0.780 0.819 0.781
Pattern Speed OLS 0.751 0.789 0.829 0.790
Pattern Speed Elastic Net 0.733 0.772 0.812 0.772
Pattern Speed Random Forest 0.733 0.771 0.811 0.772
Pattern Speed Xgboost 0.731 0.771 0.810 0.771
Pattern Speed Linear SVM 0.735 0.774 0.813 0.774
Pattern Speed Polynomial SVM 0.732 0.772 0.811 0.772
Pattern Speed RBF SVM 0.732 0.771 0.810 0.771
  kable_boot_metric_vars[[4]] %>% 
    kableExtra::kbl(caption = paste0(metric_names[4])) %>%
    kableExtra::kable_classic(full_width = F, 
                             html_font = "Cambria")
RMSE
response algorithm 0.025 0.5 0.975 mean
gfactor FDR 0.947 0.993 1.039 0.993
gfactor Bonferroni 0.945 0.991 1.038 0.991
gfactor OLS 0.879 0.917 0.956 0.917
gfactor Elastic Net 0.863 0.902 0.941 0.902
gfactor Random Forest 0.871 0.912 0.952 0.911
gfactor Xgboost 0.872 0.914 0.955 0.913
gfactor Linear SVM 0.865 0.905 0.945 0.905
gfactor Polynomial SVM 0.865 0.904 0.944 0.904
gfactor RBF SVM 0.861 0.902 0.943 0.902
2-back Work Mem FDR 0.955 0.994 1.030 0.993
2-back Work Mem Bonferroni 0.952 0.991 1.028 0.991
2-back Work Mem OLS 0.837 0.871 0.907 0.871
2-back Work Mem Elastic Net 0.826 0.859 0.894 0.859
2-back Work Mem Random Forest 0.851 0.884 0.918 0.884
2-back Work Mem Xgboost 0.856 0.890 0.924 0.890
2-back Work Mem Linear SVM 0.833 0.867 0.902 0.867
2-back Work Mem Polynomial SVM 0.833 0.867 0.902 0.867
2-back Work Mem RBF SVM 0.826 0.861 0.896 0.861
Pic Vocab FDR 0.950 0.996 1.044 0.997
Pic Vocab Bonferroni 0.949 0.996 1.043 0.996
Pic Vocab OLS 0.915 0.957 1.000 0.957
Pic Vocab Elastic Net 0.894 0.936 0.981 0.937
Pic Vocab Random Forest 0.903 0.946 0.991 0.947
Pic Vocab Xgboost 0.900 0.943 0.988 0.944
Pic Vocab Linear SVM 0.904 0.948 0.994 0.949
Pic Vocab Polynomial SVM 0.901 0.945 0.991 0.946
Pic Vocab RBF SVM 0.902 0.946 0.991 0.946
Reading Recog FDR 0.939 0.994 1.051 0.995
Reading Recog Bonferroni 0.937 0.992 1.049 0.992
Reading Recog OLS 0.917 0.969 1.023 0.970
Reading Recog Elastic Net 0.895 0.948 1.002 0.948
Reading Recog Random Forest 0.901 0.953 1.008 0.953
Reading Recog Xgboost 0.903 0.956 1.010 0.956
Reading Recog Linear SVM 0.903 0.956 1.011 0.956
Reading Recog Polynomial SVM 0.901 0.954 1.009 0.954
Reading Recog RBF SVM 0.900 0.953 1.008 0.953
Matrix Reason FDR 0.948 0.996 1.044 0.996
Matrix Reason Bonferroni 0.946 0.994 1.043 0.994
Matrix Reason OLS 0.921 0.967 1.013 0.967
Matrix Reason Elastic Net 0.912 0.958 1.005 0.958
Matrix Reason Random Forest 0.917 0.962 1.010 0.962
Matrix Reason Xgboost 0.921 0.967 1.014 0.967
Matrix Reason Linear SVM 0.912 0.957 1.005 0.957
Matrix Reason Polynomial SVM 0.916 0.961 1.007 0.961
Matrix Reason RBF SVM 0.912 0.958 1.005 0.958
List Work Mem FDR 0.954 0.996 1.039 0.996
List Work Mem Bonferroni 0.953 0.995 1.038 0.995
List Work Mem OLS 0.934 0.974 1.017 0.974
List Work Mem Elastic Net 0.916 0.956 0.996 0.956
List Work Mem Random Forest 0.925 0.965 1.005 0.965
List Work Mem Xgboost 0.924 0.965 1.006 0.965
List Work Mem Linear SVM 0.922 0.963 1.004 0.963
List Work Mem Polynomial SVM 0.921 0.962 1.003 0.962
List Work Mem RBF SVM 0.921 0.962 1.002 0.962
Little Man FDR 0.954 0.996 1.040 0.996
Little Man Bonferroni 0.953 0.995 1.039 0.996
Little Man OLS 0.932 0.978 1.023 0.977
Little Man Elastic Net 0.921 0.964 1.006 0.964
Little Man Random Forest 0.927 0.970 1.015 0.970
Little Man Xgboost 0.925 0.968 1.011 0.968
Little Man Linear SVM 0.925 0.969 1.013 0.969
Little Man Polynomial SVM 0.928 0.972 1.016 0.972
Little Man RBF SVM 0.921 0.965 1.008 0.964
Card Sort FDR 0.944 0.997 1.050 0.997
Card Sort Bonferroni 0.944 0.996 1.050 0.996
Card Sort OLS 0.939 0.989 1.044 0.990
Card Sort Elastic Net 0.923 0.973 1.029 0.974
Card Sort Random Forest 0.925 0.975 1.030 0.976
Card Sort Xgboost 0.926 0.976 1.032 0.977
Card Sort Linear SVM 0.930 0.981 1.037 0.982
Card Sort Polynomial SVM 0.923 0.974 1.029 0.975
Card Sort RBF SVM 0.923 0.972 1.028 0.973
Seq Memory FDR 0.959 0.997 1.036 0.997
Seq Memory Bonferroni 0.957 0.996 1.035 0.996
Seq Memory OLS 0.974 1.014 1.056 1.014
Seq Memory Elastic Net 0.947 0.986 1.024 0.986
Seq Memory Random Forest 0.949 0.987 1.026 0.987
Seq Memory Xgboost 0.946 0.984 1.023 0.984
Seq Memory Linear SVM 0.954 0.993 1.034 0.994
Seq Memory Polynomial SVM 0.948 0.987 1.026 0.987
Seq Memory RBF SVM 0.948 0.987 1.026 0.987
Flanker FDR 0.940 0.996 1.053 0.996
Flanker Bonferroni 0.938 0.993 1.050 0.994
Flanker OLS 0.952 1.007 1.062 1.007
Flanker Elastic Net 0.923 0.976 1.031 0.976
Flanker Random Forest 0.926 0.980 1.036 0.980
Flanker Xgboost 0.928 0.982 1.038 0.982
Flanker Linear SVM 0.929 0.986 1.044 0.986
Flanker Polynomial SVM 0.928 0.985 1.043 0.985
Flanker RBF SVM 0.935 0.992 1.051 0.992
Audi Verbal FDR 0.955 0.998 1.041 0.998
Audi Verbal Bonferroni 0.954 0.996 1.039 0.996
Audi Verbal OLS 0.981 1.025 1.070 1.025
Audi Verbal Elastic Net 0.950 0.991 1.034 0.991
Audi Verbal Random Forest 0.946 0.987 1.030 0.988
Audi Verbal Xgboost 0.950 0.992 1.035 0.992
Audi Verbal Linear SVM 0.956 0.998 1.043 0.999
Audi Verbal Polynomial SVM 0.947 0.990 1.033 0.990
Audi Verbal RBF SVM 0.948 0.990 1.034 0.991
Pattern Speed FDR 0.951 0.997 1.044 0.997
Pattern Speed Bonferroni 0.950 0.996 1.043 0.996
Pattern Speed OLS 0.959 1.005 1.051 1.005
Pattern Speed Elastic Net 0.942 0.989 1.036 0.989
Pattern Speed Random Forest 0.939 0.985 1.032 0.985
Pattern Speed Xgboost 0.939 0.987 1.034 0.987
Pattern Speed Linear SVM 0.944 0.991 1.037 0.991
Pattern Speed Polynomial SVM 0.941 0.988 1.035 0.988
Pattern Speed RBF SVM 0.941 0.988 1.035 0.988

5.9 Getting the bootstrapped differences in the performance metrics between the other algorithms and OLS

Get the predicted values from the different algorithms:

enet_pred_gfactor <- enet_predicted_list_gfactor[["gfactor"]]%>%
                     select(c("model_predict","gfactor"))%>%
                     rename(enet_pred = model_predict, model_resp = gfactor)


ols_pred_gfactor <- OLS_predict_list_gfactor[["gfactor"]]%>%
                     select(c("model_pred","gfactor"))%>%
                     rename(ols_pred = model_pred, model_resp = gfactor)


SVM_RBF_pred_gfactor <- SVM_RBF_predicted_list_gfactor[["gfactor"]]%>%
                     select(c("model_predict","gfactor"))%>%
                     rename(svm_rbf_pred = model_predict, model_resp = gfactor)


svm_linear_pred_gfactor <- svm_linear_predicted_list_gfactor[["gfactor"]]%>%
                     select(c("model_predict","gfactor"))%>%
                     rename(svm_linear_pred = model_predict, model_resp = gfactor)

svm_poly_pred_gfactor <- svm_poly_predicted_list_gfactor[["gfactor"]]%>%
                     select(c("model_predict","gfactor"))%>%
                     rename(svm_poly_pred = model_predict, model_resp = gfactor)

random_forest_pred_gfactor <- random_forest_predicted_list_gfactor[["gfactor"]]%>%
                     select(c("model_predict","gfactor"))%>%
                     rename(random_forest_pred = model_predict, model_resp = gfactor)


xgboost_pred_gfactor <- xgboost_predicted_list_gfactor[["gfactor"]]%>%
                     select(c("model_predict","gfactor"))%>%
                     rename(xgboost_pred = model_predict, model_resp = gfactor)




gfactor_pred_all <- plyr::join_all(list(enet_pred_gfactor, ols_pred_gfactor, SVM_RBF_pred_gfactor,
                                        svm_linear_pred_gfactor, svm_poly_pred_gfactor,
                                        random_forest_pred_gfactor, xgboost_pred_gfactor),
                                   by = "model_resp", type = "left")


##get all of the predictions for the cognitive tasks

task_pred_processing <- function(resp_input){
  # helper: pull one algorithm's predictions for this response and give the
  # columns standard names; the Left-Cerebellum-Cortex ROI column is kept
  # only as a join key
  get_pred <- function(pred_list, pred_col, out_name){
    pred_tibble <- pred_list[[resp_input]]%>%
      select(all_of(c(pred_col, "roi_Left.Cerebellum.Cortex", resp_input)))
    names(pred_tibble) <- c(out_name, "roi_Left.Cerebellum.Cortex", "model_resp")
    return(pred_tibble)
  }

  pred_all <- plyr::join_all(
    list(get_pred(enet_predicted_list, "model_predict", "enet_pred"),
         get_pred(SVM_RBF_predicted_list, "model_predict", "svm_rbf_pred"),
         get_pred(OLS_predict_list, "model_pred", "ols_pred"),
         get_pred(svm_linear_predicted_list, "model_predict", "svm_linear_pred"),
         get_pred(svm_poly_predicted_list, "model_predict", "svm_poly_pred"),
         get_pred(random_forest_predicted_list, "model_predict", "random_forest_pred"),
         get_pred(xgboost_predicted_list, "model_predict", "xgboost_pred")),
    by = c("roi_Left.Cerebellum.Cortex", "model_resp"), type = "left")%>%
    select(-"roi_Left.Cerebellum.Cortex")

  return(pred_all)
}

task_pred_all <- resp_names %>% map(.,~task_pred_processing(resp_input = .))

resp_all <- c(resp_names,"gfactor")

pred_all_resp <- vector("list",length =length(resp_all))
names(pred_all_resp) <- resp_all

pred_all_resp[1:11]<-task_pred_all

pred_all_resp[["gfactor"]] <- gfactor_pred_all

perfmatrics_diff_all_ols <- function(data, i){
  # boot::boot supplies the resampled row indices in i; subset once up front
  d <- data[i, ]

  # the four performance metrics for one set of predictions
  metrics_for <- function(pred){
    c(cor  = cor(pred, d$model_resp, use = "pairwise.complete.obs"),
      rsq  = yardstick::rsq_trad_vec(truth = d$model_resp, estimate = pred),
      mae  = yardstick::mae_vec(truth = d$model_resp, estimate = pred),
      rmse = yardstick::rmse_vec(truth = d$model_resp, estimate = pred))
  }

  ols_metrics <- metrics_for(d$ols_pred)

  # algorithm-minus-OLS differences, in the column order expected downstream:
  # Elastic Net, RBF SVM, linear SVM, polynomial SVM, random forest, XGBoost,
  # each contributing (correlation, traditional R-squared, MAE, RMSE)
  algo_cols <- c("enet_pred", "svm_rbf_pred", "svm_linear_pred",
                 "svm_poly_pred", "random_forest_pred", "xgboost_pred")

  unname(unlist(lapply(algo_cols, function(col) metrics_for(d[[col]]) - ols_metrics)))
}
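
For readers unfamiliar with boot, the (data, i) signature above is dictated by boot::boot(): on every replicate it calls the statistic with the full data set plus a vector of resampled row indices. A toy sketch (toy_stat and its data frame are ours):

# minimal illustration of the (data, i) contract used by boot::boot()
toy_stat <- function(data, i) mean(data$x[i])  # i = resampled row indices
toy_boot <- boot::boot(data = data.frame(x = rnorm(50)),
                       statistic = toy_stat,
                       R = 100)
head(toy_boot$t)  # R rows; one column per statistic the function returns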

5.10 Bootstrap the real values and the predicted values

Resample the observed and predicted values 5,000 times and compute the performance statistics with the function above:

set.seed(123456)


 boot_all_resp_no_uni <- furrr::future_map(pred_all_resp, ~boot::boot(data = .,
           statistic = perfmatrics_diff_all_ols,
           R = 5000,
          # parallel="snow",
          # ncpus=20,
           #cl=cl
          ),
          .options = furrr::furrr_options(seed = 123456))    
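
A serial purrr::map call would compute the same list of boot objects if parallel execution is not available (the exact draws depend on the RNG stream, so they will not match the parallel run bit for bit):

# serial fallback:
# boot_all_resp_no_uni <- purrr::map(pred_all_resp,
#                                    ~boot::boot(data = .,
#                                                statistic = perfmatrics_diff_all_ols,
#                                                R = 5000))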



saveRDS(boot_all_resp_no_uni, paste0(anotherFold,'working_memory_tasks/windows/boot_all_resp_no_uni_April_12_2022', '.RData'))

5.10.1 Processing the univariate models

uni_var_results_process <- function(model_input, pred_input, ols_input, resp_input){
  roi_vec <- model_input$roi
  # one tibble per surviving ROI: its univariate predictions, the observed
  # response, and the OLS predictions for comparison
  roi_list <- map(roi_vec, function(roi_input){
    roi_tibble <- select(pred_input, c(roi_input, resp_input))%>%
      mutate(ols_pred = ols_input$model_pred)
    names(roi_tibble) <- c("uni_pred", "model_resp", "ols_pred")
    return(roi_tibble)
  })
  names(roi_list) <- roi_vec
  return(roi_list)
}

uni_fdr_results <- pmap(list(univariate_model_pred,univariate_model_fdr,
                             OLS_predict_list,resp_names),~uni_var_results_process(
                               model_input=..2, 
                               pred_input=..1,
                               ols_input=..3,
                               resp_input=..4
                             ))

uni_bonferroni_results <- pmap(list(univariate_model_pred,univariate_model_bonferroni,
                             OLS_predict_list,resp_names),~uni_var_results_process(
                               model_input=..2, 
                               pred_input=..1,
                               ols_input=..3,
                               resp_input=..4
                             ))


uni_fdr_results_gfactor <- pmap(list(univariate_model_pred_gfactor,
                                     univariate_model_fdr_gfactor,
                             OLS_predict_list_gfactor,cfa_resp_names),~uni_var_results_process(
                               model_input=..2, 
                               pred_input=..1,
                               ols_input=..3,
                               resp_input=..4
                             ))

uni_bonferroni_results_gfactor <- pmap(list(univariate_model_pred_gfactor,
                                    univariate_model_bonferroni_gfactor,
                             OLS_predict_list_gfactor,cfa_resp_names),~uni_var_results_process(
                               model_input=..2, 
                               pred_input=..1,
                               ols_input=..3,
                               resp_input=..4
                             ))

uni_fdr_all <- append(uni_fdr_results,uni_fdr_results_gfactor)
uni_bonferroni_all <- append(uni_bonferroni_results,uni_bonferroni_results_gfactor)

perfmatrics_diff_uni_ols <- function(data, i){
  # same structure as perfmatrics_diff_all_ols, but comparing a single
  # univariate model against OLS
  d <- data[i, ]

  metrics_for <- function(pred){
    c(cor  = cor(pred, d$model_resp, use = "pairwise.complete.obs"),
      rsq  = yardstick::rsq_trad_vec(truth = d$model_resp, estimate = pred),
      mae  = yardstick::mae_vec(truth = d$model_resp, estimate = pred),
      rmse = yardstick::rmse_vec(truth = d$model_resp, estimate = pred))
  }

  # univariate-minus-OLS differences:
  # (correlation, traditional R-squared, MAE, RMSE)
  unname(metrics_for(d$uni_pred) - metrics_for(d$ols_pred))
}

Compute the bootstrapped differences between the univariate models and OLS:

boot_diff_uni <- function(data_input, metric_input){
  set.seed(123456)

  boot_uni <- furrr::future_map(data_input, ~boot::boot(data = .,
                                                        statistic = metric_input,
                                                        R = 5000,
                                                        # parallel = "snow",
                                                        # ncpus = 20,
                                                        # cl = cl
                                                        ),
                                .options = furrr::furrr_options(seed = 123456))
  return(boot_uni)
}

boot_uni_diff_fdr<-map(uni_fdr_all,~boot_diff_uni(data_input=.,
                                                  metric_input =perfmatrics_diff_uni_ols ))
  
boot_uni_diff_bonferroni <-map(uni_bonferroni_all,~boot_diff_uni(data_input=.,
                                                  metric_input =perfmatrics_diff_uni_ols ))


boot_diff_uni_all <- list(fdr=boot_uni_diff_fdr,bonferroni=boot_uni_diff_bonferroni)

saveRDS(boot_diff_uni_all, paste0(anotherFold,'working_memory_tasks/boot_diff_uni_all_April_21_2022', '.RData'))

Process the results:

uni_boot_results_processing <- function(data_input, resp_input, algor_input){
  short_name <- resp_plotting_all$short_name[which(resp_plotting_all$response == resp_input)]

  # stack the 5,000 bootstrap draws of one metric (a column of boot's $t
  # matrix) across every ROI in data_input
  metric_tibble <- function(col_index){
    map(data_input, function(table_input){
      tibble(value = table_input$t[, col_index],
             response = rep(short_name, 5000))
    })%>%
      do.call(rbind, .)%>%
      mutate(algorithm = algor_input)
  }

  out_list <- list(Correlation = metric_tibble(1),
                   Traditional_Rsquare = metric_tibble(2),
                   MAE = metric_tibble(3),
                   RMSE = metric_tibble(4))
  return(out_list)
}



resp_plotting_all <- bind_rows(resp_var_plotting,tibble(response="gfactor",longer_name="gfactor",short_name="gfactor"))


resp_all <- resp_all%>% set_names()

uni_fdr_diff <- map2(.x=resp_all,.y=boot_uni_diff_fdr,~uni_boot_results_processing(data_input=.y,
                                                          resp_input=.x, 
                                                          algor_input="FDR"))

uni_bonferroni_diff <- map2(.x=resp_all,.y=boot_uni_diff_bonferroni
                            ,~uni_boot_results_processing(data_input=.y,
                                                          resp_input=.x, 
                                                          algor_input="Bonferroni"))

5.11 Bootstrap results processing

theme_set(theme_ggdist())


boot_diff_result_process <- function(boot_input, resp_input){
  boot_output <- boot_input[[resp_input]]
  short_name <- resp_plotting_all$short_name[which(resp_plotting_all$response == resp_input)]

  # columns of boot_output$t come in blocks of four per algorithm
  # (correlation, traditional R-squared, MAE, RMSE), in the order
  # returned by perfmatrics_diff_all_ols
  col_order <- c("Elastic\nNet", "RBF\nSVM", "Linear\nSVM",
                 "Polynomial\nSVM", "Random\nForest", "Xgboost")

  metric_tibble <- function(metric_offset){
    map2(seq_along(col_order), col_order, function(block, algo){
      tibble(value = boot_output$t[, (block - 1) * 4 + metric_offset],
             algorithm = algo)
    })%>%
      bind_rows()%>%
      mutate(response = short_name)
  }

  return(list(Correlation = metric_tibble(1),
              Traditional_Rsquare = metric_tibble(2),
              MAE = metric_tibble(3),
              RMSE = metric_tibble(4)))
}


boot_diff_nouni <- map(resp_all,~boot_diff_result_process(boot_input=boot_all_resp_no_uni,resp_input=.))

#names(boot_diff_nouni[[12]])<- names(boot_diff_nouni[[11]])
boot_corr_diff_nouni <- map(boot_diff_nouni,"Correlation")%>%
            do.call(rbind,.)

boot_cor_diff_fdr <- map(uni_fdr_diff,"Correlation")%>%
            do.call(rbind,.)

boot_cor_diff_bonferroni <- map(uni_bonferroni_diff,"Correlation")%>%
            do.call(rbind,.)

boot_cor_diff <- boot_corr_diff_nouni%>%
                 bind_rows(boot_cor_diff_fdr)%>%
                 bind_rows(boot_cor_diff_bonferroni)%>%
  mutate(algorithm = as.factor(algorithm))%>%
   mutate(algorithm = factor(algorithm,levels =c ("FDR","Bonferroni","Elastic\nNet",
                                                "Random\nForest","Xgboost", 
                                                "Linear\nSVM",
                                                "Polynomial\nSVM" ,"RBF\nSVM")))%>%
  mutate(resp_factor= as.factor(response))%>%
  mutate(resp_factor = factor(response,levels =c("Pattern Speed", "Audi Verbal",
                                                 "Flanker","Seq Memory",
                                                 "Card Sort", # "Cog Flex",
                                                 "Little Man",
                                                 "List Work Mem","Matrix Reason",
                                                 "Reading Recog","Pic Vocab", 
                                                 "2-back Work Mem","gfactor" )))






boot_rsq_diff_nouni <- map(boot_diff_nouni,"Traditional_Rsquare")%>%
            do.call(rbind,.)

boot_rsq_diff_fdr <- map(uni_fdr_diff,"Traditional_Rsquare")%>%
            do.call(rbind,.)

boot_rsq_diff_bonferroni <- map(uni_bonferroni_diff,"Traditional_Rsquare")%>%
            do.call(rbind,.)

boot_rsq_diff <- boot_rsq_diff_nouni%>%
                 bind_rows(boot_rsq_diff_fdr)%>%
                 bind_rows(boot_rsq_diff_bonferroni)%>%
  mutate(algorithm = as.factor(algorithm))%>%
   mutate(algorithm = factor(algorithm,levels =c ("FDR","Bonferroni","Elastic\nNet",
                                                "Random\nForest","Xgboost", 
                                                "Linear\nSVM",
                                                "Polynomial\nSVM" ,"RBF\nSVM")))%>%
  mutate(resp_factor= as.factor(response))%>%
  mutate(resp_factor = factor(response,levels =c("Pattern Speed", "Audi Verbal",
                                                 "Flanker","Seq Memory",
                                                 "Card Sort", # "Cog Flex",
                                                 "Little Man",
                                                 "List Work Mem","Matrix Reason",
                                                 "Reading Recog","Pic Vocab", 
                                                 "2-back Work Mem","gfactor" )))

boot_mae_diff_nouni <- map(boot_diff_nouni,"MAE")%>%
            do.call(rbind,.)

boot_mae_diff_fdr <- map(uni_fdr_diff,"MAE")%>%
            do.call(rbind,.)

boot_mae_diff_bonferroni <- map(uni_bonferroni_diff,"MAE")%>%
            do.call(rbind,.)

boot_mae_diff <- boot_mae_diff_nouni%>%
                 bind_rows(boot_mae_diff_fdr)%>%
                 bind_rows(boot_mae_diff_bonferroni)%>%
  mutate(algorithm = as.factor(algorithm))%>%
   mutate(algorithm = factor(algorithm,levels =c ("FDR","Bonferroni","Elastic\nNet",
                                                "Random\nForest","Xgboost", 
                                                "Linear\nSVM",
                                                "Polynomial\nSVM" ,"RBF\nSVM")))%>%
  mutate(resp_factor= as.factor(response))%>%
  mutate(resp_factor = factor(response,levels =c("Pattern Speed", "Audi Verbal",
                                                 "Flanker","Seq Memory",
                                                 "Card Sort", # "Cog Flex",
                                                 "Little Man",
                                                 "List Work Mem","Matrix Reason",
                                                 "Reading Recog","Pic Vocab", 
                                                 "2-back Work Mem","gfactor" )))


boot_rmse_diff_nouni <- map(boot_diff_nouni,"RMSE")%>%
            do.call(rbind,.)

boot_rmse_diff_fdr <- map(uni_fdr_diff,"RMSE")%>%
            do.call(rbind,.)

boot_rmse_diff_bonferroni <- map(uni_bonferroni_diff,"RMSE")%>%
            do.call(rbind,.)

boot_rmse_diff <- boot_rmse_diff_nouni%>%
                 bind_rows(boot_rmse_diff_fdr)%>%
                 bind_rows(boot_rmse_diff_bonferroni)%>%
  mutate(algorithm = as.factor(algorithm))%>%
   mutate(algorithm = factor(algorithm,levels =c ("FDR","Bonferroni","Elastic\nNet",
                                                "Random\nForest","Xgboost", 
                                                "Linear\nSVM",
                                                "Polynomial\nSVM" ,"RBF\nSVM")))%>%
  mutate(resp_factor= as.factor(response))%>%
  mutate(resp_factor = factor(response,levels =c("Pattern Speed", "Audi Verbal",
                                                 "Flanker","Seq Memory",
                                                 "Card Sort", # "Cog Flex",
                                                 "Little Man",
                                                 "List Work Mem","Matrix Reason",
                                                 "Reading Recog","Pic Vocab", 
                                                 "2-back Work Mem","gfactor" )))

boot_diff_metric<- list(Correlation=boot_cor_diff,
                               Traditional_Rsquare=boot_rsq_diff, 
                               MAE= boot_mae_diff,
                               RMSE =boot_rmse_diff )
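Since these are percentile bootstraps, a quick way to judge whether an algorithm reliably outperforms OLS on a given response is to check whether the 95% interval of the difference excludes zero. A minimal sketch against the list built above (column names as created earlier):

boot_diff_metric[["Correlation"]] %>%
  filter(response == "gfactor", algorithm == "Elastic\nNet") %>%
  summarise(lower  = quantile(value, 0.025),
            median = quantile(value, 0.5),
            upper  = quantile(value, 0.975))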



color_diff_plot <- color_boot_plot[-4]


boot_plot_diff_list <-  map2(.x=boot_diff_metric,
                        .y = metric_vec, 
                        ~ggplot(data=.x,
                                aes(y = value, 
                                    x = resp_factor, 
                                    color= algorithm)) +
                          stat_pointinterval(position = position_dodge(width = 2, 
                                                                       preserve = "single"))+
                          ##hline instead of vline because of the coordinate flip
                            geom_hline(yintercept = 0, color = "grey55", linetype = "dashed",size=1.5) +
                          ggtitle(.y)+
                         coord_flip()+
                           theme(plot.title = element_text(size=15),
                                     axis.title.x = element_blank(),
                                     axis.title.y = element_blank(), 
                                     axis.text.x = element_text(size = 12),
                                     axis.text.y = element_text(size=12),
                                     legend.position = "bottom",
                                     legend.text=element_text(size=12),
                                     legend.title=element_text(size=15))+
                               guides(color = guide_legend(override.aes = list(size = 10)))+
                               scale_color_manual(values=color_boot_plot[-3],
                                                  labels = c ("FDR","Bonferroni",
                                                            "Elastic\nNet",
                                                              "Random\nForest","Xgboost", 
                                                              "Linear\nSVM",
                                                              "Polynomial\nSVM" ,"RBF\nSVM"))
                        )







boot_plot_diff_legend <- get_legend(boot_plot_diff_list[[1]])



title_boot_diff_plot <- ggdraw() + 
  draw_label(
    "Bootstrapped Distribution of the Differences in Predictive Performance:
Other Algorithms Minus OLS Regression",
    fontface = 'bold',
    x = 0,
    hjust = 0,
    size=21
  ) +
  theme(
    # add margin on the left of the drawing canvas,
    # so title is aligned with left edge of first plot
    plot.margin = margin(0, 0, 0, 7)
  )

plot_grid(title_boot_diff_plot,ggpubr::ggarrange(plotlist =boot_plot_diff_list, 
                  ncol = 2,
                  nrow = 2, 
                  common.legend = TRUE, 
                  legend = "bottom",
                  legend.grob = boot_plot_diff_legend),
          nrow = 2 , rel_heights = c(0.1, 1))

Plot the bootstrapped differences without the univariate algorithms (FDR and Bonferroni):

boot_diff_metricnouni <- map(boot_diff_metric, ~filter(.x,.data[["algorithm"]]!="FDR")%>%
                            filter(.data[["algorithm"]]!="Bonferroni"))


boot_plot_diff_listnouni <-  map2(.x=boot_diff_metricnouni,
                        .y = metric_vec, 
                        ~ggplot(data=.x,
                                aes(y = value, 
                                    x = resp_factor, 
                                    color= algorithm)) +
                          stat_pointinterval(position = position_dodge(width = 2, 
                                                                       preserve = "single"))+
                          ##hline instead of vline because of the coordinate flip
                            geom_hline(yintercept = 0, color = "grey55", linetype = "dashed",size=1.5) +
                          ggtitle(.y)+
                         coord_flip()+
                           theme(plot.title = element_text(size=15),
                                     axis.title.x = element_blank(),
                                     axis.title.y = element_blank(), 
                                     axis.text.x = element_text(size = 12),
                                     axis.text.y = element_text(size=12),
                                     legend.position = "bottom",
                                     legend.text=element_text(size=12),
                                     legend.title=element_text(size=15))+
                               guides(color = guide_legend(override.aes = list(size = 10)))+
                               scale_color_manual(values=color_boot_plot[-c(1,2,3)],
                                                  labels = c ("Elastic\nNet",
                                                              "Random\nForest","Xgboost", 
                                                              "Linear\nSVM",
                                                              "Polynomial\nSVM" ,"RBF\nSVM"))
                        )







boot_plot_diff_legendnouni <- get_legend(boot_plot_diff_listnouni[[1]])





plot_grid(title_boot_diff_plot,ggpubr::ggarrange(plotlist =boot_plot_diff_listnouni, 
                  ncol = 2,
                  nrow = 2, 
                  common.legend = TRUE, 
                  legend = "bottom",
                  legend.grob = boot_plot_diff_legendnouni),
          nrow = 2 , rel_heights = c(0.1, 1))

5.11.1 Getting the summary quantiles of the bootstrapped metrics

modality_vec_diff <- unique(boot_diff_metric[["Correlation"]][["modality"]])




# Summarise one response's bootstrapped differences per algorithm:
# 2.5%, 50% and 97.5% quantiles plus the mean.
boot_one_resp_processing_diff <- function(resp_input,data_input){
     one_resp <- filter(data_input, 
                        response == resp_input)
     
     quantile_one_resp <- one_resp%>% 
       group_by(algorithm)%>%
       summarise(quantile = c(0.025, 0.5, 0.975),
                 value = quantile(value, c(0.025,0.5,0.975)))%>% 
       ungroup()%>%
       pivot_wider(names_from = quantile,
                  values_from = value)
  
      mean_one_resp <- one_resp %>% 
        group_by(algorithm)%>%
        summarise(mean = mean(value))%>% 
        ungroup()
  
      metric_one_resp <- left_join(quantile_one_resp, 
                                   mean_one_resp, 
                                   by = "algorithm")%>% 
                         mutate(response = resp_input)
  return(metric_one_resp)
}   
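Note that summarise() returns three rows per algorithm here (one per quantile); dplyr 1.1+ flags multi-row summaries with a message and offers reframe() as the current idiom. An equivalent version of the quantile step inside the function above, assuming dplyr >= 1.1:

quantile_one_resp <- one_resp %>%
  group_by(algorithm) %>%
  reframe(quantile = c(0.025, 0.5, 0.975),
          value = quantile(value, c(0.025, 0.5, 0.975))) %>%
  pivot_wider(names_from = quantile, values_from = value)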

# Apply the per-response summary to every response and stack the results.
boot_quantile_processing_diff <- function(data_input){
     all_resp <- resp_vec %>% map(.,
                                  ~ boot_one_resp_processing_diff(resp_input = .,
                                                             data_input))%>%
       do.call(rbind,.)
  return(all_resp)
}


kable_boot_metric_diff <- boot_diff_metric %>% 
  map(.,
      ~boot_quantile_processing_diff(data_input = .))

kable_metric_vars_diff <- colnames(kable_boot_metric_diff[[1]])

kable_boot_metric_vars_diff <- kable_boot_metric_diff   %>% 
  map(.,~arrange(.,desc(match(response,
                      c("Pattern Speed", 
                        "Audi Verbal",
                        "Flanker",
                        "Seq Memory",
                        "Card Sort", # "Cog Flex",
                        "Little Man",
                        "List Work Mem",
                        "Matrix Reason",
                        "Reading Recog",
                        "Pic Vocab", 
                        "2-back Work Mem",
                        "gfactor" )))) %>%
        mutate_if(is.numeric, round, 3) %>%
        relocate(response,algorithm))
  


  kable_boot_metric_vars_diff[[1]] %>% 
    kableExtra::kbl(caption = paste0(metric_names[1])) %>%
    kableExtra::kable_classic(full_width = F, 
                             html_font = "Cambria")
Correlation
response algorithm 2.5% 50% 97.5% mean
gfactor FDR -0.426 -0.317 -0.170 -0.311
gfactor Bonferroni -0.405 -0.297 -0.162 -0.292
gfactor Elastic Net 0.004 0.022 0.041 0.022
gfactor Random Forest -0.028 0.010 0.045 0.009
gfactor Xgboost -0.038 -0.001 0.036 -0.001
gfactor Linear SVM 0.000 0.019 0.038 0.019
gfactor Polynomial SVM 0.000 0.019 0.038 0.019
gfactor RBF SVM 0.004 0.024 0.044 0.024
2-back Work Mem FDR -0.516 -0.400 -0.253 -0.396
2-back Work Mem Bonferroni -0.489 -0.378 -0.243 -0.374
2-back Work Mem Elastic Net 0.008 0.021 0.034 0.021
2-back Work Mem Random Forest -0.056 -0.020 0.015 -0.020
2-back Work Mem Xgboost -0.065 -0.034 -0.002 -0.034
2-back Work Mem Linear SVM -0.012 0.007 0.024 0.007
2-back Work Mem Polynomial SVM -0.011 0.007 0.025 0.007
2-back Work Mem RBF SVM 0.000 0.018 0.036 0.018
Pic Vocab FDR -0.353 -0.234 -0.132 -0.237
Pic Vocab Bonferroni -0.333 -0.221 -0.127 -0.224
Pic Vocab Elastic Net 0.016 0.039 0.063 0.039
Pic Vocab Random Forest -0.025 0.015 0.057 0.016
Pic Vocab Xgboost -0.017 0.021 0.059 0.021
Pic Vocab Linear SVM -0.006 0.017 0.042 0.018
Pic Vocab Polynomial SVM -0.004 0.020 0.045 0.020
Pic Vocab RBF SVM -0.004 0.020 0.044 0.020
Reading Recog FDR -0.295 -0.179 -0.064 -0.179
Reading Recog Bonferroni -0.252 -0.152 -0.053 -0.152
Reading Recog Elastic Net 0.026 0.055 0.084 0.055
Reading Recog Random Forest -0.007 0.038 0.083 0.038
Reading Recog Xgboost -0.013 0.032 0.078 0.032
Reading Recog Linear SVM 0.008 0.031 0.053 0.031
Reading Recog Polynomial SVM 0.015 0.041 0.066 0.041
Reading Recog RBF SVM 0.016 0.044 0.072 0.044
Matrix Reason FDR -0.313 -0.203 -0.081 -0.201
Matrix Reason Bonferroni -0.298 -0.181 -0.070 -0.181
Matrix Reason Elastic Net -0.021 0.009 0.039 0.009
Matrix Reason Random Forest -0.051 -0.007 0.035 -0.007
Matrix Reason Xgboost -0.061 -0.018 0.026 -0.018
Matrix Reason Linear SVM -0.013 0.011 0.035 0.011
Matrix Reason Polynomial SVM -0.036 0.000 0.035 0.000
Matrix Reason RBF SVM -0.017 0.010 0.037 0.010
List Work Mem FDR -0.285 -0.186 -0.075 -0.183
List Work Mem Bonferroni -0.274 -0.171 -0.066 -0.170
List Work Mem Elastic Net 0.006 0.035 0.063 0.035
List Work Mem Random Forest -0.038 0.002 0.042 0.002
List Work Mem Xgboost -0.037 0.003 0.042 0.003
List Work Mem Linear SVM -0.013 0.013 0.039 0.013
List Work Mem Polynomial SVM -0.012 0.019 0.049 0.019
List Work Mem RBF SVM -0.013 0.019 0.051 0.019
Little Man FDR -0.268 -0.168 -0.071 -0.169
Little Man Bonferroni -0.251 -0.154 -0.063 -0.155
Little Man Elastic Net -0.004 0.024 0.051 0.024
Little Man Random Forest -0.045 0.001 0.044 0.000
Little Man Xgboost -0.038 0.009 0.054 0.009
Little Man Linear SVM -0.006 0.020 0.045 0.020
Little Man Polynomial SVM -0.032 0.006 0.043 0.006
Little Man RBF SVM -0.010 0.027 0.061 0.027
Card Sort FDR -0.230 -0.134 -0.042 -0.134
Card Sort Bonferroni -0.215 -0.121 -0.035 -0.122
Card Sort Elastic Net -0.013 0.025 0.062 0.024
Card Sort Random Forest -0.040 0.014 0.071 0.014
Card Sort Xgboost -0.042 0.012 0.071 0.013
Card Sort Linear SVM -0.035 -0.005 0.025 -0.005
Card Sort Polynomial SVM -0.012 0.026 0.066 0.027
Card Sort RBF SVM -0.014 0.029 0.074 0.029
Seq Memory FDR -0.171 -0.061 0.045 -0.061
Seq Memory Bonferroni -0.147 -0.034 0.055 -0.038
Seq Memory Elastic Net 0.001 0.045 0.090 0.045
Seq Memory Random Forest -0.011 0.040 0.093 0.040
Seq Memory Xgboost 0.005 0.057 0.111 0.057
Seq Memory Linear SVM 0.007 0.040 0.073 0.040
Seq Memory Polynomial SVM 0.007 0.050 0.094 0.050
Seq Memory RBF SVM 0.008 0.050 0.094 0.051
Flanker FDR -0.194 -0.065 0.051 -0.067
Flanker Bonferroni -0.155 -0.030 0.065 -0.034
Flanker Elastic Net 0.030 0.071 0.113 0.071
Flanker Random Forest -0.006 0.054 0.114 0.053
Flanker Xgboost -0.015 0.044 0.102 0.043
Flanker Linear SVM 0.013 0.049 0.085 0.049
Flanker Polynomial SVM 0.021 0.063 0.108 0.064
Flanker RBF SVM -0.039 0.017 0.072 0.017
Audi Verbal FDR -0.139 -0.036 0.063 -0.037
Audi Verbal Bonferroni -0.103 -0.015 0.075 -0.015
Audi Verbal Elastic Net 0.012 0.048 0.084 0.048
Audi Verbal Random Forest 0.005 0.063 0.123 0.063
Audi Verbal Xgboost -0.013 0.048 0.109 0.048
Audi Verbal Linear SVM 0.001 0.032 0.063 0.032
Audi Verbal Polynomial SVM 0.011 0.056 0.101 0.056
Audi Verbal RBF SVM 0.009 0.056 0.103 0.056
Pattern Speed FDR -0.164 -0.064 0.026 -0.066
Pattern Speed Bonferroni -0.151 -0.051 0.035 -0.053
Pattern Speed Elastic Net -0.026 0.027 0.079 0.027
Pattern Speed Random Forest -0.020 0.041 0.098 0.040
Pattern Speed Xgboost -0.029 0.034 0.093 0.033
Pattern Speed Linear SVM -0.018 0.017 0.052 0.017
Pattern Speed Polynomial SVM -0.022 0.028 0.075 0.027
Pattern Speed RBF SVM -0.022 0.030 0.080 0.030
  kable_boot_metric_vars_diff[[2]] %>% 
    kableExtra::kbl(caption = paste0(metric_names[2])) %>%
    kableExtra::kable_classic(full_width = F, 
                             html_font = "Cambria")
Traditional_Rsquare
response algorithm 2.5% 50% 97.5% mean
gfactor FDR -0.202 -0.148 -0.086 -0.147
gfactor Bonferroni -0.199 -0.144 -0.082 -0.143
gfactor Elastic Net 0.007 0.028 0.049 0.028
gfactor Random Forest -0.022 0.012 0.047 0.012
gfactor Xgboost -0.028 0.007 0.043 0.007
gfactor Linear SVM 0.000 0.022 0.047 0.023
gfactor Polynomial SVM 0.001 0.023 0.047 0.023
gfactor RBF SVM 0.007 0.028 0.052 0.028
2-back Work Mem FDR -0.280 -0.230 -0.171 -0.228
2-back Work Mem Bonferroni -0.276 -0.225 -0.166 -0.224
2-back Work Mem Elastic Net 0.005 0.020 0.037 0.021
2-back Work Mem Random Forest -0.057 -0.024 0.011 -0.023
2-back Work Mem Xgboost -0.064 -0.033 -0.001 -0.033
2-back Work Mem Linear SVM -0.013 0.007 0.026 0.007
2-back Work Mem Polynomial SVM -0.013 0.007 0.027 0.007
2-back Work Mem RBF SVM -0.003 0.017 0.038 0.017
Pic Vocab FDR -0.123 -0.078 -0.029 -0.077
Pic Vocab Bonferroni -0.122 -0.076 -0.028 -0.076
Pic Vocab Elastic Net 0.018 0.039 0.062 0.039
Pic Vocab Random Forest -0.012 0.019 0.055 0.020
Pic Vocab Xgboost -0.005 0.025 0.058 0.026
Pic Vocab Linear SVM -0.006 0.016 0.042 0.017
Pic Vocab Polynomial SVM 0.000 0.022 0.046 0.022
Pic Vocab RBF SVM 0.000 0.021 0.044 0.021
Reading Recog FDR -0.094 -0.049 -0.001 -0.049
Reading Recog Bonferroni -0.089 -0.044 0.003 -0.044
Reading Recog Elastic Net 0.019 0.043 0.069 0.043
Reading Recog Random Forest 0.000 0.032 0.066 0.032
Reading Recog Xgboost -0.005 0.027 0.062 0.027
Reading Recog Linear SVM 0.005 0.026 0.049 0.027
Reading Recog Polynomial SVM 0.006 0.030 0.056 0.030
Reading Recog RBF SVM 0.007 0.032 0.059 0.032
Matrix Reason FDR -0.103 -0.057 -0.009 -0.057
Matrix Reason Bonferroni -0.100 -0.054 -0.005 -0.054
Matrix Reason Elastic Net -0.007 0.017 0.043 0.017
Matrix Reason Random Forest -0.024 0.008 0.040 0.008
Matrix Reason Xgboost -0.033 0.000 0.034 0.000
Matrix Reason Linear SVM -0.003 0.017 0.039 0.018
Matrix Reason Polynomial SVM -0.016 0.010 0.036 0.010
Matrix Reason RBF SVM -0.006 0.017 0.040 0.017
List Work Mem FDR -0.086 -0.043 0.003 -0.042
List Work Mem Bonferroni -0.085 -0.040 0.005 -0.040
List Work Mem Elastic Net 0.012 0.036 0.061 0.036
List Work Mem Random Forest -0.012 0.019 0.049 0.019
List Work Mem Xgboost -0.011 0.018 0.047 0.018
List Work Mem Linear SVM -0.001 0.022 0.045 0.022
List Work Mem Polynomial SVM -0.002 0.025 0.051 0.025
List Work Mem RBF SVM -0.002 0.025 0.052 0.025
Little Man FDR -0.079 -0.037 0.006 -0.037
Little Man Bonferroni -0.077 -0.036 0.007 -0.035
Little Man Elastic Net 0.005 0.026 0.049 0.027
Little Man Random Forest -0.016 0.014 0.044 0.014
Little Man Xgboost -0.013 0.018 0.048 0.018
Little Man Linear SVM -0.003 0.017 0.036 0.017
Little Man Polynomial SVM -0.016 0.012 0.038 0.011
Little Man RBF SVM -0.001 0.025 0.051 0.025
Card Sort FDR -0.055 -0.014 0.029 -0.014
Card Sort Bonferroni -0.053 -0.012 0.030 -0.012
Card Sort Elastic Net 0.004 0.031 0.060 0.031
Card Sort Random Forest -0.006 0.028 0.063 0.028
Card Sort Xgboost -0.008 0.026 0.063 0.027
Card Sort Linear SVM -0.005 0.017 0.040 0.017
Card Sort Polynomial SVM 0.002 0.031 0.061 0.031
Card Sort RBF SVM 0.004 0.033 0.065 0.033
Seq Memory FDR -0.008 0.034 0.077 0.034
Seq Memory Bonferroni -0.005 0.037 0.080 0.037
Seq Memory Elastic Net 0.028 0.057 0.088 0.057
Seq Memory Random Forest 0.022 0.054 0.089 0.054
Seq Memory Xgboost 0.027 0.061 0.097 0.061
Seq Memory Linear SVM 0.019 0.042 0.066 0.042
Seq Memory Polynomial SVM 0.026 0.056 0.086 0.056
Seq Memory RBF SVM 0.026 0.055 0.086 0.056
Flanker FDR -0.023 0.023 0.070 0.023
Flanker Bonferroni -0.017 0.028 0.074 0.028
Flanker Elastic Net 0.031 0.060 0.092 0.061
Flanker Random Forest 0.015 0.052 0.092 0.053
Flanker Xgboost 0.012 0.049 0.087 0.049
Flanker Linear SVM 0.011 0.041 0.074 0.041
Flanker Polynomial SVM 0.010 0.044 0.081 0.044
Flanker RBF SVM -0.007 0.030 0.070 0.030
Audi Verbal FDR 0.011 0.055 0.101 0.056
Audi Verbal Bonferroni 0.014 0.058 0.104 0.058
Audi Verbal Elastic Net 0.041 0.068 0.095 0.068
Audi Verbal Random Forest 0.040 0.076 0.112 0.076
Audi Verbal Xgboost 0.026 0.068 0.109 0.067
Audi Verbal Linear SVM 0.029 0.054 0.078 0.054
Audi Verbal Polynomial SVM 0.038 0.071 0.105 0.071
Audi Verbal RBF SVM 0.038 0.070 0.103 0.070
Pattern Speed FDR -0.021 0.014 0.051 0.014
Pattern Speed Bonferroni -0.019 0.016 0.052 0.016
Pattern Speed Elastic Net 0.003 0.032 0.062 0.032
Pattern Speed Random Forest 0.007 0.039 0.070 0.039
Pattern Speed Xgboost 0.004 0.035 0.067 0.036
Pattern Speed Linear SVM 0.008 0.028 0.049 0.028
Pattern Speed Polynomial SVM 0.005 0.033 0.062 0.033
Pattern Speed RBF SVM 0.005 0.033 0.063 0.033
  kable_boot_metric_vars_diff[[3]] %>% 
    kableExtra::kbl(caption = paste0(metric_names[3])) %>%
    kableExtra::kable_classic(full_width = F, 
                             html_font = "Cambria")
MAE
response algorithm 2.5% 50% 97.5% mean
gfactor FDR 0.028 0.057 0.085 0.057
gfactor Bonferroni 0.027 0.056 0.084 0.056
gfactor Elastic Net -0.023 -0.013 -0.002 -0.013
gfactor Random Forest -0.026 -0.008 0.009 -0.008
gfactor Xgboost -0.021 -0.004 0.013 -0.004
gfactor Linear SVM -0.023 -0.012 0.000 -0.012
gfactor Polynomial SVM -0.024 -0.013 -0.001 -0.013
gfactor RBF SVM -0.026 -0.015 -0.004 -0.015
2-back Work Mem FDR 0.088 0.120 0.150 0.120
2-back Work Mem Bonferroni 0.087 0.118 0.148 0.118
2-back Work Mem Elastic Net -0.015 -0.005 0.004 -0.005
2-back Work Mem Random Forest -0.001 0.018 0.037 0.018
2-back Work Mem Xgboost 0.003 0.021 0.039 0.021
2-back Work Mem Linear SVM -0.013 -0.002 0.010 -0.002
2-back Work Mem Polynomial SVM -0.013 -0.002 0.009 -0.002
2-back Work Mem RBF SVM -0.020 -0.009 0.003 -0.009
Pic Vocab FDR 0.010 0.034 0.058 0.034
Pic Vocab Bonferroni 0.009 0.033 0.057 0.033
Pic Vocab Elastic Net -0.029 -0.017 -0.007 -0.018
Pic Vocab Random Forest -0.019 -0.002 0.014 -0.002
Pic Vocab Xgboost -0.023 -0.007 0.008 -0.008
Pic Vocab Linear SVM -0.025 -0.013 -0.001 -0.013
Pic Vocab Polynomial SVM -0.025 -0.014 -0.002 -0.014
Pic Vocab RBF SVM -0.024 -0.013 -0.002 -0.013
Reading Recog FDR -0.009 0.012 0.033 0.012
Reading Recog Bonferroni -0.010 0.011 0.032 0.011
Reading Recog Elastic Net -0.032 -0.021 -0.010 -0.021
Reading Recog Random Forest -0.033 -0.018 -0.002 -0.017
Reading Recog Xgboost -0.029 -0.013 0.003 -0.013
Reading Recog Linear SVM -0.031 -0.020 -0.010 -0.020
Reading Recog Polynomial SVM -0.033 -0.022 -0.011 -0.022
Reading Recog RBF SVM -0.034 -0.022 -0.010 -0.022
Matrix Reason FDR -0.004 0.019 0.041 0.019
Matrix Reason Bonferroni -0.006 0.017 0.039 0.017
Matrix Reason Elastic Net -0.025 -0.014 -0.002 -0.014
Matrix Reason Random Forest -0.026 -0.010 0.005 -0.010
Matrix Reason Xgboost -0.024 -0.008 0.008 -0.008
Matrix Reason Linear SVM -0.024 -0.014 -0.003 -0.014
Matrix Reason Polynomial SVM -0.022 -0.009 0.004 -0.009
Matrix Reason RBF SVM -0.026 -0.015 -0.004 -0.015
List Work Mem FDR -0.005 0.017 0.039 0.017
List Work Mem Bonferroni -0.006 0.016 0.038 0.016
List Work Mem Elastic Net -0.024 -0.012 0.000 -0.012
List Work Mem Random Forest -0.021 -0.007 0.008 -0.007
List Work Mem Xgboost -0.022 -0.008 0.006 -0.008
List Work Mem Linear SVM -0.018 -0.007 0.004 -0.007
List Work Mem Polynomial SVM -0.021 -0.008 0.005 -0.008
List Work Mem RBF SVM -0.021 -0.008 0.005 -0.008
Little Man FDR 0.005 0.025 0.046 0.025
Little Man Bonferroni 0.003 0.024 0.044 0.024
Little Man Elastic Net -0.018 -0.006 0.005 -0.006
Little Man Random Forest -0.016 -0.001 0.014 -0.001
Little Man Xgboost -0.016 -0.001 0.014 -0.001
Little Man Linear SVM -0.015 -0.006 0.004 -0.006
Little Man Polynomial SVM -0.017 -0.004 0.009 -0.004
Little Man RBF SVM -0.021 -0.009 0.004 -0.009
Card Sort FDR -0.022 -0.002 0.018 -0.002
Card Sort Bonferroni -0.022 -0.003 0.017 -0.003
Card Sort Elastic Net -0.032 -0.018 -0.005 -0.019
Card Sort Random Forest -0.035 -0.019 -0.004 -0.019
Card Sort Xgboost -0.034 -0.017 -0.001 -0.017
Card Sort Linear SVM -0.024 -0.013 -0.003 -0.013
Card Sort Polynomial SVM -0.034 -0.020 -0.006 -0.020
Card Sort RBF SVM -0.033 -0.019 -0.006 -0.019
Seq Memory FDR -0.019 0.000 0.020 0.000
Seq Memory Bonferroni -0.020 -0.001 0.018 -0.001
Seq Memory Elastic Net -0.029 -0.016 -0.002 -0.016
Seq Memory Random Forest -0.030 -0.015 0.000 -0.015
Seq Memory Xgboost -0.032 -0.017 -0.002 -0.017
Seq Memory Linear SVM -0.022 -0.012 -0.001 -0.012
Seq Memory Polynomial SVM -0.029 -0.015 -0.002 -0.015
Seq Memory RBF SVM -0.029 -0.016 -0.002 -0.016
Flanker FDR -0.024 -0.005 0.014 -0.005
Flanker Bonferroni -0.026 -0.007 0.012 -0.007
Flanker Elastic Net -0.037 -0.025 -0.013 -0.025
Flanker Random Forest -0.036 -0.020 -0.004 -0.020
Flanker Xgboost -0.036 -0.020 -0.005 -0.020
Flanker Linear SVM -0.039 -0.026 -0.013 -0.026
Flanker Polynomial SVM -0.042 -0.027 -0.013 -0.027
Flanker RBF SVM -0.039 -0.023 -0.008 -0.023
Audi Verbal FDR -0.035 -0.016 0.004 -0.016
Audi Verbal Bonferroni -0.036 -0.017 0.003 -0.017
Audi Verbal Elastic Net -0.033 -0.021 -0.010 -0.021
Audi Verbal Random Forest -0.040 -0.025 -0.008 -0.024
Audi Verbal Xgboost -0.039 -0.021 -0.003 -0.021
Audi Verbal Linear SVM -0.031 -0.020 -0.009 -0.020
Audi Verbal Polynomial SVM -0.039 -0.025 -0.011 -0.025
Audi Verbal RBF SVM -0.038 -0.024 -0.010 -0.024
Pattern Speed FDR -0.024 -0.008 0.008 -0.008
Pattern Speed Bonferroni -0.025 -0.009 0.007 -0.009
Pattern Speed Elastic Net -0.030 -0.017 -0.004 -0.017
Pattern Speed Random Forest -0.032 -0.018 -0.004 -0.018
Pattern Speed Xgboost -0.033 -0.019 -0.005 -0.019
Pattern Speed Linear SVM -0.026 -0.016 -0.006 -0.016
Pattern Speed Polynomial SVM -0.031 -0.018 -0.005 -0.018
Pattern Speed RBF SVM -0.032 -0.018 -0.005 -0.018
  kable_boot_metric_vars_diff[[4]] %>% 
    kableExtra::kbl(caption = paste0(metric_names[4])) %>%
    kableExtra::kable_classic(full_width = F, 
                             html_font = "Cambria")
RMSE
response algorithm 2.5% 50% 97.5% mean
gfactor FDR 0.044 0.077 0.109 0.077
gfactor Bonferroni 0.042 0.075 0.107 0.075
gfactor Elastic Net -0.026 -0.015 -0.004 -0.015
gfactor Random Forest -0.025 -0.007 0.013 -0.007
gfactor Xgboost -0.023 -0.004 0.016 -0.003
gfactor Linear SVM -0.025 -0.012 0.000 -0.012
gfactor Polynomial SVM -0.025 -0.013 -0.001 -0.013
gfactor RBF SVM -0.028 -0.016 -0.004 -0.016
2-back Work Mem FDR 0.090 0.123 0.153 0.122
2-back Work Mem Bonferroni 0.088 0.121 0.151 0.120
2-back Work Mem Elastic Net -0.021 -0.012 -0.003 -0.012
2-back Work Mem Random Forest -0.006 0.013 0.033 0.013
2-back Work Mem Xgboost 0.000 0.019 0.037 0.019
2-back Work Mem Linear SVM -0.015 -0.004 0.008 -0.004
2-back Work Mem Polynomial SVM -0.015 -0.004 0.007 -0.004
2-back Work Mem RBF SVM -0.022 -0.010 0.002 -0.010
Pic Vocab FDR 0.015 0.040 0.065 0.040
Pic Vocab Bonferroni 0.014 0.039 0.064 0.039
Pic Vocab Elastic Net -0.032 -0.020 -0.010 -0.020
Pic Vocab Random Forest -0.028 -0.010 0.007 -0.010
Pic Vocab Xgboost -0.030 -0.013 0.002 -0.013
Pic Vocab Linear SVM -0.021 -0.009 0.003 -0.009
Pic Vocab Polynomial SVM -0.024 -0.011 0.000 -0.011
Pic Vocab RBF SVM -0.022 -0.011 0.000 -0.011
Reading Recog FDR 0.000 0.025 0.049 0.025
Reading Recog Bonferroni -0.002 0.022 0.047 0.022
Reading Recog Elastic Net -0.035 -0.022 -0.010 -0.022
Reading Recog Random Forest -0.033 -0.016 0.000 -0.017
Reading Recog Xgboost -0.031 -0.014 0.003 -0.014
Reading Recog Linear SVM -0.025 -0.014 -0.003 -0.014
Reading Recog Polynomial SVM -0.028 -0.016 -0.003 -0.016
Reading Recog RBF SVM -0.029 -0.017 -0.004 -0.017
Matrix Reason FDR 0.004 0.029 0.053 0.029
Matrix Reason Bonferroni 0.003 0.027 0.052 0.027
Matrix Reason Elastic Net -0.022 -0.009 0.003 -0.009
Matrix Reason Random Forest -0.020 -0.004 0.013 -0.004
Matrix Reason Xgboost -0.017 0.000 0.017 0.000
Matrix Reason Linear SVM -0.020 -0.009 0.002 -0.009
Matrix Reason Polynomial SVM -0.019 -0.005 0.008 -0.005
Matrix Reason RBF SVM -0.020 -0.009 0.003 -0.009
List Work Mem FDR -0.001 0.022 0.044 0.022
List Work Mem Bonferroni -0.003 0.020 0.044 0.020
List Work Mem Elastic Net -0.031 -0.019 -0.006 -0.019
List Work Mem Random Forest -0.024 -0.010 0.006 -0.009
List Work Mem Xgboost -0.024 -0.009 0.006 -0.009
List Work Mem Linear SVM -0.023 -0.011 0.000 -0.011
List Work Mem Polynomial SVM -0.026 -0.013 0.001 -0.013
List Work Mem RBF SVM -0.026 -0.013 0.001 -0.013
Little Man FDR -0.003 0.019 0.041 0.019
Little Man Bonferroni -0.004 0.018 0.039 0.018
Little Man Elastic Net -0.025 -0.014 -0.002 -0.014
Little Man Random Forest -0.022 -0.007 0.008 -0.007
Little Man Xgboost -0.025 -0.009 0.007 -0.009
Little Man Linear SVM -0.019 -0.009 0.002 -0.009
Little Man Polynomial SVM -0.020 -0.006 0.008 -0.006
Little Man RBF SVM -0.026 -0.013 0.000 -0.013
Card Sort FDR -0.014 0.007 0.028 0.007
Card Sort Bonferroni -0.015 0.006 0.027 0.006
Card Sort Elastic Net -0.030 -0.016 -0.002 -0.016
Card Sort Random Forest -0.031 -0.014 0.003 -0.014
Card Sort Xgboost -0.031 -0.013 0.004 -0.013
Card Sort Linear SVM -0.020 -0.009 0.003 -0.009
Card Sort Polynomial SVM -0.031 -0.016 -0.001 -0.016
Card Sort RBF SVM -0.032 -0.017 -0.002 -0.017
Seq Memory FDR -0.038 -0.017 0.004 -0.017
Seq Memory Bonferroni -0.039 -0.018 0.003 -0.018
Seq Memory Elastic Net -0.043 -0.028 -0.014 -0.028
Seq Memory Random Forest -0.043 -0.027 -0.011 -0.027
Seq Memory Xgboost -0.047 -0.030 -0.014 -0.030
Seq Memory Linear SVM -0.032 -0.021 -0.009 -0.021
Seq Memory Polynomial SVM -0.042 -0.028 -0.013 -0.028
Seq Memory RBF SVM -0.042 -0.028 -0.013 -0.028
Flanker FDR -0.034 -0.011 0.012 -0.011
Flanker Bonferroni -0.036 -0.014 0.009 -0.014
Flanker Elastic Net -0.045 -0.030 -0.016 -0.030
Flanker Random Forest -0.045 -0.026 -0.008 -0.026
Flanker Xgboost -0.043 -0.024 -0.006 -0.025
Flanker Linear SVM -0.036 -0.021 -0.005 -0.021
Flanker Polynomial SVM -0.040 -0.022 -0.005 -0.022
Flanker RBF SVM -0.034 -0.015 0.004 -0.015
Audi Verbal FDR -0.049 -0.027 -0.005 -0.027
Audi Verbal Bonferroni -0.050 -0.029 -0.007 -0.029
Audi Verbal Elastic Net -0.046 -0.034 -0.021 -0.034
Audi Verbal Random Forest -0.055 -0.038 -0.020 -0.038
Audi Verbal Xgboost -0.053 -0.033 -0.013 -0.033
Audi Verbal Linear SVM -0.038 -0.026 -0.015 -0.026
Audi Verbal Polynomial SVM -0.051 -0.035 -0.019 -0.035
Audi Verbal RBF SVM -0.050 -0.035 -0.019 -0.035
Pattern Speed FDR -0.025 -0.007 0.011 -0.007
Pattern Speed Bonferroni -0.026 -0.008 0.010 -0.008
Pattern Speed Elastic Net -0.031 -0.016 -0.002 -0.016
Pattern Speed Random Forest -0.035 -0.019 -0.004 -0.019
Pattern Speed Xgboost -0.033 -0.018 -0.002 -0.018
Pattern Speed Linear SVM -0.024 -0.014 -0.004 -0.014
Pattern Speed Polynomial SVM -0.030 -0.017 -0.003 -0.017
Pattern Speed RBF SVM -0.031 -0.017 -0.002 -0.017

5.12 Bootstrapping performance differences between the other algorithms and the elastic net

As above, every metric is computed on each bootstrap sample, but each algorithm's value is now differenced against the elastic net rather than OLS.

# Boot statistic: on bootstrap sample i, compute the correlation, traditional
# R-squared, MAE and RMSE for each algorithm (OLS, the three SVMs, random
# forest, XGBoost) and return each value minus the elastic net value.
perfmatrics_diff_all_enet <-function(data,i){
  
  cor_enet <- cor(data$enet_pred[i],
                     data$model_resp[i],
                     use = "pairwise.complete.obs")
  
  tradrsq_enet <- yardstick::rsq_trad(data=data, 
                                       truth=.data$model_resp[i], 
                                       estimate=.data$enet_pred[i])
  
  mae_enet <- yardstick::mae(data =data, 
                            truth=.data$model_resp[i], 
                            estimate=.data$enet_pred[i])
  
  rmse_enet <- yardstick::rmse(data =data, 
                              truth=.data$model_resp[i], 
                              estimate=.data$enet_pred[i])
  
  
  
  cor_ols <- cor(data$ols_pred[i],
                     data$model_resp[i],
                     use = "pairwise.complete.obs")
  
  tradrsq_ols <- yardstick::rsq_trad(data=data, 
                                       truth=.data$model_resp[i], 
                                       estimate=.data$ols_pred[i])
  
  mae_ols <- yardstick::mae(data =data, 
                            truth=.data$model_resp[i], 
                            estimate=.data$ols_pred[i])
  
  rmse_ols <- yardstick::rmse(data =data, 
                              truth=.data$model_resp[i], 
                              estimate=.data$ols_pred[i])
  
  cor_diff_ols <- cor_ols-cor_enet
  
  tradrsq_diff_ols <- tradrsq_ols$.estimate-tradrsq_enet$.estimate
  
  mae_diff_ols <- mae_ols$.estimate-mae_enet$.estimate

  rmse_diff_ols <- rmse_ols$.estimate-rmse_enet$.estimate

  cor_svm_rbf <- cor(data$svm_rbf_pred[i],
                     data$model_resp[i],
                     use = "pairwise.complete.obs")
  
  tradrsq_svm_rbf <- yardstick::rsq_trad(data=data, 
                                       truth=.data$model_resp[i], 
                                       estimate=.data$svm_rbf_pred[i])
  
  mae_svm_rbf <- yardstick::mae(data =data, 
                            truth=.data$model_resp[i], 
                            estimate=.data$svm_rbf_pred[i])
  
  rmse_svm_rbf <- yardstick::rmse(data =data, 
                              truth=.data$model_resp[i], 
                              estimate=.data$svm_rbf_pred[i])
  
  cor_diff_svm_rbf <- cor_svm_rbf-cor_enet
  
  tradrsq_diff_svm_rbf <- tradrsq_svm_rbf$.estimate-tradrsq_enet$.estimate
  
  mae_diff_svm_rbf <- mae_svm_rbf$.estimate-mae_enet$.estimate

  rmse_diff_svm_rbf <- rmse_svm_rbf$.estimate-rmse_enet$.estimate
  
  
   cor_svm_linear <- cor(data$svm_linear_pred[i],
                     data$model_resp[i],
                     use = "pairwise.complete.obs")
  
  tradrsq_svm_linear <- yardstick::rsq_trad(data=data, 
                                       truth=.data$model_resp[i], 
                                       estimate=.data$svm_linear_pred[i])
  
  mae_svm_linear <- yardstick::mae(data =data, 
                            truth=.data$model_resp[i], 
                            estimate=.data$svm_linear_pred[i])
  
  rmse_svm_linear <- yardstick::rmse(data =data, 
                              truth=.data$model_resp[i], 
                              estimate=.data$svm_linear_pred[i])
  
  cor_diff_svm_linear <- cor_svm_linear-cor_enet
  
  tradrsq_diff_svm_linear <- tradrsq_svm_linear$.estimate-tradrsq_enet$.estimate
  
  mae_diff_svm_linear <- mae_svm_linear$.estimate-mae_enet$.estimate

  rmse_diff_svm_linear <- rmse_svm_linear$.estimate-rmse_enet$.estimate
  
  
   cor_svm_poly <- cor(data$svm_poly_pred[i],
                     data$model_resp[i],
                     use = "pairwise.complete.obs")
  
  tradrsq_svm_poly <- yardstick::rsq_trad(data=data, 
                                       truth=.data$model_resp[i], 
                                       estimate=.data$svm_poly_pred[i])
  
  mae_svm_poly <- yardstick::mae(data =data, 
                            truth=.data$model_resp[i], 
                            estimate=.data$svm_poly_pred[i])
  
  rmse_svm_poly <- yardstick::rmse(data =data, 
                              truth=.data$model_resp[i], 
                              estimate=.data$svm_poly_pred[i])
  
  cor_diff_svm_poly <- cor_svm_poly-cor_enet
  
  tradrsq_diff_svm_poly <- tradrsq_svm_poly$.estimate-tradrsq_enet$.estimate
  
  mae_diff_svm_poly <- mae_svm_poly$.estimate-mae_enet$.estimate

  rmse_diff_svm_poly <- rmse_svm_poly$.estimate-rmse_enet$.estimate
  
  
   cor_random_forest <- cor(data$random_forest_pred[i],
                     data$model_resp[i],
                     use = "pairwise.complete.obs")
  
  tradrsq_random_forest <- yardstick::rsq_trad(data=data, 
                                       truth=.data$model_resp[i], 
                                       estimate=.data$random_forest_pred[i])
  
  mae_random_forest <- yardstick::mae(data =data, 
                            truth=.data$model_resp[i], 
                            estimate=.data$random_forest_pred[i])
  
  rmse_random_forest <- yardstick::rmse(data =data, 
                              truth=.data$model_resp[i], 
                              estimate=.data$random_forest_pred[i])
  
  cor_diff_random_forest <- cor_random_forest-cor_enet
  
  tradrsq_diff_random_forest <- tradrsq_random_forest$.estimate-tradrsq_enet$.estimate
  
  mae_diff_random_forest <- mae_random_forest$.estimate-mae_enet$.estimate

  rmse_diff_random_forest <- rmse_random_forest$.estimate-rmse_enet$.estimate
  
  
   cor_xgboost <- cor(data$xgboost_pred[i],
                     data$model_resp[i],
                     use = "pairwise.complete.obs")
  
  tradrsq_xgboost <- yardstick::rsq_trad(data=data, 
                                       truth=.data$model_resp[i], 
                                       estimate=.data$xgboost_pred[i])
  
  mae_xgboost <- yardstick::mae(data =data, 
                            truth=.data$model_resp[i], 
                            estimate=.data$xgboost_pred[i])
  
  rmse_xgboost <- yardstick::rmse(data =data, 
                              truth=.data$model_resp[i], 
                              estimate=.data$xgboost_pred[i])
  
  cor_diff_xgboost <- cor_xgboost-cor_enet
  
  tradrsq_diff_xgboost <- tradrsq_xgboost$.estimate-tradrsq_enet$.estimate
  
  mae_diff_xgboost <- mae_xgboost$.estimate-mae_enet$.estimate

  rmse_diff_xgboost <- rmse_xgboost$.estimate-rmse_enet$.estimate
  
  return(c(cor_diff_ols,  tradrsq_diff_ols , mae_diff_ols, rmse_diff_ols,
           cor_diff_svm_rbf,  tradrsq_diff_svm_rbf , mae_diff_svm_rbf, rmse_diff_svm_rbf,
           cor_diff_svm_linear,  tradrsq_diff_svm_linear , mae_diff_svm_linear, rmse_diff_svm_linear,
           cor_diff_svm_poly,  tradrsq_diff_svm_poly , mae_diff_svm_poly, rmse_diff_svm_poly,
           cor_diff_random_forest,  tradrsq_diff_random_forest , mae_diff_random_forest, rmse_diff_random_forest,
           cor_diff_xgboost,  tradrsq_diff_xgboost , mae_diff_xgboost, rmse_diff_xgboost))
}
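For reference, boot::boot() calls its statistic as statistic(data, i), where i is the vector of resampled row indices for one replicate, and stacks each returned vector as one row of the $t matrix; the order of the vector returned above therefore fixes the column indices used when the results are unpacked below. A toy illustration of that contract (synthetic data, not the study data):

toy <- tibble(truth = rnorm(100), pred = rnorm(100))
# Each replicate returns a numeric vector; boot_out$t[, k] collects its k-th element.
toy_stat <- function(data, i) {
  d <- data[i, ]  # the resampled rows for this replicate
  c(cor  = cor(d$pred, d$truth),
    rmse = sqrt(mean((d$pred - d$truth)^2)))
}
toy_boot <- boot::boot(data = toy, statistic = toy_stat, R = 200)
quantile(toy_boot$t[, 1], c(0.025, 0.5, 0.975))  # percentile interval for the correlation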

5.13 Bootstrap the observed and predicted values

We resampled 5,000 times and computed the performance statistics with the function defined above:

set.seed(123456)


 boot_all_resp_no_uni_enet <- furrr::future_map(pred_all_resp, ~boot::boot(data = .,
           statistic = perfmatrics_diff_all_enet,
           R = 5000,
          # parallel="snow",
          # ncpus=20,
           #cl=cl
          ),
          .options = furrr::furrr_options(seed = 123456))    



saveRDS(boot_all_resp_no_uni_enet, paste0(anotherFold,'working_memory_tasks/windows/boot_all_resp_no_uni_enet_April_21_2022', '.RData'))
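The saved object can be reloaded later without re-running the 5,000 bootstrap replicates; the path mirrors the saveRDS() call above:

boot_all_resp_no_uni_enet <- readRDS(paste0(anotherFold,'working_memory_tasks/windows/boot_all_resp_no_uni_enet_April_21_2022', '.RData'))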

5.13.1 Processing the univariate models

# For each ROI that survived the univariate screen, pair its predictions with
# the observed response and the elastic net predictions.
uni_var_results_process_enet <- function(model_input, pred_input, enet_input,resp_input){
  roi_vec <- model_input$roi
  roi_list <- map(roi_vec, function(roi_input){
    roi_tibble <- select(pred_input, c(roi_input, resp_input)) %>%
      mutate(enet_pred = enet_input$model_predict)  # elastic net predictions
    names(roi_tibble) <- c("uni_pred", "model_resp", "enet_pred")
    return(roi_tibble)
  })
  names(roi_list) <- roi_vec
  return(roi_list)
}

uni_fdr_results_enet <- pmap(list(univariate_model_pred,univariate_model_fdr,
                             enet_predicted_list,resp_names),~uni_var_results_process_enet(
                               model_input=..2, 
                               pred_input=..1,
                               enet_input=..3,
                               resp_input=..4
                             ))

uni_bonferroni_results_enet <- pmap(list(univariate_model_pred,univariate_model_bonferroni,
                             enet_predicted_list,resp_names),~uni_var_results_process_enet(
                               model_input=..2, 
                               pred_input=..1,
                               enet_input=..3,
                               resp_input=..4
                             ))


uni_fdr_results_gfactor_enet <- pmap(list(univariate_model_pred_gfactor,
                                     univariate_model_fdr_gfactor,
                             enet_predicted_list_gfactor,cfa_resp_names),
                             ~uni_var_results_process_enet(
                               model_input=..2, 
                               pred_input=..1,
                               enet_input=..3,
                               resp_input=..4
                             ))

uni_bonferroni_results_gfactor_enet <- pmap(list(univariate_model_pred_gfactor,
                                    univariate_model_bonferroni_gfactor,
                             enet_predicted_list_gfactor,cfa_resp_names),~uni_var_results_process_enet(
                               model_input=..2, 
                               pred_input=..1,
                               enet_input=..3,
                               resp_input=..4
                             ))

uni_fdr_all_enet <- append(uni_fdr_results_enet,uni_fdr_results_gfactor_enet)
uni_bonferroni_all_enet <- append(uni_bonferroni_results_enet,uni_bonferroni_results_gfactor_enet)

# Boot statistic for a single univariate model: each metric's difference from
# the elastic net value on bootstrap sample i.
perfmatrics_diff_uni_enet <-function(data,i){
  cor_enet <- cor(data$enet_pred[i],
                     data$model_resp[i],
                     use = "pairwise.complete.obs")
  
  tradrsq_enet <- yardstick::rsq_trad(data=data, 
                                       truth=.data$model_resp[i], 
                                       estimate=.data$enet_pred[i])
  
  mae_enet <- yardstick::mae(data =data, 
                            truth=.data$model_resp[i], 
                            estimate=.data$enet_pred[i])
  
  rmse_enet <- yardstick::rmse(data =data, 
                              truth=.data$model_resp[i], 
                              estimate=.data$enet_pred[i])
  
  
  cor_uni <- cor(data$uni_pred[i],
                     data$model_resp[i],
                     use = "pairwise.complete.obs")
  
  tradrsq_uni <- yardstick::rsq_trad(data=data, 
                                       truth=.data$model_resp[i], 
                                       estimate=.data$uni_pred[i])
  
  mae_uni <- yardstick::mae(data =data, 
                            truth=.data$model_resp[i], 
                            estimate=.data$uni_pred[i])
  
  rmse_uni <- yardstick::rmse(data =data, 
                              truth=.data$model_resp[i], 
                              estimate=.data$uni_pred[i])
  
  cor_diff_uni <- cor_uni-cor_enet
  
  tradrsq_diff_uni <- tradrsq_uni$.estimate-tradrsq_enet$.estimate
  
  mae_diff_uni <- mae_uni$.estimate-mae_enet$.estimate

  rmse_diff_uni <- rmse_uni$.estimate-rmse_enet$.estimate

 
  
  return(c(cor_diff_uni,  tradrsq_diff_uni , mae_diff_uni, rmse_diff_uni))
}

Bootstrapped differences for the univariate models:

boot_uni_diff_fdr_enet<-map(uni_fdr_all_enet,~boot_diff_uni(data_input=.,
                                                  metric_input =perfmatrics_diff_uni_enet ))
  
boot_uni_diff_bonferroni_enet <-map(uni_bonferroni_all_enet,~boot_diff_uni(data_input=.,
                                                  metric_input =perfmatrics_diff_uni_enet ))


boot_diff_uni_all_enet <- list(fdr=boot_uni_diff_fdr_enet,bonferroni=boot_uni_diff_bonferroni_enet)

saveRDS(boot_diff_uni_all_enet, paste0(anotherFold,'working_memory_tasks/boot_diff_uni_all_enet_April_21_2022', '.RData'))

Process the results:

uni_fdr_diff_enet <- map2(.x=resp_all,.y=boot_uni_diff_fdr_enet,~uni_boot_results_processing(data_input=.y,
                                                          resp_input=.x, 
                                                          algor_input="FDR"))

uni_bonferroni_diff_enet <- map2(.x=resp_all,.y=boot_uni_diff_bonferroni_enet
                            ,~uni_boot_results_processing(data_input=.y,
                                                          resp_input=.x, 
                                                          algor_input="Bonferroni"))

5.14 Bootstrap results processing

theme_set(theme_ggdist())

# Reshape one response's boot output (the $t matrix) into tidy tibbles,
# one per metric, labelling each column block with its algorithm.
boot_diff_result_process_enet<- function(boot_input,resp_input){
  boot_output <- boot_input[[resp_input]]
  cor_diff_ols <- tibble(value = boot_output$t[,1],algorithm = "OLS")
  tradrsq_diff_ols<- tibble(value = boot_output$t[,2],algorithm = "OLS")
 mae_diff_ols<- tibble(value = boot_output$t[,3],algorithm = "OLS")
 rmse_diff_ols<- tibble(value = boot_output$t[,4],algorithm = "OLS")
 
 cor_diff_svm_rbf<- tibble(value = boot_output$t[,5],algorithm = "RBF\nSVM")
 tradrsq_diff_svm_rbf <- tibble(value = boot_output$t[,6],algorithm = "RBF\nSVM")
 mae_diff_svm_rbf<- tibble(value = boot_output$t[,7],algorithm = "RBF\nSVM")
 rmse_diff_svm_rbf<- tibble(value = boot_output$t[,8],algorithm = "RBF\nSVM")
 
  cor_diff_svm_linear<- tibble(value = boot_output$t[,9],algorithm = "Linear\nSVM")
  tradrsq_diff_svm_linear <- tibble(value = boot_output$t[,10],algorithm = "Linear\nSVM")
  mae_diff_svm_linear<- tibble(value = boot_output$t[,11],algorithm = "Linear\nSVM")
  rmse_diff_svm_linear<- tibble(value = boot_output$t[,12],algorithm = "Linear\nSVM")
  
  cor_diff_svm_poly<- tibble(value = boot_output$t[,13],algorithm = "Polynomial\nSVM")
  tradrsq_diff_svm_poly<- tibble(value = boot_output$t[,14],algorithm = "Polynomial\nSVM")
  mae_diff_svm_poly<- tibble(value = boot_output$t[,15],algorithm = "Polynomial\nSVM")
  rmse_diff_svm_poly<- tibble(value = boot_output$t[,16],algorithm = "Polynomial\nSVM")
  
  cor_diff_random_forest<- tibble(value = boot_output$t[,17],algorithm = "Random\nForest")
  tradrsq_diff_random_forest <- tibble(value = boot_output$t[,18],algorithm = "Random\nForest")
  mae_diff_random_forest<- tibble(value = boot_output$t[,19],algorithm = "Random\nForest")
  rmse_diff_random_forest<- tibble(value = boot_output$t[,20],algorithm = "Random\nForest")
  
  cor_diff_xgboost<- tibble(value = boot_output$t[,21],algorithm = "Xgboost")
  tradrsq_diff_xgboost<- tibble(value = boot_output$t[,22],algorithm = "Xgboost")
  mae_diff_xgboost<- tibble(value = boot_output$t[,23],algorithm = "Xgboost")
  rmse_diff_xgboost<- tibble(value = boot_output$t[,24],algorithm = "Xgboost")
  
  corr_all <- bind_rows(cor_diff_svm_poly,cor_diff_random_forest,cor_diff_xgboost,cor_diff_ols,cor_diff_svm_rbf,cor_diff_svm_linear)%>%
              mutate(response = resp_plotting_all$short_name[which(resp_plotting_all$response==resp_input)])
  
  tradrsq_all <- bind_rows(tradrsq_diff_svm_poly,tradrsq_diff_random_forest,tradrsq_diff_xgboost,
                           tradrsq_diff_ols,tradrsq_diff_svm_rbf,tradrsq_diff_svm_linear)%>%
              mutate(response = resp_plotting_all$short_name[which(resp_plotting_all$response==resp_input)])
  
  mae_all <- bind_rows(mae_diff_svm_poly,mae_diff_random_forest,mae_diff_xgboost,mae_diff_ols,mae_diff_svm_rbf,mae_diff_svm_linear)%>%
              mutate(response = resp_plotting_all$short_name[which(resp_plotting_all$response==resp_input)])
  
  rmse_all <- bind_rows(rmse_diff_svm_poly,rmse_diff_random_forest,rmse_diff_xgboost,rmse_diff_ols,
                        rmse_diff_svm_rbf,rmse_diff_svm_linear)%>%
              mutate(response = resp_plotting_all$short_name[which(resp_plotting_all$response==resp_input)])
  
  
  return(list(Correlation = corr_all,Traditional_Rsquare=tradrsq_all,MAE=mae_all,RMSE =rmse_all ))
  
}
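The 24 columns of $t follow the algorithm-by-metric order fixed in perfmatrics_diff_all_enet (four metrics per algorithm: correlation, traditional R-squared, MAE, RMSE), so the manual tibble construction above could also be generated from that grid. A sketch, where algo_order, metric_order and tidy_boot are names introduced here for illustration and boot_output is one element of boot_all_resp_no_uni_enet:

algo_order   <- c("OLS", "RBF\nSVM", "Linear\nSVM", "Polynomial\nSVM",
                  "Random\nForest", "Xgboost")
metric_order <- c("Correlation", "Traditional_Rsquare", "MAE", "RMSE")
col_grid <- tidyr::expand_grid(algorithm = algo_order, metric = metric_order)
# column k of boot_output$t corresponds to row k of col_grid
tidy_boot <- map_dfr(seq_len(nrow(col_grid)),
                     ~tibble(value     = boot_output$t[, .x],
                             algorithm = col_grid$algorithm[.x],
                             metric    = col_grid$metric[.x]))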


boot_diff_nouni_enet <- map(resp_all,~boot_diff_result_process_enet(boot_input=boot_all_resp_no_uni_enet,resp_input=.))

#names(boot_diff_nouni[[12]])<- names(boot_diff_nouni[[11]])
boot_corr_diff_nouni_enet <- map(boot_diff_nouni_enet,"Correlation")%>%
            do.call(rbind,.)

boot_cor_diff_fdr_enet <- map(uni_fdr_diff_enet,"Correlation")%>%
            do.call(rbind,.)

boot_cor_diff_bonferroni_enet <- map(uni_bonferroni_diff_enet,"Correlation")%>%
            do.call(rbind,.)

boot_cor_diff_enet <- boot_corr_diff_nouni_enet%>%
                 bind_rows(boot_cor_diff_fdr_enet)%>%
                 bind_rows(boot_cor_diff_bonferroni_enet)%>%
  mutate(algorithm = as.factor(algorithm))%>%
   mutate(algorithm = factor(algorithm,levels =c ("FDR","Bonferroni","OLS",
                                                "Random\nForest","Xgboost", 
                                                "Linear\nSVM",
                                                "Polynomial\nSVM" ,"RBF\nSVM")))%>%
  mutate(resp_factor= as.factor(response))%>%
  mutate(resp_factor = factor(response,levels =c("Pattern Speed", "Audi Verbal",
                                                 "Flanker","Seq Memory",
                                                 "Card Sort", # "Cog Flex",
                                                 "Little Man",
                                                 "List Work Mem","Matrix Reason",
                                                 "Reading Recog","Pic Vocab", 
                                                 "2-back Work Mem","gfactor" )))






boot_rsq_diff_nouni_enet <- map(boot_diff_nouni_enet,"Traditional_Rsquare")%>%
            do.call(rbind,.)

boot_rsq_diff_fdr_enet <- map(uni_fdr_diff_enet,"Traditional_Rsquare")%>%
            do.call(rbind,.)

boot_rsq_diff_bonferroni_enet <- map(uni_bonferroni_diff_enet,"Traditional_Rsquare")%>%
            do.call(rbind,.)

boot_rsq_diff_enet <- boot_rsq_diff_nouni_enet%>%
                 bind_rows(boot_rsq_diff_fdr_enet)%>%
                 bind_rows(boot_rsq_diff_bonferroni_enet)%>%
  mutate(algorithm = as.factor(algorithm))%>%
   mutate(algorithm = factor(algorithm,levels =c ("FDR","Bonferroni","OLS",
                                                "Random\nForest","Xgboost", 
                                                "Linear\nSVM",
                                                "Polynomial\nSVM" ,"RBF\nSVM")))%>%
  mutate(resp_factor= as.factor(response))%>%
  mutate(resp_factor = factor(response,levels =c("Pattern Speed", "Audi Verbal",
                                                 "Flanker","Seq Memory",
                                                 "Card Sort", # "Cog Flex",
                                                 "Little Man",
                                                 "List Work Mem","Matrix Reason",
                                                 "Reading Recog","Pic Vocab", 
                                                 "2-back Work Mem","gfactor" )))

boot_mae_diff_nouni_enet <- map(boot_diff_nouni_enet,"MAE")%>%
            do.call(rbind,.)

boot_mae_diff_fdr_enet <- map(uni_fdr_diff_enet,"MAE")%>%
            do.call(rbind,.)

boot_mae_diff_bonferroni_enet <- map(uni_bonferroni_diff_enet,"MAE")%>%
            do.call(rbind,.)

boot_mae_diff_enet <- boot_mae_diff_nouni_enet%>%
                 bind_rows(boot_mae_diff_fdr_enet)%>%
                 bind_rows(boot_mae_diff_bonferroni_enet)%>%
  mutate(algorithm = as.factor(algorithm))%>%
   mutate(algorithm = factor(algorithm,levels =c ("FDR","Bonferroni","OLS",
                                                "Random\nForest","Xgboost", 
                                                "Linear\nSVM",
                                                "Polynomial\nSVM" ,"RBF\nSVM")))%>%
  mutate(resp_factor= as.factor(response))%>%
  mutate(resp_factor = factor(response,levels =c("Pattern Speed", "Audi Verbal",
                                                 "Flanker","Seq Memory",
                                                 "Card Sort", # "Cog Flex",
                                                 "Little Man",
                                                 "List Work Mem","Matrix Reason",
                                                 "Reading Recog","Pic Vocab", 
                                                 "2-back Work Mem","gfactor" )))


boot_rmse_diff_nouni_enet <- map(boot_diff_nouni_enet,"RMSE")%>%
            do.call(rbind,.)

boot_rmse_diff_fdr_enet <- map(uni_fdr_diff_enet,"RMSE")%>%
            do.call(rbind,.)

boot_rmse_diff_bonferroni_enet <- map(uni_bonferroni_diff_enet,"RMSE")%>%
            do.call(rbind,.)

boot_rmse_diff_enet <- boot_rmse_diff_nouni_enet%>%
                 bind_rows(boot_rmse_diff_fdr_enet)%>%
                 bind_rows(boot_rmse_diff_bonferroni_enet)%>%
  mutate(algorithm = as.factor(algorithm))%>%
   mutate(algorithm = factor(algorithm,levels =c ("FDR","Bonferroni","OLS",
                                                "Random\nForest","Xgboost", 
                                                "Linear\nSVM",
                                                "Polynomial\nSVM" ,"RBF\nSVM")))%>%
  mutate(resp_factor= as.factor(response))%>%
  mutate(resp_factor = factor(response,levels =c("Pattern Speed", "Audi Verbal",
                                                 "Flanker","Seq Memory",
                                                 "Card Sort", # "Cog Flex",
                                                 "Little Man",
                                                 "List Work Mem","Matrix Reason",
                                                 "Reading Recog","Pic Vocab", 
                                                 "2-back Work Mem","gfactor" )))

boot_diff_metric_enet<- list(Correlation=boot_cor_diff_enet,
                               Traditional_Rsquare=boot_rsq_diff_enet, 
                               MAE= boot_mae_diff_enet,
                               RMSE =boot_rmse_diff_enet )



boot_plot_diff_list_enet <-  map2(.x=boot_diff_metric_enet,
                        .y = metric_vec, 
                        ~ggplot(data=.x,
                                aes(y = value, 
                                    x = resp_factor, 
                                    color= algorithm)) +
                          stat_pointinterval(position = position_dodge(width = 2, 
                                                                       preserve = "single"))+
                          ##hline instead of vline because of the coordinate flip
                            geom_hline(yintercept = 0, color = "grey55", linetype = "dashed",size=1.5) +
                          ggtitle(.y)+
                         coord_flip()+
                          theme(plot.title = element_text(size=15),
                                     axis.title.x = element_blank(),
                                     axis.title.y = element_blank(), 
                                     axis.text.x = element_text(size = 12),
                                     axis.text.y = element_text(size=12),
                                     legend.position = "bottom",
                                     legend.text=element_text(size=12),
                                     legend.title=element_text(size=15))+
                               guides(color = guide_legend(override.aes = list(size = 10)))+
                               scale_color_manual(values=color_boot_plot[-4],
                                                  labels = c ("FDR","Bonferroni",
                                                              "OLS",
                                                              "Random\nForest","Xgboost", 
                                                              "Linear\nSVM",
                                                              "Polynomial\nSVM" ,"RBF\nSVM"))
                        )







boot_plot_diff_legend_enet <- get_legend(boot_plot_diff_list_enet[[1]])


title_boot_diff_plot_enet <- ggdraw() + 
  draw_label(
    "Bootstrapped Distribution of the Differences in Predictive Performance:
Other Algorithms Minus Elastic Net

",
    fontface = 'bold',
    x = 0,
    hjust = 0,
    size=21
  ) +
  theme(
    # add margin on the left of the drawing canvas,
    # so title is aligned with left edge of first plot
    plot.margin = margin(0, 0, 0, 7)
  )
plot_grid(title_boot_diff_plot_enet,
          ggpubr::ggarrange(plotlist =boot_plot_diff_list_enet, 
                  ncol = 2,
                  nrow = 2, 
                  common.legend = TRUE, 
                  legend = "bottom",
                  legend.grob = boot_plot_diff_legend_enet)
          ,nrow = 2 , rel_heights = c(0.1, 1))

Plotting without the univariate algorithms (FDR and Bonferroni):

boot_diff_metric_enetnouni <- map(boot_diff_metric_enet, ~filter(.x,.data[["algorithm"]]!="FDR")%>%
                            filter(.data[["algorithm"]]!="Bonferroni"))




boot_plot_diff_list_enetnouni <-  map2(.x=boot_diff_metric_enetnouni,
                        .y = metric_vec, 
                        ~ggplot(data=.x,
                                aes(y = value, 
                                    x = resp_factor, 
                                    color= algorithm)) +
                          stat_pointinterval(position = position_dodge(width = 2, 
                                                                       preserve = "single"))+
                          ##hline instead of vline because of the coordinate flip
                            geom_hline(yintercept = 0, color = "grey55", linetype = "dashed",size=1.5) +
                          ggtitle(.y)+
                         coord_flip()+
                          theme(plot.title = element_text(size=15),
                                     axis.title.x = element_blank(),
                                     axis.title.y = element_blank(), 
                                     axis.text.x = element_text(size = 12),
                                     axis.text.y = element_text(size=12),
                                     legend.position = "bottom",
                                     legend.text=element_text(size=12),
                                     legend.title=element_text(size=15))+
                               guides(color = guide_legend(override.aes = list(size = 10)))+
                               scale_color_manual(values=color_boot_plot[-c(1,2,4)],
                                                  labels = c ("OLS",
                                                              "Random\nForest","Xgboost", 
                                                              "Linear\nSVM",
                                                              "Polynomial\nSVM" ,"RBF\nSVM"))
                        )







boot_plot_diff_legend_enetnouni <- get_legend(boot_plot_diff_list_enetnouni[[1]])



plot_grid(title_boot_diff_plot_enet,
          ggpubr::ggarrange(plotlist =boot_plot_diff_list_enetnouni, 
                  ncol = 2,
                  nrow = 2, 
                  common.legend = TRUE, 
                  legend = "bottom",
                  legend.grob = boot_plot_diff_legend_enetnouni)
          ,nrow = 2 , rel_heights = c(0.1, 1))

5.14.1 Getting the summary quantiles of the bootstrapped metrics

modality_vec_diff_enet <- unique(boot_diff_metric_enet[["Correlation"]][["modality"]])


kable_boot_metric_diff_enet <- boot_diff_metric_enet %>% 
  map(.,
      ~boot_quantile_processing_diff(data_input = .))

kable_metric_vars_diff_enet <- colnames(kable_boot_metric_diff_enet[[1]])

kable_boot_metric_vars_diff_enet <- kable_boot_metric_diff_enet   %>% 
  map(.,~arrange(.,desc(match(response,
                      c("Pattern Speed", 
                        "Audi Verbal",
                        "Flanker",
                        "Seq Memory",
                        "Card Sort", # "Cog Flex",
                        "Little Man",
                        "List Work Mem",
                        "Matrix Reason",
                        "Reading Recog",
                        "Pic Vocab", 
                        "2-back Work Mem",
                        "gfactor" )))) %>%
        mutate_if(is.numeric, round, 3) %>%
        relocate(response,algorithm))
  


  kable_boot_metric_vars_diff_enet[[1]] %>% 
    kableExtra::kbl(caption = paste0(metric_names[1])) %>%
    kableExtra::kable_classic(full_width = F, 
                             html_font = "Cambria")
Correlation
response algorithm 0.025 0.5 0.975 mean
gfactor FDR -0.448 -0.339 -0.194 -0.333
gfactor Bonferroni -0.427 -0.320 -0.186 -0.315
gfactor OLS -0.041 -0.022 -0.004 -0.022
gfactor Random Forest -0.039 -0.013 0.012 -0.013
gfactor Xgboost -0.050 -0.023 0.003 -0.024
gfactor Linear SVM -0.014 -0.004 0.007 -0.004
gfactor Polynomial SVM -0.014 -0.003 0.008 -0.003
gfactor RBF SVM -0.013 0.001 0.016 0.002
2-back Work Mem FDR -0.536 -0.421 -0.274 -0.416
2-back Work Mem Bonferroni -0.509 -0.399 -0.264 -0.395
2-back Work Mem OLS -0.034 -0.021 -0.008 -0.021
2-back Work Mem Random Forest -0.069 -0.041 -0.013 -0.041
2-back Work Mem Xgboost -0.080 -0.055 -0.030 -0.055
2-back Work Mem Linear SVM -0.024 -0.014 -0.004 -0.014
2-back Work Mem Polynomial SVM -0.024 -0.014 -0.004 -0.014
2-back Work Mem RBF SVM -0.014 -0.003 0.009 -0.003
Pic Vocab FDR -0.393 -0.273 -0.173 -0.277
Pic Vocab Bonferroni -0.372 -0.260 -0.168 -0.263
Pic Vocab OLS -0.063 -0.039 -0.016 -0.039
Pic Vocab Random Forest -0.055 -0.023 0.007 -0.023
Pic Vocab Xgboost -0.046 -0.018 0.010 -0.018
Pic Vocab Linear SVM -0.040 -0.022 -0.004 -0.022
Pic Vocab Polynomial SVM -0.040 -0.019 0.002 -0.019
Pic Vocab RBF SVM -0.040 -0.019 0.001 -0.020
Reading Recog FDR -0.348 -0.234 -0.124 -0.234
Reading Recog Bonferroni -0.304 -0.206 -0.114 -0.207
Reading Recog OLS -0.084 -0.055 -0.026 -0.055
Reading Recog Random Forest -0.045 -0.017 0.012 -0.017
Reading Recog Xgboost -0.050 -0.023 0.004 -0.023
Reading Recog Linear SVM -0.040 -0.024 -0.008 -0.024
Reading Recog Polynomial SVM -0.028 -0.014 0.000 -0.014
Reading Recog RBF SVM -0.027 -0.011 0.006 -0.011
Matrix Reason FDR -0.321 -0.212 -0.092 -0.210
Matrix Reason Bonferroni -0.306 -0.189 -0.082 -0.190
Matrix Reason OLS -0.039 -0.009 0.021 -0.009
Matrix Reason Random Forest -0.046 -0.016 0.013 -0.016
Matrix Reason Xgboost -0.055 -0.027 0.001 -0.027
Matrix Reason Linear SVM -0.017 0.002 0.021 0.002
Matrix Reason Polynomial SVM -0.042 -0.009 0.021 -0.009
Matrix Reason RBF SVM -0.018 0.001 0.019 0.001
List Work Mem FDR -0.318 -0.221 -0.114 -0.218
List Work Mem Bonferroni -0.307 -0.205 -0.105 -0.205
List Work Mem OLS -0.063 -0.035 -0.006 -0.035
List Work Mem Random Forest -0.058 -0.033 -0.008 -0.033
List Work Mem Xgboost -0.057 -0.032 -0.007 -0.032
List Work Mem Linear SVM -0.038 -0.022 -0.005 -0.022
List Work Mem Polynomial SVM -0.029 -0.016 -0.003 -0.016
List Work Mem RBF SVM -0.029 -0.016 -0.003 -0.016
Little Man FDR -0.290 -0.192 -0.098 -0.192
Little Man Bonferroni -0.273 -0.177 -0.090 -0.179
Little Man OLS -0.051 -0.024 0.004 -0.024
Little Man Random Forest -0.055 -0.023 0.006 -0.023
Little Man Xgboost -0.048 -0.014 0.016 -0.015
Little Man Linear SVM -0.021 -0.004 0.013 -0.004
Little Man Polynomial SVM -0.045 -0.018 0.009 -0.018
Little Man RBF SVM -0.018 0.003 0.023 0.003
Card Sort FDR -0.252 -0.158 -0.070 -0.159
Card Sort Bonferroni -0.236 -0.145 -0.064 -0.146
Card Sort OLS -0.062 -0.025 0.013 -0.024
Card Sort Random Forest -0.051 -0.011 0.035 -0.010
Card Sort Xgboost -0.053 -0.012 0.032 -0.011
Card Sort Linear SVM -0.055 -0.029 -0.003 -0.029
Card Sort Polynomial SVM -0.021 0.002 0.026 0.002
Card Sort RBF SVM -0.028 0.005 0.041 0.005
Seq Memory FDR -0.212 -0.105 -0.008 -0.106
Seq Memory Bonferroni -0.188 -0.077 0.000 -0.083
Seq Memory OLS -0.090 -0.045 -0.001 -0.045
Seq Memory Random Forest -0.038 -0.005 0.028 -0.005
Seq Memory Xgboost -0.019 0.013 0.044 0.013
Seq Memory Linear SVM -0.031 -0.005 0.022 -0.005
Seq Memory Polynomial SVM -0.011 0.005 0.022 0.005
Seq Memory RBF SVM -0.011 0.006 0.023 0.006
Flanker FDR -0.263 -0.137 -0.028 -0.139
Flanker Bonferroni -0.223 -0.101 -0.015 -0.106
Flanker OLS -0.113 -0.071 -0.030 -0.071
Flanker Random Forest -0.059 -0.018 0.023 -0.018
Flanker Xgboost -0.062 -0.028 0.006 -0.028
Flanker Linear SVM -0.047 -0.022 0.001 -0.022
Flanker Polynomial SVM -0.026 -0.007 0.011 -0.007
Flanker RBF SVM -0.092 -0.054 -0.015 -0.054
Audi Verbal FDR -0.183 -0.084 0.009 -0.085
Audi Verbal Bonferroni -0.145 -0.063 0.019 -0.063
Audi Verbal OLS -0.084 -0.048 -0.012 -0.048
Audi Verbal Random Forest -0.026 0.016 0.058 0.016
Audi Verbal Xgboost -0.043 0.000 0.044 0.000
Audi Verbal Linear SVM -0.038 -0.016 0.007 -0.015
Audi Verbal Polynomial SVM -0.015 0.008 0.031 0.008
Audi Verbal RBF SVM -0.019 0.008 0.035 0.008
Pattern Speed FDR -0.189 -0.090 -0.008 -0.092
Pattern Speed Bonferroni -0.172 -0.077 -0.001 -0.079
Pattern Speed OLS -0.079 -0.027 0.026 -0.027
Pattern Speed Random Forest -0.023 0.013 0.051 0.013
Pattern Speed Xgboost -0.034 0.006 0.046 0.006
Pattern Speed Linear SVM -0.047 -0.010 0.027 -0.010
Pattern Speed Polynomial SVM -0.024 0.000 0.025 0.000
Pattern Speed RBF SVM -0.023 0.003 0.028 0.003
  kable_boot_metric_vars_diff_enet[[2]] %>% 
    kableExtra::kbl(caption = paste0(metric_names[2])) %>%
    kableExtra::kable_classic(full_width = F, 
                             html_font = "Cambria")
Traditional_Rsquare
response algorithm 0.025 0.5 0.975 mean
gfactor FDR -0.220 -0.176 -0.122 -0.175
gfactor Bonferroni -0.217 -0.172 -0.119 -0.171
gfactor OLS -0.049 -0.028 -0.007 -0.028
gfactor Random Forest -0.036 -0.016 0.005 -0.016
gfactor Xgboost -0.043 -0.021 0.001 -0.021
gfactor Linear SVM -0.014 -0.005 0.005 -0.005
gfactor Polynomial SVM -0.014 -0.004 0.006 -0.004
gfactor RBF SVM -0.011 0.000 0.013 0.001
2-back Work Mem FDR -0.293 -0.250 -0.197 -0.249
2-back Work Mem Bonferroni -0.289 -0.246 -0.192 -0.245
2-back Work Mem OLS -0.037 -0.020 -0.005 -0.021
2-back Work Mem Random Forest -0.067 -0.044 -0.020 -0.044
2-back Work Mem Xgboost -0.074 -0.053 -0.032 -0.053
2-back Work Mem Linear SVM -0.025 -0.014 -0.003 -0.014
2-back Work Mem Polynomial SVM -0.024 -0.013 -0.002 -0.013
2-back Work Mem RBF SVM -0.017 -0.003 0.010 -0.003
Pic Vocab FDR -0.152 -0.117 -0.080 -0.116
Pic Vocab Bonferroni -0.150 -0.115 -0.078 -0.115
Pic Vocab OLS -0.062 -0.039 -0.018 -0.039
Pic Vocab Random Forest -0.038 -0.019 0.001 -0.019
Pic Vocab Xgboost -0.031 -0.013 0.005 -0.013
Pic Vocab Linear SVM -0.040 -0.022 -0.006 -0.022
Pic Vocab Polynomial SVM -0.034 -0.017 0.000 -0.017
Pic Vocab RBF SVM -0.035 -0.018 -0.001 -0.018
Reading Recog FDR -0.124 -0.092 -0.058 -0.092
Reading Recog Bonferroni -0.119 -0.087 -0.054 -0.087
Reading Recog OLS -0.069 -0.043 -0.019 -0.043
Reading Recog Random Forest -0.026 -0.011 0.004 -0.011
Reading Recog Xgboost -0.030 -0.016 -0.002 -0.016
Reading Recog Linear SVM -0.028 -0.016 -0.005 -0.016
Reading Recog Polynomial SVM -0.023 -0.013 -0.002 -0.013
Reading Recog RBF SVM -0.022 -0.011 0.000 -0.011
Matrix Reason FDR -0.108 -0.075 -0.038 -0.074
Matrix Reason Bonferroni -0.106 -0.071 -0.035 -0.071
Matrix Reason OLS -0.043 -0.017 0.007 -0.017
Matrix Reason Random Forest -0.025 -0.009 0.007 -0.009
Matrix Reason Xgboost -0.033 -0.017 -0.002 -0.017
Matrix Reason Linear SVM -0.011 0.000 0.011 0.000
Matrix Reason Polynomial SVM -0.027 -0.007 0.011 -0.007
Matrix Reason RBF SVM -0.011 0.000 0.010 -0.001
List Work Mem FDR -0.108 -0.079 -0.047 -0.079
List Work Mem Bonferroni -0.107 -0.077 -0.045 -0.076
List Work Mem OLS -0.061 -0.036 -0.012 -0.036
List Work Mem Random Forest -0.031 -0.017 -0.005 -0.017
List Work Mem Xgboost -0.033 -0.018 -0.004 -0.018
List Work Mem Linear SVM -0.025 -0.014 -0.003 -0.014
List Work Mem Polynomial SVM -0.020 -0.012 -0.003 -0.012
List Work Mem RBF SVM -0.020 -0.011 -0.002 -0.011
Little Man FDR -0.092 -0.064 -0.035 -0.064
Little Man Bonferroni -0.090 -0.062 -0.034 -0.062
Little Man OLS -0.049 -0.026 -0.005 -0.027
Little Man Random Forest -0.027 -0.013 0.001 -0.013
Little Man Xgboost -0.024 -0.008 0.006 -0.009
Little Man Linear SVM -0.026 -0.009 0.006 -0.010
Little Man Polynomial SVM -0.034 -0.015 0.002 -0.015
Little Man RBF SVM -0.016 -0.001 0.012 -0.001
Card Sort FDR -0.068 -0.045 -0.021 -0.045
Card Sort Bonferroni -0.067 -0.043 -0.020 -0.043
Card Sort OLS -0.060 -0.031 -0.004 -0.031
Card Sort Random Forest -0.020 -0.004 0.015 -0.003
Card Sort Xgboost -0.021 -0.005 0.012 -0.005
Card Sort Linear SVM -0.029 -0.014 0.000 -0.014
Card Sort Polynomial SVM -0.009 0.000 0.009 0.000
Card Sort RBF SVM -0.011 0.002 0.016 0.002
Seq Memory FDR -0.045 -0.023 -0.001 -0.023
Seq Memory Bonferroni -0.042 -0.020 0.001 -0.020
Seq Memory OLS -0.088 -0.057 -0.028 -0.057
Seq Memory Random Forest -0.015 -0.003 0.009 -0.003
Seq Memory Xgboost -0.007 0.004 0.014 0.004
Seq Memory Linear SVM -0.033 -0.015 0.003 -0.015
Seq Memory Polynomial SVM -0.011 -0.002 0.008 -0.002
Seq Memory RBF SVM -0.011 -0.002 0.008 -0.002
Flanker FDR -0.065 -0.038 -0.012 -0.038
Flanker Bonferroni -0.060 -0.033 -0.009 -0.033
Flanker OLS -0.092 -0.060 -0.031 -0.061
Flanker Random Forest -0.022 -0.008 0.007 -0.008
Flanker Xgboost -0.024 -0.012 0.001 -0.012
Flanker Linear SVM -0.034 -0.019 -0.004 -0.019
Flanker Polynomial SVM -0.031 -0.016 -0.001 -0.016
Flanker RBF SVM -0.049 -0.030 -0.011 -0.031
Audi Verbal FDR -0.038 -0.013 0.013 -0.013
Audi Verbal Bonferroni -0.034 -0.010 0.015 -0.010
Audi Verbal OLS -0.095 -0.068 -0.041 -0.068
Audi Verbal Random Forest -0.008 0.008 0.024 0.008
Audi Verbal Xgboost -0.020 -0.001 0.019 -0.001
Audi Verbal Linear SVM -0.027 -0.015 -0.002 -0.015
Audi Verbal Polynomial SVM -0.008 0.003 0.015 0.003
Audi Verbal RBF SVM -0.011 0.002 0.014 0.002
Pattern Speed FDR -0.033 -0.018 -0.003 -0.018
Pattern Speed Bonferroni -0.032 -0.016 -0.002 -0.016
Pattern Speed OLS -0.062 -0.032 -0.003 -0.032
Pattern Speed Random Forest -0.004 0.006 0.016 0.006
Pattern Speed Xgboost -0.007 0.003 0.013 0.003
Pattern Speed Linear SVM -0.022 -0.005 0.011 -0.005
Pattern Speed Polynomial SVM -0.006 0.001 0.007 0.001
Pattern Speed RBF SVM -0.005 0.001 0.007 0.001
  kable_boot_metric_vars_diff_enet[[3]] %>% 
    kableExtra::kbl(caption = paste0(metric_names[3])) %>%
    kableExtra::kable_classic(full_width = F, 
                             html_font = "Cambria")
MAE
response algorithm 0.025 0.5 0.975 mean
gfactor FDR 0.045 0.070 0.094 0.070
gfactor Bonferroni 0.044 0.068 0.092 0.068
gfactor OLS 0.002 0.013 0.023 0.013
gfactor Random Forest -0.006 0.004 0.015 0.004
gfactor Xgboost -0.002 0.009 0.020 0.009
gfactor Linear SVM -0.005 0.001 0.006 0.001
gfactor Polynomial SVM -0.005 0.000 0.005 0.000
gfactor RBF SVM -0.009 -0.003 0.004 -0.003
2-back Work Mem FDR 0.097 0.125 0.151 0.125
2-back Work Mem Bonferroni 0.095 0.123 0.149 0.123
2-back Work Mem OLS -0.004 0.005 0.015 0.005
2-back Work Mem Random Forest 0.010 0.023 0.036 0.023
2-back Work Mem Xgboost 0.014 0.026 0.038 0.026
2-back Work Mem Linear SVM -0.002 0.004 0.009 0.004
2-back Work Mem Polynomial SVM -0.003 0.003 0.009 0.003
2-back Work Mem RBF SVM -0.011 -0.003 0.004 -0.003
Pic Vocab FDR 0.032 0.051 0.070 0.051
Pic Vocab Bonferroni 0.032 0.051 0.070 0.051
Pic Vocab OLS 0.007 0.017 0.029 0.018
Pic Vocab Random Forest 0.005 0.016 0.026 0.016
Pic Vocab Xgboost 0.001 0.010 0.019 0.010
Pic Vocab Linear SVM -0.004 0.004 0.013 0.004
Pic Vocab Polynomial SVM -0.004 0.004 0.013 0.004
Pic Vocab RBF SVM -0.004 0.004 0.013 0.004
Reading Recog FDR 0.018 0.034 0.049 0.034
Reading Recog Bonferroni 0.018 0.033 0.048 0.033
Reading Recog OLS 0.010 0.021 0.032 0.021
Reading Recog Random Forest -0.004 0.004 0.012 0.004
Reading Recog Xgboost 0.001 0.009 0.016 0.009
Reading Recog Linear SVM -0.005 0.001 0.007 0.001
Reading Recog Polynomial SVM -0.006 0.000 0.005 0.000
Reading Recog RBF SVM -0.006 0.000 0.005 0.000
Matrix Reason FDR 0.015 0.032 0.049 0.032
Matrix Reason Bonferroni 0.013 0.030 0.047 0.030
Matrix Reason OLS 0.002 0.014 0.025 0.014
Matrix Reason Random Forest -0.004 0.003 0.011 0.003
Matrix Reason Xgboost -0.002 0.005 0.013 0.005
Matrix Reason Linear SVM -0.006 0.000 0.006 0.000
Matrix Reason Polynomial SVM -0.004 0.005 0.014 0.005
Matrix Reason RBF SVM -0.006 -0.001 0.004 -0.001
List Work Mem FDR 0.013 0.029 0.045 0.029
List Work Mem Bonferroni 0.012 0.028 0.044 0.028
List Work Mem OLS 0.000 0.012 0.024 0.012
List Work Mem Random Forest -0.001 0.005 0.012 0.005
List Work Mem Xgboost -0.003 0.004 0.012 0.004
List Work Mem Linear SVM -0.001 0.005 0.010 0.005
List Work Mem Polynomial SVM -0.001 0.004 0.009 0.004
List Work Mem RBF SVM -0.001 0.004 0.009 0.004
Little Man FDR 0.017 0.032 0.047 0.032
Little Man Bonferroni 0.016 0.030 0.045 0.030
Little Man OLS -0.005 0.006 0.018 0.006
Little Man Random Forest -0.001 0.006 0.013 0.006
Little Man Xgboost -0.001 0.006 0.013 0.006
Little Man Linear SVM -0.007 0.000 0.009 0.001
Little Man Polynomial SVM -0.006 0.002 0.011 0.002
Little Man RBF SVM -0.009 -0.002 0.005 -0.002
Card Sort FDR 0.005 0.017 0.028 0.017
Card Sort Bonferroni 0.005 0.016 0.027 0.016
Card Sort OLS 0.005 0.018 0.032 0.019
Card Sort Random Forest -0.008 -0.001 0.007 -0.001
Card Sort Xgboost -0.005 0.002 0.008 0.002
Card Sort Linear SVM -0.002 0.005 0.013 0.005
Card Sort Polynomial SVM -0.005 -0.001 0.003 -0.001
Card Sort RBF SVM -0.006 -0.001 0.005 -0.001
Seq Memory FDR 0.005 0.016 0.027 0.016
Seq Memory Bonferroni 0.004 0.014 0.026 0.014
Seq Memory OLS 0.002 0.016 0.029 0.016
Seq Memory Random Forest -0.005 0.001 0.006 0.001
Seq Memory Xgboost -0.006 -0.001 0.004 -0.001
Seq Memory Linear SVM -0.004 0.004 0.013 0.004
Seq Memory Polynomial SVM -0.004 0.000 0.005 0.000
Seq Memory RBF SVM -0.005 0.000 0.005 0.000
Flanker FDR 0.008 0.020 0.032 0.020
Flanker Bonferroni 0.006 0.018 0.030 0.018
Flanker OLS 0.013 0.025 0.037 0.025
Flanker Random Forest -0.002 0.005 0.012 0.005
Flanker Xgboost -0.001 0.005 0.011 0.005
Flanker Linear SVM -0.009 -0.001 0.007 -0.001
Flanker Polynomial SVM -0.010 -0.002 0.005 -0.002
Flanker RBF SVM -0.007 0.002 0.011 0.002
Audi Verbal FDR -0.006 0.006 0.017 0.006
Audi Verbal Bonferroni -0.007 0.005 0.016 0.005
Audi Verbal OLS 0.010 0.021 0.033 0.021
Audi Verbal Random Forest -0.011 -0.003 0.004 -0.003
Audi Verbal Xgboost -0.009 0.001 0.010 0.001
Audi Verbal Linear SVM -0.005 0.001 0.008 0.001
Audi Verbal Polynomial SVM -0.009 -0.003 0.002 -0.003
Audi Verbal RBF SVM -0.008 -0.002 0.003 -0.002
Pattern Speed FDR 0.002 0.009 0.015 0.009
Pattern Speed Bonferroni 0.002 0.008 0.015 0.008
Pattern Speed OLS 0.004 0.017 0.030 0.017
Pattern Speed Random Forest -0.006 -0.001 0.005 -0.001
Pattern Speed Xgboost -0.006 -0.002 0.003 -0.002
Pattern Speed Linear SVM -0.006 0.002 0.010 0.002
Pattern Speed Polynomial SVM -0.004 -0.001 0.002 -0.001
Pattern Speed RBF SVM -0.004 -0.001 0.002 -0.001
  kable_boot_metric_vars_diff_enet[[4]] %>% 
    kableExtra::kbl(caption = paste0(metric_names[4])) %>%
    kableExtra::kable_classic(full_width = F, 
                             html_font = "Cambria")
RMSE
response algorithm 0.025 0.5 0.975 mean
gfactor FDR 0.064 0.093 0.119 0.092
gfactor Bonferroni 0.062 0.091 0.117 0.090
gfactor OLS 0.004 0.015 0.026 0.015
gfactor Random Forest -0.002 0.009 0.020 0.009
gfactor Xgboost 0.000 0.012 0.024 0.012
gfactor Linear SVM -0.003 0.003 0.008 0.003
gfactor Polynomial SVM -0.003 0.002 0.008 0.002
gfactor RBF SVM -0.007 0.000 0.006 0.000
2-back Work Mem FDR 0.105 0.134 0.160 0.134
2-back Work Mem Bonferroni 0.103 0.132 0.159 0.132
2-back Work Mem OLS 0.003 0.012 0.021 0.012
2-back Work Mem Random Forest 0.012 0.025 0.039 0.025
2-back Work Mem Xgboost 0.018 0.030 0.042 0.030
2-back Work Mem Linear SVM 0.002 0.008 0.014 0.008
2-back Work Mem Polynomial SVM 0.001 0.008 0.014 0.008
2-back Work Mem RBF SVM -0.006 0.002 0.009 0.002
Pic Vocab FDR 0.040 0.060 0.080 0.060
Pic Vocab Bonferroni 0.039 0.059 0.079 0.059
Pic Vocab OLS 0.010 0.020 0.032 0.020
Pic Vocab Random Forest -0.001 0.010 0.021 0.010
Pic Vocab Xgboost -0.003 0.007 0.016 0.007
Pic Vocab Linear SVM 0.003 0.012 0.021 0.012
Pic Vocab Polynomial SVM 0.000 0.009 0.018 0.009
Pic Vocab RBF SVM 0.001 0.010 0.018 0.010
Reading Recog FDR 0.029 0.047 0.065 0.047
Reading Recog Bonferroni 0.027 0.045 0.062 0.045
Reading Recog OLS 0.010 0.022 0.035 0.022
Reading Recog Random Forest -0.002 0.006 0.014 0.006
Reading Recog Xgboost 0.001 0.008 0.016 0.008
Reading Recog Linear SVM 0.003 0.009 0.015 0.009
Reading Recog Polynomial SVM 0.001 0.007 0.012 0.007
Reading Recog RBF SVM 0.000 0.006 0.011 0.006
Matrix Reason FDR 0.019 0.038 0.056 0.038
Matrix Reason Bonferroni 0.018 0.036 0.055 0.036
Matrix Reason OLS -0.003 0.009 0.022 0.009
Matrix Reason Random Forest -0.003 0.005 0.013 0.005
Matrix Reason Xgboost 0.001 0.009 0.017 0.009
Matrix Reason Linear SVM -0.006 0.000 0.006 0.000
Matrix Reason Polynomial SVM -0.006 0.004 0.014 0.004
Matrix Reason RBF SVM -0.005 0.000 0.006 0.000
List Work Mem FDR 0.024 0.040 0.056 0.040
List Work Mem Bonferroni 0.023 0.039 0.055 0.039
List Work Mem OLS 0.006 0.019 0.031 0.019
List Work Mem Random Forest 0.002 0.009 0.016 0.009
List Work Mem Xgboost 0.002 0.009 0.017 0.009
List Work Mem Linear SVM 0.001 0.007 0.013 0.007
List Work Mem Polynomial SVM 0.001 0.006 0.011 0.006
List Work Mem RBF SVM 0.001 0.006 0.010 0.006
Little Man FDR 0.018 0.033 0.047 0.033
Little Man Bonferroni 0.017 0.032 0.046 0.032
Little Man OLS 0.002 0.014 0.025 0.014
Little Man Random Forest -0.001 0.007 0.014 0.007
Little Man Xgboost -0.003 0.004 0.012 0.004
Little Man Linear SVM -0.003 0.005 0.013 0.005
Little Man Polynomial SVM -0.001 0.008 0.017 0.008
Little Man RBF SVM -0.006 0.001 0.008 0.001
Card Sort FDR 0.011 0.023 0.035 0.023
Card Sort Bonferroni 0.010 0.022 0.034 0.022
Card Sort OLS 0.002 0.016 0.030 0.016
Card Sort Random Forest -0.007 0.002 0.010 0.002
Card Sort Xgboost -0.006 0.002 0.011 0.002
Card Sort Linear SVM 0.000 0.007 0.014 0.007
Card Sort Polynomial SVM -0.004 0.000 0.005 0.000
Card Sort RBF SVM -0.008 -0.001 0.006 -0.001
Seq Memory FDR 0.000 0.011 0.023 0.012
Seq Memory Bonferroni -0.001 0.010 0.021 0.010
Seq Memory OLS 0.014 0.028 0.043 0.028
Seq Memory Random Forest -0.005 0.001 0.007 0.001
Seq Memory Xgboost -0.007 -0.002 0.003 -0.002
Seq Memory Linear SVM -0.001 0.008 0.016 0.008
Seq Memory Polynomial SVM -0.004 0.001 0.005 0.001
Seq Memory RBF SVM -0.004 0.001 0.006 0.001
Flanker FDR 0.006 0.019 0.033 0.019
Flanker Bonferroni 0.004 0.017 0.030 0.017
Flanker OLS 0.016 0.030 0.045 0.030
Flanker Random Forest -0.004 0.004 0.012 0.004
Flanker Xgboost -0.001 0.006 0.013 0.006
Flanker Linear SVM 0.002 0.010 0.017 0.010
Flanker Polynomial SVM 0.001 0.008 0.016 0.008
Flanker RBF SVM 0.006 0.015 0.025 0.015
Audi Verbal FDR -0.006 0.006 0.019 0.006
Audi Verbal Bonferroni -0.007 0.005 0.018 0.005
Audi Verbal OLS 0.021 0.034 0.046 0.034
Audi Verbal Random Forest -0.012 -0.004 0.004 -0.004
Audi Verbal Xgboost -0.009 0.000 0.010 0.000
Audi Verbal Linear SVM 0.001 0.007 0.014 0.007
Audi Verbal Polynomial SVM -0.007 -0.002 0.004 -0.002
Audi Verbal RBF SVM -0.007 -0.001 0.005 -0.001
Pattern Speed FDR 0.002 0.009 0.017 0.009
Pattern Speed Bonferroni 0.001 0.008 0.016 0.008
Pattern Speed OLS 0.002 0.016 0.031 0.016
Pattern Speed Random Forest -0.008 -0.003 0.002 -0.003
Pattern Speed Xgboost -0.006 -0.002 0.003 -0.002
Pattern Speed Linear SVM -0.006 0.002 0.011 0.002
Pattern Speed Polynomial SVM -0.004 -0.001 0.003 0.000
Pattern Speed RBF SVM -0.004 -0.001 0.003 -0.001
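
Reading these tables: every difference is the other algorithm minus Elastic Net, so negative values for Correlation and Traditional R-square, or positive values for MAE and RMSE, indicate worse performance than Elastic Net. By this criterion the mass-univariate models (FDR and Bonferroni) clearly underperform, while the multivariate algorithms mostly straddle zero.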

6 Shapley values

The Shapley-computing function:

library("fastshap")

## prediction wrapper for fastshap::explain: it must return a plain numeric
## vector of predictions (still needs to be tested against all of the models)
model_pred_fun <- function(object, newdata) {
  pred_results <- predict(object, new_data = newdata)
  return(pred_results$.pred)
}
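
As a self-contained illustration of the pred_wrapper pattern that fastshap::explain expects (a toy stand-in, not one of the models analysed here; mtcars and lm_toy_fit are assumptions for the example):

library(tidymodels)
library(fastshap)

## fit a small parsnip linear model on a stand-in data set
lm_toy_fit <- linear_reg() %>%
  set_engine("lm") %>%
  fit(mpg ~ ., data = mtcars)

## Monte-Carlo Shapley values; nsim is kept tiny here (1000 is used below)
toy_shap <- fastshap::explain(lm_toy_fit,
                              X = as.data.frame(mtcars[, -1]),  # predictors only
                              nsim = 10,
                              pred_wrapper = model_pred_fun)
head(toy_shap)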


model_shapley <- function(recipe_input, wf_input, resp_input, formula_input, split_input = data_split){
  train_input <- recipe_input %>% bake(new_data = NULL)

  model_final_fit <- 
    wf_input %>%
    parsnip::extract_spec_parsnip() %>%
    parsnip::fit(data = train_input, formula = formula_input)

  library(doFuture)
  registerDoFuture()
  plan(multisession(workers = 25))
  ## fastshap's .parallel option runs through plyr/foreach; registering doRNG
  ## makes the parallel RNG reproducible and avoids the false-alarm warning
  ## that plyr would otherwise give when running this chunk
  doRNG::registerDoRNG()

  model_shap <- model_final_fit %>% 
    fastshap::explain(X = train_input %>%
                        select(-all_of(resp_input)) %>%
                        as.data.frame(),
                      nsim = 1000,
                      pred_wrapper = model_pred_fun,
                      .parallel = TRUE)

  return(model_shap)
}

6.1 Compute Shapley values for linear SVM, RBF SVM, polynomial SVM and random forest

Do not use future_map for parallelism here: it would saturate the CPU at 100%. The calls below therefore use sequential pmap, with parallelism handled inside model_shapley.
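
If some parallelism is still wanted at a lower CPU load, the worker count set inside model_shapley can simply be reduced, e.g.:

plan(multisession(workers = 8))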

svm_linear_shap <- pmap(list(svm_linear_wfl_final_list,
                             recipe_list,
                             resp_names, 
                             formula_list),
                        ~model_shapley(recipe_input = ..2,
                                       wf_input = ..1,
                                       resp_input = ..3, 
                                       formula_input = ..4))
                          

saveRDS(svm_linear_shap, paste0(anotherFold,'working_memory_tasks/svm_linear_shap', '.RData'))


svm_rbf_shap <- pmap(list(SVM_RBF_wfl_final_list,
                          recipe_list,
                          resp_names, 
                          formula_list),
                     ~model_shapley(recipe_input = ..2,
                                    wf_input = ..1,
                                    resp_input = ..3, 
                                    formula_input = ..4))
                            

saveRDS(svm_rbf_shap, paste0(anotherFold,'working_memory_tasks/svm_rbf_shap', '.RData'))


svm_poly_shap <- pmap(list(svm_poly_wfl_final_list,
                           recipe_list,
                           resp_names,
                           formula_list),
                      ~model_shapley(recipe_input = ..2,
                                     wf_input = ..1,
                                     resp_input = ..3, 
                                     formula_input = ..4))
                            

saveRDS(svm_poly_shap, paste0(anotherFold,'working_memory_tasks/svm_poly_shap', '.RData'))


random_forest_shap <- pmap(list(random_forest_wfl_final_list,
                                recipe_list,
                                resp_names, 
                                formula_list),
                           ~model_shapley(recipe_input = ..2
                                          ,wf_input = ..1,
                                          resp_input = ..3, 
                                          formula_input = ..4))

saveRDS(random_forest_shap, paste0(anotherFold,'working_memory_tasks/random_forest_shap', '.RData'))

Compute the Shapley values for Nback only:

recipe_Nback= recipe_list[[resp_names[1]]]
formula_Nback = formula_list[[resp_names[1]]]



 svm_linear_shap_Nback <-  model_shapley(recipe_input = recipe_Nback,
                                       wf_input = svm_linear_wfl_final_list[[resp_names[1]]],
                                       resp_input = resp_names[1], 
                                       formula_input = formula_Nback)
  
 saveRDS(svm_linear_shap_Nback, paste0(anotherFold,'working_memory_tasks/svm_linear_shap_nback_Dec_29_2021', '.RData'))
 

 
 
 svm_rbf_shap_Nback <-  model_shapley(recipe_input = recipe_Nback,
                                       wf_input = SVM_RBF_wfl_final_list[[resp_names[1]]],
                                       resp_input = resp_names[1], 
                                       formula_input = formula_Nback)
  
saveRDS(svm_rbf_shap_Nback, paste0(anotherFold,'working_memory_tasks/svm_rbf_shap_nback_Mar_22_2022', '.RData'))
 
 
 
 svm_poly_shap_Nback <-  model_shapley(recipe_input = recipe_Nback,
                                       wf_input = svm_poly_wfl_final_list[[resp_names[1]]],
                                       resp_input = resp_names[1], 
                                       formula_input = formula_Nback)
  
 saveRDS(svm_poly_shap_Nback, paste0(anotherFold,'working_memory_tasks/svm_poly_shap_Nback_Dec_29_2021', '.RData'))
 
 
 
 
 random_forest_shap_Nback <-  model_shapley(recipe_input = recipe_Nback,
                                       wf_input = random_forest_wfl_final_list[[resp_names[1]]],
                                       resp_input = resp_names[1], 
                                       formula_input = formula_Nback)
  
 saveRDS(random_forest_shap_Nback, paste0(anotherFold,'working_memory_tasks/random_forest_shap_Nback_Dec_29_2021', '.RData'))

The resulting list of Shapley values should have one entry: Nback.

7 Correlation plots among feature importance

7.1 Processing the SHAP outputs and the parameter estimates for Elastic Net, OLS and the univariate simple regressions, and plotting the correlation plots

These correlation plots use the following feature-importance measures: for the univariate models, OLS and Elastic Net, the absolute coefficient, |coeff|; for Random Forest, XGBoost, linear SVM, RBF SVM and polynomial SVM, the sum of absolute Shapley values, |SHAP|.

We focus on plotting Nback and gfactor.

resp_names_nback <- resp_names[1]

##processing the shapley values
shap_value_precessing <- function(data_input, model_name){
  data_output <- data_input%>%tibble::as_tibble()%>% 
                      select(starts_with("roi_"))%>% 
                      abs()%>% colSums() 
  names_output <- names(data_output)
  out_tibble <- tibble(estimate= data_output%>% as.vector(), rois = names_output)
  names(out_tibble) <- c(model_name,"rois")
  return(out_tibble)
}
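
A minimal toy check of shap_value_precessing, using a hypothetical 3 x 2 SHAP matrix with roi_-prefixed columns:

toy_shap_mat <- tibble(roi_a = c(0.2, -0.1, 0.3), roi_b = c(-0.4, 0.1, -0.2))
shap_value_precessing(data_input = toy_shap_mat, model_name = "toy")
## expected: 0.6 for roi_a and 0.7 for roi_b -- the column sums of |SHAP|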

random_forest_shap_colsum <- random_forest_shap %>% 
  map(.,~shap_value_precessing(data_input = ., 
                               model_name = "random_forest"))

svm_poly_shap_abs <- svm_poly_shap %>% 
  map(.,~shap_value_precessing(
    data_input = .,
    model_name = "svm_poly"))

svm_rbf_shap_abs <- svm_rbf_shap %>% 
  map(.,~shap_value_precessing(
    data_input = .,
    model_name = "svm_rbf"))

svm_linear_shap_abs <- svm_linear_shap %>% 
  map(.,~shap_value_precessing(data_input = ., 
                               model_name = "svm_linear") )
xgboost_shap_abs <- xgboost_shap %>% 
  map(.,~shap_value_precessing(data_input = .,
                               model_name = "xgboost"))

### get the parameter estimations from OLS and enet
OLS_coefs <- OLS_fit%>% map(.,~broom::tidy(.)%>%
                   filter(term != "(Intercept)")%>%
                   rename(OLS_estimate = estimate, rois= term)%>%
                   select("rois","OLS_estimate"))

OLS_abs <- OLS_coefs %>% map(.,~select(.,"OLS_estimate")%>%
                               mutate(OLS = abs(OLS_estimate)))

OLS_all <- map2(.x=OLS_coefs,
                .y=OLS_abs,
                ~left_join(.x,.y, by = "OLS_estimate"))

OLS_all <- map(OLS_all,
               ~select(.,-"OLS_estimate"))

enet_coefs <- enet_final_fit_list%>% map(.,~broom::tidy(.)%>%
                  filter(term != "(Intercept)")%>%
                  rename(enet_estimate = estimate, 
                         rois= term)%>%
                    select("rois","enet_estimate"))
enet_abs <- enet_coefs %>% map(.,function(data_input = .){
  abs_val = abs(data_input[["enet_estimate"]])
  return(tibble(rois= data_input[["rois"]], enet = abs_val))
})

## extract the parameter estimation in simple linear regression
univar_fit <- map(simple_all_IQR,"model_broom")
univar_estimate <- univar_fit %>% map(.,function(data_input=.){
  abs_val = data_input[["estimate"]] %>% abs() 
  return(tibble(rois= data_input[["roi"]], univariate = abs_val))
})

                               
vi_all <- resp_names_nback %>% map(.,function(resp_input=.){
  out_data <- plyr::join_all(list(univar_estimate[[resp_input]],
                                  OLS_all[[resp_input]],
                                  enet_abs[[resp_input]],
                                  svm_linear_shap_abs[[resp_input]], 
                                  svm_poly_shap_abs[[resp_input]],
                                  svm_rbf_shap_abs[[resp_input]],
                                  random_forest_shap_colsum[[resp_input]],
                                  xgboost_shap_abs[[resp_input]]
                              ), by="rois", type="full")
return(out_data)
  })


vi_all_rename <- vi_all %>% map(.,function(data_input=.){
  names(data_input) = c("rois","Univariate","OLS","Elastic\nnet", "SVM\nlinear","SVM\nPolynomial", "SVM\nRBF", "Random\nForest","Xgboost")
  return(data_input)
})

7.1.1 Pearson correlation among feature importance across algorithms and response variables

vi_all_rename %>% purrr::map(.,~select(.,-"rois")%>%
                 cor(method = "pearson")%>%
                 ggcorrplot::ggcorrplot())
## $TFMRI_NB_ALL_BEH_C2B_RATE

7.1.2 Spearman correlation among feature importance across algorithms and response variables

vi_all_rename %>% map(.,~select(.,-"rois")%>%
                 cor(method = "spearman")%>%
                 ggcorrplot::ggcorrplot())
## $TFMRI_NB_ALL_BEH_C2B_RATE

7.1.3 Pearson correlation for feature importance predicting the n-back performance

library(GGally)
ggpairs(vi_all_rename[[1]]%>% select(-"rois"),
        upper = list(combo = "facetdensity"))+
  theme(axis.text.x=NULL,
        axis.text.y=NULL,
        axis.title.y=NULL,
        axis.title.x=NULL,
        plot.title=NULL)

7.1.4 Spearman correlation for feature importance predicting the n-back performance

### change the font size
ggpairs(vi_all_rename[[1]]%>% select(-"rois"),
        upper = list(continuous = GGally::wrap("cor",
                                               method="spearman", 
                                               size = 4)),
        lower = list(combo = "facetdensity"))+
  theme(axis.text.x=element_text(angle = 90))

7.2 Correlation Plots for rois that passed eNetXplorer in predicting the n-back performance

Plot the selected areas that are significant according to the elastic-net p-values:

coefs_enet_nback <- list(TFMRI_NB_ALL_BEH_C2B_RATE=coefs_enet_all[[resp_names[1]]])
vi_all_select_rename <- map2(.x = vi_all_rename, .y = coefs_enet_nback, 
                             ~filter(.x, .x[["rois"]] %in% .y[["variable"]]))

7.2.1 Pearson correlation for feature importance predicting the n-back performance for rois that passed eNetXplorer

vi_all_select_rename %>% purrr::map(.,~select(.,-"rois")%>%
                 cor(method = "pearson")%>%
                 ggcorrplot::ggcorrplot()+
                ggtitle("Pearson correlation plot"))
## $TFMRI_NB_ALL_BEH_C2B_RATE

7.2.2 Spearman correlation for feature importance predicting the n-back performance for rois that passed eNetXplorer

### plotting for the selected rois
vi_all_select_rename %>% map(.,~select(.,-"rois")%>%
                 cor(method = "spearman")%>%
                 ggcorrplot::ggcorrplot()+
                ggtitle("Spearman's rank correlation coefficien plot"))
## $TFMRI_NB_ALL_BEH_C2B_RATE

7.2.3 Pearson correlation for feature importance predicting the n-back performance for rois that passed eNetXplorer

ggpairs(vi_all_select_rename[[1]]%>% select(-"rois"),
        upper = list(combo = "facetdensity"))+
  theme(axis.text.x=NULL,
        axis.text.y=NULL,
        axis.title.y=NULL,
        axis.title.x=NULL)+
      ggtitle("Pearson correlation of the regions that predicted N-Back behavior\nwith eNetXplorer p<.05")

7.2.4 Spearman correlation for feature importance predicting the n-back performance for rois that passed eNetXplorer

### change the font size
ggpairs(vi_all_select_rename[[1]]%>% select(-"rois"),
        upper = list(continuous = GGally::wrap("cor",method="spearman", size = 4)),
        lower = list(combo = "facetdensity"))+
  theme(axis.text.x=element_text(angle = 90))+
      ggtitle("Spearman's rank correlation of the regions that predicted N-Back behavior\nwith eNetXplorer p<.05")

7.3 Correlations among the top 30 ROIs

Create a rank across algorithms (with data.table::frankv); a toy illustration of how frankv ranks rows follows the chunk below.

TFMRI_NB_ALL_BEH_C2B_RATE_rank <- vi_all_rename$TFMRI_NB_ALL_BEH_C2B_RATE %>% 
  select(-rois) %>% 
  data.table::frankv(ties.method = "min")

vi_all_rename_TFMRI_NB_ALL_BEH_C2B_RATE_rank <- 
  cbind(vi_all_rename$TFMRI_NB_ALL_BEH_C2B_RATE, TFMRI_NB_ALL_BEH_C2B_RATE_rank)
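
For clarity on what frankv does with a data frame: it ranks rows, ordering by the first column and breaking ties with the later columns. A hypothetical example:

toy_rank <- data.frame(a = c(1, 2, 2), b = c(9, 3, 5))
data.table::frankv(toy_rank, ties.method = "min")
## [1] 1 2 3  -- rows 2 and 3 tie on a, so b breaks the tie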

7.3.1 Pearson correlation in feature importance among the top 30 ROIs that predicted n-back performance

Select the top-ranked regions (filtering to ranks above 137 leaves the top 30):

vi_all_rename_TFMRI_NB_ALL_BEH_C2B_RATE_rank %>% 
  filter(TFMRI_NB_ALL_BEH_C2B_RATE_rank > 137) %>% 
  select(-"rois",-"TFMRI_NB_ALL_BEH_C2B_RATE_rank") %>%
ggpairs(
        upper = list(continuous =  GGally::wrap("cor",method="pearson", size = 4)),
        lower = list(combo = "facetdensity"))+
  theme(axis.text.x=element_text(angle = 90))+
  ggtitle("Pearson correlation of the top 30 regions that predicted N-Back Performance")

7.3.2 Spearman correlation in feature importance among the top 30 ROIs that predicted n-back performance

vi_all_rename_TFMRI_NB_ALL_BEH_C2B_RATE_rank %>% 
  filter(TFMRI_NB_ALL_BEH_C2B_RATE_rank > 137) %>% 
  select(-"rois",-"TFMRI_NB_ALL_BEH_C2B_RATE_rank") %>%
ggpairs(
        upper = list(continuous =  GGally::wrap("cor",method="spearman", size = 4)),
        lower = list(combo = "facetdensity"))+
  theme(axis.text.x=element_text(angle = 90))+
      ggtitle("Spearman's rank correlation of the top 30 regions that predicted N-Back Performance")

7.4 Shapley values for gfactor

svm_rbf_shap_gfactor <- pmap(list(SVM_RBF_wfl_final_list_gfactor,
                                  recipe_gfactor,
                                  cfa_resp_names,
                                  formula_gfactor),
                     ~model_shapley(wf_input = ..1,
                                    recipe_input = ..2,
                                    resp_input = ..3,
                                    formula_input = ..4))
saveRDS(svm_rbf_shap_gfactor, paste0(anotherFold,'working_memory_tasks/windows/svm_rbf_shap_gfactor_Mar_21_2022', '.RData'))


svm_linear_shap_gfactor <- pmap(list(svm_linear_wfl_final_list_gfactor,
                                     recipe_gfactor,
                                     cfa_resp_names, 
                                     formula_gfactor),
                         ~model_shapley(wf_input = ..1,
                                        recipe_input = ..2,
                                        resp_input = ..3,
                                        formula_input = ..4))

saveRDS(svm_linear_shap_gfactor, paste0(anotherFold,'working_memory_tasks/windows/svm_linear_shap_gfactor_Nov_08_2021', '.RData'))


random_forest_shap_gfactor <- pmap(list(random_forest_wfl_final_list_gfactor,
                                        recipe_gfactor,
                                        cfa_resp_names,
                                        formula_gfactor),
                           ~model_shapley(wf_input = ..1,
                                          recipe_input = ..2,
                                          resp_input = ..3, 
                                          formula_input = ..4))
                            

saveRDS(random_forest_shap_gfactor, paste0(anotherFold,'working_memory_tasks/windows/random_forest_shap_gfactor_Nov_08_2021', '.RData'))



svm_poly_shap_gfactor <- pmap(list(svm_poly_wfl_final_list_gfactor,
                                   recipe_gfactor,
                                   cfa_resp_names, 
                                   formula_gfactor),
                              ~model_shapley(wf_input = ..1,
                                             recipe_input = ..2,
                                             resp_input = ..3,
                                             formula_input = ..4))
                            
saveRDS(svm_poly_shap_gfactor, paste0(anotherFold,'working_memory_tasks/windows/svm_poly_shap_gfactor_Nov_04_2021', '.RData'))

7.4.1 Correlation plots among feature importance for gfactor

random_forest_shap_colsum_gfactor <- random_forest_shap_gfactor %>% 
  map(.,~shap_value_precessing(data_input = ., 
                               model_name = "random_forest"))

xgboost_shap_abs_gfactor <- xgboost_shap_gfactor %>% 
  map(.,~shap_value_precessing(data_input = ., 
                               model_name = "xgboost") )

svm_linear_shap_abs_gfactor <- svm_linear_shap_gfactor %>% 
  map(.,~shap_value_precessing(data_input = ., 
                               model_name = "svm_linear") )

svm_rbf_shap_abs_gfactor <- svm_rbf_shap_gfactor %>% 
  map(.,~shap_value_precessing(data_input = ., 
                               model_name = "svm_rbf"))

svm_poly_shap_abs_gfactor <- svm_poly_shap_gfactor %>% 
  map(.,~shap_value_precessing(data_input = ., 
                               model_name = "svm_poly") )



### get the parameter estimations from OLS and enet
OLS_coefs_gfactor <- OLS_fit_gfactor%>% map(.,~broom::tidy(.)%>%
                   filter(term != "(Intercept)")%>%
                    rename(OLS_estimate = estimate, rois= term)%>%
                     select("rois","OLS_estimate"))

OLS_abs_gfactor <- OLS_coefs_gfactor %>% map(.,~select(.,"OLS_estimate")%>%
                               mutate(OLS = abs(OLS_estimate)))

OLS_all_gfactor <- map2(.x=OLS_coefs_gfactor,.y=OLS_abs_gfactor,~left_join(.x,.y, by = "OLS_estimate"))

OLS_all_gfactor <- map(OLS_all_gfactor,~select(.,-"OLS_estimate"))


enet_coefs_gfactor <- enet_final_fit_list_gfactor%>% map(.,~broom::tidy(.)%>%
                                                           filter(term != "(Intercept)")%>%
                                                           rename(enet_estimate = estimate, 
                                                                  rois= term)%>%
                                                           select("rois","enet_estimate"))

enet_abs_gfactor <- enet_coefs_gfactor %>% 
  map(.,function(data_input = .){
  abs_val = abs(data_input[["enet_estimate"]])
  return(tibble(rois= data_input[["rois"]], 
                enet = abs_val))
})

## extract the parameter estimation in simple linear regression
univar_fit_gfactor <- map(simple_all_IQR_gfactor,
                          "model_broom")
univar_estimate_gfactor <- univar_fit_gfactor %>% 
  map(.,function(data_input=.){
    abs_val = data_input[["estimate"]] %>% 
      abs() 
  return(tibble(rois= data_input[["roi"]], 
                univariate = abs_val))
})

                               
vi_all_gfactor <- cfa_resp_names %>% map(.,function(resp_input=.){
  out_data <- plyr::join_all(list(univar_estimate_gfactor[[resp_input]],
                                  OLS_all_gfactor[[resp_input]],
                                  enet_abs_gfactor[[resp_input]],
                                  svm_linear_shap_abs_gfactor[[resp_input]], 
                                  svm_poly_shap_abs_gfactor[[resp_input]],
                                  svm_rbf_shap_abs_gfactor[[resp_input]],
                                  random_forest_shap_colsum_gfactor[[resp_input]],
                                  xgboost_shap_abs_gfactor[[resp_input]]
                              ), by="rois", type="full")
return(out_data)
  })


vi_all_rename_gfactor <- vi_all_gfactor %>% 
  map(.,function(data_input=.){
  names(data_input) = c("rois",
                        "Univariate",
                        "OLS",
                        "Elastic\nnet", 
                        "SVM\nlinear",
                        "SVM\nPloynomial", 
                        "SVM\nRBF", 
                        "Random\nForest",
                        "Xgboost")
  return(data_input)
})


vi_all_select_rename_gfactor <- map2(.x = vi_all_rename_gfactor, 
                                     .y = coefs_enet_gfactor, 
                             ~filter(.x, .x[["rois"]] %in% .y[["variable"]]))

7.4.2 Pearson correlation for feature importance predicting the g-factor

vi_all_rename_gfactor %>% purrr::map(.,~select(.,-"rois")%>%
                 cor(method = "pearson")%>%
                 ggcorrplot::ggcorrplot()+
      ggtitle("Pearson correlation coefficien polt for gfactor"))
## $gfactor

7.4.3 Spearman correlation for feature importance predicting the g-factor

vi_all_rename_gfactor %>% map(.,~select(.,-"rois")%>%
                 cor(method = "spearman")%>%
                 ggcorrplot::ggcorrplot()+
      ggtitle("Spearman's rank correlation coefficien plot for gfactor"))
## $gfactor

7.4.4 Pearson correlation for feature importance predicting the g-factor

ggpairs(vi_all_rename_gfactor[[1]]%>% select(-"rois"),
        upper = list(combo = "facetdensity"))+
  theme(axis.text.x=NULL,
        axis.text.y=NULL,
        axis.title.y=NULL,
        axis.title.x=NULL)+
      ggtitle("Pearson correlation of feature importance for the g-factor")

7.4.5 Spearman correlation for feature importance predicting the g-factor

### change the font size
ggpairs(vi_all_rename_gfactor[[1]]%>% select(-"rois"),
        upper = list(continuous = GGally::wrap("cor",method="spearman", size = 4)),
        lower = list(combo = "facetdensity"))+
  theme(axis.text.x=element_text(angle = 90))+
      ggtitle("Spearman's rank correlation of feature importance for the g-factor")

7.5 Correlation Plots for rois that passed eNetXplorer in predicting the g-factor

Plot the selected areas that are significant according to the elastic-net p-values:

vi_all_select_rename_gfactor <- map2(.x = vi_all_rename_gfactor, .y = coefs_enet_gfactor, 
                             ~filter(.x, .x[["rois"]] %in% .y[["variable"]]))

7.5.1 Pearson correlation for feature importance predicting the g-factor for rois that passed eNetXplorer

vi_all_select_rename_gfactor %>% purrr::map(.,~select(.,-"rois")%>%
                 cor(method = "pearson")%>%
                 ggcorrplot::ggcorrplot()+
                ggtitle("Pearson correlation plot"))
## $gfactor

7.5.2 Spearman correlation for feature importance predicting the g-factor for rois that passed eNetXplorer

### plotting for the selected rois
vi_all_select_rename_gfactor %>% map(.,~select(.,-"rois")%>%
                 cor(method = "spearman")%>%
                 ggcorrplot::ggcorrplot()+
                ggtitle("Spearman's rank correlation coefficien plot"))
## $gfactor

7.5.3 Pearson correlation for feature importance predicting the g-factor for rois that passed eNetXplorer

ggpairs(vi_all_select_rename_gfactor[[1]]%>% select(-"rois"),
        upper = list(combo = "facetdensity"))+
  theme(axis.text.x=NULL,
        axis.text.y=NULL,
        axis.title.y=NULL,
        axis.title.x=NULL)+
      ggtitle("Pearson correlation of the regions that predicted the g-factor\nwith eNetXplorer p<.05")

7.5.4 Spearman correlation for feature importance predicting the g-factor for rois that passed eNetXplorer

### change the font size
ggpairs(vi_all_select_rename_gfactor[[1]]%>% select(-"rois"),
        upper = list(continuous = GGally::wrap("cor",method="spearman", size = 4)),
        lower = list(combo = "facetdensity"))+
  theme(axis.text.x=element_text(angle = 90))+
      ggtitle("Spearman's rank correlation of the regions that predicted g-factor\nwith eNetXplorer p<.05")

7.6 Correlations among the top 30 ROIs

Create a rank across algorithms (with data.table::frankv), as in section 7.3:

gfactor_rank <- vi_all_rename_gfactor[[1]] %>% 
  select(-rois) %>% 
  data.table::frankv(ties.method = "min")

vi_all_rename_gfactor_rank <- 
  cbind(vi_all_rename_gfactor[[1]], gfactor_rank)

Read and process the output from the Sripada et al. (2019) paper:

roi_table<- read.table(paste0(anotherFold,"working_memory_tasks/Destr_FS_cifti_index.txt"),header = FALSE)
roi_number <- read.table(paste0(anotherFold,"working_memory_tasks/WM11_2_0bk_destr.txt"),header = FALSE)
sub_roi_table <- read.csv(paste0(anotherFold,"working_memory_tasks/WM11_subcortex_FS.csv"),header = FALSE)%>%
                 rename( roi_names_other=V1, other_paper=V2)

roi_tibble <- tibble(roi_names_other = roi_table$V1, other_paper = roi_number$V1)%>%
              mutate(rois = roi_names_other)

roi_tibble_test <- roi_tibble%>%
                   mutate(rois = str_replace_all(rois,"L_","lh_"))%>%
                   mutate(rois = str_replace_all(rois,"R_","rh_"))%>%
                   mutate(rois = str_replace_all(rois,"-","."))%>%
                   mutate(rois = paste0("roi_", rois))
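
For instance, a hypothetical label such as L_S_front-sup would come out of these replacements as roi_lh_S_front.sup.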

vi_gfactor_joined <- left_join(roi_tibble_test,vi_all_gfactor$gfactor, by = "rois")


vi_gfactor_abs <- vi_gfactor_joined%>%
                      mutate(other_paper = abs(other_paper))


sub_roi_tibble <- sub_roi_table %>% tibble::as_tibble()%>%
                     rename(rois = V3)%>%
                     mutate(rois = paste0("roi_", rois))

sub_roi_tibble_joined <- left_join(sub_roi_tibble, vi_all_gfactor$gfactor, by = "rois")


sub_roi_tibble_abs <- sub_roi_tibble_joined%>%
                      mutate(other_paper = abs(other_paper))

other_paper_vi_all <- bind_rows(sub_roi_tibble_joined,vi_gfactor_joined)%>%
                      mutate(other_paper = abs(other_paper))


other_paper_vi_all_raw <- bind_rows(sub_roi_tibble_joined,vi_gfactor_joined)

gfactor_rank_other_paper <- other_paper_vi_all %>% 
  select(-rois) %>%
  select(-roi_names_other)%>%
  data.table::frankv(ties.method = "min")

vi_all_other_paper_gfactor_rank <- 
  cbind(other_paper_vi_all, gfactor_rank_other_paper)

vi_all_other_paper_gfactor_rank_name <- vi_all_other_paper_gfactor_rank


## the three frames share the same display names
vi_rank_col_names <- c("roi_names_other","Sripada\n2019","rois","Univariate","OLS","Elastic\nnet","SVM\nlinear","SVM\nPolynomial","SVM\nRBF","Random\nForest","Xgboost","gfactor_rank_other_paper")

colnames(vi_all_other_paper_gfactor_rank) <- vi_rank_col_names
colnames(sub_roi_tibble_abs) <- vi_rank_col_names
colnames(vi_gfactor_abs) <- vi_rank_col_names

7.6.1 Pearson and Spearman correlations in feature importance among the top 30 ROIs that predicted the g-factor

Select the top-ranked regions:

vi_all_rename_gfactor_rank %>% 
  filter(gfactor_rank > 137) %>% 
  select(-"rois",-"gfactor_rank") %>%
ggpairs(
        upper = list(continuous =  GGally::wrap("cor",method="pearson", size = 4)),
        lower = list(combo = "facetdensity"))+
  theme(axis.text.x=element_text(angle = 90))+
      ggtitle("Pearson correlation of the top 30 regions that predicted the G-Factor")

vi_all_rename_gfactor_rank %>% 
  filter(gfactor_rank > 137) %>% 
  select(-"rois",-"gfactor_rank") %>%
ggpairs(
        upper = list(continuous =  GGally::wrap("cor",method="spearman", size = 4)),
        lower = list(combo = "facetdensity"))+
  theme(axis.text.x=element_text(angle = 90))+
      ggtitle("Spearman's rank correlation of  the top 30 regions that predicted the G-Factor")

7.6.2 Spearman and Pearson correlations in variable importance among all the ROIs that predicted the g-factor, along with the Sripada et al. (2019) paper

vi_all_other_paper_gfactor_rank %>% 
 # filter(gfactor_rank_other_paper > 137) %>% 
  select(-"rois",-"gfactor_rank_other_paper",-"roi_names_other") %>%
ggpairs(
        upper = list(continuous =  GGally::wrap("cor",method="spearman", size = 4)),
        lower = list(combo = "facetdensity"))+
  theme(axis.text.x=element_text(angle = 90))+
      ggtitle("Spearman's rho of variable importance for the G-Factor: All regions")

vi_all_other_paper_gfactor_rank %>% 
 # filter(gfactor_rank_other_paper > 137) %>% 
  select(-"rois",-"gfactor_rank_other_paper",-"roi_names_other") %>%
ggpairs(
        upper = list(continuous =  GGally::wrap("cor",method="pearson", size = 4)),
        lower = list(combo = "facetdensity"))+
  theme(axis.text.x=element_text(angle = 90))+
      ggtitle("Pearson's r of variable importance for the G-Factor: All regions")

Correlation plots for the subcortex only:

sub_roi_tibble_abs %>% 
 # filter(gfactor_rank_other_paper > 137) %>% 
  select(-"rois",-"roi_names_other") %>%
ggpairs(
        upper = list(continuous =  GGally::wrap("cor",method="spearman", size = 4)),
        lower = list(combo = "facetdensity"))+
  theme(axis.text.x=element_text(angle = 90))+
      ggtitle("Spearman's rho of variable importance for the G-Factor: Subcortical regions")

sub_roi_tibble_abs %>% 
 # filter(gfactor_rank_other_paper > 137) %>% 
  select(-"rois",-"roi_names_other") %>%
ggpairs(
        upper = list(continuous =  GGally::wrap("cor",method="pearson", size = 4)),
        lower = list(combo = "facetdensity"))+
  theme(axis.text.x=element_text(angle = 90))+
      ggtitle("Pearson's r of variable importance for the G-Factor: Subcortical regions")

Correlation plots for the cortex only:

vi_gfactor_abs %>% 
 # filter(gfactor_rank_other_paper > 137) %>% 
  select(-"rois",-"roi_names_other") %>%
ggpairs(
        upper = list(continuous =  GGally::wrap("cor",method="spearman", size = 4)),
        lower = list(combo = "facetdensity"))+
  theme(axis.text.x=element_text(angle = 90))+
      ggtitle("Spearman's rho of variable importance for the G-Factor: Cortical regions")

vi_gfactor_abs %>% 
 # filter(gfactor_rank_other_paper > 137) %>% 
  select(-"rois",-"roi_names_other") %>%
ggpairs(
        upper = list(continuous =  GGally::wrap("cor",method="pearson", size = 4)),
        lower = list(combo = "facetdensity"))+
  theme(axis.text.x=element_text(angle = 90))+
      ggtitle("Pearson's r of variable importance for the G-Factor: Cortical regions")

8 Feature importance, plotted on the brain

Add the parameter estimates to the variable-importance frame and plot all ROIs on the brain. Note that for RBF SVM, polynomial SVM, linear SVM, random forest and XGBoost the sum of absolute Shapley values is used, while for mass-univariate, OLS, Elastic Net and eNetXplorer the coefficient is used; coefficients provide directionality.

library(ggseg)
library(ggsegExtra)
library(ggsegDesterieux)

vi_all_select <- map2(.x = vi_all,
                      .y =coefs_enet_all ,
                      ~filter(.x, 
                              .x [["rois"]] %in% .y[["variable"]]))

vi_all_select_gfactor <- map2(.x = vi_all_gfactor,
                              .y =coefs_enet_gfactor ,
                              ~filter(.x, .x [["rois"]] %in% .y[["variable"]]))

## adding Nback eNetXplorer fitted results to the variable importance frame

OLS_coefs_nback_prepare <- OLS_coefs[[resp_names[1]]]%>% rename(OLS = OLS_estimate)
enet_coefs_nback_prepare <- enet_coefs[[resp_names[1]]]%>% rename(enet = enet_estimate)


univar_coefs_fdr_nback_prepare <- univariate_model_fdr[[resp_names[1]]] %>% 
  filter(FDR < .05) %>%
  select("roi","estimate")%>%
  rename(rois = roi, FDR = estimate)

univar_coefs_bonf_nback_prepare <- univariate_model_fdr[[resp_names[1]]] %>% 
  filter(bonferroni < .05) %>%
  select("roi","estimate")%>%
  rename(rois = roi, bonferroni = estimate)

OLS_coefs_p05 <- OLS_fit%>% map(.,~broom::tidy(.) %>%
                   filter(term != "(Intercept)") %>%
                   filter(p.value < .05) %>%     
                   rename(OLS_estimate = estimate, rois= term)%>%
                   select("rois","OLS_estimate"))

OLS_coefs_p05_nback_prepare <- OLS_coefs_p05[[resp_names[1]]]%>% 
  rename(OLS_p05 = OLS_estimate)

nrow(OLS_coefs_p05_nback_prepare)
## [1] 23
enetxplorer_coefs_nback_prepare <- 
  extract_tibble(fit_explorer_all[[resp_names[1]]], 
                 alpha_index =
                   paste0("a",
                          best_enet_model_list[[resp_names[1]]]$mixture))%>%
  filter(type == "Target")%>%
  filter(pvalue < .05) %>%
  select("variable","wmean")%>%
  rename(rois = variable,eNetXplorer= wmean)
                     
vi_all_nback <-  plyr::join_all(list(univar_coefs_fdr_nback_prepare,
                                     univar_coefs_bonf_nback_prepare,
                                     OLS_coefs_nback_prepare,
                                     OLS_coefs_p05_nback_prepare,
                                     enet_coefs_nback_prepare,
                                     enetxplorer_coefs_nback_prepare,
                                     svm_linear_shap_abs[[resp_names[1]]], 
                                     svm_poly_shap_abs[[resp_names[1]]],
                                     svm_rbf_shap_abs[[resp_names[1]]],
                                     random_forest_shap_colsum[[resp_names[1]]],
                                     xgboost_shap_abs[[resp_names[1]]]
                              ), by="rois", type="full")

###get the right coefficients from mass univariate OLS, enet and enexplorer

OLS_coefs_gfactor_prepare <- OLS_coefs_gfactor[[cfa_resp_names[1]]]%>% rename(OLS = OLS_estimate)
enet_coefs_gfactor_prepare <- enet_coefs_gfactor[[cfa_resp_names[1]]]%>% rename(enet = enet_estimate)


univar_coefs_fdr_gfactor_prepare <- univariate_model_broom_gfactor[[cfa_resp_names[1]]] %>% 
  filter(FDR < .05) %>%
  select("roi","estimate")%>%
  rename(rois = roi, FDR = estimate)

univar_coefs_bonf_gfactor_prepare <- univariate_model_broom_gfactor[[cfa_resp_names[1]]] %>% 
  filter(bonferroni < .05) %>%
  select("roi","estimate")%>%
  rename(rois = roi, bonferroni = estimate)

OLS_coefs_p05_gfactor <- OLS_fit_gfactor%>% map(.,~broom::tidy(.) %>%
                   filter(term != "(Intercept)") %>%
                   filter(p.value < .05) %>%     
                   rename(OLS_estimate = estimate, rois= term)%>%
                   select("rois","OLS_estimate"))

OLS_coefs_p05_gfactor_prepare <- OLS_coefs_p05_gfactor[[cfa_resp_names[1]]]%>% 
  rename(OLS_p05 = OLS_estimate)

nrow(OLS_coefs_p05_gfactor_prepare)
## [1] 23
enetxplorer_coefs_gfactor_prepare <- extract_tibble(fit_explorer_gfactor[[cfa_resp_names[1]]], 
                    alpha_index = paste0("a",best_enet_model_list_gfactor[[cfa_resp_names[1]]]$mixture))%>%
                    filter(type == "Target")%>%
                    filter(pvalue < .05) %>%
                    select("variable","wmean")%>%
                    rename(rois = variable,
                           eNetXplorer= wmean)

vi_all_gfactor_enetxplorer <- plyr::join_all(list(univar_coefs_fdr_gfactor_prepare,
                                  univar_coefs_bonf_gfactor_prepare,                
                                  OLS_coefs_gfactor_prepare,
                                  OLS_coefs_p05_gfactor_prepare,
                                  enet_coefs_gfactor_prepare,
                                  enetxplorer_coefs_gfactor_prepare,
                                  svm_linear_shap_abs_gfactor[[cfa_resp_names[1]]], 
                                  svm_poly_shap_abs_gfactor[[cfa_resp_names[1]]],
                                  svm_rbf_shap_abs_gfactor[[cfa_resp_names[1]]],
                                  random_forest_shap_colsum_gfactor[[cfa_resp_names[1]]],
                                  xgboost_shap_abs_gfactor[[cfa_resp_names[1]]]
                              ), by="rois", type="full")
  

vi_enetxplorer <-list( TFMRI_NB_ALL_BEH_C2B_RATE = vi_all_nback, 
                       gfactor = vi_all_gfactor_enetxplorer)
brainPlot_names_eNetXplorer <- tibble(vec_names = colnames(vi_enetxplorer$TFMRI_NB_ALL_BEH_C2B_RATE[2:length(vi_enetxplorer$TFMRI_NB_ALL_BEH_C2B_RATE)]), 
                         plotting_names = c("Mass-Univariate FDR-Corrected",
                                            "Mass-Univariate Bonferroni-Corrected",
                                            "OLS",
                                            "OLS with p<.05",
                                            "Elastic Net",
                                            "eNetXplorer with p<.05",
                                            "Linear SVM", 
                                            "Polynomial SVM", 
                                            "RBF SVM",
                                            "Random Forest", 
                                            "XGBoost"))


brainPrepTibFunc <- function(vi_input,resp_input, algorithm_vec = brainPlot_names_eNetXplorer) { 
      
      brainPlotTib <- vi_input[[resp_input]] %>%
    rename(label = rois) %>%  
  #  mutate(.,label = str_replace_all(variable, str_type, '')) %>%
      mutate(.,label = str_replace_all(label, '\\.', '-')) %>%
      mutate(.,label = str_replace_all(label, 'Brain-Stem', 'brain-stem')) %>% 
      mutate(.,label = str_replace_all(label, 'Right-Cerebellum-Cortex', 'right-cerebellum-cortex')) %>%
      mutate(.,label = str_replace_all(label, 'roi_', ''))

ggsegDesterieux_resp <- ggsegDesterieux::desterieux %>% as_tibble %>% 
        select(label) %>% 
        na.omit() %>%
        left_join(brainPlotTib, by = "label")

ggsegAseg_resp <- ggseg::aseg$data %>% as_tibble %>% 
        select(label) %>% 
        na.omit() %>%
        left_join(brainPlotTib, by = "label")


estimateCort_resp <- 
  purrr::map2(.x = algorithm_vec$vec_names,
              .y = algorithm_vec$plotting_names,
             ~ggseg(.data = ggsegDesterieux_resp,
              atlas = 'desterieux', 
              mapping = aes_string(fill = .x[[1]]),
              colour="black"
        ) + 
        theme_void() +
        scale_fill_gradient2(
          limits = c(min(c(ggsegDesterieux_resp[[.x]],ggsegAseg_resp[[.x]]), na.rm = TRUE), 
                    max(c(ggsegDesterieux_resp[[.x]],ggsegAseg_resp[[.x]]), na.rm = TRUE)), 
          midpoint = 0, low = "blue", mid = "white",
          high = "red", space = "Lab", na.value="transparent" ) +
        theme(legend.position = "none") +
        labs(title =.y) 
        )

estimateAseg_resp <- 
  algorithm_vec$vec_names %>% 
  purrr::map(., ~ggseg(.data = ggsegAseg_resp,
              atlas = 'aseg', 
              mapping = aes_string(fill = .x[[1]]),
              view = "axial",
              colour="black"
        ) + 
        theme_void() +
        scale_fill_gradient2(
          limits = c(min(c(ggsegDesterieux_resp[[.x]],ggsegAseg_resp[[.x]]), na.rm = TRUE), 
                    max(c(ggsegDesterieux_resp[[.x]],ggsegAseg_resp[[.x]]), na.rm = TRUE)), 
          midpoint = 0, low = "blue", mid = "white",
          high = "red", space = "Lab", na.value="transparent" ) +
        guides(fill = guide_colourbar(barwidth = 0.5, barheight = 3, title = NULL)) 
        ) 


return(list(estimateCort_resp=estimateCort_resp,
            estimateAseg_resp = estimateAseg_resp))
    }

plot_vec <- seq(1,11,by = 1)

### N-back, all ROIs
nback_brain_plot <- brainPrepTibFunc(vi_input = vi_enetxplorer, 
                                     resp_input = resp_names[1])

plot_vec %>% map(.,~gridExtra::grid.arrange(nback_brain_plot[["estimateCort_resp"]][[.]],
                                  nback_brain_plot[["estimateAseg_resp"]][[.]],
                                  nrow = 1, ncol = 2, widths = c(4, 1.5)))

## (console output: 11 TableGrobs, one 1 x 2 cortical/subcortical pair of plots per algorithm)
estimateCort_nback_all <- ggpubr::ggarrange(plotlist=nback_brain_plot[["estimateCort_resp"]],nrow=11)

estimateAseg_nback_all <- ggpubr::ggarrange(plotlist=nback_brain_plot[["estimateAseg_resp"]],nrow=11) +
        theme(plot.margin=grid::unit(c(0,0,0,0), "mm"))  

nback_feature_importance_plot <- 
  ggpubr::ggarrange(plotlist=list(estimateCort_nback_all, estimateAseg_nback_all),
                             ncol=2,common.legend = TRUE,legend = "right")

ggpubr::annotate_figure(nback_feature_importance_plot, 
                 top = ggpubr::text_grob("Variable Importance for the N-Back Behavioral Performance", 
                                 color = "black", face = "bold", size = 14, hjust = .6))

### g-factor, all ROIs

gfactor_brain_plot <- brainPrepTibFunc(vi_input = vi_enetxplorer,
                                       resp_input = cfa_resp_names[1])


plot_vec %>% map(.,~gridExtra::grid.arrange(gfactor_brain_plot[["estimateCort_resp"]][[.]],
                                 gfactor_brain_plot[["estimateAseg_resp"]][[.]],
                                  nrow = 1, ncol = 2, widths = c(4, 1.5)))

## (console output: 11 TableGrobs, one 1 x 2 cortical/subcortical pair of plots per algorithm)
estimateCort_gfactor_all <- ggpubr::ggarrange(plotlist=gfactor_brain_plot[["estimateCort_resp"]],nrow=11)


estimateAseg_gfactor_all <- ggpubr::ggarrange(plotlist=gfactor_brain_plot[["estimateAseg_resp"]],nrow=11) +
        theme(plot.margin=grid::unit(c(0,0,0,0), "mm"))  

gfactor_feature_importance_plot <- 
  ggpubr::ggarrange(plotlist=list(estimateCort_gfactor_all,
                                  estimateAseg_gfactor_all),
                                  ncol=2,
                                  common.legend = TRUE,
                                  legend = "right")

ggpubr::annotate_figure(gfactor_feature_importance_plot, 
                 top = ggpubr::text_grob("Variable Importance for the G-Factor", 
                                 color = "black", face = "bold", size = 14, hjust = 1))

brainPlotTib_sri <- other_paper_vi_all_raw %>%
    rename(label = rois) %>%  
  #  mutate(.,label = str_replace_all(variable, str_type, '')) %>%
      mutate(.,label = str_replace_all(label, '\\.', '-')) %>%
      mutate(.,label = str_replace_all(label, 'Brain-Stem', 'brain-stem')) %>% 
      mutate(.,label = str_replace_all(label, 'Right-Cerebellum-Cortex', 'right-cerebellum-cortex')) %>%
      mutate(.,label = str_replace_all(label, 'roi_', ''))

ggsegDesterieux_resp_sri <- ggsegDesterieux::desterieux %>% as_tibble %>% 
        select(label) %>% 
        na.omit() %>%
        left_join(brainPlotTib_sri, by = "label")

ggsegAseg_resp_sri <- ggseg::aseg$data %>% as_tibble %>% 
        select(label) %>% 
        na.omit() %>%
        left_join(brainPlotTib_sri, by = "label")


ggseg(.data = ggsegDesterieux_resp_sri,
              atlas = 'desterieux', 
              mapping = aes_string(fill = "other_paper"),
              colour="black"
        ) + 
        theme_void() +
        scale_fill_gradient2(
          limits = c(min(c(ggsegDesterieux_resp_sri[["other_paper"]],ggsegAseg_resp_sri[["other_paper"]]), na.rm = TRUE), 
                    max(c(ggsegDesterieux_resp_sri[["other_paper"]],ggsegAseg_resp_sri[["other_paper"]]), na.rm = TRUE)), 
          midpoint = 0, low = "blue", mid = "white",
          high = "red", space = "Lab", na.value="transparent" ) +
        theme(legend.position = "none") +
        labs(title ="Sripada\n2019") 

ggseg(.data = ggsegAseg_resp_sri,
              atlas = 'aseg', 
              mapping = aes_string(fill = "other_paper"),
              view = "axial",
              colour="black"
        ) + 
        theme_void() +
        scale_fill_gradient2(
          limits = c(min(c(ggsegDesterieux_resp_sri[["other_paper"]],ggsegAseg_resp_sri[["other_paper"]]), na.rm = TRUE), 
                    max(c(ggsegDesterieux_resp_sri[["other_paper"]],ggsegAseg_resp_sri[["other_paper"]]), na.rm = TRUE)), # use both atlases so the subcortical plot shares the cortical colour scale
          midpoint = 0, low = "blue", mid = "white",
          high = "red", space = "Lab", na.value="transparent" ) +
        guides(fill = guide_colourbar(barwidth = 0.5, barheight = 3, title = NULL)) 

brainPlot_names_eNetXplorer_other <- tibble(vec_names = append("other_paper",colnames(vi_enetxplorer$TFMRI_NB_ALL_BEH_C2B_RATE[2:length(vi_enetxplorer$TFMRI_NB_ALL_BEH_C2B_RATE)])), 
                         plotting_names = c("Sripada 2019","Mass-Univariate FDR-Corrected",
                                            "Mass-Univariate Bonferroni-Corrected",
                                            "OLS",
                                            "OLS with p<.05",
                                            "Elastic Net",
                                            "eNetXplorer with p<.05",
                                            "Linear SVM", 
                                            "Polynomial SVM", 
                                            "RBF SVM",
                                            "Random Forest", 
                                            "XGBoost"))

vi_enetxplorer_other <- vi_enetxplorer

other_paper_raw <- other_paper_vi_all_raw %>% select("rois","other_paper")

vi_enetxplorer_other_all <- full_join(vi_enetxplorer_other$gfactor, other_paper_raw, by = "rois")

vi_enetxplorer_other$gfactor <- vi_enetxplorer_other_all



gfactor_brain_plot_other <- brainPrepTibFunc(vi_input = vi_enetxplorer_other,
                                       resp_input = cfa_resp_names[1],
                                       algorithm_vec = brainPlot_names_eNetXplorer_other)


seq(1,12,1) %>% map(.,~gridExtra::grid.arrange(gfactor_brain_plot_other[["estimateCort_resp"]][[.]],
                                 gfactor_brain_plot_other[["estimateAseg_resp"]][[.]],
                                  nrow = 1, ncol = 2, widths = c(4, 1.5)))

## (console output: 12 TableGrobs, one 1 x 2 cortical/subcortical pair of plots per algorithm, including Sripada 2019)
estimateCort_gfactor_all_other <- ggpubr::ggarrange(plotlist=gfactor_brain_plot_other[["estimateCort_resp"]],nrow=12)


estimateAseg_gfactor_all_other <- ggpubr::ggarrange(plotlist=gfactor_brain_plot_other[["estimateAseg_resp"]],nrow=12) +
        theme(plot.margin=grid::unit(c(0,0,0,0), "mm"))  

gfactor_feature_importance_plot_other <- 
  ggpubr::ggarrange(plotlist=list(estimateCort_gfactor_all_other,
                                  estimateAseg_gfactor_all_other),
                                  ncol=2,
                                  common.legend = TRUE,
                                  legend = "right")

ggpubr::annotate_figure(gfactor_feature_importance_plot_other, 
                 top = ggpubr::text_grob("Variable Importance for the G-Factor", 
                                 color = "black", face = "bold", size = 14, hjust = 1))

9 Accumulated Local Effects

To understand the pattern of the predictions, we calculated Accumulated Local Effects (ALE) for each algorithm. Useful references:

https://www.brodrigues.co/blog/2020-03-10-exp_tidymodels/
https://cran.r-project.org/web/packages/iml/vignettes/intro.html
https://cran.r-project.org/web/packages/iml/vignettes/parallel.html
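For reference, a sketch of the estimated quantity in our own notation (following Apley & Zhu, 2020): the uncentered first-order ALE of a feature $x_j$ accumulates the average local change in the prediction across the intervals $N_j(k)$ of a grid $z_{0,j} < \dots < z_{K,j}$ on $x_j$,

$$\tilde{f}_j(x) = \sum_{k=1}^{k_j(x)} \frac{1}{n_j(k)} \sum_{i:\, x_j^{(i)} \in N_j(k)} \left[ \hat{f}\big(z_{k,j},\, x_{\setminus j}^{(i)}\big) - \hat{f}\big(z_{k-1,j},\, x_{\setminus j}^{(i)}\big) \right],$$

where $n_j(k)$ counts the observations falling in interval $N_j(k)$ and $k_j(x)$ indexes the interval containing $x$; the plotted ALE is then centered to have mean zero over the sample. The grid.size argument used below sets the number of intervals.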

9.1 Obtain the predictors from the iml package

Rather than creating one list covering all of the algorithms (OLS, Elastic Net, linear SVM, polynomial SVM, RBF SVM, random forest and XGBoost), we treat OLS and XGBoost separately because their prediction wrappers differ from the rest. In particular, XGBoost's prediction function expects an xgb.DMatrix, which takes the response variable as its label; hence we define two different prediction wrappers for XGBoost.
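For orientation, here is a minimal sketch of the iml API on toy data (toy_fit, toy_pred, and toy_predictor are illustrative only, not part of the analysis; note that recent iml releases name the argument predict.function, whereas the version used here takes predict.fun). A custom wrapper must have the signature function(object, newdata) and return one prediction per row:

library(iml)

toy_fit <- lm(mpg ~ wt + hp, data = mtcars)
# iml calls this as predict.fun(model, newdata) and expects a numeric vector back
toy_pred <- function(object, newdata) {
  as.numeric(predict(object, newdata = newdata))
}
toy_predictor <- Predictor$new(model = toy_fit,
                               data = mtcars[, c("wt", "hp")],
                               y = mtcars$mpg,
                               predict.fun = toy_pred)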

library(iml)

## list of N-back model fits across all algorithms

OLS_Nback <-  OLS_fit[[1]]

Nback_all_algorithms <- list(enet= enet_final_fit_list[[1]],
                             svm_linear = svm_linear_final_fit_list[[1]],
                             random_forest=random_forest_final_fit_list[[1]],
                             svm_RBF = SVM_RBF_final_fit_list[[1]],
                             svm_ploy = svm_poly_final_fit_list[[1]])

xgboost_Nback  <- xgboost_final_fit_list[[1]]

OLS_gfactor <-  OLS_fit_gfactor[[1]]

gfactor_all_algorithms <- list(enet= enet_final_fit_list_gfactor[[1]],
                             svm_linear = svm_linear_final_fit_list_gfactor[[1]],
                             random_forest=random_forest_final_fit_list_gfactor[[1]],
                             svm_RBF = SVM_RBF_final_fit_list_gfactor[[1]],
                              svm_ploy = svm_poly_final_fit_list_gfactor[[1]])


xgboost_gfactor <-  xgboost_final_fit_list_gfactor[[1]]



# prediction wrapper in the tidymodels style (predict() on a tidymodels fit takes new_data)
ols_pred_fun <- function(object, newdata) {
  return(predict(object, new_data = newdata))
}


# XGBoost predicts from an xgb.DMatrix, which carries the response as its label
xgboost_pred_fun <- function(object, newdata, newresp) {
  pred_matrix <- newdata %>%
               as.matrix()
  pred_label <- newresp %>% as.vector() %>% t()
  pred_DMtrix <- xgboost::xgb.DMatrix(data = pred_matrix, label = pred_label)
  return(predict(object, pred_DMtrix))
}
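As a quick sanity check of this wrapper (the toy objects below are simulated and purely illustrative):

set.seed(1)
toy_X <- matrix(rnorm(40), ncol = 4)   # 10 observations, 4 features
toy_y <- rnorm(10)
toy_bst <- xgboost::xgboost(data = toy_X, label = toy_y, nrounds = 2, verbose = 0)
xgboost_pred_fun(toy_bst, as.data.frame(toy_X), toy_y)   # a length-10 numeric vector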


recipe_Nback <- recipe_list[[resp_names[1]]]
recipe_vi_gfactor <- recipe_gfactor[[cfa_resp_names[1]]]

features_Nback <- recipe_Nback %>% 
  bake(new_data = NULL) %>% 
  select(-all_of(resp_names[1])) %>% 
  as.data.frame()

target_Nback <- recipe_Nback %>% 
  bake(new_data = NULL) %>% 
  select(all_of(resp_names[1])) %>% 
  as.data.frame()

predictor_Nback_all <-Nback_all_algorithms %>% map(.,~Predictor$new(
                         model = .,
                         data = features_Nback, 
                         y = target_Nback,
                         predict.fun = model_pred_fun)
) 

predictor_Nback_ols <- Predictor$new(
                         model = OLS_Nback,
                         data = features_Nback, 
                         y = target_Nback)


## This alternative wrapper omits the response: xgboost's predict() accepts a plain matrix, so no label is needed at prediction time
xgboost_pred_wrapper <- function(object, newdata) {
  pred_matrix <- newdata%>%
               as.matrix()
  return(predict(object,pred_matrix))
}  

predictor_Nback_xgboost <- Predictor$new(
                         model = xgboost_Nback,
                         data = features_Nback, 
                         y = target_Nback,
                         predict.fun = xgboost_pred_wrapper)


features_gfactor <- recipe_vi_gfactor %>% 
  bake(new_data = NULL) %>% 
  select(-all_of(cfa_resp_names[1])) %>% 
  as.data.frame()

target_gfactor <- recipe_vi_gfactor %>% 
  bake(new_data = NULL) %>% 
  select(all_of(cfa_resp_names[1])) %>% 
  as.data.frame()

predictor_gfactor_all <- gfactor_all_algorithms %>% map(.,~Predictor$new(
                         model = .,
                         data = features_gfactor, 
                         y = target_gfactor,
                         predict.fun = model_pred_fun)
) 

predictor_gfactor_ols <- Predictor$new(
                         model = OLS_gfactor,
                         data = features_gfactor, 
                         y = target_gfactor)


predictor_gfactor_xgboost <- Predictor$new(
                         model = xgboost_gfactor,
                         data = features_gfactor, 
                         y = target_gfactor,
                         predict.fun = xgboost_pred_wrapper)

9.2 The ALE values for the top-30 ranked ROIs for the n-back task and the g-factor

Nback_roi_vec <- vi_all_rename_TFMRI_NB_ALL_BEH_C2B_RATE_rank %>%
  arrange(desc(TFMRI_NB_ALL_BEH_C2B_RATE_rank)) %>%
  filter(TFMRI_NB_ALL_BEH_C2B_RATE_rank > 137)%>% # ranks above 137 keep the top 30 of the 167 ROIs
  select("rois")%>%
  as_vector()


gfactor_rank <- vi_all_gfactor$gfactor %>% select(-rois) %>% data.table::frankv(ties.method = "min")


vi_all_gfactor_rank <- cbind(vi_all_gfactor$gfactor, gfactor_rank)


gfactor_roi_vec <- vi_all_gfactor_rank %>% 
  arrange(desc(gfactor_rank)) %>%
  filter(gfactor_rank > 137) %>% # ranks above 137 keep the top 30 of the 167 ROIs
  select("rois")%>%
  as_vector()

# # compare the ranks between n-back and g-factor
# 
# vi_all_rename_TFMRI_NB_ALL_BEH_C2B_RATE_rank %>% 
#   select(rois, TFMRI_NB_ALL_BEH_C2B_RATE_rank) %>% 
#   left_join(vi_all_gfactor_rank %>%
#               select(rois, gfactor_rank), by = "rois") %>% arrange(desc(TFMRI_NB_ALL_BEH_C2B_RATE_rank,gfactor_rank)) 

9.3 Functions to compute ALE values across all the algorithms

algorithm_iml_vec <- names(Nback_all_algorithms)

alePlot_names <- tibble(vec_names =algorithm_iml_vec, 
                         plotting_names = c("Elastic Net",
                                            "Linear SVM", 
                                            "Random Forest", 
                                            "RBF SVM",
                                            "Polynomial SVM"))

rois_ale_processing <- function(roi_input, 
                                ols_predictor_input,
                                algorithms_predictor_input,
                                xgboost_predictor_input){

# parallelise the FeatureEffect computations across a multisession plan
library(doFuture)
registerDoFuture()
plan(multisession, workers = 15)
  
ale_OLS <- FeatureEffect$new(ols_predictor_input, 
                             feature = roi_input,
                             grid.size = 20)

ale_OLS_results <- ale_OLS$results%>% 
                   mutate(algorithm = "OLS")

ale_algorithms <- algorithm_iml_vec %>% 
  map(., function(algorithm_input){
  
ale_one_algor <- FeatureEffect$new(algorithms_predictor_input[[algorithm_input]], 
                                   feature = roi_input,
                                   grid.size = 20)

ale_one_algor_results <- ale_one_algor$results%>% 
                   mutate(algorithm =alePlot_names$plotting_names
                          [which(alePlot_names$vec_names==algorithm_input)] )
return(ale_one_algor_results)
})%>% do.call(rbind,.)

ale_xgboost <- FeatureEffect$new(xgboost_predictor_input, 
                                 feature = roi_input,
                                 grid.size = 20)

ale_xgboost_results <- ale_xgboost$results%>% 
                   mutate(algorithm = "XGBoost")


ale_all <- bind_rows(ale_OLS_results,ale_algorithms,ale_xgboost_results)%>%
   mutate(algorithm = factor(algorithm,levels =c ("OLS","Elastic Net",
                                                "Random Forest","XGBoost", "Linear SVM",
                                                "Polynomial SVM" ,"RBF SVM")))
return(ale_all)
}

9.4 Compute ALE values across all the algorithms

ale_Nback_all <- Nback_roi_vec %>%
  map(.,~rois_ale_processing(roi_input = .,
                             ols_predictor_input = predictor_Nback_ols,
                             algorithms_predictor_input = predictor_Nback_all,
                             xgboost_predictor_input =predictor_Nback_xgboost ))


names(ale_Nback_all) <- Nback_roi_vec

saveRDS(ale_Nback_all, paste0(anotherFold,
                               'working_memory_tasks/windows/ale_Nback_all_Mar_22_2022_rmse', '.RData'))

ale_gfactor_all <- gfactor_roi_vec %>% 
  map(.,~rois_ale_processing(roi_input = .,
                             ols_predictor_input = predictor_gfactor_ols,
                             algorithms_predictor_input = predictor_gfactor_all,
                             xgboost_predictor_input = predictor_gfactor_xgboost))

names(ale_gfactor_all) <- gfactor_roi_vec


saveRDS(ale_gfactor_all, paste0(anotherFold,
                               'working_memory_tasks/windows/ale_gfactor_all_Mar_22_2022_rmse', '.RData'))

9.5 Plotting ALE for the top-30 ROIs across algorithms

ale_plotting <- function(data_input, roi_input, feature_input){
  ale_plot <- ggplot()+
  geom_line(data = data_input, aes(x = .data[[roi_input]], 
                                y = .value,group = algorithm, 
                                color = algorithm,linetype = algorithm),size=2)+
  scale_color_brewer(palette = "Dark2")+
   geom_rug(data = feature_input,aes(x= .data[[roi_input]]))+ 
theme(
  axis.title.x = element_blank(),
  axis.title.y = element_blank(),
  legend.position = "none",
  axis.text=element_text(size=15))+
  ggtitle(new_shorter_names_two_lines$roiShortTwoLines[which(new_shorter_names_two_lines$roi==str_remove(roi_input,"roi_"))])+
  theme(plot.title = element_text(size = 20)) 
return(ale_plot)
}

ale_plot_Nback <- map2(.x =ale_Nback_all,
                       .y= Nback_roi_vec,
                       ~ale_plotting(data_input = .x,
                                     roi_input = .y,
                                     feature_input = features_Nback))


ale_plot_gfactor <- map2(.x =ale_gfactor_all,
                         .y= gfactor_roi_vec,
                       ~ale_plotting(data_input = .x,
                                     roi_input = .y,
                                     feature_input = features_gfactor))

ale_plot_example <- ggplot()+
  geom_line(data = ale_Nback_all[[1]], 
            aes(x = .data[[Nback_roi_vec[1]]], 
                                y = .value,group = algorithm, 
                                color = algorithm,linetype = algorithm),size=2)+
  scale_color_brewer(palette = "Dark2")+
   geom_rug(data = features_Nback,aes(x= .data[[Nback_roi_vec[1]]]))+ 
theme(
  axis.title.x = element_blank(),
  axis.title.y = element_blank(),
        legend.position = "bottom",
    legend.title=element_text(size=25), 
    legend.text=element_text(size=25))+
  ggtitle(
    new_shorter_names_two_lines$roiShortTwoLines[which(new_shorter_names_two_lines$roi==str_remove(Nback_roi_vec[1],"roi_"))])+ # title matched to the plotted ROI
  theme(plot.title = element_text(size = 20)) +
  guides(color = guide_legend(override.aes = list(size = 10)))

ale_plot_legend <- get_legend(ale_plot_example)

ale_plot_nback_all <- ggpubr::ggarrange(plotlist =ale_plot_Nback, ncol = 5,nrow = 6, 
                                        common.legend = TRUE, legend = "bottom",
                                        legend.grob = ale_plot_legend)

ale_plot_gfactor_all <- ggpubr::ggarrange(plotlist =ale_plot_gfactor, ncol = 5,nrow = 6, 
                                          common.legend = TRUE, legend = "bottom",
                                          legend.grob = ale_plot_legend)



title_ale_Nback <- ggdraw() + 
  draw_label(
    "Accumulated Local Effects for 30-Top Brain Regions\nThat Predicted N-Back Behavioral Performance",
    fontface = 'bold',
    x = 0,
    hjust = 0,
    size = 30) +
  theme(
    # add margin on the left of the drawing canvas,
    # so title is aligned with left edge of first plot
    plot.margin = margin(0, 0, 0, 7)
  )

title_ale_gfactor <- ggdraw() + 
  draw_label(
    "Accumulated Local Effects for 30-Top Brain Regions\nThat Predicted the G-Factor",
    fontface = 'bold',
    x = 0,
    hjust = 0,
    size = 30
  ) +
  theme(
    # add margin on the left of the drawing canvas,
    # so title is aligned with left edge of first plot
    plot.margin = margin(0, 0, 0, 7)
  )

plot_grid(title_ale_Nback,ale_plot_nback_all,nrow = 2 , rel_heights = c(0.1, 1))

plot_grid(title_ale_gfactor,ale_plot_gfactor_all,nrow = 2 , rel_heights = c(0.1, 1))

# (exploratory) ALE curves for every feature under the OLS predictor
ols_example <- map(.x = feature_names, ~FeatureEffect$new(predictor_Nback_ols, 
                                   feature = .,
                                   grid.size = 20)) 

ols_results <- map(ols_example, "results")%>% do.call(cbind,.)

9.6 Functions to get the univariate fit

9.6.1 Mass-univariate functions

The mass-univariate fit function.

mass_uni_fit takes a single ROI and fits a simple linear regression of the response on that ROI. It returns the fitted model together with a tibble that pairs each observation's ROI value with its predicted value; mapping it over a vector of ROIs then yields the univariate fit for every region.

mass_uni_fit <- function(resp_input, recipe_input, roi_input){
  training_data <- bake(prep(recipe_input), new_data = NULL)
  formulas <- paste0(resp_input, ' ~ ', roi_input)
  results_test_simple <- lm(as.formula(formulas), data = training_data)
  # predict.lm takes `newdata`; with `data =` the argument is silently ignored
  model_predict <- predict(results_test_simple, newdata = training_data) %>%
                    as.vector()
  result_tibble <- tibble(feature_value = training_data[[roi_input]], predict = model_predict)
  names(result_tibble) <- c(roi_input, "model_predict")
  return(list(model_fit = results_test_simple, model_result = result_tibble))
}
uni_fit_nback <-map(.x = Nback_roi_vec,
                    ~mass_uni_fit(resp_input = resp_names[1],recipe_input = recipe_list[[1]],roi_input = .x))


uni_fit_gfactor <-map(.x = gfactor_roi_vec,
                    ~mass_uni_fit(resp_input = cfa_resp_names[1],
                                  recipe_input = recipe_gfactor[[1]],roi_input = .x))



features_uni_nback <- map(Nback_roi_vec, ~ features_Nback %>% select(all_of(.x)))

#predictor_nback_uni <- map2(.x = uni_fit_nback,.y = features_uni_nback,
#                            ~Predictor$new(model = .x,
#                         data =.y, 
#                         y = target_Nback))  


#predictor_nback_uni <- Predictor$new(
#                         model = xgboost_gfactor,
#                         data = features_gfactor, 
#                         y = target_gfactor,
#                         predict.fun = xgboost_pred_wrapper)



uni_nback_results<- map(uni_fit_nback,"model_result")

uni_gfactor_results<- map(uni_fit_gfactor,"model_result")

uni_plotting <- function(data_input, roi_input){
    uni_plot <- ggplot()+
  geom_line(data = data_input, aes(x = .data[[roi_input]], 
                                y = model_predict),size=2)+
  scale_color_brewer(palette = "Dark2")+
   geom_rug(data = data_input,aes(x= .data[[roi_input]]))+ 
theme(
  axis.title.x = element_blank(),
  axis.title.y = element_blank(),
  legend.position = "none",
  axis.text=element_text(size=15))+
  ggtitle(new_shorter_names_two_lines$roiShortTwoLines[which(new_shorter_names_two_lines$roi==str_remove(roi_input,"roi_"))])+
  theme(plot.title = element_text(size = 20)) 
return(uni_plot)
}

uni_plot_Nback <- map2(.x =uni_nback_results,
                       .y= Nback_roi_vec,
                       ~uni_plotting(data_input = .x,
                                     roi_input = .y))


uni_plot_gfactor <- map2(.x =uni_gfactor_results,
                         .y= gfactor_roi_vec,
                       ~uni_plotting(data_input = .x,
                                     roi_input = .y))

uni_plot_nback_all <- ggpubr::ggarrange(plotlist =uni_plot_Nback, ncol = 5,nrow = 6)

uni_plot_gfactor_all <- ggpubr::ggarrange(plotlist =uni_plot_gfactor, ncol = 5,nrow = 6)



title_uni_Nback <- ggdraw() + 
  draw_label(
    "Univariate Effects for 30-Top Brain Regions\nThat Predicted N-Back Behavioral Performance",
    fontface = 'bold',
    x = 0,
    hjust = 0,
    size = 30) +
  theme(
    # add margin on the left of the drawing canvas,
    # so title is aligned with left edge of first plot
    plot.margin = margin(0, 0, 0, 7)
  )

title_uni_gfactor <- ggdraw() + 
  draw_label(
    "Univariate Effects for 30-Top Brain Regions\nThat Predicted the G-Factor",
    fontface = 'bold',
    x = 0,
    hjust = 0,
    size = 30
  ) +
  theme(
    # add margin on the left of the drawing canvas,
    # so title is aligned with left edge of first plot
    plot.margin = margin(0, 0, 0, 7)
  )

plot_grid(title_uni_Nback,uni_plot_nback_all,nrow = 2 , rel_heights = c(0.1, 1))

plot_grid(title_uni_gfactor,uni_plot_gfactor_all,nrow = 2 , rel_heights = c(0.1, 1))

9.7 Plot ALE for all algorithms alongside the univariate effects

## predicted values from the univariate fits and ALE values for the other algorithms


effect_plot_data_processing <- function(uni_input, ale_input){
   uni_input <- uni_input %>%
    rename(.value = model_predict)%>%
    mutate(algorithm ="Univariate")
   ale_frame <- select(ale_input,-.type)
   all_frame <- bind_rows(uni_input,ale_frame)%>%
   mutate(algorithm = factor(algorithm,levels =c ("Univariate","OLS","Elastic Net",
                                                "Random Forest","XGBoost", "Linear SVM",
                                                "Polynomial SVM" ,"RBF SVM")))
   return(all_frame)
}
ale_uni_nback <-map2(.x = uni_nback_results,.y=ale_Nback_all, 
                     ~effect_plot_data_processing(uni_input =.x ,ale_input = .y))


ale_uni_gfactor <-map2(.x = uni_gfactor_results,.y=ale_gfactor_all, 
                     ~effect_plot_data_processing(uni_input =.x ,ale_input = .y))


ale_uni_plot_Nback <- map2(.x =ale_uni_nback,
                       .y= Nback_roi_vec,
                       ~ale_plotting(data_input = .x,
                                     roi_input = .y,
                                     feature_input = features_Nback))


ale_uni_plot_gfactor <- map2(.x =ale_uni_gfactor,
                         .y= gfactor_roi_vec,
                       ~ale_plotting(data_input = .x,
                                     roi_input = .y,
                                     feature_input = features_gfactor))

ale_uni_plot_example <- ggplot()+
  geom_line(data = ale_uni_nback[[1]], 
            aes(x = .data[[Nback_roi_vec[1]]], 
                                y = .value,group = algorithm, 
                                color = algorithm,linetype = algorithm),size=2)+
  scale_color_brewer(palette = "Dark2")+
   geom_rug(data = features_Nback,aes(x= .data[[Nback_roi_vec[1]]]))+ 
theme(
  axis.title.x = element_blank(),
  axis.title.y = element_blank(),
        legend.position = "bottom",
    legend.title=element_text(size=25), 
    legend.text=element_text(size=25))+
  ggtitle(
    new_shorter_names_two_lines$roiShortTwoLines[which(new_shorter_names_two_lines$roi==str_remove(Nback_roi_vec[1],"roi_"))])+ # title matched to the plotted ROI
  theme(plot.title = element_text(size = 20)) +
  guides(color = guide_legend(override.aes = list(size = 10)))

ale_uni_plot_legend <- get_legend(ale_uni_plot_example)

ale_uni_plot_nback_all <- ggpubr::ggarrange(plotlist =ale_uni_plot_Nback, ncol = 5,nrow = 6, 
                                        common.legend = TRUE, legend = "bottom",
                                        legend.grob = ale_uni_plot_legend)

ale_uni_plot_gfactor_all <- ggpubr::ggarrange(plotlist =ale_uni_plot_gfactor, ncol = 5,nrow = 6, 
                                          common.legend = TRUE, legend = "bottom",
                                          legend.grob = ale_uni_plot_legend)



title_ale_uni_Nback <- ggdraw() + 
  draw_label(
    "Accumulated Local and Univariate Effects for 30-Top Brain Regions\nThat Predicted N-Back Behavioral Performance",
    fontface = 'bold',
    x = 0,
    hjust = 0,
    size = 30) +
  theme(
    # add margin on the left of the drawing canvas,
    # so title is aligned with left edge of first plot
    plot.margin = margin(0, 0, 0, 7)
  )

title_ale_uni_gfactor <- ggdraw() + 
  draw_label(
    "Accumulated Local and Univariate Effects for 30-Top Brain Regions\nThat Predicted the G-Factor",
    fontface = 'bold',
    x = 0,
    hjust = 0,
    size = 30
  ) +
  theme(
    # add margin on the left of the drawing canvas,
    # so title is aligned with left edge of first plot
    plot.margin = margin(0, 0, 0, 7)
  )

plot_grid(title_ale_uni_Nback,ale_uni_plot_nback_all,nrow = 2 , rel_heights = c(0.1, 1))

plot_grid(title_ale_uni_gfactor,ale_uni_plot_gfactor_all,nrow = 2 , rel_heights = c(0.1, 1))

9.8 ALE for ROIs that passed eNetXplorer's p < .05

9.8.1 Select those areas

enet_nback_roi_vec <- coefs_enet_all[[1]] %>% 
  filter(type == "Target models") %>%
  arrange(desc(abs(wmean))) %>% .$variable


enet_gfactor_roi_vec <- coefs_enet_gfactor[[1]] %>% 
  filter(type == "Target models") %>% 
  arrange(desc(abs(wmean))) %>% .$variable

9.8.2 Calculate ALE

enet_ale_Nback_all <- enet_nback_roi_vec %>%
  map(.,
      ~rois_ale_processing(roi_input = .,
                           ols_predictor_input = predictor_Nback_ols,
                           algorithms_predictor_input = predictor_Nback_all,
                           xgboost_predictor_input =predictor_Nback_xgboost ))

saveRDS(enet_ale_Nback_all, 
        paste0(anotherFold,
               'working_memory_tasks/windows/enet_ale_Nback_all_Mar_22_2022_rmse', '.RData'))


enet_ale_gfactor_all <- 
  enet_gfactor_roi_vec %>% 
  map(.,~rois_ale_processing(roi_input = .,
                             ols_predictor_input = predictor_gfactor_ols,
                             algorithms_predictor_input = predictor_gfactor_all,
                             xgboost_predictor_input = predictor_gfactor_xgboost))

saveRDS(enet_ale_gfactor_all, paste0(anotherFold,
                               'working_memory_tasks/windows/enet_ale_gfactor_all_Mar_22_2022_rmse', '.RData'))
uni_enet_nback <-map(.x = enet_nback_roi_vec,
                    ~mass_uni_fit(resp_input = resp_names[1],recipe_input = recipe_list[[1]],roi_input = .x))


uni_enet_gfactor <-map(.x = enet_gfactor_roi_vec,
                    ~mass_uni_fit(resp_input = cfa_resp_names[1],
                                  recipe_input = recipe_gfactor[[1]],roi_input = .x))



uni_nback_enet_results<- map(uni_enet_nback,"model_result")

uni_gfactor_enet_results<- map(uni_enet_gfactor,"model_result")


uni_plot_Nback_enet <- map2(.x =uni_nback_enet_results,
                       .y= enet_nback_roi_vec,
                       ~uni_plotting(data_input = .x,
                                     roi_input = .y))


uni_plot_gfactor_enet <- map2(.x =uni_gfactor_enet_results,
                         .y= enet_gfactor_roi_vec,
                       ~uni_plotting(data_input = .x,
                                     roi_input = .y))

uni_plot_nback_enet_all <- ggpubr::ggarrange(plotlist =uni_plot_Nback_enet, ncol = 5,nrow = 6)

uni_plot_gfactor_enet_all <- ggpubr::ggarrange(plotlist =uni_plot_gfactor_enet, ncol = 5,nrow = 6)



title_uni_Nback_enet <- ggdraw() + 
  draw_label(
    "Univaritate Effects of Brain Regions that Predicted\nN-Back Behavioral Performance With eNetXplorer's p < .05",
    fontface = 'bold',
    x = 0,
    hjust = 0,
    size = 30) +
  theme(
    # add margin on the left of the drawing canvas,
    # so title is aligned with left edge of first plot
    plot.margin = margin(0, 0, 0, 7)
  )

title_uni_gfactor_enet <- ggdraw() + 
  draw_label(
    "Univaritate Effects of Brain Regions That\nPredicted the G-Factor With eNetXplorer's p < .05",
    fontface = 'bold',
    x = 0,
    hjust = 0,
    size = 30
  ) +
  theme(
    # add margin on the left of the drawing canvas,
    # so title is aligned with left edge of first plot
    plot.margin = margin(0, 0, 0, 7)
  )

plot_grid(title_uni_Nback_enet,uni_plot_nback_enet_all,nrow = 2 , rel_heights = c(0.1, 1))

plot_grid(title_uni_gfactor_enet,uni_plot_gfactor_enet_all,nrow = 2 , rel_heights = c(0.1, 1))

9.9 Plotting ALE for ROIs that passed eNetXplorer's p < .05

ale_plot_Nback_enet <- map2(.x =enet_ale_Nback_all,
                            .y= enet_nback_roi_vec,
                       ~ale_plotting(data_input = .x,
                                     roi_input = .y,
                                     feature_input = features_Nback))


ale_plot_gfactor_enet <- map2(.x =enet_ale_gfactor_all,
                              .y= enet_gfactor_roi_vec,
                       ~ale_plotting(data_input = .x,
                                     roi_input = .y,
                                     feature_input = features_gfactor))


ale_plot_nback_all_enet <- ggpubr::ggarrange(plotlist =ale_plot_Nback_enet, 
                                             ncol = 5,
                                             nrow = 6, 
                                             common.legend = TRUE, 
                                             legend = "bottom",
                                             legend.grob = ale_plot_legend)

ale_plot_gfactor_all_enet <- ggpubr::ggarrange(plotlist =ale_plot_gfactor_enet, 
                                               ncol = 5,
                                               nrow = 6, 
                                               common.legend = TRUE, 
                                               legend = "bottom",
                                               legend.grob = ale_plot_legend)

title_ale_Nback_enet <- ggdraw() + 
  draw_label(
    "Accumulated Local Effects of Brain Regions that Predicted\nN-Back Behavioral Performance With eNetXplorer's p < .05",
    fontface = 'bold',
    x = 0,
    hjust = 0,
    size = 30) +
  theme(
    # add margin on the left of the drawing canvas,
    # so title is aligned with left edge of first plot
    plot.margin = margin(0, 0, 0, 7)
  )


title_ale_gfactor_enet <- ggdraw() + 
  draw_label(
    "Accumulated Local Effects of Brain Regions That\nPredicted the G-Factor With eNetXplorer's p < .05",
    fontface = 'bold',
    x = 0,
    hjust = 0,
    size = 30
  ) +
  theme(
    # add margin on the left of the drawing canvas,
    # so title is aligned with left edge of first plot
    plot.margin = margin(0, 0, 0, 7)
  )


plot_grid(title_ale_Nback_enet,ale_plot_nback_all_enet,nrow = 2 , rel_heights = c(0.1, 1))

plot_grid(title_ale_gfactor_enet,ale_plot_gfactor_all_enet,nrow = 2 , rel_heights = c(0.1, 1))

10 Interaction Plot Based on Friedman’s H statistic

Plot the top-20 areas with the highest interaction values for the algorithms that can capture interactions: XGBoost, polynomial SVM, RBF SVM, and random forest.
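For reference (our notation, following Friedman & Popescu, 2008), the H statistic that Interaction$new is based on compares, for a feature $x_j$, the full prediction with the sum of the partial-dependence effect of $x_j$ and that of the remaining features:

$$H_j^2 = \frac{\sum_{i=1}^{n} \left[ \hat{f}\big(x^{(i)}\big) - \mathrm{PD}_j\big(x_j^{(i)}\big) - \mathrm{PD}_{\setminus j}\big(x_{\setminus j}^{(i)}\big) \right]^2}{\sum_{i=1}^{n} \hat{f}\big(x^{(i)}\big)^2},$$

with all terms mean-centered; values near 0 indicate that $x_j$ does not interact with the other features.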

## list of N-back model fits used for the interaction analysis
Nback_all_interact <- list(svm_linear = svm_linear_final_fit_list[[1]],
                             random_forest=random_forest_final_fit_list[[1]],
                             svm_RBF = SVM_RBF_final_fit_list[[1]],
                             svm_ploy = svm_poly_final_fit_list[[1]])

gfactor_all_interact <- list(svm_linear = svm_linear_final_fit_list_gfactor[[1]],
                             random_forest=random_forest_final_fit_list_gfactor[[1]],
                             svm_RBF = SVM_RBF_final_fit_list_gfactor[[1]],
                              svm_ploy = svm_poly_final_fit_list_gfactor[[1]])


predictor_Nback_interact <-Nback_all_interact %>% map(.,~Predictor$new(
                         model = .,
                         data = features_Nback, 
                         y = target_Nback,
                         predict.fun = model_pred_fun)) 


predictor_gfactor_interact <- gfactor_all_interact %>% map(.,~Predictor$new(
                         model = .,
                         data = features_gfactor, 
                         y = target_gfactor,
                         predict.fun = model_pred_fun)) 


algorithm_interact_vec <- names(Nback_all_interact)

interaction_names <- tibble(vec_names =algorithm_interact_vec, 
                         plotting_names = c("Linear SVM", "Random Forest", "RBF SVM","Polynomial SVM"))

10.1 Data prep for interaction plots

library("future")
library("future.callr")
#plan("callr", workers = 6)
plan(multisession, workers = 30)
interact_Nback <- predictor_Nback_interact %>% map(.,~Interaction$new(.))

saveRDS(interact_Nback, paste0(anotherFold,
                               'working_memory_tasks/windows/interact_Nback_Mar_22_2022', '.RData'))

interact_Nback_xgboost <- Interaction$new(predictor_Nback_xgboost)

saveRDS(interact_Nback_xgboost, paste0(anotherFold,
                                       'working_memory_tasks/windows/interact_Nback_xgboost_Mar_22_2022', '.RData'))


interact_gfactor <- predictor_gfactor_interact %>% map(.,~Interaction$new(.))

saveRDS(interact_gfactor, paste0(anotherFold,
                                 'working_memory_tasks/windows/interact_gfactor_Mar_22_2022', '.RData'))

interact_gfactor_xgboost <- Interaction$new(predictor_gfactor_xgboost)

saveRDS(interact_gfactor_xgboost, paste0(anotherFold,
                              'working_memory_tasks/windows/interact_gfactor_xgboost_Mar_22_2022', '.RData'))

10.2 Plot interactions

interact_nback_all <- list("random_forest"= interact_Nback[["random_forest"]], 
                            "xgboost" = interact_Nback_xgboost, 
                            "svm_ploy" = interact_Nback[["svm_ploy"]],
                           "svm_RBF" =interact_Nback[["svm_RBF"]] )



 interact_gfactor_all <- list("random_forest"= interact_gfactor[["random_forest"]], 
                            "xgboost" = interact_gfactor_xgboost, 
                            "svm_ploy" = interact_gfactor[["svm_ploy"]],
                            "svm_RBF" =interact_gfactor[["svm_RBF"]])


interactPlot_names <- tibble(vec_names =names(interact_nback_all), 
                         plotting_names = c("Random\nForest", 
                                            "XGBoost\n",
                                            "Polynomial\nSVM",
                                            "RBF\nSVM"))
  
interact_nback_results_all <- map(interact_nback_all,"results") 

interact_gfactor_results_all <- map(interact_gfactor_all,"results") 

interact_Nback_rank <- interact_nback_results_all%>% map(.,~select(.,.interaction)%>%
                                 data.table::frankv(ties.method = "min"))
                                 

interact_gfactor_rank <- interact_gfactor_results_all%>% map(.,~select(.,.interaction)%>%
                                 data.table::frankv(ties.method = "min"))



interact_Nback_select <- map2(.x =interact_Nback_rank,.y = interact_nback_results_all, 
                              ~cbind(.x,.y)%>% 
                               filter(.x > 147)%>% # ranks above 147 keep the top 20 of the 167 ROIs
                               rename(interaction_rank= .x)%>%
                              mutate(roi = str_remove(.feature,"roi_"))%>% 
                                arrange(desc(.interaction)))

interact_Nback_select <- interact_Nback_select %>% map(.,~left_join(.,new_shorter_names, by = "roi"))
                              
interact_gfactor_select <- map2(.x =interact_gfactor_rank,.y = interact_gfactor_results_all, 
                              ~cbind(.x,.y)%>% 
                               filter(.x > 147)%>% # ranks above 147 keep the top 20 of the 167 ROIs
                               rename(interaction_rank= .x)%>%
                              mutate(roi = str_remove(.feature,"roi_"))%>% 
                                arrange(desc(.interaction)))

interact_gfactor_select <- interact_gfactor_select %>% map(.,~left_join(., new_shorter_names, by = "roi"))

interact_algor_vec <- names(interact_gfactor_select)


interact_plot_fun <- function(interact_input, algorithm_input){
 interact_plot <-interact_input[[algorithm_input]]%>%
ggplot( aes(x = .interaction, 
            y =fct_reorder(roiShort, .interaction, .desc = FALSE) ))+
  geom_point()+
  geom_segment(aes(x =.interaction, xend = 0, y = roiShort, yend = roiShort))+
  theme(axis.title.x=element_blank(),
        axis.title.y=element_blank(),
        axis.text.x = element_text(angle = 90),
        axis.text.y = element_text(size = 12))+
   ggtitle(interactPlot_names$plotting_names[which(interactPlot_names$vec_names==algorithm_input)])
 return(interact_plot)
 }


interact_Nback_plot_list <- 
  interact_algor_vec %>% map(.,
                             ~interact_plot_fun(interact_input=interact_Nback_select,
                                                algorithm_input=.))

interact_Nback_plot_grid <- plot_grid(plotlist = interact_Nback_plot_list,
                                      nrow = 2, 
                                      ncol = 2)

title_interact_Nback <- ggdraw() + 
  draw_label(
    "Interaction plot for the top-20 regions that\npredicted N-Back Behavioral Performance with highest H-statistic",
    fontface = 'bold',
    x = 0,
    hjust = 0
  ) +
  theme(
    # add margin on the left of the drawing canvas,
    # so title is aligned with left edge of first plot
    plot.margin = margin(0, 0, 0, 7)
  )

plot_grid(title_interact_Nback,
          interact_Nback_plot_grid,
          nrow = 2, 
          rel_heights = c(0.1, 1))

interact_gfactor_plot_list <- interact_algor_vec %>% map(.,~interact_plot_fun(interact_input=interact_gfactor_select,
                                                                            algorithm_input=.))

interact_gfactor_plot_grid <-plot_grid(plotlist = interact_gfactor_plot_list,nrow = 2,ncol = 2)

 title_interact_gfactor <- ggdraw() + 
  draw_label(
    "Interaction plot for the top-20 regions that\npredicted the G-Factor with highest H-statistic",
    fontface = 'bold',
    x = 0,
    hjust = 0
  ) +
  theme(
    # add margin on the left of the drawing canvas,
    # so title is aligned with left edge of first plot
    plot.margin = margin(0, 0, 0, 7)
  )

plot_grid(title_interact_gfactor,interact_gfactor_plot_grid,nrow = 2 , rel_heights = c(0.1, 1))

11 VIF of the data

library(olsrr)

vifs_ols_all <- resp_names %>%furrr::future_map(.,~ols_vif_tol(OLS_fit[[.]]),
                             .options = furrr::furrr_options(seed = 123456))
vifs_ols_all <- vifs_ols_all %>% map(.,~mutate(., term=Variables))
vifs_ols_all[[resp_names[1]]] %>% ggplot(aes(x = VIF)) +
  geom_histogram(fill = 'grey80', binwidth = .5) +
  scale_x_continuous(breaks = seq(0, 13, by = 3)) +
  labs(x = "Explanatory Variables (Regions)",
       y = 'Count', title="Variable Inflation Factor of\nExplanatory Variables")+ 
   theme_light() +
  theme(text = element_text(size = 30))

summary(vifs_ols_all[[1]]$VIF)  
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   1.570   2.584   4.155   4.589   5.956  12.072
sum(vifs_ols_all[[1]]$VIF > 5)
## [1] 67
sum(vifs_ols_all[[1]]$VIF > 10)
## [1] 2
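For reference, the VIF of predictor $j$ is computed from the $R_j^2$ obtained by regressing that predictor on all of the other predictors,

$$\mathrm{VIF}_j = \frac{1}{1 - R_j^2}, \qquad \mathrm{Tolerance}_j = 1 - R_j^2 = \frac{1}{\mathrm{VIF}_j},$$

so the Tolerance column reported by ols_vif_tol below is simply the reciprocal of the VIF column. The cut-offs of 5 and 10 tallied above are conventional rules of thumb for flagging problematic collinearity.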
formula_gfactor <- cfa_resp_names %>%
  map(.,~as.formula(paste(.,paste(feature_names,collapse = "+"),sep="~")))

OLS_fit_gfactor <-  map2(.x=formula_gfactor,
                         .y=recipe_gfactor ,
                         ~lm(.x,data = .y %>%  
                               bake(new_data= NULL)))

ols_vif_tol(OLS_fit_gfactor[[1]])
##                            Variables  Tolerance       VIF
## 1         roi_Left.Cerebellum.Cortex 0.23825422  4.197197
## 2           roi_Left.Thalamus.Proper 0.14667920  6.817599
## 3                   roi_Left.Caudate 0.10494616  9.528696
## 4                   roi_Left.Putamen 0.10725691  9.323409
## 5                  roi_Left.Pallidum 0.40481918  2.470239
## 6                     roi_Brain.Stem 0.45259422  2.209485
## 7               roi_Left.Hippocampus 0.23895158  4.184948
## 8                  roi_Left.Amygdala 0.40106172  2.493382
## 9            roi_Left.Accumbens.area 0.43857383  2.280118
## 10                roi_Left.VentralDC 0.37245016  2.684923
## 11       roi_Right.Cerebellum.Cortex 0.22632115  4.418500
## 12         roi_Right.Thalamus.Proper 0.13749027  7.273242
## 13                 roi_Right.Caudate 0.10954767  9.128446
## 14                 roi_Right.Putamen 0.10776559  9.279400
## 15                roi_Right.Pallidum 0.48661866  2.054997
## 16             roi_Right.Hippocampus 0.25948093  3.853848
## 17                roi_Right.Amygdala 0.40115933  2.492775
## 18          roi_Right.Accumbens.area 0.44824548  2.230920
## 19               roi_Right.VentralDC 0.38599869  2.590682
## 20       roi_lh_G_and_S_frontomargin 0.44820567  2.231119
## 21      roi_lh_G_and_S_occipital_inf 0.21523801  4.646020
## 22        roi_lh_G_and_S_paracentral 0.25143311  3.977201
## 23         roi_lh_G_and_S_subcentral 0.18231007  5.485161
## 24   roi_lh_G_and_S_transv_frontopol 0.50189889  1.992433
## 25         roi_lh_G_and_S_cingul.Ant 0.16336452  6.121280
## 26     roi_lh_G_and_S_cingul.Mid.Ant 0.18816958  5.314355
## 27    roi_lh_G_and_S_cingul.Mid.Post 0.19554471  5.113920
## 28       roi_lh_G_cingul.Post.dorsal 0.21505895  4.649888
## 29      roi_lh_G_cingul.Post.ventral 0.33473298  2.987456
## 30                   roi_lh_G_cuneus 0.13331004  7.501311
## 31      roi_lh_G_front_inf.Opercular 0.18416522  5.429907
## 32        roi_lh_G_front_inf.Orbital 0.46558469  2.147837
## 33       roi_lh_G_front_inf.Triangul 0.27490293  3.637648
## 34             roi_lh_G_front_middle 0.11038333  9.059339
## 35                roi_lh_G_front_sup 0.08305576 12.040103
## 36    roi_lh_G_Ins_lg_and_S_cent_ins 0.27001821  3.703454
## 37            roi_lh_G_insular_short 0.26482977  3.776010
## 38         roi_lh_G_occipital_middle 0.17980211  5.561670
## 39            roi_lh_G_occipital_sup 0.18430234  5.425867
## 40      roi_lh_G_oc.temp_lat.fusifor 0.24062242  4.155889
## 41      roi_lh_G_oc.temp_med.Lingual 0.11181100  8.943664
## 42      roi_lh_G_oc.temp_med.Parahip 0.54717225  1.827578
## 43                  roi_lh_G_orbital 0.39833574  2.510445
## 44       roi_lh_G_pariet_inf.Angular 0.12649414  7.905504
## 45      roi_lh_G_pariet_inf.Supramar 0.14769821  6.770563
## 46             roi_lh_G_parietal_sup 0.13544386  7.383133
## 47              roi_lh_G_postcentral 0.13869056  7.210296
## 48               roi_lh_G_precentral 0.13201709  7.574777
## 49                roi_lh_G_precuneus 0.12294256  8.133880
## 50                   roi_lh_G_rectus 0.55705708  1.795148
## 51              roi_lh_G_subcallosal 0.51439140  1.944045
## 52      roi_lh_G_temp_sup.G_T_transv 0.30341470  3.295819
## 53         roi_lh_G_temp_sup.Lateral 0.28700555  3.484253
## 54      roi_lh_G_temp_sup.Plan_polar 0.47566630  2.102314
## 55      roi_lh_G_temp_sup.Plan_tempo 0.28804756  3.471649
## 56             roi_lh_G_temporal_inf 0.53140294  1.881811
## 57          roi_lh_G_temporal_middle 0.40059666  2.496276
## 58       roi_lh_Lat_Fis.ant.Horizont 0.46927533  2.130945
## 59       roi_lh_Lat_Fis.ant.Vertical 0.52808938  1.893619
## 60               roi_lh_Lat_Fis.post 0.18737862  5.336788
## 61             roi_lh_Pole_occipital 0.30952048  3.230804
## 62              roi_lh_Pole_temporal 0.44214482  2.261702
## 63                roi_lh_S_calcarine 0.10392030  9.622759
## 64                  roi_lh_S_central 0.11222563  8.910620
## 65        roi_lh_S_cingul.Marginalis 0.23575243  4.241738
## 66      roi_lh_S_circular_insula_ant 0.29475468  3.392652
## 67      roi_lh_S_circular_insula_inf 0.24417494  4.095424
## 68      roi_lh_S_circular_insula_sup 0.18606502  5.374465
## 69        roi_lh_S_collat_transv_ant 0.62215817  1.607308
## 70       roi_lh_S_collat_transv_post 0.32351133  3.091082
## 71                roi_lh_S_front_inf 0.17523685  5.706562
## 72             roi_lh_S_front_middle 0.20403612  4.901093
## 73                roi_lh_S_front_sup 0.13559565  7.374868
## 74       roi_lh_S_interm_prim.Jensen 0.33721867  2.965435
## 75  roi_lh_S_intrapariet_and_P_trans 0.13375748  7.476217
## 76    roi_lh_S_oc_middle_and_Lunatus 0.22392568  4.465767
## 77   roi_lh_S_oc_sup_and_transversal 0.16173686  6.182883
## 78            roi_lh_S_occipital_ant 0.29608092  3.377455
## 79              roi_lh_S_oc.temp_lat 0.32181090  3.107415
## 80  roi_lh_S_oc.temp_med_and_Lingual 0.18485849  5.409543
## 81          roi_lh_S_orbital_lateral 0.40263733  2.483625
## 82       roi_lh_S_orbital_med.olfact 0.57410664  1.741837
## 83         roi_lh_S_orbital.H_Shaped 0.32526671  3.074400
## 84        roi_lh_S_parieto_occipital 0.16971039  5.892391
## 85             roi_lh_S_pericallosal 0.18891848  5.293288
## 86              roi_lh_S_postcentral 0.18928765  5.282965
## 87      roi_lh_S_precentral.inf.part 0.16822941  5.944264
## 88      roi_lh_S_precentral.sup.part 0.24323773  4.111204
## 89               roi_lh_S_suborbital 0.38943279  2.567837
## 90              roi_lh_S_subparietal 0.23459756  4.262619
## 91             roi_lh_S_temporal_inf 0.48159259  2.076444
## 92             roi_lh_S_temporal_sup 0.15428514  6.481505
## 93      roi_lh_S_temporal_transverse 0.37978344  2.633079
## 94       roi_rh_G_and_S_frontomargin 0.48530903  2.060543
## 95      roi_rh_G_and_S_occipital_inf 0.24458314  4.088589
## 96        roi_rh_G_and_S_paracentral 0.24950205  4.007983
## 97         roi_rh_G_and_S_subcentral 0.19870707  5.032534
## 98   roi_rh_G_and_S_transv_frontopol 0.43241073  2.312616
## 99         roi_rh_G_and_S_cingul.Ant 0.18148623  5.510060
## 100    roi_rh_G_and_S_cingul.Mid.Ant 0.18429042  5.426218
## 101   roi_rh_G_and_S_cingul.Mid.Post 0.18712070  5.344144
## 102      roi_rh_G_cingul.Post.dorsal 0.22217564  4.500944
## 103     roi_rh_G_cingul.Post.ventral 0.30178975  3.313565
## 104                  roi_rh_G_cuneus 0.15963464  6.264305
## 105     roi_rh_G_front_inf.Opercular 0.19692920  5.077967
## 106       roi_rh_G_front_inf.Orbital 0.43439573  2.302048
## 107      roi_rh_G_front_inf.Triangul 0.26603384  3.758920
## 108            roi_rh_G_front_middle 0.10770719  9.284431
## 109               roi_rh_G_front_sup 0.10329596  9.680921
## 110   roi_rh_G_Ins_lg_and_S_cent_ins 0.28801178  3.472080
## 111           roi_rh_G_insular_short 0.27042153  3.697930
## 112        roi_rh_G_occipital_middle 0.16589992  6.027730
## 113           roi_rh_G_occipital_sup 0.17718555  5.643801
## 114     roi_rh_G_oc.temp_lat.fusifor 0.30079446  3.324529
## 115     roi_rh_G_oc.temp_med.Lingual 0.10764313  9.289957
## 116     roi_rh_G_oc.temp_med.Parahip 0.54975290  1.818999
## 117                 roi_rh_G_orbital 0.36343427  2.751529
## 118      roi_rh_G_pariet_inf.Angular 0.12021565  8.318385
## 119     roi_rh_G_pariet_inf.Supramar 0.17172610  5.823227
## 120            roi_rh_G_parietal_sup 0.16524702  6.051546
## 121             roi_rh_G_postcentral 0.13398632  7.463449
## 122              roi_rh_G_precentral 0.14469340  6.911165
## 123               roi_rh_G_precuneus 0.13129277  7.616565
## 124                  roi_rh_G_rectus 0.60379441  1.656193
## 125             roi_rh_G_subcallosal 0.56962590  1.755538
## 126     roi_rh_G_temp_sup.G_T_transv 0.31439027  3.180760
## 127        roi_rh_G_temp_sup.Lateral 0.31061482  3.219421
## 128     roi_rh_G_temp_sup.Plan_polar 0.44499697  2.247206
## 129     roi_rh_G_temp_sup.Plan_tempo 0.26488401  3.775237
## 130            roi_rh_G_temporal_inf 0.52417247  1.907769
## 131         roi_rh_G_temporal_middle 0.37070583  2.697557
## 132      roi_rh_Lat_Fis.ant.Horizont 0.42114155  2.374499
## 133      roi_rh_Lat_Fis.ant.Vertical 0.50637833  1.974808
## 134              roi_rh_Lat_Fis.post 0.17494970  5.715929
## 135            roi_rh_Pole_occipital 0.22504482  4.443559
## 136             roi_rh_Pole_temporal 0.45048714  2.219819
## 137               roi_rh_S_calcarine 0.09890480 10.110732
## 138                 roi_rh_S_central 0.11513124  8.685740
## 139       roi_rh_S_cingul.Marginalis 0.25205256  3.967426
## 140     roi_rh_S_circular_insula_ant 0.25851274  3.868281
## 141     roi_rh_S_circular_insula_inf 0.27627975  3.619520
## 142     roi_rh_S_circular_insula_sup 0.23133968  4.322648
## 143       roi_rh_S_collat_transv_ant 0.63756379  1.568470
## 144      roi_rh_S_collat_transv_post 0.30434031  3.285795
## 145               roi_rh_S_front_inf 0.18693670  5.349404
## 146            roi_rh_S_front_middle 0.16039624  6.234560
## 147               roi_rh_S_front_sup 0.15167054  6.593238
## 148      roi_rh_S_interm_prim.Jensen 0.29976033  3.335998
## 149 roi_rh_S_intrapariet_and_P_trans 0.14516999  6.888476
## 150   roi_rh_S_oc_middle_and_Lunatus 0.21949110  4.555993
## 151  roi_rh_S_oc_sup_and_transversal 0.15321011  6.526984
## 152           roi_rh_S_occipital_ant 0.32215288  3.104116
## 153             roi_rh_S_oc.temp_lat 0.42770915  2.338037
## 154 roi_rh_S_oc.temp_med_and_Lingual 0.19095425  5.236856
## 155         roi_rh_S_orbital_lateral 0.39409924  2.537432
## 156      roi_rh_S_orbital_med.olfact 0.54011757  1.851449
## 157        roi_rh_S_orbital.H_Shaped 0.31329294  3.191901
## 158       roi_rh_S_parieto_occipital 0.16707639  5.985286
## 159            roi_rh_S_pericallosal 0.16599960  6.024111
## 160             roi_rh_S_postcentral 0.22643296  4.416318
## 161     roi_rh_S_precentral.inf.part 0.18206414  5.492570
## 162     roi_rh_S_precentral.sup.part 0.23768230  4.207297
## 163              roi_rh_S_suborbital 0.53365022  1.873887
## 164             roi_rh_S_subparietal 0.21792846  4.588662
## 165            roi_rh_S_temporal_inf 0.49018959  2.040027
## 166            roi_rh_S_temporal_sup 0.13858170  7.215960
## 167     roi_rh_S_temporal_transverse 0.37403684  2.673533
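In this table the first numeric column is the tolerance (1/VIF) and the second is the VIF itself. A table of this form can be reproduced from the fitted OLS model with olsrr (attached; see the session information below). The original call is not shown in this document, so this is only a sketch:

# A sketch: olsrr::ols_vif_tol() returns the tolerance (1/VIF) and VIF
# for every predictor of a fitted lm object.
library("olsrr")
ols_vif_tol(OLS_fit_gfactor[[1]])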
summary(OLS_fit_gfactor[[1]])
## 
## Call:
## lm(formula = .x, data = .y %>% bake(new_data = NULL))
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -3.2367 -0.5815  0.0308  0.5818  2.9546 
## 
## Coefficients:
##                                                 Estimate
## (Intercept)                      -0.00000000000000002421
## roi_Left.Cerebellum.Cortex        0.00966131461153772568
## roi_Left.Thalamus.Proper          0.01177037636976946411
## roi_Left.Caudate                  0.01310503720389756023
## roi_Left.Putamen                  0.01130670628551590055
## roi_Left.Pallidum                -0.02966998013315826771
## roi_Brain.Stem                   -0.01376196019045078725
## roi_Left.Hippocampus             -0.07905194188584124815
## roi_Left.Amygdala                 0.06191620770662376733
## roi_Left.Accumbens.area          -0.02313580189328147060
## roi_Left.VentralDC                0.07688199212013521744
## roi_Right.Cerebellum.Cortex       0.03696881267224050999
## roi_Right.Thalamus.Proper        -0.04797935767057321527
## roi_Right.Caudate                -0.02726760146452630873
## roi_Right.Putamen                -0.01155277115289973094
## roi_Right.Pallidum                0.00741235414824566174
## roi_Right.Hippocampus             0.03509660553700912761
## roi_Right.Amygdala               -0.01923530037054431777
## roi_Right.Accumbens.area          0.00871939626605779637
## roi_Right.VentralDC               0.02875785269317946216
## roi_lh_G_and_S_frontomargin       0.00860853025681734309
## roi_lh_G_and_S_occipital_inf      0.02099941207341855451
## roi_lh_G_and_S_paracentral        0.00262990998770774777
## roi_lh_G_and_S_subcentral        -0.00789403932407936083
## roi_lh_G_and_S_transv_frontopol   0.00443396704536213378
## roi_lh_G_and_S_cingul.Ant         0.03592906260948611902
## roi_lh_G_and_S_cingul.Mid.Ant     0.02665745032436623793
## roi_lh_G_and_S_cingul.Mid.Post   -0.05324801965651750069
## roi_lh_G_cingul.Post.dorsal      -0.08662740424592965671
## roi_lh_G_cingul.Post.ventral     -0.02011551996747178161
## roi_lh_G_cuneus                   0.10628505258869698491
## roi_lh_G_front_inf.Opercular      0.04993018454751842194
## roi_lh_G_front_inf.Orbital       -0.01204429012693875901
## roi_lh_G_front_inf.Triangul      -0.01842964879410049703
## roi_lh_G_front_middle            -0.01602400449107488703
## roi_lh_G_front_sup               -0.02729030996074978052
## roi_lh_G_Ins_lg_and_S_cent_ins   -0.03954633798632166408
## roi_lh_G_insular_short            0.02993054843103441776
## roi_lh_G_occipital_middle         0.02902907427346838876
## roi_lh_G_occipital_sup           -0.03900903470473601059
## roi_lh_G_oc.temp_lat.fusifor      0.00797497861140587794
## roi_lh_G_oc.temp_med.Lingual      0.04873348223322794548
## roi_lh_G_oc.temp_med.Parahip      0.03645790597464744487
## roi_lh_G_orbital                 -0.06256297637069772877
## roi_lh_G_pariet_inf.Angular      -0.12217649577683124817
## roi_lh_G_pariet_inf.Supramar     -0.02940020833603647701
## roi_lh_G_parietal_sup             0.11742002851151804588
## roi_lh_G_postcentral              0.06509293790453225814
## roi_lh_G_precentral              -0.01952613188110465317
## roi_lh_G_precuneus                0.14012374537209376646
## roi_lh_G_rectus                   0.04887897608152006607
## roi_lh_G_subcallosal             -0.01613790343203189842
## roi_lh_G_temp_sup.G_T_transv      0.00588378235649806864
## roi_lh_G_temp_sup.Lateral         0.00242817698769527994
## roi_lh_G_temp_sup.Plan_polar     -0.05199564960949133730
## roi_lh_G_temp_sup.Plan_tempo      0.01666818031105053027
## roi_lh_G_temporal_inf            -0.02410532089092182889
## roi_lh_G_temporal_middle         -0.01639067862541369533
## roi_lh_Lat_Fis.ant.Horizont       0.01918650584109483220
## roi_lh_Lat_Fis.ant.Vertical       0.02840932703942825738
## roi_lh_Lat_Fis.post              -0.05730353927202889996
## roi_lh_Pole_occipital             0.01624711656210765798
## roi_lh_Pole_temporal             -0.02443163897884133642
## roi_lh_S_calcarine               -0.01902206234926521328
## roi_lh_S_central                 -0.09101638113030841026
## roi_lh_S_cingul.Marginalis       -0.02879237746234408410
## roi_lh_S_circular_insula_ant     -0.03677579476979808693
## roi_lh_S_circular_insula_inf     -0.02981789904661133472
## roi_lh_S_circular_insula_sup     -0.00434522713859756323
## roi_lh_S_collat_transv_ant       -0.01347418971245172362
## roi_lh_S_collat_transv_post      -0.00991158117058269471
## roi_lh_S_front_inf                0.06914619998293770886
## roi_lh_S_front_middle            -0.07254411335641308689
## roi_lh_S_front_sup                0.13269614087199388397
## roi_lh_S_interm_prim.Jensen       0.03314267472660410718
## roi_lh_S_intrapariet_and_P_trans -0.05491321968722431140
## roi_lh_S_oc_middle_and_Lunatus   -0.04533841187652120491
## roi_lh_S_oc_sup_and_transversal   0.00063333899289398480
## roi_lh_S_occipital_ant            0.02086294892339740289
## roi_lh_S_oc.temp_lat              0.00582385561957353241
## roi_lh_S_oc.temp_med_and_Lingual  0.03571153506040759124
## roi_lh_S_orbital_lateral          0.01196908867653649448
## roi_lh_S_orbital_med.olfact       0.01699945213129518129
## roi_lh_S_orbital.H_Shaped        -0.03082761924350217037
## roi_lh_S_parieto_occipital       -0.06675551214385934407
## roi_lh_S_pericallosal            -0.01080728969350605410
## roi_lh_S_postcentral             -0.03683990266384409851
## roi_lh_S_precentral.inf.part      0.03499746312496239409
## roi_lh_S_precentral.sup.part      0.04984511038997261473
## roi_lh_S_suborbital              -0.02147374322110650463
## roi_lh_S_subparietal             -0.01323316414123252432
## roi_lh_S_temporal_inf            -0.02737684395961494793
## roi_lh_S_temporal_sup             0.03678005598580073171
## roi_lh_S_temporal_transverse      0.00849583223700136649
## roi_rh_G_and_S_frontomargin       0.03888558427046354821
## roi_rh_G_and_S_occipital_inf     -0.00341018970374145367
## roi_rh_G_and_S_paracentral       -0.03923651960139699912
## roi_rh_G_and_S_subcentral        -0.03557507786401344074
## roi_rh_G_and_S_transv_frontopol  -0.01745900206027690624
## roi_rh_G_and_S_cingul.Ant         0.03121709317231347036
## roi_rh_G_and_S_cingul.Mid.Ant     0.06616808574255822473
## roi_rh_G_and_S_cingul.Mid.Post    0.04920721929926234750
## roi_rh_G_cingul.Post.dorsal       0.04202723140331964674
## roi_rh_G_cingul.Post.ventral     -0.04858959091124330498
## roi_rh_G_cuneus                  -0.04216042645617019274
## roi_rh_G_front_inf.Opercular      0.03359523469555928538
## roi_rh_G_front_inf.Orbital       -0.05440228127682450454
## roi_rh_G_front_inf.Triangul      -0.01628211075638175478
## roi_rh_G_front_middle             0.08626456812815107289
## roi_rh_G_front_sup               -0.14728955680117258864
## roi_rh_G_Ins_lg_and_S_cent_ins   -0.03536374332756012789
## roi_rh_G_insular_short            0.07765645813756125171
## roi_rh_G_occipital_middle        -0.05161906678069110022
## roi_rh_G_occipital_sup           -0.01965977037744999836
## roi_rh_G_oc.temp_lat.fusifor      0.00657003321815724170
## roi_rh_G_oc.temp_med.Lingual      0.02546870440500232508
## roi_rh_G_oc.temp_med.Parahip      0.01492347049101621324
## roi_rh_G_orbital                 -0.01607992231394794747
## roi_rh_G_pariet_inf.Angular       0.01636770458321264513
## roi_rh_G_pariet_inf.Supramar     -0.12591154726566700095
## roi_rh_G_parietal_sup            -0.02358168762892801779
## roi_rh_G_postcentral              0.03658510284323202022
## roi_rh_G_precentral               0.06647980609229742210
## roi_rh_G_precuneus                0.12046767291032406400
## roi_rh_G_rectus                   0.01755174899943042280
## roi_rh_G_subcallosal             -0.01032257110209917027
## roi_rh_G_temp_sup.G_T_transv      0.04531292836688068093
## roi_rh_G_temp_sup.Lateral         0.02795342691142018929
## roi_rh_G_temp_sup.Plan_polar      0.02001646835029793209
## roi_rh_G_temp_sup.Plan_tempo     -0.02140234586545168918
## roi_rh_G_temporal_inf            -0.03761595937129045414
## roi_rh_G_temporal_middle          0.00801009248546306375
## roi_rh_Lat_Fis.ant.Horizont      -0.01711655825542161141
## roi_rh_Lat_Fis.ant.Vertical      -0.01946418931182052753
## roi_rh_Lat_Fis.post               0.02439895565316811676
## roi_rh_Pole_occipital            -0.06801050574518398284
## roi_rh_Pole_temporal              0.00064492323602705049
## roi_rh_S_calcarine                0.06803397324601162532
## roi_rh_S_central                  0.01796886265261512031
## roi_rh_S_cingul.Marginalis       -0.00672875333333778670
## roi_rh_S_circular_insula_ant      0.05069494321776563117
## roi_rh_S_circular_insula_inf      0.00344875972382770093
## roi_rh_S_circular_insula_sup     -0.01954525327565757864
## roi_rh_S_collat_transv_ant        0.03631443870302092369
## roi_rh_S_collat_transv_post      -0.04359264507861023569
## roi_rh_S_front_inf               -0.07429819524793281060
## roi_rh_S_front_middle            -0.02809800381626406662
## roi_rh_S_front_sup                0.08209196217295661180
## roi_rh_S_interm_prim.Jensen       0.00146182755096764560
## roi_rh_S_intrapariet_and_P_trans  0.13079866881895360620
## roi_rh_S_oc_middle_and_Lunatus    0.06917123680815549791
## roi_rh_S_oc_sup_and_transversal  -0.07287526920181292001
## roi_rh_S_occipital_ant            0.03972410197388721170
## roi_rh_S_oc.temp_lat             -0.02675773290045807551
## roi_rh_S_oc.temp_med_and_Lingual -0.05328215486016094765
## roi_rh_S_orbital_lateral         -0.04486750760060319310
## roi_rh_S_orbital_med.olfact      -0.00562618334459519254
## roi_rh_S_orbital.H_Shaped         0.04768031300827030305
## roi_rh_S_parieto_occipital       -0.04197262079918621935
## roi_rh_S_pericallosal            -0.00601257301349015800
## roi_rh_S_postcentral             -0.04859697191830562868
## roi_rh_S_precentral.inf.part      0.00652610663994512407
## roi_rh_S_precentral.sup.part     -0.04098953629240435076
## roi_rh_S_suborbital              -0.04140952026458529639
## roi_rh_S_subparietal             -0.03287930929565580779
## roi_rh_S_temporal_inf             0.02545105403410785616
## roi_rh_S_temporal_sup            -0.01108569468447078918
## roi_rh_S_temporal_transverse      0.00845301362801755170
##                                               Std. Error
## (Intercept)                       0.01622812612306296157
## roi_Left.Cerebellum.Cortex        0.03325224553453359072
## roi_Left.Thalamus.Proper          0.04237961590039338033
## roi_Left.Caudate                  0.05010233986605008155
## roi_Left.Putamen                  0.04955969767727115560
## roi_Left.Pallidum                 0.02551001631995352195
## roi_Brain.Stem                    0.02412608069663895252
## roi_Left.Hippocampus              0.03320368829276035966
## roi_Left.Amygdala                 0.02562923672500416056
## roi_Left.Accumbens.area           0.02450868006472781482
## roi_Left.VentralDC                0.02659544100427128563
## roi_Right.Cerebellum.Cortex       0.03411761850265186691
## roi_Right.Thalamus.Proper         0.04377289781135267760
## roi_Right.Caudate                 0.04903878602891673794
## roi_Right.Putamen                 0.04944259039956272472
## roi_Right.Pallidum                0.02326734884828391206
## roi_Right.Hippocampus             0.03186313902495444689
## roi_Right.Amygdala                0.02562611865374391645
## roi_Right.Accumbens.area          0.02424283013897716998
## roi_Right.VentralDC               0.02612452273120478494
## roi_lh_G_and_S_frontomargin       0.02424390693003002084
## roi_lh_G_and_S_occipital_inf      0.03498499368967847301
## roi_lh_G_and_S_paracentral        0.03236905657135460967
## roi_lh_G_and_S_subcentral         0.03801333351590827081
## roi_lh_G_and_S_transv_frontopol   0.02291042576628239680
## roi_lh_G_and_S_cingul.Ant         0.04015710639211041155
## roi_lh_G_and_S_cingul.Mid.Ant     0.03741679430368499704
## roi_lh_G_and_S_cingul.Mid.Post    0.03670441007365443653
## roi_lh_G_cingul.Post.dorsal       0.03499955480252105872
## roi_lh_G_cingul.Post.ventral      0.02805382140742967295
## roi_lh_G_cuneus                   0.04445389858872001582
## roi_lh_G_front_inf.Opercular      0.03782138917764220343
## roi_lh_G_front_inf.Orbital        0.02378712385461861575
## roi_lh_G_front_inf.Triangul       0.03095648179475042616
## roi_lh_G_front_middle             0.04885280922847451412
## roi_lh_G_front_sup                0.05631918850202335264
## roi_lh_G_Ins_lg_and_S_cent_ins    0.03123523322559123086
## roi_lh_G_insular_short            0.03153972307614961307
## roi_lh_G_occipital_middle         0.03827752790607135552
## roi_lh_G_occipital_sup            0.03780731687319049961
## roi_lh_G_oc.temp_lat.fusifor      0.03308820692380676221
## roi_lh_G_oc.temp_med.Lingual      0.04853991478305191842
## roi_lh_G_oc.temp_med.Parahip      0.02194215339278950128
## roi_lh_G_orbital                  0.02571678319915044961
## roi_lh_G_pariet_inf.Angular       0.04563584225669104627
## roi_lh_G_pariet_inf.Supramar      0.04223316792522971430
## roi_lh_G_parietal_sup             0.04410233884127390258
## roi_lh_G_postcentral              0.04358307169217014859
## roi_lh_G_precentral               0.04467105431710172159
## roi_lh_G_precuneus                0.04629031763053293586
## roi_lh_G_rectus                   0.02174660312364970829
## roi_lh_G_subcallosal              0.02263051462813885084
## roi_lh_G_temp_sup.G_T_transv      0.02946612124384470274
## roi_lh_G_temp_sup.Lateral         0.03029675593926029267
## roi_lh_G_temp_sup.Plan_polar      0.02353369315977707488
## roi_lh_G_temp_sup.Plan_tempo      0.03024190724023622170
## roi_lh_G_temporal_inf             0.02226533863349853051
## roi_lh_G_temporal_middle          0.02564410925238447617
## roi_lh_Lat_Fis.ant.Horizont       0.02369340158343016811
## roi_lh_Lat_Fis.ant.Vertical       0.02233508261634072067
## roi_lh_Lat_Fis.post               0.03749568283302279897
## roi_lh_Pole_occipital             0.02917404052191484298
## roi_lh_Pole_temporal              0.02440950701663549202
## roi_lh_S_calcarine                0.05034902740717630415
## roi_lh_S_central                  0.04845016364823095945
## roi_lh_S_cingul.Marginalis        0.03342821536922090109
## roi_lh_S_circular_insula_ant      0.02989585142577418780
## roi_lh_S_circular_insula_inf      0.03284662344909109605
## roi_lh_S_circular_insula_sup      0.03762780709400883467
## roi_lh_S_collat_transv_ant        0.02057741783174552377
## roi_lh_S_collat_transv_post       0.02853622590437551385
## roi_lh_S_front_inf                0.03877292412812203409
## roi_lh_S_front_middle             0.03593252583987504439
## roi_lh_S_front_sup                0.04407764685893626744
## roi_lh_S_interm_prim.Jensen       0.02795023560972039606
## roi_lh_S_intrapariet_and_P_trans  0.04437948343110049293
## roi_lh_S_oc_middle_and_Lunatus    0.03429962118526326542
## roi_lh_S_oc_sup_and_transversal   0.04035866396616210466
## roi_lh_S_occipital_ant            0.02982881980429992475
## roi_lh_S_oc.temp_lat              0.02861151848487899704
## roi_lh_S_oc.temp_med_and_Lingual  0.03775040187417103010
## roi_lh_S_orbital_lateral          0.02557904099683192337
## roi_lh_S_orbital_med.olfact       0.02142125901620976422
## roi_lh_S_orbital.H_Shaped         0.02845912010708764525
## roi_lh_S_parieto_occipital        0.03939916967209613347
## roi_lh_S_pericallosal             0.03734255775870513883
## roi_lh_S_postcentral              0.03730612482056724216
## roi_lh_S_precentral.inf.part      0.03957221163350137239
## roi_lh_S_precentral.sup.part      0.03290984280732334177
## roi_lh_S_suborbital               0.02600908184363911330
## roi_lh_S_subparietal              0.03351039417672585508
## roi_lh_S_temporal_inf             0.02338844682415765208
## roi_lh_S_temporal_sup             0.04132179943594301019
## roi_lh_S_temporal_transverse      0.02633742213588468814
## roi_rh_G_and_S_frontomargin       0.02329872167599779387
## roi_rh_G_and_S_occipital_inf      0.03281920228600784112
## roi_rh_G_and_S_paracentral        0.03249407796990792796
## roi_rh_G_and_S_subcentral         0.03641116877675910912
## roi_rh_G_and_S_transv_frontopol   0.02468272167114939769
## roi_rh_G_and_S_cingul.Ant         0.03809951464161196594
## roi_rh_G_and_S_cingul.Mid.Ant     0.03780853896110410867
## roi_rh_G_and_S_cingul.Mid.Post    0.03752151492620676987
## roi_rh_G_cingul.Post.dorsal       0.03443444306303389962
## roi_rh_G_cingul.Post.ventral      0.02954534300294875057
## roi_rh_G_cuneus                   0.04062353504329531406
## roi_rh_G_front_inf.Opercular      0.03657515910292990363
## roi_rh_G_front_inf.Orbital        0.02462626260165058992
## roi_rh_G_front_inf.Triangul       0.03146826775836233242
## roi_rh_G_front_middle             0.04945599321021364181
## roi_rh_G_front_sup                0.05050095745233817990
## roi_rh_G_Ins_lg_and_S_cent_ins    0.03024378600536735406
## roi_rh_G_insular_short            0.03121193154951108426
## roi_rh_G_occipital_middle         0.03984906946480461232
## roi_rh_G_occipital_sup            0.03855912171784179471
## roi_rh_G_oc.temp_lat.fusifor      0.02959418363798841517
## roi_rh_G_oc.temp_med.Lingual      0.04947070728746837120
## roi_rh_G_oc.temp_med.Parahip      0.02189059250002741222
## roi_rh_G_orbital                  0.02692330329488351950
## roi_rh_G_pariet_inf.Angular       0.04681238583250715152
## roi_rh_G_pariet_inf.Supramar      0.03916725432427168446
## roi_rh_G_parietal_sup             0.03992771479458961242
## roi_rh_G_postcentral              0.04434156882663216337
## roi_rh_G_precentral               0.04266943760911798972
## roi_rh_G_precuneus                0.04479410541277694530
## roi_rh_G_rectus                   0.02088799413779220820
## roi_rh_G_subcallosal              0.02150534508020609734
## roi_rh_G_temp_sup.G_T_transv      0.02894721144832470075
## roi_rh_G_temp_sup.Lateral         0.02912260340288247776
## roi_rh_G_temp_sup.Plan_polar      0.02433115659156450186
## roi_rh_G_temp_sup.Plan_tempo      0.03153649365926415593
## roi_rh_G_temporal_inf             0.02241837735043616248
## roi_rh_G_temporal_middle          0.02665793900122852625
## roi_rh_Lat_Fis.ant.Horizont       0.02501077982647486112
## roi_rh_Lat_Fis.ant.Vertical       0.02280886756163550896
## roi_rh_Lat_Fis.post               0.03880473076201389204
## roi_rh_Pole_occipital             0.03421423015773167320
## roi_rh_Pole_temporal              0.02418243797189643940
## roi_rh_S_calcarine                0.05160984874798812960
## roi_rh_S_central                  0.04783487896996239941
## roi_rh_S_cingul.Marginalis        0.03232925700083557480
## roi_rh_S_circular_insula_ant      0.03192275055610789558
## roi_rh_S_circular_insula_inf      0.03087925066958848927
## roi_rh_S_circular_insula_sup      0.03374552659766562324
## roi_rh_S_collat_transv_ant        0.02032728873182348961
## roi_rh_S_collat_transv_post       0.02942127833391260799
## roi_rh_S_front_inf                0.03753997647932689335
## roi_rh_S_front_middle             0.04052697514608587237
## roi_rh_S_front_sup                0.04167644431530644761
## roi_rh_S_interm_prim.Jensen       0.02964518743747439344
## roi_rh_S_intrapariet_and_P_trans  0.04259933918890899951
## roi_rh_S_oc_middle_and_Lunatus    0.03464438237246675323
## roi_rh_S_oc_sup_and_transversal   0.04146651811381556890
## roi_rh_S_occipital_ant            0.02859632786037229896
## roi_rh_S_oc.temp_lat              0.02481801312877721447
## roi_rh_S_oc.temp_med_and_Lingual  0.03714296867892835641
## roi_rh_S_orbital_lateral          0.02585463898718546455
## roi_rh_S_orbital_med.olfact       0.02208498586291920626
## roi_rh_S_orbital.H_Shaped         0.02899786185886317480
## roi_rh_S_parieto_occipital        0.03970852349174397444
## roi_rh_S_pericallosal             0.03983710381789809518
## roi_rh_S_postcentral              0.03410919377375777606
## roi_rh_S_precentral.inf.part      0.03803899872330807758
## roi_rh_S_precentral.sup.part      0.03329222800014788181
## roi_rh_S_suborbital               0.02221840783728454630
## roi_rh_S_subparietal              0.03476836806965011295
## roi_rh_S_temporal_inf             0.02318244497400098747
## roi_rh_S_temporal_sup             0.04360018587890156921
## roi_rh_S_temporal_transverse      0.02653897161249051811
##                                  t value Pr(>|t|)   
## (Intercept)                        0.000  1.00000   
## roi_Left.Cerebellum.Cortex         0.291  0.77142   
## roi_Left.Thalamus.Proper           0.278  0.78123   
## roi_Left.Caudate                   0.262  0.79368   
## roi_Left.Putamen                   0.228  0.81955   
## roi_Left.Pallidum                 -1.163  0.24490   
## roi_Brain.Stem                    -0.570  0.56844   
## roi_Left.Hippocampus              -2.381  0.01734 * 
## roi_Left.Amygdala                  2.416  0.01576 * 
## roi_Left.Accumbens.area           -0.944  0.34526   
## roi_Left.VentralDC                 2.891  0.00387 **
## roi_Right.Cerebellum.Cortex        1.084  0.27865   
## roi_Right.Thalamus.Proper         -1.096  0.27313   
## roi_Right.Caudate                 -0.556  0.57823   
## roi_Right.Putamen                 -0.234  0.81527   
## roi_Right.Pallidum                 0.319  0.75007   
## roi_Right.Hippocampus              1.101  0.27078   
## roi_Right.Amygdala                -0.751  0.45295   
## roi_Right.Accumbens.area           0.360  0.71912   
## roi_Right.VentralDC                1.101  0.27108   
## roi_lh_G_and_S_frontomargin        0.355  0.72256   
## roi_lh_G_and_S_occipital_inf       0.600  0.54839   
## roi_lh_G_and_S_paracentral         0.081  0.93525   
## roi_lh_G_and_S_subcentral         -0.208  0.83551   
## roi_lh_G_and_S_transv_frontopol    0.194  0.84655   
## roi_lh_G_and_S_cingul.Ant          0.895  0.37102   
## roi_lh_G_and_S_cingul.Mid.Ant      0.712  0.47625   
## roi_lh_G_and_S_cingul.Mid.Post    -1.451  0.14697   
## roi_lh_G_cingul.Post.dorsal       -2.475  0.01338 * 
## roi_lh_G_cingul.Post.ventral      -0.717  0.47341   
## roi_lh_G_cuneus                    2.391  0.01687 * 
## roi_lh_G_front_inf.Opercular       1.320  0.18689   
## roi_lh_G_front_inf.Orbital        -0.506  0.61266   
## roi_lh_G_front_inf.Triangul       -0.595  0.55166   
## roi_lh_G_front_middle             -0.328  0.74293   
## roi_lh_G_front_sup                -0.485  0.62802   
## roi_lh_G_Ins_lg_and_S_cent_ins    -1.266  0.20559   
## roi_lh_G_insular_short             0.949  0.34271   
## roi_lh_G_occipital_middle          0.758  0.44828   
## roi_lh_G_occipital_sup            -1.032  0.30226   
## roi_lh_G_oc.temp_lat.fusifor       0.241  0.80956   
## roi_lh_G_oc.temp_med.Lingual       1.004  0.31547   
## roi_lh_G_oc.temp_med.Parahip       1.662  0.09672 . 
## roi_lh_G_orbital                  -2.433  0.01505 * 
## roi_lh_G_pariet_inf.Angular       -2.677  0.00747 **
## roi_lh_G_pariet_inf.Supramar      -0.696  0.48640   
## roi_lh_G_parietal_sup              2.662  0.00780 **
## roi_lh_G_postcentral               1.494  0.13541   
## roi_lh_G_precentral               -0.437  0.66207   
## roi_lh_G_precuneus                 3.027  0.00249 **
## roi_lh_G_rectus                    2.248  0.02468 * 
## roi_lh_G_subcallosal              -0.713  0.47584   
## roi_lh_G_temp_sup.G_T_transv       0.200  0.84175   
## roi_lh_G_temp_sup.Lateral          0.080  0.93613   
## roi_lh_G_temp_sup.Plan_polar      -2.209  0.02723 * 
## roi_lh_G_temp_sup.Plan_tempo       0.551  0.58157   
## roi_lh_G_temporal_inf             -1.083  0.27906   
## roi_lh_G_temporal_middle          -0.639  0.52277   
## roi_lh_Lat_Fis.ant.Horizont        0.810  0.41813   
## roi_lh_Lat_Fis.ant.Vertical        1.272  0.20349   
## roi_lh_Lat_Fis.post               -1.528  0.12656   
## roi_lh_Pole_occipital              0.557  0.57764   
## roi_lh_Pole_temporal              -1.001  0.31696   
## roi_lh_S_calcarine                -0.378  0.70560   
## roi_lh_S_central                  -1.879  0.06041 . 
## roi_lh_S_cingul.Marginalis        -0.861  0.38914   
## roi_lh_S_circular_insula_ant      -1.230  0.21875   
## roi_lh_S_circular_insula_inf      -0.908  0.36407   
## roi_lh_S_circular_insula_sup      -0.115  0.90807   
## roi_lh_S_collat_transv_ant        -0.655  0.51265   
## roi_lh_S_collat_transv_post       -0.347  0.72837   
## roi_lh_S_front_inf                 1.783  0.07464 . 
## roi_lh_S_front_middle             -2.019  0.04359 * 
## roi_lh_S_front_sup                 3.011  0.00263 **
## roi_lh_S_interm_prim.Jensen        1.186  0.23581   
## roi_lh_S_intrapariet_and_P_trans  -1.237  0.21606   
## roi_lh_S_oc_middle_and_Lunatus    -1.322  0.18633   
## roi_lh_S_oc_sup_and_transversal    0.016  0.98748   
## roi_lh_S_occipital_ant             0.699  0.48435   
## roi_lh_S_oc.temp_lat               0.204  0.83872   
## roi_lh_S_oc.temp_med_and_Lingual   0.946  0.34423   
## roi_lh_S_orbital_lateral           0.468  0.63987   
## roi_lh_S_orbital_med.olfact        0.794  0.42751   
## roi_lh_S_orbital.H_Shaped         -1.083  0.27880   
## roi_lh_S_parieto_occipital        -1.694  0.09031 . 
## roi_lh_S_pericallosal             -0.289  0.77229   
## roi_lh_S_postcentral              -0.988  0.32348   
## roi_lh_S_precentral.inf.part       0.884  0.37656   
## roi_lh_S_precentral.sup.part       1.515  0.12999   
## roi_lh_S_suborbital               -0.826  0.40909   
## roi_lh_S_subparietal              -0.395  0.69295   
## roi_lh_S_temporal_inf             -1.171  0.24189   
## roi_lh_S_temporal_sup              0.890  0.37349   
## roi_lh_S_temporal_transverse       0.323  0.74704   
## roi_rh_G_and_S_frontomargin        1.669  0.09523 . 
## roi_rh_G_and_S_occipital_inf      -0.104  0.91725   
## roi_rh_G_and_S_paracentral        -1.207  0.22734   
## roi_rh_G_and_S_subcentral         -0.977  0.32863   
## roi_rh_G_and_S_transv_frontopol   -0.707  0.47942   
## roi_rh_G_and_S_cingul.Ant          0.819  0.41265   
## roi_rh_G_and_S_cingul.Mid.Ant      1.750  0.08021 . 
## roi_rh_G_and_S_cingul.Mid.Post     1.311  0.18982   
## roi_rh_G_cingul.Post.dorsal        1.220  0.22238   
## roi_rh_G_cingul.Post.ventral      -1.645  0.10017   
## roi_rh_G_cuneus                   -1.038  0.29944   
## roi_rh_G_front_inf.Opercular       0.919  0.35842   
## roi_rh_G_front_inf.Orbital        -2.209  0.02725 * 
## roi_rh_G_front_inf.Triangul       -0.517  0.60491   
## roi_rh_G_front_middle              1.744  0.08122 . 
## roi_rh_G_front_sup                -2.917  0.00357 **
## roi_rh_G_Ins_lg_and_S_cent_ins    -1.169  0.24239   
## roi_rh_G_insular_short             2.488  0.01290 * 
## roi_rh_G_occipital_middle         -1.295  0.19530   
## roi_rh_G_occipital_sup            -0.510  0.61019   
## roi_rh_G_oc.temp_lat.fusifor       0.222  0.82433   
## roi_rh_G_oc.temp_med.Lingual       0.515  0.60672   
## roi_rh_G_oc.temp_med.Parahip       0.682  0.49547   
## roi_rh_G_orbital                  -0.597  0.55039   
## roi_rh_G_pariet_inf.Angular        0.350  0.72663   
## roi_rh_G_pariet_inf.Supramar      -3.215  0.00132 **
## roi_rh_G_parietal_sup             -0.591  0.55483   
## roi_rh_G_postcentral               0.825  0.40940   
## roi_rh_G_precentral                1.558  0.11934   
## roi_rh_G_precuneus                 2.689  0.00720 **
## roi_rh_G_rectus                    0.840  0.40082   
## roi_rh_G_subcallosal              -0.480  0.63126   
## roi_rh_G_temp_sup.G_T_transv       1.565  0.11761   
## roi_rh_G_temp_sup.Lateral          0.960  0.33721   
## roi_rh_G_temp_sup.Plan_polar       0.823  0.41077   
## roi_rh_G_temp_sup.Plan_tempo      -0.679  0.49741   
## roi_rh_G_temporal_inf             -1.678  0.09348 . 
## roi_rh_G_temporal_middle           0.300  0.76384   
## roi_rh_Lat_Fis.ant.Horizont       -0.684  0.49380   
## roi_rh_Lat_Fis.ant.Vertical       -0.853  0.39353   
## roi_rh_Lat_Fis.post                0.629  0.52956   
## roi_rh_Pole_occipital             -1.988  0.04693 * 
## roi_rh_Pole_temporal               0.027  0.97873   
## roi_rh_S_calcarine                 1.318  0.18753   
## roi_rh_S_central                   0.376  0.70721   
## roi_rh_S_cingul.Marginalis        -0.208  0.83514   
## roi_rh_S_circular_insula_ant       1.588  0.11239   
## roi_rh_S_circular_insula_inf       0.112  0.91108   
## roi_rh_S_circular_insula_sup      -0.579  0.56250   
## roi_rh_S_collat_transv_ant         1.786  0.07413 . 
## roi_rh_S_collat_transv_post       -1.482  0.13854   
## roi_rh_S_front_inf                -1.979  0.04789 * 
## roi_rh_S_front_middle             -0.693  0.48817   
## roi_rh_S_front_sup                 1.970  0.04897 * 
## roi_rh_S_interm_prim.Jensen        0.049  0.96068   
## roi_rh_S_intrapariet_and_P_trans   3.070  0.00216 **
## roi_rh_S_oc_middle_and_Lunatus     1.997  0.04596 * 
## roi_rh_S_oc_sup_and_transversal   -1.757  0.07895 . 
## roi_rh_S_occipital_ant             1.389  0.16490   
## roi_rh_S_oc.temp_lat              -1.078  0.28106   
## roi_rh_S_oc.temp_med_and_Lingual  -1.435  0.15154   
## roi_rh_S_orbital_lateral          -1.735  0.08278 . 
## roi_rh_S_orbital_med.olfact       -0.255  0.79893   
## roi_rh_S_orbital.H_Shaped          1.644  0.10023   
## roi_rh_S_parieto_occipital        -1.057  0.29059   
## roi_rh_S_pericallosal             -0.151  0.88004   
## roi_rh_S_postcentral              -1.425  0.15434   
## roi_rh_S_precentral.inf.part       0.172  0.86379   
## roi_rh_S_precentral.sup.part      -1.231  0.21835   
## roi_rh_S_suborbital               -1.864  0.06246 . 
## roi_rh_S_subparietal              -0.946  0.34440   
## roi_rh_S_temporal_inf              1.098  0.27236   
## roi_rh_S_temporal_sup             -0.254  0.79931   
## roi_rh_S_temporal_transverse       0.319  0.75012   
## ---
## Signif. codes:  
## 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.8857 on 2811 degrees of freedom
## Multiple R-squared:  0.2595, Adjusted R-squared:  0.2155 
## F-statistic: 5.898 on 167 and 2811 DF,  p-value: < 0.00000000000000022
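The VIF plots below work with these coefficients as columns of a data frame (estimate, std.error, p.value) rather than printed output. broom (attached; see the session information below) does this conversion; a minimal sketch, with the subsequent renaming and joining into enet_gfactor_target_and_null_renamed omitted:

# broom::tidy() flattens the lm summary above into a tibble with columns
# term, estimate, std.error, statistic and p.value.
OLS_tidy_gfactor <- broom::tidy(OLS_fit_gfactor[[1]])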

11.1 Fitting ridge regression for gfactor

fit_ridge_gfactor <-cfa_resp_names %>% 
  future_map(.,
             ~eNetXplorer(x = matrix_train_gfactor ,
                          y = resp_train_gfactor[[.]][[.]],
                          alpha = 0, 
                          n_fold = 10,
                          nlambda.ext = 1000, 
                          nlambda = 1000,
                          scaled = TRUE,
                          QF_gaussian = "mse",
                          seed = 123456)) 

saveRDS(fit_ridge_gfactor, paste0(anotherFold,'working_memory_tasks/windows/fit_ridge_gfactor_April_08_2022_rmse', '.RData'))
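Setting alpha = 0 selects the pure ridge end of the elastic-net family. Because the permutation-based cross-validation is slow, the fit is saved to disk and can be restored later without re-running (a usage sketch mirroring the saveRDS() call above):

# Restore the saved ridge fits instead of re-running the cross-validation.
fit_ridge_gfactor <- readRDS(paste0(anotherFold,'working_memory_tasks/windows/fit_ridge_gfactor_April_08_2022_rmse', '.RData'))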

11.2 Fitting LASSO regression for gfactor

The call mirrors the ridge fit above; the only change is alpha = 1, the pure LASSO end of the elastic-net family.

fit_lasso_gfactor <-cfa_resp_names %>% 
  future_map(.,
             ~eNetXplorer(x = matrix_train_gfactor ,
                          y = resp_train_gfactor[[.]][[.]],
                          alpha = 1, 
                          n_fold = 10,
                          nlambda.ext = 1000, 
                          nlambda = 1000,
                          scaled = TRUE,
                          QF_gaussian = "mse",
                          seed = 123456)) 

saveRDS(fit_lasso_gfactor, paste0(anotherFold,'working_memory_tasks/windows/fit_lasso_gfactor_April_08_2021_rmse', '.RData'))
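As with the ridge fit, the saved object can be restored later with readRDS(). Note that, unlike ridge, the LASSO penalty (alpha = 1) can shrink coefficients exactly to zero, which is worth keeping in mind when reading the coefficient plots that follow.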

11.2.3 OLS SE as a function of VIF

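For OLS, Var(b_j) = sigma^2 / ((n - 1) * Var(x_j)) * VIF_j, so each coefficient's standard error should scale with the square root of its VIF; the plots below visualise exactly this relationship. A quick numerical check on the plotted data (assuming the std.error and VIF columns used in the aes() calls below):

# With predictors on a common scale (the near-zero intercept above suggests
# standardised data), std.error should track sqrt(VIF) almost perfectly.
with(enet_gfactor_target_and_null_renamed, cor(std.error, sqrt(VIF)))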
library("jcolors")

ggplot(enet_gfactor_target_and_null_renamed , aes(x = VIF, y = std.error, 
                                                  color = significance
                                                  #color= p.value<=.05, 
                                                  )) +
  geom_point(size = 5, alpha = 0.4) + 
  scale_colour_discrete(name="significance") +
                         # breaks=c("FALSE", "TRUE"),
                         # labels=c(">.05", "<.05")) +
#  geom_linerange() +
  guides(color = FALSE) +
  theme_light() +
  theme(text = element_text(size = 30),
        panel.background = element_rect(fill = "grey97"),
          panel.border = element_blank()) +
#  theme(legend.position="top") + 
  xlab("Variable Inflation Factor") + ylab("OLS\nStandard Error") + 
                          scale_color_jcolors(palette = "pal8")

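# Same plot as above, but keeping the colour legend (no guides(color = FALSE)):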
ggplot(enet_gfactor_target_and_null_renamed , aes(x = VIF, y = std.error, 
                                                  color = significance
                                                  #color= p.value<=.05, 
                                                  )) +
  geom_point(size = 5, alpha = 0.4) + 
  scale_colour_discrete(name="significance") +
                         # breaks=c("FALSE", "TRUE"),
                         # labels=c(">.05", "<.05")) +
#  geom_linerange() +
  theme_light() +
  theme(text = element_text(size = 30),
        panel.background = element_rect(fill = "grey97"),
          panel.border = element_blank()) +
#  theme(legend.position="top") + 
  xlab("Variable Inflation Factor") + ylab("OLS\nStandard Error") + 
                          scale_color_jcolors(palette = "pal8")

11.2.4 OLS coefficients ±2SE as a function of VIF

The ±2SE ranges approximate 95% confidence intervals around each OLS coefficient.

ggplot(enet_gfactor_target_and_null_renamed, 
       aes(x = VIF, 
           y = estimate, 
           color = significance,
          # color= p.value<=.05, 
           ymin = estimate - (2 * std.error), 
           ymax = estimate + (2 * std.error))) +
  geom_point(size = 5, alpha = 0.4) +
  scale_colour_discrete(name="significance") +
  # scale_colour_discrete(name="p value",
  #                        breaks=c("FALSE", "TRUE"),
  #                        labels=c("> .05", "< .05")) +
  geom_linerange() +
  theme_light() +
  theme(text = element_text(size = 30),
        panel.background = element_rect(fill = "grey97"),
          panel.border = element_blank()) +
  guides(color = FALSE) +
#  theme(legend.position="top") + 
  xlab("Variable Inflation Factor") + ylab("OLS\nCoefficients ±2SE") +
  geom_hline(yintercept = 0) + 
                          scale_color_jcolors(palette = "pal8")

11.2.5 Enet null SD as a function of VIF

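The null distributions plotted in this and the following sections come from eNetXplorer's permutation scheme: within each run the response is permuted across observations and the model refit, yielding per-feature null coefficient means and SDs. Below is a sketch of where these summaries live on a fit object, under the assumption (based on the eNetXplorer documentation) that the slot names are as shown; the wrangling into the nullWmean/nullWsd columns is not reproduced here:

# Assumed slot names per the eNetXplorer documentation; column 1 because a
# single alpha value was fitted per call.
fit <- fit_ridge_gfactor[[1]]
target_coef_mean <- fit$feature_coef_wmean[, 1]
target_coef_sd <- fit$feature_coef_wsd[, 1]
null_coef_mean <- fit$null_feature_coef_wmean[, 1]
null_coef_sd <- fit$null_feature_coef_wsd[, 1]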
enet_gfactor_target_and_null_renamed %>%
ggplot(aes(x = VIF, y = nullWsd, color = significance)) +
  geom_point(size = 5, alpha = 0.4) + 
  scale_colour_discrete(name="significance") +
                         # breaks=c("FALSE", "TRUE"),
                         # labels=c(">.05", "<.05")) +
#  geom_linerange() +
  theme_light() +
  theme(text = element_text(size = 30),
        panel.background = element_rect(fill = "grey97"),
          panel.border = element_blank()) +
  guides(color = FALSE) +
  # theme(legend.position="top") + 
  xlab("Variable Inflation Factor") + ylab("Elastic Net\nSD of Permuted Null") +
  ylim(0, .021)+ 
                          scale_color_jcolors(palette = "pal8")

11.2.6 Enet null coefficients ±2SD as a function of VIF

ggplot(enet_gfactor_target_and_null_renamed, aes(x = VIF, y = nullWmean, color= significance, 
                                     ymin = nullWmean - (2 * nullWsd), ymax = nullWmean + (2 * nullWsd))) +
  geom_point(size = 5, alpha = 0.4) + 
  scale_colour_discrete(name="significance") +
                         # breaks=c("FALSE", "TRUE"),
                         # labels=c(">.05", "< .05")) +
  geom_linerange() +
  theme_light() +
  theme(text = element_text(size = 30),
        panel.background = element_rect(fill = "grey97"),
          panel.border = element_blank()) +
  guides(color = FALSE) +
  #theme(legend.position="top") + 
  xlab("Variable Inflation Factor") + ylab("Elastic Net\nCoefficients of\n Permuted Null±2SD")+ 
                          scale_color_jcolors(palette = "pal8") 

11.2.7 Ridge null SD as a function of VIF

enet_gfactor_target_and_null_renamed %>%
ggplot(aes(x = VIF, y = ridge_null_Wsd, color = significance)) +
  geom_point(size = 5, alpha = 0.4) + 
  scale_colour_discrete(name="significance") +
                         # breaks=c("FALSE", "TRUE"),
                         # labels=c(">.05", "<.05")) +
#  geom_linerange() +
  theme_light() +
  theme(text = element_text(size = 30),
        panel.background = element_rect(fill = "grey97"),
          panel.border = element_blank()) +
  guides(color = FALSE) +
  # theme(legend.position="top") + 
  xlab("Variable Inflation Factor") + ylab("Ridge \u03b1=0 \u03bb=.36\nSD of Permuted Null") +
  ylim(0, .021)+ 
                          scale_color_jcolors(palette = "pal8")

11.2.8 Ridge null coefficients ±2SD as a function of VIF

ggplot(enet_gfactor_target_and_null_renamed, aes(x = VIF, y = ridge_null_Wmean, color= significance, 
                                     ymin = ridge_null_Wmean - (2 * ridge_null_Wsd), 
                                     ymax = ridge_null_Wmean + (2 * ridge_null_Wsd))) +
  geom_point(size = 5, alpha = 0.4) + 
  scale_colour_discrete(name="significance") +
                         # breaks=c("FALSE", "TRUE"),
                         # labels=c(">.05", "< .05")) +
  geom_linerange() +
  theme_light() +
  theme(text = element_text(size = 30),
        panel.background = element_rect(fill = "grey97"),
          panel.border = element_blank()) +
  guides(color = FALSE) +
  #theme(legend.position="top") + 
  xlab("Variable Inflation Factor") + ylab("Ridge \u03b1=0 \u03bb=.36\nCoefficients of\n Permuted Null±2SD")+ 
                          scale_color_jcolors(palette = "pal8") 

11.2.9 LASSO null SD as a function of VIF

enet_gfactor_target_and_null_renamed %>%
ggplot(aes(x = VIF, y = lasso_null_Wsd, color = significance)) +
  geom_point(size = 5, alpha = 0.4) + 
  scale_colour_discrete(name="significance") +
                         # breaks=c("FALSE", "TRUE"),
                         # labels=c(">.05", "<.05")) +
#  geom_linerange() +
  theme_light() +
  theme(text = element_text(size = 30),
        panel.background = element_rect(fill = "grey97"),
          panel.border = element_blank()) +
  guides(color = FALSE) +
  # theme(legend.position="top") + 
  xlab("Variable Inflation Factor") + ylab("LASSO \u03b1=1 \u03bb=.01\nSD of Permuted Null") +
  ylim(0, .021)+ 
                          scale_color_jcolors(palette = "pal8")

11.2.10 LASSO null coefficients ±2SD as a function of VIF

ggplot(enet_gfactor_target_and_null_renamed, aes(x = VIF, y = lasso_null_Wmean, color= significance, 
                                     ymin = lasso_null_Wmean - (2 * lasso_null_Wsd), 
                                     ymax = lasso_null_Wmean + (2 * lasso_null_Wsd))) +
  geom_point(size = 5, alpha = 0.4) + 
  scale_colour_discrete(name="significance") +
                         # breaks=c("FALSE", "TRUE"),
                         # labels=c(">.05", "< .05")) +
  geom_linerange() +
  theme_light() +
  theme(text = element_text(size = 30),
        panel.background = element_rect(fill = "grey97"),
          panel.border = element_blank()) +
  guides(color = FALSE) +
  #theme(legend.position="top") + 
  xlab("Variable Inflation Factor") + ylab("LASSO \u03b1=1 \u03bb=.01\nCoefficients of\n Permuted Null±2SD") + 
                          scale_color_jcolors(palette = "pal8")

11.2.11 Enet, ridge, and LASSO coefficients ±2SD as a function of VIF

ggplot(enet_gfactor_target_and_null, aes(x = VIF, y = wmean, 
                                           color= significance, 
                                           ymin = wmean - (2 * wsd), 
                                           ymax = wmean + (2 * wsd),
                                           shape = type)) +
  geom_point(size = 5, alpha = 0.4) + 
  scale_colour_discrete(name="significance") +
  # scale_colour_discrete(name="p value",
  #                        breaks=c("FALSE", "TRUE"),
  #                        labels=c("> .05", "< .05")) +
  scale_shape_discrete(name="",
                         breaks=c("Null permuted models", "Target models"),
                         labels=c("Null", "Target")) +
  geom_linerange() +
  theme_light() +
  theme(text = element_text(size = 30),
        panel.background = element_rect(fill = "grey97"),
          panel.border = element_blank()) +
  guides(color = FALSE) +
 #theme(legend.justification=c(1,0), legend.position=c(1,0)) +
  theme(legend.position="top") + 
  xlab("Variable Inflation Factor") + ylab("Elastic Net \u03b1=.05 \u03bb=.13\nCoefficients ± 2SD") + 
                          scale_color_jcolors(palette = "pal8")

ggplot(enet_gfactor_target_and_null, aes(x = VIF, y = ridge_wmean, 
                                           color= significance, 
                                           ymin = ridge_wmean - (2 * ridge_wsd), 
                                           ymax = ridge_wmean + (2 * ridge_wsd),
                                           shape = type)) +
  geom_point(size = 5, alpha = 0.4) + 
  scale_colour_discrete(name="significance") +
  # scale_colour_discrete(name="p value",
  #                        breaks=c("FALSE", "TRUE"),
  #                        labels=c("> .05", "< .05")) +
  scale_shape_discrete(name="",
                         breaks=c("Null permuted models", "Target models"),
                         labels=c("Null", "Target")) +
  geom_linerange() +
  theme_light() +
  theme(text = element_text(size = 30),
        panel.background = element_rect(fill = "grey97"),
          panel.border = element_blank()) +
  guides(color = FALSE) +
 #theme(legend.justification=c(1,0), legend.position=c(1,0)) +
  theme(legend.position="top") + 
  xlab("Variable Inflation Factor") + ylab("Ridge \u03b1=0 \u03bb=.36\nCoefficients ± 2SD") + 
                          scale_color_jcolors(palette = "pal8")

ggplot(enet_gfactor_target_and_null, aes(x = VIF, y = lasso_wmean, 
                                           color= significance, 
                                           ymin = lasso_wmean - (2 * lasso_wsd), 
                                           ymax = lasso_wmean + (2 * lasso_wsd),
                                           shape = type)) +
  geom_point(size = 5, alpha = 0.4) + 
  scale_colour_discrete(name="significance") +
  # scale_colour_discrete(name="p value",
  #                        breaks=c("FALSE", "TRUE"),
  #                        labels=c("> .05", "< .05")) +
  scale_shape_discrete(name="",
                         breaks=c("Null permuted models", "Target models"),
                         labels=c("Null", "Target")) +
  geom_linerange() +
  theme_light() +
  theme(text = element_text(size = 30),
        panel.background = element_rect(fill = "grey97"),
          panel.border = element_blank()) +
  guides(color = FALSE) +
 #theme(legend.justification=c(1,0), legend.position=c(1,0)) +
  theme(legend.position="top") + 
  xlab("Variable Inflation Factor") + ylab("LASSO \u03b1=1 \u03bb=.01\nCoefficients ± 2SD") + 
                          scale_color_jcolors(palette = "pal8")

11.2.12 Enet coefficients ±2SD as a function of VIF

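# As in 11.2.11, but with the ±2SD ranges drawn in black: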
ggplot(enet_gfactor_target_and_null, aes(x = VIF, y = wmean, 
                                           color= significance, 
                                           ymin = wmean - (2 * wsd), 
                                           ymax = wmean + (2 * wsd),
                                           shape = type)) +
 geom_point(size = 4, alpha = 0.4) + 
  scale_colour_discrete(name="significance") +
  # scale_colour_discrete(name="p value",
  #                        breaks=c("FALSE", "TRUE"),
  #                        labels=c("> .05", "< .05")) +
  scale_shape_discrete(name="",
                         breaks=c("Null permuted models", "Target models"),
                         labels=c("Null", "Target")) +
  geom_linerange(color = 'black') +
  theme_light() +
  theme(text = element_text(size = 30),
        panel.background = element_rect(fill = "grey97"),
          panel.border = element_blank()) +
  guides(color = FALSE) +
 #theme(legend.justification=c(1,0), legend.position=c(1,0)) +
  theme(legend.position="top") + 
  xlab("Variable Inflation Factor") + ylab("Elastic Net \u03b1=.05 \u03bb=.13\nCoefficients ± 2SD")+ 
                          scale_color_jcolors(palette = "pal8") 

11.2.13 Ridge coefficients ±2SD as a function of VIF

ggplot(enet_gfactor_target_and_null, aes(x = VIF, y = ridge_wmean, 
                                           color= significance, 
                                           ymin = ridge_wmean - (2 * ridge_wsd), 
                                           ymax = ridge_wmean + (2 * ridge_wsd),
                                           shape = type)) +
 geom_point(size = 4, alpha = 0.4) + 
  scale_colour_discrete(name="significance") +
  # scale_colour_discrete(name="p value",
  #                        breaks=c("FALSE", "TRUE"),
  #                        labels=c("> .05", "< .05")) +
  scale_shape_discrete(name="",
                         breaks=c("Null permuted models", "Target models"),
                         labels=c("Null", "Target")) +
  geom_linerange(color = 'black') +
  theme_light() +
  theme(text = element_text(size = 30),
        panel.background = element_rect(fill = "grey97"),
          panel.border = element_blank()) +
  guides(color = FALSE) +
 #theme(legend.justification=c(1,0), legend.position=c(1,0)) +
  theme(legend.position="top") + 
  xlab("Variable Inflation Factor") + ylab("Ridge \u03b1=0 \u03bb=.36\nCoefficients ± 2SD") + 
                          scale_color_jcolors(palette = "pal8")

11.2.14 LASSO coefficients ±2SD as a function of VIF

ggplot(enet_gfactor_target_and_null, aes(x = VIF, y = lasso_wmean, 
                                           color= significance, 
                                           ymin = lasso_wmean - (2 * lasso_wsd), 
                                           ymax = lasso_wmean + (2 * lasso_wsd),
                                           shape = type)) +
 geom_point(size = 4, alpha = 0.4) + 
  scale_colour_discrete(name="significance") +
  # scale_colour_discrete(name="p value",
  #                        breaks=c("FALSE", "TRUE"),
  #                        labels=c("> .05", "< .05")) +
  scale_shape_discrete(name="",
                         breaks=c("Null permuted models", "Target models"),
                         labels=c("Null", "Target")) +
  geom_linerange(color = 'black') +
  theme_light() +
  theme(text = element_text(size = 30),
        panel.background = element_rect(fill = "grey97"),
          panel.border = element_blank()) +
  guides(color = FALSE) +
 #theme(legend.justification=c(1,0), legend.position=c(1,0)) +
  theme(legend.position="top") + 
  xlab("Variable Inflation Factor") + ylab("LASSO \u03b1=1 \u03bb=.01\nCoefficients ± 2SD") + 
                          scale_color_jcolors(palette = "pal8")

12 R session and libraries

pander::pander(sessionInfo())

R version 4.1.3 (2022-03-10)

Platform: x86_64-w64-mingw32/x64 (64-bit)

locale: LC_COLLATE=English_New Zealand.1252, LC_CTYPE=English_New Zealand.1252, LC_MONETARY=English_New Zealand.1252, LC_NUMERIC=C and LC_TIME=English_New Zealand.1252

attached base packages: stats, graphics, grDevices, utils, datasets, methods and base

other attached packages: jcolors(v.0.0.4), olsrr(v.0.5.3), iml(v.0.10.1), ggsegDesterieux(v.1.0.1.002), ggsegExtra(v.1.5.33.004), ggseg3d(v.1.6.3), ggseg(v.1.6.4), GGally(v.2.1.2), glmnet(v.4.1-3), Matrix(v.1.4-0), fastshap(v.0.0.7), doFuture(v.0.12.0), foreach(v.1.5.2), furrr(v.0.2.3), future(v.1.24.0), eNetXplorer(v.1.1.3), ggdist(v.3.1.1), vip(v.0.3.2), cowplot(v.1.1.1), yardstick(v.0.0.9), workflowsets(v.0.2.1), workflows(v.0.2.6), tune(v.0.2.0), rsample(v.0.1.1), recipes(v.0.2.0), parsnip(v.0.2.1), modeldata(v.0.1.1), infer(v.1.0.0), dials(v.0.1.0), scales(v.1.1.1), broom(v.0.7.12), tidymodels(v.0.2.0), forcats(v.0.5.1), stringr(v.1.4.0), dplyr(v.1.0.8), purrr(v.0.3.4), readr(v.2.1.2), tidyr(v.1.2.0), tibble(v.3.1.6), ggplot2(v.3.3.5) and tidyverse(v.1.3.1)

loaded via a namespace (and not attached): rgl(v.0.108.3), Hmisc(v.4.6-0), svglite(v.2.1.0), corpcor(v.1.6.10), class(v.7.3-20), crayon(v.1.5.1), MASS(v.7.3-55), nlme(v.3.1-155), backports(v.1.4.1), reprex(v.2.0.1), ggcorrplot(v.0.1.3), rlang(v.1.0.2), readxl(v.1.3.1), nloptr(v.2.0.0), extrafontdb(v.1.0), xgboost(v.1.5.2.1), extrafont(v.0.17), bit64(v.4.0.5), glue(v.1.6.2), parallel(v.4.1.3), oro.nifti(v.0.11.0), classInt(v.0.4-3), haven(v.2.4.3), tidyselect(v.1.1.2), RRPP(v.1.2.2), XML(v.3.99-0.9), calibrate(v.1.7.7), sf(v.1.0-7), ggpubr(v.0.4.0), SuppDists(v.1.1-9.7), distributional(v.0.3.0), xtable(v.1.8-4), magrittr(v.2.0.3), evaluate(v.0.15), cli(v.3.2.0), rstudioapi(v.0.13), sp(v.1.4-6), DiceDesign(v.1.9), bslib(v.0.3.1), rpart(v.4.1.16), pbmcapply(v.1.5.0), numform(v.0.7.0), xfun(v.0.30), cluster(v.2.1.2), caTools(v.1.18.2), expm(v.0.999-6), RNifti(v.1.4.0), ape(v.5.6-2), listenv(v.0.8.0), png(v.0.1-7), reshape(v.0.8.8), ipred(v.0.9-12), withr(v.2.5.0), neurobase(v.1.32.1), bitops(v.1.0-7), ranger(v.0.13.1), freesurfer(v.1.6.8), plyr(v.1.8.6), cellranger(v.1.1.0), hardhat(v.0.2.0), e1071(v.1.7-9), pROC(v.1.18.0), coda(v.0.19-4), pillar(v.1.7.0), RcppParallel(v.5.1.5), gplots(v.3.1.1), fs(v.1.5.2), kernlab(v.0.9-29), raster(v.3.5-15), geomorph(v.4.0.3), vctrs(v.0.4.0), pbivnorm(v.0.6.0), ellipsis(v.0.3.2), generics(v.0.1.2), nortest(v.1.0-4), lava(v.1.6.10), rgdal(v.1.5-29), tools(v.4.1.3), foreign(v.0.8-82), munsell(v.0.5.0), proxy(v.0.4-26), fastmap(v.1.1.0), compiler(v.4.1.3), abind(v.1.4-5), stars(v.0.5-5), plotly(v.4.10.0), semPlot(v.1.1.5), prodlim(v.2019.11.13), gridExtra(v.2.3), OpenMx(v.2.20.6), lattice(v.0.20-45), utf8(v.1.2.2), jsonlite(v.1.8.0), arm(v.1.12-2), pbapply(v.1.5-0), carData(v.3.0-5), lazyeval(v.0.2.2), car(v.3.0-12), latticeExtra(v.0.6-29), R.utils(v.2.11.0), goftest(v.1.2-3), checkmate(v.2.0.0), rmarkdown(v.2.13.2), openxlsx(v.4.2.5), webshot(v.0.5.2), pander(v.0.6.4), igraph(v.1.2.11), survival(v.3.2-13), numDeriv(v.2016.8-1.1), yaml(v.2.3.5), timeROC(v.0.4), systemfonts(v.1.0.4), survivalROC(v.1.0.3), htmltools(v.0.5.2), lavaan(v.0.6-10), viridisLite(v.0.4.0), digest(v.0.6.29), assertthat(v.0.2.1), timereg(v.2.0.1), Rttf2pt1(v.1.3.10), lwgeom(v.0.2-8), units(v.0.8-0), future.apply(v.1.8.1), rockchalk(v.1.8.151), data.table(v.1.14.2), R.oo(v.1.24.0), lhs(v.1.1.4), splines(v.4.1.3), Formula(v.1.2-4), labeling(v.0.4.2), pec(v.2022.03.06), hms(v.1.1.1), modelr(v.0.1.8), colorspace(v.2.0-3), base64enc(v.0.1-3), mnormt(v.2.0.2), survcomp(v.1.44.1), shape(v.1.4.6), tmvnsim(v.1.0-2), Metrics(v.0.1.4), nnet(v.7.3-17), sass(v.0.4.0), Rcpp(v.1.0.8), mvtnorm(v.1.1-3), GPfit(v.1.0-8), fansi(v.1.0.3), tzdb(v.0.2.0), parallelly(v.1.30.0), R6(v.2.5.1), grid(v.4.1.3), lifecycle(v.1.0.1), zip(v.2.2.0), ggsignif(v.0.6.3), minqa(v.1.2.4), mi(v.1.0), jquerylib(v.0.1.4), qgraph(v.1.9.2), glasso(v.1.11), prediction(v.0.3.14), RColorBrewer(v.1.1-3), iterators(v.1.0.14), gower(v.1.0.0), htmlwidgets(v.1.5.4), terra(v.1.5-21), rvest(v.1.0.2), globals(v.0.14.0), htmlTable(v.2.4.0), codetools(v.0.2-18), matrixStats(v.0.61.0), lubridate(v.1.8.0), gtools(v.3.9.2), prettyunits(v.1.1.1), psych(v.2.1.9), dbplyr(v.2.1.1), R.methodsS3(v.1.8.1), gtable(v.0.3.0), DBI(v.1.1.2), stats4(v.4.1.3), httr(v.1.4.2), highr(v.0.9), KernSmooth(v.2.23-20), smoothr(v.0.2.2), stringi(v.1.7.6), vroom(v.1.5.7), progress(v.1.2.2), reshape2(v.1.4.4), farver(v.2.1.0), fdrtool(v.1.2.17), magick(v.2.7.3), timeDate(v.3043.102), lisrelToR(v.0.1.4), xml2(v.1.3.3), boot(v.1.3-28), kableExtra(v.1.3.4), rmeta(v.3.0), 
lme4(v.1.1-28), sem(v.3.1-14), kutils(v.1.70), bit(v.4.0.4), jpeg(v.0.1-9), pkgconfig(v.2.0.3), rstatix(v.0.7.0), bootstrap(v.2019.6) and knitr(v.1.37)