# Force keras_bert to use the Keras bundled with TensorFlow (tf.keras).
# Must be set before any TensorFlow/Keras Python module is first imported.
Sys.setenv(TF_KERAS=1) 
# Show which Python interpreter reticulate bound to (printed below).
reticulate::py_config()
python:         /home/ron/.local/share/r-miniconda/envs/r-reticulate/bin/python
libpython:      /home/ron/.local/share/r-miniconda/envs/r-reticulate/lib/libpython3.6m.so
pythonhome:     /home/ron/.local/share/r-miniconda/envs/r-reticulate:/home/ron/.local/share/r-miniconda/envs/r-reticulate
version:        3.6.13 | packaged by conda-forge | (default, Feb 19 2021, 05:36:01)  [GCC 9.3.0]
numpy:          /home/ron/.local/lib/python3.6/site-packages/numpy
numpy_version:  1.19.5
# Sanity check: the keras-bert Python package must be importable in this env.
reticulate::py_module_available('keras_bert')
[1] TRUE
# Report the installed TensorFlow version (2.6 per the output below).
tensorflow::tf_version()
[1] ‘2.6’
# Locations of the pre-trained BERT-Base (uncased, 12-layer, 768-hidden) files
# downloaded from the Google BERT release.
pretrained_path = './bert_models/uncased_L-12_H-768_A-12'
config_path = file.path(pretrained_path, 'bert_config.json')
checkpoint_path = file.path(pretrained_path, 
                            'bert_model.ckpt')
vocab_path = file.path(pretrained_path, 'vocab.txt')
library(reticulate)
library(keras)
# Import the keras-bert Python module; all BERT helpers come from here.
k_bert = import('keras_bert')
# WordPiece vocabulary -> tokenizer used to encode raw text below.
token_dict = k_bert$load_vocabulary(vocab_path)
tokenizer = k_bert$Tokenizer(token_dict)
# Fine-tuning hyperparameters.
seq_length = 128L    # max tokens per example (pad/truncate to this length)
bch_size = 24        # training batch size
epochs = 8
learning_rate = 1e-4

# Column names expected in the input CSV.
DATA_COLUMN = 'text'       # raw text input
LABEL_COLUMN = 'declined'  # binary target (0/1)
# Load the pre-trained BERT weights from the TF checkpoint.
# training=TRUE keeps the pre-training heads in the graph (we reuse the
# NSP-Dense pooled output below); trainable=TRUE fine-tunes all layers.
# Use TRUE rather than T: T is an ordinary variable and can be reassigned.
model = k_bert$load_trained_model_from_checkpoint(
  config_path,
  checkpoint_path,
  training=TRUE,
  trainable=TRUE,
  seq_len=seq_length)
2022-03-12 04:25:56.421131: I tensorflow/core/platform/cpu_feature_guard.cc:142] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations:  AVX2 FMA
To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
2022-03-12 04:25:56.776775: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1510] Created device /job:localhost/replica:0/task:0/device:GPU:0 with 11329 MB memory:  -> device: 0, name: NVIDIA TITAN X (Pascal), pci bus id: 0000:41:00.0, compute capability: 6.1
# tokenize text
# Encode every row of `dataset` with the BERT tokenizer.
# Uses globals: tokenizer, seq_length, DATA_COLUMN, LABEL_COLUMN.
# Returns list(indices, segments, target):
#   indices  - list of (seq_length x 1) token-id matrices
#   segments - list of (seq_length x 1) segment-id matrices
#   target   - list of scalar labels
tokenize_fun = function(dataset) {
  n = nrow(dataset)
  # Preallocate result lists: growing with append() inside the loop copies
  # the list on every iteration (O(n^2)).
  indices = vector("list", n)
  segments = vector("list", n)
  target = vector("list", n)
  # seq_len(n) is safe for n == 0, where 1:n would yield c(1, 0).
  for (i in seq_len(n)) {
    c(indices_tok, segments_tok) %<-% tokenizer$encode(dataset[[DATA_COLUMN]][i], 
                                                       max_len=seq_length)
    indices[[i]] = as.matrix(indices_tok)
    segments[[i]] = as.matrix(segments_tok)
    target[[i]] = dataset[[LABEL_COLUMN]][i]
  }
  return(list(indices, segments, target))
}

# read data
# Load at most `rows_to_read` rows from the CSV at `dir`, tokenize them,
# and return list(token_indices, segment_ids, labels).
dt_data = function(dir, rows_to_read){
  raw = data.table::fread(dir, nrows=rows_to_read)
  tokenized = tokenize_fun(raw)
  return(list(tokenized[[1]], tokenized[[2]], tokenized[[3]]))
}
# One-time prep kept for reference: derive the binary `declined` label from
# the Decision column and write the model's input CSV.
# write_csv(sub_text %>%
#             mutate(declined = if_else(Decision == "Declined", 1, 0)) %>%
#             select(text, declined),
#           "sub_text.csv")
# Read the first 1000 rows and tokenize into token ids / segment ids / labels.
c(x_train, x_segment, y_train) %<-% dt_data("sub_text.csv", 1000)
Registered S3 method overwritten by 'data.table':
  method           from
  print.data.table     
# Bind the per-row column vectors into (n_samples x seq_length) matrices
# (labels become an (n_samples x 1) matrix).
train    = t(do.call(cbind, x_train))
segments = t(do.call(cbind, x_segment))
targets  = t(do.call(cbind, y_train))

# Two-input list expected by the BERT model: token ids + segment ids.
concat = list(train, segments)

# Derive the AdamWarmup schedule lengths from sample count, batch size
# and epoch count.
c(decay_steps, warmup_steps) %<-% k_bert$calc_train_steps(
  length(targets),
  batch_size=bch_size,
  epochs=epochs
)
# keras is already attached earlier in the script; the second library(keras)
# call was redundant and has been removed.

# Reuse BERT's two input layers: token ids and segment ids.
input_1 = get_layer(model, name = 'Input-Token')$input
input_2 = get_layer(model, name = 'Input-Segment')$input
inputs = list(input_1, input_2)

# NSP-Dense: the pooled [CLS] representation exposed because the model was
# loaded with training=TRUE.
dense = get_layer(model, name = 'NSP-Dense')$output

# Binary classification head; stddev = 0.02 matches BERT's own
# truncated-normal initializer.
outputs = dense %>% layer_dense(units=1L, activation='sigmoid',
                         kernel_initializer=initializer_truncated_normal(stddev = 0.02),
                         name = 'output')

# Rebind `model` to the fine-tuning graph: (tokens, segments) -> sigmoid.
model = keras_model(inputs = inputs, outputs = outputs)
# Compile with keras-bert's AdamWarmup optimizer (linear warmup then decay).
# Name the `optimizer` argument instead of passing it positionally.
model %>% compile(
  optimizer = k_bert$AdamWarmup(decay_steps=decay_steps, 
                                warmup_steps=warmup_steps,
                                learning_rate=learning_rate),
  loss = 'binary_crossentropy',
  metrics = list('accuracy', 'AUC')
)

# Fine-tune; the last 20% of rows are held out for validation.
history <- model %>% fit(
  concat,
  targets,
  epochs=epochs,
  batch_size=bch_size, validation_split=0.2)
2022-03-12 04:26:20.520348: I tensorflow/compiler/mlir/mlir_graph_optimization_pass.cc:185] None of the MLIR Optimization Passes are enabled (registered 2)
Epoch 1/8

 1/23 [>.............................] - ETA: 11:18 - loss: 0.5503 - accuracy: 0.8750 - auc: 0.7937
 2/23 [=>............................] - ETA: 8s - loss: 0.5546 - accuracy: 0.8542 - auc: 0.7753   
 3/23 [==>...........................] - ETA: 8s - loss: 0.5642 - accuracy: 0.8333 - auc: 0.6021
 4/23 [====>.........................] - ETA: 7s - loss: 0.5593 - accuracy: 0.8229 - auc: 0.5346
 5/23 [=====>........................] - ETA: 7s - loss: 0.5470 - accuracy: 0.8250 - auc: 0.5072
 6/23 [======>.......................] - ETA: 6s - loss: 0.5408 - accuracy: 0.8194 - auc: 0.4896
 7/23 [========>.....................] - ETA: 6s - loss: 0.5362 - accuracy: 0.8155 - auc: 0.4829
 8/23 [=========>....................] - ETA: 6s - loss: 0.5244 - accuracy: 0.8177 - auc: 0.5116
 9/23 [==========>...................] - ETA: 5s - loss: 0.5085 - accuracy: 0.8241 - auc: 0.5433
10/23 [============>.................] - ETA: 5s - loss: 0.5031 - accuracy: 0.8250 - auc: 0.5391
11/23 [=============>................] - ETA: 4s - loss: 0.4986 - accuracy: 0.8258 - auc: 0.5335
12/23 [==============>...............] - ETA: 4s - loss: 0.5203 - accuracy: 0.8125 - auc: 0.4846
13/23 [===============>..............] - ETA: 4s - loss: 0.5150 - accuracy: 0.8141 - auc: 0.4858
14/23 [=================>............] - ETA: 3s - loss: 0.5072 - accuracy: 0.8185 - auc: 0.4838
15/23 [==================>...........] - ETA: 3s - loss: 0.5168 - accuracy: 0.8111 - auc: 0.4709
16/23 [===================>..........] - ETA: 2s - loss: 0.5084 - accuracy: 0.8151 - auc: 0.4766
17/23 [=====================>........] - ETA: 2s - loss: 0.5107 - accuracy: 0.8113 - auc: 0.4875
18/23 [======================>.......] - ETA: 2s - loss: 0.5046 - accuracy: 0.8148 - auc: 0.4871
19/23 [=======================>......] - ETA: 1s - loss: 0.4984 - accuracy: 0.8180 - auc: 0.4921
20/23 [=========================>....] - ETA: 1s - loss: 0.5070 - accuracy: 0.8104 - auc: 0.4981
21/23 [==========================>...] - ETA: 0s - loss: 0.4986 - accuracy: 0.8155 - auc: 0.5018
22/23 [===========================>..] - ETA: 0s - loss: 0.4987 - accuracy: 0.8144 - auc: 0.5057
23/23 [==============================] - ETA: 0s - loss: 0.5018 - accuracy: 0.8120 - auc: 0.5063
23/23 [==============================] - 44s 576ms/step - loss: 0.5018 - accuracy: 0.8120 - auc: 0.5063 - val_loss: 0.4496 - val_accuracy: 0.8175 - val_auc: 0.7036
Epoch 2/8

 1/23 [>.............................] - ETA: 8s - loss: 0.5893 - accuracy: 0.7500 - auc: 0.4861
 2/23 [=>............................] - ETA: 8s - loss: 0.4806 - accuracy: 0.8125 - auc: 0.5427
 3/23 [==>...........................] - ETA: 8s - loss: 0.4236 - accuracy: 0.8472 - auc: 0.5917
 4/23 [====>.........................] - ETA: 7s - loss: 0.4696 - accuracy: 0.8125 - auc: 0.6125
 5/23 [=====>........................] - ETA: 7s - loss: 0.4459 - accuracy: 0.8250 - auc: 0.6556
 6/23 [======>.......................] - ETA: 6s - loss: 0.4319 - accuracy: 0.8333 - auc: 0.6609
 7/23 [========>.....................] - ETA: 6s - loss: 0.4271 - accuracy: 0.8333 - auc: 0.6855
 8/23 [=========>....................] - ETA: 6s - loss: 0.4285 - accuracy: 0.8333 - auc: 0.6734
 9/23 [==========>...................] - ETA: 5s - loss: 0.4157 - accuracy: 0.8380 - auc: 0.6991
10/23 [============>.................] - ETA: 5s - loss: 0.4173 - accuracy: 0.8333 - auc: 0.7219
11/23 [=============>................] - ETA: 4s - loss: 0.4089 - accuracy: 0.8371 - auc: 0.7301
12/23 [==============>...............] - ETA: 4s - loss: 0.4035 - accuracy: 0.8368 - auc: 0.7491
13/23 [===============>..............] - ETA: 4s - loss: 0.4083 - accuracy: 0.8301 - auc: 0.7617
14/23 [=================>............] - ETA: 3s - loss: 0.4083 - accuracy: 0.8274 - auc: 0.7703
15/23 [==================>...........] - ETA: 3s - loss: 0.4063 - accuracy: 0.8278 - auc: 0.7680
16/23 [===================>..........] - ETA: 2s - loss: 0.4020 - accuracy: 0.8255 - auc: 0.7873
17/23 [=====================>........] - ETA: 2s - loss: 0.4153 - accuracy: 0.8235 - auc: 0.7611
18/23 [======================>.......] - ETA: 2s - loss: 0.4151 - accuracy: 0.8241 - auc: 0.7598
19/23 [=======================>......] - ETA: 1s - loss: 0.4236 - accuracy: 0.8224 - auc: 0.7407
20/23 [=========================>....] - ETA: 1s - loss: 0.4301 - accuracy: 0.8188 - auc: 0.7347
21/23 [==========================>...] - ETA: 0s - loss: 0.4338 - accuracy: 0.8175 - auc: 0.7250
22/23 [===========================>..] - ETA: 0s - loss: 0.4325 - accuracy: 0.8182 - auc: 0.7330
23/23 [==============================] - ETA: 0s - loss: 0.4361 - accuracy: 0.8157 - auc: 0.7308
23/23 [==============================] - 10s 437ms/step - loss: 0.4361 - accuracy: 0.8157 - auc: 0.7308 - val_loss: 0.4420 - val_accuracy: 0.8102 - val_auc: 0.7729
Epoch 3/8

 1/23 [>.............................] - ETA: 8s - loss: 0.3895 - accuracy: 0.8333 - auc: 0.4205
 2/23 [=>............................] - ETA: 8s - loss: 0.3294 - accuracy: 0.8750 - auc: 0.7017
 3/23 [==>...........................] - ETA: 8s - loss: 0.3761 - accuracy: 0.8472 - auc: 0.8110
 4/23 [====>.........................] - ETA: 7s - loss: 0.3545 - accuracy: 0.8542 - auc: 0.8337
 5/23 [=====>........................] - ETA: 7s - loss: 0.3614 - accuracy: 0.8167 - auc: 0.7949
 6/23 [======>.......................] - ETA: 6s - loss: 0.3648 - accuracy: 0.8333 - auc: 0.8205
 7/23 [========>.....................] - ETA: 6s - loss: 0.3491 - accuracy: 0.8333 - auc: 0.8419
 8/23 [=========>....................] - ETA: 6s - loss: 0.3336 - accuracy: 0.8438 - auc: 0.8619
 9/23 [==========>...................] - ETA: 5s - loss: 0.3604 - accuracy: 0.8241 - auc: 0.8445
10/23 [============>.................] - ETA: 5s - loss: 0.3508 - accuracy: 0.8292 - auc: 0.8633
11/23 [=============>................] - ETA: 4s - loss: 0.3322 - accuracy: 0.8447 - auc: 0.8710
12/23 [==============>...............] - ETA: 4s - loss: 0.3297 - accuracy: 0.8438 - auc: 0.8745
13/23 [===============>..............] - ETA: 4s - loss: 0.3155 - accuracy: 0.8526 - auc: 0.8917
14/23 [=================>............] - ETA: 3s - loss: 0.3249 - accuracy: 0.8542 - auc: 0.8884
15/23 [==================>...........] - ETA: 3s - loss: 0.3188 - accuracy: 0.8528 - auc: 0.8904
16/23 [===================>..........] - ETA: 2s - loss: 0.3237 - accuracy: 0.8464 - auc: 0.8827
17/23 [=====================>........] - ETA: 2s - loss: 0.3196 - accuracy: 0.8480 - auc: 0.8829
18/23 [======================>.......] - ETA: 2s - loss: 0.3331 - accuracy: 0.8403 - auc: 0.8705
19/23 [=======================>......] - ETA: 1s - loss: 0.3332 - accuracy: 0.8377 - auc: 0.8722
20/23 [=========================>....] - ETA: 1s - loss: 0.3339 - accuracy: 0.8375 - auc: 0.8756
21/23 [==========================>...] - ETA: 0s - loss: 0.3395 - accuracy: 0.8313 - auc: 0.8673
22/23 [===========================>..] - ETA: 0s - loss: 0.3388 - accuracy: 0.8333 - auc: 0.8684
23/23 [==============================] - ETA: 0s - loss: 0.3404 - accuracy: 0.8339 - auc: 0.8651
23/23 [==============================] - 10s 439ms/step - loss: 0.3404 - accuracy: 0.8339 - auc: 0.8651 - val_loss: 0.4564 - val_accuracy: 0.8248 - val_auc: 0.7952
Epoch 4/8

 1/23 [>.............................] - ETA: 9s - loss: 0.1654 - accuracy: 0.9167 - auc: 1.0000
 2/23 [=>............................] - ETA: 8s - loss: 0.1652 - accuracy: 0.9167 - auc: 0.9930
 3/23 [==>...........................] - ETA: 8s - loss: 0.2031 - accuracy: 0.9167 - auc: 0.9694
 4/23 [====>.........................] - ETA: 7s - loss: 0.2274 - accuracy: 0.8958 - auc: 0.9691
 5/23 [=====>........................] - ETA: 7s - loss: 0.2608 - accuracy: 0.8750 - auc: 0.9589
 6/23 [======>.......................] - ETA: 6s - loss: 0.2568 - accuracy: 0.8819 - auc: 0.9485
 7/23 [========>.....................] - ETA: 6s - loss: 0.2618 - accuracy: 0.8869 - auc: 0.9297
 8/23 [=========>....................] - ETA: 6s - loss: 0.2628 - accuracy: 0.8906 - auc: 0.9214
 9/23 [==========>...................] - ETA: 5s - loss: 0.2571 - accuracy: 0.8935 - auc: 0.9265
10/23 [============>.................] - ETA: 5s - loss: 0.2478 - accuracy: 0.9000 - auc: 0.9354
11/23 [=============>................] - ETA: 4s - loss: 0.2323 - accuracy: 0.9091 - auc: 0.9442
12/23 [==============>...............] - ETA: 4s - loss: 0.2252 - accuracy: 0.9167 - auc: 0.9548
13/23 [===============>..............] - ETA: 4s - loss: 0.2306 - accuracy: 0.9135 - auc: 0.9529
14/23 [=================>............] - ETA: 3s - loss: 0.2328 - accuracy: 0.9167 - auc: 0.9497
15/23 [==================>...........] - ETA: 3s - loss: 0.2424 - accuracy: 0.9139 - auc: 0.9444
16/23 [===================>..........] - ETA: 2s - loss: 0.2346 - accuracy: 0.9167 - auc: 0.9455
17/23 [=====================>........] - ETA: 2s - loss: 0.2266 - accuracy: 0.9216 - auc: 0.9478
18/23 [======================>.......] - ETA: 2s - loss: 0.2249 - accuracy: 0.9213 - auc: 0.9471
19/23 [=======================>......] - ETA: 1s - loss: 0.2238 - accuracy: 0.9211 - auc: 0.9482
20/23 [=========================>....] - ETA: 1s - loss: 0.2217 - accuracy: 0.9208 - auc: 0.9469
21/23 [==========================>...] - ETA: 0s - loss: 0.2156 - accuracy: 0.9226 - auc: 0.9496
22/23 [===========================>..] - ETA: 0s - loss: 0.2160 - accuracy: 0.9223 - auc: 0.9493
23/23 [==============================] - ETA: 0s - loss: 0.2163 - accuracy: 0.9234 - auc: 0.9466
23/23 [==============================] - 10s 443ms/step - loss: 0.2163 - accuracy: 0.9234 - auc: 0.9466 - val_loss: 0.5839 - val_accuracy: 0.8029 - val_auc: 0.7954
Epoch 5/8

 1/23 [>.............................] - ETA: 10s - loss: 0.0778 - accuracy: 0.9583 - auc: 1.0000
 2/23 [=>............................] - ETA: 9s - loss: 0.0590 - accuracy: 0.9792 - auc: 1.0000 
 3/23 [==>...........................] - ETA: 8s - loss: 0.0976 - accuracy: 0.9583 - auc: 0.9972
 4/23 [====>.........................] - ETA: 8s - loss: 0.1496 - accuracy: 0.9479 - auc: 0.9957
 5/23 [=====>........................] - ETA: 7s - loss: 0.1358 - accuracy: 0.9500 - auc: 0.9960
 6/23 [======>.......................] - ETA: 7s - loss: 0.1164 - accuracy: 0.9583 - auc: 0.9964
 7/23 [========>.....................] - ETA: 6s - loss: 0.1163 - accuracy: 0.9583 - auc: 0.9939
 8/23 [=========>....................] - ETA: 6s - loss: 0.1095 - accuracy: 0.9635 - auc: 0.9937
 9/23 [==========>...................] - ETA: 6s - loss: 0.0996 - accuracy: 0.9676 - auc: 0.9944
10/23 [============>.................] - ETA: 5s - loss: 0.0913 - accuracy: 0.9708 - auc: 0.9954
11/23 [=============>................] - ETA: 5s - loss: 0.0866 - accuracy: 0.9735 - auc: 0.9955
12/23 [==============>...............] - ETA: 4s - loss: 0.1073 - accuracy: 0.9653 - auc: 0.9913
13/23 [===============>..............] - ETA: 4s - loss: 0.1080 - accuracy: 0.9647 - auc: 0.9912
14/23 [=================>............] - ETA: 3s - loss: 0.1026 - accuracy: 0.9673 - auc: 0.9919
15/23 [==================>...........] - ETA: 3s - loss: 0.0973 - accuracy: 0.9694 - auc: 0.9925
16/23 [===================>..........] - ETA: 2s - loss: 0.0932 - accuracy: 0.9714 - auc: 0.9928
17/23 [=====================>........] - ETA: 2s - loss: 0.0911 - accuracy: 0.9706 - auc: 0.9929
18/23 [======================>.......] - ETA: 2s - loss: 0.0929 - accuracy: 0.9699 - auc: 0.9929
19/23 [=======================>......] - ETA: 1s - loss: 0.0976 - accuracy: 0.9693 - auc: 0.9920
20/23 [=========================>....] - ETA: 1s - loss: 0.1026 - accuracy: 0.9688 - auc: 0.9902
21/23 [==========================>...] - ETA: 0s - loss: 0.1040 - accuracy: 0.9683 - auc: 0.9900
22/23 [===========================>..] - ETA: 0s - loss: 0.1012 - accuracy: 0.9697 - auc: 0.9904
23/23 [==============================] - ETA: 0s - loss: 0.0988 - accuracy: 0.9708 - auc: 0.9907
23/23 [==============================] - 10s 448ms/step - loss: 0.0988 - accuracy: 0.9708 - auc: 0.9907 - val_loss: 0.7148 - val_accuracy: 0.8321 - val_auc: 0.7509
Epoch 6/8

 1/23 [>.............................] - ETA: 9s - loss: 0.1800 - accuracy: 0.9583 - auc: 1.0000
 2/23 [=>............................] - ETA: 9s - loss: 0.1681 - accuracy: 0.9583 - auc: 1.0000
 3/23 [==>...........................] - ETA: 9s - loss: 0.1162 - accuracy: 0.9722 - auc: 1.0000
 4/23 [====>.........................] - ETA: 8s - loss: 0.0967 - accuracy: 0.9792 - auc: 1.0000
 5/23 [=====>........................] - ETA: 8s - loss: 0.0840 - accuracy: 0.9833 - auc: 0.9988
 6/23 [======>.......................] - ETA: 7s - loss: 0.0819 - accuracy: 0.9792 - auc: 0.9980
 7/23 [========>.....................] - ETA: 7s - loss: 0.0853 - accuracy: 0.9762 - auc: 0.9979
 8/23 [=========>....................] - ETA: 6s - loss: 0.0904 - accuracy: 0.9740 - auc: 0.9954
 9/23 [==========>...................] - ETA: 6s - loss: 0.0832 - accuracy: 0.9769 - auc: 0.9958
10/23 [============>.................] - ETA: 5s - loss: 0.0766 - accuracy: 0.9792 - auc: 0.9959
11/23 [=============>................] - ETA: 5s - loss: 0.0790 - accuracy: 0.9773 - auc: 0.9950
12/23 [==============>...............] - ETA: 4s - loss: 0.0748 - accuracy: 0.9792 - auc: 0.9955
13/23 [===============>..............] - ETA: 4s - loss: 0.0748 - accuracy: 0.9776 - auc: 0.9957
14/23 [=================>............] - ETA: 3s - loss: 0.0756 - accuracy: 0.9762 - auc: 0.9957
15/23 [==================>...........] - ETA: 3s - loss: 0.0711 - accuracy: 0.9778 - auc: 0.9963
16/23 [===================>..........] - ETA: 2s - loss: 0.0670 - accuracy: 0.9792 - auc: 0.9966
17/23 [=====================>........] - ETA: 2s - loss: 0.0710 - accuracy: 0.9779 - auc: 0.9964
18/23 [======================>.......] - ETA: 2s - loss: 0.0677 - accuracy: 0.9792 - auc: 0.9968
19/23 [=======================>......] - ETA: 1s - loss: 0.0653 - accuracy: 0.9803 - auc: 0.9971
20/23 [=========================>....] - ETA: 1s - loss: 0.0623 - accuracy: 0.9812 - auc: 0.9974
21/23 [==========================>...] - ETA: 0s - loss: 0.0599 - accuracy: 0.9821 - auc: 0.9976
22/23 [===========================>..] - ETA: 0s - loss: 0.0612 - accuracy: 0.9811 - auc: 0.9976
23/23 [==============================] - ETA: 0s - loss: 0.0592 - accuracy: 0.9818 - auc: 0.9977
23/23 [==============================] - 10s 452ms/step - loss: 0.0592 - accuracy: 0.9818 - auc: 0.9977 - val_loss: 0.8358 - val_accuracy: 0.7518 - val_auc: 0.7514
Epoch 7/8

 1/23 [>.............................] - ETA: 9s - loss: 0.0100 - accuracy: 1.0000 - auc: 1.0000
 2/23 [=>............................] - ETA: 8s - loss: 0.0153 - accuracy: 1.0000 - auc: 1.0000
 3/23 [==>...........................] - ETA: 8s - loss: 0.0136 - accuracy: 1.0000 - auc: 1.0000
 4/23 [====>.........................] - ETA: 7s - loss: 0.0134 - accuracy: 1.0000 - auc: 1.0000
 5/23 [=====>........................] - ETA: 7s - loss: 0.0123 - accuracy: 1.0000 - auc: 1.0000
 6/23 [======>.......................] - ETA: 6s - loss: 0.0115 - accuracy: 1.0000 - auc: 1.0000
 7/23 [========>.....................] - ETA: 6s - loss: 0.0382 - accuracy: 0.9881 - auc: 1.0000
 8/23 [=========>....................] - ETA: 6s - loss: 0.0387 - accuracy: 0.9844 - auc: 1.0000
 9/23 [==========>...................] - ETA: 5s - loss: 0.0349 - accuracy: 0.9861 - auc: 1.0000
10/23 [============>.................] - ETA: 5s - loss: 0.0319 - accuracy: 0.9875 - auc: 1.0000
11/23 [=============>................] - ETA: 4s - loss: 0.0347 - accuracy: 0.9848 - auc: 0.9995
12/23 [==============>...............] - ETA: 4s - loss: 0.0321 - accuracy: 0.9861 - auc: 0.9996
13/23 [===============>..............] - ETA: 4s - loss: 0.0329 - accuracy: 0.9840 - auc: 0.9994
14/23 [=================>............] - ETA: 3s - loss: 0.0314 - accuracy: 0.9851 - auc: 0.9994
15/23 [==================>...........] - ETA: 3s - loss: 0.0298 - accuracy: 0.9861 - auc: 0.9995
16/23 [===================>..........] - ETA: 2s - loss: 0.0381 - accuracy: 0.9818 - auc: 0.9990
17/23 [=====================>........] - ETA: 2s - loss: 0.0363 - accuracy: 0.9828 - auc: 0.9992
18/23 [======================>.......] - ETA: 2s - loss: 0.0344 - accuracy: 0.9838 - auc: 0.9992
19/23 [=======================>......] - ETA: 1s - loss: 0.0408 - accuracy: 0.9803 - auc: 0.9990
20/23 [=========================>....] - ETA: 1s - loss: 0.0392 - accuracy: 0.9812 - auc: 0.9991
21/23 [==========================>...] - ETA: 0s - loss: 0.0377 - accuracy: 0.9821 - auc: 0.9992
22/23 [===========================>..] - ETA: 0s - loss: 0.0363 - accuracy: 0.9830 - auc: 0.9993
23/23 [==============================] - ETA: 0s - loss: 0.0353 - accuracy: 0.9836 - auc: 0.9993
23/23 [==============================] - 10s 441ms/step - loss: 0.0353 - accuracy: 0.9836 - auc: 0.9993 - val_loss: 1.0636 - val_accuracy: 0.6861 - val_auc: 0.7159
Epoch 8/8

 1/23 [>.............................] - ETA: 9s - loss: 0.1284 - accuracy: 0.9167 - auc: 1.0000
 2/23 [=>............................] - ETA: 8s - loss: 0.0726 - accuracy: 0.9583 - auc: 1.0000
 3/23 [==>...........................] - ETA: 8s - loss: 0.0542 - accuracy: 0.9722 - auc: 1.0000
 4/23 [====>.........................] - ETA: 7s - loss: 0.0427 - accuracy: 0.9792 - auc: 1.0000
 5/23 [=====>........................] - ETA: 7s - loss: 0.0391 - accuracy: 0.9833 - auc: 1.0000
 6/23 [======>.......................] - ETA: 7s - loss: 0.0335 - accuracy: 0.9861 - auc: 1.0000
 7/23 [========>.....................] - ETA: 6s - loss: 0.0294 - accuracy: 0.9881 - auc: 1.0000
 8/23 [=========>....................] - ETA: 6s - loss: 0.0272 - accuracy: 0.9896 - auc: 1.0000
 9/23 [==========>...................] - ETA: 6s - loss: 0.0250 - accuracy: 0.9907 - auc: 1.0000
10/23 [============>.................] - ETA: 5s - loss: 0.0326 - accuracy: 0.9875 - auc: 1.0000
11/23 [=============>................] - ETA: 5s - loss: 0.0306 - accuracy: 0.9886 - auc: 1.0000
12/23 [==============>...............] - ETA: 4s - loss: 0.0284 - accuracy: 0.9896 - auc: 1.0000
13/23 [===============>..............] - ETA: 4s - loss: 0.0275 - accuracy: 0.9904 - auc: 1.0000
14/23 [=================>............] - ETA: 3s - loss: 0.0258 - accuracy: 0.9911 - auc: 1.0000
15/23 [==================>...........] - ETA: 3s - loss: 0.0243 - accuracy: 0.9917 - auc: 1.0000
16/23 [===================>..........] - ETA: 2s - loss: 0.0229 - accuracy: 0.9922 - auc: 1.0000
17/23 [=====================>........] - ETA: 2s - loss: 0.0218 - accuracy: 0.9926 - auc: 1.0000
18/23 [======================>.......] - ETA: 2s - loss: 0.0208 - accuracy: 0.9931 - auc: 1.0000
19/23 [=======================>......] - ETA: 1s - loss: 0.0199 - accuracy: 0.9934 - auc: 1.0000
20/23 [=========================>....] - ETA: 1s - loss: 0.0190 - accuracy: 0.9937 - auc: 1.0000
21/23 [==========================>...] - ETA: 0s - loss: 0.0188 - accuracy: 0.9940 - auc: 1.0000
22/23 [===========================>..] - ETA: 0s - loss: 0.0181 - accuracy: 0.9943 - auc: 1.0000
23/23 [==============================] - ETA: 0s - loss: 0.0175 - accuracy: 0.9945 - auc: 1.0000
23/23 [==============================] - 10s 451ms/step - loss: 0.0175 - accuracy: 0.9945 - auc: 1.0000 - val_loss: 0.7724 - val_accuracy: 0.8102 - val_auc: 0.6995
# Plot per-epoch training/validation loss, accuracy and AUC curves.
plot(history)

LS0tCnRpdGxlOiAiUiBOb3RlYm9vayIKb3V0cHV0OiBodG1sX25vdGVib29rCi0tLQoKYGBge3J9ClN5cy5zZXRlbnYoVEZfS0VSQVM9MSkgCnJldGljdWxhdGU6OnB5X2NvbmZpZygpCmBgYAoKYGBge3J9CnJldGljdWxhdGU6OnB5X21vZHVsZV9hdmFpbGFibGUoJ2tlcmFzX2JlcnQnKQpgYGAKCmBgYHtyfQp0ZW5zb3JmbG93Ojp0Zl92ZXJzaW9uKCkKYGBgCmBgYHtyfQpwcmV0cmFpbmVkX3BhdGggPSAnLi9iZXJ0X21vZGVscy91bmNhc2VkX0wtMTJfSC03NjhfQS0xMicKY29uZmlnX3BhdGggPSBmaWxlLnBhdGgocHJldHJhaW5lZF9wYXRoLCAnYmVydF9jb25maWcuanNvbicpCmNoZWNrcG9pbnRfcGF0aCA9IGZpbGUucGF0aChwcmV0cmFpbmVkX3BhdGgsIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgJ2JlcnRfbW9kZWwuY2twdCcpCnZvY2FiX3BhdGggPSBmaWxlLnBhdGgocHJldHJhaW5lZF9wYXRoLCAndm9jYWIudHh0JykKYGBgCgpgYGB7cn0KbGlicmFyeShyZXRpY3VsYXRlKQpsaWJyYXJ5KGtlcmFzKQprX2JlcnQgPSBpbXBvcnQoJ2tlcmFzX2JlcnQnKQp0b2tlbl9kaWN0ID0ga19iZXJ0JGxvYWRfdm9jYWJ1bGFyeSh2b2NhYl9wYXRoKQp0b2tlbml6ZXIgPSBrX2JlcnQkVG9rZW5pemVyKHRva2VuX2RpY3QpCmBgYAoKYGBge3J9CnNlcV9sZW5ndGggPSAxMjhMIApiY2hfc2l6ZSA9IDI0CmVwb2NocyA9IDgKbGVhcm5pbmdfcmF0ZSA9IDFlLTQKCkRBVEFfQ09MVU1OID0gJ3RleHQnCkxBQkVMX0NPTFVNTiA9ICdkZWNsaW5lZCcKYGBgCgpgYGB7cn0KbW9kZWwgPSBrX2JlcnQkbG9hZF90cmFpbmVkX21vZGVsX2Zyb21fY2hlY2twb2ludCgKICBjb25maWdfcGF0aCwKICBjaGVja3BvaW50X3BhdGgsCiAgdHJhaW5pbmc9VCwKICB0cmFpbmFibGU9VCwKICBzZXFfbGVuPXNlcV9sZW5ndGgpCmBgYAoKYGBge3J9CiMgdG9rZW5pemUgdGV4dAp0b2tlbml6ZV9mdW4gPSBmdW5jdGlvbihkYXRhc2V0KSB7CiAgYyhpbmRpY2VzLCB0YXJnZXQsIHNlZ21lbnRzKSAlPC0lIGxpc3QobGlzdCgpLGxpc3QoKSxsaXN0KCkpCiAgZm9yICggaSBpbiAxOm5yb3coZGF0YXNldCkpIHsKICAgIGMoaW5kaWNlc190b2ssIHNlZ21lbnRzX3RvaykgJTwtJSB0b2tlbml6ZXIkZW5jb2RlKGRhdGFzZXRbW0RBVEFfQ09MVU1OXV1baV0sIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgbWF4X2xlbj1zZXFfbGVuZ3RoKQogICAgaW5kaWNlcyA9IGluZGljZXMgJT4lIGFwcGVuZChsaXN0KGFzLm1hdHJpeChpbmRpY2VzX3RvaykpKQogICAgdGFyZ2V0ID0gdGFyZ2V0ICU+JSBhcHBlbmQoZGF0YXNldFtbTEFCRUxfQ09MVU1OXV1baV0pCiAgICBzZWdtZW50cyA9IHNlZ21lbnRzICU+JSBhcHBlbmQobGlzdChhcy5tYXRyaXgoc2VnbWVudHNfdG9rKSkpCiAgfQogIHJldHVybihsaXN0KGluZGljZXMsc2VnbWVudHMsIHRhcmdldCkpCn0KCiMgcmVhZCBkYXRhCmR0X2RhdGEgPSBmdW5jdGlvbihk
aXIsIHJvd3NfdG9fcmVhZCl7CiAgZGF0YSA9IGRhdGEudGFibGU6OmZyZWFkKGRpciwgbnJvd3M9cm93c190b19yZWFkKQogIGMoeF90cmFpbiwgeF9zZWdtZW50LCB5X3RyYWluKSAlPC0lIHRva2VuaXplX2Z1bihkYXRhKQogIHJldHVybihsaXN0KHhfdHJhaW4sIHhfc2VnbWVudCwgeV90cmFpbikpCn0KYGBgCgpgYGB7cn0KIyB3cml0ZV9jc3Yoc3ViX3RleHQgJT4lCiMgICAgICAgICAgICAgbXV0YXRlKGRlY2xpbmVkID0gaWZfZWxzZShEZWNpc2lvbiA9PSAiRGVjbGluZWQiLCAxLCAwKSkgJT4lCiMgICAgICAgICAgICAgc2VsZWN0KHRleHQsIGRlY2xpbmVkKSwKIyAgICAgICAgICAgInN1Yl90ZXh0LmNzdiIpCmMoeF90cmFpbiwgeF9zZWdtZW50LCB5X3RyYWluKSAlPC0lIGR0X2RhdGEoInN1Yl90ZXh0LmNzdiIsIDEwMDApCmBgYAoKYGBge3J9CnRyYWluID0gZG8uY2FsbChjYmluZCx4X3RyYWluKSAlPiUgdCgpCnNlZ21lbnRzID0gZG8uY2FsbChjYmluZCx4X3NlZ21lbnQpICU+JSB0KCkKdGFyZ2V0cyA9IGRvLmNhbGwoY2JpbmQseV90cmFpbikgJT4lIHQoKQoKY29uY2F0ID0gYyhsaXN0KHRyYWluICksbGlzdChzZWdtZW50cykpCmBgYAoKYGBge3J9CmMoZGVjYXlfc3RlcHMsIHdhcm11cF9zdGVwcykgJTwtJSBrX2JlcnQkY2FsY190cmFpbl9zdGVwcygKICB0YXJnZXRzICU+JSBsZW5ndGgoKSwKICBiYXRjaF9zaXplPWJjaF9zaXplLAogIGVwb2Nocz1lcG9jaHMKKQpgYGAKCmBgYHtyfQpsaWJyYXJ5KGtlcmFzKQoKaW5wdXRfMSA9IGdldF9sYXllcihtb2RlbCxuYW1lID0gJ0lucHV0LVRva2VuJykkaW5wdXQKaW5wdXRfMiA9IGdldF9sYXllcihtb2RlbCxuYW1lID0gJ0lucHV0LVNlZ21lbnQnKSRpbnB1dAppbnB1dHMgPSBsaXN0KGlucHV0XzEsaW5wdXRfMikKCmRlbnNlID0gZ2V0X2xheWVyKG1vZGVsLG5hbWUgPSAnTlNQLURlbnNlJykkb3V0cHV0CgpvdXRwdXRzID0gZGVuc2UgJT4lIGxheWVyX2RlbnNlKHVuaXRzPTFMLCBhY3RpdmF0aW9uPSdzaWdtb2lkJywKICAgICAgICAgICAgICAgICAgICAgICAgIGtlcm5lbF9pbml0aWFsaXplcj1pbml0aWFsaXplcl90cnVuY2F0ZWRfbm9ybWFsKHN0ZGRldiA9IDAuMDIpLAogICAgICAgICAgICAgICAgICAgICAgICAgbmFtZSA9ICdvdXRwdXQnKQoKbW9kZWwgPSBrZXJhc19tb2RlbChpbnB1dHMgPSBpbnB1dHMsb3V0cHV0cyA9IG91dHB1dHMpCmBgYAoKYGBge3J9Cm1vZGVsICU+JSBjb21waWxlKAogIGtfYmVydCRBZGFtV2FybXVwKGRlY2F5X3N0ZXBzPWRlY2F5X3N0ZXBzLCAKICAgICAgICAgICAgICAgICAgICB3YXJtdXBfc3RlcHM9d2FybXVwX3N0ZXBzLCBsZWFybmluZ19yYXRlPWxlYXJuaW5nX3JhdGUpLAogIGxvc3MgPSAnYmluYXJ5X2Nyb3NzZW50cm9weScsCiAgbWV0cmljcyA9IGxpc3QoJ2FjY3VyYWN5JywgJ0FVQycpCikKCmhpc3RvcnkgPC0gbW9kZWwgJT4lIGZpdCgKICBjb25jYXQsCiAgdGFyZ2V0cywKICBlcG9jaHM9ZXBvY2hzLAogIGJhdGNo
X3NpemU9YmNoX3NpemUsIHZhbGlkYXRpb25fc3BsaXQ9MC4yKQpgYGAKCmBgYHtyfQpwbG90KGhpc3RvcnkpCmBgYAoK