Keras Dense Neural Network Example Cheat Sheet

  • This is a baseline neural-net notebook that scores roughly in the top 50% of the competition.
Keras neural net
In [1]:
import pandas as pd
import numpy as np
import seaborn as sns
In [2]:
import warnings
warnings.filterwarnings("ignore")
warnings.filterwarnings(action="ignore",category=DeprecationWarning)
warnings.filterwarnings(action="ignore",category=FutureWarning)
import numpy as np # linear algebra
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
import matplotlib.pyplot as plt
import tensorflow as tf
import os
import math
import gc
import copy
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler, MinMaxScaler


from tensorflow.keras.layers import Dense, Input, Activation
from tensorflow.keras.layers import BatchNormalization,Add,Dropout
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.models import Model, load_model
from tensorflow.keras import callbacks
from tensorflow.keras import backend as K
In [3]:
# pre-built feature tables for train/test, plus the training target column
train_X = pd.read_csv("../inputs/train_from_f1.csv")
test_X = pd.read_csv("test_from_f1_2.csv")
train_y = pd.read_csv("../inputs/train.csv")['scalar_coupling_constant']
In [4]:
ATOMIC_NUMBERS = {
    'H': 1,
    'C': 6,
    'N': 7,
    'O': 8,
    'F': 9
}
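ATOMIC_NUMBERS is not referenced in the cells shown here; a mapping like this is normally used to turn atom symbols into numeric feature columns. A minimal sketch, assuming the competition's structures.csv (with an 'atom' symbol column) sits under ../inputs:

structures = pd.read_csv("../inputs/structures.csv")  # assumed path and column layout
structures['atomic_number'] = structures['atom'].map(ATOMIC_NUMBERS)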
In [5]:
giba_columns = ['inv_dist0', 'inv_dist1', 'inv_distP', 'inv_dist0R', 'inv_dist1R', 'inv_distPR', 'inv_dist0E', 'inv_dist1E', 'inv_distPE', 'linkM0',
         'linkM1', 'min_molecule_atom_0_dist_xyz', 'mean_molecule_atom_0_dist_xyz', 'max_molecule_atom_0_dist_xyz', 'sd_molecule_atom_0_dist_xyz', 'min_molecule_atom_1_dist_xyz',
         'mean_molecule_atom_1_dist_xyz', 'max_molecule_atom_1_dist_xyz', 'sd_molecule_atom_1_dist_xyz', 'coulomb_C.x', 'coulomb_F.x', 'coulomb_H.x', 'coulomb_N.x',
         'coulomb_O.x', 'yukawa_C.x', 'yukawa_F.x', 'yukawa_H.x', 'yukawa_N.x', 'yukawa_O.x', 'vander_C.x', 'vander_F.x', 'vander_H.x', 'vander_N.x', 'vander_O.x',
         'coulomb_C.y', 'coulomb_F.y', 'coulomb_H.y', 'coulomb_N.y', 'coulomb_O.y', 'yukawa_C.y', 'yukawa_F.y', 'yukawa_H.y', 'yukawa_N.y', 'yukawa_O.y', 'vander_C.y',
         'vander_F.y', 'vander_H.y', 'vander_N.y', 'vander_O.y', 'distC0', 'distH0', 'distN0', 'distC1', 'distH1', 'distN1', 'adH1', 'adH2', 'adH3', 'adH4', 'adC1',
         'adC2', 'adC3', 'adC4', 'adN1', 'adN2', 'adN3', 'adN4', 'NC', 'NH', 'NN', 'NF', 'NO']

all_features = [ 'x_x', 'y_x','z_x', 'atom_y', 'x_y', 'y_y',
       'z_y', 'n_bonds_y', 'C', 'F', 'H', 'N', 'O', 'distance', 'dist_mean_x','dist_mean_y',
        'x_dist_abs', 'y_dist_abs', 'z_dist_abs','inv_distance3',
       'molecule_atom_1_dist_std_diff','molecule_dist_mean_x',
       'molecule_dist_mean_y','molecule_dist_std_x','molecule_dist_std_y','molecule_atom_0_dist_mean',
       'molecule_atom_1_dist_mean','dist_mean_bond_y',
       'n_no_bonds_x','n_no_bonds_y', 'dist_std_x', 'dist_std_y','dist_min_x','dist_min_y','dist_max_x', 'dist_max_y',
       'molecule_dist_range_x','molecule_dist_range_y', 'dimension_x', 'dimension_y','dimension_z','molecule_dist_mean_bond_x',
       'molecule_dist_mean_bond_x','dist_mean_no_bond_x','dist_mean_no_bond_y',
       'dist_std_bond_y','dist_bond_min_y','dist_bond_max_y',
       'range_dist_bond_y','dist_std_no_bond_x','dist_std_no_bond_y', 'dist_no_bond_min_x','dist_no_bond_min_y','dist_no_bond_max_x',
       'dist_no_bond_max_y', 'range_dist_no_bond_x','range_dist_no_bond_y','dist_median_bond_y','dist_median_x',
       'dist_median_y','dist_median_no_bond_x','dist_median_no_bond_y','molecule_type_dist_min','molecule_type_dist_max',
       'molecule_dist_mean_no_bond_x','molecule_dist_mean_no_bond_y', 'n_diff_y','molecule_atom_index_0_dist_min_div','molecule_atom_index_0_dist_std_div',
        'molecule_atom_index_0_dist_mean','molecule_atom_index_0_dist_max','molecule_atom_index_1_dist_mean','molecule_atom_index_1_dist_max',
       'molecule_atom_index_1_dist_min','molecule_atom_index_1_dist_min_div','molecule_atom_index_1_dist_std_diff','molecule_atom_index_0_dist_mean_diff',
        'molecule_atom_index_1_dist_mean_div','molecule_atom_index_1_dist_min_diff', 'rc_A', 'rc_B', 'rc_C', 'mu', 'alpha', 'homo', 'lumo', 'gap', 'zpve', 'Cv',
         'freqs_min', 'freqs_max', 'freqs_mean', 'mulliken_min', 'mulliken_max', 'mulliken_atom_0', 'mulliken_atom_1',
         'dist_C_0_x', 'dist_C_1_x', 'dist_C_2_x', 'dist_C_3_x', 'dist_C_4_x', 'dist_F_0_x', 'dist_F_1_x', 'dist_F_2_x', 'dist_H_0_x',
         'dist_H_1_x', 'dist_H_2_x', 'dist_H_3_x', 'dist_H_4_x', 'dist_N_0_x', 'dist_N_1_x', 'dist_N_2_x', 'dist_N_3_x', 'dist_N_4_x', 'dist_O_0_x', 'dist_O_1_x',
         'dist_O_2_x', 'dist_O_3_x', 'dist_O_4_x', 'dist_C_0_y', 'dist_C_1_y', 'dist_C_2_y', 'dist_C_3_y', 'dist_C_4_y', 'dist_F_0_y', 'dist_F_1_y', 'dist_F_2_y',
         'dist_F_3_y', 'dist_F_4_y', 'dist_H_0_y', 'dist_H_1_y', 'dist_H_2_y', 'dist_H_3_y', 'dist_H_4_y', 'dist_N_0_y', 'dist_N_1_y', 'dist_N_2_y', 'dist_N_3_y',
         'dist_N_4_y', 'dist_O_0_y', 'dist_O_1_y', 'dist_O_2_y', 'dist_O_3_y', 'dist_O_4_y','distance_closest_0', 'distance_closest_1', 'distance_farthest_0',
         'distance_farthest_1','cos_c0_c1', 'cos_f0_f1','cos_c0_f0', 'cos_c1_f1', 'cos_c0', 'cos_c1', 'cos_f0', 'cos_f1'] + giba_columns
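Because all_features is long and hand-maintained (note that 'molecule_dist_mean_bond_x' is listed twice), a quick consistency check against the loaded frames helps catch typos early. A small sketch using only the objects defined above:

print(len(all_features), 'listed,', len(set(all_features)), 'unique')
missing = [c for c in all_features if c not in train_X.columns or c not in test_X.columns]
print('missing from the loaded frames:', missing)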
In [6]:
def create_nn_model(input_shape):
    """Plain feed-forward regressor: five Dense+BatchNorm blocks and a single linear output."""
    inp = Input(shape=(input_shape,))
    x = Dense(2048, activation = 'relu')(inp)
    x = BatchNormalization()(x)
    x = Dense(1024, activation = 'relu')(x)
    x = BatchNormalization()(x)
    x = Dense(1024, activation = 'relu')(x)
    x = BatchNormalization()(x)
    x = Dense(512, activation = 'relu')(x)
    x = BatchNormalization()(x)
    x = Dense(512, activation = 'relu')(x)
    x = BatchNormalization()(x)
    out = Dense(1, activation = 'linear')(x)
    model = Model(inputs=inp, outputs=[out])
    return model
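To sanity-check the architecture before the long training loop, the model can be built once with the feature count and summarized (a minimal sketch; it only inspects layer shapes and parameter counts):

model = create_nn_model(len(all_features))
model.compile(loss='mae', optimizer=Adam())
model.summary()
K.clear_session()  # free the graph again before the real per-type training below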
In [7]:
def plot_history(history, label):
    plt.plot(history.history['loss'])
    plt.plot(history.history['val_loss'])
    plt.title('Loss for %s' % label)
    plt.ylabel('Loss')
    plt.xlabel('Epoch')
    _= plt.legend(['Train','Validation'], loc='upper left')
    plt.show()
In [8]:
tf.__version__
Out[8]:
'2.0.0-alpha0'
In [9]:
types = ['1JHC', '2JHH', '1JHN', '2JHN', '2JHC', '3JHH', '3JHC', '3JHN']
In [10]:
train_X = train_X.fillna(0)
test_X = test_X.fillna(0)
In [11]:
epoch_n = 100
batch_size = 10000
verbose = 1

cv_score = []
cv_score_total = 0

test_prediction = np.zeros(len(test_X))
In [12]:
pd.set_option('display.max_rows', 500)
pd.set_option('display.max_columns', 500)
pd.set_option('display.width', 1000)
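The loop below trains one network per coupling type, holds out 10% of that type's rows for validation, and records log(MAE) per type. After it finishes, the overall CV estimate (the mean of the per-type log-MAE values, mirroring the competition's group-mean log-MAE metric) can be summarized with a small sketch like the following, using only the cv_score variables defined above:

print(dict(zip(types, cv_score)))
print('overall CV score (mean log MAE):', cv_score_total / len(types))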
In [13]:
for mol_type in types:
    #model_name_wrt = ('/model/molecule_model_%s.hdf5' % mol_type)
    
    print('Training %s' % mol_type)
    
    train_type_index = (train_X['type'] == mol_type)
    
    train_type_X = train_X[train_type_index]
    train_type_y = train_y[train_type_index]
    test_type_X = test_X[test_X['type'] == mol_type]
    
    
    # scale train and test together so both share the same feature statistics
    input_data = StandardScaler().fit_transform(pd.concat([train_type_X.loc[:, all_features], test_type_X.loc[:, all_features]], ignore_index=True))
    target_data = train_type_y.values
    
    train_index, cv_index = train_test_split(np.arange(len(train_type_X)),random_state=111, test_size=0.1)
    
    train_target=target_data[train_index]
    cv_target=target_data[cv_index]
    train_input=input_data[train_index]
    cv_input=input_data[cv_index]
    
    test_input=input_data[len(train_type_X):,:]
    
    nn_model = create_nn_model(train_input.shape[1])
    nn_model.compile(loss='mae', optimizer=Adam())
    
    es = callbacks.EarlyStopping(monitor='val_loss', min_delta = 0.0001, patience = 40, verbose = 1, mode='auto', restore_best_weights = True)
    rlr = callbacks.ReduceLROnPlateau(monitor='val_loss', factor = 0.1, patience = 30, min_lr = 1e-6, mode = 'auto', verbose = 1)
    #sv_mod = callbacks.ModelCheckpoint(model_name_wrt, monitor='val_loss', save_best_only = True, period = 1)
    
    history = nn_model.fit(train_input, [train_target], validation_data=(cv_input, [cv_target]), callbacks = [es, rlr], epochs = epoch_n, batch_size = batch_size, verbose = verbose)
    
    cv_predict = nn_model.predict(cv_input)
    plot_history(history, mol_type)
    # mean absolute error on the held-out fold; its natural log is the per-type score
    mae = np.mean(np.abs(cv_target - cv_predict[:, 0]))
    print(np.log(mae))
    cv_score.append(np.log(mae))
    cv_score_total += np.log(mae)
    
    test_predict = nn_model.predict(test_input)
    
    test_prediction[test_X["type"]==mol_type]=test_predict[:,0]
    K.clear_session()
Training 1JHC
Train on 638474 samples, validate on 70942 samples
Epoch 1/100
638474/638474 [==============================] - 19s 30us/sample - loss: 93.1285 - val_loss: 89.7207
Epoch 2/100
638474/638474 [==============================] - 17s 26us/sample - loss: 83.6441 - val_loss: 74.4705
Epoch 3/100
638474/638474 [==============================] - 17s 27us/sample - loss: 63.4610 - val_loss: 48.2530
...
Epoch 99/100
638474/638474 [==============================] - 17s 27us/sample - loss: 0.5459 - val_loss: 0.9337
Epoch 100/100
638474/638474 [==============================] - 17s 26us/sample - loss: 0.5422 - val_loss: 0.7399
-0.3012262679466983
Training 2JHH
Train on 340232 samples, validate on 37804 samples
Epoch 1/100
340232/340232 [==============================] - 10s 30us/sample - loss: 9.6792 - val_loss: 12.2901
Epoch 2/100
340232/340232 [==============================] - 9s 27us/sample - loss: 7.2625 - val_loss: 8.8652
...
Epoch 79/100
Epoch 00079: ReduceLROnPlateau reducing learning rate to 0.00010000000474974513.
340232/340232 [==============================] - 9s 26us/sample - loss: 0.1607 - val_loss: 0.2924
Epoch 80/100
340232/340232 [==============================] - 9s 28us/sample - loss: 0.1208 - val_loss: 0.1827
...
Epoch 99/100
340232/340232 [==============================] - 10s 28us/sample - loss: 0.0871 - val_loss: 0.1277
Epoch 100/100
340232/340232 [==============================] - 9s 26us/sample - loss: 0.0887 - val_loss: 0.1419
-1.9525659470029737
Training 1JHN
Train on 39026 samples, validate on 4337 samples
Epoch 1/100
39026/39026 [==============================] - 2s 53us/sample - loss: 47.4429 - val_loss: 47.2498
Epoch 2/100
39026/39026 [==============================] - 1s 23us/sample - loss: 47.3132 - val_loss: 47.1132
...
Epoch 99/100
39026/39026 [==============================] - 1s 24us/sample - loss: 0.4334 - val_loss: 2.0182
Epoch 100/100
39026/39026 [==============================] - 1s 22us/sample - loss: 0.4184 - val_loss: 1.9750
0.6805895605829633
Training 2JHN
Train on 107327 samples, validate on 11926 samples
Epoch 1/100
107327/107327 [==============================] - 4s 41us/sample - loss: 3.4077 - val_loss: 2.6730
Epoch 2/100
107327/107327 [==============================] - 3s 30us/sample - loss: 2.6841 - val_loss: 2.1399
Epoch 3/100
107327/107327 [==============================] - 3s 25us/sample - loss: 2.3251 - val_loss: 2.0060
Epoch 4/100
107327/107327 [==============================] - 3s 23us/sample - loss: 1.8818 - val_loss: 2.3050
Epoch 5/100
107327/107327 [==============================] - 3s 24us/sample - loss: 1.4027 - val_loss: 1.9290
Epoch 6/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.9191 - val_loss: 1.6506
Epoch 7/100
107327/107327 [==============================] - 2s 23us/sample - loss: 0.6996 - val_loss: 1.2796
Epoch 8/100
107327/107327 [==============================] - 3s 23us/sample - loss: 0.5482 - val_loss: 1.0835
Epoch 9/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.4430 - val_loss: 1.0013
Epoch 10/100
107327/107327 [==============================] - 2s 23us/sample - loss: 0.3926 - val_loss: 0.9812ETA: 1s - loss
Epoch 11/100
107327/107327 [==============================] - 2s 23us/sample - loss: 0.3456 - val_loss: 0.8646
Epoch 12/100
107327/107327 [==============================] - 2s 23us/sample - loss: 0.3430 - val_loss: 0.8838
Epoch 13/100
107327/107327 [==============================] - 3s 25us/sample - loss: 0.3327 - val_loss: 0.7518
Epoch 14/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.3317 - val_loss: 0.6530
Epoch 15/100
107327/107327 [==============================] - 3s 25us/sample - loss: 0.2940 - val_loss: 0.6567
Epoch 16/100
107327/107327 [==============================] - 3s 23us/sample - loss: 0.2818 - val_loss: 0.6656s - loss: 0.28
Epoch 17/100
107327/107327 [==============================] - 3s 25us/sample - loss: 0.2769 - val_loss: 0.6148
Epoch 18/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.2641 - val_loss: 0.5881
Epoch 19/100
107327/107327 [==============================] - 3s 23us/sample - loss: 0.2450 - val_loss: 0.4780
Epoch 20/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.2417 - val_loss: 0.5139
Epoch 21/100
107327/107327 [==============================] - 2s 23us/sample - loss: 0.2467 - val_loss: 0.5385
Epoch 22/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.2393 - val_loss: 0.4576
Epoch 23/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.2230 - val_loss: 0.4105A: 0s - loss: 0.
Epoch 24/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.2119 - val_loss: 0.3938TA: 1s - loss:
Epoch 25/100
107327/107327 [==============================] - 3s 25us/sample - loss: 0.2121 - val_loss: 0.3744
Epoch 26/100
107327/107327 [==============================] - 2s 23us/sample - loss: 0.2016 - val_loss: 0.3881
Epoch 27/100
107327/107327 [==============================] - 3s 23us/sample - loss: 0.2063 - val_loss: 0.3871
Epoch 28/100
107327/107327 [==============================] - 3s 23us/sample - loss: 0.1933 - val_loss: 0.3559
Epoch 29/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.1913 - val_loss: 0.3045
Epoch 30/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.2035 - val_loss: 0.3133A: 0s - loss: 0.20
Epoch 31/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.2013 - val_loss: 0.2981ETA: 1s - loss: 
Epoch 32/100
107327/107327 [==============================] - 3s 23us/sample - loss: 0.1936 - val_loss: 0.3174
Epoch 33/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.1918 - val_loss: 0.3001: 0s - loss: 0.19
Epoch 34/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.1837 - val_loss: 0.2979
Epoch 35/100
107327/107327 [==============================] - 3s 25us/sample - loss: 0.1805 - val_loss: 0.2815
Epoch 36/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.1694 - val_loss: 0.2560
Epoch 37/100
107327/107327 [==============================] - 2s 23us/sample - loss: 0.1774 - val_loss: 0.2776 0s - loss: 0.
Epoch 38/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.1749 - val_loss: 0.2897
Epoch 39/100
107327/107327 [==============================] - 3s 25us/sample - loss: 0.1784 - val_loss: 0.2757
Epoch 40/100
107327/107327 [==============================] - 3s 25us/sample - loss: 0.1703 - val_loss: 0.2544
Epoch 41/100
107327/107327 [==============================] - 2s 23us/sample - loss: 0.1742 - val_loss: 0.2624
Epoch 42/100
107327/107327 [==============================] - 3s 23us/sample - loss: 0.1676 - val_loss: 0.2967
Epoch 43/100
107327/107327 [==============================] - 3s 23us/sample - loss: 0.1675 - val_loss: 0.2765
Epoch 44/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.1593 - val_loss: 0.2390ETA: 1s - loss:
Epoch 45/100
107327/107327 [==============================] - 2s 23us/sample - loss: 0.1635 - val_loss: 0.2688
Epoch 46/100
107327/107327 [==============================] - 2s 23us/sample - loss: 0.1616 - val_loss: 0.2579
Epoch 47/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.1567 - val_loss: 0.2263
Epoch 48/100
107327/107327 [==============================] - 3s 26us/sample - loss: 0.1478 - val_loss: 0.2728
Epoch 49/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.1659 - val_loss: 0.2831
Epoch 50/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.1716 - val_loss: 0.2550
Epoch 51/100
107327/107327 [==============================] - 2s 23us/sample - loss: 0.1665 - val_loss: 0.2816
Epoch 52/100
107327/107327 [==============================] - 3s 23us/sample - loss: 0.1520 - val_loss: 0.2714
Epoch 53/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.1419 - val_loss: 0.2806
Epoch 54/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.1459 - val_loss: 0.2728ETA: 1s - loss: 0. - ETA: 0s - loss: 0.145 - ETA: 0s - loss: 0.1
Epoch 55/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.1290 - val_loss: 0.2602
Epoch 56/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.1447 - val_loss: 0.2425
Epoch 57/100
107327/107327 [==============================] - 2s 23us/sample - loss: 0.1494 - val_loss: 0.2497
Epoch 58/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.1382 - val_loss: 0.2613s - loss: 0.13
Epoch 59/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.1489 - val_loss: 0.2749
Epoch 60/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.1404 - val_loss: 0.2673TA: 0s - loss: 0.
Epoch 61/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.1401 - val_loss: 0.2557
Epoch 62/100
107327/107327 [==============================] - 2s 23us/sample - loss: 0.1352 - val_loss: 0.2430
Epoch 63/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.1286 - val_loss: 0.2243
Epoch 64/100
107327/107327 [==============================] - 3s 25us/sample - loss: 0.1284 - val_loss: 0.2629
Epoch 65/100
107327/107327 [==============================] - 2s 22us/sample - loss: 0.1374 - val_loss: 0.2419
Epoch 66/100
107327/107327 [==============================] - 3s 24us/sample - loss: 0.1276 - val_loss: 0.2288
Epoch 67/100
107327/107327 [==============================] - 2s 23us/sample - loss: 0.1299 - val_loss: 0.2304
Epoch 68/100
107327/107327 [==============================] - 3s 28us/sample - loss: 0.1369 - val_loss: 0.2724
Epoch 69/100
107327/107327 [==============================] - 3s 25us/sample - loss: 0.1444 - val_loss: 0.2323
Epoch 70/100
107327/107327 [==============================] - 3s 26us/sample - loss: 0.1434 - val_loss: 0.2573
Epoch 71/100
107327/107327 [==============================] - 3s 27us/sample - loss: 0.1391 - val_loss: 0.2651
Epoch 72/100
107327/107327 [==============================] - 3s 30us/sample - loss: 0.1411 - val_loss: 0.2666
Epoch 73/100
107327/107327 [==============================] - 3s 28us/sample - loss: 0.1331 - val_loss: 0.2470
Epoch 74/100
107327/107327 [==============================] - 4s 33us/sample - loss: 0.1302 - val_loss: 0.2460
Epoch 75/100
107327/107327 [==============================] - 4s 34us/sample - loss: 0.1346 - val_loss: 0.2304
Epoch 76/100
107327/107327 [==============================] - 3s 32us/sample - loss: 0.1271 - val_loss: 0.2416
Epoch 77/100
107327/107327 [==============================] - 3s 28us/sample - loss: 0.1268 - val_loss: 0.2709
Epoch 78/100
107327/107327 [==============================] - 3s 27us/sample - loss: 0.1187 - val_loss: 0.2236
Epoch 79/100
107327/107327 [==============================] - 3s 28us/sample - loss: 0.1168 - val_loss: 0.2054
Epoch 80/100
107327/107327 [==============================] - 3s 28us/sample - loss: 0.1276 - val_loss: 0.2525
Epoch 81/100
107327/107327 [==============================] - 3s 26us/sample - loss: 0.1260 - val_loss: 0.2372
Epoch 82/100
107327/107327 [==============================] - 3s 26us/sample - loss: 0.1288 - val_loss: 0.2299
Epoch 83/100
107327/107327 [==============================] - 3s 26us/sample - loss: 0.1252 - val_loss: 0.2386
Epoch 84/100
107327/107327 [==============================] - 3s 30us/sample - loss: 0.1324 - val_loss: 0.2634
Epoch 85/100
107327/107327 [==============================] - 3s 28us/sample - loss: 0.1521 - val_loss: 0.2686
Epoch 86/100
107327/107327 [==============================] - 3s 28us/sample - loss: 0.1212 - val_loss: 0.2209
Epoch 87/100
107327/107327 [==============================] - 3s 28us/sample - loss: 0.1275 - val_loss: 0.2397
Epoch 88/100
107327/107327 [==============================] - 3s 29us/sample - loss: 0.1151 - val_loss: 0.2100
Epoch 89/100
107327/107327 [==============================] - 3s 30us/sample - loss: 0.1275 - val_loss: 0.2677
Epoch 90/100
107327/107327 [==============================] - 3s 29us/sample - loss: 0.1333 - val_loss: 0.2799
Epoch 91/100
107327/107327 [==============================] - 3s 29us/sample - loss: 0.1292 - val_loss: 0.2370
Epoch 92/100
107327/107327 [==============================] - 3s 25us/sample - loss: 0.1214 - val_loss: 0.2492
Epoch 93/100
107327/107327 [==============================] - 3s 28us/sample - loss: 0.1174 - val_loss: 0.2542
Epoch 94/100
107327/107327 [==============================] - 3s 30us/sample - loss: 0.1233 - val_loss: 0.2354
Epoch 95/100
107327/107327 [==============================] - 3s 28us/sample - loss: 0.1093 - val_loss: 0.2262
Epoch 96/100
107327/107327 [==============================] - 3s 29us/sample - loss: 0.1088 - val_loss: 0.2235
Epoch 97/100
107327/107327 [==============================] - 3s 28us/sample - loss: 0.1069 - val_loss: 0.2134
Epoch 98/100
107327/107327 [==============================] - 3s 29us/sample - loss: 0.1079 - val_loss: 0.2339
Epoch 99/100
107327/107327 [==============================] - 3s 28us/sample - loss: 0.1113 - val_loss: 0.2222
Epoch 100/100
107327/107327 [==============================] - 3s 27us/sample - loss: 0.1271 - val_loss: 0.2252
-1.4906527135074825
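The value printed after each coupling type's training appears to be the natural log of that type's final validation MAE, i.e. the per-type term of the competition's group-mean-log-MAE metric (exp(-1.4907) ≈ 0.225, which matches the last val_loss above). A minimal sketch of that quantity, assuming MAE is the validation loss; the helper name is illustrative and not part of the notebook:

import numpy as np
from sklearn.metrics import mean_absolute_error

def type_log_mae(y_true, y_pred):
    # log of the mean absolute error for a single coupling type (lower is better)
    return np.log(mean_absolute_error(y_true, y_pred))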
Training 2JHC
Train on 1026606 samples, validate on 114068 samples
Epoch 1/100
1026606/1026606 [==============================] - 32s 31us/sample - loss: 1.2277 - val_loss: 1.5710
... (epochs 2-99 omitted)
Epoch 100/100
1026606/1026606 [==============================] - 27s 26us/sample - loss: 0.1274 - val_loss: 0.2387
-1.432343974372237
Training 3JHH
Train on 531549 samples, validate on 59062 samples
Epoch 1/100
531549/531549 [==============================] - 16s 30us/sample - loss: 3.9098 - val_loss: 2.2347
... (epochs 2-99 omitted)
Epoch 100/100
531549/531549 [==============================] - 15s 28us/sample - loss: 0.1193 - val_loss: 0.2053
-1.5834044906774032
Training 3JHC
Train on 1359341 samples, validate on 151038 samples
Epoch 1/100
1359341/1359341 [==============================] - 37s 27us/sample - loss: 1.7776 - val_loss: 1.1553
... (epochs 2-99 omitted)
Epoch 100/100
1359341/1359341 [==============================] - 40s 30us/sample - loss: 0.1250 - val_loss: 0.2415
-1.4210624837773993
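For reference, the competition leaderboard score is the mean of these per-type log-MAE values over all coupling types, so the numbers printed after each block can be averaged directly. A short sketch under that assumption (function name is illustrative):

import numpy as np

def competition_score(per_type_log_mae):
    # mean over coupling types of log(MAE); lower (more negative) is better
    return float(np.mean(list(per_type_log_mae.values())))

# e.g. with the three types named so far:
# competition_score({'2JHC': -1.4323, '3JHH': -1.5834, '3JHC': -1.4211})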
Training 3JHN
Train on 149773 samples, validate on 16642 samples
Epoch 1/100
149773/149773 [==============================] - 6s 40us/sample - loss: 1.3809 - val_loss: 1.0704
Epoch 2/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.6169 - val_loss: 0.7938
Epoch 3/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.4602 - val_loss: 0.7594
Epoch 4/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.3751 - val_loss: 0.6836
Epoch 5/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.3332 - val_loss: 0.6477
Epoch 6/100
149773/149773 [==============================] - 5s 31us/sample - loss: 0.2986 - val_loss: 0.6202
Epoch 7/100
149773/149773 [==============================] - 4s 28us/sample - loss: 0.2757 - val_loss: 0.5971
Epoch 8/100
149773/149773 [==============================] - 5s 35us/sample - loss: 0.2658 - val_loss: 0.6010
Epoch 9/100
149773/149773 [==============================] - 5s 31us/sample - loss: 0.2631 - val_loss: 0.5617
Epoch 10/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.2338 - val_loss: 0.5676
Epoch 11/100
149773/149773 [==============================] - 4s 27us/sample - loss: 0.2177 - val_loss: 0.5537
Epoch 12/100
149773/149773 [==============================] - 5s 33us/sample - loss: 0.2179 - val_loss: 0.5235
Epoch 13/100
149773/149773 [==============================] - 5s 31us/sample - loss: 0.2010 - val_loss: 0.5062
Epoch 14/100
149773/149773 [==============================] - 5s 31us/sample - loss: 0.1930 - val_loss: 0.4815
Epoch 15/100
149773/149773 [==============================] - 4s 29us/sample - loss: 0.1933 - val_loss: 0.4290
Epoch 16/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.1803 - val_loss: 0.4162
Epoch 17/100
149773/149773 [==============================] - 4s 29us/sample - loss: 0.1764 - val_loss: 0.3893
Epoch 18/100
149773/149773 [==============================] - 5s 30us/sample - loss: 0.1728 - val_loss: 0.3796
Epoch 19/100
149773/149773 [==============================] - 4s 28us/sample - loss: 0.1716 - val_loss: 0.3515
Epoch 20/100
149773/149773 [==============================] - 5s 30us/sample - loss: 0.1631 - val_loss: 0.3429
Epoch 21/100
149773/149773 [==============================] - 4s 28us/sample - loss: 0.1572 - val_loss: 0.3053
Epoch 22/100
149773/149773 [==============================] - 5s 30us/sample - loss: 0.1567 - val_loss: 0.2869
Epoch 23/100
149773/149773 [==============================] - 4s 28us/sample - loss: 0.1549 - val_loss: 0.2980
Epoch 24/100
149773/149773 [==============================] - 4s 28us/sample - loss: 0.1536 - val_loss: 0.2613
Epoch 25/100
149773/149773 [==============================] - 4s 29us/sample - loss: 0.1497 - val_loss: 0.2543
Epoch 26/100
149773/149773 [==============================] - 4s 28us/sample - loss: 0.1378 - val_loss: 0.2499
Epoch 27/100
149773/149773 [==============================] - 4s 29us/sample - loss: 0.1412 - val_loss: 0.2229
Epoch 28/100
149773/149773 [==============================] - 4s 27us/sample - loss: 0.1338 - val_loss: 0.2232
Epoch 29/100
149773/149773 [==============================] - 4s 27us/sample - loss: 0.1340 - val_loss: 0.2146
Epoch 30/100
149773/149773 [==============================] - 4s 30us/sample - loss: 0.1373 - val_loss: 0.2216
Epoch 31/100
149773/149773 [==============================] - 4s 28us/sample - loss: 0.1344 - val_loss: 0.2092
Epoch 32/100
149773/149773 [==============================] - 4s 28us/sample - loss: 0.1290 - val_loss: 0.1973
Epoch 33/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.1216 - val_loss: 0.1888
Epoch 34/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.1191 - val_loss: 0.1877
Epoch 35/100
149773/149773 [==============================] - 5s 30us/sample - loss: 0.1190 - val_loss: 0.1987
Epoch 36/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.1185 - val_loss: 0.1922
Epoch 37/100
149773/149773 [==============================] - 5s 35us/sample - loss: 0.1163 - val_loss: 0.1917
Epoch 38/100
149773/149773 [==============================] - 5s 31us/sample - loss: 0.1203 - val_loss: 0.1938
Epoch 39/100
149773/149773 [==============================] - 5s 30us/sample - loss: 0.1139 - val_loss: 0.1765
Epoch 40/100
149773/149773 [==============================] - 4s 27us/sample - loss: 0.1056 - val_loss: 0.1782
Epoch 41/100
149773/149773 [==============================] - 4s 29us/sample - loss: 0.0977 - val_loss: 0.1756
Epoch 42/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.1064 - val_loss: 0.1909
Epoch 43/100
149773/149773 [==============================] - 4s 30us/sample - loss: 0.1108 - val_loss: 0.1887
Epoch 44/100
149773/149773 [==============================] - 5s 30us/sample - loss: 0.1086 - val_loss: 0.1812
Epoch 45/100
149773/149773 [==============================] - 5s 31us/sample - loss: 0.1049 - val_loss: 0.1762
Epoch 46/100
149773/149773 [==============================] - 5s 34us/sample - loss: 0.0961 - val_loss: 0.1741
Epoch 47/100
149773/149773 [==============================] - 5s 33us/sample - loss: 0.1020 - val_loss: 0.1750
Epoch 48/100
149773/149773 [==============================] - 4s 29us/sample - loss: 0.1007 - val_loss: 0.1765
Epoch 49/100
149773/149773 [==============================] - 5s 33us/sample - loss: 0.1005 - val_loss: 0.1833
Epoch 50/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.1047 - val_loss: 0.1849
Epoch 51/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.1017 - val_loss: 0.1758
Epoch 52/100
149773/149773 [==============================] - 5s 33us/sample - loss: 0.0949 - val_loss: 0.1685
Epoch 53/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.0934 - val_loss: 0.1708
Epoch 54/100
149773/149773 [==============================] - 5s 34us/sample - loss: 0.0891 - val_loss: 0.1645
Epoch 55/100
149773/149773 [==============================] - 5s 31us/sample - loss: 0.0958 - val_loss: 0.1666
Epoch 56/100
149773/149773 [==============================] - 5s 34us/sample - loss: 0.0879 - val_loss: 0.1633
Epoch 57/100
149773/149773 [==============================] - 5s 33us/sample - loss: 0.0844 - val_loss: 0.1682
Epoch 58/100
149773/149773 [==============================] - 5s 34us/sample - loss: 0.0941 - val_loss: 0.1745
Epoch 59/100
149773/149773 [==============================] - 5s 33us/sample - loss: 0.0908 - val_loss: 0.1764
Epoch 60/100
149773/149773 [==============================] - 4s 29us/sample - loss: 0.0924 - val_loss: 0.1648
Epoch 61/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.0940 - val_loss: 0.1695
Epoch 62/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.0805 - val_loss: 0.1621
Epoch 63/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.0782 - val_loss: 0.1685
Epoch 64/100
149773/149773 [==============================] - 5s 35us/sample - loss: 0.0955 - val_loss: 0.1761
Epoch 65/100
149773/149773 [==============================] - 4s 28us/sample - loss: 0.0889 - val_loss: 0.1672
Epoch 66/100
149773/149773 [==============================] - 5s 36us/sample - loss: 0.0866 - val_loss: 0.1617
Epoch 67/100
149773/149773 [==============================] - 5s 33us/sample - loss: 0.0888 - val_loss: 0.1639
Epoch 68/100
149773/149773 [==============================] - 5s 30us/sample - loss: 0.0816 - val_loss: 0.1628
Epoch 69/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.0777 - val_loss: 0.1690
Epoch 70/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.0816 - val_loss: 0.1588
Epoch 71/100
149773/149773 [==============================] - 5s 30us/sample - loss: 0.0763 - val_loss: 0.1563
Epoch 72/100
149773/149773 [==============================] - 4s 27us/sample - loss: 0.0768 - val_loss: 0.1684
Epoch 73/100
149773/149773 [==============================] - 5s 31us/sample - loss: 0.0775 - val_loss: 0.1612
Epoch 74/100
149773/149773 [==============================] - 5s 30us/sample - loss: 0.0708 - val_loss: 0.1581
Epoch 75/100
149773/149773 [==============================] - 5s 34us/sample - loss: 0.0804 - val_loss: 0.1655
Epoch 76/100
149773/149773 [==============================] - 4s 30us/sample - loss: 0.0803 - val_loss: 0.1649
Epoch 77/100
149773/149773 [==============================] - 5s 31us/sample - loss: 0.0811 - val_loss: 0.1648
Epoch 78/100
149773/149773 [==============================] - 5s 30us/sample - loss: 0.0835 - val_loss: 0.1730
Epoch 79/100
149773/149773 [==============================] - 6s 37us/sample - loss: 0.0857 - val_loss: 0.1589
Epoch 80/100
149773/149773 [==============================] - 5s 33us/sample - loss: 0.0769 - val_loss: 0.1631
Epoch 81/100
149773/149773 [==============================] - 5s 34us/sample - loss: 0.0726 - val_loss: 0.1623
Epoch 82/100
149773/149773 [==============================] - 5s 30us/sample - loss: 0.0741 - val_loss: 0.1595
Epoch 83/100
149773/149773 [==============================] - 5s 31us/sample - loss: 0.0728 - val_loss: 0.1581
Epoch 84/100
149773/149773 [==============================] - 5s 31us/sample - loss: 0.0692 - val_loss: 0.1605
Epoch 85/100
149773/149773 [==============================] - 4s 29us/sample - loss: 0.0713 - val_loss: 0.1570
Epoch 86/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.0720 - val_loss: 0.1560
Epoch 87/100
149773/149773 [==============================] - 5s 33us/sample - loss: 0.0743 - val_loss: 0.1572
Epoch 88/100
149773/149773 [==============================] - 5s 36us/sample - loss: 0.0747 - val_loss: 0.1589
Epoch 89/100
149773/149773 [==============================] - 5s 36us/sample - loss: 0.0769 - val_loss: 0.1599
Epoch 90/100
149773/149773 [==============================] - 5s 31us/sample - loss: 0.0676 - val_loss: 0.1534
Epoch 91/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.0666 - val_loss: 0.1576
Epoch 92/100
149773/149773 [==============================] - 5s 36us/sample - loss: 0.0745 - val_loss: 0.1645
Epoch 93/100
149773/149773 [==============================] - 5s 35us/sample - loss: 0.0743 - val_loss: 0.1564
Epoch 94/100
149773/149773 [==============================] - 5s 35us/sample - loss: 0.0749 - val_loss: 0.1597
Epoch 95/100
149773/149773 [==============================] - 5s 33us/sample - loss: 0.0667 - val_loss: 0.1579
Epoch 96/100
149773/149773 [==============================] - 5s 34us/sample - loss: 0.0644 - val_loss: 0.1483
Epoch 97/100
149773/149773 [==============================] - 4s 29us/sample - loss: 0.0672 - val_loss: 0.1560
Epoch 98/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.0682 - val_loss: 0.1561
Epoch 99/100
149773/149773 [==============================] - 5s 32us/sample - loss: 0.0691 - val_loss: 0.1521
Epoch 100/100
149773/149773 [==============================] - 5s 36us/sample - loss: 0.0667 - val_loss: 0.1560
-1.857934672449139
In [14]:
cv_score_total /= len(types)  # average the per-type scores to get the overall CV score
In [15]:
# print the CV score for each coupling type, then the averaged total
for i, mol_type in enumerate(types):
    print(mol_type, ": cv score is ", cv_score[i])
print("total cv score is", cv_score_total)
1JHC : cv score is  -0.3012262679466983
2JHH : cv score is  -1.9525659470029737
1JHN : cv score is  0.6805895605829633
2JHN : cv score is  -1.4906527135074825
2JHC : cv score is  -1.432343974372237
3JHH : cv score is  -1.5834044906774032
3JHC : cv score is  -1.4210624837773993
3JHN : cv score is  -1.857934672449139
total cv score is -1.1698251236437962
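The per-type values printed above are consistent with this competition's evaluation metric: the natural log of the mean absolute error within each coupling type, averaged over the types. A minimal sketch of that per-type score, assuming out-of-fold validation targets and predictions (val_y and val_pred are hypothetical names, not variables from this notebook):

import numpy as np

def log_mae(y_true, y_pred):
    # log of the mean absolute error for one coupling type (lower is better)
    return float(np.log(np.mean(np.abs(np.asarray(y_true) - np.asarray(y_pred)))))

# inside the per-type training loop, something like
#   cv_score.append(log_mae(val_y, val_pred))
# would produce the per-type numbers above, and their mean gives the total CV score.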
In [16]:
# write the test predictions into the sample submission format and save it for upload
submit = pd.read_csv('../inputs/sample_submission.csv')
submit["scalar_coupling_constant"] = test_prediction
submit.to_csv("./submission.csv", index=False)
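Before uploading, a quick sanity check that the prediction vector actually lines up with the sample submission can save a failed submission. A minimal sketch, assuming test_prediction is ordered exactly like the rows of sample_submission.csv:

# hypothetical sanity check: row counts must match and no prediction may be missing
assert len(test_prediction) == len(submit)
assert submit["scalar_coupling_constant"].notnull().all()
print(submit.head())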
