You can download and run the notebook locally or run it with Google Colaboratory:

Download jupyter notebook Run on Colab


Technical: conv-piVAE#

  • This notebook demos conv-piVAE on hippocampus data, implemented based on pi-VAE (Zhou, D., Wei, X. Learning identifiable and interpretable latent models of high-dimensional neural activity using pi-VAE. NeurIPS 2020. https://arxiv.org/abs/2011.04798)

  • This has only been tested on Ubuntu and MacOS

  • Install note: be sure you have cebra, and the demo dependencies, installed to use this notebook:

[ ]:
!pip install --pre 'cebra[datasets,demos]'
[6]:
import sys
import numpy as np
import pandas as pd
import torch
import matplotlib.pyplot as plt
import joblib as jl
from keras.callbacks import ModelCheckpoint
from keras import backend as K

sys.path.insert(0, '..')
import cebra.datasets

sys.path.insert(0, '../third_party/pivae')
import pivae_code.datasets, pivae_code.conv_pi_vae, pivae_code.pi_vae
[7]:
### Set your own seed
# Fix the NumPy RNG so batch splitting and weight initialization are reproducible.
seed = 42
np.random.seed(seed)

Load and generate dataset:#

  • The data will be automatically downloaded into a /data folder.

[9]:
DATA_NAME = 'rat-hippocampus-achilles-3fold-trial-split-0'
offset_right = 5
offset_left = 5

def _call_dataset(offset_right, offset_left, split):
    """Initialize one split of the hippocampus dataset with the given offsets.

    Args:
        offset_right: number of time bins to the right of each sample
            (together with ``offset_left`` this sets the receptive field).
        offset_left: number of time bins to the left of each sample.
        split: dataset split name — 'train', 'valid' or 'test'.

    Returns:
        The initialized ``cebra`` dataset for the requested split.
    """
    dataset = cebra.datasets.init(DATA_NAME, split=split)
    dataset.offset.right = offset_right
    dataset.offset.left = offset_left
    return dataset

# Consistent spacing for the three parallel assignments.
train_set = _call_dataset(offset_right, offset_left, 'train')
valid_set = _call_dataset(offset_right, offset_left, 'valid')
test_set = _call_dataset(offset_right, offset_left, 'test')
100%|██████████| 10.0M/10.0M [00:01<00:00, 9.00MB/s]
Download complete. Dataset saved in 'data/rat_hippocampus/achilles.jl'
[10]:
def make_loader(dataset, batch_size):
    """Build a pi-VAE data generator from a CEBRA dataset.

    Args:
        dataset: a CEBRA dataset exposing ``__getitem__`` (neural data) and
            ``index`` (the continuous labels).
        batch_size: target number of samples per batch.

    Returns:
        Tuple ``(x, u, loader)`` where ``x``/``u`` are lists of neural/label
        batches and ``loader`` is the pi-VAE generator iterating over them.
    """
    def _to_batch_list(x, y, batch_size):
        # Split the full arrays into ~len/batch_size roughly equal batches.
        if x is None or y is None:
            return None, None
        x = x.squeeze()
        if len(x.shape) == 3:
            # Reorder to put the time-window axis before the neuron axis, as
            # expected by conv-piVAE (assumes (time, neurons, window) input —
            # TODO confirm against the dataset's layout).
            x = x.transpose(0, 2, 1)
        # max(1, ...) guards against datasets smaller than one batch, where
        # np.array_split would otherwise be asked for 0 sections and raise.
        x_batch_list = np.array_split(x, max(1, int(len(x) / batch_size)))
        y_batch_list = np.array_split(y, max(1, int(len(y) / batch_size)))
        return x_batch_list, y_batch_list

    x, u = _to_batch_list(dataset[torch.arange(len(dataset))].numpy(),
                          dataset.index.numpy(), batch_size)

    loader = pivae_code.pi_vae.custom_data_generator(x, u)
    return x, u, loader
[11]:
# Build one generator per split; batches of 200 samples each.
batch_size = 200
train_x, train_u, train_loader = make_loader(train_set, batch_size)
valid_x, valid_u, valid_loader = make_loader(valid_set, batch_size)
test_x, test_u, test_loader = make_loader(test_set, batch_size)

Visualize the data#

[12]:
# Side-by-side view of the first 1000 time bins: neural raster (left)
# and the animal's position label (right). Bins are 25 ms, so the tick
# labels convert bin indices to seconds via 0.025 * bin.
fig = plt.figure(figsize=(9, 3))
plt.subplots_adjust(wspace=0.3)

ax = plt.subplot(121)
ax.imshow(train_set.neural.numpy()[:1000].T, aspect='auto', cmap='gray_r')
ax.set_ylabel('Neuron #')
ax.set_xlabel('Time [s]')
ax.set_xticks(np.linspace(0, 1000, 5))
ax.set_xticklabels(np.linspace(0, 0.025 * 1000, 5, dtype=int))

ax2 = plt.subplot(122)
ax2.scatter(np.arange(1000), train_set.continuous_index[:1000, 0], c='gray', s=1)
ax2.set_ylabel('Position [m]')
ax2.set_xlabel('Time [s]')
ax2.set_xticks(np.linspace(0, 1000, 5))
ax2.set_xticklabels(np.linspace(0, 0.025 * 1000, 5, dtype=int))
plt.show()
../_images/demo_notebooks_Demo_conv-pivae_11_0.png

Define and train a conv-piVAE model#

  • Note that conv-piVAE uses a receptive field of 10 time bins (250 ms), while piVAE uses 1 time bin (25 ms).

  • If training takes too long, use fewer training epochs (the loss usually converges around ~300 epochs)

[13]:
# Construct the conv-piVAE model (Poisson observation model, continuous labels).
conv_pivae = pivae_code.conv_pi_vae.conv_vae_mdl(
                dim_x=train_set.neural.shape[1],  # input dim = number of recorded neurons
                dim_z=2,          # latent embedding dimension
                dim_u=3,          # label dimension (presumably position + direction — TODO confirm)
                time_window=10,   # 10 time bins of 25 ms = 250 ms receptive field
                gen_nodes=60,     # hidden units per generator layer
                n_blk=2,          # number of flow blocks
                mdl="poisson",    # Poisson likelihood for spike-count data
                disc=False,       # labels are continuous, not discrete
                learning_rate=0.00025)
2022-10-03 14:33:40.035925: W tensorflow/stream_executor/platform/default/dso_loader.cc:55] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
2022-10-03 14:33:40.035974: E tensorflow/stream_executor/cuda/cuda_driver.cc:313] failed call to cuInit: UNKNOWN ERROR (303)
2022-10-03 14:33:40.036015: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (stes-t14s): /proc/driver/nvidia/version does not exist
2022-10-03 14:33:40.036392: I tensorflow/core/platform/cpu_feature_guard.cc:143] Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX2 FMA
2022-10-03 14:33:40.051370: I tensorflow/core/platform/profile_utils/cpu_utils.cc:102] CPU Frequency: 1697430000 Hz
2022-10-03 14:33:40.053336: I tensorflow/compiler/xla/service/service.cc:168] XLA service 0x7fd814000b70 initialized for platform Host (this does not guarantee that XLA will be used). Devices:
2022-10-03 14:33:40.053404: I tensorflow/compiler/xla/service/service.cc:176]   StreamExecutor device (0): Host, Default Version
Model: "vae"
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to
==================================================================================================
input_1 (InputLayer)            (None, 10, 120)      0
__________________________________________________________________________________________________
input_3 (InputLayer)            (None, 3)            0
__________________________________________________________________________________________________
encoder (Model)                 [(None, 2), (None, 2 35020       input_1[0][0]
                                                                 input_3[0][0]
__________________________________________________________________________________________________
decoder (Model)                 (None, 10, 120)      2977794     encoder[1][2]
==================================================================================================
Total params: 3,012,814
Trainable params: 3,012,814
Non-trainable params: 0
__________________________________________________________________________________________________
None
/home/stes/.conda/envs/cebra-full/lib/python3.8/site-packages/keras/engine/training_utils.py:816: UserWarning: Output encoder missing from loss dictionary. We assume this was done on purpose. The fit and evaluate APIs will not be expecting any data to be passed to encoder.
  warnings.warn(
/home/stes/.conda/envs/cebra-full/lib/python3.8/site-packages/keras/engine/training_utils.py:816: UserWarning: Output decoder missing from loss dictionary. We assume this was done on purpose. The fit and evaluate APIs will not be expecting any data to be passed to decoder.
  warnings.warn(
[14]:
model_chk_path = 'conv_pivae_model0210.h5'
mcp = ModelCheckpoint(model_chk_path, monitor="val_loss", save_best_only=True, save_weights_only=True)
s_n = conv_pivae.fit_generator(train_loader,
              steps_per_epoch=len(train_x), epochs=600,
              verbose=1,
              validation_data = valid_loader,
              validation_steps = len(valid_x), callbacks=[mcp])
Epoch 1/600
34/34 [==============================] - 3s 100ms/step - loss: 509.8190 - val_loss: 212.0420
Epoch 2/600
34/34 [==============================] - 2s 46ms/step - loss: 212.5554 - val_loss: 177.2430
Epoch 3/600
34/34 [==============================] - 2s 45ms/step - loss: 192.3327 - val_loss: 165.8879
Epoch 4/600
34/34 [==============================] - 1s 42ms/step - loss: 186.7391 - val_loss: 164.7504
Epoch 5/600
34/34 [==============================] - 1s 43ms/step - loss: 186.4515 - val_loss: 164.5008
Epoch 6/600
34/34 [==============================] - 1s 43ms/step - loss: 186.4154 - val_loss: 164.6330
Epoch 7/600
34/34 [==============================] - 1s 43ms/step - loss: 186.4084 - val_loss: 164.6232
Epoch 8/600
34/34 [==============================] - 1s 44ms/step - loss: 186.3944 - val_loss: 164.5316
Epoch 9/600
34/34 [==============================] - 2s 46ms/step - loss: 186.2701 - val_loss: 164.2339
Epoch 10/600
34/34 [==============================] - 2s 46ms/step - loss: 186.1861 - val_loss: 164.2754
Epoch 11/600
34/34 [==============================] - 1s 44ms/step - loss: 186.1450 - val_loss: 164.1155
Epoch 12/600
34/34 [==============================] - 1s 43ms/step - loss: 186.0446 - val_loss: 164.0661
Epoch 13/600
34/34 [==============================] - 1s 43ms/step - loss: 185.9379 - val_loss: 164.1499
Epoch 14/600
34/34 [==============================] - 1s 43ms/step - loss: 185.8697 - val_loss: 164.0963
Epoch 15/600
34/34 [==============================] - 2s 45ms/step - loss: 185.6543 - val_loss: 163.8571
Epoch 16/600
34/34 [==============================] - 2s 45ms/step - loss: 185.2769 - val_loss: 163.7182
Epoch 17/600
34/34 [==============================] - 1s 43ms/step - loss: 184.5498 - val_loss: 163.2224
Epoch 18/600
34/34 [==============================] - 2s 46ms/step - loss: 182.9443 - val_loss: 163.1278
Epoch 19/600
34/34 [==============================] - 2s 47ms/step - loss: 180.7850 - val_loss: 161.5421
Epoch 20/600
34/34 [==============================] - 2s 44ms/step - loss: 178.7081 - val_loss: 159.5752
Epoch 21/600
34/34 [==============================] - 1s 44ms/step - loss: 176.8280 - val_loss: 158.1187
Epoch 22/600
34/34 [==============================] - 1s 43ms/step - loss: 174.9552 - val_loss: 155.8834
Epoch 23/600
34/34 [==============================] - 1s 44ms/step - loss: 173.0196 - val_loss: 152.2670
Epoch 24/600
34/34 [==============================] - 2s 48ms/step - loss: 170.3790 - val_loss: 149.4572
Epoch 25/600
34/34 [==============================] - 2s 45ms/step - loss: 168.4063 - val_loss: 147.2516
Epoch 26/600
34/34 [==============================] - 2s 46ms/step - loss: 166.8510 - val_loss: 144.7695
Epoch 27/600
34/34 [==============================] - 1s 44ms/step - loss: 165.3624 - val_loss: 143.2796
Epoch 28/600
34/34 [==============================] - 1s 43ms/step - loss: 164.1374 - val_loss: 142.2919
Epoch 29/600
34/34 [==============================] - 1s 44ms/step - loss: 163.0654 - val_loss: 141.1287
Epoch 30/600
34/34 [==============================] - 1s 43ms/step - loss: 162.3048 - val_loss: 140.3004
Epoch 31/600
34/34 [==============================] - 1s 44ms/step - loss: 161.2343 - val_loss: 139.6303
Epoch 32/600
34/34 [==============================] - 1s 43ms/step - loss: 160.4963 - val_loss: 138.8364
Epoch 33/600
34/34 [==============================] - 1s 43ms/step - loss: 159.7594 - val_loss: 137.9424
Epoch 34/600
34/34 [==============================] - 1s 44ms/step - loss: 159.0830 - val_loss: 137.6932
Epoch 35/600
34/34 [==============================] - 1s 43ms/step - loss: 158.5559 - val_loss: 137.3128
Epoch 36/600
34/34 [==============================] - 1s 43ms/step - loss: 157.8401 - val_loss: 136.6159
Epoch 37/600
34/34 [==============================] - 1s 44ms/step - loss: 157.4032 - val_loss: 136.8394
Epoch 38/600
34/34 [==============================] - 1s 43ms/step - loss: 156.8842 - val_loss: 136.2114
Epoch 39/600
34/34 [==============================] - 1s 43ms/step - loss: 156.2586 - val_loss: 135.7509
Epoch 40/600
34/34 [==============================] - 1s 43ms/step - loss: 155.9265 - val_loss: 135.3052
Epoch 41/600
34/34 [==============================] - 1s 43ms/step - loss: 155.5301 - val_loss: 135.3062
Epoch 42/600
34/34 [==============================] - 1s 43ms/step - loss: 155.1858 - val_loss: 134.9559
Epoch 43/600
34/34 [==============================] - 1s 43ms/step - loss: 154.7895 - val_loss: 135.1275
Epoch 44/600
34/34 [==============================] - 1s 43ms/step - loss: 154.4883 - val_loss: 135.5404
Epoch 45/600
34/34 [==============================] - 1s 43ms/step - loss: 154.2749 - val_loss: 135.3865
Epoch 46/600
34/34 [==============================] - 1s 43ms/step - loss: 153.9525 - val_loss: 135.5557
Epoch 47/600
34/34 [==============================] - 2s 44ms/step - loss: 153.7735 - val_loss: 134.6591
Epoch 48/600
34/34 [==============================] - 1s 43ms/step - loss: 153.4866 - val_loss: 134.1797
Epoch 49/600
34/34 [==============================] - 1s 43ms/step - loss: 153.2572 - val_loss: 134.4760
Epoch 50/600
34/34 [==============================] - 1s 43ms/step - loss: 153.0343 - val_loss: 134.2853
Epoch 51/600
34/34 [==============================] - 1s 42ms/step - loss: 152.7185 - val_loss: 134.2296
Epoch 52/600
34/34 [==============================] - 1s 43ms/step - loss: 152.7369 - val_loss: 134.3609
Epoch 53/600
34/34 [==============================] - 1s 43ms/step - loss: 152.4475 - val_loss: 133.9866
Epoch 54/600
34/34 [==============================] - 2s 47ms/step - loss: 152.3859 - val_loss: 134.5579
Epoch 55/600
34/34 [==============================] - 2s 51ms/step - loss: 152.2696 - val_loss: 134.0202
Epoch 56/600
34/34 [==============================] - 2s 46ms/step - loss: 152.1087 - val_loss: 133.5061
Epoch 57/600
34/34 [==============================] - 2s 53ms/step - loss: 151.8830 - val_loss: 134.3623
Epoch 58/600
34/34 [==============================] - 2s 48ms/step - loss: 151.6815 - val_loss: 133.9002
Epoch 59/600
34/34 [==============================] - 2s 46ms/step - loss: 151.6416 - val_loss: 134.3605
Epoch 60/600
34/34 [==============================] - 2s 53ms/step - loss: 151.5324 - val_loss: 133.6862
Epoch 61/600
34/34 [==============================] - 2s 51ms/step - loss: 151.2587 - val_loss: 133.8059
Epoch 62/600
34/34 [==============================] - 2s 46ms/step - loss: 151.2168 - val_loss: 134.0073
Epoch 63/600
34/34 [==============================] - 2s 45ms/step - loss: 151.2193 - val_loss: 134.2295
Epoch 64/600
34/34 [==============================] - 2s 45ms/step - loss: 150.9878 - val_loss: 134.1403
Epoch 65/600
34/34 [==============================] - 2s 45ms/step - loss: 150.8001 - val_loss: 134.5686
Epoch 66/600
34/34 [==============================] - 2s 45ms/step - loss: 150.9648 - val_loss: 134.7472
Epoch 67/600
34/34 [==============================] - 2s 45ms/step - loss: 150.8419 - val_loss: 135.1183
Epoch 68/600
34/34 [==============================] - 2s 44ms/step - loss: 151.0243 - val_loss: 135.6888
Epoch 69/600
34/34 [==============================] - 2s 45ms/step - loss: 150.8993 - val_loss: 135.2122
Epoch 70/600
34/34 [==============================] - 2s 45ms/step - loss: 151.0090 - val_loss: 135.0504
Epoch 71/600
34/34 [==============================] - 1s 43ms/step - loss: 150.9658 - val_loss: 134.8042
Epoch 72/600
34/34 [==============================] - 1s 43ms/step - loss: 150.7124 - val_loss: 134.5691
Epoch 73/600
34/34 [==============================] - 1s 43ms/step - loss: 150.4768 - val_loss: 134.2057
Epoch 74/600
34/34 [==============================] - 1s 42ms/step - loss: 150.3804 - val_loss: 134.2023
Epoch 75/600
34/34 [==============================] - 1s 43ms/step - loss: 150.2365 - val_loss: 133.7696
Epoch 76/600
34/34 [==============================] - 1s 43ms/step - loss: 150.1960 - val_loss: 133.8051
Epoch 77/600
34/34 [==============================] - 1s 44ms/step - loss: 149.9340 - val_loss: 133.0406
Epoch 78/600
34/34 [==============================] - 2s 45ms/step - loss: 149.9740 - val_loss: 132.7862
Epoch 79/600
34/34 [==============================] - 2s 48ms/step - loss: 149.7775 - val_loss: 133.0508
Epoch 80/600
34/34 [==============================] - 2s 44ms/step - loss: 149.6358 - val_loss: 132.8242
Epoch 81/600
34/34 [==============================] - 1s 43ms/step - loss: 149.8333 - val_loss: 132.9957
Epoch 82/600
34/34 [==============================] - 1s 43ms/step - loss: 149.6818 - val_loss: 134.0401
Epoch 83/600
34/34 [==============================] - 1s 43ms/step - loss: 150.2907 - val_loss: 133.8262
Epoch 84/600
34/34 [==============================] - 1s 43ms/step - loss: 150.5345 - val_loss: 134.5123
Epoch 85/600
34/34 [==============================] - 1s 44ms/step - loss: 150.5716 - val_loss: 134.5144
Epoch 86/600
34/34 [==============================] - 1s 43ms/step - loss: 150.1075 - val_loss: 134.8088
Epoch 87/600
34/34 [==============================] - 1s 44ms/step - loss: 149.7634 - val_loss: 134.5372
Epoch 88/600
34/34 [==============================] - 1s 44ms/step - loss: 149.4065 - val_loss: 135.4284
Epoch 89/600
34/34 [==============================] - 1s 44ms/step - loss: 149.3708 - val_loss: 136.1645
Epoch 90/600
34/34 [==============================] - 1s 44ms/step - loss: 149.6762 - val_loss: 137.2781
Epoch 91/600
34/34 [==============================] - 1s 43ms/step - loss: 150.0900 - val_loss: 138.1392
Epoch 92/600
34/34 [==============================] - 1s 43ms/step - loss: 150.3504 - val_loss: 138.2345
Epoch 93/600
34/34 [==============================] - 1s 44ms/step - loss: 150.7532 - val_loss: 137.0063
Epoch 94/600
34/34 [==============================] - 2s 44ms/step - loss: 150.8895 - val_loss: 136.1580
Epoch 95/600
34/34 [==============================] - 1s 43ms/step - loss: 150.7110 - val_loss: 136.4608
Epoch 96/600
34/34 [==============================] - 1s 42ms/step - loss: 150.3667 - val_loss: 135.7379
Epoch 97/600
34/34 [==============================] - 1s 42ms/step - loss: 149.8033 - val_loss: 134.6018
Epoch 98/600
34/34 [==============================] - 1s 42ms/step - loss: 149.4651 - val_loss: 134.3129
Epoch 99/600
34/34 [==============================] - 1s 43ms/step - loss: 149.1929 - val_loss: 133.8064
Epoch 100/600
34/34 [==============================] - 1s 43ms/step - loss: 148.7446 - val_loss: 134.2913
Epoch 101/600
34/34 [==============================] - 1s 43ms/step - loss: 148.6622 - val_loss: 134.1206
Epoch 102/600
34/34 [==============================] - 1s 43ms/step - loss: 148.5265 - val_loss: 134.1020
Epoch 103/600
34/34 [==============================] - 1s 43ms/step - loss: 148.4305 - val_loss: 133.8481
Epoch 104/600
34/34 [==============================] - 1s 43ms/step - loss: 148.1615 - val_loss: 134.0582
Epoch 105/600
34/34 [==============================] - 1s 43ms/step - loss: 148.1733 - val_loss: 133.9091
Epoch 106/600
34/34 [==============================] - 1s 44ms/step - loss: 148.0293 - val_loss: 134.1138
Epoch 107/600
34/34 [==============================] - 1s 43ms/step - loss: 147.8826 - val_loss: 133.7751
Epoch 108/600
34/34 [==============================] - 2s 44ms/step - loss: 147.7907 - val_loss: 133.8350
Epoch 109/600
34/34 [==============================] - 1s 43ms/step - loss: 147.7343 - val_loss: 134.1620
Epoch 110/600
34/34 [==============================] - 1s 43ms/step - loss: 147.6035 - val_loss: 134.0674
Epoch 111/600
34/34 [==============================] - 1s 43ms/step - loss: 147.4339 - val_loss: 135.0539
Epoch 112/600
34/34 [==============================] - 1s 42ms/step - loss: 147.4112 - val_loss: 134.3374
Epoch 113/600
34/34 [==============================] - 1s 43ms/step - loss: 147.3606 - val_loss: 134.2066
Epoch 114/600
34/34 [==============================] - 2s 46ms/step - loss: 147.1205 - val_loss: 134.3514
Epoch 115/600
34/34 [==============================] - 1s 43ms/step - loss: 147.1248 - val_loss: 134.0115
Epoch 116/600
34/34 [==============================] - 1s 43ms/step - loss: 147.1305 - val_loss: 134.8344
Epoch 117/600
34/34 [==============================] - 1s 43ms/step - loss: 147.1765 - val_loss: 134.5315
Epoch 118/600
34/34 [==============================] - 1s 43ms/step - loss: 146.9575 - val_loss: 134.0187
Epoch 119/600
34/34 [==============================] - 1s 43ms/step - loss: 147.0394 - val_loss: 134.8788
Epoch 120/600
34/34 [==============================] - 1s 43ms/step - loss: 146.9976 - val_loss: 134.4555
Epoch 121/600
34/34 [==============================] - 1s 42ms/step - loss: 146.9511 - val_loss: 134.5541
Epoch 122/600
34/34 [==============================] - 1s 43ms/step - loss: 146.7345 - val_loss: 135.1024
Epoch 123/600
34/34 [==============================] - 1s 42ms/step - loss: 146.6790 - val_loss: 134.2958
Epoch 124/600
34/34 [==============================] - 1s 43ms/step - loss: 146.7378 - val_loss: 134.4119
Epoch 125/600
34/34 [==============================] - 1s 43ms/step - loss: 146.7241 - val_loss: 134.5348
Epoch 126/600
34/34 [==============================] - 1s 43ms/step - loss: 146.6240 - val_loss: 134.8545
Epoch 127/600
34/34 [==============================] - 1s 42ms/step - loss: 146.5561 - val_loss: 134.5120
Epoch 128/600
34/34 [==============================] - 1s 42ms/step - loss: 146.5639 - val_loss: 134.5121
Epoch 129/600
34/34 [==============================] - 1s 43ms/step - loss: 146.6035 - val_loss: 134.0658
Epoch 130/600
34/34 [==============================] - 1s 42ms/step - loss: 146.5192 - val_loss: 134.4457
Epoch 131/600
34/34 [==============================] - 1s 42ms/step - loss: 146.4038 - val_loss: 135.1496
Epoch 132/600
34/34 [==============================] - 1s 42ms/step - loss: 146.3382 - val_loss: 135.5483
Epoch 133/600
34/34 [==============================] - 1s 43ms/step - loss: 146.2289 - val_loss: 134.7906
Epoch 134/600
34/34 [==============================] - 1s 43ms/step - loss: 146.0894 - val_loss: 135.8072
Epoch 135/600
34/34 [==============================] - 2s 47ms/step - loss: 146.2353 - val_loss: 135.4010
Epoch 136/600
34/34 [==============================] - 1s 44ms/step - loss: 146.1467 - val_loss: 136.2479
Epoch 137/600
34/34 [==============================] - 2s 45ms/step - loss: 146.2748 - val_loss: 136.3566
Epoch 138/600
34/34 [==============================] - 2s 47ms/step - loss: 146.3802 - val_loss: 135.8375
Epoch 139/600
34/34 [==============================] - 1s 44ms/step - loss: 146.4157 - val_loss: 135.9337
Epoch 140/600
34/34 [==============================] - 1s 44ms/step - loss: 146.1987 - val_loss: 135.6012
Epoch 141/600
34/34 [==============================] - 1s 42ms/step - loss: 146.1422 - val_loss: 135.9419
Epoch 142/600
34/34 [==============================] - 1s 43ms/step - loss: 146.2373 - val_loss: 135.9483
Epoch 143/600
34/34 [==============================] - 1s 43ms/step - loss: 146.0995 - val_loss: 134.8925
Epoch 144/600
34/34 [==============================] - 1s 43ms/step - loss: 146.1627 - val_loss: 134.7436
Epoch 145/600
34/34 [==============================] - 1s 42ms/step - loss: 145.9768 - val_loss: 135.7968
Epoch 146/600
34/34 [==============================] - 1s 43ms/step - loss: 146.1534 - val_loss: 135.8653
Epoch 147/600
34/34 [==============================] - 1s 42ms/step - loss: 146.0682 - val_loss: 136.0443
Epoch 148/600
34/34 [==============================] - 1s 43ms/step - loss: 146.0625 - val_loss: 136.3753
Epoch 149/600
34/34 [==============================] - 1s 42ms/step - loss: 146.3361 - val_loss: 136.1194
Epoch 150/600
34/34 [==============================] - 1s 42ms/step - loss: 146.5520 - val_loss: 136.1011
Epoch 151/600
34/34 [==============================] - 1s 42ms/step - loss: 146.5306 - val_loss: 135.5692
Epoch 152/600
34/34 [==============================] - 1s 42ms/step - loss: 146.6893 - val_loss: 137.1896
Epoch 153/600
34/34 [==============================] - 1s 42ms/step - loss: 147.5535 - val_loss: 137.2108
Epoch 154/600
34/34 [==============================] - 1s 42ms/step - loss: 147.5728 - val_loss: 137.6411
Epoch 155/600
34/34 [==============================] - 1s 43ms/step - loss: 147.4922 - val_loss: 135.8203
Epoch 156/600
34/34 [==============================] - 1s 44ms/step - loss: 147.6828 - val_loss: 135.7719
Epoch 157/600
34/34 [==============================] - 2s 46ms/step - loss: 146.9720 - val_loss: 135.8134
Epoch 158/600
34/34 [==============================] - 2s 46ms/step - loss: 147.0724 - val_loss: 134.6013
Epoch 159/600
34/34 [==============================] - 1s 43ms/step - loss: 147.1313 - val_loss: 135.1867
Epoch 160/600
34/34 [==============================] - 2s 45ms/step - loss: 146.7636 - val_loss: 134.9753
Epoch 161/600
34/34 [==============================] - 1s 43ms/step - loss: 146.4711 - val_loss: 134.7352
Epoch 162/600
34/34 [==============================] - 1s 42ms/step - loss: 146.2915 - val_loss: 134.1578
Epoch 163/600
34/34 [==============================] - 1s 42ms/step - loss: 146.2960 - val_loss: 135.5761
Epoch 164/600
34/34 [==============================] - 1s 43ms/step - loss: 146.3947 - val_loss: 135.9062
Epoch 165/600
34/34 [==============================] - 1s 44ms/step - loss: 146.5375 - val_loss: 135.6568
Epoch 166/600
34/34 [==============================] - 1s 44ms/step - loss: 146.2053 - val_loss: 134.9943
Epoch 167/600
34/34 [==============================] - 1s 43ms/step - loss: 146.5168 - val_loss: 134.9169
Epoch 168/600
34/34 [==============================] - 1s 43ms/step - loss: 146.2641 - val_loss: 135.5483
Epoch 169/600
34/34 [==============================] - 1s 42ms/step - loss: 146.3167 - val_loss: 134.6131
Epoch 170/600
34/34 [==============================] - 1s 42ms/step - loss: 146.5993 - val_loss: 135.3062
Epoch 171/600
34/34 [==============================] - 1s 42ms/step - loss: 146.6814 - val_loss: 134.8348
Epoch 172/600
34/34 [==============================] - 1s 42ms/step - loss: 146.5308 - val_loss: 134.7778
Epoch 173/600
34/34 [==============================] - 1s 43ms/step - loss: 146.4735 - val_loss: 135.4662
Epoch 174/600
34/34 [==============================] - 1s 43ms/step - loss: 146.8892 - val_loss: 135.2315
Epoch 175/600
34/34 [==============================] - 1s 44ms/step - loss: 147.3326 - val_loss: 134.1582
Epoch 176/600
34/34 [==============================] - 1s 42ms/step - loss: 146.8464 - val_loss: 134.4517
Epoch 177/600
34/34 [==============================] - 1s 42ms/step - loss: 146.2581 - val_loss: 134.2401
Epoch 178/600
34/34 [==============================] - 1s 42ms/step - loss: 145.8193 - val_loss: 134.6712
Epoch 179/600
34/34 [==============================] - 1s 42ms/step - loss: 145.2207 - val_loss: 134.4761
Epoch 180/600
34/34 [==============================] - 1s 43ms/step - loss: 145.2198 - val_loss: 134.5837
Epoch 181/600
34/34 [==============================] - 1s 43ms/step - loss: 145.1541 - val_loss: 135.3448
Epoch 182/600
34/34 [==============================] - 1s 42ms/step - loss: 144.9390 - val_loss: 134.8759
Epoch 183/600
34/34 [==============================] - 1s 43ms/step - loss: 144.8677 - val_loss: 135.3060
Epoch 184/600
34/34 [==============================] - 1s 44ms/step - loss: 144.7729 - val_loss: 135.1714
Epoch 185/600
34/34 [==============================] - 2s 47ms/step - loss: 144.8152 - val_loss: 135.3699
Epoch 186/600
34/34 [==============================] - 2s 49ms/step - loss: 144.5421 - val_loss: 135.6850
Epoch 187/600
34/34 [==============================] - 2s 46ms/step - loss: 144.5396 - val_loss: 135.3119
Epoch 188/600
34/34 [==============================] - 2s 45ms/step - loss: 144.4535 - val_loss: 136.5559
Epoch 189/600
34/34 [==============================] - 2s 45ms/step - loss: 144.5904 - val_loss: 135.6797
Epoch 190/600
34/34 [==============================] - 2s 44ms/step - loss: 144.3484 - val_loss: 135.1144
Epoch 191/600
34/34 [==============================] - 2s 44ms/step - loss: 144.5467 - val_loss: 134.6234
Epoch 192/600
34/34 [==============================] - 2s 45ms/step - loss: 144.3668 - val_loss: 134.7827
Epoch 193/600
34/34 [==============================] - 2s 45ms/step - loss: 144.4539 - val_loss: 134.6865
Epoch 194/600
34/34 [==============================] - 2s 46ms/step - loss: 144.5466 - val_loss: 135.7621
Epoch 195/600
34/34 [==============================] - 2s 44ms/step - loss: 144.5341 - val_loss: 135.5948
Epoch 196/600
34/34 [==============================] - 2s 44ms/step - loss: 144.5403 - val_loss: 135.0580
Epoch 197/600
34/34 [==============================] - 2s 46ms/step - loss: 144.3948 - val_loss: 136.4899
Epoch 198/600
34/34 [==============================] - 2s 45ms/step - loss: 144.4896 - val_loss: 135.7660
Epoch 199/600
34/34 [==============================] - 2s 45ms/step - loss: 144.4548 - val_loss: 135.9334
Epoch 200/600
34/34 [==============================] - 2s 45ms/step - loss: 144.6068 - val_loss: 134.6177
Epoch 201/600
34/34 [==============================] - 2s 46ms/step - loss: 144.5601 - val_loss: 134.9823
Epoch 202/600
34/34 [==============================] - 1s 43ms/step - loss: 144.3723 - val_loss: 134.4446
Epoch 203/600
34/34 [==============================] - 1s 43ms/step - loss: 144.4205 - val_loss: 134.5595
Epoch 204/600
34/34 [==============================] - 2s 46ms/step - loss: 144.5790 - val_loss: 133.7298
Epoch 205/600
34/34 [==============================] - 2s 48ms/step - loss: 144.6660 - val_loss: 134.5581
Epoch 206/600
34/34 [==============================] - 2s 48ms/step - loss: 145.0567 - val_loss: 133.8724
Epoch 207/600
34/34 [==============================] - 2s 48ms/step - loss: 145.4897 - val_loss: 134.3089
Epoch 208/600
34/34 [==============================] - 1s 43ms/step - loss: 146.1739 - val_loss: 133.9886
Epoch 209/600
34/34 [==============================] - 2s 44ms/step - loss: 146.3205 - val_loss: 134.8599
Epoch 210/600
34/34 [==============================] - 2s 47ms/step - loss: 146.6813 - val_loss: 136.4124
Epoch 211/600
34/34 [==============================] - 2s 47ms/step - loss: 144.8540 - val_loss: 136.0217
Epoch 212/600
34/34 [==============================] - 2s 48ms/step - loss: 144.3648 - val_loss: 135.8691
Epoch 213/600
34/34 [==============================] - 1s 43ms/step - loss: 143.9125 - val_loss: 135.3004
Epoch 214/600
34/34 [==============================] - 2s 47ms/step - loss: 143.6579 - val_loss: 134.8514
Epoch 215/600
34/34 [==============================] - 2s 46ms/step - loss: 143.5590 - val_loss: 134.4572
Epoch 216/600
34/34 [==============================] - 1s 42ms/step - loss: 143.6574 - val_loss: 135.5940
Epoch 217/600
34/34 [==============================] - 1s 44ms/step - loss: 143.4222 - val_loss: 135.3681
Epoch 218/600
34/34 [==============================] - 1s 44ms/step - loss: 143.2499 - val_loss: 135.4474
Epoch 219/600
34/34 [==============================] - 1s 43ms/step - loss: 143.2509 - val_loss: 135.5825
Epoch 220/600
34/34 [==============================] - 2s 47ms/step - loss: 143.2251 - val_loss: 135.9847
Epoch 221/600
34/34 [==============================] - 2s 45ms/step - loss: 143.2830 - val_loss: 136.5602
Epoch 222/600
34/34 [==============================] - 1s 42ms/step - loss: 143.0344 - val_loss: 136.1859
Epoch 223/600
34/34 [==============================] - 1s 43ms/step - loss: 142.9773 - val_loss: 136.5076
Epoch 224/600
34/34 [==============================] - 1s 44ms/step - loss: 142.8994 - val_loss: 136.0695
Epoch 225/600
34/34 [==============================] - 1s 42ms/step - loss: 142.7522 - val_loss: 136.2886
Epoch 226/600
34/34 [==============================] - 1s 43ms/step - loss: 142.6130 - val_loss: 135.9650
Epoch 227/600
34/34 [==============================] - 1s 43ms/step - loss: 142.5688 - val_loss: 135.2283
Epoch 228/600
34/34 [==============================] - 1s 42ms/step - loss: 142.3855 - val_loss: 135.1360
Epoch 229/600
34/34 [==============================] - 2s 45ms/step - loss: 142.3513 - val_loss: 133.9171
Epoch 230/600
34/34 [==============================] - 1s 44ms/step - loss: 142.4714 - val_loss: 134.2395
Epoch 231/600
34/34 [==============================] - 1s 42ms/step - loss: 142.4052 - val_loss: 134.4562
Epoch 232/600
34/34 [==============================] - 1s 43ms/step - loss: 142.2832 - val_loss: 135.1044
Epoch 233/600
34/34 [==============================] - 1s 42ms/step - loss: 142.3853 - val_loss: 134.1011
Epoch 234/600
34/34 [==============================] - 2s 44ms/step - loss: 142.3228 - val_loss: 134.8532
Epoch 235/600
34/34 [==============================] - 1s 44ms/step - loss: 142.2588 - val_loss: 134.8331
Epoch 236/600
34/34 [==============================] - 1s 43ms/step - loss: 142.6255 - val_loss: 134.5482
Epoch 237/600
34/34 [==============================] - 1s 43ms/step - loss: 142.5364 - val_loss: 134.9104
Epoch 238/600
34/34 [==============================] - 2s 44ms/step - loss: 142.6134 - val_loss: 134.7059
Epoch 239/600
34/34 [==============================] - 2s 47ms/step - loss: 142.6284 - val_loss: 134.9820
Epoch 240/600
34/34 [==============================] - 2s 48ms/step - loss: 142.7501 - val_loss: 135.5601
Epoch 241/600
34/34 [==============================] - 2s 50ms/step - loss: 142.8515 - val_loss: 135.3673
Epoch 242/600
34/34 [==============================] - 2s 47ms/step - loss: 142.7381 - val_loss: 135.5410
Epoch 243/600
34/34 [==============================] - 2s 44ms/step - loss: 142.8294 - val_loss: 136.5202
Epoch 244/600
34/34 [==============================] - 2s 47ms/step - loss: 142.5131 - val_loss: 135.9022
Epoch 245/600
34/34 [==============================] - 1s 44ms/step - loss: 142.5297 - val_loss: 134.4347
Epoch 246/600
34/34 [==============================] - 2s 46ms/step - loss: 142.2861 - val_loss: 134.7914
Epoch 247/600
34/34 [==============================] - 2s 46ms/step - loss: 142.4466 - val_loss: 133.8967
Epoch 248/600
34/34 [==============================] - 1s 44ms/step - loss: 142.3344 - val_loss: 135.3045
Epoch 249/600
34/34 [==============================] - 1s 42ms/step - loss: 142.2265 - val_loss: 135.4135
Epoch 250/600
34/34 [==============================] - 1s 42ms/step - loss: 142.2546 - val_loss: 135.9919
Epoch 251/600
34/34 [==============================] - 1s 43ms/step - loss: 142.3789 - val_loss: 135.5265
Epoch 252/600
34/34 [==============================] - 1s 43ms/step - loss: 142.2035 - val_loss: 136.2489
Epoch 253/600
34/34 [==============================] - 1s 43ms/step - loss: 142.6299 - val_loss: 136.7372
Epoch 254/600
34/34 [==============================] - 1s 43ms/step - loss: 142.4244 - val_loss: 136.3056
Epoch 255/600
34/34 [==============================] - 2s 44ms/step - loss: 142.5658 - val_loss: 136.7696
Epoch 256/600
34/34 [==============================] - 2s 44ms/step - loss: 142.3469 - val_loss: 136.9115
Epoch 257/600
34/34 [==============================] - 2s 47ms/step - loss: 142.1947 - val_loss: 137.0952
Epoch 258/600
34/34 [==============================] - 1s 44ms/step - loss: 142.1926 - val_loss: 137.1383
Epoch 259/600
34/34 [==============================] - 2s 45ms/step - loss: 142.2429 - val_loss: 138.1434
Epoch 260/600
34/34 [==============================] - 1s 43ms/step - loss: 142.1464 - val_loss: 138.6059
Epoch 261/600
34/34 [==============================] - 1s 43ms/step - loss: 142.0851 - val_loss: 139.1481
Epoch 262/600
34/34 [==============================] - 1s 44ms/step - loss: 142.1269 - val_loss: 140.3260
Epoch 263/600
34/34 [==============================] - 1s 44ms/step - loss: 142.1768 - val_loss: 139.5362
Epoch 264/600
34/34 [==============================] - 2s 46ms/step - loss: 142.1840 - val_loss: 137.4183
Epoch 265/600
34/34 [==============================] - 2s 46ms/step - loss: 142.0324 - val_loss: 137.1342
Epoch 266/600
34/34 [==============================] - 1s 43ms/step - loss: 141.8350 - val_loss: 137.1655
Epoch 267/600
34/34 [==============================] - 1s 43ms/step - loss: 141.6964 - val_loss: 136.4094
Epoch 268/600
34/34 [==============================] - 1s 43ms/step - loss: 141.6750 - val_loss: 138.3994
Epoch 269/600
34/34 [==============================] - 1s 44ms/step - loss: 141.7526 - val_loss: 137.2778
Epoch 270/600
34/34 [==============================] - 1s 44ms/step - loss: 141.6044 - val_loss: 136.8807
Epoch 271/600
34/34 [==============================] - 1s 44ms/step - loss: 141.7451 - val_loss: 136.2484
Epoch 272/600
34/34 [==============================] - 1s 44ms/step - loss: 141.4683 - val_loss: 136.5553
Epoch 273/600
34/34 [==============================] - 1s 43ms/step - loss: 141.5822 - val_loss: 136.0675
Epoch 274/600
34/34 [==============================] - 1s 43ms/step - loss: 141.4311 - val_loss: 137.1347
Epoch 275/600
34/34 [==============================] - 1s 43ms/step - loss: 141.3773 - val_loss: 137.1355
Epoch 276/600
34/34 [==============================] - 1s 43ms/step - loss: 141.4384 - val_loss: 136.9316
Epoch 277/600
34/34 [==============================] - 1s 43ms/step - loss: 141.4659 - val_loss: 137.7078
Epoch 278/600
34/34 [==============================] - 1s 44ms/step - loss: 141.4812 - val_loss: 137.8552
Epoch 279/600
34/34 [==============================] - 2s 46ms/step - loss: 141.1809 - val_loss: 138.5090
Epoch 280/600
34/34 [==============================] - 1s 44ms/step - loss: 140.9745 - val_loss: 138.5774
Epoch 281/600
34/34 [==============================] - 2s 47ms/step - loss: 140.7342 - val_loss: 138.3969
Epoch 282/600
34/34 [==============================] - 2s 50ms/step - loss: 140.7291 - val_loss: 137.0562
Epoch 283/600
34/34 [==============================] - 2s 46ms/step - loss: 140.5262 - val_loss: 137.2404
Epoch 284/600
34/34 [==============================] - 2s 47ms/step - loss: 140.5480 - val_loss: 137.0170
Epoch 285/600
34/34 [==============================] - 2s 46ms/step - loss: 140.6632 - val_loss: 136.3454
Epoch 286/600
34/34 [==============================] - 1s 43ms/step - loss: 140.9470 - val_loss: 136.5881
Epoch 287/600
34/34 [==============================] - 1s 43ms/step - loss: 140.6622 - val_loss: 135.8255
Epoch 288/600
34/34 [==============================] - 1s 43ms/step - loss: 140.6113 - val_loss: 135.6260
Epoch 289/600
34/34 [==============================] - 2s 44ms/step - loss: 140.7728 - val_loss: 135.1092
Epoch 290/600
34/34 [==============================] - 2s 45ms/step - loss: 140.9979 - val_loss: 135.6145
Epoch 291/600
34/34 [==============================] - 1s 44ms/step - loss: 141.2364 - val_loss: 136.0707
Epoch 292/600
34/34 [==============================] - 1s 43ms/step - loss: 141.3742 - val_loss: 135.9652
Epoch 293/600
34/34 [==============================] - 2s 46ms/step - loss: 141.0346 - val_loss: 136.1223
Epoch 294/600
34/34 [==============================] - 2s 49ms/step - loss: 141.2858 - val_loss: 137.3034
Epoch 295/600
34/34 [==============================] - 1s 43ms/step - loss: 141.5152 - val_loss: 137.9517
Epoch 296/600
34/34 [==============================] - 1s 43ms/step - loss: 141.2948 - val_loss: 136.7206
Epoch 297/600
34/34 [==============================] - 1s 43ms/step - loss: 141.1164 - val_loss: 135.7426
Epoch 298/600
34/34 [==============================] - 1s 44ms/step - loss: 141.0428 - val_loss: 136.2638
Epoch 299/600
34/34 [==============================] - 1s 43ms/step - loss: 140.8684 - val_loss: 135.6482
Epoch 300/600
34/34 [==============================] - 1s 43ms/step - loss: 140.9512 - val_loss: 136.5296
Epoch 301/600
34/34 [==============================] - 1s 43ms/step - loss: 140.6280 - val_loss: 135.6373
Epoch 302/600
34/34 [==============================] - 1s 42ms/step - loss: 140.5882 - val_loss: 135.1580
Epoch 303/600
34/34 [==============================] - 1s 42ms/step - loss: 140.4347 - val_loss: 135.6350
Epoch 304/600
34/34 [==============================] - 2s 45ms/step - loss: 140.6369 - val_loss: 135.4333
Epoch 305/600
34/34 [==============================] - 1s 43ms/step - loss: 140.9272 - val_loss: 135.3534
Epoch 306/600
34/34 [==============================] - 2s 44ms/step - loss: 140.7827 - val_loss: 135.3624
Epoch 307/600
34/34 [==============================] - 1s 44ms/step - loss: 141.4008 - val_loss: 136.2764
Epoch 308/600
34/34 [==============================] - 2s 45ms/step - loss: 141.1808 - val_loss: 136.6674
Epoch 309/600
34/34 [==============================] - 2s 45ms/step - loss: 141.0001 - val_loss: 135.8523
Epoch 310/600
34/34 [==============================] - 2s 45ms/step - loss: 140.9790 - val_loss: 136.9753
Epoch 311/600
34/34 [==============================] - 1s 44ms/step - loss: 140.7408 - val_loss: 136.3946
Epoch 312/600
34/34 [==============================] - 2s 46ms/step - loss: 140.9743 - val_loss: 136.8099
Epoch 313/600
34/34 [==============================] - 2s 45ms/step - loss: 140.6608 - val_loss: 138.3266
Epoch 314/600
34/34 [==============================] - 2s 47ms/step - loss: 140.6510 - val_loss: 136.9312
Epoch 315/600
34/34 [==============================] - 2s 49ms/step - loss: 140.5007 - val_loss: 138.0331
Epoch 316/600
34/34 [==============================] - 2s 49ms/step - loss: 140.8078 - val_loss: 136.3254
Epoch 317/600
34/34 [==============================] - 2s 48ms/step - loss: 140.6286 - val_loss: 136.7110
Epoch 318/600
34/34 [==============================] - 2s 49ms/step - loss: 140.6951 - val_loss: 136.8296
Epoch 319/600
34/34 [==============================] - 2s 49ms/step - loss: 141.0117 - val_loss: 136.2443
Epoch 320/600
34/34 [==============================] - 2s 48ms/step - loss: 140.8298 - val_loss: 136.9564
Epoch 321/600
34/34 [==============================] - 2s 50ms/step - loss: 140.7996 - val_loss: 135.6990
Epoch 322/600
34/34 [==============================] - 2s 50ms/step - loss: 141.0951 - val_loss: 135.5361
Epoch 323/600
34/34 [==============================] - 2s 49ms/step - loss: 141.1717 - val_loss: 136.6288
Epoch 324/600
34/34 [==============================] - 2s 47ms/step - loss: 140.8041 - val_loss: 135.4642
Epoch 325/600
34/34 [==============================] - 1s 44ms/step - loss: 140.4826 - val_loss: 135.3171
Epoch 326/600
34/34 [==============================] - 2s 44ms/step - loss: 140.2942 - val_loss: 136.8911
Epoch 327/600
34/34 [==============================] - 1s 44ms/step - loss: 140.3775 - val_loss: 136.5200
Epoch 328/600
34/34 [==============================] - 2s 45ms/step - loss: 140.3965 - val_loss: 137.6756
Epoch 329/600
34/34 [==============================] - 2s 44ms/step - loss: 140.4684 - val_loss: 139.3232
Epoch 330/600
34/34 [==============================] - 2s 45ms/step - loss: 140.4129 - val_loss: 140.4349
Epoch 331/600
34/34 [==============================] - 2s 45ms/step - loss: 140.3173 - val_loss: 142.3287
Epoch 332/600
34/34 [==============================] - 1s 43ms/step - loss: 140.0271 - val_loss: 142.6782
Epoch 333/600
34/34 [==============================] - 1s 44ms/step - loss: 139.7647 - val_loss: 143.1599
Epoch 334/600
34/34 [==============================] - 1s 44ms/step - loss: 139.8621 - val_loss: 143.6943
Epoch 335/600
34/34 [==============================] - 2s 45ms/step - loss: 140.0276 - val_loss: 141.2817
Epoch 336/600
34/34 [==============================] - 1s 44ms/step - loss: 140.1533 - val_loss: 141.2625
Epoch 337/600
34/34 [==============================] - 1s 44ms/step - loss: 139.8290 - val_loss: 141.2593
Epoch 338/600
34/34 [==============================] - 1s 44ms/step - loss: 140.0295 - val_loss: 140.8388
Epoch 339/600
34/34 [==============================] - 1s 43ms/step - loss: 139.4425 - val_loss: 139.7182
Epoch 340/600
34/34 [==============================] - 1s 42ms/step - loss: 139.2054 - val_loss: 139.8942
Epoch 341/600
34/34 [==============================] - 1s 42ms/step - loss: 139.4551 - val_loss: 139.2589
Epoch 342/600
34/34 [==============================] - 1s 43ms/step - loss: 139.1723 - val_loss: 139.2241
Epoch 343/600
34/34 [==============================] - 1s 44ms/step - loss: 139.1288 - val_loss: 138.5026
Epoch 344/600
34/34 [==============================] - 1s 44ms/step - loss: 139.3023 - val_loss: 137.8388
Epoch 345/600
34/34 [==============================] - 1s 44ms/step - loss: 139.4120 - val_loss: 137.4490
Epoch 346/600
34/34 [==============================] - 1s 44ms/step - loss: 139.1994 - val_loss: 138.1814
Epoch 347/600
34/34 [==============================] - 2s 45ms/step - loss: 139.2658 - val_loss: 137.5700
Epoch 348/600
34/34 [==============================] - 1s 44ms/step - loss: 139.2242 - val_loss: 137.3399
Epoch 349/600
34/34 [==============================] - 1s 44ms/step - loss: 139.2796 - val_loss: 136.9808
Epoch 350/600
34/34 [==============================] - 2s 45ms/step - loss: 139.5804 - val_loss: 137.5983
Epoch 351/600
34/34 [==============================] - 2s 45ms/step - loss: 139.4388 - val_loss: 137.3824
Epoch 352/600
34/34 [==============================] - 2s 45ms/step - loss: 139.3800 - val_loss: 138.1564
Epoch 353/600
34/34 [==============================] - 2s 45ms/step - loss: 138.9271 - val_loss: 137.2049
Epoch 354/600
34/34 [==============================] - 2s 45ms/step - loss: 138.9667 - val_loss: 137.8724
Epoch 355/600
34/34 [==============================] - 2s 45ms/step - loss: 139.2729 - val_loss: 137.1395
Epoch 356/600
34/34 [==============================] - 2s 45ms/step - loss: 138.9125 - val_loss: 137.0128
Epoch 357/600
34/34 [==============================] - 2s 46ms/step - loss: 138.8679 - val_loss: 137.0939
Epoch 358/600
34/34 [==============================] - 2s 45ms/step - loss: 138.6061 - val_loss: 136.8957
Epoch 359/600
34/34 [==============================] - 2s 45ms/step - loss: 138.6657 - val_loss: 137.2510
Epoch 360/600
34/34 [==============================] - 2s 45ms/step - loss: 138.3957 - val_loss: 137.3609
Epoch 361/600
34/34 [==============================] - 2s 45ms/step - loss: 138.3109 - val_loss: 138.3255
Epoch 362/600
34/34 [==============================] - 2s 45ms/step - loss: 138.0562 - val_loss: 139.0242
Epoch 363/600
34/34 [==============================] - 2s 45ms/step - loss: 138.1531 - val_loss: 138.8433
Epoch 364/600
34/34 [==============================] - 2s 45ms/step - loss: 138.7056 - val_loss: 137.7325
Epoch 365/600
34/34 [==============================] - 2s 45ms/step - loss: 138.8098 - val_loss: 138.7542
Epoch 366/600
34/34 [==============================] - 2s 45ms/step - loss: 138.3813 - val_loss: 138.2717
Epoch 367/600
34/34 [==============================] - 2s 45ms/step - loss: 138.4372 - val_loss: 138.5658
Epoch 368/600
34/34 [==============================] - 2s 45ms/step - loss: 138.3854 - val_loss: 139.0194
Epoch 369/600
34/34 [==============================] - 2s 45ms/step - loss: 137.9959 - val_loss: 138.2838
Epoch 370/600
34/34 [==============================] - 2s 45ms/step - loss: 138.0659 - val_loss: 138.8755
Epoch 371/600
34/34 [==============================] - 2s 45ms/step - loss: 137.7976 - val_loss: 137.4097
Epoch 372/600
34/34 [==============================] - 2s 45ms/step - loss: 137.9003 - val_loss: 138.7584
Epoch 373/600
34/34 [==============================] - 2s 48ms/step - loss: 138.0012 - val_loss: 138.6241
Epoch 374/600
34/34 [==============================] - 2s 45ms/step - loss: 138.2627 - val_loss: 139.0752
Epoch 375/600
34/34 [==============================] - 2s 45ms/step - loss: 138.5616 - val_loss: 137.4317
Epoch 376/600
34/34 [==============================] - 2s 45ms/step - loss: 138.6388 - val_loss: 138.0724
Epoch 377/600
34/34 [==============================] - 2s 45ms/step - loss: 139.3749 - val_loss: 137.7596
Epoch 378/600
34/34 [==============================] - 2s 45ms/step - loss: 139.4407 - val_loss: 138.9402
Epoch 379/600
34/34 [==============================] - 2s 45ms/step - loss: 139.6038 - val_loss: 138.3347
Epoch 380/600
34/34 [==============================] - 2s 45ms/step - loss: 139.5764 - val_loss: 139.0106
Epoch 381/600
34/34 [==============================] - 2s 45ms/step - loss: 138.8569 - val_loss: 139.6091
Epoch 382/600
34/34 [==============================] - 2s 45ms/step - loss: 138.5050 - val_loss: 141.2720
Epoch 383/600
34/34 [==============================] - 2s 45ms/step - loss: 138.3399 - val_loss: 141.8097
Epoch 384/600
34/34 [==============================] - 2s 45ms/step - loss: 137.9237 - val_loss: 141.5377
Epoch 385/600
34/34 [==============================] - 2s 45ms/step - loss: 137.4531 - val_loss: 142.5988
Epoch 386/600
34/34 [==============================] - 2s 45ms/step - loss: 137.2009 - val_loss: 141.3619
Epoch 387/600
34/34 [==============================] - 2s 45ms/step - loss: 137.3657 - val_loss: 142.2761
Epoch 388/600
34/34 [==============================] - 2s 45ms/step - loss: 137.2203 - val_loss: 140.8026
Epoch 389/600
34/34 [==============================] - 2s 45ms/step - loss: 137.1903 - val_loss: 141.0476
Epoch 390/600
34/34 [==============================] - 2s 48ms/step - loss: 137.0710 - val_loss: 140.7649
Epoch 391/600
34/34 [==============================] - 2s 45ms/step - loss: 137.1322 - val_loss: 141.4252
Epoch 392/600
34/34 [==============================] - 2s 45ms/step - loss: 137.4431 - val_loss: 141.5249
Epoch 393/600
34/34 [==============================] - 2s 45ms/step - loss: 137.3807 - val_loss: 141.4614
Epoch 394/600
34/34 [==============================] - 2s 46ms/step - loss: 137.9727 - val_loss: 141.5533
Epoch 395/600
34/34 [==============================] - 2s 45ms/step - loss: 137.7742 - val_loss: 142.6848
Epoch 396/600
34/34 [==============================] - 2s 45ms/step - loss: 137.6556 - val_loss: 141.3472
Epoch 397/600
34/34 [==============================] - 2s 46ms/step - loss: 137.5348 - val_loss: 142.2829
Epoch 398/600
34/34 [==============================] - 2s 45ms/step - loss: 137.8193 - val_loss: 142.4320
Epoch 399/600
34/34 [==============================] - 2s 44ms/step - loss: 138.1380 - val_loss: 141.2642
Epoch 400/600
34/34 [==============================] - 2s 45ms/step - loss: 138.0180 - val_loss: 142.6458
Epoch 401/600
34/34 [==============================] - 2s 45ms/step - loss: 138.1829 - val_loss: 142.9757
Epoch 402/600
34/34 [==============================] - 2s 45ms/step - loss: 137.9812 - val_loss: 142.9876
Epoch 403/600
34/34 [==============================] - 2s 45ms/step - loss: 137.8678 - val_loss: 142.8444
Epoch 404/600
34/34 [==============================] - 2s 45ms/step - loss: 137.4952 - val_loss: 141.6336
Epoch 405/600
34/34 [==============================] - 2s 45ms/step - loss: 137.2003 - val_loss: 141.5093
Epoch 406/600
34/34 [==============================] - 2s 45ms/step - loss: 137.0460 - val_loss: 141.6206
Epoch 407/600
34/34 [==============================] - 2s 45ms/step - loss: 137.0283 - val_loss: 142.0814
Epoch 408/600
34/34 [==============================] - 2s 45ms/step - loss: 136.9459 - val_loss: 142.0420
Epoch 409/600
34/34 [==============================] - 2s 45ms/step - loss: 136.8326 - val_loss: 144.1428
Epoch 410/600
34/34 [==============================] - 2s 45ms/step - loss: 136.7396 - val_loss: 144.3148
Epoch 411/600
34/34 [==============================] - 2s 46ms/step - loss: 137.0055 - val_loss: 142.9268
Epoch 412/600
34/34 [==============================] - 2s 45ms/step - loss: 136.9211 - val_loss: 144.1188
Epoch 413/600
34/34 [==============================] - 2s 45ms/step - loss: 136.7937 - val_loss: 143.4787
Epoch 414/600
34/34 [==============================] - 2s 46ms/step - loss: 136.7027 - val_loss: 145.9332
Epoch 415/600
34/34 [==============================] - 2s 45ms/step - loss: 136.7186 - val_loss: 144.2531
Epoch 416/600
34/34 [==============================] - 2s 45ms/step - loss: 137.0411 - val_loss: 143.9023
Epoch 417/600
34/34 [==============================] - 2s 45ms/step - loss: 137.6372 - val_loss: 141.5299
Epoch 418/600
34/34 [==============================] - 2s 46ms/step - loss: 137.3592 - val_loss: 143.5639
Epoch 419/600
34/34 [==============================] - 2s 46ms/step - loss: 137.6398 - val_loss: 144.2882
Epoch 420/600
34/34 [==============================] - 2s 46ms/step - loss: 137.2957 - val_loss: 146.4559
Epoch 421/600
34/34 [==============================] - 2s 45ms/step - loss: 137.5155 - val_loss: 144.6556
Epoch 422/600
34/34 [==============================] - 2s 46ms/step - loss: 137.3274 - val_loss: 144.6913
Epoch 423/600
34/34 [==============================] - 2s 45ms/step - loss: 136.9400 - val_loss: 143.9370
Epoch 424/600
34/34 [==============================] - 2s 46ms/step - loss: 136.6789 - val_loss: 144.9106
Epoch 425/600
34/34 [==============================] - 2s 46ms/step - loss: 136.4521 - val_loss: 144.1562
Epoch 426/600
34/34 [==============================] - 2s 46ms/step - loss: 136.4735 - val_loss: 143.5854
Epoch 427/600
34/34 [==============================] - 2s 45ms/step - loss: 136.2401 - val_loss: 144.2756
Epoch 428/600
34/34 [==============================] - 2s 46ms/step - loss: 136.1992 - val_loss: 144.2443
Epoch 429/600
34/34 [==============================] - 2s 47ms/step - loss: 136.2131 - val_loss: 145.6608
Epoch 430/600
34/34 [==============================] - 2s 46ms/step - loss: 136.1925 - val_loss: 144.9304
Epoch 431/600
34/34 [==============================] - 2s 46ms/step - loss: 136.4050 - val_loss: 145.0949
Epoch 432/600
34/34 [==============================] - 2s 45ms/step - loss: 137.4755 - val_loss: 144.4926
Epoch 433/600
34/34 [==============================] - 2s 45ms/step - loss: 137.7589 - val_loss: 145.5876
Epoch 434/600
34/34 [==============================] - 2s 46ms/step - loss: 137.9856 - val_loss: 144.3925
Epoch 435/600
34/34 [==============================] - 2s 46ms/step - loss: 137.9103 - val_loss: 144.4070
Epoch 436/600
34/34 [==============================] - 2s 46ms/step - loss: 138.0567 - val_loss: 144.7677
Epoch 437/600
34/34 [==============================] - 2s 46ms/step - loss: 137.1838 - val_loss: 142.9882
Epoch 438/600
34/34 [==============================] - 2s 46ms/step - loss: 136.0871 - val_loss: 144.3652
Epoch 439/600
34/34 [==============================] - 2s 46ms/step - loss: 135.8670 - val_loss: 144.4221
Epoch 440/600
34/34 [==============================] - 2s 46ms/step - loss: 135.6601 - val_loss: 144.1144
Epoch 441/600
34/34 [==============================] - 2s 46ms/step - loss: 135.7422 - val_loss: 142.7335
Epoch 442/600
34/34 [==============================] - 2s 46ms/step - loss: 136.0942 - val_loss: 141.7747
Epoch 443/600
34/34 [==============================] - 2s 46ms/step - loss: 136.2285 - val_loss: 141.8147
Epoch 444/600
34/34 [==============================] - 2s 46ms/step - loss: 136.2902 - val_loss: 142.5519
Epoch 445/600
34/34 [==============================] - 2s 46ms/step - loss: 136.1740 - val_loss: 141.5857
Epoch 446/600
34/34 [==============================] - 2s 46ms/step - loss: 136.1300 - val_loss: 140.7974
Epoch 447/600
34/34 [==============================] - 2s 46ms/step - loss: 135.8125 - val_loss: 140.1648
Epoch 448/600
34/34 [==============================] - 2s 46ms/step - loss: 135.5766 - val_loss: 140.8348
Epoch 449/600
34/34 [==============================] - 2s 49ms/step - loss: 135.6936 - val_loss: 141.0649
Epoch 450/600
34/34 [==============================] - 2s 48ms/step - loss: 135.8607 - val_loss: 141.7749
Epoch 451/600
34/34 [==============================] - 2s 46ms/step - loss: 135.9611 - val_loss: 141.9115
Epoch 452/600
34/34 [==============================] - 2s 46ms/step - loss: 136.8343 - val_loss: 140.9707
Epoch 453/600
34/34 [==============================] - 2s 46ms/step - loss: 137.3669 - val_loss: 140.8088
Epoch 454/600
34/34 [==============================] - 2s 46ms/step - loss: 137.5811 - val_loss: 140.1483
Epoch 455/600
34/34 [==============================] - 2s 46ms/step - loss: 137.9177 - val_loss: 140.5729
Epoch 456/600
34/34 [==============================] - 2s 46ms/step - loss: 138.0336 - val_loss: 140.3970
Epoch 457/600
34/34 [==============================] - 2s 46ms/step - loss: 137.5693 - val_loss: 140.4451
Epoch 458/600
34/34 [==============================] - 2s 46ms/step - loss: 137.7177 - val_loss: 141.1882
Epoch 459/600
34/34 [==============================] - 2s 46ms/step - loss: 137.6586 - val_loss: 140.8400
Epoch 460/600
34/34 [==============================] - 2s 46ms/step - loss: 138.4143 - val_loss: 141.0095
Epoch 461/600
34/34 [==============================] - 2s 46ms/step - loss: 137.9711 - val_loss: 140.3717
Epoch 462/600
34/34 [==============================] - 2s 46ms/step - loss: 138.2552 - val_loss: 140.2240
Epoch 463/600
34/34 [==============================] - 2s 47ms/step - loss: 137.5673 - val_loss: 141.4444
Epoch 464/600
34/34 [==============================] - 2s 46ms/step - loss: 137.5257 - val_loss: 140.2500
Epoch 465/600
34/34 [==============================] - 2s 46ms/step - loss: 137.1848 - val_loss: 140.4661
Epoch 466/600
34/34 [==============================] - 2s 46ms/step - loss: 137.1092 - val_loss: 141.1800
Epoch 467/600
34/34 [==============================] - 2s 46ms/step - loss: 136.7234 - val_loss: 142.1432
Epoch 468/600
34/34 [==============================] - 2s 46ms/step - loss: 136.3277 - val_loss: 142.8113
Epoch 469/600
34/34 [==============================] - 2s 46ms/step - loss: 136.3014 - val_loss: 144.0887
Epoch 470/600
34/34 [==============================] - 2s 46ms/step - loss: 135.8312 - val_loss: 144.1331
Epoch 471/600
34/34 [==============================] - 2s 46ms/step - loss: 135.6628 - val_loss: 144.1910
Epoch 472/600
34/34 [==============================] - 2s 46ms/step - loss: 135.4846 - val_loss: 144.2282
Epoch 473/600
34/34 [==============================] - 2s 46ms/step - loss: 135.9704 - val_loss: 144.1233
Epoch 474/600
34/34 [==============================] - 2s 46ms/step - loss: 135.3735 - val_loss: 147.4350
Epoch 475/600
34/34 [==============================] - 2s 46ms/step - loss: 135.8838 - val_loss: 145.4421
Epoch 476/600
34/34 [==============================] - 2s 46ms/step - loss: 136.2582 - val_loss: 145.6502
Epoch 477/600
34/34 [==============================] - 2s 46ms/step - loss: 136.1581 - val_loss: 146.8468
Epoch 478/600
34/34 [==============================] - 2s 46ms/step - loss: 135.6638 - val_loss: 147.3320
Epoch 479/600
34/34 [==============================] - 2s 46ms/step - loss: 135.9740 - val_loss: 147.8923
Epoch 480/600
34/34 [==============================] - 2s 46ms/step - loss: 135.9342 - val_loss: 146.9014
Epoch 481/600
34/34 [==============================] - 2s 46ms/step - loss: 135.7469 - val_loss: 147.4988
Epoch 482/600
34/34 [==============================] - 2s 46ms/step - loss: 135.6479 - val_loss: 148.3866
Epoch 483/600
34/34 [==============================] - 2s 46ms/step - loss: 135.3263 - val_loss: 147.9759
Epoch 484/600
34/34 [==============================] - 2s 46ms/step - loss: 135.1246 - val_loss: 146.1944
Epoch 485/600
34/34 [==============================] - 2s 46ms/step - loss: 135.3060 - val_loss: 146.4290
Epoch 486/600
34/34 [==============================] - 2s 46ms/step - loss: 134.8265 - val_loss: 145.3110
Epoch 487/600
34/34 [==============================] - 2s 46ms/step - loss: 134.9593 - val_loss: 145.5780
Epoch 488/600
34/34 [==============================] - 2s 47ms/step - loss: 134.5852 - val_loss: 145.1518
Epoch 489/600
34/34 [==============================] - 2s 46ms/step - loss: 134.8732 - val_loss: 145.6475
Epoch 490/600
34/34 [==============================] - 2s 46ms/step - loss: 134.7973 - val_loss: 143.8884
Epoch 491/600
34/34 [==============================] - 2s 46ms/step - loss: 134.9859 - val_loss: 143.8917
Epoch 492/600
34/34 [==============================] - 2s 46ms/step - loss: 135.3622 - val_loss: 144.7879
Epoch 493/600
34/34 [==============================] - 2s 46ms/step - loss: 135.1329 - val_loss: 145.0883
Epoch 494/600
34/34 [==============================] - 2s 46ms/step - loss: 135.0123 - val_loss: 145.6436
Epoch 495/600
34/34 [==============================] - 2s 46ms/step - loss: 134.7129 - val_loss: 145.6779
Epoch 496/600
34/34 [==============================] - 2s 46ms/step - loss: 134.9532 - val_loss: 144.5386
Epoch 497/600
34/34 [==============================] - 2s 46ms/step - loss: 134.6745 - val_loss: 144.5002
Epoch 498/600
34/34 [==============================] - 2s 46ms/step - loss: 135.3211 - val_loss: 145.9825
Epoch 499/600
34/34 [==============================] - 2s 46ms/step - loss: 135.3445 - val_loss: 146.4794
Epoch 500/600
34/34 [==============================] - 2s 46ms/step - loss: 134.9691 - val_loss: 145.9211
Epoch 501/600
34/34 [==============================] - 2s 46ms/step - loss: 134.8785 - val_loss: 146.3900
Epoch 502/600
34/34 [==============================] - 2s 46ms/step - loss: 135.4226 - val_loss: 144.9985
Epoch 503/600
34/34 [==============================] - 2s 46ms/step - loss: 135.4640 - val_loss: 146.1516
Epoch 504/600
34/34 [==============================] - 2s 46ms/step - loss: 135.5907 - val_loss: 147.1083
Epoch 505/600
34/34 [==============================] - 2s 45ms/step - loss: 135.1661 - val_loss: 147.8856
Epoch 506/600
34/34 [==============================] - 2s 46ms/step - loss: 134.9828 - val_loss: 147.0822
Epoch 507/600
34/34 [==============================] - 2s 46ms/step - loss: 135.3715 - val_loss: 146.1877
Epoch 508/600
34/34 [==============================] - 2s 46ms/step - loss: 135.4303 - val_loss: 146.0808
Epoch 509/600
34/34 [==============================] - 2s 46ms/step - loss: 135.4763 - val_loss: 147.7718
Epoch 510/600
34/34 [==============================] - 2s 46ms/step - loss: 135.6138 - val_loss: 149.5840
Epoch 511/600
34/34 [==============================] - 2s 46ms/step - loss: 135.8148 - val_loss: 146.4570
Epoch 512/600
34/34 [==============================] - 2s 46ms/step - loss: 136.1208 - val_loss: 145.6416
Epoch 513/600
34/34 [==============================] - 2s 46ms/step - loss: 136.7021 - val_loss: 142.2682
Epoch 514/600
34/34 [==============================] - 2s 45ms/step - loss: 135.9738 - val_loss: 142.9066
Epoch 515/600
34/34 [==============================] - 2s 46ms/step - loss: 136.9077 - val_loss: 143.3877
Epoch 516/600
34/34 [==============================] - 2s 46ms/step - loss: 135.9884 - val_loss: 142.4059
Epoch 517/600
34/34 [==============================] - 2s 46ms/step - loss: 135.5788 - val_loss: 143.5410
Epoch 518/600
34/34 [==============================] - 2s 46ms/step - loss: 134.8519 - val_loss: 143.4791
Epoch 519/600
34/34 [==============================] - 2s 46ms/step - loss: 134.4908 - val_loss: 144.5910
Epoch 520/600
34/34 [==============================] - 2s 46ms/step - loss: 134.1789 - val_loss: 144.5958
Epoch 521/600
34/34 [==============================] - 2s 45ms/step - loss: 133.9238 - val_loss: 145.5065
Epoch 522/600
34/34 [==============================] - 2s 46ms/step - loss: 133.7337 - val_loss: 146.6824
Epoch 523/600
34/34 [==============================] - 2s 46ms/step - loss: 133.8089 - val_loss: 145.9666
Epoch 524/600
34/34 [==============================] - 2s 46ms/step - loss: 133.8215 - val_loss: 146.0312
Epoch 525/600
34/34 [==============================] - 2s 46ms/step - loss: 133.7289 - val_loss: 144.4341
Epoch 526/600
34/34 [==============================] - 2s 46ms/step - loss: 133.4494 - val_loss: 144.7009
Epoch 527/600
34/34 [==============================] - 2s 47ms/step - loss: 133.6872 - val_loss: 144.4650
Epoch 528/600
34/34 [==============================] - 2s 46ms/step - loss: 133.2993 - val_loss: 145.5628
Epoch 529/600
34/34 [==============================] - 2s 46ms/step - loss: 133.4731 - val_loss: 146.2699
Epoch 530/600
34/34 [==============================] - 2s 46ms/step - loss: 133.5691 - val_loss: 147.1108
Epoch 531/600
34/34 [==============================] - 2s 46ms/step - loss: 133.7609 - val_loss: 148.4298
Epoch 532/600
34/34 [==============================] - 2s 45ms/step - loss: 133.9448 - val_loss: 148.4364
Epoch 533/600
34/34 [==============================] - 2s 46ms/step - loss: 134.6313 - val_loss: 145.5741
Epoch 534/600
34/34 [==============================] - 2s 46ms/step - loss: 134.8351 - val_loss: 146.2703
Epoch 535/600
34/34 [==============================] - 2s 45ms/step - loss: 134.4670 - val_loss: 145.8584
Epoch 536/600
34/34 [==============================] - 2s 46ms/step - loss: 134.2649 - val_loss: 147.7898
Epoch 537/600
34/34 [==============================] - 2s 46ms/step - loss: 133.6999 - val_loss: 147.7597
Epoch 538/600
34/34 [==============================] - 2s 46ms/step - loss: 133.6514 - val_loss: 147.6605
Epoch 539/600
34/34 [==============================] - 2s 46ms/step - loss: 133.3716 - val_loss: 150.1050
Epoch 540/600
34/34 [==============================] - 2s 46ms/step - loss: 133.2607 - val_loss: 150.2529
Epoch 541/600
34/34 [==============================] - 2s 46ms/step - loss: 133.5697 - val_loss: 149.5192
Epoch 542/600
34/34 [==============================] - 2s 46ms/step - loss: 133.4706 - val_loss: 148.3766
Epoch 543/600
34/34 [==============================] - 2s 46ms/step - loss: 133.8055 - val_loss: 148.3919
Epoch 544/600
34/34 [==============================] - 2s 45ms/step - loss: 133.7280 - val_loss: 147.1725
Epoch 545/600
34/34 [==============================] - 2s 46ms/step - loss: 133.6027 - val_loss: 147.6811
Epoch 546/600
34/34 [==============================] - 2s 46ms/step - loss: 133.4496 - val_loss: 147.0526
Epoch 547/600
34/34 [==============================] - 2s 46ms/step - loss: 133.6999 - val_loss: 146.1245
Epoch 548/600
34/34 [==============================] - 2s 46ms/step - loss: 133.6330 - val_loss: 146.5956
Epoch 549/600
34/34 [==============================] - 2s 45ms/step - loss: 133.3718 - val_loss: 147.9286
Epoch 550/600
34/34 [==============================] - 2s 46ms/step - loss: 133.6493 - val_loss: 149.0562
Epoch 551/600
34/34 [==============================] - 2s 46ms/step - loss: 133.1895 - val_loss: 149.0026
Epoch 552/600
34/34 [==============================] - 2s 46ms/step - loss: 133.5352 - val_loss: 147.2954
Epoch 553/600
34/34 [==============================] - 2s 46ms/step - loss: 133.4619 - val_loss: 147.7376
Epoch 554/600
34/34 [==============================] - 2s 46ms/step - loss: 133.1738 - val_loss: 146.2086
Epoch 555/600
34/34 [==============================] - 2s 46ms/step - loss: 132.8803 - val_loss: 146.4775
Epoch 556/600
34/34 [==============================] - 2s 45ms/step - loss: 132.7602 - val_loss: 147.0419
Epoch 557/600
34/34 [==============================] - 2s 46ms/step - loss: 132.8004 - val_loss: 147.7082
Epoch 558/600
34/34 [==============================] - 2s 46ms/step - loss: 132.7452 - val_loss: 148.4892
Epoch 559/600
34/34 [==============================] - 2s 45ms/step - loss: 132.8828 - val_loss: 148.6580
Epoch 560/600
34/34 [==============================] - 2s 46ms/step - loss: 133.1773 - val_loss: 148.2873
Epoch 561/600
34/34 [==============================] - 2s 46ms/step - loss: 133.8202 - val_loss: 148.7892
Epoch 562/600
34/34 [==============================] - 2s 46ms/step - loss: 134.1894 - val_loss: 148.3109
Epoch 563/600
34/34 [==============================] - 2s 46ms/step - loss: 135.0070 - val_loss: 148.3951
Epoch 564/600
34/34 [==============================] - 2s 46ms/step - loss: 134.4832 - val_loss: 146.6408
Epoch 565/600
34/34 [==============================] - 2s 47ms/step - loss: 134.3996 - val_loss: 148.3154
Epoch 566/600
34/34 [==============================] - 2s 46ms/step - loss: 133.6765 - val_loss: 147.3045
Epoch 567/600
34/34 [==============================] - 2s 46ms/step - loss: 133.9406 - val_loss: 148.5090
Epoch 568/600
34/34 [==============================] - 2s 46ms/step - loss: 134.9962 - val_loss: 149.2270
Epoch 569/600
34/34 [==============================] - 2s 46ms/step - loss: 134.7897 - val_loss: 150.7587
Epoch 570/600
34/34 [==============================] - 2s 46ms/step - loss: 134.4867 - val_loss: 150.0631
Epoch 571/600
34/34 [==============================] - 2s 46ms/step - loss: 134.7373 - val_loss: 148.6393
Epoch 572/600
34/34 [==============================] - 2s 46ms/step - loss: 133.9860 - val_loss: 148.3361
Epoch 573/600
34/34 [==============================] - 2s 46ms/step - loss: 133.2844 - val_loss: 150.0474
Epoch 574/600
34/34 [==============================] - 2s 46ms/step - loss: 133.3064 - val_loss: 149.2645
Epoch 575/600
34/34 [==============================] - 2s 46ms/step - loss: 133.0796 - val_loss: 149.6770
Epoch 576/600
34/34 [==============================] - 2s 46ms/step - loss: 133.0362 - val_loss: 149.3986
Epoch 577/600
34/34 [==============================] - 2s 45ms/step - loss: 132.9999 - val_loss: 148.3777
Epoch 578/600
34/34 [==============================] - 2s 46ms/step - loss: 133.0929 - val_loss: 148.6705
Epoch 579/600
34/34 [==============================] - 2s 46ms/step - loss: 133.1316 - val_loss: 149.0501
Epoch 580/600
34/34 [==============================] - 2s 46ms/step - loss: 133.2899 - val_loss: 149.8145
Epoch 581/600
34/34 [==============================] - 2s 46ms/step - loss: 133.6173 - val_loss: 148.6873
Epoch 582/600
34/34 [==============================] - 2s 46ms/step - loss: 133.9034 - val_loss: 148.3769
Epoch 583/600
34/34 [==============================] - 2s 46ms/step - loss: 134.4010 - val_loss: 149.4266
Epoch 584/600
34/34 [==============================] - 2s 46ms/step - loss: 134.4090 - val_loss: 150.5802
Epoch 585/600
34/34 [==============================] - 2s 46ms/step - loss: 133.8262 - val_loss: 151.9098
Epoch 586/600
34/34 [==============================] - 2s 46ms/step - loss: 133.2415 - val_loss: 151.2908
Epoch 587/600
34/34 [==============================] - 2s 45ms/step - loss: 133.5606 - val_loss: 152.0659
Epoch 588/600
34/34 [==============================] - 2s 46ms/step - loss: 134.0117 - val_loss: 152.8749
Epoch 589/600
34/34 [==============================] - 2s 46ms/step - loss: 133.8595 - val_loss: 153.4059
Epoch 590/600
34/34 [==============================] - 2s 46ms/step - loss: 133.4200 - val_loss: 154.0671
Epoch 591/600
34/34 [==============================] - 2s 46ms/step - loss: 133.4285 - val_loss: 151.3421
Epoch 592/600
34/34 [==============================] - 2s 46ms/step - loss: 133.0065 - val_loss: 152.1399
Epoch 593/600
34/34 [==============================] - 2s 46ms/step - loss: 133.3750 - val_loss: 151.7432
Epoch 594/600
34/34 [==============================] - 2s 46ms/step - loss: 133.6496 - val_loss: 152.9641
Epoch 595/600
34/34 [==============================] - 2s 46ms/step - loss: 133.7530 - val_loss: 153.7097
Epoch 596/600
34/34 [==============================] - 2s 46ms/step - loss: 134.0605 - val_loss: 151.3371
Epoch 597/600
34/34 [==============================] - 2s 46ms/step - loss: 133.3471 - val_loss: 152.9345
Epoch 598/600
34/34 [==============================] - 2s 46ms/step - loss: 133.4139 - val_loss: 153.0237
Epoch 599/600
34/34 [==============================] - 2s 45ms/step - loss: 133.4027 - val_loss: 153.5298
Epoch 600/600
34/34 [==============================] - 2s 46ms/step - loss: 134.0882 - val_loss: 155.1112
[15]:
# Restore the best weights saved by the ModelCheckpoint callback during
# training (model_chk_path is defined in the training cell above).
conv_pivae.load_weights(model_chk_path)
[16]:
# Embed the full (flattened) training set with the trained conv-piVAE model.
outputs = conv_pivae.predict([np.concatenate(train_x),
                            np.concatenate(train_u)])
# Outputs: post_mean, post_log_var, z_sample,fire_rate, lam_mean, lam_log_var, z_mean, z_log_var
post_z = outputs[0]  # posterior mean of the latent (post_mean)
mean_z = outputs[6]  # z_mean
labels = np.concatenate(train_u)  # flattened labels matching the embedded samples

Visualize the embedding#

[17]:
# 2D scatter of the first two latent dimensions, colored by label column 0
# (position), using a separate colormap for each of the two one-hot
# direction groups (columns 1 and 2 of the labels; presumably the two
# running directions, per the r_/l_ variable names).
fig = plt.figure(figsize=(5,5))
ax = plt.subplot(111)
r_ind = labels[:,1] == 1  # samples with one-hot column 1 set
l_ind = labels[:,2] == 1  # samples with one-hot column 2 set
ax.scatter(post_z[r_ind, 0], post_z[r_ind, 1], c=labels[r_ind,0], s=1, cmap = 'cool')
ax.scatter(post_z[l_ind, 0], post_z[l_ind, 1], c=labels[l_ind,0], s=1, cmap = 'viridis')

[17]:
<matplotlib.collections.PathCollection at 0x7fd8cd1b7df0>
../_images/demo_notebooks_Demo_conv-pivae_18_1.png

Decoding the position using Monte Carlo sampling#

  • This method takes a while since it uses random sampling from p(z|u) to approximate p(u|x).

  • The functions below, which compute the marginal likelihood and decode position, are copied and adapted from the pi-VAE repository (zhd96/pi-vae).

[18]:
def compute_marginal_lik_poisson(vae_mdl,
                                 x_test,
                                 u_fake,
                                 n_sample,
                                 log_opt=False):
    """Monte Carlo estimate of the marginal likelihood p(x|u) under a Poisson model.

    For every test batch and every candidate label u, samples ``n_sample``
    latents z from the label prior p(z|u) (parameterized by the model's
    lam_mean / lam_log_var outputs, indices 4 and 5 of ``predict`` per the
    output-order comment earlier in the notebook), decodes them to firing
    rates through the model's last layer, and averages the Poisson
    likelihood p(x|z) over the samples.

    Args:
        vae_mdl: trained (conv-)piVAE Keras model.
        x_test: list of spike-count arrays, one per batch.
        u_fake: candidate labels, indexed as ``u_fake[ii][jj]``
            (candidate ii, batch jj) with the same length as ``x_test[jj]``.
        n_sample: number of Monte Carlo z samples per candidate.
        log_opt: if True return log-likelihoods; otherwise likelihoods
            (note: the non-log branch rescales by exp(loglik_max), so values
            are comparable across candidates within a batch only).

    Returns:
        List with one array per batch, stacking the (log-)likelihood of
        each candidate label for that batch.
    """
    lik_all = []

    for jj in range(len(x_test)):  ## for each batch
        lik_test = []
        for ii in range(len(u_fake)):  ## for each unique u value
            opts = vae_mdl.predict([x_test[jj], u_fake[ii][jj]])
            lam_mean = opts[4]
            lam_log_var = opts[5]
            z_dim = lam_mean.shape
            # Reparameterization: z = eps * sigma + mu, with n_sample draws.
            z_sample = np.random.normal(0,
                                        1,
                                        size=(n_sample, z_dim[0], z_dim[1]))
            z_sample = z_sample * np.exp(0.5 * lam_log_var) + lam_mean

            ## compute fire rate ##
            # Build a Keras backend function mapping the decoder's last-layer
            # input (z) directly to its output (firing rates), bypassing the
            # full model so we can feed our own z samples.
            get_fire_rate_output = K.function(
                [vae_mdl.layers[-1].get_input_at(0)],
                [vae_mdl.layers[-1].get_output_at(0)],
            )
            fire_rate = get_fire_rate_output([z_sample.reshape(-1,
                                                               z_dim[-1])])[0]
            fire_rate = fire_rate.reshape(n_sample, -1, fire_rate.shape[-2],
                                          fire_rate.shape[-1])
            ## compute p(x|z) poisson likelihood ##
            # Poisson log-likelihood up to the x! constant; rates are clipped
            # away from 0 to keep log() finite.
            loglik = x_test[jj] * np.log(np.clip(fire_rate, 1e-10,
                                                 1e7)) - fire_rate
            # n_sample*n_time*n_neuron
            loglik = loglik.sum(axis=(-2, -1), dtype = np.float64)
            ## sum across neurons and time
            # Log-sum-exp-style stabilization: subtract the per-candidate max
            # over samples before exponentiating.
            loglik_max = loglik.max(axis=0)
            loglik -= loglik_max
            if log_opt:
                tmp = np.log(np.exp(loglik).mean(axis=0)) + (loglik_max)
            else:
                tmp = (np.exp(loglik).mean(axis=0)) * np.exp(loglik_max)
            lik_test.append(tmp)
        lik_all.append(np.array(lik_test))

    return lik_all


def decode_sampling_rat(test_x, test_y, model, sampling_num):
    """Decode position and direction by maximum marginal likelihood.

    Builds 200 candidate labels (100 position bins in [0, 1.6] m for each of
    the two one-hot directions), scores each candidate per time bin with
    ``compute_marginal_lik_poisson``, and picks the argmax candidate.

    Args:
        test_x: list of per-batch spike-count arrays.
        test_y: list of per-batch label arrays; column 0 is position [m],
            columns 1/2 the one-hot direction code.
        model: trained conv-piVAE Keras model.
        sampling_num: number of Monte Carlo z samples per candidate.

    Returns:
        (median_err, prediction): median absolute position error [m], and
        the list of decoded label vectors (position + direction one-hot).
    """
    hd_bins = np.linspace(0, 1.6, 100)
    # Candidate lookup table: rows 0-99 carry direction column 1,
    # rows 100-199 carry direction column 2.
    hd_bins_dir = np.hstack([np.concatenate([np.linspace(0,1.6,100), np.linspace(0,1.6,100)])[...,None], np.zeros((200,2))])
    hd_bins_dir[:100][:,1]=1
    hd_bins_dir[100:200][:,2]=1
    nu_sample = 200
    # Fake labels per candidate: kept as nested Python lists because the test
    # batches have different lengths — a ragged np.array was deprecated
    # (VisibleDeprecationWarning) and is an error in NumPy >= 1.24.
    u_fake = []
    for jj in range(nu_sample):
        tmp_all = []
        for ii in range(len(test_x)):
            nn = test_x[ii].shape[0]
            tmp = np.hstack((np.ones((nn, 1)) * hd_bins[jj % 100], np.zeros((nn, 2))))
            if jj >= (nu_sample // 2):
                tmp[:, 2] += 1  # second half of the candidates: direction 2
            else:
                tmp[:, 1] += 1  # first half: direction 1
            tmp_all.append(tmp)
        u_fake.append(tmp_all)

    ## compute loglik of every candidate for every time bin
    lik_all = compute_marginal_lik_poisson(model, test_x, u_fake, sampling_num)

    # Per-batch argmax over the 200 candidates (list, not array: batches are
    # ragged). order="F" matches the candidate-major layout of lik_all.
    decode_use = [
        lik_all[jj].reshape(200, -1, order="F").argmax(axis=0)
        for jj in range(len(lik_all))
    ]
    # Hoist the concatenations out of the per-sample loops (the original
    # recomputed np.concatenate(test_y) for every sample — accidental O(n^2)).
    decoded = np.concatenate(decode_use)
    labels_cat = np.concatenate(test_y)

    median_err = np.median(
        np.abs(hd_bins_dir[decoded[:len(labels_cat)], 0] - labels_cat[:, 0])
    )

    prediction = [hd_bins_dir[decoded[i]] for i in range(len(labels_cat))]

    return median_err, prediction
[19]:
## Set smaller sampling_num to reduce computing time
# Monte Carlo decoding on the held-out test split; this cell is slow
# (200 candidates x sampling_num samples per time bin).
median_err, prediction = decode_sampling_rat(test_x, test_u, conv_pivae, sampling_num=100)

/tmp/ipykernel_165463/761218901.py:64: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray.
  u_fake.append(np.array(tmp_all))
/tmp/ipykernel_165463/761218901.py:70: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray.
  decode_use = np.array(
[20]:
print(f'Median Err [m]: {median_err}')
Median Err [m]: 0.09438572947425072

Decoding with the embedding using kNN#

  • Here we test kNN decoding on the piVAE embedding without the label prior, the same method we use for decoding with CEBRA embeddings.

[21]:
# Embed all three splits with the trained model. Index 6 is z_mean
# (see the output-order comment in the embedding cell earlier).
train_outputs = conv_pivae.predict([np.concatenate(train_x),
                            np.concatenate(train_u)])
valid_outputs = conv_pivae.predict([np.concatenate(valid_x),
                            np.concatenate(valid_u)])
test_outputs = conv_pivae.predict([np.concatenate(test_x),
                            np.concatenate(test_u)])

train_post_z = train_outputs[6]
valid_post_z = valid_outputs[6]
test_post_z = test_outputs[6]
[22]:
from sklearn.neighbors import KNeighborsRegressor, KNeighborsClassifier
import sklearn.metrics

def decoding_pos_dir(emb_train, emb_valid, emb_test, label_train, label_valid, label_test):
    """kNN decoding of position (regression) and direction (classification).

    Selects the neighbor count on the validation split by the joint R2 of
    the (position, direction) prediction, refits on the training split with
    that count, and evaluates on the test split.

    Args:
        emb_train/emb_valid/emb_test: embeddings, shape (n_samples, n_latents).
        label_*: labels; column 0 is position [m], column 1 the direction
            flag used as the classification target.

    Returns:
        Median absolute position error on the test split [m].
    """
    metric = 'cosine'
    # Candidate neighbor counts: 1, 4, 9, 16, 25.
    neighbors = np.power(np.arange(1,6, dtype = int),2)
    valid_scores = []
    for n in neighbors:
        pos_decoder = KNeighborsRegressor(n, metric = metric)
        dir_decoder = KNeighborsClassifier(n, metric = metric)
        pos_decoder.fit(emb_train, label_train[:,0])
        dir_decoder.fit(emb_train, label_train[:,1])
        pos_pred = pos_decoder.predict(emb_valid)
        dir_pred = dir_decoder.predict(emb_valid)
        prediction = np.stack([pos_pred, dir_pred], axis = 1)
        valid_scores.append(sklearn.metrics.r2_score(label_valid[:,:2], prediction))

    best_n = neighbors[np.argmax(valid_scores)]

    # Refit BOTH decoders with the selected neighbor count. (Bug fix: the
    # direction decoder previously reused the stale loop variable `n`
    # instead of `best_n`; it did not affect the returned position error,
    # which only uses prediction[:,0], but was inconsistent.)
    pos_decoder = KNeighborsRegressor(best_n, metric = metric)
    dir_decoder = KNeighborsClassifier(best_n, metric = metric)
    pos_decoder.fit(emb_train, label_train[:,0])
    dir_decoder.fit(emb_train, label_train[:,1])
    pos_pred = pos_decoder.predict(emb_test)
    dir_pred = dir_decoder.predict(emb_test)
    prediction = np.stack([pos_pred, dir_pred], axis = 1)
    pos_test_err = np.median(abs(prediction[:,0] - label_test[:, 0]))

    return pos_test_err

# Flatten the per-trial labels to match the embeddings, then run the kNN
# decoding pipeline.
train_label = np.concatenate(train_u)
valid_label = np.concatenate(valid_u)
test_label = np.concatenate(test_u)
pos_test_err = decoding_pos_dir(train_post_z, valid_post_z, test_post_z, train_label, valid_label, test_label )
[23]:
# Median absolute position error of the kNN decoder, in meters.
print(f'Median Err: {pos_test_err} m')
Median Err: 0.10224993526935577 m