6. Neural Architecture Search for Regression (Tensorflow/Keras2)#
In this tutorial we will learn the basics of neural architecture search (NAS). We will use artificial data to simulate a regression problem. Then, we will discover how to create a neural architecture search space using conditions with the deephyper.hpo
API. Finally, we will execute the search using Bayesian optimization and analyse the results.
[1]:
try:
import deephyper
import tf_keras as tfk
from deephyper.evaluator import RayEvaluator
except (ImportError, ModuleNotFoundError):
!pip install "deephyper[tf-keras2,ray] pydot"
import deephyper
import tf_keras as tfk
from deephyper.evaluator import RayEvaluator
print(deephyper.__version__)
Collecting deephyper
Downloading deephyper-0.3.3-py2.py3-none-any.whl (962 kB)
|████████████████████████████████| 962 kB 5.1 MB/s
Requirement already satisfied: networkx in /usr/local/lib/python3.7/dist-packages (from deephyper) (2.6.3)
Requirement already satisfied: tensorflow-probability in /usr/local/lib/python3.7/dist-packages (from deephyper) (0.14.1)
Collecting ConfigSpace>=0.4.18
Downloading ConfigSpace-0.4.20-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (4.2 MB)
|████████████████████████████████| 4.2 MB 33.2 MB/s
Requirement already satisfied: matplotlib>=3.0.3 in /usr/local/lib/python3.7/dist-packages (from deephyper) (3.2.2)
Requirement already satisfied: xgboost in /usr/local/lib/python3.7/dist-packages (from deephyper) (0.90)
Requirement already satisfied: tensorflow>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from deephyper) (2.7.0)
Requirement already satisfied: pydot in /usr/local/lib/python3.7/dist-packages (from deephyper) (1.3.0)
Requirement already satisfied: statsmodels in /usr/local/lib/python3.7/dist-packages (from deephyper) (0.10.2)
Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from deephyper) (1.19.5)
Requirement already satisfied: Jinja2 in /usr/local/lib/python3.7/dist-packages (from deephyper) (2.11.3)
Requirement already satisfied: pandas>=0.24.2 in /usr/local/lib/python3.7/dist-packages (from deephyper) (1.1.5)
Collecting openml==0.10.2
Downloading openml-0.10.2.tar.gz (158 kB)
|████████████████████████████████| 158 kB 47.4 MB/s
Collecting dh-scikit-optimize==0.9.4
Downloading dh_scikit_optimize-0.9.4-py2.py3-none-any.whl (102 kB)
|████████████████████████████████| 102 kB 11.5 MB/s
Requirement already satisfied: tqdm in /usr/local/lib/python3.7/dist-packages (from deephyper) (4.62.3)
Requirement already satisfied: typeguard in /usr/local/lib/python3.7/dist-packages (from deephyper) (2.7.1)
Requirement already satisfied: joblib>=0.10.3 in /usr/local/lib/python3.7/dist-packages (from deephyper) (1.1.0)
Collecting ray[default]>=1.3.0
Downloading ray-1.8.0-cp37-cp37m-manylinux2014_x86_64.whl (54.7 MB)
|████████████████████████████████| 54.7 MB 23 kB/s
Requirement already satisfied: scikit-learn>=0.23.1 in /usr/local/lib/python3.7/dist-packages (from deephyper) (1.0.1)
Requirement already satisfied: scipy>=0.19.1 in /usr/local/lib/python3.7/dist-packages (from dh-scikit-optimize==0.9.4->deephyper) (1.4.1)
Collecting pyaml>=16.9
Downloading pyaml-21.10.1-py2.py3-none-any.whl (24 kB)
Collecting liac-arff>=2.4.0
Downloading liac-arff-2.5.0.tar.gz (13 kB)
Collecting xmltodict
Downloading xmltodict-0.12.0-py2.py3-none-any.whl (9.2 kB)
Requirement already satisfied: requests in /usr/local/lib/python3.7/dist-packages (from openml==0.10.2->deephyper) (2.23.0)
Requirement already satisfied: python-dateutil in /usr/local/lib/python3.7/dist-packages (from openml==0.10.2->deephyper) (2.8.2)
Requirement already satisfied: cython in /usr/local/lib/python3.7/dist-packages (from ConfigSpace>=0.4.18->deephyper) (0.29.24)
Requirement already satisfied: pyparsing in /usr/local/lib/python3.7/dist-packages (from ConfigSpace>=0.4.18->deephyper) (2.4.7)
Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib>=3.0.3->deephyper) (0.11.0)
Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib>=3.0.3->deephyper) (1.3.2)
Requirement already satisfied: pytz>=2017.2 in /usr/local/lib/python3.7/dist-packages (from pandas>=0.24.2->deephyper) (2018.9)
Requirement already satisfied: PyYAML in /usr/local/lib/python3.7/dist-packages (from pyaml>=16.9->dh-scikit-optimize==0.9.4->deephyper) (3.13)
Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.7/dist-packages (from python-dateutil->openml==0.10.2->deephyper) (1.15.0)
Collecting redis>=3.5.0
Downloading redis-4.0.1-py3-none-any.whl (118 kB)
|████████████████████████████████| 118 kB 22.2 MB/s
Requirement already satisfied: click>=7.0 in /usr/local/lib/python3.7/dist-packages (from ray[default]>=1.3.0->deephyper) (7.1.2)
Requirement already satisfied: msgpack<2.0.0,>=1.0.0 in /usr/local/lib/python3.7/dist-packages (from ray[default]>=1.3.0->deephyper) (1.0.2)
Requirement already satisfied: grpcio>=1.28.1 in /usr/local/lib/python3.7/dist-packages (from ray[default]>=1.3.0->deephyper) (1.41.1)
Requirement already satisfied: filelock in /usr/local/lib/python3.7/dist-packages (from ray[default]>=1.3.0->deephyper) (3.3.2)
Requirement already satisfied: protobuf>=3.15.3 in /usr/local/lib/python3.7/dist-packages (from ray[default]>=1.3.0->deephyper) (3.17.3)
Requirement already satisfied: attrs in /usr/local/lib/python3.7/dist-packages (from ray[default]>=1.3.0->deephyper) (21.2.0)
Requirement already satisfied: jsonschema in /usr/local/lib/python3.7/dist-packages (from ray[default]>=1.3.0->deephyper) (2.6.0)
Collecting gpustat>=1.0.0b1
Downloading gpustat-1.0.0b1.tar.gz (82 kB)
|████████████████████████████████| 82 kB 213 kB/s
Collecting py-spy>=0.2.0
Downloading py_spy-0.3.11-py2.py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.whl (3.0 MB)
|████████████████████████████████| 3.0 MB 63.8 MB/s
Collecting aiohttp-cors
Downloading aiohttp_cors-0.7.0-py3-none-any.whl (27 kB)
Collecting aiohttp>=3.7
Downloading aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (1.1 MB)
|████████████████████████████████| 1.1 MB 46.6 MB/s
Requirement already satisfied: prometheus-client>=0.7.1 in /usr/local/lib/python3.7/dist-packages (from ray[default]>=1.3.0->deephyper) (0.12.0)
Collecting colorful
Downloading colorful-0.5.4-py2.py3-none-any.whl (201 kB)
|████████████████████████████████| 201 kB 53.7 MB/s
Collecting opencensus
Downloading opencensus-0.8.0-py2.py3-none-any.whl (128 kB)
|████████████████████████████████| 128 kB 75.8 MB/s
Collecting aioredis<2
Downloading aioredis-1.3.1-py3-none-any.whl (65 kB)
|████████████████████████████████| 65 kB 4.0 MB/s
Collecting aiosignal>=1.1.2
Downloading aiosignal-1.2.0-py3-none-any.whl (8.2 kB)
Collecting frozenlist>=1.1.1
Downloading frozenlist-1.2.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (192 kB)
|████████████████████████████████| 192 kB 64.4 MB/s
Collecting yarl<2.0,>=1.0
Downloading yarl-1.7.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (271 kB)
|████████████████████████████████| 271 kB 49.6 MB/s
Requirement already satisfied: typing-extensions>=3.7.4 in /usr/local/lib/python3.7/dist-packages (from aiohttp>=3.7->ray[default]>=1.3.0->deephyper) (3.10.0.2)
Collecting asynctest==0.13.0
Downloading asynctest-0.13.0-py3-none-any.whl (26 kB)
Requirement already satisfied: charset-normalizer<3.0,>=2.0 in /usr/local/lib/python3.7/dist-packages (from aiohttp>=3.7->ray[default]>=1.3.0->deephyper) (2.0.7)
Collecting multidict<7.0,>=4.5
Downloading multidict-5.2.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (160 kB)
|████████████████████████████████| 160 kB 57.6 MB/s
Collecting async-timeout<5.0,>=4.0.0a3
Downloading async_timeout-4.0.1-py3-none-any.whl (5.7 kB)
Collecting hiredis
Downloading hiredis-2.0.0-cp37-cp37m-manylinux2010_x86_64.whl (85 kB)
|████████████████████████████████| 85 kB 4.1 MB/s
Requirement already satisfied: nvidia-ml-py3>=7.352.0 in /usr/local/lib/python3.7/dist-packages (from gpustat>=1.0.0b1->ray[default]>=1.3.0->deephyper) (7.352.0)
Requirement already satisfied: psutil in /usr/local/lib/python3.7/dist-packages (from gpustat>=1.0.0b1->ray[default]>=1.3.0->deephyper) (5.4.8)
Collecting blessed>=1.17.1
Downloading blessed-1.19.0-py2.py3-none-any.whl (57 kB)
|████████████████████████████████| 57 kB 4.7 MB/s
Requirement already satisfied: wcwidth>=0.1.4 in /usr/local/lib/python3.7/dist-packages (from blessed>=1.17.1->gpustat>=1.0.0b1->ray[default]>=1.3.0->deephyper) (0.2.5)
Collecting deprecated
Downloading Deprecated-1.2.13-py2.py3-none-any.whl (9.6 kB)
Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from scikit-learn>=0.23.1->deephyper) (3.0.0)
Requirement already satisfied: tensorboard~=2.6 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2.0.0->deephyper) (2.7.0)
Requirement already satisfied: flatbuffers<3.0,>=1.12 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2.0.0->deephyper) (2.0)
Requirement already satisfied: gast<0.5.0,>=0.2.1 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2.0.0->deephyper) (0.4.0)
Requirement already satisfied: wheel<1.0,>=0.32.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2.0.0->deephyper) (0.37.0)
Requirement already satisfied: tensorflow-io-gcs-filesystem>=0.21.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2.0.0->deephyper) (0.22.0)
Requirement already satisfied: google-pasta>=0.1.1 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2.0.0->deephyper) (0.2.0)
Requirement already satisfied: wrapt>=1.11.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2.0.0->deephyper) (1.13.3)
Requirement already satisfied: tensorflow-estimator<2.8,~=2.7.0rc0 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2.0.0->deephyper) (2.7.0)
Requirement already satisfied: keras-preprocessing>=1.1.1 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2.0.0->deephyper) (1.1.2)
Requirement already satisfied: absl-py>=0.4.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2.0.0->deephyper) (0.12.0)
Requirement already satisfied: h5py>=2.9.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2.0.0->deephyper) (3.1.0)
Requirement already satisfied: opt-einsum>=2.3.2 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2.0.0->deephyper) (3.3.0)
Requirement already satisfied: termcolor>=1.1.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2.0.0->deephyper) (1.1.0)
Requirement already satisfied: keras<2.8,>=2.7.0rc0 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2.0.0->deephyper) (2.7.0)
Requirement already satisfied: astunparse>=1.6.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2.0.0->deephyper) (1.6.3)
Requirement already satisfied: libclang>=9.0.1 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2.0.0->deephyper) (12.0.0)
Requirement already satisfied: cached-property in /usr/local/lib/python3.7/dist-packages (from h5py>=2.9.0->tensorflow>=2.0.0->deephyper) (1.5.2)
Requirement already satisfied: google-auth-oauthlib<0.5,>=0.4.1 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow>=2.0.0->deephyper) (0.4.6)
Requirement already satisfied: setuptools>=41.0.0 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow>=2.0.0->deephyper) (57.4.0)
Requirement already satisfied: werkzeug>=0.11.15 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow>=2.0.0->deephyper) (1.0.1)
Requirement already satisfied: tensorboard-plugin-wit>=1.6.0 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow>=2.0.0->deephyper) (1.8.0)
Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow>=2.0.0->deephyper) (3.3.4)
Requirement already satisfied: tensorboard-data-server<0.7.0,>=0.6.0 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow>=2.0.0->deephyper) (0.6.1)
Requirement already satisfied: google-auth<3,>=1.6.3 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow>=2.0.0->deephyper) (1.35.0)
Requirement already satisfied: rsa<5,>=3.1.4 in /usr/local/lib/python3.7/dist-packages (from google-auth<3,>=1.6.3->tensorboard~=2.6->tensorflow>=2.0.0->deephyper) (4.7.2)
Requirement already satisfied: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.7/dist-packages (from google-auth<3,>=1.6.3->tensorboard~=2.6->tensorflow>=2.0.0->deephyper) (0.2.8)
Requirement already satisfied: cachetools<5.0,>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from google-auth<3,>=1.6.3->tensorboard~=2.6->tensorflow>=2.0.0->deephyper) (4.2.4)
Requirement already satisfied: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.7/dist-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard~=2.6->tensorflow>=2.0.0->deephyper) (1.3.0)
Requirement already satisfied: importlib-metadata in /usr/local/lib/python3.7/dist-packages (from markdown>=2.6.8->tensorboard~=2.6->tensorflow>=2.0.0->deephyper) (4.8.2)
Requirement already satisfied: pyasn1<0.5.0,>=0.4.6 in /usr/local/lib/python3.7/dist-packages (from pyasn1-modules>=0.2.1->google-auth<3,>=1.6.3->tensorboard~=2.6->tensorflow>=2.0.0->deephyper) (0.4.8)
Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->openml==0.10.2->deephyper) (2.10)
Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests->openml==0.10.2->deephyper) (1.24.3)
Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests->openml==0.10.2->deephyper) (3.0.4)
Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests->openml==0.10.2->deephyper) (2021.10.8)
Requirement already satisfied: oauthlib>=3.0.0 in /usr/local/lib/python3.7/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard~=2.6->tensorflow>=2.0.0->deephyper) (3.1.1)
Requirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.7/dist-packages (from importlib-metadata->markdown>=2.6.8->tensorboard~=2.6->tensorflow>=2.0.0->deephyper) (3.6.0)
Requirement already satisfied: MarkupSafe>=0.23 in /usr/local/lib/python3.7/dist-packages (from Jinja2->deephyper) (2.0.1)
Requirement already satisfied: google-api-core<3.0.0,>=1.0.0 in /usr/local/lib/python3.7/dist-packages (from opencensus->ray[default]>=1.3.0->deephyper) (1.26.3)
Collecting opencensus-context==0.1.2
Downloading opencensus_context-0.1.2-py2.py3-none-any.whl (4.4 kB)
Requirement already satisfied: googleapis-common-protos<2.0dev,>=1.6.0 in /usr/local/lib/python3.7/dist-packages (from google-api-core<3.0.0,>=1.0.0->opencensus->ray[default]>=1.3.0->deephyper) (1.53.0)
Requirement already satisfied: packaging>=14.3 in /usr/local/lib/python3.7/dist-packages (from google-api-core<3.0.0,>=1.0.0->opencensus->ray[default]>=1.3.0->deephyper) (21.2)
Requirement already satisfied: patsy>=0.4.0 in /usr/local/lib/python3.7/dist-packages (from statsmodels->deephyper) (0.5.2)
Requirement already satisfied: decorator in /usr/local/lib/python3.7/dist-packages (from tensorflow-probability->deephyper) (4.4.2)
Requirement already satisfied: dm-tree in /usr/local/lib/python3.7/dist-packages (from tensorflow-probability->deephyper) (0.1.6)
Requirement already satisfied: cloudpickle>=1.3 in /usr/local/lib/python3.7/dist-packages (from tensorflow-probability->deephyper) (1.3.0)
Building wheels for collected packages: openml, liac-arff, gpustat
Building wheel for openml (setup.py) ... done
Created wheel for openml: filename=openml-0.10.2-py3-none-any.whl size=190318 sha256=6f431466cb702f739c2337475733f05ce4bb2bafb81cc3d9d1ab6941159d0152
Stored in directory: /root/.cache/pip/wheels/9c/9e/f3/6a5ebf16527d7fe22d9bc1652bc9beb5dc9fcfdeb75e805400
Building wheel for liac-arff (setup.py) ... done
Created wheel for liac-arff: filename=liac_arff-2.5.0-py3-none-any.whl size=11731 sha256=60fe3cfb23ccb90a5eb581e4884df69a8e804b2d6fb7ec25b6ae071acde27916
Stored in directory: /root/.cache/pip/wheels/1f/0f/15/332ca86cbebf25ddf98518caaf887945fbe1712b97a0f2493b
Building wheel for gpustat (setup.py) ... done
Created wheel for gpustat: filename=gpustat-1.0.0b1-py3-none-any.whl size=15979 sha256=80eda2084e3b9a55684ec3fbfc292b182ec25c8bef7220130107c82157038bb8
Stored in directory: /root/.cache/pip/wheels/1a/16/e2/3e2437fba4c4b6a97a97bd96fce5d14e66cff5c4966fb1cc8c
Successfully built openml liac-arff gpustat
Installing collected packages: multidict, frozenlist, yarl, deprecated, asynctest, async-timeout, aiosignal, redis, opencensus-context, hiredis, blessed, aiohttp, xmltodict, ray, pyaml, py-spy, opencensus, liac-arff, gpustat, colorful, aioredis, aiohttp-cors, openml, dh-scikit-optimize, ConfigSpace, deephyper
Successfully installed ConfigSpace-0.4.20 aiohttp-3.8.1 aiohttp-cors-0.7.0 aioredis-1.3.1 aiosignal-1.2.0 async-timeout-4.0.1 asynctest-0.13.0 blessed-1.19.0 colorful-0.5.4 deephyper-0.3.3 deprecated-1.2.13 dh-scikit-optimize-0.9.4 frozenlist-1.2.0 gpustat-1.0.0b1 hiredis-2.0.0 liac-arff-2.5.0 multidict-5.2.0 opencensus-0.8.0 opencensus-context-0.1.2 openml-0.10.2 py-spy-0.3.11 pyaml-21.10.1 ray-1.8.0 redis-4.0.1 xmltodict-0.12.0 yarl-1.7.2
[1]:
import os
import pathlib

# Common figure size used by every plot in this tutorial.
# The height is derived from the width via the golden ratio (~1.618).
WIDTH_PLOTS = 8
HEIGHT_PLOTS = WIDTH_PLOTS / 1.618
6.1. Loading the data#
First, we will create load_data_*
functions which load and return the training, validation and testing data. The load_data_*
functions generate data from the function \(f(x) = 2 \sin(x)\) where \(x \in [-30, 30]\):
[2]:
import numpy as np
from sklearn.model_selection import train_test_split
def load_data_train_test(random_state=42):
    """Generate the training and testing data for the regression task.

    The ground-truth function is f(x) = 2*sin(x). Training inputs are drawn
    uniformly at random from [-30, 30]; test inputs are an evenly spaced grid
    over the same interval.

    Returns:
        ((x, y), (x_tst, y_tst)) as column vectors of shape (400, 1).
    """
    num_samples = 400
    rng = np.random.RandomState(random_state)

    def target(t):
        # A scaled sine wave: the function the neural network must learn.
        return 2 * np.sin(t)

    x = rng.uniform(low=-30, high=30.0, size=num_samples)
    x_tst = np.linspace(-30.0, 30.0, 400)
    y, y_tst = target(x), target(x_tst)

    # Reshape everything into (n_samples, 1) column vectors.
    x, y = x.reshape(-1, 1), y.reshape(-1, 1)
    x_tst, y_tst = x_tst.reshape(-1, 1), y_tst.reshape(-1, 1)
    return (x, y), (x_tst, y_tst)
def load_data_train_valid(verbose=0, random_state=42):
    """Split the generated training data into train/validation subsets (67/33)."""
    (x, y), _ = load_data_train_test(random_state=random_state)
    train_X, valid_X, train_y, valid_y = train_test_split(
        x, y, test_size=0.33, random_state=random_state
    )
    if verbose:
        # Report the shapes of each split for quick sanity checking.
        for name, arr in (
            ("train_X", train_X),
            ("train_y", train_y),
            ("valid_X", valid_X),
            ("valid_y", valid_y),
        ):
            print(f"{name} shape: {np.shape(arr)}")
    return (train_X, train_y), (valid_X, valid_y)
6.2. Neural Architecture Search Space#
[3]:
from collections import deque
import tf_keras as tfk
import tf_keras.utils as tfku
from ConfigSpace import GreaterThanCondition
from deephyper.hpo import HpProblem
class SimpleMLPSearchSpace:
    """Neural architecture search space of MLPs with optional skip connections.

    The space is parameterized by:
    - ``num_layers``: the number of hidden dense layers actually used;
    - per-layer ``layer_{i}_units`` and ``layer_{i}_activation``;
    - binary skip-connection choices ``{anchor}->layer_{i}`` linking a layer
      to up to 3 previous "anchor" outputs (including the input);
    - the training hyperparameters ``learning_rate`` and ``batch_size``.

    Args:
        max_num_layers: maximum number of hidden dense layers.
        max_num_units_per_layer: upper bound of the units range per layer.
    """

    def __init__(
        self, max_num_layers: int = 5, max_num_units_per_layer: int = 64
    ) -> None:
        self.max_num_layers = max_num_layers
        self.max_num_units_per_layer = max_num_units_per_layer
        self._hp_problem = None  # built lazily by the `hp_problem` property

    def create_model(self, parameters: dict) -> tfk.Model:
        """Create a tensorflow keras model from a set of hyperparameters."""
        tfk.backend.clear_session()
        inputs = out = tfk.layers.Input(shape=(1,), name="input")

        # Keep up to 3 previous outputs available as skip-connection anchors.
        anchors = deque(maxlen=3)
        for i in range(parameters["num_layers"]):
            # Skip connection
            prev_out = out
            for anchor in anchors:
                if parameters[f"{anchor.name.split('/')[0]}->layer_{i}"]:
                    # Linear projection to match the shape of the output from
                    # the previous layer before adding.
                    anchor = tfk.layers.Dense(parameters[f"layer_{i-1}_units"])(anchor)
                    out = tfk.layers.Add()([out, anchor])
                    out = tfk.layers.Activation("relu")(out)
            # Dense layer for the main backbone of the model
            out = tfk.layers.Dense(
                parameters[f"layer_{i}_units"],
                activation=parameters[f"layer_{i}_activation"],
                name=f"layer_{i}",
            )(out)
            anchors.append(prev_out)

        # Final output layer with a single unit (scalar regression output).
        output = tfk.layers.Dense(1)(out)
        model = tfk.Model(inputs, output)
        return model

    @property
    def hp_problem(self) -> HpProblem:
        """Create a hyperparameter optimization problem corresponding to the model."""
        if self._hp_problem is not None:
            return self._hp_problem

        problem = HpProblem()

        max_num_layers = self.max_num_layers
        # FIX: honor `max_num_units_per_layer` — the range upper bound was
        # previously hard-coded to 64, silently ignoring the constructor arg.
        max_num_units = self.max_num_units_per_layer

        num_layers = problem.add_hyperparameter(
            (1, max_num_layers), "num_layers", default_value=2
        )

        anchors = deque(maxlen=3)
        conditions = []
        for i in range(max_num_layers):
            layer_i_units = problem.add_hyperparameter(
                (1, max_num_units),
                f"layer_{i}_units",
                default_value=min(32, max_num_units),
            )
            layer_i_activation = problem.add_hyperparameter(
                ["relu", "sigmoid", "tanh", "swish"],
                f"layer_{i}_activation",
                default_value="relu",
            )
            # Layer i hyperparameters are only active when num_layers > i.
            if i > 0:
                conditions.extend(
                    [
                        GreaterThanCondition(layer_i_units, num_layers, i),
                        GreaterThanCondition(layer_i_activation, num_layers, i),
                    ]
                )

            # For Skip Connections: a binary choice per (anchor, layer) pair.
            previous_layer = f"layer_{i-1}" if i > 0 else "input"
            for anchor_name in anchors:
                layer_i_connection = problem.add_hyperparameter(
                    (0, 1), f"{anchor_name}->layer_{i}", default_value=0
                )
                conditions.append(
                    GreaterThanCondition(layer_i_connection, num_layers, i)
                )
            anchors.append(previous_layer)

        problem.add_conditions(conditions)

        # Training hyperparameters
        problem.add_hyperparameter(
            (1e-5, 1e-1, "log-uniform"), "learning_rate", default_value=1e-3
        )
        problem.add_hyperparameter(
            (1, 100, "log-uniform"), "batch_size", default_value=16
        )

        self._hp_problem = problem
        return self._hp_problem
[4]:
# Instantiate the search space and display its hyperparameter problem.
nas_search_space = SimpleMLPSearchSpace(max_num_layers=5, max_num_units_per_layer=64)
nas_search_space.hp_problem
[4]:
Configuration space object:
Hyperparameters:
batch_size, Type: UniformInteger, Range: [1, 100], Default: 16, on log-scale
input->layer_1, Type: UniformInteger, Range: [0, 1], Default: 0
input->layer_2, Type: UniformInteger, Range: [0, 1], Default: 0
input->layer_3, Type: UniformInteger, Range: [0, 1], Default: 0
layer_0->layer_2, Type: UniformInteger, Range: [0, 1], Default: 0
layer_0->layer_3, Type: UniformInteger, Range: [0, 1], Default: 0
layer_0->layer_4, Type: UniformInteger, Range: [0, 1], Default: 0
layer_0_activation, Type: Categorical, Choices: {relu, sigmoid, tanh, swish}, Default: relu
layer_0_units, Type: UniformInteger, Range: [1, 64], Default: 32
layer_1->layer_3, Type: UniformInteger, Range: [0, 1], Default: 0
layer_1->layer_4, Type: UniformInteger, Range: [0, 1], Default: 0
layer_1_activation, Type: Categorical, Choices: {relu, sigmoid, tanh, swish}, Default: relu
layer_1_units, Type: UniformInteger, Range: [1, 64], Default: 32
layer_2->layer_4, Type: UniformInteger, Range: [0, 1], Default: 0
layer_2_activation, Type: Categorical, Choices: {relu, sigmoid, tanh, swish}, Default: relu
layer_2_units, Type: UniformInteger, Range: [1, 64], Default: 32
layer_3_activation, Type: Categorical, Choices: {relu, sigmoid, tanh, swish}, Default: relu
layer_3_units, Type: UniformInteger, Range: [1, 64], Default: 32
layer_4_activation, Type: Categorical, Choices: {relu, sigmoid, tanh, swish}, Default: relu
layer_4_units, Type: UniformInteger, Range: [1, 64], Default: 32
learning_rate, Type: UniformFloat, Range: [1e-05, 0.1], Default: 0.001, on log-scale
num_layers, Type: UniformInteger, Range: [1, 5], Default: 2
Conditions:
input->layer_1 | num_layers > 1
input->layer_2 | num_layers > 2
input->layer_3 | num_layers > 3
layer_0->layer_2 | num_layers > 2
layer_0->layer_3 | num_layers > 3
layer_0->layer_4 | num_layers > 4
layer_1->layer_3 | num_layers > 3
layer_1->layer_4 | num_layers > 4
layer_1_activation | num_layers > 1
layer_1_units | num_layers > 1
layer_2->layer_4 | num_layers > 4
layer_2_activation | num_layers > 2
layer_2_units | num_layers > 2
layer_3_activation | num_layers > 3
layer_3_units | num_layers > 3
layer_4_activation | num_layers > 4
layer_4_units | num_layers > 4
[5]:
# Inspect the default configuration: the baseline architecture evaluated below.
nas_search_space.hp_problem.default_configuration
[5]:
{'batch_size': 16,
'layer_0_activation': 'relu',
'layer_0_units': 32,
'learning_rate': 0.001,
'num_layers': 2,
'input->layer_1': 0,
'layer_1_activation': 'relu',
'layer_1_units': 32,
'input->layer_2': 0,
'input->layer_3': 0,
'layer_0->layer_2': 0,
'layer_0->layer_3': 0,
'layer_0->layer_4': 0,
'layer_1->layer_3': 0,
'layer_1->layer_4': 0,
'layer_2->layer_4': 0,
'layer_2_activation': 'relu',
'layer_2_units': 1,
'layer_3_activation': 'relu',
'layer_3_units': 1,
'layer_4_activation': 'relu',
'layer_4_units': 1}
[6]:
# Build the Keras model corresponding to the default configuration and inspect it.
model = nas_search_space.create_model(nas_search_space.hp_problem.default_configuration)
model.summary()
Model: "model"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
input (InputLayer) [(None, 1)] 0
layer_0 (Dense) (None, 32) 64
layer_1 (Dense) (None, 32) 1056
dense (Dense) (None, 1) 33
=================================================================
Total params: 1153 (4.50 KB)
Trainable params: 1153 (4.50 KB)
Non-trainable params: 0 (0.00 Byte)
_________________________________________________________________
[21]:
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
def plot_model_architecture(model):
    """Render the Keras model graph to an image and display it with matplotlib.

    NOTE(review): relies on `tfku.plot_model` writing to "model.png" (its
    implicit default output file), which is then read back — confirm this
    still holds if the working directory or Keras version changes.
    """
    plt.figure(figsize=(WIDTH_PLOTS, HEIGHT_PLOTS))
    tfku.plot_model(model, show_shapes=True, show_layer_names=True, show_layer_activations=True, dpi=300)
    image = mpimg.imread("model.png")
    plt.imshow(image)
    plt.axis("off")
    plt.show()

plot_model_architecture(model)

6.3. Evaluation of Hyperparameters#
[8]:
from sklearn.preprocessing import StandardScaler
def run(job, model_checkpoint_dir=".", verbose=True, show_plots=False):
    """Train a candidate architecture and return its (negated) validation loss.

    Args:
        job: a DeepHyper ``RunningJob`` carrying the sampled hyperparameters
            in ``job.parameters`` and a unique ``job.id``.
        model_checkpoint_dir: directory where the trained model is saved.
        verbose: forwarded to the data loader and to Keras training logs.
        show_plots: if True, plot the data splits, the learning curves, and
            the model predictions on the test grid.

    Returns:
        dict with the "objective" to MAXIMIZE (negative final validation
        loss), or the string "F_fit" if training failed.
    """
    if verbose:
        print(f"{job.id}: Start running...")

    (x, y), (vx, vy) = load_data_train_valid(verbose=verbose)
    _, (tx, ty) = load_data_train_test()

    if show_plots:
        plt.figure(figsize=(WIDTH_PLOTS, HEIGHT_PLOTS))
        plt.plot(tx.reshape(-1), ty.reshape(-1), "ko--", label="test", alpha=0.5)
        plt.plot(x.reshape(-1), y.reshape(-1), "bo", label="train", alpha=0.8)
        plt.plot(vx.reshape(-1), vy.reshape(-1), "ro", label="valid", alpha=0.8)
        plt.ylabel("$y = f(x)$", fontsize=12)
        plt.xlabel("$x$", fontsize=12)
        plt.xlim(-30, 30)
        plt.legend(loc="upper center", ncol=3, fontsize=12)
        plt.show()

    # Scaling the data: fit the scalers on the training split only, then
    # apply them to the validation/test splits to avoid data leakage.
    scaler_x = StandardScaler()
    s_x = scaler_x.fit_transform(x)
    s_vx = scaler_x.transform(vx)
    s_tx = scaler_x.transform(tx)

    scaler_y = StandardScaler()
    s_y = scaler_y.fit_transform(y)
    s_vy = scaler_y.transform(vy)
    # NOTE: the scaled test targets are never needed — predictions are
    # inverse-transformed back to the original units before plotting.

    # Creating the model from the hyperparameters received within the `job`
    model = nas_search_space.create_model(job.parameters)

    # Some hyperparameters correspond to training parameters
    # such as learning rate and batch size
    optimizer = tfk.optimizers.legacy.Adam(
        learning_rate=job.parameters["learning_rate"]
    )
    model.compile(optimizer, loss="mse")

    try:
        history = model.fit(
            s_x,
            s_y,
            epochs=1000,
            batch_size=job.parameters["batch_size"],
            validation_data=(s_vx, s_vy),
            verbose=verbose,
        )
    except Exception:
        # FIX: narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are not swallowed.
        # Errors can happen during the training of the model (e.g., NaN values).
        # In this case we return a specific objective value starting with "F_"
        # to notify the search algorithm that these hyperparameters should be
        # penalized.
        return "F_fit"

    # Checkpointing the model is useful to be able to reload it later.
    # We use the `job.id` to give a unique identifier to the model.
    tfk.models.save_model(
        model, os.path.join(model_checkpoint_dir, f"model_{job.id}.keras")
    )

    if show_plots:
        plt.figure(figsize=(WIDTH_PLOTS, HEIGHT_PLOTS))
        plt.plot(history.history["loss"], label="training")
        plt.plot(history.history["val_loss"], label="validation")
        plt.xlabel("Epochs")
        plt.ylabel("MSE")
        plt.legend()
        plt.show()

    if show_plots:
        pred_ty = scaler_y.inverse_transform(model(s_tx).numpy())
        plt.figure(figsize=(WIDTH_PLOTS, HEIGHT_PLOTS))
        plt.plot(tx, ty, label="truth")
        plt.plot(tx, pred_ty, label=r"$prediction$")
        plt.legend()
        plt.ylim(-30, 30)
        plt.show()

    # DeepHyper search algorithms are standardized for MAXIMIZATION.
    # Therefore we return the negative of the validation loss at the last
    # training iteration.
    # Returning `-min(history.history["val_loss"])` is more prone to selecting
    # overfitting models!
    return {"objective": -history.history["val_loss"][-1]}
6.4. Testing the Baseline Default Neural Architecture#
[9]:
from deephyper.evaluator import RunningJob
def evaluate_baseline_default_neural_architecture():
    """Evaluate the search space's default configuration once as a baseline.

    Creates a dedicated checkpoint directory, wraps the default
    hyperparameter configuration in a ``RunningJob``, and calls ``run``
    with verbose training output and plotting enabled.

    Returns:
        The value returned by ``run`` — an ``{"objective": ...}`` dict on
        success, or a string starting with ``"F_"`` if training failed.
    """
    checkpoint_dir = "nas_tfk2_basic_baseline"
    # Ensure the checkpoint directory exists before training starts.
    pathlib.Path(checkpoint_dir).mkdir(parents=True, exist_ok=True)
    default_job = RunningJob(
        parameters=nas_search_space.hp_problem.default_configuration
    )
    return run(
        default_job,
        model_checkpoint_dir=checkpoint_dir,
        verbose=True,
        show_plots=True,
    )
[10]:
evaluate_baseline_default_neural_architecture()
0.0: Start running...
train_X shape: (268, 1)
train_y shape: (268, 1)
valid_X shape: (132, 1)
valid_y shape: (132, 1)

Epoch 1/1000
17/17 [==============================] - 0s 5ms/step - loss: 1.0223 - val_loss: 0.8525
Epoch 2/1000
17/17 [==============================] - 0s 1ms/step - loss: 1.0018 - val_loss: 0.8364
Epoch 3/1000
17/17 [==============================] - 0s 1ms/step - loss: 1.0040 - val_loss: 0.8303
Epoch 4/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9995 - val_loss: 0.8424
Epoch 5/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9983 - val_loss: 0.8409
Epoch 6/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9969 - val_loss: 0.8346
Epoch 7/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.9961 - val_loss: 0.8402
Epoch 8/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9937 - val_loss: 0.8332
Epoch 9/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9990 - val_loss: 0.8389
Epoch 10/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9913 - val_loss: 0.8360
Epoch 11/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9911 - val_loss: 0.8324
Epoch 12/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9912 - val_loss: 0.8344
Epoch 13/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9894 - val_loss: 0.8383
Epoch 14/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9931 - val_loss: 0.8457
Epoch 15/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9912 - val_loss: 0.8354
Epoch 16/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9898 - val_loss: 0.8403
Epoch 17/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9917 - val_loss: 0.8329
Epoch 18/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9927 - val_loss: 0.8428
Epoch 19/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9883 - val_loss: 0.8346
Epoch 20/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9872 - val_loss: 0.8386
Epoch 21/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9910 - val_loss: 0.8359
Epoch 22/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9857 - val_loss: 0.8335
Epoch 23/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9871 - val_loss: 0.8399
Epoch 24/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9854 - val_loss: 0.8392
Epoch 25/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9839 - val_loss: 0.8354
Epoch 26/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9843 - val_loss: 0.8360
Epoch 27/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9840 - val_loss: 0.8343
Epoch 28/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9837 - val_loss: 0.8362
Epoch 29/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9840 - val_loss: 0.8347
Epoch 30/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9849 - val_loss: 0.8336
Epoch 31/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9846 - val_loss: 0.8394
Epoch 32/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.9820 - val_loss: 0.8342
Epoch 33/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9835 - val_loss: 0.8356
Epoch 34/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9849 - val_loss: 0.8391
Epoch 35/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9837 - val_loss: 0.8341
Epoch 36/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9813 - val_loss: 0.8405
Epoch 37/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9831 - val_loss: 0.8378
Epoch 38/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9838 - val_loss: 0.8389
Epoch 39/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9815 - val_loss: 0.8338
Epoch 40/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9814 - val_loss: 0.8354
Epoch 41/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9788 - val_loss: 0.8341
Epoch 42/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9865 - val_loss: 0.8339
Epoch 43/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9782 - val_loss: 0.8384
Epoch 44/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9802 - val_loss: 0.8397
Epoch 45/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9787 - val_loss: 0.8342
Epoch 46/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9794 - val_loss: 0.8340
Epoch 47/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9800 - val_loss: 0.8319
Epoch 48/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9768 - val_loss: 0.8413
Epoch 49/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9778 - val_loss: 0.8372
Epoch 50/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9789 - val_loss: 0.8351
Epoch 51/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9818 - val_loss: 0.8407
Epoch 52/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9767 - val_loss: 0.8305
Epoch 53/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9779 - val_loss: 0.8394
Epoch 54/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9750 - val_loss: 0.8344
Epoch 55/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9765 - val_loss: 0.8292
Epoch 56/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9761 - val_loss: 0.8389
Epoch 57/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9771 - val_loss: 0.8397
Epoch 58/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.9760 - val_loss: 0.8372
Epoch 59/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9795 - val_loss: 0.8339
Epoch 60/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9773 - val_loss: 0.8298
Epoch 61/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9753 - val_loss: 0.8375
Epoch 62/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9752 - val_loss: 0.8416
Epoch 63/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9766 - val_loss: 0.8384
Epoch 64/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9761 - val_loss: 0.8297
Epoch 65/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9732 - val_loss: 0.8344
Epoch 66/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9752 - val_loss: 0.8395
Epoch 67/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9717 - val_loss: 0.8338
Epoch 68/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9729 - val_loss: 0.8317
Epoch 69/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9781 - val_loss: 0.8439
Epoch 70/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9732 - val_loss: 0.8319
Epoch 71/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9760 - val_loss: 0.8257
Epoch 72/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9709 - val_loss: 0.8449
Epoch 73/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9733 - val_loss: 0.8328
Epoch 74/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9711 - val_loss: 0.8321
Epoch 75/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9722 - val_loss: 0.8366
Epoch 76/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9707 - val_loss: 0.8373
Epoch 77/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9714 - val_loss: 0.8386
Epoch 78/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9695 - val_loss: 0.8323
Epoch 79/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9707 - val_loss: 0.8293
Epoch 80/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9708 - val_loss: 0.8337
Epoch 81/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9698 - val_loss: 0.8447
Epoch 82/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9713 - val_loss: 0.8361
Epoch 83/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9700 - val_loss: 0.8334
Epoch 84/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9722 - val_loss: 0.8381
Epoch 85/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.9695 - val_loss: 0.8313
Epoch 86/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9682 - val_loss: 0.8326
Epoch 87/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9711 - val_loss: 0.8357
Epoch 88/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9711 - val_loss: 0.8366
Epoch 89/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9676 - val_loss: 0.8360
Epoch 90/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9679 - val_loss: 0.8321
Epoch 91/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9701 - val_loss: 0.8352
Epoch 92/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9739 - val_loss: 0.8389
Epoch 93/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9656 - val_loss: 0.8279
Epoch 94/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9712 - val_loss: 0.8279
Epoch 95/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9662 - val_loss: 0.8348
Epoch 96/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9647 - val_loss: 0.8389
Epoch 97/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9653 - val_loss: 0.8307
Epoch 98/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9662 - val_loss: 0.8333
Epoch 99/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9677 - val_loss: 0.8356
Epoch 100/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9660 - val_loss: 0.8323
Epoch 101/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9654 - val_loss: 0.8405
Epoch 102/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9696 - val_loss: 0.8250
Epoch 103/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9642 - val_loss: 0.8375
Epoch 104/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9649 - val_loss: 0.8327
Epoch 105/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9645 - val_loss: 0.8317
Epoch 106/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9627 - val_loss: 0.8324
Epoch 107/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9655 - val_loss: 0.8276
Epoch 108/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9651 - val_loss: 0.8407
Epoch 109/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.9674 - val_loss: 0.8264
Epoch 110/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9630 - val_loss: 0.8329
Epoch 111/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9684 - val_loss: 0.8463
Epoch 112/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9639 - val_loss: 0.8282
Epoch 113/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9653 - val_loss: 0.8225
Epoch 114/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9673 - val_loss: 0.8383
Epoch 115/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9667 - val_loss: 0.8247
Epoch 116/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9662 - val_loss: 0.8403
Epoch 117/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9604 - val_loss: 0.8254
Epoch 118/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9608 - val_loss: 0.8268
Epoch 119/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9614 - val_loss: 0.8276
Epoch 120/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9628 - val_loss: 0.8336
Epoch 121/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9607 - val_loss: 0.8260
Epoch 122/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9651 - val_loss: 0.8362
Epoch 123/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9594 - val_loss: 0.8265
Epoch 124/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9602 - val_loss: 0.8284
Epoch 125/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9603 - val_loss: 0.8290
Epoch 126/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9580 - val_loss: 0.8333
Epoch 127/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9628 - val_loss: 0.8254
Epoch 128/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9588 - val_loss: 0.8295
Epoch 129/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9646 - val_loss: 0.8285
Epoch 130/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9595 - val_loss: 0.8362
Epoch 131/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9594 - val_loss: 0.8294
Epoch 132/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9562 - val_loss: 0.8276
Epoch 133/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9593 - val_loss: 0.8276
Epoch 134/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.9575 - val_loss: 0.8242
Epoch 135/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9570 - val_loss: 0.8325
Epoch 136/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9568 - val_loss: 0.8262
Epoch 137/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9559 - val_loss: 0.8261
Epoch 138/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9668 - val_loss: 0.8216
Epoch 139/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9574 - val_loss: 0.8377
Epoch 140/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9550 - val_loss: 0.8258
Epoch 141/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9550 - val_loss: 0.8256
Epoch 142/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9546 - val_loss: 0.8265
Epoch 143/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9572 - val_loss: 0.8297
Epoch 144/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9576 - val_loss: 0.8222
Epoch 145/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9557 - val_loss: 0.8281
Epoch 146/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9561 - val_loss: 0.8253
Epoch 147/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9616 - val_loss: 0.8202
Epoch 148/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9549 - val_loss: 0.8304
Epoch 149/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9530 - val_loss: 0.8212
Epoch 150/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9592 - val_loss: 0.8240
Epoch 151/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9552 - val_loss: 0.8155
Epoch 152/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9578 - val_loss: 0.8347
Epoch 153/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9564 - val_loss: 0.8172
Epoch 154/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9515 - val_loss: 0.8283
Epoch 155/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9605 - val_loss: 0.8332
Epoch 156/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.9537 - val_loss: 0.8137
Epoch 157/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9528 - val_loss: 0.8266
Epoch 158/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9516 - val_loss: 0.8240
Epoch 159/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9548 - val_loss: 0.8225
Epoch 160/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9498 - val_loss: 0.8258
Epoch 161/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9541 - val_loss: 0.8275
Epoch 162/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9512 - val_loss: 0.8201
Epoch 163/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9512 - val_loss: 0.8161
Epoch 164/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9539 - val_loss: 0.8193
Epoch 165/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9515 - val_loss: 0.8303
Epoch 166/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9518 - val_loss: 0.8232
Epoch 167/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9487 - val_loss: 0.8161
Epoch 168/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9479 - val_loss: 0.8199
Epoch 169/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9481 - val_loss: 0.8213
Epoch 170/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9508 - val_loss: 0.8316
Epoch 171/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9504 - val_loss: 0.8159
Epoch 172/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9511 - val_loss: 0.8267
Epoch 173/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9466 - val_loss: 0.8135
Epoch 174/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9467 - val_loss: 0.8179
Epoch 175/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9487 - val_loss: 0.8224
Epoch 176/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9465 - val_loss: 0.8151
Epoch 177/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9499 - val_loss: 0.8232
Epoch 178/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9450 - val_loss: 0.8177
Epoch 179/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9456 - val_loss: 0.8212
Epoch 180/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9449 - val_loss: 0.8163
Epoch 181/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9455 - val_loss: 0.8195
Epoch 182/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9492 - val_loss: 0.8093
Epoch 183/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9480 - val_loss: 0.8234
Epoch 184/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9473 - val_loss: 0.8216
Epoch 185/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9451 - val_loss: 0.8106
Epoch 186/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9421 - val_loss: 0.8168
Epoch 187/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9509 - val_loss: 0.8232
Epoch 188/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9428 - val_loss: 0.8105
Epoch 189/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9473 - val_loss: 0.8115
Epoch 190/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9416 - val_loss: 0.8241
Epoch 191/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9440 - val_loss: 0.8197
Epoch 192/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9482 - val_loss: 0.8138
Epoch 193/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9420 - val_loss: 0.8163
Epoch 194/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9415 - val_loss: 0.8161
Epoch 195/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9418 - val_loss: 0.8097
Epoch 196/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9455 - val_loss: 0.8165
Epoch 197/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9391 - val_loss: 0.8147
Epoch 198/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9433 - val_loss: 0.8197
Epoch 199/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9469 - val_loss: 0.8095
Epoch 200/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9438 - val_loss: 0.8240
Epoch 201/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9416 - val_loss: 0.8104
Epoch 202/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9402 - val_loss: 0.8117
Epoch 203/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9444 - val_loss: 0.8144
Epoch 204/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9471 - val_loss: 0.8132
Epoch 205/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9397 - val_loss: 0.8047
Epoch 206/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9388 - val_loss: 0.8111
Epoch 207/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9369 - val_loss: 0.8156
Epoch 208/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9425 - val_loss: 0.8135
Epoch 209/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9483 - val_loss: 0.8230
Epoch 210/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9478 - val_loss: 0.8038
Epoch 211/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9415 - val_loss: 0.8168
Epoch 212/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9366 - val_loss: 0.8114
Epoch 213/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9462 - val_loss: 0.8159
Epoch 214/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9343 - val_loss: 0.8089
Epoch 215/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9371 - val_loss: 0.8077
Epoch 216/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9382 - val_loss: 0.8126
Epoch 217/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9372 - val_loss: 0.8061
Epoch 218/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9407 - val_loss: 0.8151
Epoch 219/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9384 - val_loss: 0.8138
Epoch 220/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9423 - val_loss: 0.8193
Epoch 221/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9382 - val_loss: 0.8029
Epoch 222/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9373 - val_loss: 0.8102
Epoch 223/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9358 - val_loss: 0.8071
Epoch 224/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9363 - val_loss: 0.8088
Epoch 225/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9330 - val_loss: 0.8092
Epoch 226/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9326 - val_loss: 0.8121
Epoch 227/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9335 - val_loss: 0.8106
Epoch 228/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9411 - val_loss: 0.8097
Epoch 229/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9359 - val_loss: 0.8030
Epoch 230/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9357 - val_loss: 0.8137
Epoch 231/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9354 - val_loss: 0.8151
Epoch 232/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9348 - val_loss: 0.8050
Epoch 233/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9364 - val_loss: 0.8116
Epoch 234/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9401 - val_loss: 0.8018
Epoch 235/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9355 - val_loss: 0.8132
Epoch 236/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9340 - val_loss: 0.8020
Epoch 237/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9345 - val_loss: 0.8072
Epoch 238/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.9327 - val_loss: 0.8089
Epoch 239/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9348 - val_loss: 0.8093
Epoch 240/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9344 - val_loss: 0.8101
Epoch 241/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9347 - val_loss: 0.8017
Epoch 242/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9335 - val_loss: 0.8175
Epoch 243/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9365 - val_loss: 0.7996
Epoch 244/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9448 - val_loss: 0.8222
Epoch 245/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9301 - val_loss: 0.8001
Epoch 246/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9351 - val_loss: 0.8063
Epoch 247/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9309 - val_loss: 0.8008
Epoch 248/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9352 - val_loss: 0.8168
Epoch 249/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9350 - val_loss: 0.7974
Epoch 250/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9332 - val_loss: 0.8038
Epoch 251/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9423 - val_loss: 0.8196
Epoch 252/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9286 - val_loss: 0.7990
Epoch 253/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9293 - val_loss: 0.8074
Epoch 254/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9284 - val_loss: 0.8006
Epoch 255/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9324 - val_loss: 0.8119
Epoch 256/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.9291 - val_loss: 0.8051
Epoch 257/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9303 - val_loss: 0.8017
Epoch 258/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9316 - val_loss: 0.8014
Epoch 259/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9271 - val_loss: 0.8063
Epoch 260/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9372 - val_loss: 0.8170
Epoch 261/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9300 - val_loss: 0.7931
Epoch 262/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9295 - val_loss: 0.8066
Epoch 263/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9270 - val_loss: 0.8045
Epoch 264/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9295 - val_loss: 0.8023
Epoch 265/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9369 - val_loss: 0.8097
Epoch 266/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9296 - val_loss: 0.7963
Epoch 267/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9265 - val_loss: 0.8017
Epoch 268/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9355 - val_loss: 0.8180
Epoch 269/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9313 - val_loss: 0.8019
Epoch 270/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9280 - val_loss: 0.7990
Epoch 271/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9255 - val_loss: 0.8095
Epoch 272/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9243 - val_loss: 0.8042
Epoch 273/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9228 - val_loss: 0.7993
Epoch 274/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9266 - val_loss: 0.8024
Epoch 275/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9260 - val_loss: 0.7997
Epoch 276/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.9340 - val_loss: 0.7972
Epoch 277/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9307 - val_loss: 0.8066
Epoch 278/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9250 - val_loss: 0.8039
Epoch 279/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9293 - val_loss: 0.8030
Epoch 280/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9250 - val_loss: 0.7989
Epoch 281/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9237 - val_loss: 0.8033
Epoch 282/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9291 - val_loss: 0.8114
Epoch 283/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9298 - val_loss: 0.7963
Epoch 284/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9245 - val_loss: 0.7956
Epoch 285/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9248 - val_loss: 0.8136
Epoch 286/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9265 - val_loss: 0.7965
Epoch 287/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9269 - val_loss: 0.8055
Epoch 288/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9328 - val_loss: 0.7900
Epoch 289/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9223 - val_loss: 0.8135
Epoch 290/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9333 - val_loss: 0.7981
Epoch 291/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9274 - val_loss: 0.8047
Epoch 292/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9269 - val_loss: 0.7957
Epoch 293/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9184 - val_loss: 0.8096
Epoch 294/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.9279 - val_loss: 0.8021
Epoch 295/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9241 - val_loss: 0.7987
Epoch 296/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9296 - val_loss: 0.8120
Epoch 297/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9213 - val_loss: 0.7979
Epoch 298/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9227 - val_loss: 0.7906
Epoch 299/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9241 - val_loss: 0.7974
Epoch 300/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9197 - val_loss: 0.8017
Epoch 301/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9208 - val_loss: 0.7983
Epoch 302/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9243 - val_loss: 0.7947
Epoch 303/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9228 - val_loss: 0.7938
Epoch 304/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9183 - val_loss: 0.7991
Epoch 305/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9205 - val_loss: 0.8059
Epoch 306/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9226 - val_loss: 0.7923
Epoch 307/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9208 - val_loss: 0.7960
Epoch 308/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9225 - val_loss: 0.8005
Epoch 309/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9266 - val_loss: 0.7992
Epoch 310/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9180 - val_loss: 0.8083
Epoch 311/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.9242 - val_loss: 0.7883
Epoch 312/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9188 - val_loss: 0.8022
Epoch 313/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9254 - val_loss: 0.8008
Epoch 314/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9206 - val_loss: 0.7881
Epoch 315/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9237 - val_loss: 0.8065
Epoch 316/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9167 - val_loss: 0.7939
Epoch 317/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9169 - val_loss: 0.7933
Epoch 318/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9191 - val_loss: 0.7985
Epoch 319/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9177 - val_loss: 0.7877
Epoch 320/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9204 - val_loss: 0.7938
Epoch 321/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9172 - val_loss: 0.7896
Epoch 322/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9174 - val_loss: 0.8040
Epoch 323/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9152 - val_loss: 0.7898
Epoch 324/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9139 - val_loss: 0.7940
Epoch 325/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9151 - val_loss: 0.7916
Epoch 326/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9171 - val_loss: 0.7993
Epoch 327/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9170 - val_loss: 0.7906
Epoch 328/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9179 - val_loss: 0.7897
Epoch 329/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.9222 - val_loss: 0.7877
Epoch 330/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9165 - val_loss: 0.8017
Epoch 331/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9158 - val_loss: 0.7955
Epoch 332/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9168 - val_loss: 0.7979
Epoch 333/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9142 - val_loss: 0.7913
Epoch 334/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9183 - val_loss: 0.7957
Epoch 335/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9163 - val_loss: 0.7866
Epoch 336/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9163 - val_loss: 0.8006
Epoch 337/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9150 - val_loss: 0.7916
Epoch 338/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9143 - val_loss: 0.7918
Epoch 339/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9215 - val_loss: 0.7838
Epoch 340/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9216 - val_loss: 0.8045
Epoch 341/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9097 - val_loss: 0.7876
Epoch 342/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9129 - val_loss: 0.7863
Epoch 343/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9212 - val_loss: 0.7954
Epoch 344/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.9110 - val_loss: 0.7888
Epoch 345/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9129 - val_loss: 0.7949
Epoch 346/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9114 - val_loss: 0.7871
Epoch 347/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9117 - val_loss: 0.7941
Epoch 348/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9133 - val_loss: 0.7907
Epoch 349/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9166 - val_loss: 0.7829
Epoch 350/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9103 - val_loss: 0.7994
Epoch 351/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9131 - val_loss: 0.7923
Epoch 352/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9126 - val_loss: 0.7873
Epoch 353/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9177 - val_loss: 0.7975
Epoch 354/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9135 - val_loss: 0.7921
Epoch 355/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9177 - val_loss: 0.7819
Epoch 356/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9112 - val_loss: 0.7983
Epoch 357/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9111 - val_loss: 0.7919
Epoch 358/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9127 - val_loss: 0.7883
Epoch 359/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9145 - val_loss: 0.7883
Epoch 360/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9097 - val_loss: 0.7981
Epoch 361/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9072 - val_loss: 0.7909
Epoch 362/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9138 - val_loss: 0.7847
Epoch 363/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.9124 - val_loss: 0.7960
Epoch 364/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9066 - val_loss: 0.7888
Epoch 365/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9080 - val_loss: 0.7899
Epoch 366/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9091 - val_loss: 0.7811
Epoch 367/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9083 - val_loss: 0.7889
Epoch 368/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9068 - val_loss: 0.7896
Epoch 369/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9142 - val_loss: 0.7966
Epoch 370/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9051 - val_loss: 0.7860
Epoch 371/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9118 - val_loss: 0.7901
Epoch 372/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9115 - val_loss: 0.7874
Epoch 373/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9057 - val_loss: 0.7916
Epoch 374/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9046 - val_loss: 0.7889
Epoch 375/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9096 - val_loss: 0.7872
Epoch 376/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9073 - val_loss: 0.7883
Epoch 377/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9063 - val_loss: 0.7872
Epoch 378/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9056 - val_loss: 0.7864
Epoch 379/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9046 - val_loss: 0.7840
Epoch 380/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9055 - val_loss: 0.7941
Epoch 381/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9040 - val_loss: 0.7832
Epoch 382/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.9057 - val_loss: 0.7869
Epoch 383/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9084 - val_loss: 0.7909
Epoch 384/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9076 - val_loss: 0.7975
Epoch 385/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9027 - val_loss: 0.7790
Epoch 386/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9058 - val_loss: 0.7877
Epoch 387/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9036 - val_loss: 0.7802
Epoch 388/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9060 - val_loss: 0.7908
Epoch 389/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9056 - val_loss: 0.7849
Epoch 390/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9088 - val_loss: 0.7819
Epoch 391/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9052 - val_loss: 0.7931
Epoch 392/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9015 - val_loss: 0.7837
Epoch 393/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9022 - val_loss: 0.7863
Epoch 394/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9051 - val_loss: 0.7777
Epoch 395/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9029 - val_loss: 0.7921
Epoch 396/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9082 - val_loss: 0.7900
Epoch 397/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9089 - val_loss: 0.7870
Epoch 398/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9004 - val_loss: 0.7915
Epoch 399/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9012 - val_loss: 0.7844
Epoch 400/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.9032 - val_loss: 0.7807
Epoch 401/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9019 - val_loss: 0.7904
Epoch 402/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9042 - val_loss: 0.7906
Epoch 403/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9044 - val_loss: 0.7824
Epoch 404/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9033 - val_loss: 0.7829
Epoch 405/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9000 - val_loss: 0.7811
Epoch 406/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8997 - val_loss: 0.7809
Epoch 407/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8995 - val_loss: 0.7871
Epoch 408/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8975 - val_loss: 0.7829
Epoch 409/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9020 - val_loss: 0.7828
Epoch 410/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8995 - val_loss: 0.7835
Epoch 411/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8995 - val_loss: 0.7793
Epoch 412/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8994 - val_loss: 0.7861
Epoch 413/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8983 - val_loss: 0.7877
Epoch 414/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9020 - val_loss: 0.7845
Epoch 415/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.9064 - val_loss: 0.7742
Epoch 416/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8983 - val_loss: 0.7997
Epoch 417/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9035 - val_loss: 0.7818
Epoch 418/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8955 - val_loss: 0.7868
Epoch 419/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8987 - val_loss: 0.7836
Epoch 420/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8977 - val_loss: 0.7835
Epoch 421/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8993 - val_loss: 0.7837
Epoch 422/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8994 - val_loss: 0.7864
Epoch 423/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8974 - val_loss: 0.7727
Epoch 424/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8954 - val_loss: 0.7866
Epoch 425/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8975 - val_loss: 0.7893
Epoch 426/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.9004 - val_loss: 0.7730
Epoch 427/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8991 - val_loss: 0.7908
Epoch 428/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8977 - val_loss: 0.7849
Epoch 429/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.9061 - val_loss: 0.7781
Epoch 430/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8959 - val_loss: 0.7821
Epoch 431/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8984 - val_loss: 0.7815
Epoch 432/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8914 - val_loss: 0.7771
Epoch 433/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.9025 - val_loss: 0.7866
Epoch 434/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8954 - val_loss: 0.7822
Epoch 435/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8958 - val_loss: 0.7788
Epoch 436/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8956 - val_loss: 0.7792
Epoch 437/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8923 - val_loss: 0.7863
Epoch 438/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8950 - val_loss: 0.7863
Epoch 439/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8928 - val_loss: 0.7696
Epoch 440/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8937 - val_loss: 0.7812
Epoch 441/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8928 - val_loss: 0.7870
Epoch 442/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8908 - val_loss: 0.7740
Epoch 443/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.8919 - val_loss: 0.7820
Epoch 444/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8906 - val_loss: 0.7825
Epoch 445/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8932 - val_loss: 0.7767
Epoch 446/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8912 - val_loss: 0.7845
Epoch 447/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8913 - val_loss: 0.7879
Epoch 448/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8897 - val_loss: 0.7771
Epoch 449/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8961 - val_loss: 0.7727
Epoch 450/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8992 - val_loss: 0.7830
Epoch 451/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8871 - val_loss: 0.7765
Epoch 452/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8917 - val_loss: 0.7806
Epoch 453/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8905 - val_loss: 0.7739
Epoch 454/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8908 - val_loss: 0.7692
Epoch 455/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8882 - val_loss: 0.7817
Epoch 456/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8914 - val_loss: 0.7872
Epoch 457/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8931 - val_loss: 0.7798
Epoch 458/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8923 - val_loss: 0.7724
Epoch 459/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8880 - val_loss: 0.7850
Epoch 460/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8877 - val_loss: 0.7747
Epoch 461/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.8909 - val_loss: 0.7771
Epoch 462/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8876 - val_loss: 0.7775
Epoch 463/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8870 - val_loss: 0.7756
Epoch 464/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8848 - val_loss: 0.7760
Epoch 465/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8856 - val_loss: 0.7818
Epoch 466/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8864 - val_loss: 0.7791
Epoch 467/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8932 - val_loss: 0.7796
Epoch 468/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8853 - val_loss: 0.7701
Epoch 469/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8888 - val_loss: 0.7726
Epoch 470/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8878 - val_loss: 0.7716
Epoch 471/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8842 - val_loss: 0.7783
Epoch 472/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8858 - val_loss: 0.7875
Epoch 473/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8845 - val_loss: 0.7741
Epoch 474/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8861 - val_loss: 0.7643
Epoch 475/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.8843 - val_loss: 0.7725
Epoch 476/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8827 - val_loss: 0.7881
Epoch 477/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8829 - val_loss: 0.7703
Epoch 478/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8842 - val_loss: 0.7778
Epoch 479/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8827 - val_loss: 0.7699
Epoch 480/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8809 - val_loss: 0.7736
Epoch 481/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8798 - val_loss: 0.7712
Epoch 482/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8826 - val_loss: 0.7820
Epoch 483/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8798 - val_loss: 0.7693
Epoch 484/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8794 - val_loss: 0.7708
Epoch 485/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8931 - val_loss: 0.7754
Epoch 486/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8856 - val_loss: 0.7640
Epoch 487/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8761 - val_loss: 0.7823
Epoch 488/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8793 - val_loss: 0.7738
Epoch 489/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.8774 - val_loss: 0.7634
Epoch 490/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8777 - val_loss: 0.7756
Epoch 491/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8769 - val_loss: 0.7659
Epoch 492/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8739 - val_loss: 0.7693
Epoch 493/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8742 - val_loss: 0.7730
Epoch 494/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8730 - val_loss: 0.7664
Epoch 495/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8725 - val_loss: 0.7744
Epoch 496/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8728 - val_loss: 0.7651
Epoch 497/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8713 - val_loss: 0.7690
Epoch 498/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8750 - val_loss: 0.7615
Epoch 499/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8698 - val_loss: 0.7683
Epoch 500/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8733 - val_loss: 0.7697
Epoch 501/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8855 - val_loss: 0.7691
Epoch 502/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8698 - val_loss: 0.7718
Epoch 503/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.8704 - val_loss: 0.7640
Epoch 504/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8713 - val_loss: 0.7683
Epoch 505/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8705 - val_loss: 0.7621
Epoch 506/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8674 - val_loss: 0.7656
Epoch 507/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8693 - val_loss: 0.7680
Epoch 508/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8668 - val_loss: 0.7582
Epoch 509/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8726 - val_loss: 0.7699
Epoch 510/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8703 - val_loss: 0.7576
Epoch 511/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8688 - val_loss: 0.7676
Epoch 512/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8660 - val_loss: 0.7657
Epoch 513/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8643 - val_loss: 0.7602
Epoch 514/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8665 - val_loss: 0.7576
Epoch 515/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8621 - val_loss: 0.7687
Epoch 516/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8656 - val_loss: 0.7645
Epoch 517/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.8719 - val_loss: 0.7513
Epoch 518/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8772 - val_loss: 0.7736
Epoch 519/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8626 - val_loss: 0.7616
Epoch 520/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8643 - val_loss: 0.7585
Epoch 521/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8624 - val_loss: 0.7623
Epoch 522/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8598 - val_loss: 0.7535
Epoch 523/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8594 - val_loss: 0.7648
Epoch 524/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8624 - val_loss: 0.7604
Epoch 525/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8652 - val_loss: 0.7573
Epoch 526/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8567 - val_loss: 0.7568
Epoch 527/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8599 - val_loss: 0.7590
Epoch 528/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8558 - val_loss: 0.7643
Epoch 529/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8614 - val_loss: 0.7505
Epoch 530/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8617 - val_loss: 0.7686
Epoch 531/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8559 - val_loss: 0.7498
Epoch 532/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8545 - val_loss: 0.7566
Epoch 533/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.8546 - val_loss: 0.7645
Epoch 534/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8609 - val_loss: 0.7483
Epoch 535/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8537 - val_loss: 0.7582
Epoch 536/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8621 - val_loss: 0.7720
Epoch 537/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8539 - val_loss: 0.7408
Epoch 538/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8525 - val_loss: 0.7577
Epoch 539/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8509 - val_loss: 0.7616
Epoch 540/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8491 - val_loss: 0.7466
Epoch 541/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8644 - val_loss: 0.7509
Epoch 542/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8579 - val_loss: 0.7508
Epoch 543/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8483 - val_loss: 0.7505
Epoch 544/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8480 - val_loss: 0.7486
Epoch 545/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8484 - val_loss: 0.7555
Epoch 546/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8524 - val_loss: 0.7598
Epoch 547/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8499 - val_loss: 0.7477
Epoch 548/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.8473 - val_loss: 0.7477
Epoch 549/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8468 - val_loss: 0.7520
Epoch 550/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8418 - val_loss: 0.7478
Epoch 551/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8443 - val_loss: 0.7488
Epoch 552/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8454 - val_loss: 0.7540
Epoch 553/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8413 - val_loss: 0.7451
Epoch 554/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8448 - val_loss: 0.7415
Epoch 555/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8430 - val_loss: 0.7515
Epoch 556/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8435 - val_loss: 0.7522
Epoch 557/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8448 - val_loss: 0.7608
Epoch 558/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8458 - val_loss: 0.7427
Epoch 559/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8385 - val_loss: 0.7482
Epoch 560/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8426 - val_loss: 0.7429
Epoch 561/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8404 - val_loss: 0.7478
Epoch 562/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.8400 - val_loss: 0.7502
Epoch 563/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8356 - val_loss: 0.7455
Epoch 564/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8385 - val_loss: 0.7343
Epoch 565/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8390 - val_loss: 0.7577
Epoch 566/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8371 - val_loss: 0.7382
Epoch 567/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8410 - val_loss: 0.7573
Epoch 568/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8293 - val_loss: 0.7392
Epoch 569/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8365 - val_loss: 0.7399
Epoch 570/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8442 - val_loss: 0.7491
Epoch 571/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8328 - val_loss: 0.7360
Epoch 572/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8341 - val_loss: 0.7407
Epoch 573/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8340 - val_loss: 0.7492
Epoch 574/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8314 - val_loss: 0.7340
Epoch 575/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.8305 - val_loss: 0.7395
Epoch 576/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8288 - val_loss: 0.7444
Epoch 577/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8362 - val_loss: 0.7472
Epoch 578/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8281 - val_loss: 0.7340
Epoch 579/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8315 - val_loss: 0.7444
Epoch 580/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8332 - val_loss: 0.7329
Epoch 581/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8303 - val_loss: 0.7502
Epoch 582/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8265 - val_loss: 0.7353
Epoch 583/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8230 - val_loss: 0.7358
Epoch 584/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8279 - val_loss: 0.7428
Epoch 585/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8242 - val_loss: 0.7481
Epoch 586/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8250 - val_loss: 0.7296
Epoch 587/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8224 - val_loss: 0.7413
Epoch 588/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8216 - val_loss: 0.7337
Epoch 589/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8250 - val_loss: 0.7471
Epoch 590/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8208 - val_loss: 0.7378
Epoch 591/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.8240 - val_loss: 0.7297
Epoch 592/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8229 - val_loss: 0.7351
Epoch 593/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8199 - val_loss: 0.7351
Epoch 594/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8248 - val_loss: 0.7405
Epoch 595/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8213 - val_loss: 0.7245
Epoch 596/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8167 - val_loss: 0.7341
Epoch 597/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8231 - val_loss: 0.7406
Epoch 598/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8174 - val_loss: 0.7255
Epoch 599/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8172 - val_loss: 0.7381
Epoch 600/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8167 - val_loss: 0.7358
Epoch 601/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8170 - val_loss: 0.7304
Epoch 602/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8154 - val_loss: 0.7245
Epoch 603/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8211 - val_loss: 0.7486
Epoch 604/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8161 - val_loss: 0.7269
Epoch 605/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.8103 - val_loss: 0.7332
Epoch 606/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8156 - val_loss: 0.7236
Epoch 607/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8114 - val_loss: 0.7397
Epoch 608/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8161 - val_loss: 0.7208
Epoch 609/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8146 - val_loss: 0.7513
Epoch 610/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8189 - val_loss: 0.7213
Epoch 611/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8158 - val_loss: 0.7428
Epoch 612/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8070 - val_loss: 0.7226
Epoch 613/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8093 - val_loss: 0.7278
Epoch 614/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8198 - val_loss: 0.7290
Epoch 615/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8084 - val_loss: 0.7168
Epoch 616/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8056 - val_loss: 0.7257
Epoch 617/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.8129 - val_loss: 0.7394
Epoch 618/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8090 - val_loss: 0.7149
Epoch 619/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8041 - val_loss: 0.7235
Epoch 620/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8060 - val_loss: 0.7229
Epoch 621/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8220 - val_loss: 0.7358
Epoch 622/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8032 - val_loss: 0.7128
Epoch 623/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8119 - val_loss: 0.7270
Epoch 624/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8038 - val_loss: 0.7350
Epoch 625/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8073 - val_loss: 0.7146
Epoch 626/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8031 - val_loss: 0.7283
Epoch 627/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.8077 - val_loss: 0.7190
Epoch 628/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8043 - val_loss: 0.7306
Epoch 629/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8034 - val_loss: 0.7205
Epoch 630/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.7989 - val_loss: 0.7183
Epoch 631/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8020 - val_loss: 0.7222
Epoch 632/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.8000 - val_loss: 0.7067
Epoch 633/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7954 - val_loss: 0.7324
Epoch 634/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7980 - val_loss: 0.7177
Epoch 635/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7940 - val_loss: 0.7153
Epoch 636/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7988 - val_loss: 0.7147
Epoch 637/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7984 - val_loss: 0.7127
Epoch 638/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7957 - val_loss: 0.7223
Epoch 639/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7952 - val_loss: 0.7104
Epoch 640/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7971 - val_loss: 0.7238
Epoch 641/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7953 - val_loss: 0.7092
Epoch 642/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.7921 - val_loss: 0.7073
Epoch 643/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7945 - val_loss: 0.7174
Epoch 644/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7917 - val_loss: 0.7076
Epoch 645/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7934 - val_loss: 0.7190
Epoch 646/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7911 - val_loss: 0.7129
Epoch 647/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7880 - val_loss: 0.7141
Epoch 648/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7887 - val_loss: 0.7146
Epoch 649/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7886 - val_loss: 0.7025
Epoch 650/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7889 - val_loss: 0.7111
Epoch 651/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7881 - val_loss: 0.7160
Epoch 652/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7921 - val_loss: 0.7060
Epoch 653/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7988 - val_loss: 0.7049
Epoch 654/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7877 - val_loss: 0.7232
Epoch 655/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.7888 - val_loss: 0.7109
Epoch 656/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7880 - val_loss: 0.7071
Epoch 657/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7837 - val_loss: 0.7074
Epoch 658/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7840 - val_loss: 0.7125
Epoch 659/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7854 - val_loss: 0.7069
Epoch 660/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7802 - val_loss: 0.7016
Epoch 661/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7877 - val_loss: 0.7120
Epoch 662/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7848 - val_loss: 0.7010
Epoch 663/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7905 - val_loss: 0.7075
Epoch 664/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7800 - val_loss: 0.6965
Epoch 665/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7780 - val_loss: 0.6998
Epoch 666/1000
17/17 [==============================] - 0s 4ms/step - loss: 0.7785 - val_loss: 0.7025
Epoch 667/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7835 - val_loss: 0.6929
Epoch 668/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7755 - val_loss: 0.7060
Epoch 669/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7781 - val_loss: 0.7046
Epoch 670/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7782 - val_loss: 0.7057
Epoch 671/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7756 - val_loss: 0.6921
Epoch 672/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7761 - val_loss: 0.7022
Epoch 673/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7828 - val_loss: 0.6980
Epoch 674/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7765 - val_loss: 0.6936
Epoch 675/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7722 - val_loss: 0.6989
Epoch 676/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7749 - val_loss: 0.7011
Epoch 677/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7708 - val_loss: 0.6941
Epoch 678/1000
17/17 [==============================] - 0s 6ms/step - loss: 0.7715 - val_loss: 0.6898
Epoch 679/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7704 - val_loss: 0.6996
Epoch 680/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7707 - val_loss: 0.6977
Epoch 681/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7689 - val_loss: 0.6879
Epoch 682/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7783 - val_loss: 0.7055
Epoch 683/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7656 - val_loss: 0.6897
Epoch 684/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7721 - val_loss: 0.6974
Epoch 685/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7676 - val_loss: 0.6811
Epoch 686/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7698 - val_loss: 0.6952
Epoch 687/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7620 - val_loss: 0.6955
Epoch 688/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7618 - val_loss: 0.6938
Epoch 689/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7675 - val_loss: 0.6908
Epoch 690/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7610 - val_loss: 0.6942
Epoch 691/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.7710 - val_loss: 0.7013
Epoch 692/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7615 - val_loss: 0.6821
Epoch 693/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7584 - val_loss: 0.6998
Epoch 694/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7635 - val_loss: 0.6987
Epoch 695/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7528 - val_loss: 0.6777
Epoch 696/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7626 - val_loss: 0.6866
Epoch 697/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7692 - val_loss: 0.6877
Epoch 698/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7610 - val_loss: 0.7040
Epoch 699/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7659 - val_loss: 0.6913
Epoch 700/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7589 - val_loss: 0.6844
Epoch 701/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7618 - val_loss: 0.6936
Epoch 702/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7621 - val_loss: 0.6763
Epoch 703/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.7741 - val_loss: 0.6953
Epoch 704/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7496 - val_loss: 0.6857
Epoch 705/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7546 - val_loss: 0.6801
Epoch 706/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7569 - val_loss: 0.6864
Epoch 707/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7626 - val_loss: 0.6866
Epoch 708/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7539 - val_loss: 0.6862
Epoch 709/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7518 - val_loss: 0.6827
Epoch 710/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7562 - val_loss: 0.6778
Epoch 711/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7495 - val_loss: 0.6854
Epoch 712/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7455 - val_loss: 0.6860
Epoch 713/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.7523 - val_loss: 0.6899
Epoch 714/1000
17/17 [==============================] - 0s 4ms/step - loss: 0.7533 - val_loss: 0.6779
Epoch 715/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7434 - val_loss: 0.6880
Epoch 716/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7477 - val_loss: 0.6789
Epoch 717/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7433 - val_loss: 0.6809
Epoch 718/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7478 - val_loss: 0.6872
Epoch 719/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7548 - val_loss: 0.6943
Epoch 720/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7401 - val_loss: 0.6782
Epoch 721/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7435 - val_loss: 0.6764
Epoch 722/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7517 - val_loss: 0.6757
Epoch 723/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7430 - val_loss: 0.7025
Epoch 724/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7435 - val_loss: 0.6697
Epoch 725/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7450 - val_loss: 0.6852
Epoch 726/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7440 - val_loss: 0.6750
Epoch 727/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.7398 - val_loss: 0.6761
Epoch 728/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7392 - val_loss: 0.6887
Epoch 729/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7395 - val_loss: 0.6789
Epoch 730/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7396 - val_loss: 0.6779
Epoch 731/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7411 - val_loss: 0.6781
Epoch 732/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7530 - val_loss: 0.6775
Epoch 733/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7350 - val_loss: 0.6713
Epoch 734/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7422 - val_loss: 0.6801
Epoch 735/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7333 - val_loss: 0.6742
Epoch 736/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7313 - val_loss: 0.6730
Epoch 737/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7438 - val_loss: 0.6742
Epoch 738/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.7398 - val_loss: 0.6766
Epoch 739/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7287 - val_loss: 0.6915
Epoch 740/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7348 - val_loss: 0.6722
Epoch 741/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7343 - val_loss: 0.6801
Epoch 742/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7425 - val_loss: 0.6834
Epoch 743/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7227 - val_loss: 0.6660
Epoch 744/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7404 - val_loss: 0.6601
Epoch 745/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7385 - val_loss: 0.6802
Epoch 746/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7386 - val_loss: 0.6734
Epoch 747/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7222 - val_loss: 0.6879
Epoch 748/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7319 - val_loss: 0.6704
Epoch 749/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7300 - val_loss: 0.6699
Epoch 750/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7388 - val_loss: 0.6831
Epoch 751/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7303 - val_loss: 0.6589
Epoch 752/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.7256 - val_loss: 0.6841
Epoch 753/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7283 - val_loss: 0.6645
Epoch 754/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7238 - val_loss: 0.6698
Epoch 755/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7276 - val_loss: 0.6697
Epoch 756/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7282 - val_loss: 0.6557
Epoch 757/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7264 - val_loss: 0.6716
Epoch 758/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7189 - val_loss: 0.6790
Epoch 759/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7233 - val_loss: 0.6711
Epoch 760/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7208 - val_loss: 0.6625
Epoch 761/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7177 - val_loss: 0.6623
Epoch 762/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7296 - val_loss: 0.6768
Epoch 763/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.7420 - val_loss: 0.6602
Epoch 764/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7251 - val_loss: 0.6852
Epoch 765/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7190 - val_loss: 0.6526
Epoch 766/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7200 - val_loss: 0.6752
Epoch 767/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7180 - val_loss: 0.6609
Epoch 768/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7185 - val_loss: 0.6648
Epoch 769/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7133 - val_loss: 0.6709
Epoch 770/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7178 - val_loss: 0.6582
Epoch 771/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7152 - val_loss: 0.6641
Epoch 772/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7179 - val_loss: 0.6705
Epoch 773/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7163 - val_loss: 0.6562
Epoch 774/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7153 - val_loss: 0.6625
Epoch 775/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7189 - val_loss: 0.6592
Epoch 776/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.7134 - val_loss: 0.6646
Epoch 777/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7069 - val_loss: 0.6615
Epoch 778/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7076 - val_loss: 0.6643
Epoch 779/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7118 - val_loss: 0.6710
Epoch 780/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7163 - val_loss: 0.6510
Epoch 781/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7165 - val_loss: 0.6755
Epoch 782/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7100 - val_loss: 0.6600
Epoch 783/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7102 - val_loss: 0.6593
Epoch 784/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7062 - val_loss: 0.6597
Epoch 785/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7071 - val_loss: 0.6515
Epoch 786/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.7177 - val_loss: 0.6602
Epoch 787/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7115 - val_loss: 0.6755
Epoch 788/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7132 - val_loss: 0.6551
Epoch 789/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7014 - val_loss: 0.6622
Epoch 790/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7085 - val_loss: 0.6515
Epoch 791/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7002 - val_loss: 0.6528
Epoch 792/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7075 - val_loss: 0.6623
Epoch 793/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7026 - val_loss: 0.6498
Epoch 794/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7084 - val_loss: 0.6573
Epoch 795/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6992 - val_loss: 0.6578
Epoch 796/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6998 - val_loss: 0.6520
Epoch 797/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7032 - val_loss: 0.6658
Epoch 798/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6969 - val_loss: 0.6478
Epoch 799/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.7000 - val_loss: 0.6470
Epoch 800/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6972 - val_loss: 0.6573
Epoch 801/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7036 - val_loss: 0.6571
Epoch 802/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7073 - val_loss: 0.6634
Epoch 803/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6965 - val_loss: 0.6494
Epoch 804/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6968 - val_loss: 0.6629
Epoch 805/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.7086 - val_loss: 0.6423
Epoch 806/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7006 - val_loss: 0.6601
Epoch 807/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6923 - val_loss: 0.6468
Epoch 808/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6987 - val_loss: 0.6487
Epoch 809/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.6934 - val_loss: 0.6551
Epoch 810/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6966 - val_loss: 0.6555
Epoch 811/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6895 - val_loss: 0.6556
Epoch 812/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6936 - val_loss: 0.6571
Epoch 813/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6904 - val_loss: 0.6531
Epoch 814/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6915 - val_loss: 0.6591
Epoch 815/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6933 - val_loss: 0.6541
Epoch 816/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6915 - val_loss: 0.6454
Epoch 817/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6918 - val_loss: 0.6498
Epoch 818/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6945 - val_loss: 0.6461
Epoch 819/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6829 - val_loss: 0.6563
Epoch 820/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6953 - val_loss: 0.6553
Epoch 821/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.6875 - val_loss: 0.6445
Epoch 822/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6875 - val_loss: 0.6461
Epoch 823/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6905 - val_loss: 0.6462
Epoch 824/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6923 - val_loss: 0.6498
Epoch 825/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6896 - val_loss: 0.6535
Epoch 826/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6860 - val_loss: 0.6419
Epoch 827/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6844 - val_loss: 0.6475
Epoch 828/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.7040 - val_loss: 0.6574
Epoch 829/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6934 - val_loss: 0.6366
Epoch 830/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6818 - val_loss: 0.6496
Epoch 831/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.6929 - val_loss: 0.6466
Epoch 832/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6847 - val_loss: 0.6453
Epoch 833/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6908 - val_loss: 0.6407
Epoch 834/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6964 - val_loss: 0.6489
Epoch 835/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6848 - val_loss: 0.6378
Epoch 836/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6821 - val_loss: 0.6695
Epoch 837/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6808 - val_loss: 0.6396
Epoch 838/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6868 - val_loss: 0.6512
Epoch 839/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6832 - val_loss: 0.6483
Epoch 840/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6792 - val_loss: 0.6500
Epoch 841/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6762 - val_loss: 0.6492
Epoch 842/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.7045 - val_loss: 0.6468
Epoch 843/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6843 - val_loss: 0.6655
Epoch 844/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6789 - val_loss: 0.6392
Epoch 845/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6797 - val_loss: 0.6492
Epoch 846/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6749 - val_loss: 0.6466
Epoch 847/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6832 - val_loss: 0.6435
Epoch 848/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6757 - val_loss: 0.6340
Epoch 849/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6775 - val_loss: 0.6546
Epoch 850/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6784 - val_loss: 0.6405
Epoch 851/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6968 - val_loss: 0.6405
Epoch 852/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6798 - val_loss: 0.6375
Epoch 853/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6777 - val_loss: 0.6515
Epoch 854/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.6828 - val_loss: 0.6339
Epoch 855/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6853 - val_loss: 0.6437
Epoch 856/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6646 - val_loss: 0.6322
Epoch 857/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6737 - val_loss: 0.6482
Epoch 858/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6739 - val_loss: 0.6435
Epoch 859/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6774 - val_loss: 0.6350
Epoch 860/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6803 - val_loss: 0.6527
Epoch 861/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6709 - val_loss: 0.6377
Epoch 862/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6697 - val_loss: 0.6327
Epoch 863/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6710 - val_loss: 0.6541
Epoch 864/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.6710 - val_loss: 0.6346
Epoch 865/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6720 - val_loss: 0.6526
Epoch 866/1000
17/17 [==============================] - 0s 4ms/step - loss: 0.6680 - val_loss: 0.6343
Epoch 867/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6755 - val_loss: 0.6440
Epoch 868/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6670 - val_loss: 0.6356
Epoch 869/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6692 - val_loss: 0.6352
Epoch 870/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6704 - val_loss: 0.6415
Epoch 871/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6701 - val_loss: 0.6390
Epoch 872/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6612 - val_loss: 0.6557
Epoch 873/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6771 - val_loss: 0.6508
Epoch 874/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6690 - val_loss: 0.6419
Epoch 875/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6605 - val_loss: 0.6318
Epoch 876/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6796 - val_loss: 0.6423
Epoch 877/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6573 - val_loss: 0.6391
Epoch 878/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.6649 - val_loss: 0.6382
Epoch 879/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6642 - val_loss: 0.6399
Epoch 880/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6641 - val_loss: 0.6468
Epoch 881/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6595 - val_loss: 0.6286
Epoch 882/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6647 - val_loss: 0.6434
Epoch 883/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6625 - val_loss: 0.6282
Epoch 884/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6672 - val_loss: 0.6589
Epoch 885/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6599 - val_loss: 0.6275
Epoch 886/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6660 - val_loss: 0.6507
Epoch 887/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6566 - val_loss: 0.6394
Epoch 888/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6578 - val_loss: 0.6349
Epoch 889/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6584 - val_loss: 0.6391
Epoch 890/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.6573 - val_loss: 0.6345
Epoch 891/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6547 - val_loss: 0.6334
Epoch 892/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6533 - val_loss: 0.6382
Epoch 893/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6535 - val_loss: 0.6341
Epoch 894/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6566 - val_loss: 0.6346
Epoch 895/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6588 - val_loss: 0.6426
Epoch 896/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6565 - val_loss: 0.6332
Epoch 897/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6569 - val_loss: 0.6294
Epoch 898/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6542 - val_loss: 0.6390
Epoch 899/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6525 - val_loss: 0.6392
Epoch 900/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.6708 - val_loss: 0.6493
Epoch 901/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6595 - val_loss: 0.6461
Epoch 902/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6523 - val_loss: 0.6327
Epoch 903/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6590 - val_loss: 0.6325
Epoch 904/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6458 - val_loss: 0.6409
Epoch 905/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6633 - val_loss: 0.6246
Epoch 906/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6584 - val_loss: 0.6383
Epoch 907/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6531 - val_loss: 0.6302
Epoch 908/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6594 - val_loss: 0.6363
Epoch 909/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6502 - val_loss: 0.6333
Epoch 910/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.6586 - val_loss: 0.6355
Epoch 911/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6496 - val_loss: 0.6272
Epoch 912/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6553 - val_loss: 0.6301
Epoch 913/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6497 - val_loss: 0.6374
Epoch 914/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6498 - val_loss: 0.6318
Epoch 915/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6436 - val_loss: 0.6427
Epoch 916/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6592 - val_loss: 0.6248
Epoch 917/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6469 - val_loss: 0.6358
Epoch 918/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6463 - val_loss: 0.6317
Epoch 919/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6636 - val_loss: 0.6285
Epoch 920/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6447 - val_loss: 0.6213
Epoch 921/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6508 - val_loss: 0.6331
Epoch 922/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.6559 - val_loss: 0.6271
Epoch 923/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6442 - val_loss: 0.6372
Epoch 924/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6425 - val_loss: 0.6260
Epoch 925/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6432 - val_loss: 0.6275
Epoch 926/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6453 - val_loss: 0.6506
Epoch 927/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6501 - val_loss: 0.6337
Epoch 928/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6455 - val_loss: 0.6318
Epoch 929/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6422 - val_loss: 0.6417
Epoch 930/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6501 - val_loss: 0.6266
Epoch 931/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6452 - val_loss: 0.6218
Epoch 932/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.6409 - val_loss: 0.6340
Epoch 933/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6439 - val_loss: 0.6407
Epoch 934/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6434 - val_loss: 0.6283
Epoch 935/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6465 - val_loss: 0.6223
Epoch 936/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6515 - val_loss: 0.6323
Epoch 937/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6476 - val_loss: 0.6353
Epoch 938/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6375 - val_loss: 0.6287
Epoch 939/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6414 - val_loss: 0.6407
Epoch 940/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6335 - val_loss: 0.6231
Epoch 941/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6478 - val_loss: 0.6336
Epoch 942/1000
17/17 [==============================] - 0s 4ms/step - loss: 0.6579 - val_loss: 0.6310
Epoch 943/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6363 - val_loss: 0.6515
Epoch 944/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6430 - val_loss: 0.6264
Epoch 945/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6425 - val_loss: 0.6274
Epoch 946/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6411 - val_loss: 0.6363
Epoch 947/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6511 - val_loss: 0.6225
Epoch 948/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6505 - val_loss: 0.6265
Epoch 949/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6396 - val_loss: 0.6242
Epoch 950/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6399 - val_loss: 0.6259
Epoch 951/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6379 - val_loss: 0.6300
Epoch 952/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6358 - val_loss: 0.6259
Epoch 953/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.6398 - val_loss: 0.6371
Epoch 954/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6319 - val_loss: 0.6233
Epoch 955/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6333 - val_loss: 0.6234
Epoch 956/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6352 - val_loss: 0.6270
Epoch 957/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6328 - val_loss: 0.6219
Epoch 958/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6532 - val_loss: 0.6438
Epoch 959/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6438 - val_loss: 0.6224
Epoch 960/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6337 - val_loss: 0.6273
Epoch 961/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6365 - val_loss: 0.6271
Epoch 962/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6272 - val_loss: 0.6219
Epoch 963/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.6290 - val_loss: 0.6246
Epoch 964/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6276 - val_loss: 0.6365
Epoch 965/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6293 - val_loss: 0.6152
Epoch 966/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6428 - val_loss: 0.6286
Epoch 967/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6527 - val_loss: 0.6289
Epoch 968/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6424 - val_loss: 0.6268
Epoch 969/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6362 - val_loss: 0.6250
Epoch 970/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6359 - val_loss: 0.6221
Epoch 971/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6300 - val_loss: 0.6548
Epoch 972/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6314 - val_loss: 0.6232
Epoch 973/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6284 - val_loss: 0.6252
Epoch 974/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.6366 - val_loss: 0.6199
Epoch 975/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6314 - val_loss: 0.6267
Epoch 976/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6270 - val_loss: 0.6184
Epoch 977/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6337 - val_loss: 0.6456
Epoch 978/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6278 - val_loss: 0.6235
Epoch 979/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6306 - val_loss: 0.6262
Epoch 980/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6350 - val_loss: 0.6284
Epoch 981/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6271 - val_loss: 0.6333
Epoch 982/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6281 - val_loss: 0.6245
Epoch 983/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.6291 - val_loss: 0.6218
Epoch 984/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6246 - val_loss: 0.6299
Epoch 985/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6285 - val_loss: 0.6221
Epoch 986/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6256 - val_loss: 0.6333
Epoch 987/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6289 - val_loss: 0.6241
Epoch 988/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6221 - val_loss: 0.6280
Epoch 989/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6293 - val_loss: 0.6259
Epoch 990/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6239 - val_loss: 0.6285
Epoch 991/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6263 - val_loss: 0.6313
Epoch 992/1000
17/17 [==============================] - 0s 3ms/step - loss: 0.6242 - val_loss: 0.6169
Epoch 993/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6333 - val_loss: 0.6203
Epoch 994/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6276 - val_loss: 0.6222
Epoch 995/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6286 - val_loss: 0.6402
Epoch 996/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6288 - val_loss: 0.6180
Epoch 997/1000
17/17 [==============================] - 0s 2ms/step - loss: 0.6267 - val_loss: 0.6231
Epoch 998/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6212 - val_loss: 0.6186
Epoch 999/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6220 - val_loss: 0.6282
Epoch 1000/1000
17/17 [==============================] - 0s 1ms/step - loss: 0.6211 - val_loss: 0.6206


[10]:
{'objective': -0.6206095814704895}
6.5. Executing Neural Architecture Search#
[12]:
from deephyper.hpo import CBO
from deephyper.evaluator import Evaluator
from deephyper.evaluator.callback import TqdmCallback
def execute_neural_architecture_search():
    """Run Bayesian-optimization NAS over the conditional search space.

    Builds a Ray-backed evaluator around ``run`` (4 CPUs total, 1 CPU per
    task), checkpoints trained models under ``nas_tfk2_basic/models``, and
    performs 50 CBO evaluations seeded with the search space's default
    configuration.

    Returns:
        The results DataFrame produced by ``search.search`` (one row per
        evaluated configuration).
    """
    log_dir = "nas_tfk2_basic"
    checkpoint_dir = os.path.join(log_dir, "models")
    pathlib.Path(checkpoint_dir).mkdir(parents=True, exist_ok=True)

    # Parallel evaluation backend; each trial gets its own checkpoint dir.
    ray_kwargs = {
        "num_cpus": 4,
        "num_cpus_per_task": 1,
        "run_function_kwargs": {
            "model_checkpoint_dir": checkpoint_dir,
            "verbose": False,
        },
        "callbacks": [TqdmCallback()],
    }
    evaluator = Evaluator.create(run, method="ray", method_kwargs=ray_kwargs)

    # Centralized Bayesian Optimization; the mixed genetic-algorithm
    # acquisition optimizer handles the conditional (categorical + numeric)
    # search space, refit at every iteration.
    search = CBO(
        nas_search_space.hp_problem,
        evaluator,
        log_dir=log_dir,
        initial_points=[nas_search_space.hp_problem.default_configuration],
        acq_optimizer="mixedga",
        acq_optimizer_freq=1,
    )
    return search.search(max_evals=50)
[13]:
results = execute_neural_architecture_search()
2024-09-05 09:22:23,483 INFO worker.py:1788 -- Started a local Ray instance.
/Users/romainegele/Documents/Argonne/deephyper/deephyper/evaluator/_evaluator.py:132: UserWarning: Applying nest-asyncio patch for IPython Shell!
warnings.warn(
WARNING:root:Results file already exists, it will be renamed to /Users/romainegele/Documents/Argonne/deephyper-tutorials/tutorials/colab/nas_tfk2_basic/results_20240905-092225.csv
6.6. Analyzing the Results of Neural Architecture Search#
[15]:
from deephyper.analysis.hpo import filter_failed_objectives
# Drop rows whose objective marks a failed evaluation; the second return
# value (the failed rows) is discarded here.
results, _ = filter_failed_objectives(results)
results
[15]:
p:batch_size | p:layer_0_activation | p:layer_0_units | p:learning_rate | p:num_layers | p:input->layer_1 | p:input->layer_2 | p:input->layer_3 | p:layer_0->layer_2 | p:layer_0->layer_3 | ... | p:layer_2_activation | p:layer_2_units | p:layer_3_activation | p:layer_3_units | p:layer_4_activation | p:layer_4_units | objective | job_id | m:timestamp_submit | m:timestamp_gather | |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | 44 | sigmoid | 45 | 0.032610 | 3 | 1 | 1 | 0 | 0 | 0 | ... | swish | 33 | relu | 1 | relu | 1 | -0.856591 | 2 | 2.398861 | 23.227373 |
1 | 35 | swish | 41 | 0.000015 | 3 | 1 | 0 | 0 | 0 | 0 | ... | relu | 14 | relu | 1 | relu | 1 | -0.837029 | 3 | 2.399569 | 25.287926 |
2 | 16 | relu | 32 | 0.001000 | 2 | 0 | 0 | 0 | 0 | 0 | ... | relu | 1 | relu | 1 | relu | 1 | -0.635702 | 0 | 2.397342 | 30.584927 |
3 | 74 | relu | 3 | 0.000054 | 5 | 1 | 0 | 0 | 1 | 0 | ... | tanh | 31 | relu | 63 | swish | 56 | -0.813391 | 4 | 24.227576 | 42.423346 |
4 | 38 | sigmoid | 4 | 0.003118 | 1 | 0 | 0 | 0 | 0 | 0 | ... | relu | 1 | relu | 1 | relu | 1 | -0.843133 | 6 | 31.686945 | 48.262753 |
5 | 15 | swish | 1 | 0.040829 | 2 | 0 | 0 | 0 | 0 | 0 | ... | relu | 1 | relu | 1 | relu | 1 | -0.878911 | 5 | 26.325039 | 49.630951 |
6 | 32 | relu | 21 | 0.010260 | 4 | 0 | 1 | 1 | 1 | 1 | ... | relu | 4 | sigmoid | 57 | relu | 1 | -0.552101 | 7 | 43.390739 | 65.487351 |
7 | 8 | relu | 41 | 0.003504 | 3 | 0 | 0 | 0 | 0 | 0 | ... | tanh | 22 | relu | 1 | relu | 1 | -0.393816 | 9 | 50.651295 | 91.581341 |
8 | 90 | swish | 41 | 0.004726 | 5 | 0 | 0 | 1 | 1 | 0 | ... | sigmoid | 9 | swish | 3 | sigmoid | 13 | -0.373598 | 11 | 92.546858 | 109.617554 |
9 | 8 | swish | 33 | 0.000012 | 4 | 1 | 0 | 1 | 1 | 1 | ... | relu | 60 | swish | 64 | relu | 1 | -0.835079 | 10 | 66.491044 | 121.362111 |
10 | 51 | relu | 57 | 0.000043 | 1 | 0 | 0 | 0 | 0 | 0 | ... | relu | 1 | relu | 1 | relu | 1 | -0.839816 | 12 | 110.592049 | 126.891973 |
11 | 37 | swish | 54 | 0.077270 | 5 | 0 | 0 | 1 | 0 | 0 | ... | sigmoid | 8 | tanh | 10 | sigmoid | 13 | -1.110910 | 13 | 123.910111 | 145.319685 |
12 | 97 | tanh | 59 | 0.049179 | 5 | 0 | 0 | 1 | 1 | 0 | ... | sigmoid | 7 | swish | 5 | sigmoid | 14 | -0.785662 | 14 | 129.117882 | 147.700025 |
13 | 2 | sigmoid | 32 | 0.018636 | 5 | 1 | 0 | 1 | 0 | 0 | ... | relu | 23 | swish | 36 | swish | 53 | -0.837148 | 1 | 2.398152 | 165.477171 |
14 | 72 | tanh | 41 | 0.007107 | 5 | 0 | 0 | 1 | 1 | 0 | ... | sigmoid | 7 | tanh | 19 | swish | 28 | -0.302309 | 15 | 147.698724 | 170.626488 |
15 | 2 | relu | 21 | 0.000154 | 3 | 0 | 1 | 0 | 0 | 0 | ... | tanh | 14 | relu | 1 | relu | 1 | -0.695745 | 8 | 49.233974 | 170.629071 |
16 | 8 | relu | 40 | 0.018258 | 5 | 0 | 0 | 1 | 0 | 0 | ... | tanh | 57 | tanh | 22 | sigmoid | 59 | -0.846542 | 16 | 152.384796 | 214.972001 |
17 | 5 | relu | 41 | 0.082435 | 5 | 0 | 0 | 1 | 0 | 0 | ... | tanh | 20 | tanh | 4 | sigmoid | 17 | -0.864827 | 17 | 170.625352 | 238.917515 |
18 | 7 | relu | 46 | 0.050009 | 5 | 0 | 0 | 1 | 0 | 0 | ... | tanh | 26 | tanh | 18 | sigmoid | 17 | -0.994480 | 18 | 180.276333 | 246.140277 |
19 | 16 | relu | 41 | 0.010473 | 3 | 0 | 0 | 0 | 0 | 0 | ... | tanh | 48 | relu | 1 | relu | 1 | -0.823368 | 20 | 219.172256 | 250.485698 |
20 | 7 | tanh | 46 | 0.079948 | 5 | 0 | 0 | 1 | 0 | 1 | ... | tanh | 26 | tanh | 20 | swish | 60 | -0.832149 | 19 | 180.278026 | 250.488314 |
21 | 18 | tanh | 42 | 0.000552 | 5 | 0 | 0 | 1 | 1 | 0 | ... | sigmoid | 47 | tanh | 21 | swish | 59 | -0.679949 | 23 | 258.956510 | 294.177700 |
22 | 8 | relu | 41 | 0.024097 | 5 | 0 | 0 | 0 | 0 | 0 | ... | tanh | 43 | relu | 10 | relu | 1 | -0.848148 | 22 | 250.484444 | 299.841397 |
23 | 5 | relu | 45 | 0.007380 | 3 | 0 | 0 | 0 | 0 | 0 | ... | swish | 38 | relu | 1 | relu | 1 | -0.500730 | 21 | 244.672089 | 304.236167 |
24 | 61 | relu | 10 | 0.034834 | 5 | 0 | 1 | 0 | 1 | 1 | ... | swish | 3 | tanh | 15 | swish | 48 | -0.577232 | 27 | 307.438368 | 327.087681 |
25 | 8 | sigmoid | 41 | 0.002513 | 3 | 0 | 0 | 0 | 0 | 0 | ... | tanh | 24 | relu | 1 | relu | 1 | -0.833777 | 25 | 299.840063 | 339.798210 |
26 | 8 | relu | 45 | 0.002059 | 4 | 0 | 0 | 0 | 0 | 0 | ... | tanh | 22 | sigmoid | 10 | relu | 1 | -0.323491 | 26 | 304.234934 | 345.718221 |
27 | 8 | relu | 53 | 0.021763 | 3 | 0 | 0 | 0 | 0 | 0 | ... | swish | 1 | relu | 1 | relu | 1 | -0.792197 | 28 | 332.114602 | 371.515173 |
28 | 7 | relu | 42 | 0.026038 | 2 | 0 | 0 | 0 | 0 | 0 | ... | relu | 1 | relu | 1 | relu | 1 | -0.491669 | 29 | 345.716928 | 386.765604 |
29 | 3 | tanh | 30 | 0.000014 | 5 | 0 | 0 | 1 | 1 | 1 | ... | relu | 54 | tanh | 64 | tanh | 43 | -0.794177 | 24 | 258.957998 | 396.926558 |
30 | 8 | relu | 45 | 0.001953 | 5 | 0 | 0 | 1 | 0 | 0 | ... | tanh | 29 | swish | 44 | relu | 2 | -0.843250 | 30 | 349.960318 | 402.656029 |
31 | 8 | relu | 45 | 0.002391 | 4 | 0 | 0 | 1 | 0 | 0 | ... | tanh | 26 | sigmoid | 20 | relu | 1 | -0.570951 | 31 | 378.299622 | 423.771388 |
32 | 9 | relu | 45 | 0.002143 | 4 | 0 | 1 | 1 | 0 | 0 | ... | tanh | 23 | tanh | 12 | relu | 1 | -0.088316 | 32 | 391.748946 | 433.073985 |
33 | 8 | relu | 44 | 0.002006 | 4 | 0 | 0 | 1 | 0 | 0 | ... | tanh | 22 | tanh | 58 | relu | 1 | -0.841810 | 33 | 402.654607 | 447.361397 |
34 | 8 | relu | 45 | 0.091478 | 4 | 0 | 0 | 0 | 0 | 0 | ... | relu | 24 | tanh | 45 | relu | 1 | -0.843895 | 34 | 407.707823 | 454.583435 |
35 | 8 | relu | 46 | 0.076854 | 4 | 1 | 0 | 0 | 0 | 0 | ... | tanh | 22 | swish | 10 | relu | 1 | -0.832944 | 35 | 430.242332 | 472.796194 |
36 | 9 | relu | 45 | 0.092587 | 4 | 0 | 1 | 0 | 0 | 0 | ... | tanh | 23 | tanh | 11 | relu | 1 | -1.034327 | 36 | 441.483011 | 481.720454 |
37 | 10 | relu | 47 | 0.076934 | 4 | 1 | 1 | 1 | 0 | 0 | ... | relu | 23 | tanh | 12 | relu | 1 | -0.838317 | 37 | 453.036270 | 491.917020 |
38 | 9 | relu | 45 | 0.045269 | 4 | 0 | 1 | 1 | 0 | 0 | ... | relu | 1 | tanh | 13 | relu | 1 | -0.841542 | 38 | 460.907967 | 498.829024 |
39 | 10 | relu | 45 | 0.001363 | 5 | 0 | 1 | 1 | 0 | 0 | ... | tanh | 23 | tanh | 12 | sigmoid | 1 | -0.692866 | 39 | 477.404193 | 519.086112 |
40 | 11 | relu | 29 | 0.002736 | 5 | 0 | 0 | 0 | 0 | 0 | ... | swish | 23 | sigmoid | 13 | relu | 1 | -0.843667 | 40 | 487.297042 | 526.926419 |
41 | 14 | relu | 45 | 0.001467 | 4 | 0 | 1 | 0 | 0 | 0 | ... | tanh | 23 | tanh | 11 | relu | 1 | -0.085316 | 42 | 503.811523 | 536.778627 |
42 | 7 | relu | 45 | 0.007118 | 4 | 0 | 1 | 0 | 0 | 0 | ... | tanh | 23 | tanh | 12 | relu | 1 | -0.568592 | 41 | 498.827958 | 546.672133 |
43 | 89 | relu | 47 | 0.002942 | 4 | 0 | 1 | 0 | 0 | 0 | ... | swish | 56 | tanh | 11 | relu | 1 | -0.833958 | 46 | 552.624057 | 569.276774 |
44 | 7 | relu | 41 | 0.035073 | 2 | 0 | 0 | 0 | 0 | 0 | ... | relu | 1 | relu | 1 | relu | 1 | -0.837686 | 44 | 533.560559 | 575.517045 |
45 | 16 | swish | 1 | 0.002478 | 4 | 0 | 1 | 0 | 0 | 0 | ... | sigmoid | 5 | tanh | 14 | relu | 1 | -0.406434 | 45 | 544.581489 | 575.519870 |
46 | 5 | relu | 45 | 0.002216 | 4 | 0 | 1 | 0 | 0 | 1 | ... | tanh | 12 | tanh | 10 | relu | 1 | -0.360283 | 43 | 526.925266 | 594.200230 |
47 | 14 | relu | 54 | 0.000044 | 4 | 0 | 1 | 0 | 0 | 0 | ... | tanh | 24 | tanh | 27 | relu | 1 | -0.787052 | 47 | 575.515678 | 608.489793 |
48 | 17 | relu | 47 | 0.000806 | 4 | 1 | 1 | 1 | 0 | 0 | ... | tanh | 19 | tanh | 13 | relu | 1 | -0.498741 | 48 | 588.185281 | 618.369118 |
49 | 17 | relu | 63 | 0.000105 | 4 | 1 | 1 | 1 | 0 | 1 | ... | tanh | 24 | sigmoid | 56 | relu | 1 | -0.750285 | 49 | 588.186336 | 621.403263 |
50 rows × 26 columns
[16]:
from deephyper.analysis.hpo import plot_search_trajectory_single_objective_hpo
# Plot the running-best objective over the 50 evaluations on a preallocated
# axis (WIDTH_PLOTS / HEIGHT_PLOTS are notebook-level constants).
fig, ax = plt.subplots(figsize=(WIDTH_PLOTS, HEIGHT_PLOTS))
fig, ax = plot_search_trajectory_single_objective_hpo(results, ax=ax)
plt.show()

6.7. Evaluate Checkpointed Models#
[23]:
def evaluate_checkpointed_model_on_test(model_path):
    """Load a checkpointed Keras model and plot its test-set predictions.

    Refits x/y ``StandardScaler``s on the training split (mirroring the
    preprocessing used during the search), scales the test inputs, maps the
    model outputs back to the original target scale, then plots truth vs.
    prediction followed by the model architecture diagram.

    Args:
        model_path: Path to a ``.keras`` checkpoint written during the search.
    """
    print(f"Loading model: {model_path}")

    (x, y), (vx, vy) = load_data_train_valid(verbose=True)
    _, (tx, ty) = load_data_train_test()

    # Scalers are fit on training data only, never on the test split.
    scaler_x = StandardScaler()
    scaler_x.fit(x)
    s_tx = scaler_x.transform(tx)

    scaler_y = StandardScaler()
    scaler_y.fit(y)

    model = tfk.models.load_model(model_path)
    # Undo the target scaling so predictions are comparable to raw ty.
    pred_ty = scaler_y.inverse_transform(model(s_tx).numpy())

    plt.figure(figsize=(WIDTH_PLOTS, HEIGHT_PLOTS))
    plt.plot(tx, ty, label="truth")
    plt.plot(tx, pred_ty, label=r"$prediction$")
    plt.legend()
    plt.ylim(-30, 30)
    plt.show()

    plot_model_architecture(model)
[32]:
from deephyper.analysis.hpo import parameters_from_row
# Baseline model
# The smallest job_id is the first configuration evaluated, i.e. the
# search space's default configuration used as the baseline.
idx = results["job_id"].argmin()
parameters = parameters_from_row(results.iloc[idx])
print(f"{parameters=}")
evaluate_checkpointed_model_on_test("nas_tfk2_basic/models/model_0.0.keras")
parameters={'batch_size': 16, 'layer_0_activation': 'relu', 'layer_0_units': 32, 'learning_rate': 0.001, 'num_layers': 2, 'input->layer_1': 0, 'input->layer_2': 0, 'input->layer_3': 0, 'layer_0->layer_2': 0, 'layer_0->layer_3': 0, 'layer_0->layer_4': 0, 'layer_1->layer_3': 0, 'layer_1->layer_4': 0, 'layer_1_activation': 'relu', 'layer_1_units': 32, 'layer_2->layer_4': 0, 'layer_2_activation': 'relu', 'layer_2_units': 1, 'layer_3_activation': 'relu', 'layer_3_units': 1, 'layer_4_activation': 'relu', 'layer_4_units': 1}
Loading model: nas_tfk2_basic/models/model_0.0.keras
train_X shape: (268, 1)
train_y shape: (268, 1)
valid_X shape: (132, 1)
valid_y shape: (132, 1)


[33]:
# Best model
# CBO maximizes the objective (here the negated validation loss), so the
# argmax row is the best-performing configuration of the search.
idx = results["objective"].argmax()
job_id = results.iloc[idx]["job_id"]
parameters = parameters_from_row(results.iloc[idx])
print(f"{parameters=}")
# Checkpoints are named model_0.<job_id>.keras by the run function.
evaluate_checkpointed_model_on_test(f"nas_tfk2_basic/models/model_0.{job_id}.keras")
parameters={'batch_size': 14, 'layer_0_activation': 'relu', 'layer_0_units': 45, 'learning_rate': 0.001466675026, 'num_layers': 4, 'input->layer_1': 0, 'input->layer_2': 1, 'input->layer_3': 0, 'layer_0->layer_2': 0, 'layer_0->layer_3': 0, 'layer_0->layer_4': 0, 'layer_1->layer_3': 1, 'layer_1->layer_4': 0, 'layer_1_activation': 'swish', 'layer_1_units': 53, 'layer_2->layer_4': 0, 'layer_2_activation': 'tanh', 'layer_2_units': 23, 'layer_3_activation': 'tanh', 'layer_3_units': 11, 'layer_4_activation': 'relu', 'layer_4_units': 1}
Loading model: nas_tfk2_basic/models/model_0.42.keras
train_X shape: (268, 1)
train_y shape: (268, 1)
valid_X shape: (132, 1)
valid_y shape: (132, 1)

