research-rainfallradar/aimodel/src/rainfallwater_identity_TEST.ipynb

{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "1f6fdebf-69c5-46ab-a5a8-f9c91f000ff3",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-06 18:45:38.088928: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory\n",
"2023-01-06 18:45:38.088955: I tensorflow/stream_executor/cuda/cudart_stub.cc:29] Ignore above cudart dlerror if you do not have a GPU set up on your machine.\n"
]
}
],
"source": [
"import os\n",
"\n",
"import tensorflow as tf\n",
"import matplotlib.pyplot as plt\n",
"\n",
"from lib.dataset.parse_heightmap import parse_heightmap\n",
"from lib.ai.model_rainfallwater_mono import model_rainfallwater_mono\n",
"from lib.ai.helpers.make_callbacks import make_callbacks\n",
"from lib.ai.helpers.summarywriter import summarywriter"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "07093079",
"metadata": {},
"outputs": [],
"source": [
"filepath_heightmap=\"/mnt/research-data/main/terrain50-nimrodsized.json.gz\"\n",
"\n",
"dir_output = \"/tmp/x/mono_segment_TEST\"\n",
"if not os.path.exists(os.path.join(dir_output, \"checkpoints\")):\n",
"\tos.makedirs(os.path.join(dir_output, \"checkpoints\"))"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "f4466ac9",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"RAINFALL channels 1 width 64 height 64 HEIGHTMAP_INPUT False\n",
"convnext:shape IN x (None, 64, 64, 1)\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-06 18:45:42.019487: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:975] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
"2023-01-06 18:45:42.019743: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory\n",
"2023-01-06 18:45:42.019824: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcublas.so.11'; dlerror: libcublas.so.11: cannot open shared object file: No such file or directory\n",
"2023-01-06 18:45:42.019903: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcublasLt.so.11'; dlerror: libcublasLt.so.11: cannot open shared object file: No such file or directory\n",
"2023-01-06 18:45:42.019979: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcufft.so.10'; dlerror: libcufft.so.10: cannot open shared object file: No such file or directory\n",
"2023-01-06 18:45:42.020049: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcurand.so.10'; dlerror: libcurand.so.10: cannot open shared object file: No such file or directory\n",
"2023-01-06 18:45:42.020119: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcusolver.so.11'; dlerror: libcusolver.so.11: cannot open shared object file: No such file or directory\n",
"2023-01-06 18:45:42.020192: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcusparse.so.11'; dlerror: libcusparse.so.11: cannot open shared object file: No such file or directory\n",
"2023-01-06 18:45:42.020265: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcudnn.so.8'; dlerror: libcudnn.so.8: cannot open shared object file: No such file or directory\n",
"2023-01-06 18:45:42.020277: W tensorflow/core/common_runtime/gpu/gpu_device.cc:1850] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\n",
"Skipping registering GPU devices...\n",
"2023-01-06 18:45:42.020683: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: AVX2 FMA\n",
"To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"DEBUG:model ENCODER output_shape (None, 512)\n",
"DEBUG:model BOTTLENECK:stack2image output_shape (None, 4, 4, 512)\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-06 18:45:44.801 | WARNING | lib.ai.model_rainfallwater_mono:model_rainfallwater_mono:71 - Warning: TODO implement attention from https://ieeexplore.ieee.org/document/9076883\n",
"2023-01-06 18:45:44.833 | INFO | lib.ai.model_rainfallwater_mono:model_rainfallwater_mono:86 - learning_rate: 3e-05\n"
]
}
],
"source": [
"model = model_rainfallwater_mono(\n",
"\tmetadata={ \"rainfallradar\": [ 1, 64, 64 ] },\n",
"\tmodel_arch_dec=\"convnext_i_xxtiny\",\n",
"\tlearning_rate=3e-5\n",
")\n",
"\n",
"summarywriter(model, filepath_output=os.path.join(dir_output, \"summary.txt\"))"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "78c633e1",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"cells 4096 cells/2 2048.0 shape+ (64, 64) tf.Tensor(1015, shape=(), dtype=int64)\n"
]
},
{
"data": {
"text/plain": [
"<matplotlib.image.AxesImage at 0x7fc684421570>"
]
},
"execution_count": 5,
"metadata": {},
"output_type": "execute_result"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAaEAAAGfCAYAAAD22G0fAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAA9hAAAPYQGoP6dpAAAfQUlEQVR4nO3df2xV9f3H8ddF4ErhcsUf3NvGilUvKr8UKatUZ+uULswZGYlTQYdZsoAFpWMLWpuMavRexKSpC9oFZrDEdf1H2Vim0i5K2dIwsdpYi6k4qlblrtFhb1XWKny+fzjO12tb5bb38rn33OcjOQn9nHNv3x/a8uLT+76f4zHGGAEAYME42wUAALIXIQQAsIYQAgBYQwgBAKwhhAAA1hBCAABrCCEAgDWEEADAGkIIAGANIQQAsGZ8qp74iSee0KOPPqrDhw9r9uzZqq2t1fe///3vfNzx48f14YcfyufzyePxpKo8AECKGGPU39+vvLw8jRv3HWsdkwKNjY1mwoQJZtu2bebAgQNm3bp1ZvLkyebdd9/9zsf29PQYSRwcHBwcGX709PR857/5HmOSv4FpUVGRrrjiCtXV1Tljl156qZYuXapIJPKtj+3r69MZZ5yhd189X1OnjP63hT+ZOXfUj0Xq7Xyrw3YJOAX4OcxOX+oL/UPP6ZNPPpHf7//Wa5P+67jBwUG1tbXpvvvuixsvKytTa2vrkOsHBgY0MDDgfNzf3y9JmjplnKb6Rh9C4z0TRv1YpN5YvrbIHPwcZqn/LW1O5iWVpP9L8NFHH+nYsWMKBAJx44FAQNFodMj1kUhEfr/fOfLz85NdEgAgTaXsv6PfTEBjzLCpWFlZqb6+Pufo6elJVUkAgDST9F/HnX322TrttNOGrHp6e3uHrI4kyev1yuv1JrsMAGlg94ftw47/MO/yU1oH0lfSV0ITJ07UggUL1NzcHDfe3Nys4uLiZH86AEAGS8n7hNavX6877rhDhYWFWrRokbZu3ar33ntPq1evTsWnAwBkqJSE0C233KKPP/5YDz74oA4fPqw5c+boueee04wZM1Lx6QAAGSplOyaUl5ervLw8VU8PAHAB3qwBALAmZSuhsfrJzLm80S3DjNQJNZyRuqMSeQ5krmR8r8AdWAkBAKwhhAAA1hBCAABrCCEAgDVp25iAzJPIC8g0IOBkJeN7heaG9MVKCABgDSEEALCGEAIAWEMIAQCsIYQAANbQHYeUoxMOtiX6PUg33anDSggAYA0hBACwhhACAFhDCAEArCGEAADW0B2HEdHVhmw10vc+XXPJx0oIAGANIQQAsIYQAgBYQwgBAKwhhAAA1tAdB7rgAFjDSggAYA0hBACwhhACAFhDCAEArKExIcvQhAAgnbASAgBYQwgBAKwhhAAA1hBCAABrCCEAgDV0xwHASUqku5Qb4J0cVkIAAGsIIQCANYQQAMAaQggAYA0hBACwhu44l2KPOCA9jfSzma3ddKyEAADWEEIAAGsIIQCANYQQAMAaQggAYE3C3XF79+7Vo48+qra2Nh0+fFg7d+7U0qVLnfPGGD3wwAPaunWrjhw5oqKiIj3++OOaPXt2MuvG/9AFB6SnRH82s7VrLuGV0GeffabLLrtMW7ZsGfb85s2bVVNToy1btmj//v0KBoNavHix+vv7x1wsAMBdEl4JLVmyREuWLBn2nDFGtbW1qqqq0rJlyyRJ9fX1CgQCamho0KpVq4Y8ZmBgQAMDA87HsVgs0ZIAABkqqa8JdXd3KxqNqqyszBnzer0qKSlRa2vrsI+JRCLy+/3OkZ+fn8ySAABpLKkhFI1GJUmBQCBuPBAIOOe+qbKyUn19fc7R09OTzJIAAGksJdv2eDyeuI+NMUPGTvB6vfJ6vakoAwCQ5pIaQsFgUNJXK6Lc3FxnvLe3d8jqCADw3dzeNZfUX8cVFBQoGAyqubnZGRscHFRLS4uKi4uT+akAAC6Q8Ero008/1dtvv+183N3drfb2dp155pk677zzVFFRoXA4rFAopFAopHA4rJycHC1fvjyphQMAMl/CIfTKK6/o2muvdT5ev369JGnlypV66qmntGHDBh09elTl5eXOm1Wbmprk8/mSVzUAwBU8xhhju4ivi8Vi8vv9KtVNGu+ZYLuctMeOCUB2SufXhL40X2iP/qy+vj5NnTr1W6/lpnYZgrAB8HWJ/JuQzoHFBqYAAGsIIQCANYQQAMAaQggAYA0hBACwhu64NEQnHIBswUoIAGANIQQAsIYQAgBYQwgBAKwhhAAA1tAdZxFdcABOhXS+MR4rIQCANYQQAMAaQggAYA0hBACwhhACAFhDdxwAZKl06JpjJQQAsIYQAgBYQwgBAKwhhAAA1tCYAAAulw7b84yElRAAwBpCCABgDSEEALCGEAIAWEMIAQCsoTvuFODmdQBsSofteUbCSggAYA0hBACwhhACAFhDCAEArCGEAADW0B0HAFkqHbrmWAkBAKwhhAAA1hBCAABrCCEAgDWEEADAGrrjACBLsXccACCrEUIAAGsIIQCANYQQAMCahEIoEolo4cKF8vl8mj59upYuXaqurq64a4wxqq6uVl5eniZNmqTS0lJ1dnYmtWgAgDsk1B3X0tKiNWvWaOHChfryyy9VVVWlsrIyHThwQJMnT5Ykbd68WTU1NXrqqac0c+ZMPfTQQ1q8eLG6urrk8/lSMol0wR1UASAxCYXQCy+8EPfx9u3bNX36dLW1temaa66RMUa1tbWqqqrSsmXLJEn19fUKBAJqaGjQqlWrklc5ACDjjek1ob6+PknSmWeeKUnq7u5WNBpVWVmZc43X61VJSYlaW1uHfY6BgQHFYrG4AwCQHUYdQsYYrV+/XldffbXmzJkjSYpGo5KkQCAQd20gEHDOfVMkEpHf73eO/Pz80ZYEAMgwow6htWvX6vXXX9cf//jHIec8Hk/cx8aYIWMnVFZWqq+vzzl6enpGWxIAIMOMatueu+++W7t27dLevXt17rnnOuPBYFDSVyui3NxcZ7y3t3fI6ugEr9crr9c7mjIA4KSMtD1Nos1EyXqedJGMuse69U9CKyFjjNauXatnn31WL774ogoKCuLOFxQUKBgMqrm52RkbHBxUS0uLiouLx1QoAMB9EloJrVmzRg0NDfrzn/8sn8/nvM7j9/s1adIkeTweVVRUKBwOKxQKKRQKKRwOKycnR8uXL0/JBAAAmSuhEKqrq5MklZaWxo1v375dd955pyRpw4YNOnr0qMrLy3XkyBEVFRWpqanJ9e8RAgAkLqEQMsZ85zUej0fV1dWqrq4ebU0AgCzB3nEAAGu4qR0AV0mkWyuVXXPp1DGXaAfbqaydlRAAwBpCCABgDSEEALCGEAIAWEMIAQCsoTsOQEYa655lGFki3X7Djcf6j2vazJP7XKyEAADWEEIAAGsIIQCANYQQAMAaQggAYA3dcQBwkkbqEBuum8xtd2FNpBvxS/OFpEMndS0rIQCANYQQAMAaQggAYA0hBACwhhACAFhDd1wSua0bBkgH7BE3dsm4U2wiz83ecQCAj
EAIAQCsIYQAANYQQgAAa2hMSCIaEIDRy4QGhGTUmAkNTKeyFlZCAABrCCEAgDWEEADAGkIIAGANIQQAsIbuuFFIpy4WIBOleydcutdnAze1AwC4DiEEALCGEAIAWEMIAQCsIYQAANbQHTcKmbD3EwAk00j/vo21k5CVEADAGkIIAGANIQQAsIYQAgBYQwgBAKyhOy6JEu0SoZsOQLZjJQQAsIYQAgBYQwgBAKwhhAAA1iTUmFBXV6e6ujq98847kqTZs2frN7/5jZYsWSJJMsbogQce0NatW3XkyBEVFRXp8ccf1+zZs5NeeCahAQGIl8jPBDeYS2/DfS1j/cc1bebJPT6hldC5556rTZs26ZVXXtErr7yiH/zgB7rpppvU2dkpSdq8ebNqamq0ZcsW7d+/X8FgUIsXL1Z/f38inwYAkCUSCqEbb7xRP/rRjzRz5kzNnDlTDz/8sKZMmaJ9+/bJGKPa2lpVVVVp2bJlmjNnjurr6/X555+roaEhVfUDADLYqF8TOnbsmBobG/XZZ59
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"heightmap = parse_heightmap(filepath_heightmap) / 100\n",
"heightmap = tf.image.crop_to_bounding_box(tf.expand_dims(heightmap, axis=-1), 0, 0, 64, 64)\n",
"#heightmap_labels = tf.one_hot(tf.cast(tf.math.greater(tf.squeeze(heightmap)/10, 0.05), dtype=tf.int32), 2)\n",
"heightmap_labels = tf.cast(tf.math.greater(tf.squeeze(heightmap)/10, 0.05), dtype=tf.int32)\n",
"\n",
"dataset = tf.data.Dataset.from_tensor_slices([heightmap_labels]).map(\n",
"\tlambda tensor: tf.expand_dims(tensor, axis=-1),\n",
"\tnum_parallel_calls=tf.data.AUTOTUNE\n",
")\n",
"dataset_labels = tf.data.Dataset.from_tensor_slices([heightmap_labels])\n",
"\n",
"for item in dataset_labels:\n",
"\tprint(\"cells\", 64*64, \"cells/2\", (64*64)/2, \"shape+\", item.shape, tf.math.reduce_sum(tf.math.argmax(item, axis=-1)))\n",
"\tbreak\n",
"dataset = tf.data.Dataset.zip((dataset, dataset_labels)) \\\n",
"\t.repeat(64 * 64) \\\n",
"\t.batch(64) \\\n",
"\t.prefetch(tf.data.AUTOTUNE)\n",
"\n",
"\n",
"plt.imshow(tf.squeeze(heightmap_labels))"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "3dbc95eb",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/25\n",
"64/64 [==============================] - ETA: 0s - loss: 0.0863 - binary_accuracy: 0.5702"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 20:13:58.692 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 20:13:59.282 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 552s 8s/step - loss: 0.0863 - binary_accuracy: 0.5702\n",
"Epoch 2/25\n",
"64/64 [==============================] - ETA: 0s - loss: -0.1695 - binary_accuracy: 0.5977"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 20:22:53.006 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 20:22:53.575 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 534s 8s/step - loss: -0.1695 - binary_accuracy: 0.5977\n",
"Epoch 3/25\n",
"64/64 [==============================] - ETA: 0s - loss: -0.1154 - binary_accuracy: 0.5065"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 20:31:46.951 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 20:31:47.518 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 534s 8s/step - loss: -0.1154 - binary_accuracy: 0.5065\n",
"Epoch 4/25\n",
"64/64 [==============================] - ETA: 0s - loss: -0.2798 - binary_accuracy: 0.4606"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 20:40:40.765 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 20:40:41.338 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 534s 8s/step - loss: -0.2798 - binary_accuracy: 0.4606\n",
"Epoch 5/25\n",
"64/64 [==============================] - ETA: 0s - loss: -0.0833 - binary_accuracy: 0.4692"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 20:49:34.870 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 20:49:35.429 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 534s 8s/step - loss: -0.0833 - binary_accuracy: 0.4692\n",
"Epoch 6/25\n",
"64/64 [==============================] - ETA: 0s - loss: 0.0793 - binary_accuracy: 0.5488"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 20:58:26.682 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 20:58:27.245 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 532s 8s/step - loss: 0.0793 - binary_accuracy: 0.5488\n",
"Epoch 7/25\n",
"64/64 [==============================] - ETA: 0s - loss: 0.0547 - binary_accuracy: 0.5568"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 21:07:17.933 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 21:07:18.495 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 531s 8s/step - loss: 0.0547 - binary_accuracy: 0.5568\n",
"Epoch 8/25\n",
"64/64 [==============================] - ETA: 0s - loss: 0.0304 - binary_accuracy: 0.5615"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 21:16:09.952 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 21:16:10.519 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 532s 8s/step - loss: 0.0304 - binary_accuracy: 0.5615\n",
"Epoch 9/25\n",
"64/64 [==============================] - ETA: 0s - loss: 0.0066 - binary_accuracy: 0.5657"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 21:25:03.040 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 21:25:03.614 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 533s 8s/step - loss: 0.0066 - binary_accuracy: 0.5657\n",
"Epoch 10/25\n",
"64/64 [==============================] - ETA: 0s - loss: -0.0154 - binary_accuracy: 0.5707"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 21:33:57.583 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 21:33:58.155 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 535s 8s/step - loss: -0.0154 - binary_accuracy: 0.5707\n",
"Epoch 11/25\n",
"64/64 [==============================] - ETA: 0s - loss: -0.0363 - binary_accuracy: 0.5749"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 21:42:52.326 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 21:42:52.898 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 535s 8s/step - loss: -0.0363 - binary_accuracy: 0.5749\n",
"Epoch 12/25\n",
"64/64 [==============================] - ETA: 0s - loss: -0.0575 - binary_accuracy: 0.5775"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 21:51:46.051 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 21:51:46.627 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 534s 8s/step - loss: -0.0575 - binary_accuracy: 0.5775\n",
"Epoch 13/25\n",
"64/64 [==============================] - ETA: 0s - loss: -0.0828 - binary_accuracy: 0.5835"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 22:00:40.405 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 22:00:40.971 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 534s 8s/step - loss: -0.0828 - binary_accuracy: 0.5835\n",
"Epoch 14/25\n",
"64/64 [==============================] - ETA: 0s - loss: -0.1216 - binary_accuracy: 0.5869"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 22:09:34.844 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 22:09:35.418 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 534s 8s/step - loss: -0.1216 - binary_accuracy: 0.5869\n",
"Epoch 15/25\n",
"64/64 [==============================] - ETA: 0s - loss: -0.1928 - binary_accuracy: 0.5821"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 22:18:28.651 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 22:18:29.219 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 534s 8s/step - loss: -0.1928 - binary_accuracy: 0.5821\n",
"Epoch 16/25\n",
"64/64 [==============================] - ETA: 0s - loss: -0.2815 - binary_accuracy: 0.5564"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 22:27:23.336 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 22:27:23.906 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 535s 8s/step - loss: -0.2815 - binary_accuracy: 0.5564\n",
"Epoch 17/25\n",
"64/64 [==============================] - ETA: 0s - loss: -0.2078 - binary_accuracy: 0.4880"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 22:36:18.191 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 22:36:18.761 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 535s 8s/step - loss: -0.2078 - binary_accuracy: 0.4880\n",
"Epoch 18/25\n",
"64/64 [==============================] - ETA: 0s - loss: -0.2591 - binary_accuracy: 0.4643"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 22:45:13.175 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 22:45:13.744 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 535s 8s/step - loss: -0.2591 - binary_accuracy: 0.4643\n",
"Epoch 19/25\n",
"64/64 [==============================] - ETA: 0s - loss: -0.4663 - binary_accuracy: 0.4370"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 22:54:08.037 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 22:54:08.624 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 535s 8s/step - loss: -0.4663 - binary_accuracy: 0.4370\n",
"Epoch 20/25\n",
"64/64 [==============================] - ETA: 0s - loss: 0.0233 - binary_accuracy: 0.5323"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 23:03:03.088 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 23:03:03.655 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 535s 8s/step - loss: 0.0233 - binary_accuracy: 0.5323\n",
"Epoch 21/25\n",
"64/64 [==============================] - ETA: 0s - loss: 0.0272 - binary_accuracy: 0.5650"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 23:11:57.541 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 23:11:58.301 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 535s 8s/step - loss: 0.0272 - binary_accuracy: 0.5650\n",
"Epoch 22/25\n",
"64/64 [==============================] - ETA: 0s - loss: 0.0047 - binary_accuracy: 0.5712"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 23:20:52.402 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 23:20:52.969 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 535s 8s/step - loss: 0.0047 - binary_accuracy: 0.5712\n",
"Epoch 23/25\n",
"64/64 [==============================] - ETA: 0s - loss: -0.0151 - binary_accuracy: 0.5776"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 23:29:46.982 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 23:29:47.558 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 535s 8s/step - loss: -0.0151 - binary_accuracy: 0.5776\n",
"Epoch 24/25\n",
"64/64 [==============================] - ETA: 0s - loss: -0.0319 - binary_accuracy: 0.5800"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 23:38:40.377 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 23:38:40.948 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 533s 8s/step - loss: -0.0319 - binary_accuracy: 0.5800\n",
"Epoch 25/25\n",
"64/64 [==============================] - ETA: 0s - loss: -0.0465 - binary_accuracy: 0.5806"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-05 23:47:34.752 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:12 - Saving checkpoint\n",
"2023-01-05 23:47:35.313 | INFO | lib.ai.components.CallbackCustomModelCheckpoint:on_epoch_end:15 - Checkpoint saved successfully\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"64/64 [==============================] - 534s 8s/step - loss: -0.0465 - binary_accuracy: 0.5806\n"
]
},
{
"data": {
"text/plain": [
"<keras.callbacks.History at 0x7ff99a33d8a0>"
]
},
"execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model.fit(\n",
"\tdataset,\n",
"\tepochs=25,\n",
"\tcallbacks=make_callbacks(\"/tmp/x/mono_segment_TEST\", model)\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "b7f8c33f",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"1/1 [==============================] - 2s 2s/step\n",
"tf.Tensor(\n",
"[[[1 1 1 ... 1 1 1]\n",
" [1 1 1 ... 1 1 1]\n",
" [1 1 1 ... 1 1 1]\n",
" ...\n",
" [0 1 1 ... 1 1 1]\n",
" [1 1 1 ... 1 1 1]\n",
" [1 1 1 ... 1 1 1]]], shape=(1, 64, 64), dtype=int32)\n",
"tf.Tensor(3936, shape=(), dtype=int32) tf.Tensor(160, shape=(), dtype=int32)\n",
"(1, 64, 64)\n",
"[[[ 6.318168 4.484145 5.098278 ... 2.2153077 4.2120304\n",
" 3.6126046 ]\n",
" [ 6.4958954 5.6199083 6.5477724 ... 1.4242218 3.6372695\n",
" 4.750148 ]\n",
" [ 4.059501 3.8315465 7.3975286 ... 5.0964937 4.767578\n",
" 2.0836473 ]\n",
" ...\n",
" [-0.3680715 1.0862777 4.403977 ... 3.2975245 3.878313\n",
" 1.355243 ]\n",
" [ 7.1270704 3.7269826 4.1089396 ... 5.3976045 2.4421794\n",
" 2.3658426 ]\n",
" [ 6.693168 7.6807394 5.613674 ... 0.89391357 3.4751601\n",
" 3.142672 ]]]\n"
]
},
{
"data": {
"text/plain": [
"<matplotlib.image.AxesImage at 0x7ff956514f70>"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAaEAAAGfCAYAAAD22G0fAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAA9hAAAPYQGoP6dpAAAfCklEQVR4nO3df2zU9eHH8ddh4aR4PUHlrhcrFj1UfilQVql+LU6pI2pkJE4FHcZkARGlYwtaSEYxegVMCC5gl7IFIY71H8WxTKVd1LKlcSLYWMEUHFU74dbp6t2h7JrB+/uH8glny49r7+7duz4fySex78/nru/3m4/3yrv3/rzfLmOMEQAAFgyxXQEAwOBFCAEArCGEAADWEEIAAGsIIQCANYQQAMAaQggAYA0hBACwhhACAFhDCAEArMlL1xu/8MILeu6553T06FFNmDBBGzZs0P/93/+d83UnT57UkSNH5PF45HK50lU9AECaGGMUi8UUCAQ0ZMg5xjomDerr683QoUPN5s2bzYEDB8zSpUvNiBEjzKeffnrO13Z0dBhJHBwcHBxZfnR0dJzzM99lTOoXMC0tLdXUqVNVW1vrlF133XWaM2eOampqzvraSCSiiy++WJ/uu1IFF/HXQgDINtFjJzVm6if66quv5PV6z3ptyv8c193drb179+qpp55KKK+oqFBzc3OP6+PxuOLxuPNzLBaTJBVcNEQFHkIIALLV+XylkvJP+S+++EInTpyQz+dLKPf5fAqHwz2ur6mpkdfrdY6ioqJUVwkAMEClbajx/QQ0xvSailVVVYpEIs7R0dGRrioBAAaYlP857tJLL9UFF1zQY9TT2dnZY3QkSW63W263O9XVAABkgZSPhIYNG6Zp06apsbExobyxsVFlZWWp/nUAgCyWlueEli1bpoceekglJSWaMWOG6urq9Nlnn2nRokXp+HUAgCyVlhC677779OWXX+rpp5/W0aNHNXHiRL322msaM2ZMOn4dACBLpeU5of6IRqPyer3qOjiWKdoAkIWisZMaOe6wIpGICgoKznotn/IAAGvStnZcrrgjcEOPsl1HWjJej8Git/6W6PN04h7PLO7xRIyEAADWEEIAAGsIIQCANYQQAMAaJiZ850xfFiI96O/Mo88zi/4+P4yEAADWEEIAAGsIIQCANYQQAMAaQggAYA0LmAIAUooFTAEAWYEQAgBYQwgBAKwhhAAA1hBCAABrCCEAgDWEEADAGkIIAGANIQQAsIYQAgBYQwgBAKxhZ9XvnGkXxF1HWvp1LXqXbB/S5/3HPZ5Z3OPnh5EQAMAaQggAYA0hBACwhhACAFjDpnYAgJRiUzsAQFYghAAA1hBCAABrCCEAgDWEEADAGpbt6YN0L6/R2/vn+tId55LOPh+sy6WcDfd45g3We5yREADAGkIIAGANIQQAsIYQAgBYQwgBAKxh7TgAQEqxdhwAICsQQgAAawghAIA1hBAAwBpCCABgTdIhtHv3bt19990KBAJyuVx69dVXE84bY1RdXa1AIKDhw4dr5syZ2r9/f6rqCwDIIUmH0Ndff63rr79eGzdu7PX8unXrtH79em3cuFF79uyR3+/XrFmzFIvF+l1ZAEBuSXoV7dmzZ2v27Nm9njPGaMOGDVq5cqXmzp0rSdq6dat8Pp+2b9+uhQsX9nhNPB5XPB53fo5Go8lWCQCQpVL6nVB7e7vC4bAqKiqcMrfbrfLycjU3N/f6mpqaGnm9XucoKipKZZUAAANYSkMoHA5Lknw+X0K5z+dzzn1fVVWVIpGIc3R0dKSySgCAASwtm9q5XK6En40xPcpOcbvdcrvd6agGAGCAS2kI+f1+Sd+OiAoLC53yzs7OHqOjbDaQdynMVfR5ZtHfmTdY+zylf44rLi6W3+9XY2OjU9bd3a2mpiaVlZWl8lcBAHJA0iOhY8eO6eOPP3Z+bm9vV0tLi0aNGqUrrrhClZWVCoVCCgaDCgaDCoVCys/P17x581JacQBA9ks6hN577z3deuutzs/Lli2TJC1YsEAvvviili9fruPHj2vx4sXq6upSaWmpGhoa5PF4UldrAEBOSDqEZs6cqbNtQeRyuVRdXa3q6ur+1AsAMAjk7KZ2g/VLPpvo88yivzOPPj8/bGoHAMgKhBAAwBpCCABgDSEEALCGEAIAWDNgZ8fN1D3Kcw1NOMcMlPRgxk/m0eeZRX9nFrPjAABZgRACAFhDCAEArCGEAADWEEIAAGsG7Oy4/q4dBwCwg9lxAICsQAgBAKwhhAAA1hBCAABrCCEAgDWEEADAGkIIAGANIQQAsIYQAgBYQwgBAKwhhAAA1hBCAABrCCEAgDWEEADAGkIIAGANIQQAsIYQAgBYQwgBAKwhhAAA1hBCAABrCCEAgDWEEADAmjzbFRjo7gjc0KNs15GWjNdjsOitvyX6PJ24xzOLezwRIyEAgDWEEADAGkIIAGANIQQAsIYQAgBYw+y475xpxgrSg/7OPPo8s+jv88NICABgDSEEALCGEAIAWEMIAQCsSSqEampqNH36dHk8Ho0ePVpz5sxRW1tbwjXGGFVXVysQCGj48OGaOXOm9u/fn9JKAwByg8sYY8734h/96Ee6//77NX36dP3vf//TypUr1draqgMHDmjEiBGSpLVr1+rZZ5/Viy++qHHjxumZZ57R7t271dbWJo/Hc87fEY1G5fV61XVwrAo8DNQAINtEYyc1ctxhRSIRFRQUnPXapELo+/79739r9OjRampq0i233CJjjAKBgCorK/Xkk09KkuLxuHw+n9auXauFCxeeu/KEEABktWRCqF+f8pFIRJI0atQoSVJ7e7vC4bAqKiqca9xut8rLy9Xc3Nzre8TjcUWj0YQDADA49DmEjDFatmyZbr75Zk2cOFGSFA6HJUk+ny/hWp/P55z7vpqaGnm9XucoKirqa5UAAFmmzyG0ZMkSffDBB/rDH/7Q45zL5Ur42RjTo+yUqqoqRSIR5+jo6OhrlQAAWaZPy/Y8/vjj2rlzp3bv3q3LL7/cKff7/ZK+HREVFhY65Z2dnT1GR6e43W653e6+VAMAkOWSGgkZY7RkyRK98sorevPNN1VcXJxwvri4WH6/X42NjU5Zd3e3mpqaVFZWlpoaAwByRlIjoccee0zbt2/XH//4R3k8Hud7Hq/Xq+HDh8vlcqmyslKhUEjBYFDBYFChUEj5+fmaN29eWhoAAMheSYVQbW2tJGnmzJkJ5Vu2bNHDDz8sSVq+fLmOHz+uxYsXq6urS6WlpWpoaDivZ4QAAINLv54TSgeeEwKA7Jax54QAAOgPNrX7zpk2oNp1pKVf16J3yfYhfd5/3OOZxT1+fhgJAQCsIYQAANYQQgAAawghAIA1hBAAwBqeEwIApBTPCQEAsgIhBACwhhACAFhDCAEArCGEAADWsHZcH6R7jafe3j/X1486l3T2+WBds+tsuMczb7De44yEAADWEEIAAGsIIQCANYQQAMAaQggAYA1rxwEAUoq14wAAWYEQAgBYQwgBAKwhhAAA1hBCAABrCCEAgDWEEADAG
kIIAGANIQQAsIYQAgBYw6Z2fTCQN4jKVfR5ZtHfmTdY+5yREADAGkIIAGANIQQAsIYQAgBYQwgBAKzJ2U3tButME5vo88yivzOPPj8/bGoHAMgKhBAAwBpCCABgDSEEALCGEAIAWDNgZ8fN1D3Kcw1NOMcMlPRgxk/m0eeZRX9nFrPjAABZgRACAFhDCAEArCGEAADWJDUxoba2VrW1tfrkk08kSRMmTNCvfvUrzZ49W5JkjNHq1atVV1enrq4ulZaWatOmTZowYcJ5VyhVy/YAAOxI28SEyy+/XGvWrNF7772n9957Tz/84Q91zz33aP/+/ZKkdevWaf369dq4caP27Nkjv9+vWbNmKRaL9b01AICc1e8p2qNGjdJzzz2nRx55RIFAQJWVlXryySclSfF4XD6fT2vXrtXChQvP6/0YCQFAdsvIFO0TJ06ovr5eX3/9tWbMmKH29naFw2FVVFQ417jdbpWXl6u5ufmM7xOPxxWNRhMOAMDgkHQItba26qKLLpLb7daiRYu0Y8cOjR8
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"prediction = model.predict(tf.expand_dims(heightmap, axis=0))\n",
"prediction_binarised = tf.cast(tf.math.greater(prediction, 0.5), dtype=tf.int32)\n",
"print(prediction_binarised)\n",
"print(tf.math.reduce_sum(prediction_binarised), (64*64) - tf.math.reduce_sum(prediction_binarised))\n",
"print(prediction_binarised.shape)\n",
"print(prediction)\n",
"plt.imshow(tf.squeeze(prediction_binarised))"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.6"
},
"vscode": {
"interpreter": {
"hash": "e7370f93d1d0cde622a1f8e1c04877d8463912d04d973331ad4851f04de6915a"
}
}
},
"nbformat": 4,
"nbformat_minor": 5
}