practice/python/watermark.ipynb

{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import os, shutil\n",
"from tensorflow import keras\n",
"from tensorflow.keras import layers\n",
"from tensorflow.keras import models\n",
"from keras.layers import Input, Dense, Conv2D, MaxPool2D, UpSampling2D, Dropout\n",
"from keras.callbacks import EarlyStopping, ModelCheckpoint, ReduceLROnPlateau\n",
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"from PIL import Image, ImageFile\n",
"from PIL import ImageFont\n",
"from PIL import ImageDraw, ImageOps\n",
"import string"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Generate input images with watermarks"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"# list printable characters\n",
"printable = list(string.printable)[0:84]"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"def gen_watermark(img_str, train=1):\n",
" # open the image as jpegs\n",
" img = Image.open(\"/home/tensorflow/Downloads/VOCdevkit/RAWS/{}\".format(img_str))\n",
" \n",
" # generate random font sizes 30 - 100\n",
" np.random.seed()\n",
" font_size = np.random.randint(low = 30, high = 100)\n",
" \n",
" # create the watermark font\n",
" font = ImageFont.truetype(\"/home/tensorflow/Downloads/VOCdevkit/abel-regular.ttf\", font_size)\n",
" \n",
" # generate image to hold the watermark\n",
" img_temp = Image.new('L', (350,350))\n",
" \n",
" # create the watermark text\n",
" np.random.seed()\n",
" text_str = np.random.choice(printable, np.random.randint(low=5, high=10))\n",
" text_str = \"\".join(text_str)\n",
" \n",
" # draw on the temporary image\n",
" draw_temp = ImageDraw.Draw(img_temp)\n",
" \n",
" # generate random opacity level\n",
" np.random.seed()\n",
" opacity = np.random.randint(low=90, high=120)\n",
" \n",
" # insert text onto the temporary image\n",
" draw_temp.text((0,0), text_str, font=font, fill=opacity)\n",
" \n",
" # generate random rotation angle\n",
" np.random.seed()\n",
" rot_int = np.random.randint(low=0, high=40)\n",
" \n",
" # rotate the text\n",
" rotated_text = img_temp.rotate(rot_int, expand=1)\n",
" \n",
" # default text color = white\n",
" col_1 = (255,255,255)\n",
" col_2 = (255,255,255)\n",
" \n",
" # generate random location to put the text\n",
" np.random.seed()\n",
" rand_loc = tuple(np.random.randint(low=10,high=100,size=(2,)))\n",
" \n",
" # merge the temporary image\n",
" img.paste(ImageOps.colorize(rotated_text, col_1, col_2), rand_loc, rotated_text)\n",
" \n",
" # save the watermarked image\n",
" img.save(\"/home/tensorflow/Downloads/VOCdevkit/WATS/{}\".format(img_str))"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"# Prepare the directory\n",
"\n",
"wat_path = \"/home/tensorflow/Downloads/VOCdevkit/WATS\"\n",
" \n",
"if not os.path.exists(wat_path):\n",
" os.mkdir(wat_path)\n",
"else:\n",
" shutil.rmtree(wat_path)"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"# Or else it will crash\n",
"ImageFile.LOAD_TRUNCATED_IMAGES = True"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"# Generate watermarked images\n",
"\n",
"img_list = os.listdir(\"/home/tensorflow/Downloads/VOCdevkit/RAWS\")\n",
"\n",
"for _, img in enumerate(img_list):\n",
" gen_watermark(img)"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"scrolled": true
},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABEwAAAVyCAYAAAAbBmvTAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Z1A+gAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9Z7Bt6XnY+f3flddeO4ezT043584JaKAbmSAAgpQokRStsarGKs+URh7LnpI/+IvtDw5jT43L9tSUZiwPRyIpCRRIAgwAiQY6N7pv39s3x3PuyWHnuHLyh9MAIYigqBliCJD7V3Xq3r3PXmGH++5nPfd9n0ekacrExMTExMTExMTExMTExMTExJ+Q/rJPYGJiYmJiYmJiYmJiYmJiYuInzSRhMjExMTExMTExMTExMTExMfFDJgmTiYmJiYmJiYmJiYmJiYmJiR8ySZhMTExMTExMTExMTExMTExM/JBJwmRiYmJiYmJiYmJiYmJiYmLih0wSJhMTExMTExMTExMTExMTExM/5MeSMBFCfE4IcV8IsSaE+N/9OI4xMTExMTExMTExMTExMTEx8eMi0jT9i92hEDLwAPg0sAtcBn45TdM7f6EHmpiYmJiYmJiYmJiYmJiYmPgx+XHMMHkGWEvT9FGapgHwL4Cf+zEcZ2JiYmJiYmJiYmJiYmJiYuLH4seRMJkDdn7g9u6H901MTExMTExMTExMTExMTEz8VFD+sg4shPj7wN8H0HTtyWK1gBc4yGiIxCSbs5BVQX/YJYl9UpESJiGRlyAhQKRAiiTLKLJEKhKSBJIIhPiTZUYpAlWVCYKQjCUQMsSRBClESYwkp6SkCCGRxKBIR/uOo5QkPdo+9SFJBYaWwcxkCEMXN7AhhTRViOMEVRIYGQWhJrijhCRMsDIGg5GDpihESYyQQJYlkiRFHD0LMlkJzUxJk5QwFMjK0VOLE/C9CCOjEBOQxgKJlESASCWC+OgcRZqQzRh4dkoYxQR+jCQLZFnCyEikUoLvxEiSjCylaLpEHKk4doAkp6iqREqCoqYgSQRBgqoI0hTSOCWTVXGdhCROCIKEVAhIADlFkgRpnCJkEAJEIpHJaMRxCnGK70dkLA3XiwmjECEESQypAFlKQQjSBAQSSZIghIBUkMgJpi4dvS+SDGly9Pc0JUZAIgEpaSRQVYkwCUkTSKIUFIGspEefCQSkEYoiARJRnJLGAtIUVRWEQULy4TmkpJCCpICUyoBEHIbw4XlKsoyIUmQNkhTiEIQkSNMEIVIkIY4+KwlAAhJounT0HOMIRVURQsb3QhQtRRYKpEef0DhJURSJJJKRJQFSSJIc5TKTJCUlQaCQJEfvU5ImyJKGSE0kFLy4j6JKGHKJXCZHZ9gmiUMEkCQhkiIhSRIkKrlMGUmVCEOPNAbSo+cgKTKqrBHFAZqqY2XMo9ePFBAAxElCEiUoioSQ/vrWi7565Uo7TdPaX/Z5TExMTExM/CT6t2P8Er4fIEsyaQq5bBZFUej3+yRpTJJGJGmM53tIQuIo4k0/jDdShDiKh5KED+MWgRACIUBWJILAI5MxkBWJKAxJOYqrhXS0LxAkcYIsH8WPcRIfXTMkkMRHsZumH8U+Qejhez4gkcTpUYwkg2nqyELG8wKCIMSysoxGYxRZIY5jJPkorhcCkjRGkgSZjI5mCNI0JoojZFmGFJI0xXUdMhnzKM5LY9L06JomjmOiKCVNU9IUTN3E90PCMCEMYoQQKIqCYWhATBD4IECWZHRNJwwjPD8AUjRdJSVGUWSEEIRhjCQrCCTiKCKXtbBtlySBMIxJkqPYHpEixFG8Kcnff0+xTIswiBACPM/Dsixc1yOKEkDie1UehJCQJEGcxMDRc/leLIkAzVARAlJixIehpkAQRzFCHMXscRyjyApJmpIkCXESIwmBrEhHr7Xgw/M82m2aQhRFgEBVNKIoIk0FaXJ0UmmSIisypCmSJBGGAbIsHb3ukkSSxGia/uHrHyPL8ofvDSiKQpImpMSkaYyQJHRNQQhBFEdoqoqQBK7roesaII72KwRJHB9tnyQASNLRMY/ej+j7n88UUBWVOEqRZR04ireDwEOSJAzDJJPJMugPSeKj66I4DlEUCUlSIJXIZnPIkkIQRh9+zji6JJElFEUmjmJUVSGTySAExHGMLMkgvhfjx8iKgiSO/n1972o6PboU+mvhR8X4P46EyR6w8AO35z+879+Qpuk/Af4JwMLqXPqr/8nn2WmNGTUsXn7sizz1kVk6zi4bh4fcvtpFVQfsjt7ixnd3yBcUklhGz1vUFnLIQZ9Ovw+RRjiK6NoRpqyAgKXzFWaXDQ63D7GmI5y+gpUodByXwNBx2w6pJ0ABEceEbsp0vUAUR9ipRyzFeHszSI7OseUKVrnKdGGK797/I7Z2mhSUWY7PnGc8TAlFj+Kqx8PLCfX8MfZ3rjK7HKEbLpEusP2IfrNPPb+ELCc88dElclMdVDEmXzYRqY+uSYwCCL2ATitk4USB3f0O/UOZNB2i5CBA4PkwaIcUdZ3D9RAFg/rxafrdFloaoWlFwsgnEjGGpFOrWsiBR2W+RKO/TzFr0XNT9nZaOIlE1nLJlhR2dgRFUyJ0VNZvDzl5OmX2eJX7byaEik+j55HLJVBUUOSI5y/OokpjYtmnlMuhxDU27/U491KdB1c7LCzbzKyc5w9+fZsPruyjaRFoCogIQ5HwidCEQjYnY/syqiIz9jyKMxqxSInTmGxJRo4DUi1GlgLisIQaRki6ib3v4nkCP0yQ9RQnjqkXEkRWJZUVBB4ZQydyQ8IgwCoWSZGwEw9nIAjGKcnAR9YUolTCCFXUXEzqS9ijhFw1Ic5E6LqgVlSJE5PRXoxsqIwDm2xGxe5ppLJNva4QqQHCK+JGfWbmdYScp9NoUCjkiQKPXDqDa+3hujHztRpyVqLT7BOEMFOroCsSY9shnyuDKuO7NgfNEalTQ9VSQr9DsbxIIV9AU6aYz5zgG6/+t+ROSHzquc+QZZbbt26g62dZXixz7dE36Q96KOSZKs/x2Wd/ETvqc/XBNynLH+Hlj79EY79BIIbEoUylnOPRzk0+/9IvMhgMyZga+WwW8eFgjjj6EkiF9P3vviOCo0za0SD70+JPvtwh/Td+8aO30SSx9eM8p4mJiYmJiZ9mPxjjLx5bSv/G/+Lv0OsMsAcuzz71LC88+zyDcZ8bN69y0NwmFC6Ptu9xb/02pmlBpGKaCrPzFYQU0Ok0iaIE14nwvJgkktA0g5WVJWpTFQb2AboV4AcjFFUwGg7QVJNOe4RnpxiaBYkgCHwq1RJJEuJ6AapW5HDHRcJidmaO6ekyhXyGm9fvsLPZIZ+rMz87g+u1iZIhlWKVzbUdarU6O5t7LGg6YRSRyeg47oCxM0DIDoVilmeefZxMNkWzRmSLOmEQYmWztNttdF2n1WwyOzfL4eE+/X4fIY7+4zOXK2DbKe3OEFlS6Xdi5KTA4uIy7dYYEolcNoMfjEnFkFxeRTdkpDShVqkwGHoYRpYg9nm0vU0q+qimQjaXo
9FwkYWJlFa4e+sup8/lmK2f4e6dLcZjH9f30U0Nw0rQjJhTZxdRdUgIqZZqqLHFwzsdnn3uOPcf3iOfz3Px4pP8xq//Djdv7qEoGayMwXjskLEMoshHVWU0wwBJwvV8Uk2iWM2QpA5eMGRmpkLoB6QRZM0Crh0iEgVFMXBsl8NmE90wQRJ4gUOpkkPPSISRg6wlZLMGQRARhimWmUWSVKJQ0GkNiEOB7waQClRVQ5YUkjhAIDEc9JifreO4YwqFAsViHllV2NzcIGNlCcMIM5NlbHskCZQrJYTso+gJ/X6HxaV5hBTTah1SKGQJowBFUYnCCCFJlIoVctks3UYT17FZWl4kDENIU8yMSRiEqKrO9vYBaSJDKhPHKaX8FOViHdPUUPWEd777OjPTMzz/9Mcw9Br37myiigwnT53k+o33GQ3bSEKhUp7hEy99BtsOuHd7jUK+xjNPv0DrsAOArAg0RaLbbfHRj7zAcDjAylgU8iWCKEBVVHw/RFVVhBDIskISJx8m0H564vr/sVTxp8f
"text/plain": [
"<Figure size 1800x1800 with 6 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"# Show some images\n",
"\n",
"raw_str = \"/home/tensorflow/Downloads/VOCdevkit/RAWS/\"\n",
"wat_str = \"/home/tensorflow/Downloads/VOCdevkit/WATS/\"\n",
"\n",
"dir_raw = os.listdir(\"/home/tensorflow/Downloads/VOCdevkit/RAWS\")\n",
"dir_wat = os.listdir(\"/home/tensorflow/Downloads/VOCdevkit/WATS\")\n",
"\n",
"f, axs = plt.subplots(3,2, figsize=(25, 25), sharey=True, sharex = True)\n",
"axs = axs.ravel()\n",
"\n",
"img_raw = Image.open(raw_str + dir_raw[4])\n",
"img_wat = Image.open(wat_str + dir_wat[4])\n",
"axs[0].imshow(img_raw)\n",
"axs[1].imshow(img_wat)\n",
"\n",
"img_raw = Image.open(raw_str + dir_raw[5])\n",
"img_wat = Image.open(wat_str + dir_wat[5])\n",
"axs[2].imshow(img_raw)\n",
"axs[3].imshow(img_wat)\n",
"\n",
"img_raw = Image.open(raw_str + dir_raw[10])\n",
"img_wat = Image.open(wat_str + dir_wat[10])\n",
"axs[4].imshow(img_raw)\n",
"axs[5].imshow(img_wat)\n",
"\n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Network Code"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Transform inputs and labels"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"train_wat_path = \"/home/tensorflow/Downloads/VOCdevkit/train/wat_imgs\"\n",
"train_raw_path = \"/home/tensorflow/Downloads/VOCdevkit/train/raw_imgs\"\n",
"val_wat_path = \"/home/tensorflow/Downloads/VOCdevkit/val/wat_imgs\"\n",
"val_raw_path = \"/home/tensorflow/Downloads/VOCdevkit/val/raw_imgs\""
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"# Seperate training and validation inputs/labels\n",
" \n",
"if not os.path.exists(train_raw_path):\n",
" os.mkdir(train_raw_path)\n",
" \n",
"if not os.path.exists(val_raw_path):\n",
" os.mkdir(val_raw_path)\n",
" \n",
"if not os.path.exists(train_wat_path):\n",
" os.mkdir(train_wat_path)\n",
"else:\n",
" shutil.rmtree(train_wat_path)\n",
" \n",
"if not os.path.exists(val_wat_path):\n",
" os.mkdir(val_wat_path)\n",
"else:\n",
" shutil.rmtree(val_wat_path)"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
"# Copy raw images over\n",
"\n",
"raw_list = os.listdir(\"/home/tensorflow/Downloads/VOCdevkit/RAWS\")\n",
"\n",
"for ind, img_str in enumerate(raw_list):\n",
" img = Image.open(\"/home/tensorflow/Downloads/VOCdevkit/RAWS/{}\".format(img_str))\n",
" if ind < 4900:\n",
" img.save(\"/home/tensorflow/Downloads/VOCdevkit/train/raw_imgs/{}\".format(img_str))\n",
" else:\n",
" img.save(\"/home/tensorflow/Downloads/VOCdevkit/val/raw_imgs/{}\".format(img_str))"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [],
"source": [
"# Copy watermarked images over\n",
"\n",
"wat_list = os.listdir(\"/home/tensorflow/Downloads/VOCdevkit/WATS\")\n",
"\n",
"for ind, img_str in enumerate(wat_list):\n",
" img = Image.open(\"/home/tensorflow/Downloads/VOCdevkit/WATS/{}\".format(img_str))\n",
" if ind < 4900:\n",
" img.save(\"/home/tensorflow/Downloads/VOCdevkit/train/wat_imgs/{}\".format(img_str))\n",
" else:\n",
" img.save(\"/home/tensorflow/Downloads/VOCdevkit/val/wat_imgs/{}\".format(img_str))"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [],
"source": [
"# Standardize and normalize\n",
"\n",
"x_train = np.ndarray(shape=(4900, 128, 128, 3),\n",
" dtype=np.float32)\n",
"y_train = np.ndarray(shape=(4900, 128, 128, 3),\n",
" dtype=np.float32)\n",
"x_val = np.ndarray(shape=(196, 128, 128, 3),\n",
" dtype=np.float32)\n",
"y_val = np.ndarray(shape=(196, 128, 128, 3),\n",
" dtype=np.float32)\n",
"\n",
"i = 0\n",
"for file in os.listdir(train_wat_path):\n",
" img = Image.open(\"/home/tensorflow/Downloads/VOCdevkit/train/wat_imgs/{}\".format(file))\n",
" img = np.array(img.resize((128,128))) / 255\n",
" img = img.astype('float32')\n",
" x_train[i] = img\n",
" i += 1\n",
" \n",
"i = 0\n",
"for file in os.listdir(train_raw_path):\n",
" img = Image.open(\"/home/tensorflow/Downloads/VOCdevkit/train/raw_imgs/{}\".format(file))\n",
" img = np.array(img.resize((128,128))) / 255\n",
" img = img.astype('float32')\n",
" y_train[i] = img\n",
" i += 1\n",
" \n",
"i = 0\n",
"for file in os.listdir(val_wat_path):\n",
" img = Image.open(\"/home/tensorflow/Downloads/VOCdevkit/val/wat_imgs/{}\".format(file))\n",
" img = np.array(img.resize((128,128))) / 255\n",
" img = img.astype('float32')\n",
" x_val[i] = img\n",
" i += 1\n",
" \n",
"i = 0\n",
"for file in os.listdir(val_raw_path):\n",
" img = Image.open(\"/home/tensorflow/Downloads/VOCdevkit/val/raw_imgs/{}\".format(file))\n",
" img = np.array(img.resize((128,128))) / 255\n",
" img = img.astype('float32')\n",
" y_val[i] = img\n",
" i += 1"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"<matplotlib.image.AxesImage at 0x7f0c99cd56d0>"
]
},
"execution_count": 13,
"metadata": {},
"output_type": "execute_result"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABZsAAAKbCAYAAACTqVTnAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Z1A+gAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9eZxlZ33dCz97OPOpqlNzdVV1d1XP3Wq11JoHhBAICTBgbDDgIbFj5zrTjeP35iZ2nORzc+Pcz5vk5o0xcWKbxHEwYIMZDBKDQGDQPLWkVqvV81DdXfN46szDHt4/gMT7rIV80NFQ6l7ff+As7fHZz7DOrtO/ZYVhaIQQQgghhBBCCCGEEEKITrDf6AsQQgghhBBCCCGEEEII8eZHL5uFEEIIIYQQQgghhBBCdIxeNgshhBBCCCGEEEIIIYToGL1sFkIIIYQQQgghhBBCCNExetkshBBCCCGEEEIIIYQQomP0slkIIYQQQgghhBBCCCFEx7xmL5sty3qXZVknLcs6Y1nWb75W5xFCCCGEEEIIIYQQQgjxxmOFYfjqH9SyHGPMKWPMO40x08aYZ4wxPxuG4bFX/WRCCCGEEEIIIYQQQggh3nDc1+i4NxljzoRheM4YYyzL+qwx5ieNMfRlc1+uJxwfGYlotsV+dG2BYsfwFgILX6AX18ugpZMp0OKpBGilOu6bX1/C8wYeaM0GXku91ox8TsRjsI1t4/17gQ8a+1NBzMU2iSfwHCE5ngnwiM0m3lcqlgath1yMZ/Aci806btfA7eJJvGZ2xxbpK4EJQLMd3I40s7Et7GdhiMcLfNZWeEDyOEwsET2eFeI5W/uJMcbYNh7MIv2d/Q0pSfq24+KGnodaqdTA7ertndd2UHRdbKeern7QErEkaOulEmiWi2PZJ305JP0nJNtZrGOQ+YdhsVHZ2n9IH+MHY9u1eR1sM3JpbN5yY3Hc0HbwcKztyPFMgH25Wa/iKchFNz2cG5IpfN6xOF6z65Djkbmm6eH1OWS+SJE1IwhwbmB/xG3V/BCvo9HAtvM81Lq6cP61yfOJOahVarie1aoV0Fj/sR02r5L5MsDz+j7bF8/hk/5Dug8MKWP4+PZ9bGeHtEssgXMrmW7p2hx4eDHNevQ+bAsPxuZLdv82mS9Zu7Npi41Rn1yvIesoGwNsXXItHHt+i7+oVium0Wi0OfGJy4nXxeMXiMdPEI+fRh9UJh5/Lb9RPD4eP+bi8eJJMr+QOZd5/Abx+Gnm8cnhfOLxFzx5fHl8efz2DtaJxyfbkQcUkHmlfY+P4+IN8/gx7N+uyzw+madfF48f3c4nc0r7Hj8DGlsz2vf4+CzYvCKPL4//Wnj81+pl85gx5tJf+TxtjLn5R208PjJi7v/EJyJahixAlos3mx7CxasSxwf53QeeBO3gjqvxWq6ZAO3Js8+A9sWv/SFo9coqaDNTNdDOnZyLfN62ZRy2SXfhhLdaLoLGXlgMDAyBtm3nIGgNcjxTwU42Pb0M2sGxg6DdW8dOu2phm/znhfOgzU/nQdu6fRS0wKAhSiTQFFdDnFgzfbhdIokTUpo4x0YT26pSwsG8NI3Pra8fJ4JN26PX54RouqeOzoOWzPaBloiTl2dk4t51YCteWx9e2/wi7vzEoxdAW5zC7bwGtkm2B7fr7+8G7V13/TxoO0f3gPa1Rx4DzR3EsVyo4nn9APtPrYbtl0iQ+ceQVZMYJ8ciXxj96PO2yBfokBhMxyF/JCJ9JSQLkEsu1yGmplJdBy03uhk0O9kDmlfG+c2t45i3ywugzZ5+EbQUWXAX1wqg7dhH5u4t2L/7u7Gt5ubIPD2/CFp3D5q9q/dhf6xWcK7xfWzneiP6BbxEDOHFizjXLi+j9rZ3XAdaKp0FbaQbx9nh47ienTz+PGiujWM50YPjp6sH27hew7bLr2I7dfViJ10v4f2Wl3BMeWT58m2cz/LreTxvTw600W04t2YGSBuQLyMVcm/z56PnTVroVcImHn+tiv0zOYBfstI5bPd0CsdPs4bPbH2FjFvyRS5DvvBcPIVjuSeGa3WxEn1ATzz2CGwjrgxeF4//TeLxt+M6sfW67aA9ceZp0D5/3x+AVqvg3DR7AV+svqoen3he7vFRa5CXdmEV2+7iDN7XDaPo8d9dJR7f4Hz1n5bk8eXx5fFhm1fb49vkeOTFG/f4OCcxj98s4Q8R3Poa7lvBvjx7+ihobOwtruH17dh3ALTxzVtA6yf+c25uBbTXx+NHfVWpjm138QL+EXN5Beffu95xI2ipFF7vcBfx+CcOgXby+AuguTaOlWQOtSz5HiWPL49fKEW/mz/5xKOwzQ95wwICLcv6VcuyDlmWdWg1jxONEEIIIYQQ4s2FPL4QQgghxJXNa/WyecYY81d/Gjf+A+1/EobhJ8IwvCEMwxv6cvjXNCGEEEIIIcSbC3l8IYQQQogrm9eqjMYzxpidlmVNmu+/ZP6oMebnftTGtmWbrmT0nz/5DVJfJ8XqrZB/knHpImgvHj2M2jP4Tw32z10L2teeegC0rh72z3LwJ++JJP6UPZ6I/lPBeh2PlUqzOip4rP7B/aDdeuu9oC2t4T+hCAL8pzCxGPknD038if5cN17fd3vx/mtr+E8I/At4vLU1vJbUAv6Tyr7+HGjZHvynC2tz2AdqFfw3GVYM/ynM5Hb854gjo1iGpFLF57GygP9sxnjknya1/MtLO47/pGt8+27Q0qTAkFfNgxaP4T+tWZnGf75TLZFaZk4OtJ5uvIegH9uuVMJxm8FLMdnuXtA2bcZ/3pohX1IT5J+fLhew1EIzwLZq1vD5pFNdoDnkn9NZ5G9zpOq58Ul9L6+lTherqx6QumUWDhVatInVL/TieA+k0prxSL2negnbc0sfPrOePvwnOCmbPJ9pHCu33/Rh0JJZnENiSTxHOpMDrVTEfzKcCPGON41hP7MOY5zA5Hb8J6k33LQXNFYzzmX/hLKlvtXySh62+d53cU3aUp0E7a677gbNJvXI+lP4z4r33oj/PPrF558F7fFHvwVavjwD2vIy/mqRDG8ztBkngpD8E9JkL/5TL2sI+49fJcXQSF2xwIyB1iS15RJkza02cV0aGsabS5N/fRtr+SeuocFnkXaxb4/6ePz5VfxnbTFSw5KVenQS2CaZXvLPzS28ia4cqXm7E4/nFUh9vP7ovm78DfuHdOIN5nXx+C8eBu3IU1gy6Oo5LEH0deLxWWmAJKkxnEhiGY1X1eMPXAParbe8E7SltcOgBUEetBhZr0p1NBrzPTimv70Fn0V1Fe/Nn5bHl8eXx2/lDfP4pPZ4vYhjhXt8LM+WttE/LE/jPPgW6vFxzMcSzOPjtZTINScM9sdNY9tAe308fvQZrazlYZvvPPgUaFvq6PHf/o53geaQc+ZIyYd9N+Ea9+JhLJX32CNfB21NHl8en3p8Ui+8r8XjP/ujPf5r8rI5DEPPsqz/3RjzTWOMY4z572EYvvRanEsIIYQQQgghhBBCCCHEG89r9ctmE4bh140x+GcTIYQQQgghhBBCCCGEEJcd+
neNQgghhBBCCCGEEEIIITpGL5uFEEIIIYQQQgghhBBCdMxrVkbjxyHwPFNYXI5obhaLavukVH9PBotgl0nwkm+w+PhyBcMUjn31OGgVu0yOh4W2t0+OgpZJYsH0+dmW41n4GCzyaPpzA6Bt3boDtI989GdB+5PPYNH3i6efAy2TxTAAi1xfuobPoraOhfqX1rCg/4033YznzZwHba2ARelTCSysXitiIfjyKl5L7xDum8ticEQpj0EHXV0szIVFR2D1dr+JRe5Tseh5ax6mRKS6cQwsz2ER+S4SDGYcDCNJJzAUxfLw2vwmjiknwHt1bRxTySQWls92YRiAb/D6QlIM/7mXsNS7Rc5Rr2D7eU2MymgEuB1rvsDC+w1DElZAgkLsAPue1RruQQI78EjG1ALsizEXAxd8snfg4z3YhtwDua+wgmPv2skcXkuIoXwWmRv3Td4IWr2O92YlcZxZIY4px8JrTpOQBEP6mSHtMjqB82iDtNUzR06AFoSk7UnootsSGJPL4Vy7aesEaGGI7Xn
"text/plain": [
"<Figure size 1800x1800 with 2 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"# See images after transforming\n",
"\n",
"fig, axs = plt.subplots(1, 2, figsize=(25, 25), sharey=True, sharex = True)\n",
"\n",
"img_1 = x_train[0]\n",
"img_2 = y_train[0]\n",
"\n",
"axs[0].imshow(img_1)\n",
"axs[1].imshow(img_2)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Create model"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [],
"source": [
"class inpaintingModel:\n",
" '''\n",
" Build UNET like model for image inpaining task.\n",
" '''\n",
" def prepare_model(self, input_size=(128,128,3)):\n",
" inputs = keras.layers.Input(input_size)\n",
"\n",
" conv1, pool1 = self.__ConvBlock(32, (3,3), (2,2), 'relu', 'same', inputs)\n",
" pool1 = Dropout(0.25)(pool1)\n",
" conv2, pool2 = self.__ConvBlock(64, (3,3), (2,2), 'relu', 'same', pool1)\n",
" pool2 = Dropout(0.5)(pool2)\n",
" conv3, pool3 = self.__ConvBlock(128, (3,3), (2,2), 'relu', 'same', pool2)\n",
" pool3 = Dropout(0.5)(pool3)\n",
" conv4, pool4 = self.__ConvBlock(256, (3,3), (2,2), 'relu', 'same', pool3)\n",
" pool4 = Dropout(0.5)(pool4)\n",
" \n",
" conv5, up6 = self.__UpConvBlock(512, 256, (3,3), (2,2), (2,2), 'relu', 'same', pool4, conv4)\n",
" up6 = Dropout(0.5)(up6)\n",
" conv6, up7 = self.__UpConvBlock(256, 128, (3,3), (2,2), (2,2), 'relu', 'same', up6, conv3)\n",
" up7 = Dropout(0.5)(up7)\n",
" conv7, up8 = self.__UpConvBlock(128, 64, (3,3), (2,2), (2,2), 'relu', 'same', up7, conv2)\n",
" up8= Dropout(0.5)(up8)\n",
" conv8, up9 = self.__UpConvBlock(64, 32, (3,3), (2,2), (2,2), 'relu', 'same', up8, conv1)\n",
" up9= Dropout(0.25)(up9)\n",
" conv9 = self.__ConvBlock(32, (3,3), (2,2), 'relu', 'same', up9, False)\n",
" \n",
" outputs = keras.layers.Conv2D(3, (3, 3), activation='sigmoid', padding='same')(conv9)\n",
"\n",
" return keras.models.Model(inputs=[inputs], outputs=[outputs]) \n",
"\n",
" def __ConvBlock(self, filters, kernel_size, pool_size, activation, padding, connecting_layer, pool_layer=True):\n",
" conv = keras.layers.Conv2D(filters=filters, kernel_size=kernel_size, activation=activation, padding=padding)(connecting_layer)\n",
" conv = keras.layers.Conv2D(filters=filters, kernel_size=kernel_size, activation=activation, padding=padding)(conv)\n",
" if pool_layer:\n",
" pool = keras.layers.MaxPooling2D(pool_size)(conv)\n",
" return conv, pool\n",
" else:\n",
" return conv\n",
"\n",
" def __UpConvBlock(self, filters, up_filters, kernel_size, up_kernel, up_stride, activation, padding, connecting_layer, shared_layer):\n",
" conv = keras.layers.Conv2D(filters=filters, kernel_size=kernel_size, activation=activation, padding=padding)(connecting_layer)\n",
" conv = keras.layers.Conv2D(filters=filters, kernel_size=kernel_size, activation=activation, padding=padding)(conv)\n",
" up = keras.layers.Conv2DTranspose(filters=up_filters, kernel_size=up_kernel, strides=up_stride, padding=padding)(conv)\n",
" up = keras.layers.concatenate([up, shared_layer], axis=3)\n",
"\n",
" return conv, up"
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Model: \"model\"\n",
"__________________________________________________________________________________________________\n",
"Layer (type) Output Shape Param # Connected to \n",
"==================================================================================================\n",
"input_1 (InputLayer) [(None, 128, 128, 3) 0 \n",
"__________________________________________________________________________________________________\n",
"conv2d (Conv2D) (None, 128, 128, 32) 896 input_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_1 (Conv2D) (None, 128, 128, 32) 9248 conv2d[0][0] \n",
"__________________________________________________________________________________________________\n",
"max_pooling2d (MaxPooling2D) (None, 64, 64, 32) 0 conv2d_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"dropout (Dropout) (None, 64, 64, 32) 0 max_pooling2d[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_2 (Conv2D) (None, 64, 64, 64) 18496 dropout[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_3 (Conv2D) (None, 64, 64, 64) 36928 conv2d_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"max_pooling2d_1 (MaxPooling2D) (None, 32, 32, 64) 0 conv2d_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"dropout_1 (Dropout) (None, 32, 32, 64) 0 max_pooling2d_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_4 (Conv2D) (None, 32, 32, 128) 73856 dropout_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_5 (Conv2D) (None, 32, 32, 128) 147584 conv2d_4[0][0] \n",
"__________________________________________________________________________________________________\n",
"max_pooling2d_2 (MaxPooling2D) (None, 16, 16, 128) 0 conv2d_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"dropout_2 (Dropout) (None, 16, 16, 128) 0 max_pooling2d_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_6 (Conv2D) (None, 16, 16, 256) 295168 dropout_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_7 (Conv2D) (None, 16, 16, 256) 590080 conv2d_6[0][0] \n",
"__________________________________________________________________________________________________\n",
"max_pooling2d_3 (MaxPooling2D) (None, 8, 8, 256) 0 conv2d_7[0][0] \n",
"__________________________________________________________________________________________________\n",
"dropout_3 (Dropout) (None, 8, 8, 256) 0 max_pooling2d_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_8 (Conv2D) (None, 8, 8, 512) 1180160 dropout_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_9 (Conv2D) (None, 8, 8, 512) 2359808 conv2d_8[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_transpose (Conv2DTranspo (None, 16, 16, 256) 524544 conv2d_9[0][0] \n",
"__________________________________________________________________________________________________\n",
"concatenate (Concatenate) (None, 16, 16, 512) 0 conv2d_transpose[0][0] \n",
" conv2d_7[0][0] \n",
"__________________________________________________________________________________________________\n",
"dropout_4 (Dropout) (None, 16, 16, 512) 0 concatenate[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_10 (Conv2D) (None, 16, 16, 256) 1179904 dropout_4[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_11 (Conv2D) (None, 16, 16, 256) 590080 conv2d_10[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_transpose_1 (Conv2DTrans (None, 32, 32, 128) 131200 conv2d_11[0][0] \n",
"__________________________________________________________________________________________________\n",
"concatenate_1 (Concatenate) (None, 32, 32, 256) 0 conv2d_transpose_1[0][0] \n",
" conv2d_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"dropout_5 (Dropout) (None, 32, 32, 256) 0 concatenate_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_12 (Conv2D) (None, 32, 32, 128) 295040 dropout_5[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_13 (Conv2D) (None, 32, 32, 128) 147584 conv2d_12[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_transpose_2 (Conv2DTrans (None, 64, 64, 64) 32832 conv2d_13[0][0] \n",
"__________________________________________________________________________________________________\n",
"concatenate_2 (Concatenate) (None, 64, 64, 128) 0 conv2d_transpose_2[0][0] \n",
" conv2d_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"dropout_6 (Dropout) (None, 64, 64, 128) 0 concatenate_2[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_14 (Conv2D) (None, 64, 64, 64) 73792 dropout_6[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_15 (Conv2D) (None, 64, 64, 64) 36928 conv2d_14[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_transpose_3 (Conv2DTrans (None, 128, 128, 32) 8224 conv2d_15[0][0] \n",
"__________________________________________________________________________________________________\n",
"concatenate_3 (Concatenate) (None, 128, 128, 64) 0 conv2d_transpose_3[0][0] \n",
" conv2d_1[0][0] \n",
"__________________________________________________________________________________________________\n",
"dropout_7 (Dropout) (None, 128, 128, 64) 0 concatenate_3[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_16 (Conv2D) (None, 128, 128, 32) 18464 dropout_7[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_17 (Conv2D) (None, 128, 128, 32) 9248 conv2d_16[0][0] \n",
"__________________________________________________________________________________________________\n",
"conv2d_18 (Conv2D) (None, 128, 128, 3) 867 conv2d_17[0][0] \n",
"==================================================================================================\n",
"Total params: 7,760,931\n",
"Trainable params: 7,760,931\n",
"Non-trainable params: 0\n",
"__________________________________________________________________________________________________\n"
]
}
],
"source": [
"model = inpaintingModel().prepare_model()\n",
"model.compile(loss=\"binary_crossentropy\", optimizer=\"adam\", metrics=[\"accuracy\"])\n",
"model.summary()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Training"
]
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/30\n",
"154/154 [==============================] - 132s 764ms/step - loss: 0.5882 - accuracy: 0.5445 - val_loss: 0.5275 - val_accuracy: 0.8164\n",
"\n",
"Epoch 00001: val_loss improved from inf to 0.52747, saving model to ./keras.model\n",
"INFO:tensorflow:Assets written to: ./keras.model/assets\n",
"Epoch 2/30\n",
"154/154 [==============================] - 113s 735ms/step - loss: 0.5266 - accuracy: 0.7876 - val_loss: 0.5246 - val_accuracy: 0.8419\n",
"\n",
"Epoch 00002: val_loss improved from 0.52747 to 0.52456, saving model to ./keras.model\n",
"INFO:tensorflow:Assets written to: ./keras.model/assets\n",
"Epoch 3/30\n",
"154/154 [==============================] - 114s 739ms/step - loss: 0.5215 - accuracy: 0.8043 - val_loss: 0.5226 - val_accuracy: 0.8693\n",
"\n",
"Epoch 00003: val_loss improved from 0.52456 to 0.52256, saving model to ./keras.model\n",
"INFO:tensorflow:Assets written to: ./keras.model/assets\n",
"Epoch 4/30\n",
"154/154 [==============================] - 114s 738ms/step - loss: 0.5216 - accuracy: 0.8182 - val_loss: 0.5210 - val_accuracy: 0.8521\n",
"\n",
"Epoch 00004: val_loss improved from 0.52256 to 0.52101, saving model to ./keras.model\n",
"INFO:tensorflow:Assets written to: ./keras.model/assets\n",
"Epoch 5/30\n",
"154/154 [==============================] - 114s 740ms/step - loss: 0.5205 - accuracy: 0.8236 - val_loss: 0.5200 - val_accuracy: 0.8616\n",
"\n",
"Epoch 00005: val_loss improved from 0.52101 to 0.52001, saving model to ./keras.model\n",
"INFO:tensorflow:Assets written to: ./keras.model/assets\n",
"Epoch 6/30\n",
"154/154 [==============================] - 114s 742ms/step - loss: 0.5196 - accuracy: 0.8239 - val_loss: 0.5194 - val_accuracy: 0.8534\n",
"\n",
"Epoch 00006: val_loss improved from 0.52001 to 0.51936, saving model to ./keras.model\n",
"INFO:tensorflow:Assets written to: ./keras.model/assets\n",
"Epoch 7/30\n",
"154/154 [==============================] - 114s 744ms/step - loss: 0.5193 - accuracy: 0.8306 - val_loss: 0.5199 - val_accuracy: 0.8710\n",
"\n",
"Epoch 00007: val_loss did not improve from 0.51936\n",
"Epoch 8/30\n",
"154/154 [==============================] - 115s 747ms/step - loss: 0.5190 - accuracy: 0.8328 - val_loss: 0.5202 - val_accuracy: 0.8559\n",
"\n",
"Epoch 00008: val_loss did not improve from 0.51936\n",
"Epoch 9/30\n",
"154/154 [==============================] - 115s 746ms/step - loss: 0.5192 - accuracy: 0.8367 - val_loss: 0.5195 - val_accuracy: 0.8664\n",
"\n",
"Epoch 00009: val_loss did not improve from 0.51936\n",
"Epoch 10/30\n",
"154/154 [==============================] - 115s 748ms/step - loss: 0.5188 - accuracy: 0.8373 - val_loss: 0.5198 - val_accuracy: 0.8769\n",
"\n",
"Epoch 00010: val_loss did not improve from 0.51936\n",
"Epoch 11/30\n",
"154/154 [==============================] - 115s 748ms/step - loss: 0.5163 - accuracy: 0.8358 - val_loss: 0.5190 - val_accuracy: 0.8400\n",
"\n",
"Epoch 00011: val_loss improved from 0.51936 to 0.51905, saving model to ./keras.model\n",
"INFO:tensorflow:Assets written to: ./keras.model/assets\n",
"Epoch 12/30\n",
"154/154 [==============================] - 115s 744ms/step - loss: 0.5186 - accuracy: 0.8335 - val_loss: 0.5193 - val_accuracy: 0.8613\n",
"\n",
"Epoch 00012: val_loss did not improve from 0.51905\n",
"Epoch 13/30\n",
"154/154 [==============================] - 115s 745ms/step - loss: 0.5177 - accuracy: 0.8381 - val_loss: 0.5185 - val_accuracy: 0.8720\n",
"\n",
"Epoch 00013: val_loss improved from 0.51905 to 0.51849, saving model to ./keras.model\n",
"INFO:tensorflow:Assets written to: ./keras.model/assets\n",
"Epoch 14/30\n",
"154/154 [==============================] - 115s 747ms/step - loss: 0.5185 - accuracy: 0.8377 - val_loss: 0.5188 - val_accuracy: 0.8541\n",
"\n",
"Epoch 00014: val_loss did not improve from 0.51849\n",
"Epoch 15/30\n",
"154/154 [==============================] - 115s 747ms/step - loss: 0.5168 - accuracy: 0.8358 - val_loss: 0.5215 - val_accuracy: 0.8176\n",
"\n",
"Epoch 00015: val_loss did not improve from 0.51849\n",
"Epoch 16/30\n",
"154/154 [==============================] - 115s 748ms/step - loss: 0.5181 - accuracy: 0.8273 - val_loss: 0.5177 - val_accuracy: 0.8593\n",
"\n",
"Epoch 00016: val_loss improved from 0.51849 to 0.51772, saving model to ./keras.model\n",
"INFO:tensorflow:Assets written to: ./keras.model/assets\n",
"Epoch 17/30\n",
"154/154 [==============================] - 116s 750ms/step - loss: 0.5159 - accuracy: 0.8350 - val_loss: 0.5185 - val_accuracy: 0.8332\n",
"\n",
"Epoch 00017: val_loss did not improve from 0.51772\n",
"Epoch 18/30\n",
"154/154 [==============================] - 115s 747ms/step - loss: 0.5184 - accuracy: 0.8389 - val_loss: 0.5183 - val_accuracy: 0.8424\n",
"\n",
"Epoch 00018: val_loss did not improve from 0.51772\n",
"Epoch 19/30\n",
"154/154 [==============================] - 115s 746ms/step - loss: 0.5178 - accuracy: 0.8434 - val_loss: 0.5183 - val_accuracy: 0.8688\n",
"\n",
"Epoch 00019: val_loss did not improve from 0.51772\n",
"Epoch 20/30\n",
"154/154 [==============================] - 115s 747ms/step - loss: 0.5170 - accuracy: 0.8327 - val_loss: 0.5182 - val_accuracy: 0.8720\n",
"\n",
"Epoch 00020: val_loss did not improve from 0.51772\n",
"Epoch 21/30\n",
"154/154 [==============================] - 115s 747ms/step - loss: 0.5168 - accuracy: 0.8462 - val_loss: 0.5183 - val_accuracy: 0.8682\n",
"\n",
"Epoch 00021: val_loss did not improve from 0.51772\n",
"\n",
"Epoch 00021: ReduceLROnPlateau reducing learning rate to 0.00010000000474974513.\n",
"Epoch 22/30\n",
"154/154 [==============================] - 115s 749ms/step - loss: 0.5172 - accuracy: 0.8465 - val_loss: 0.5179 - val_accuracy: 0.8762\n",
"\n",
"Epoch 00022: val_loss did not improve from 0.51772\n",
"Epoch 23/30\n",
"154/154 [==============================] - 116s 750ms/step - loss: 0.5179 - accuracy: 0.8504 - val_loss: 0.5180 - val_accuracy: 0.8705\n",
"\n",
"Epoch 00023: val_loss did not improve from 0.51772\n",
"Epoch 24/30\n",
"154/154 [==============================] - 115s 747ms/step - loss: 0.5171 - accuracy: 0.8483 - val_loss: 0.5179 - val_accuracy: 0.8772\n",
"\n",
"Epoch 00024: val_loss did not improve from 0.51772\n",
"Epoch 25/30\n",
"154/154 [==============================] - 115s 748ms/step - loss: 0.5169 - accuracy: 0.8478 - val_loss: 0.5181 - val_accuracy: 0.8749\n",
"\n",
"Epoch 00025: val_loss did not improve from 0.51772\n",
"Epoch 26/30\n",
"154/154 [==============================] - 115s 748ms/step - loss: 0.5177 - accuracy: 0.8507 - val_loss: 0.5179 - val_accuracy: 0.8740\n",
"\n",
"Epoch 00026: val_loss did not improve from 0.51772\n",
"\n",
"Epoch 00026: ReduceLROnPlateau reducing learning rate to 1.0000000474974514e-05.\n",
"Epoch 00026: early stopping\n"
]
}
],
"source": [
"early_stopping = EarlyStopping(patience=10, verbose=1)\n",
"model_checkpoint = ModelCheckpoint(\"./keras.model\", save_best_only=True, verbose=1)\n",
"reduce_lr = ReduceLROnPlateau(factor=0.1, patience=5, min_lr=0.00001, verbose=1)\n",
"\n",
"history = model.fit(x_train, y_train,\n",
" validation_data=(x_val, y_val),\n",
" epochs=30,\n",
" batch_size=32,\n",
" callbacks=[early_stopping, model_checkpoint, reduce_lr])"
]
},
{
"cell_type": "code",
"execution_count": 17,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"[<matplotlib.lines.Line2D at 0x7f0d0cb4ef70>]"
]
},
"execution_count": 17,
"metadata": {},
"output_type": "execute_result"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAA3cAAAEvCAYAAADmcTilAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Z1A+gAAAACXBIWXMAAAsTAAALEwEAmpwYAABi+ElEQVR4nO3dd3jb1b3H8fdXsrxjO7azB0kgQCAhCYQAZe8AbaClZZbRsjq4dNJCB+2Ftre3t4MOOoCyR6BAIawyCpQNCZCQxQiB7CnHdjxlS+f+cX5OFMeO7XjItj6v59Ej6bd0JEzkj8/3nGPOOURERERERKRvC6W6ASIiIiIiItJ5CnciIiIiIiL9gMKdiIiIiIhIP6BwJyIiIiIi0g8o3ImIiIiIiPQDCnciIiIiIiL9QEaqG9ARpaWlbsyYMaluhoiIdLO33nprk3NuUKrb0Vfo+1FEJH3s7DuyT4W7MWPGMHfu3FQ3Q0REupmZLU91G/oSfT+KiKSPnX1HqixTRERERESkH1C4ExERERER6QcU7kRERERERPoBhTsREREREZF+QOFORERERESkH1C4ExERERER6QcU7kRERERERPoBhTsREREREZF+QOFORERERESkH2hXuDOzGWb2vpktNbOrWth/oZltNLN5we3ipH3xpO2zk7aPNbM3gmveZ2aZXfOWWvfaR1Eembe6u19GREREpHdzDhrrU90KkdRxDj56Dt57Aso+hkQi1S3qEhltHWBmYeAG4HhgFTDHzGY75xY3O/Q+59zlLVyi1jk3pYXt/wv8zjk3y8z+ClwE/KVDre+gB95axevLopw6ZUR3voyIiIhI71WxCv5xIVRvhMvnQjiS6haJeBWrYfMnMPpgCIW75zUSCVjyCLz4a1i/cNv2SC4M2gsGTYDBwW3Q3lA4Esy6py3doM1wB0wHljrnlgGY2SzgVKB5uGs3MzPgGOCcYNPtwE/p5nBXmBOhvCbWnS8hIiIi0nstfRYevATqKyHRCEv/DXvNSHWrJN3FauDVP8DL10NjLRSNhmlfhqnnQV5p17xGIg4LH4IX/w82vQ8l4+G0v0LJ7rBhCWx8DzYs9r158+/Zdl5WgQ99gycEwW9vf58/uPsCaCe0J9yNAFYmPV8FHNTCcaeb2RHAB8C3nHNN52Sb2VygEfilc+5hoAQod841Jl2z27vTCnMiVMfiNMQTRMIabigiIiJpIhGH//wK/vO/MHgf+NITcNspMP9ehTtJHedg0T/hmWugYiXs+1nYcwa8cxc8+1N4/hd+24EXw8gDd60HLd4A794PL/0Gyj7ywezzt8A+p20LZ6Omb39OTVkQ9pZsC37vPQ5v37H9cVmFkF0IOYWQXRQ8LgoeJz9P2l80GjJzO/4+2qk94a49HgXudc7Vm9ll+J64Y4J9uznnVpvZOOA5M1sAVLT3wmZ2KXApwOjRozvVyKJcX3ZQWdtASX5Wp64lIiIi0idUb4KHLvE9EpPPhlN+63+5nPQFmHsL1G6GnIGpbqWkm7Xvwr+uguWvwJBJ8Nm/wZhD/b7JZ8GG9/zP5/x74d37YOgkH/ImfQEy89q+fmMM5t0NL/8Wylf488+4E/b+NITa6OTJLYbdPuVvyao2wsYlsPF9//9VXTnUVUBtcF+2bNvjhuqWr33OP2DPE9pu/y5qT7hbDYxKej4y2LaVcy6a9PRm4FdJ+1YH98vM7AVgKvAgUGRmGUHv3Q7XTDr/RuBGgGnTprl2tLdVhTk+3FUo3ImIiEg6WPlmML5uE3zm97D/Bdt6PyafDW/81fecTPtySpvZqqqNviyvD4156lec871YucVd99+gehM8dx28dbu/7qevh/3P37HEcfDecPKv4NhrYME/YM7N8Og34Okfw5RzYNpFMGjPHa/fUAfv3Akv/w4qV8Pw/eGk/4M9T+z8e8gf5G9jj2j72MaYL39uCnt1m/3jYZM714Y2tCfczQHGm9lYfAA7i21j5QAws2HOubXB05nAkmD7QKAm6NErBQ4FfuWcc2b2PPB5YBZwAfBIV7yhnWkKd+W1Dd39UiIiIpKuast9+VUqA4lz8Mbf4OkfQsEIuOhpGD5l+2OGTfYlavPu7X3hzjn/y/lz18GBl/hf8qX7xRth/QJY/hqseA1WvA7VG/zP0O5Hw+7HwLijfSjr8LUb4M2b4IVf+l6tg78KR36v7V7jrHyY9iU44EL/x4o5N/sevTf+6kPWgRfDXif76791K7zyB6haB6MOhpl/gN2PTc3/ixmZkFHadWMG2/uybR3gnGs0s8uBp4AwcItzbpGZXQvMdc7NBq4ws5n4cXVlwIXB6ROAv5lZAr/swi+TZtn8PjDLzH4GvAP8vQvfV4sKc7f13ImIiIh0ueWvwh2n+rKyz/whNb9U1lXC7P+CxQ/7X3pP+3PLv0CbwZSz/XinTUuhdI8eb2qL6rfAw1+FJY/CwLHw5t9g9EEw8fRUt6z/idXA6rnbwtyqORCr8vuKRvtAN3gfWPO2/+/xzl2AwfCpPujtfowfC5fRxopmS/8N/7raT2Sy+zEw45d+kpKOMPM/B6MPghN/4Xvn5t4K958PA4b5CYKqN8KYw+H0m/x9Gvb4tmvMnXPuCeCJZtuuSXp8NXB1C+e9Ckxq5ZrL8DNx9pimnrtKhTsREZHUqd8Cn7zsJ07oT798bVkP//gShDP9xAv5Q+CYH/VsG9Yv8r/sli2D4/4bPnXFzscXTTrDT1zx7qyeb2tLNn4A950L0Y/ghJ/B9Mv8xC+zr4Ch+0Hp+FS3sG+rjgY9csFt7XwfijAYsq//o8ToQ/ytsNlch/FGWPOOH7v50XO+Z/WlX0Nmvu9Bawp7xeO2/X8d/Qie+iF88KTffvasrvn/Pn8QHP5tOPQb8OEzvscO8893O6Rz1+7jumpClT5ha1lmjcKdiIhIyjz1Ax9+Tvg5fKqlJXL7oHgjPPBlP7bm4mfgzRv9lOv5Q2D6JT3Thvmz4NFvQtYAuOBRGHNY2+cUDPNldvNnwVE/aHuiie605FH451chIwvOf3jbuKYv3Ap/PdyH1ov/3a0zDfZb9VVw1+mw8nX/PJwJIw7w4X/0IX62yJyinV8jnAGjDvS3o77vf9Y/fgk++rfvmXs/6AcqGu1LITOyfQllRpb/Q8PBwX/brhQK+9leNePrVmkZ7lSWKSIikiKbl8O8e/zaUc9cA8P2a9/kBL3dc9fB8pf9ullDJ8Epv/MTRzxxJeQNgn1P677Xbqjzsw6+dSvsdqif5n3A0PafP+UcePAiP2vh2MO7r52tScTh+Z/7qeqH7w9n3ukXjm5SONKX2d31ef95nnZDz7exr3vzbz7YHfl9H+aHT4VIdueumV0IEz7tb+B7iz96Dj56HhY8ALEtMOVcOPYnMGBI59+DtEtahbtIOEReZljhTkREJFVe+g1YCC5+Fu47z8/keOl/oGhUm6f2Wu89Dq9cDwd8yY9hA9/Lcfrf4c7T/DIEuSXdE5w2f+J7tNbO9yVpx1zjX7sj9joZMgf4Ked7OtzVlPlg+dFzfsbEk/6v5
dCxx3FwxJXw4q982d3UL/ZsO/uy2nJ45fe+HPLoH3Tf6xSP87cDL/aTm9RV9PhkIuInOUkrhTkRlWWKiIikQvkKv+7U/uf7yRTOutv/Enj/eb73qS+KfuRLCYdN8ZNEJMvM9WOMBo6FWefAuoVd+9ofPAV/OwLKPoGz7oHjr+14sGtq576nweJHINbK2lzdYe18uPFIP/7yM7+HmX/ceW/SUVfB2CPh8e90/WfZn712gw9aR/+w514zHFGwS5G0C3cFORH13ImIiKTCS7/1vXaHfcs/Lx3vFy5e847/hd11ajnbntdQC/cH68adcUfLwSS3GM57yE86cdfpviy1sxJx+Pd1cM8ZfnzTZS/A3qd07pqTz/azJC55rPPta4/5s+DvJ/ixil960k9z35ZQGE6/GbKLfG9lXWV3t7Lvq47C63+GfU7zJdDS76VduCvKjWi2TBERkZ5WvtJPoz71vO3HU+19MhzxPZh3F8zt9lWRuo5zPpCuXwCfuwkG7tb6sYUjfcBrrIW7Pud/4d5V1Zv
"text/plain": [
"<Figure size 1080x360 with 2 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"# Visualize the training process\n",
"\n",
"fig, (ax_loss, ax_acc) = plt.subplots(1, 2, figsize=(15,5))\n",
"ax_loss.plot(history.epoch, history.history[\"loss\"], label=\"Train loss\")\n",
"ax_loss.plot(history.epoch, history.history[\"val_loss\"], label=\"Validation loss\")\n",
"ax_acc.plot(history.epoch, history.history[\"accuracy\"], label=\"Train accuracy\")\n",
"ax_acc.plot(history.epoch, history.history[\"val_accuracy\"], label=\"Validation accuracy\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Test"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"model = load_model(\"./keras.model\")"
]
},
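{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Optional helper, not part of the original run: convert a predicted float array\n",
"# in [0, 1] back to an 8-bit PIL image so a reconstruction can be saved to disk.\n",
"# The function name and the save path in the usage comment are only examples.\n",
"\n",
"def to_pil(arr):\n",
" return Image.fromarray((np.clip(arr, 0, 1) * 255).astype('uint8'))\n",
"\n",
"# usage: to_pil(prediction).save(\"/home/tensorflow/Downloads/VOCdevkit/TEST/output.jpg\")"
]
},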
{
"cell_type": "code",
"execution_count": 18,
"metadata": {},
"outputs": [],
"source": [
"# Generate watermark (again)\n",
"\n",
"def watermark(img_str):\n",
" img = Image.open(\"/home/tensorflow/Downloads/VOCdevkit/TEST/RAW/{}\".format(img_str))\n",
" img = img.resize((512,round(img.size[1]*512/img.size[0])))\n",
" \n",
" np.random.seed()\n",
" font_size = np.random.randint(low = 30, high = 100)\n",
" \n",
" font = ImageFont.truetype(\"/home/tensorflow/Downloads/VOCdevkit/abel-regular.ttf\", font_size)\n",
" \n",
" img_temp = Image.new('L', (350,350))\n",
" \n",
" np.random.seed()\n",
" text_str = np.random.choice(printable, np.random.randint(low=5, high=10))\n",
" text_str = \"\".join(text_str)\n",
" \n",
" draw_temp = ImageDraw.Draw(img_temp)\n",
" \n",
" np.random.seed()\n",
" opacity = np.random.randint(low=90, high=120)\n",
" \n",
" # insert text onto the temporary image\n",
" draw_temp.text((0,0), text_str, font=font, fill=opacity)\n",
" \n",
" np.random.seed()\n",
" rot_int = np.random.randint(low=0, high=30)\n",
" \n",
" rotated_text = img_temp.rotate(rot_int, expand=1)\n",
" \n",
" col_1 = (255,255,255)\n",
" col_2 = (255,255,255)\n",
" \n",
" np.random.seed()\n",
" rand_loc = tuple(np.random.randint(low=10,high=100,size=(2,)))\n",
" \n",
" img.paste(ImageOps.colorize(rotated_text, col_1, col_2), rand_loc, rotated_text)\n",
" \n",
" img.save(\"/home/tensorflow/Downloads/VOCdevkit/TEST/WAT/{}\".format(img_str))\n",
" return img"
]
},
{
"cell_type": "code",
"execution_count": 27,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"<matplotlib.image.AxesImage at 0x7f0d0c947ee0>"
]
},
"execution_count": 27,
"metadata": {},
"output_type": "execute_result"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABZsAAAG8CAYAAAC46Hv0AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Z1A+gAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9Saxl25aeh41V7eoUcaK+9X333Vfke/kyk8xkIYopMilStEDJlgwYhgtYaghgxwZk2IYlueWOAbvjomWAgAyoIUCWIcOSbEqGIIgSqyQzmcWrq/tuXUR94pS7WIUbEbHGN8Zec999z4mbGWmOvxPz7L3mmnPNYvxzrtj/P7Ou6yQQCAQCgUAgEAgEAoFAIBAIBAKByyD/465AIBAIBAKBQCAQCAQCgUAgEAgE/uQjXjYHAoFAIBAIBAKBQCAQCAQCgUDg0oiXzYFAIBAIBAKBQCAQCAQCgUAgELg04mVzIBAIBAKBQCAQCAQCgUAgEAgELo142RwIBAKBQCAQCAQCgUAgEAgEAoFLI142BwKBQCAQCAQCgUAgEAgEAoFA4NL40l42Z1n2L2ZZ9pMsy36eZdm//WWVEwgEAoFAIBAIBAKBQCAQCAQCgT9+ZF3XPf+bZlkhIj8VkX9BRD4Skd8Rkf9h13U/fO6FBQKBQCAQCAQCgUAgEAgEAoFA4I8dX9Yvm/+ciPy867pfdF23FJH/QET+lS+prEAgEAgEAoFAIBAIBAKBQCAQCPwxo/yS7vuqiHyIvz8SkT+frEQ16kbjmYiIdG3bf+5/dN1d+t04b5glPv/id9qEzPyRIZkNX2U+33AvSV+XzJUVifI3lzSMTS2wbd22wee3tC9tm75Zf/xuMLmxIOY2ebapgb0mfesLKA+2zvL8VQ1rJWxol9Qzd1t0xcYyt67QH00M+OI3SN95OyGKvygbSIl0Zsx3+DyNLNFrXdcOfr41Mv+nfmDVN6inqagtP8s1f1k84Y3Vci51vXyewemfWhTlqCurqYikx8TzxuXn25Z3SHBjtg1/r+dKfLptnhyfFsOXeFykof7YZ8UWlV4j+i3WBsizdvUX5mxfnS+Wx19/ofF86TpvkXs43K79adp2Sy7bcOskuj+CdUpyayDpNczWq4dLVt+2M3l6w0IpG0watJdUtW7aQljK3jburudpmrm07eqPPTr9SUfw9Z9wvn7hZsA/fXydKn0jD3zBOl/kXcY/9Xy98asL8PWW5aTw5fH15SrmwyT/NPf+kvj6y3rZ/LnIsuxvisjfFBGpxlP5+p/6TRERWR0t+2uW7plbGekfXeKlBwfX2qJxOER00iaukeTLKftyJT1SslzJJy+UfIpyrOmiHLze35DfZRnui5fIvl06dHFe7aJMLV8yW+Y2i5EuMYmffDf8cusihJllwy/ENk3O1Es0ydJjw7ysSk02k8eFtOQLsXbw86xrTP6Cz9kO59mErEvVX9Pt2tDWPMmlmLnGjy3OB84nloHnbGtXZ16o+WtUtOV/Prlx2ia6mf3vW69NzdsUKWxAMp4kX2i7Nk+84O1a++LU/gdcqs4y+LmIX3TzvnpdXS/xuR2bRF4gBqPUZoX8rvxkm2MAFDlimIjk+LupF3qvJhWr5yZ/OdFyrh7siYjIez/5RxK4OMjZZTmR1956wtncVGV+6pjAMryTMGM6dzfIhse+me8b/qNj07xIAvUswNllWeHzCpfbsWvjJNom53/2gstbz/nk9n2kd1hIukwTI3DvbDje+yxpMkjDXMY+RIwxG/61gULOZPxB7Cp8XOQ9hsdAkevnTWfbOR2Lkc6GrxERGWXkNnNjzYPnzP2aAd9xCKfqIiLS4Jlz3pt1Y13c0MoS64EObYMmk3plOdswG/5Dr1ux/1AVN59r/N0kNk/+mWuzNkvk2cS5yf+glOHP3dhMre1MW7rFlRmbZm2U2L67euWMQehDrocWjfZN58rnVCuL4bXJ0vUtYdYg+XD9R4XbN2S8t86NhuuJDTHYjKenMeDw/u8mrw9sxoX4msi34WsXd1MvWswccHkS84P5N9IQuLTAHrmswNfgbvMSWL44X0vn98soX5SvM/D1+n5zmK95L7bT2ruMxK2SDeXXC5yHfBy2ebeBr8ldwvlNvvYxEWu5RBwtMZ7qNb7mIBzeI9s2c3zdJeIQXxFs4Gu2IeM9ubfdmq9Z/Q3vrFL7d7Rz3mLv/CeSry1S39g1fzpHimMNXzcuz1Z7+WFOF7GhsuA4wThdgq/9emE7vua6OD23OJ838TXXGAuMm4b32rC3+qJ8/WW9bP5YRF7H3689/axH13V/S0T+lojIdPegq+snVWkQ7P3EZXVTL0QZRP3LNTtBTGUG77V+IcpJTLbcbTztr4n5v558wVwxw1pJPbApzXJ0XadldK3Pry+HsnzWp5v289vSl28WtBuzDL8Qs/+73A1f776zORL32viyAXm47177L8DEC97E/7Su1zhBfh3HBnPYF3rceEoz/DybXvBniaBqgtDaI3OHa/6fC0kuEFx+s5hkXfiyRNO3rr9q8p+fn/Xpo6OjweJN+X6eZgx2qFdiwfukbqmNH6/ZgOR/KrD9h1+8i4h5njbBnmvPaTauMnzdpk2DmQNYpDAeZRonVq2+3H1SPMeJxqrcvPwvhq8XkbZZoSbD46wrLBUVuf5nmH3JqPOmw39edIL/PBORvNW/T4+fvAhv1gkh8AVAzp5MD7pn/yFgOM/9P0WX+EURF/Vmvq79Jx6DdmoR6Th3sERXL8MlbvOIOWL+gxebz8KM1/R/EEviP4VZZudqmXecY/jPHd52w8tmsw7elrMTv/swTZ4Px7g1JH4IkCXi5VqZif9JXr9rgqdRftsxxvkyeXO+VMbHpv/s4M7Nr9g4nofHuX+1NzLDhC/FsWbwLzHRB3miC7LUWkbsWMsS/NNg/fHG6zdM/tMz/U+9e/fB2dx4mP9bTMdc1j/xrv7JdeYFDMEXJhteTCXWBoZ/N/3y0bz8H16brSkFE7cz/5GQWA6v3Rs3GJcadwrsAeb1yuThf6Qw7pSG/8HZbpw1NV4Wm0fjf1jZuDstER+Fm1e996pObGpFZFpxH/Sk/KO1H98EtsVl+ZpjOvUjLs9d6f+0Iaf4PfJg0t6Xl7u5Zvga3/EHC5wrnq/tc2qeJF+7PXbW8T+eR7INTJlmK7stXyfA7JtiYuonxFv035M/t+HrDRxvXlZrpmYDX5tnYP4UX7vBbb5rGIfJqXhx62pf8YUi2pb7ncKRl7l36qV+s6GdgVR3NI0+5xtvOL4+Vb6+D77uknydLD71f09r79kuy9epF9GGLbLkH6ZCSb72iyfzEPzPnxRfb3hnhTE0wo9VcsSgxYX4WvvZ83WNMWBXf/zPFxt3x+BrcjnvvcIL8sa9l5piLfJs/byJr78sJv8dEfl6lmVvZU+i7/9ARP6TL6msQCAQCAQCgUAgEAgEAoFAIBAI/DHjS/llc9d1dZZl/zMR+f/Kk5+8/d+6rvvBl1FWIBAIBAKBQCAQCAQCgUAgEAgE/vjxpXk2d133t0Xkb
291cduJLM9FRKRpVIbif06fi/703MgwUno0L0FM2WhsQMqbmdIR6w7hpBuUAUOPZuXBKvnOc9slxnqDsp6GP9tn+c6Pin5WxlFB67kuiR2WqKTuu63HrZHVJHxkBzKhWlvKTYwsCg9t2sxLdBI2FCmnAldpI1cwn+O+7XAZIiJFyrrCDOdEn4sYQSP9n7uGtg02T5tqT0o6zBVpKRXvTblHhnE2cX5QK3j55XjmAumM6TWJEiUmwz5Haxbsww4vIonYsFG6ncizaTa0Jo8RIyU+93Frixi2QdFbGH87dhqvsTFosYQfcoM0YtWo5Ai0FWgw72r6TBpJt5UVFfiuGlHCqOmmpeUSJbgi4/FE7z154lXvY2vgctAwk5Cabfgu5b/mvUeNry3L3lCmcX0xFU4RjfMCTni21TWlbnoNfZ2fZKKslrEcawPyn/fVbTB2uUTbKFFN2SUNr1nW+inl52y8hPG58+rc6tApluG9uSlN5too4assIpIbSwnYQNCbj9Zjroq5kBsRPwrEIhRZeCE
"text/plain": [
"<Figure size 1800x1800 with 3 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"fig, axs = plt.subplots(1, 3, figsize=(25, 25), sharey=True, sharex = True)\n",
"\n",
"# The watermarked image\n",
"wat_img = watermark(\"1.jpg\")\n",
"wat_img = np.array(wat_img.resize((128,128))) / 255\n",
"wat_img = wat_img.astype('float32')\n",
"\n",
"# The predicted image\n",
"rec_img = model.predict(wat_img.reshape((1,)+wat_img.shape))\n",
"rec_img = rec_img.reshape(rec_img.shape[1:])\n",
"\n",
"# The original image (reshaped)\n",
"tr_img = Image.open(\"/home/tensorflow/Downloads/VOCdevkit/TEST/RAW/1.jpg\")\n",
"tr_img = np.array(tr_img.resize((128,128))) / 255\n",
"tr_img = tr_img.astype('float32')\n",
"\n",
"axs[0].imshow(rec_img)\n",
"axs[1].imshow(wat_img)\n",
"axs[2].imshow(tr_img)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.5"
}
},
"nbformat": 4,
"nbformat_minor": 4
}