simplifying keras code

commit bc0a66d576
parent 6bd05b0b95
Author: frankknoll
Date:   2022-06-18 17:14:01 +02:00

@@ -238,8 +238,6 @@
"# copied from value of characters variable in captcha_ocr.ipynb or captcha_ocr_trainAndSaveModel.ipynb\n", "# copied from value of characters variable in captcha_ocr.ipynb or captcha_ocr_trainAndSaveModel.ipynb\n",
"characters = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F', 'a', 'b', 'c', 'd', 'e', 'f']\n", "characters = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F', 'a', 'b', 'c', 'd', 'e', 'f']\n",
"\n", "\n",
"batch_size = 1\n",
"\n",
"img_width = 241\n", "img_width = 241\n",
"img_height = 62\n", "img_height = 62\n",
"\n", "\n",
@@ -270,7 +268,9 @@
" # dimension to correspond to the width of the image.\n", " # dimension to correspond to the width of the image.\n",
" img = tf.transpose(img, perm=[1, 0, 2])\n", " img = tf.transpose(img, perm=[1, 0, 2])\n",
" # 7. Return a dict as our model is expecting two inputs\n", " # 7. Return a dict as our model is expecting two inputs\n",
" return img\n", " array = keras.utils.img_to_array(img)\n",
" array = np.expand_dims(array, axis=0)\n",
" return array\n",
"\n", "\n",
"\n", "\n",
"def decode_batch_predictions(pred):\n", "def decode_batch_predictions(pred):\n",
@@ -294,26 +294,11 @@
"\n", "\n",
"\n", "\n",
"def getTextInCaptchaImage(captchaImageFile):\n", "def getTextInCaptchaImage(captchaImageFile):\n",
" batchImages = getBatchImagesFromFile(captchaImageFile)\n", " batchImages = encode_single_sample(captchaImageFile)\n",
" preds = model.predict(batchImages)\n", " preds = model.predict(batchImages)\n",
" return decode_batch_predictions(preds)[0]\n", " return decode_batch_predictions(preds)[0]\n",
"\n", "\n",
"\n", "\n",
"def getBatchImagesFromFile(imageFile):\n",
" return list(asDataset(imageFile).as_numpy_iterator())[0]\n",
"\n",
"\n",
"def asDataset(imageFile):\n",
" dataset = tf.data.Dataset.from_tensor_slices([imageFile])\n",
" dataset = (\n",
" dataset\n",
" .map(encode_single_sample, num_parallel_calls=tf.data.AUTOTUNE)\n",
" .batch(batch_size)\n",
" .prefetch(buffer_size=tf.data.AUTOTUNE)\n",
" )\n",
" return dataset\n",
"\n",
"\n",
"print(\"loading model...\")\n", "print(\"loading model...\")\n",
"model = load_model()\n", "model = load_model()\n",
"model.summary()" "model.summary()"