frankknoll
2023-03-16 15:17:57 +01:00
parent f1ad511850
commit ebf9633723
2 changed files with 72 additions and 33 deletions

captcha.ipynb

@@ -25,8 +25,10 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 1,
-   "metadata": {},
+   "execution_count": null,
+   "metadata": {
+    "id": "ioGwCR3Xl31V"
+   },
    "outputs": [],
    "source": [
     "import sys\n",
@@ -35,8 +37,10 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {},
+   "execution_count": null,
+   "metadata": {
+    "id": "l-coMy_2l31X"
+   },
    "outputs": [],
    "source": [
     "def isInColab():\n",
@@ -49,8 +53,10 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {},
+   "execution_count": null,
+   "metadata": {
+    "id": "goO0feQwl31Y"
+   },
    "outputs": [],
    "source": [
     "inColab = isInColab()"
@@ -59,7 +65,9 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {},
+   "metadata": {
+    "id": "nsE9VWCel31Z"
+   },
    "outputs": [],
    "source": [
     "if inColab:\n",
@@ -71,13 +79,27 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {},
+   "metadata": {
+    "id": "l9qhlDVNl31b"
+   },
    "outputs": [],
    "source": [
-    "if inColab:\n",
-    "    sys.path.insert(0, '/content/HowBadIsMyBatch/src')"
+    "import os\n",
+    "srcPath = '/content/HowBadIsMyBatch/src' if inColab else os.getcwd()"
    ]
   },
+  {
+   "cell_type": "code",
+   "source": [
+    "if inColab:\n",
+    "    sys.path.insert(0, srcPath)"
+   ],
+   "metadata": {
+    "id": "c-2fE6vZsD7a"
+   },
+   "execution_count": null,
+   "outputs": []
+  },
   {
    "cell_type": "code",
    "execution_count": null,
@@ -86,7 +108,6 @@
    },
    "outputs": [],
    "source": [
-    "import os\n",
     "import numpy as np\n",
     "from pathlib import Path\n",
     "import tensorflow as tf\n",
@@ -104,7 +125,9 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {},
+   "metadata": {
+    "id": "BWqAvnVOl31d"
+   },
    "outputs": [],
    "source": [
     "from pathlib import Path\n",
@@ -223,7 +246,9 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {},
+   "metadata": {
+    "id": "HEKh6eval31k"
+   },
    "outputs": [],
    "source": [
     "def printLayers(model):\n",
@@ -249,6 +274,21 @@
" return accuracy.result().numpy()" " return accuracy.result().numpy()"
] ]
}, },
{
"cell_type": "code",
"source": [
"def saveModel(srcPath, modelDAO, model):\n",
" modelFilepath = f'{srcPath}/captcha/{model.name}'\n",
" modelDAO.saveModel(model, modelFilepath)\n",
" if inColab:\n",
" GoogleDriveManager.uploadFolderToGoogleDrive(model.name)"
],
"metadata": {
"id": "WG2rSl9nxZQ0"
},
"execution_count": null,
"outputs": []
},
{ {
"cell_type": "markdown", "cell_type": "markdown",
"metadata": { "metadata": {
@@ -303,6 +343,7 @@
    "outputs": [],
    "source": [
     "if inColab:\n",
+    "    !apt-get update\n",
     "    !sudo apt install ttf-mscorefonts-installer\n",
     "    !sudo fc-cache -f\n",
     "    !fc-match Arial"
@@ -318,8 +359,8 @@
    "source": [
     "# \"We generate 200,000 images for base model pre-training\"\n",
     "captchaGenerator = CaptchaGenerator(\n",
-    "    numCaptchas = 50, # 50, # 200000,\n",
-    "    dataDir = Path(\"captchas/generated/VAERS/\"))"
+    "    numCaptchas = 200000, # 50, # 200000,\n",
+    "    dataDir = Path(srcPath + '/captchas/generated/VAERS/'))"
    ]
   },
   {
@@ -395,9 +436,7 @@
    },
    "outputs": [],
    "source": [
-    "modelDAO.saveModel(model)\n",
-    "if inColab:\n",
-    "    GoogleDriveManager.uploadFolderToGoogleDrive(model.name)"
+    "saveModel(srcPath, modelDAO, model)"
    ]
   },
   {
@@ -460,7 +499,9 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {},
+   "metadata": {
+    "id": "qZvn1k2Ul31v"
+   },
    "outputs": [],
    "source": [
     "modelName, numTrainableLayers = 'MobileNetV3Small', 104\n",
@@ -476,7 +517,7 @@
    "outputs": [],
    "source": [
     "# FK-TODO: DRY with VAERSFileDownloader\n",
-    "modelFilepath = f'{os.getcwd()}/captcha/{modelName}'\n",
+    "modelFilepath = f'{srcPath}/captcha/{modelName}'\n",
     "model = modelDAO.loadModel(modelFilepath)\n",
     "model.summary(show_trainable=True)"
    ]
@@ -584,21 +625,19 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
    "source": [
-    "modelDAO.saveModel(model)\n",
-    "if inColab:\n",
-    "    GoogleDriveManager.uploadFolderToGoogleDrive(model.name)"
-   ]
+    "saveModel(srcPath, modelDAO, model)"
+   ],
+   "metadata": {
+    "id": "FpJTHU6dxOVy"
+   },
+   "execution_count": null,
+   "outputs": []
   }
  ],
  "metadata": {
   "accelerator": "GPU",
   "colab": {
-   "collapsed_sections": [],
-   "name": "captcha.ipynb",
    "private_outputs": true,
    "provenance": []
   },
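Taken together, the notebook changes above amount to the following setup. This is a rough sketch assembled from the hunks in this commit; the body of isInColab is not shown in the diff (the check below is an assumption), and GoogleDriveManager and ModelDAO come from the repository's src modules, so their interfaces are only inferred from how they are called here.

    import os
    import sys

    def isInColab():
        # assumed implementation; the notebook's actual body is outside this diff
        return 'google.colab' in sys.modules

    inColab = isInColab()

    # Source root: the cloned repo on Colab, the current working directory otherwise.
    srcPath = '/content/HowBadIsMyBatch/src' if inColab else os.getcwd()
    if inColab:
        sys.path.insert(0, srcPath)

    def saveModel(srcPath, modelDAO, model):
        # Save under <srcPath>/captcha/<model.name>; on Colab also upload the folder to Google Drive.
        modelFilepath = f'{srcPath}/captcha/{model.name}'
        modelDAO.saveModel(model, modelFilepath)
        if inColab:
            GoogleDriveManager.uploadFolderToGoogleDrive(model.name)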

ModelDAO.py

@@ -4,9 +4,9 @@ import shutil
 
 
 class ModelDAO:
-    def saveModel(self, model):
-        shutil.rmtree(model.name, ignore_errors = True)
-        model.save(model.name)
+    def saveModel(self, model, modelFilepath):
+        shutil.rmtree(modelFilepath, ignore_errors = True)
+        model.save(modelFilepath)
 
     def loadModel(self, modelFilepath):
         return keras.models.load_model(modelFilepath)
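With this refactoring, ModelDAO no longer derives the save location from model.name; the caller passes modelFilepath explicitly, which is what lets the notebook write either into the Colab clone or into the local working directory. A minimal usage sketch follows; the import path and the srcPath/model names are assumptions carried over from the notebook, not part of this diff.

    from ModelDAO import ModelDAO  # import path assumed from the class name

    modelDAO = ModelDAO()
    modelFilepath = f'{srcPath}/captcha/{model.name}'  # as built by the notebook's saveModel helper
    modelDAO.saveModel(model, modelFilepath)     # removes any stale saved copy, then calls model.save(modelFilepath)
    reloaded = modelDAO.loadModel(modelFilepath)  # wraps keras.models.load_model(modelFilepath)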