
Default to ViT-L, lower intermediate count for Colab with ViT-H

pull/34/head
pharmapsychotic, 2 years ago
parent commit 884aab1a26

Changed files:
  1. clip_interrogator.ipynb (11 lines changed)
  2. clip_interrogator/clip_interrogator.py (2 lines changed)

clip_interrogator.ipynb (11 lines changed)

@@ -28,7 +28,7 @@
 },
 {
  "cell_type": "code",
- "execution_count": null,
+ "execution_count": 1,
  "metadata": {
   "cellView": "form",
   "id": "aP9FjmWxtLKJ"
@@ -100,6 +100,7 @@
  " if clip_model_name != ci.config.clip_model_name:\n",
  " ci.config.clip_model_name = clip_model_name\n",
  " ci.load_clip_model()\n",
+ " ci.config.flavor_intermediate_count = 2048 if clip_model_name == \"ViT-L-14/openai\" else 1024\n",
  " image = image.convert('RGB')\n",
  " if mode == 'best':\n",
  " return ci.interrogate(image, max_flavors=int(best_max_flavors))\n",
@@ -111,7 +112,7 @@
 },
 {
  "cell_type": "code",
- "execution_count": 3,
+ "execution_count": 4,
  "metadata": {
   "cellView": "form",
   "colab": {
@@ -264,7 +265,7 @@
  "provenance": []
 },
 "kernelspec": {
- "display_name": "Python 3.8.10 ('ci')",
+ "display_name": "Python 3.7.15 ('py37')",
  "language": "python",
  "name": "python3"
 },
@@ -278,12 +279,12 @@
  "name": "python",
  "nbconvert_exporter": "python",
  "pygments_lexer": "ipython3",
- "version": "3.8.10"
+ "version": "3.7.15"
 },
 "orig_nbformat": 4,
 "vscode": {
  "interpreter": {
-  "hash": "90daa5087f97972f35e673cab20894a33c1e0ca77092ccdd163e60b53596983a"
+  "hash": "1f51d5616d3bc2b87a82685314c5be1ec9a49b6e0cb1f707bfa2acb6c45f3e5f"
  }
 }
},
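
For readability, a sketch of the Colab helper around the changed cell, rendered as plain Python. Only the body lines visible in the hunk above come from the source; the function name image_to_prompt, its signature, and the fallback for non-'best' modes are assumptions.

# Hypothetical reconstruction of the notebook helper around the changed lines.
from PIL import Image
from clip_interrogator import Config, Interrogator

ci = Interrogator(Config())

def image_to_prompt(image: Image.Image, clip_model_name: str,
                    mode: str = 'best', best_max_flavors: int = 32) -> str:
    # Reload CLIP only when a different model was picked in the form.
    if clip_model_name != ci.config.clip_model_name:
        ci.config.clip_model_name = clip_model_name
        ci.load_clip_model()
    # New in this commit: the heavier ViT-H checkpoint gets a smaller
    # intermediate flavor count (1024) so the Colab runtime stays within
    # memory; the lighter ViT-L default keeps the full 2048.
    ci.config.flavor_intermediate_count = 2048 if clip_model_name == "ViT-L-14/openai" else 1024
    image = image.convert('RGB')
    if mode == 'best':
        return ci.interrogate(image, max_flavors=int(best_max_flavors))
    # Other modes are handled beyond the lines shown in the hunk; 'fast' is
    # assumed here as a fallback.
    return ci.interrogate_fast(image)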

clip_interrogator/clip_interrogator.py (2 lines changed)

@@ -32,7 +32,7 @@ class Config:
     blip_offload: bool = False
     # clip settings
-    clip_model_name: str = 'ViT-H-14/laion2b_s32b_b79k'
+    clip_model_name: str = 'ViT-L-14/openai'
     clip_model_path: str = None
     # interrogator settings
