{ "cells": [ { "cell_type": "markdown", "metadata": { "id": "3jm8RYrLqvzz" }, "source": [ "# CLIP Interrogator 2 by [@pharmapsychotic](https://twitter.com/pharmapsychotic) \n", "\n", "
\n", "\n", "Want to figure out what a good prompt might be to create new images like an existing one? The CLIP Interrogator is here to get you answers!\n", "\n", "
\n", "\n", "This version is specialized for producing nice prompts for use with Stable Diffusion and achieves higher alignment between generated text prompt and source image. You can try out the old [version 1](https://colab.research.google.com/github/pharmapsychotic/clip-interrogator/blob/v1/clip_interrogator.ipynb) to see how different CLIP models ranks terms. \n", "\n", "
\n", "\n", "If this notebook is helpful to you please consider buying me a coffee via [ko-fi](https://ko-fi.com/pharmapsychotic) or following me on [twitter](https://twitter.com/pharmapsychotic) for more cool Ai stuff. 🙂\n", "\n", "And if you're looking for more Ai art tools check out my [Ai generative art tools list](https://pharmapsychotic.com/tools.html).\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "cellView": "form", "id": "aP9FjmWxtLKJ" }, "outputs": [], "source": [ "#@title Check GPU\n", "!nvidia-smi -L" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "cellView": "form", "id": "xpPKQR40qvz2" }, "outputs": [], "source": [ "#@title Setup\n", "import argparse, subprocess, sys, time\n", "\n", "def setup():\n", " install_cmds = [\n", " ['pip', 'install', 'ftfy', 'gradio', 'regex', 'tqdm', 'transformers==4.21.2', 'timm', 'fairscale', 'requests'],\n", " ['pip', 'install', '-e', 'git+https://github.com/openai/CLIP.git@main#egg=clip'],\n", " ['pip', 'install', '-e', 'git+https://github.com/pharmapsychotic/BLIP.git@lib#egg=blip'],\n", " ['git', 'clone', '-b', 'lib', 'https://github.com/pharmapsychotic/clip-interrogator.git']\n", " ]\n", " for cmd in install_cmds:\n", " print(subprocess.run(cmd, stdout=subprocess.PIPE).stdout.decode('utf-8'))\n", "\n", "setup()\n", "\n", "import sys\n", "sys.path.append('src/blip')\n", "sys.path.append('src/clip')\n", "sys.path.append('clip-interrogator')\n", "\n", "import clip\n", "import gradio as gr\n", "import torch\n", "from clip_interrogator import Interrogator, Config\n", "\n", "ci = Interrogator(Config())\n" ] }, { "cell_type": "code", "execution_count": 9, "metadata": { "colab": { "base_uri": "https://localhost:8080/", "height": 677 }, "cellView": "form", "id": "Pf6qkFG6MPRj", "outputId": "5f959af5-f6dd-43f2-f8df-8331a422d317" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Colab notebook detected. To show errors in colab notebook, set `debug=True` in `launch()`\n", "\n", "Using Embedded Colab Mode (NEW). If you have issues, please use share=True and file an issue at https://github.com/gradio-app/gradio/\n", "Note: opening the browser inspector may crash Embedded Colab Mode.\n", "\n", "To create a public link, set `share=True` in `launch()`.\n" ] }, { "output_type": "display_data", "data": { "text/plain": [ "" ], "application/javascript": [ "(async (port, path, width, height, cache, element) => {\n", " if (!google.colab.kernel.accessAllowed && !cache) {\n", " return;\n", " }\n", " element.appendChild(document.createTextNode(''));\n", " const url = await google.colab.kernel.proxyPort(port, {cache});\n", "\n", " const external_link = document.createElement('div');\n", " external_link.innerHTML = `\n", "
\n", " Running on \n", " https://localhost:${port}${path}\n", " \n", "
\n", " `;\n", " element.appendChild(external_link);\n", "\n", " const iframe = document.createElement('iframe');\n", " iframe.src = new URL(path, url).toString();\n", " iframe.height = height;\n", " iframe.allow = \"autoplay; camera; microphone; clipboard-read; clipboard-write;\"\n", " iframe.width = width;\n", " iframe.style.border = 0;\n", " element.appendChild(iframe);\n", " })(7866, \"/\", \"100%\", 500, false, window.element)" ] }, "metadata": {} }, { "output_type": "execute_result", "data": { "text/plain": [ "(, 'http://127.0.0.1:7866/', None)" ] }, "metadata": {}, "execution_count": 9 } ], "source": [ "#@title Run!\n", "\n", "def inference(image, mode):\n", " image = image.convert('RGB')\n", " if mode == 'best':\n", " return ci.interrogate(image)\n", " elif mode == 'classic':\n", " return ci.interrogate_classic(image)\n", " else:\n", " return ci.interrogate_fast(image)\n", " \n", "inputs = [\n", " gr.inputs.Image(type='pil'),\n", " gr.Radio(['best', 'classic', 'fast'], label='', value='best'),\n", "]\n", "outputs = [\n", " gr.outputs.Textbox(label=\"Output\"),\n", "]\n", "\n", "io = gr.Interface(\n", " inference, \n", " inputs, \n", " outputs, \n", " allow_flagging=False,\n", ")\n", "io.launch()\n" ] } ], "metadata": { "accelerator": "GPU", "colab": { "collapsed_sections": [], "provenance": [] }, "kernelspec": { "display_name": "Python 3.8.10 ('venv': venv)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.8.10" }, "orig_nbformat": 4, "vscode": { "interpreter": { "hash": "f7a8d9541664ade9cff251487a19c76f2dd1b4c864d158f07ee26d1b0fd5c9a1" } } }, "nbformat": 4, "nbformat_minor": 0 }