Image to prompt with BLIP and CLIP

{
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "ytxkysgmrJEi"
},
"source": [
"# CLIP Interrogator by [@pharmapsychotic](https://twitter.com/pharmapsychotic) \n",
"\n",
"<br>\n",
"\n",
"What do the different OpenAI CLIP models see in an image? What might be a good text prompt to create similar images using CLIP guided diffusion or another text to image model? The CLIP Interrogator is here to get you answers!\n",
"\n",
"<br>\n",
"\n",
"If this notebook is helpful to you please consider buying me a coffee via [ko-fi](https://ko-fi.com/pharmapsychotic) or following me on [twitter](https://twitter.com/pharmapsychotic) for more cool Ai stuff. 🙂\n",
"\n",
"And if you're looking for more Ai art tools check out my [Ai generative art tools list](https://pharmapsychotic.com/tools.html).\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"cellView": "form",
"id": "YQk0eemUrSC7"
},
"outputs": [],
"source": [
"#@title Check GPU\n",
"!nvidia-smi -L"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"cellView": "form",
"id": "30xPxDSDrJEl"
},
"outputs": [],
"source": [
"#@title Setup\n",
"!pip3 install ftfy regex tqdm transformers==4.15.0 timm==0.4.12 fairscale==0.4.4\n",
"!pip3 install git+https://github.com/openai/CLIP.git\n",
"!git clone https://github.com/pharmapsychotic/clip-interrogator.git\n",
"!git clone https://github.com/salesforce/BLIP\n",
"%cd /content/BLIP\n",
"\n",
"import clip\n",
"import gc\n",
"import io\n",
"import math\n",
"import numpy as np\n",
"import pandas as pd\n",
"import requests\n",
"import sys\n",
"import torch\n",
"import torchvision.transforms as T\n",
"import torchvision.transforms.functional as TF\n",
"\n",
"from IPython.display import display\n",
"from PIL import Image\n",
"from torch import nn\n",
"from torch.nn import functional as F\n",
"from torchvision import transforms\n",
"from torchvision.transforms.functional import InterpolationMode\n",
"from models.blip import blip_decoder\n",
"\n",
"device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')\n",
"\n",
"blip_image_eval_size = 384\n",
"blip_model_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model*_base_caption.pth' \n",
"blip_model = blip_decoder(pretrained=blip_model_url, image_size=blip_image_eval_size, vit='base')\n",
"blip_model.eval()\n",
"blip_model = blip_model.to(device)\n",
"\n",
"def generate_caption(pil_image):\n",
" gpu_image = transforms.Compose([\n",
" transforms.Resize((blip_image_eval_size, blip_image_eval_size), interpolation=InterpolationMode.BICUBIC),\n",
" transforms.ToTensor(),\n",
" transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711))\n",
" ])(image).unsqueeze(0).to(device)\n",
"\n",
" with torch.no_grad():\n",
" caption = blip_model.generate(gpu_image, sample=False, num_beams=3, max_length=20, min_length=5)\n",
" return caption[0]\n",
"\n",
"def load_list(name):\n",
" with open(f\"/content/clip-interrogator/data/{name}.txt\", 'r', encoding='utf-8', errors='replace') as f:\n",
" items = [line.strip() for line in f.readlines()]\n",
" return items\n",
"\n",
"def rank(model, image_features, text_array, top_count=1):\n",
" top_count = min(top_count, len(text_array))\n",
" text_tokens = clip.tokenize([text for text in text_array]).cuda()\n",
" with torch.no_grad():\n",
" text_features = model.encode_text(text_tokens).float()\n",
" text_features /= text_features.norm(dim=-1, keepdim=True)\n",
"\n",
" similarity = torch.zeros((1, len(text_array))).to(device)\n",
" for i in range(image_features.shape[0]):\n",
" similarity += (100.0 * image_features[i].unsqueeze(0) @ text_features.T).softmax(dim=-1)\n",
" similarity /= image_features.shape[0]\n",
"\n",
" top_probs, top_labels = similarity.cpu().topk(top_count, dim=-1) \n",
" return [(text_array[top_labels[0][i].numpy()], (top_probs[0][i].numpy()*100)) for i in range(top_count)]\n",
"\n",
"def interrogate(image, models):\n",
" caption = generate_caption(image)\n",
" if len(models) == 0:\n",
" print(f\"\\n\\n{caption}\")\n",
" return\n",
"\n",
" table = []\n",
" bests = [[('',0)]]*5\n",
" for model_name in models:\n",
" print(f\"Interrogating with {model_name}...\")\n",
" model, preprocess = clip.load(model_name)\n",
" model.cuda().eval()\n",
"\n",
" images = preprocess(image).unsqueeze(0).cuda()\n",
" with torch.no_grad():\n",
" image_features = model.encode_image(images).float()\n",
" image_features /= image_features.norm(dim=-1, keepdim=True)\n",
"\n",
" ranks = [\n",
" rank(model, image_features, mediums),\n",
" rank(model, image_features, [\"by \"+artist for artist in artists]),\n",
" rank(model, image_features, trending_list),\n",
" rank(model, image_features, movements),\n",
" rank(model, image_features, flavors, top_count=3)\n",
" ]\n",
"\n",
" for i in range(len(ranks)):\n",
" confidence_sum = 0\n",
" for ci in range(len(ranks[i])):\n",
" confidence_sum += ranks[i][ci][1]\n",
" if confidence_sum > sum(bests[i][t][1] for t in range(len(bests[i]))):\n",
" bests[i] = ranks[i]\n",
"\n",
" row = [model_name]\n",
" for r in ranks:\n",
" row.append(', '.join([f\"{x[0]} ({x[1]:0.1f}%)\" for x in r]))\n",
"\n",
" table.append(row)\n",
"\n",
" del model\n",
" gc.collect()\n",
" display(pd.DataFrame(table, columns=[\"Model\", \"Medium\", \"Artist\", \"Trending\", \"Movement\", \"Flavors\"]))\n",
"\n",
" flaves = ', '.join([f\"{x[0]}\" for x in bests[4]])\n",
" medium = bests[0][0][0]\n",
" if caption.startswith(medium):\n",
" print(f\"\\n\\n{caption} {bests[1][0][0]}, {bests[2][0][0]}, {bests[3][0][0]}, {flaves}\")\n",
" else:\n",
" print(f\"\\n\\n{caption}, {medium} {bests[1][0][0]}, {bests[2][0][0]}, {bests[3][0][0]}, {flaves}\")\n",
"\n",
"\n",
"artists = load_list('artists')\n",
"flavors = load_list('flavors')\n",
"mediums = load_list('mediums')\n",
"movements = load_list('movements')\n",
"sites = ['Artstation', 'behance', 'cg society', 'cgsociety', 'deviantart', 'dribble', 'flickr', 'instagram', 'pexels', 'pinterest', 'pixabay', 'pixiv', 'polycount', 'reddit', 'shutterstock', 'tumblr', 'unsplash', 'zbrush central']\n",
"trending_list = [site for site in sites]\n",
"trending_list.extend([\"trending on \"+site for site in sites])\n",
"trending_list.extend([\"featured on \"+site for site in sites])\n",
"trending_list.extend([site+\" contest winner\" for site in sites])\n"
]
},
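{
"cell_type": "markdown",
"metadata": {},
"source": [
"The helpers above can also be used on their own. The next cell is a minimal sketch (assuming the Setup cell has already run) that ranks a small hand-written phrase list against an image with a single CLIP model; the `custom_terms` list and the example image URL are illustrative choices, not part of the interrogator's bundled data.\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#@title Example: rank custom phrases (sketch)\n",
"# Minimal sketch, assuming the Setup cell above has run.\n",
"# 'custom_terms' and the URL are illustrative, not bundled data.\n",
"custom_terms = ['a photograph', 'an oil painting', 'a 3d render', 'pixel art']\n",
"url = 'https://cdnb.artstation.com/p/assets/images/images/032/142/769/large/ignacio-bazan-lazcano-book-4-final.jpg'\n",
"\n",
"model, preprocess = clip.load('ViT-B/32')\n",
"model.to(device).eval()\n",
"\n",
"# encode and normalize the image features, as interrogate() does\n",
"img = Image.open(requests.get(url, stream=True).raw).convert('RGB')\n",
"with torch.no_grad():\n",
"    feats = model.encode_image(preprocess(img).unsqueeze(0).to(device)).float()\n",
"feats /= feats.norm(dim=-1, keepdim=True)\n",
"\n",
"# rank() softmaxes the scaled image-text similarities into (phrase, percent) pairs\n",
"for term, score in rank(model, feats, custom_terms, top_count=2):\n",
"    print(f'{term}: {score:.1f}%')\n"
]
},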
{
"cell_type": "code",
"execution_count": 18,
"metadata": {
"cellView": "form",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 632
},
"id": "rbDEMDGJrJEo",
"outputId": "dd9742e7-1d09-4684-9a00-c1a3284513a1"
},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAO4AAAGACAIAAAAlBcnrAAEAAElEQVR4nGT9Wa9lSZYeiK21zGxPZ76zT+HhHnNEVmVmFZM1dBXJItnqhtCDBAGCWoDAR0FP/QsE6BcIEiAIDUgvemhRooBuQUKRYpGsKrIqK6fKMSIzRo8In67f+cx7NFtLD2a2z41qR6bH9XPu2WdvszV861uD4XD6EEAA/F8CiCCICMJOxAECAoEwIAogAoA4AAJE/yIAAPR/OwQU8H8QQUTEvyHhJfSvgP8+EUACRAT/ugCi/1iCyglbYBQEQEDwX0ekWAAQgRkBBQVEMHySCIARBQT7x/HfJALiABCQEEQAEAgIAQCEBREBgR0ACBICAiH0twkg/v6E0T/43/kjIGCBSSezg+P71naAQIDZW2+bPL344b9PizEq7R8RAUQcAgqi61ql5e4f/PHT738fgO6wyYG+16Yv98338umvt5vf29sDgS/qzYBwvV39v159smhbJBRnEXfr7NcVgBCEnQUAJAMgiCggIoJIfoMR0a+NX3xBv00ICMLiV0YQ/Yr77ZbbCwECgkCAQLdfDfvP0m87IPmFFRErPDX5+4OjJXb/YPxw6PD/cvrTDTABKp0414UvIYVIIi5KComw/8HvI5IStoiIQKAUCIhwuCskQqCw08zg3xAGYGYnwAAIgiIs/svECx9K+HIBYWAWYRAGscIMQVIRghxL2GsRCC9hlC6JK9pfTUBEWFhgnAwISGFCKgWisEYszIIQtlFARJxfOBFJEN8a7QOwf2N3fbbADECAAP4+mP0Nhq9n8asmgMLOvwLMUcEFGEAIUSH6i/hF7PXEgSCQJq0RUdghKQZUg8KVlQiwsH+usIBADCgCLE5nWVuVABoEn1O3QPeWTR+mA9MJFemDdPhRuRySIlLvTo//68kHM9B+e9nZ/g7R/yXMrgNEiDfJckvQxNsDDFsCCET+Lb+xQMiIcXeAQQDYGyL0Fqz/Mukt327XcCfRIMHwSOu6Eaj/ePb4v77/h781OnktGSuAe/lsXycsDCDOtSIcLsgOwr4JiDBbEAfiAFyQInZwSxVFGEQg/i0gGqOxREJ/XUBE/yxEIAxC4q0eOyDllRhAADCotAR1RED2/xQGpLjI6H8XCIUFEYD9iiEQoSAgB/vurQBSnu+1KslQG2WsyLZZW1siqWgLRJiRkJkJCUh5U9Ky/bJcoYSrQJR3RI0UbTSCiCDpKIUM4m0VgAggIJK3SgKMIBgNDgSd52AASYkEv4OkQEQASGlC9F/ihE1ebM/P0BsblJ1iACAIAIFIPizqzRqIxDklMAf7v8+v/pl+/HlTjlvcFC2j3LhmDMlLbB9Weg/0tVhAIqV7/yYowByFWIXdQX/z0XxHW4yIAAyACATgBMCbPW/VIdjCqAQY7hW8Z9w54Z1RRorW0P82ABKIyCEm/2Dvtd+avnaSTr5sbkruZpTloq7bzdyVrqnT8T4ZY+tKmBEBIaoEEoBEz3jrG5EQyV8cpTf/KMKEBAAaJJhVER0cCwgihQcHEmQUAGEgQr/92CshBSVkEYz6CyJIcRWj3gAAeymR3boIAwuQQhQQFgGjdJHv59P7iavRuffH+x9vrpDQurxttx133rB64YsmxQkKoBLBjp0I+5v3YuvtkF9d8AsQwQP2XtTfHikU532xBDmIkCAsCyFBcLu7e0BBACAUAtTszZigEOks67ZbRCQiEsXIXuAh7pDrumQ8LTvLbYeKBMAJzsl9Wq0LxnQ0GKAyQG9nw2dt9bpNx6peIkeVJO94gwx5DfSr75/I37oHb0EgMaqTR1k7ExwEWgBAkACEILpPCRId1hoBkci7NugxEwcZ9isGALky/8u99741uidJdtasflNf/lHx4N/K1b23vvWbdnH02h8Nv/68Ku4A8Pbl166qAAlJifPrj2EHhcVbE+k3DP4ucI0OAgF0tJ0UjG38mLfVgOH+vUT2cE9EkCA+BEIQYhYWJAoXEgQkAI6gq0d35LUPhICCoRBB0umwOFqVNTlrxLHwb00OldY/ASoI19ubbnMpGPWll2QveMI7AWMLSEgUHiFIo18RBvFgmgQ4WjVEj5E8pkRC4R4AsnegSseNlX5RAQCIvJNB1ESamQUQCHWSIpGrKkBEVGGVbhs0BAEmnUlbMjMFdy8J4J+tzv8TnL2ejeZJV7L7ui5zpS6rclOuzrHT4gVIAAhQgvcn6rUkrHA0pSwSV8wDY2+mw+6ihK33i4ISkAj5qKbX+511hPhh2u1BkBYCFBS2ACdm+O38aC7NcTH7yc0n3yvu6enenbuP/uZnf/6crBsXg+Hw8stPxLXctWAG3tj2BninExBML4i/VbkN3gOsEQQRHyn1EEeC+yME8BC5v6IE3aAAtpDIu8goo+HTGAIybyoYJaqOxIVAAOD4kjcVLAKCUEzu1g5r50hrAQCUu/ngPz16fWxMbRvHok0OEQQDMNLAZDOdDlEnpDNAI2hIp6TMzuv572cr4iLA4hjt0DfEK2i7B14s7KAPO4LF4mCjA/BAv+R+/6KKMzMLM2U5W8e289bOAQsA9QgJRESMMcy263o3AuBVmuFHJ6YbmI/KZUF6ppNCmTdp4AwKu+AGw6WwR2bexQSUFgUUhG8B3Vt65DUhKFiIGHwwCsGU77AWelvmITju4pBvyDaEIBwQGeT1dPStdP9FvfzLq8+/O7l/lE++unv0kx/+y789/c0GOrFuvm1svVE6UVqJ68CDt3At6gUyOppwp7JDS7fguscmIEGz0ctvWE1mtiASPXEAtojkN5yCG4bgyLxJ818cgKZfU5K4qiIMwCKMwc3FIM/bbEQUMEo75xCVVkYQkPAgyX97dPCfHz/q2BFRlo2ifWVUmagcVa7MNBscZuPDpNhL84nJ9hxkAff6RUGFqBEIoL/xuBaIEMxPb7fAx7KoNHi7HnaUQNBHmV7JwYfSYV0JAImICJUidlYVKbeNOEtKO3D+gQvMCswxsDRARqliUM+XgOQD3n5n0jT9y+uXJVsSOO8qQni2XP68uRHyIka7uw1bdisCDK4j6GkEdT3eCK/vfgMCqL7tX2W3iUG+o3ePiBn7n3uD7WEZCeJ388N9lX3ZLh9kk3eLgyf3T77/5EefXTxJksJkhYCgsygOxBpjhG1kUwhJeQ8hfVi5u0UACcDPu/3bd4USLauIp952H8DoSVEYwK8y+02VEOECECGSFwjwVkAYdqokOzgf1J1kxx54yfZOioWIEB07RCGFDJCRnppkbPR/de+dbw1nLTudFqQM9mjbX885pZUiYK6UAmu7trHWJZ5cCvLhLWvQWH9TEVCKIBICCbvA4iGSSpDQu2kO8JiFO2T2Tw0QkJwICxCgElD+GQkJEVWec9O4rkWlMcgOllC30KWQKFDADoWS4bjbrG6FpEFVtcCn9er766u38yECjEE9NqnDwMKwyG2MI4hIFIVKvOyhOIw21wNcuCV0EGyc966IO9gGAHgbJ3teJ+oACZD/3Z38Iu72H4ERMtLfTvf/Xfm8IPWa0
6f7s3+//vKzs090kopIMihsU4GwSQyCRQJFImwFUdA/l0QMF3F+f9NBFOO9RTH38kQAIMwoEoC/OISoyMLgrHgmy7/QB6qIiLcxjafAOKAm2EFz+TtAzT+391xIPmSOrgQdMxIZk4oyuUlmOs1IN869ng37kFIABQkQszwBEGtt2dRV3TpMy7opy20xnBw/fFtne0Dab6HXQxAOAD6aEgEGCn4zUKEeSkGghIIJ8ndOCpQGUn7pPIMbXAR6iYK6rgQYCHUxsNVGnAu7IAIoTpyTrpFWhIWdTkxdlxAIkGCJhF2SJs5axXLaVf/n8y+syNNmC2X7MzePLhgAPEDoWQWJHtiThoikAXaxYG+/bolzBDtRLGSHV25JTxTXHowFAxzepHArAZ0jA79mhjeufuXKf1a8+dlI/ylffHn+abfZgqDKchHRWd6uFr1bM0az6/waezFGoPBPCJYnKF/IP0CkFsN9e1yhwYf8gfdlRG/hfQYEIIQs7IOA6AFR+sAfOJh85hgrS3C8AauSZ
"text/plain": [
"<PIL.Image.Image image mode=RGB size=238x384 at 0x7FCFE2DA38D0>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Interrogating with ViT-B/32...\n",
"Interrogating with ViT-B/16...\n",
"Interrogating with RN50...\n"
]
},
{
"data": {
"text/html": [
"\n",
" <div id=\"df-2c5d7c91-4c49-4765-a131-91768e3baffe\">\n",
" <div class=\"colab-df-container\">\n",
" <div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>Model</th>\n",
" <th>Medium</th>\n",
" <th>Artist</th>\n",
" <th>Trending</th>\n",
" <th>Movement</th>\n",
" <th>Flavors</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>ViT-B/32</td>\n",
" <td>cyberpunk art (98.1%)</td>\n",
" <td>by Vincent Lefevre (2.9%)</td>\n",
" <td>cg society contest winner (13.0%)</td>\n",
" <td>retrofuturism (11.5%)</td>\n",
" <td>synthwave (14.1%), cityscape (13.4%), darksynt...</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>ViT-B/16</td>\n",
" <td>cyberpunk art (98.5%)</td>\n",
" <td>by Kilian Eng (0.7%)</td>\n",
" <td>behance contest winner (27.9%)</td>\n",
" <td>panfuturism (21.7%)</td>\n",
" <td>cityscape (11.8%), concept art (10.1%), dystop...</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>RN50</td>\n",
" <td>cyberpunk art (93.8%)</td>\n",
" <td>by Ross Tran (0.8%)</td>\n",
" <td>Artstation (12.3%)</td>\n",
" <td>altermodern (21.9%)</td>\n",
" <td>cityscape (62.3%), synthwave (6.8%), matte pai...</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>\n",
" <button class=\"colab-df-convert\" onclick=\"convertToInteractive('df-2c5d7c91-4c49-4765-a131-91768e3baffe')\"\n",
" title=\"Convert this dataframe to an interactive table.\"\n",
" style=\"display:none;\">\n",
" \n",
" <svg xmlns=\"http://www.w3.org/2000/svg\" height=\"24px\"viewBox=\"0 0 24 24\"\n",
" width=\"24px\">\n",
" <path d=\"M0 0h24v24H0V0z\" fill=\"none\"/>\n",
" <path d=\"M18.56 5.44l.94 2.06.94-2.06 2.06-.94-2.06-.94-.94-2.06-.94 2.06-2.06.94zm-11 1L8.5 8.5l.94-2.06 2.06-.94-2.06-.94L8.5 2.5l-.94 2.06-2.06.94zm10 10l.94 2.06.94-2.06 2.06-.94-2.06-.94-.94-2.06-.94 2.06-2.06.94z\"/><path d=\"M17.41 7.96l-1.37-1.37c-.4-.4-.92-.59-1.43-.59-.52 0-1.04.2-1.43.59L10.3 9.45l-7.72 7.72c-.78.78-.78 2.05 0 2.83L4 21.41c.39.39.9.59 1.41.59.51 0 1.02-.2 1.41-.59l7.78-7.78 2.81-2.81c.8-.78.8-2.07 0-2.86zM5.41 20L4 18.59l7.72-7.72 1.47 1.35L5.41 20z\"/>\n",
" </svg>\n",
" </button>\n",
" \n",
" <style>\n",
" .colab-df-container {\n",
" display:flex;\n",
" flex-wrap:wrap;\n",
" gap: 12px;\n",
" }\n",
"\n",
" .colab-df-convert {\n",
" background-color: #E8F0FE;\n",
" border: none;\n",
" border-radius: 50%;\n",
" cursor: pointer;\n",
" display: none;\n",
" fill: #1967D2;\n",
" height: 32px;\n",
" padding: 0 0 0 0;\n",
" width: 32px;\n",
" }\n",
"\n",
" .colab-df-convert:hover {\n",
" background-color: #E2EBFA;\n",
" box-shadow: 0px 1px 2px rgba(60, 64, 67, 0.3), 0px 1px 3px 1px rgba(60, 64, 67, 0.15);\n",
" fill: #174EA6;\n",
" }\n",
"\n",
" [theme=dark] .colab-df-convert {\n",
" background-color: #3B4455;\n",
" fill: #D2E3FC;\n",
" }\n",
"\n",
" [theme=dark] .colab-df-convert:hover {\n",
" background-color: #434B5C;\n",
" box-shadow: 0px 1px 3px 1px rgba(0, 0, 0, 0.15);\n",
" filter: drop-shadow(0px 1px 2px rgba(0, 0, 0, 0.3));\n",
" fill: #FFFFFF;\n",
" }\n",
" </style>\n",
"\n",
" <script>\n",
" const buttonEl =\n",
" document.querySelector('#df-2c5d7c91-4c49-4765-a131-91768e3baffe button.colab-df-convert');\n",
" buttonEl.style.display =\n",
" google.colab.kernel.accessAllowed ? 'block' : 'none';\n",
"\n",
" async function convertToInteractive(key) {\n",
" const element = document.querySelector('#df-2c5d7c91-4c49-4765-a131-91768e3baffe');\n",
" const dataTable =\n",
" await google.colab.kernel.invokeFunction('convertToInteractive',\n",
" [key], {});\n",
" if (!dataTable) return;\n",
"\n",
" const docLinkHtml = 'Like what you see? Visit the ' +\n",
" '<a target=\"_blank\" href=https://colab.research.google.com/notebooks/data_table.ipynb>data table notebook</a>'\n",
" + ' to learn more about interactive tables.';\n",
" element.innerHTML = '';\n",
" dataTable['output_type'] = 'display_data';\n",
" await google.colab.output.renderOutput(dataTable, element);\n",
" const docLink = document.createElement('div');\n",
" docLink.innerHTML = docLinkHtml;\n",
" element.appendChild(docLink);\n",
" }\n",
" </script>\n",
" </div>\n",
" </div>\n",
" "
],
"text/plain": [
" Model Medium Artist \\\n",
"0 ViT-B/32 cyberpunk art (98.1%) by Vincent Lefevre (2.9%) \n",
"1 ViT-B/16 cyberpunk art (98.5%) by Kilian Eng (0.7%) \n",
"2 RN50 cyberpunk art (93.8%) by Ross Tran (0.8%) \n",
"\n",
" Trending Movement \\\n",
"0 cg society contest winner (13.0%) retrofuturism (11.5%) \n",
"1 behance contest winner (27.9%) panfuturism (21.7%) \n",
"2 Artstation (12.3%) altermodern (21.9%) \n",
"\n",
" Flavors \n",
"0 synthwave (14.1%), cityscape (13.4%), darksynt... \n",
"1 cityscape (11.8%), concept art (10.1%), dystop... \n",
"2 cityscape (62.3%), synthwave (6.8%), matte pai... "
]
},
"metadata": {},
"output_type": "display_data"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"\n",
"a man standing on top of a bridge over a city, cyberpunk art by Vincent Lefevre, behance contest winner, altermodern, cityscape, synthwave, matte painting\n"
]
}
],
"source": [
"#@title Interrogate!\n",
"\n",
"#@markdown \n",
"\n",
"#@markdown #####**Image:**\n",
"\n",
"image_path_or_url = \"https://cdnb.artstation.com/p/assets/images/images/032/142/769/large/ignacio-bazan-lazcano-book-4-final.jpg\" #@param {type:\"string\"}\n",
"\n",
"#@markdown \n",
"\n",
"#@markdown #####**CLIP models:**\n",
"ViTB32 = True #@param{type:\"boolean\"}\n",
"ViTB16 = True #@param{type:\"boolean\"}\n",
"ViTL14 = False #@param{type:\"boolean\"}\n",
"ViTL14_336px = False #@param{type:\"boolean\"}\n",
"RN101 = False #@param{type:\"boolean\"}\n",
"RN50 = True #@param{type:\"boolean\"}\n",
"RN50x4 = False #@param{type:\"boolean\"}\n",
"RN50x16 = False #@param{type:\"boolean\"}\n",
"RN50x64 = False #@param{type:\"boolean\"}\n",
"\n",
"models = []\n",
"if ViTB32: models.append('ViT-B/32')\n",
"if ViTB16: models.append('ViT-B/16')\n",
"if ViTL14: models.append('ViT-L/14')\n",
"if ViTL14_336px: models.append('ViT-L/14@336px')\n",
"if RN101: models.append('RN101')\n",
"if RN50: models.append('RN50')\n",
"if RN50x4: models.append('RN50x4')\n",
"if RN50x16: models.append('RN50x16')\n",
"if RN50x64: models.append('RN50x64')\n",
"\n",
"if str(image_path_or_url).startswith('http://') or str(image_path_or_url).startswith('https://'):\n",
" image = Image.open(requests.get(image_path_or_url, stream=True).raw).convert('RGB')\n",
"else:\n",
" image = Image.open(image_path_or_url).convert('RGB')\n",
"\n",
"thumb = image.copy()\n",
"thumb.thumbnail([blip_image_eval_size, blip_image_eval_size])\n",
"display(thumb)\n",
"\n",
"interrogate(image, models=models)\n"
]
}
],
"metadata": {
"accelerator": "GPU",
"colab": {
"collapsed_sections": [],
"name": "clip-interrogator.ipynb",
"provenance": []
},
"gpuClass": "standard",
"kernelspec": {
"display_name": "Python 3.9.5 ('base')",
"language": "python",
"name": "python3"
},
"language_info": {
"name": "python",
"version": "3.9.5"
},
"orig_nbformat": 4,
"vscode": {
"interpreter": {
"hash": "2e35b1f3b2666f0e402b0693dd7493a583002c98361385482aa9f27d8f0f5c89"
}
}
},
"nbformat": 4,
"nbformat_minor": 0
}