Image to prompt with BLIP and CLIP

{
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "ytxkysgmrJEi"
},
"source": [
"# CLIP Interrogator by [@pharmapsychotic](https://twitter.com/pharmapsychotic) \n",
"\n",
"<br>\n",
"\n",
"What do the different OpenAI CLIP models see in an image? What might be a good text prompt to create similar images using CLIP guided diffusion or another text to image model? The CLIP Interrogator is here to get you answers!\n",
"\n",
"<br>\n",
"\n",
"If this notebook is helpful to you please consider buying me a coffee via [ko-fi](https://ko-fi.com/pharmapsychotic) or following me on [twitter](https://twitter.com/pharmapsychotic) for more cool Ai stuff. 🙂\n",
"\n",
"And if you're looking for more Ai art tools check out my [Ai generative art tools list](https://pharmapsychotic.com/tools.html).\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"cellView": "form",
"id": "YQk0eemUrSC7"
},
"outputs": [],
"source": [
"#@title Check GPU\n",
"!nvidia-smi -L"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"cellView": "form",
"id": "30xPxDSDrJEl"
},
"outputs": [],
"source": [
"#@title Setup\n",
"!pip3 install ftfy regex tqdm transformers==4.15.0 timm==0.4.12 fairscale==0.4.4\n",
"!pip3 install git+https://github.com/openai/CLIP.git\n",
"!git clone https://github.com/pharmapsychotic/clip-interrogator.git\n",
"!git clone https://github.com/salesforce/BLIP\n",
"%cd /content/BLIP\n",
"\n",
"import clip\n",
"import gc\n",
"import io\n",
"import math\n",
"import numpy as np\n",
"import pandas as pd\n",
"import requests\n",
"import sys\n",
"import torch\n",
"import torchvision.transforms as T\n",
"import torchvision.transforms.functional as TF\n",
"\n",
"from IPython.display import display\n",
"from PIL import Image\n",
"from torch import nn\n",
"from torch.nn import functional as F\n",
"from torchvision import transforms\n",
"from torchvision.transforms.functional import InterpolationMode\n",
"from models.blip import blip_decoder\n",
"\n",
"device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')\n",
"\n",
"blip_image_eval_size = 384\n",
"blip_model_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model*_base_caption.pth' \n",
"blip_model = blip_decoder(pretrained=blip_model_url, image_size=blip_image_eval_size, vit='base')\n",
"blip_model.eval()\n",
"blip_model = blip_model.to(device)\n",
"\n",
"def generate_caption(pil_image):\n",
" gpu_image = transforms.Compose([\n",
" transforms.Resize((blip_image_eval_size, blip_image_eval_size), interpolation=InterpolationMode.BICUBIC),\n",
" transforms.ToTensor(),\n",
" transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711))\n",
" ])(image).unsqueeze(0).to(device)\n",
"\n",
" with torch.no_grad():\n",
" caption = blip_model.generate(gpu_image, sample=False, num_beams=3, max_length=20, min_length=5)\n",
" return caption[0]\n",
"\n",
"def image_embedding(image, model_name):\n",
" model, preprocess = clip.load(model_name)\n",
" model.cuda().eval()\n",
" images = preprocess(image).unsqueeze(0).cuda()\n",
" print(f\"After preprocess: {images.shape}\")\n",
" with torch.no_grad():\n",
" image_features = model.encode_image(images).float()\n",
" image_features /= image_features.norm(dim=-1, keepdim=True)\n",
" return image_features\n",
"\n",
"def load_list(name):\n",
" with open(f\"/content/clip-interrogator/data/{name}.txt\", 'r', encoding='utf-8', errors='replace') as f:\n",
" items = [line.strip() for line in f.readlines()]\n",
" return items\n",
"\n",
"def rank(model, image_features, text_array, top_count=1):\n",
" top_count = min(top_count, len(text_array))\n",
" text_tokens = clip.tokenize([text for text in text_array]).cuda()\n",
" with torch.no_grad():\n",
" text_features = model.encode_text(text_tokens).float()\n",
" text_features /= text_features.norm(dim=-1, keepdim=True)\n",
"\n",
" similarity = torch.zeros((1, len(text_array))).to(device)\n",
" for i in range(image_features.shape[0]):\n",
" similarity += (100.0 * image_features[i].unsqueeze(0) @ text_features.T).softmax(dim=-1)\n",
" similarity /= image_features.shape[0]\n",
"\n",
" top_probs, top_labels = similarity.cpu().topk(top_count, dim=-1) \n",
" return [(text_array[top_labels[0][i].numpy()], (top_probs[0][i].numpy()*100)) for i in range(top_count)]\n",
" \n",
"def interrogate(image, models=['RN50','RN101','RN50x4','RN50x16','RN50x64','ViT-B/32','ViT-B/16','ViT-L/14']):\n",
" caption = generate_caption(image)\n",
" print(caption)\n",
"\n",
" table = []\n",
" bests = [[('',0)]]*5\n",
" for model_name in models:\n",
" print(f\"Interrogating with {model_name}...\")\n",
" model, preprocess = clip.load(model_name)\n",
" model.cuda().eval()\n",
"\n",
" images = preprocess(image).unsqueeze(0).cuda()\n",
" with torch.no_grad():\n",
" image_features = model.encode_image(images).float()\n",
" image_features /= image_features.norm(dim=-1, keepdim=True)\n",
"\n",
" ranks = [\n",
" rank(model, image_features, movements),\n",
" rank(model, image_features, mediums),\n",
" rank(model, image_features, [\"by \"+artist for artist in artists]),\n",
" rank(model, image_features, trending_list),\n",
" rank(model, image_features, flavors, top_count=3)\n",
" ]\n",
"\n",
" for i in range(len(ranks)):\n",
" confidence_sum = 0\n",
" for ci in range(len(ranks[i])):\n",
" confidence_sum += ranks[i][ci][1]\n",
" if confidence_sum > sum(bests[i][t][1] for t in range(len(bests[i]))):\n",
" bests[i] = ranks[i]\n",
"\n",
" row = [model_name]\n",
" for r in ranks:\n",
" row.append(', '.join([f\"{x[0]} ({x[1]:0.1f}%)\" for x in r]))\n",
"\n",
" table.append(row)\n",
"\n",
" del model\n",
" gc.collect()\n",
" display(pd.DataFrame(table, columns=[\"Model\", \"Movement\", \"Medium\", \"Artist\", \"Trending\", \"Flavors\"]))\n",
"\n",
" flaves = ', '.join([f\"{x[0]}\" for x in bests[4]])\n",
" print(f\"\\n\\n{bests[0][0][0]} {bests[1][0][0]} of {caption}, {bests[2][0][0]}, {bests[3][0][0]}, {flaves}\")\n",
"\n",
"\n",
"artists = load_list('artists')\n",
"flavors = load_list('flavors')\n",
"mediums = load_list('mediums')\n",
"movements = load_list('movements')\n",
"sites = ['Artstation', 'behance', 'cg society', 'cgsociety', 'deviantart', 'dribble', 'flickr', 'instagram', 'pexels', 'pinterest', 'pixabay', 'pixiv', 'polycount', 'reddit', 'shutterstock', 'tumblr', 'unsplash', 'zbrush central']\n",
"trending_list = [site for site in sites]\n",
"trending_list.extend([\"trending on \"+site for site in sites])\n",
"trending_list.extend([\"featured on \"+site for site in sites])\n",
"trending_list.extend([site+\" contest winner\" for site in sites])\n"
]
},
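{
"cell_type": "code",
"execution_count": null,
"metadata": {
"cellView": "form"
},
"outputs": [],
"source": [
"#@title Quick caption test (optional)\n",
"# A minimal sanity-check sketch (not part of the original flow): caption one image\n",
"# with BLIP alone, skipping the CLIP ranking. Uses the same demo URL as the\n",
"# Interrogate cell below; swap in any image URL you like.\n",
"test_url = \"https://cdnb.artstation.com/p/assets/images/images/032/142/769/large/ignacio-bazan-lazcano-book-4-final.jpg\"\n",
"test_image = Image.open(requests.get(test_url, stream=True).raw).convert('RGB')\n",
"print(generate_caption(test_image))"
]
},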
{
"cell_type": "code",
"execution_count": 12,
"metadata": {
"cellView": "form",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 747
},
"id": "rbDEMDGJrJEo",
"outputId": "a14961f4-0f79-4595-92e1-37793d24175e"
},
"outputs": [
{
"output_type": "display_data",
"data": {
"text/plain": [
"<PIL.Image.Image image mode=RGB size=238x384 at 0x7FE97209CCD0>"
],
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAO4AAAGACAIAAAAlBcnrAAEAAElEQVR4nGT9Wa9lSZYeiK21zGxPZ76zT+HhHnNEVmVmFZM1dBXJItnqhtCDBAGCWoDAR0FP/QsE6BcIEiAIDUgvemhRooBuQUKRYpGsKrIqK6fKMSIzRo8In67f+cx7NFtLD2a2z41qR6bH9XPu2WdvszV861uD4XD6EEAA/F8CiCCICMJOxAECAoEwIAogAoA4AAJE/yIAAPR/OwQU8H8QQUTEvyHhJfSvgP8+EUACRAT/ugCi/1iCyglbYBQEQEDwX0ekWAAQgRkBBQVEMHySCIARBQT7x/HfJALiABCQEEQAEAgIAQCEBREBgR0ACBICAiH0twkg/v6E0T/43/kjIGCBSSezg+P71naAQIDZW2+bPL344b9PizEq7R8RAUQcAgqi61ql5e4f/PHT738fgO6wyYG+16Yv98338umvt5vf29sDgS/qzYBwvV39v159smhbJBRnEXfr7NcVgBCEnQUAJAMgiCggIoJIfoMR0a+NX3xBv00ICMLiV0YQ/Yr77ZbbCwECgkCAQLdfDfvP0m87IPmFFRErPDX5+4OjJXb/YPxw6PD/cvrTDTABKp0414UvIYVIIi5KComw/8HvI5IStoiIQKAUCIhwuCskQqCw08zg3xAGYGYnwAAIgiIs/svECx9K+HIBYWAWYRAGscIMQVIRghxL2GsRCC9hlC6JK9pfTUBEWFhgnAwISGFCKgWisEYszIIQtlFARJxfOBFJEN8a7QOwf2N3fbbADECAAP4+mP0Nhq9n8asmgMLOvwLMUcEFGEAIUSH6i/hF7PXEgSCQJq0RUdghKQZUg8KVlQiwsH+usIBADCgCLE5nWVuVABoEn1O3QPeWTR+mA9MJFemDdPhRuRySIlLvTo//68kHM9B+e9nZ/g7R/yXMrgNEiDfJckvQxNsDDFsCCET+Lb+xQMiIcXeAQQDYGyL0Fqz/Mukt327XcCfRIMHwSOu6Eaj/ePb4v77/h781OnktGSuAe/lsXycsDCDOtSIcLsgOwr4JiDBbEAfiAFyQInZwSxVFGEQg/i0gGqOxREJ/XUBE/yxEIAxC4q0eOyDllRhAADCotAR1RED2/xQGpLjI6H8XCIUFEYD9iiEQoSAgB/vurQBSnu+1KslQG2WsyLZZW1siqWgLRJiRkJkJCUh5U9Ky/bJcoYSrQJR3RI0UbTSCiCDpKIUM4m0VgAggIJK3SgKMIBgNDgSd52AASYkEv4OkQEQASGlC9F/ihE1ebM/P0BsblJ1iACAIAIFIPizqzRqIxDklMAf7v8+v/pl+/HlTjlvcFC2j3LhmDMlLbB9Weg/0tVhAIqV7/yYowByFWIXdQX/z0XxHW4yIAAyACATgBMCbPW/VIdjCqAQY7hW8Z9w54Z1RRorW0P82ABKIyCEm/2Dvtd+avnaSTr5sbkruZpTloq7bzdyVrqnT8T4ZY+tKmBEBIaoEEoBEz3jrG5EQyV8cpTf/KMKEBAAaJJhVER0cCwgihQcHEmQUAGEgQr/92CshBSVkEYz6CyJIcRWj3gAAeymR3boIAwuQQhQQFgGjdJHv59P7iavRuffH+x9vrpDQurxttx133rB64YsmxQkKoBLBjp0I+5v3YuvtkF9d8AsQwQP2XtTfHikU532xBDmIkCAsCyFBcLu7e0BBACAUAtTszZigEOks67ZbRCQiEsXIXuAh7pDrumQ8LTvLbYeKBMAJzsl9Wq0LxnQ0GKAyQG9nw2dt9bpNx6peIkeVJO94gwx5DfSr75/I37oHb0EgMaqTR1k7ExwEWgBAkACEILpPCRId1hoBkci7NugxEwcZ9isGALky/8u99741uidJdtasflNf/lHx4N/K1b23vvWbdnH02h8Nv/68Ku4A8Pbl166qAAlJifPrj2EHhcVbE+k3DP4ucI0OAgF0tJ0UjG38mLfVgOH+vUT2cE9EkCA+BEIQYhYWJAoXEgQkAI6gq0d35LUPhICCoRBB0umwOFqVNTlrxLHwb00OldY/ASoI19ubbnMpGPWll2QveMI7AWMLSEgUHiFIo18RBvFgmgQ4WjVEj5E8pkRC4R4AsnegSseNlX5RAQCIvJNB1ESamQUQCHWSIpGrKkBEVGGVbhs0BAEmnUlbMjMFdy8J4J+tzv8TnL2ejeZJV7L7ui5zpS6rclOuzrHT4gVIAAhQgvcn6rUkrHA0pSwSV8wDY2+mw+6ihK33i4ISkAj5qKbX+511hPhh2u1BkBYCFBS2ACdm+O38aC7NcTH7yc0n3yvu6enenbuP/uZnf/6crBsXg+Hw8stPxLXctWAG3tj2BninExBML4i/VbkN3gOsEQQRHyn1EEeC+yME8BC5v6IE3aAAtpDIu8goo+HTGAIybyoYJaqOxIVAAOD4kjcVLAKCUEzu1g5r50hrAQCUu/ngPz16fWxMbRvHok0OEQQDMNLAZDOdDlEnpDNAI2hIp6TMzuv572cr4iLA4hjt0DfEK2i7B14s7KAPO4LF4mCjA/BAv+R+/6KKMzMLM2U5W8e289bOAQsA9QgJRESMMcy263o3AuBVmuFHJ6YbmI/KZUF6ppNCmTdp4AwKu+AGw6WwR2bexQSUFgUUhG8B3Vt65DUhKFiIGHwwCsGU77AWelvmITju4pBvyDaEIBwQGeT1dPStdP9FvfzLq8+/O7l/lE++unv0kx/+y789/c0GOrFuvm1svVE6UVqJ68CDt3At6gUyOppwp7JDS7fguscmIEGz0ctvWE1mtiASPXEAtojkN5yCG4bgyLxJ818cgKZfU5K4qiIMwCKMwc3FIM/bbEQUMEo75xCVVkYQkPAgyX97dPCfHz/q2BFRlo2ifWVUmagcVa7MNBscZuPDpNhL84nJ9hxkAff6RUGFqBEIoL/xuBaIEMxPb7fAx7KoNHi7HnaUQNBHmV7JwYfSYV0JAImICJUidlYVKbeNOEtKO3D+gQvMCswxsDRARqliUM+XgOQD3n5n0jT9y+uXJVsSOO8qQni2XP68uRHyIka7uw1bdisCDK4j6GkEdT3eCK/vfgMCqL7tX2W3iUG+o3ePiBn7n3uD7WEZCeJ388N9lX3ZLh9kk3eLgyf3T77/5EefXTxJksJkhYCgsygOxBpjhG1kUwhJeQ8hfVi5u0UACcDPu/3bd4USLauIp952H8DoSVEYwK8y+02VEOECECGSFwjwVkAYdqokOzgf1J1kxx54yfZOioWIEB07RCGFDJCRnppkbPR/de+dbw1nLTudFqQM9mjbX885pZUiYK6UAmu7trHWJZ5cCvLhLWvQWH9TEVCKIBICCbvA4iGSSpDQu2kO8JiFO2T2Tw0QkJwICxCgElD+GQkJEVWec9O4rkWlMcgOllC30KWQKFDADoWS4bjbrG6FpEFVtcCn9er766u38yECjEE9NqnDwMKwyG2MI4hIFIVKvOyhOIw21wNcuCV0EGyc966IO9gGAHgbJ3teJ+oACZD/3Z38Iu72H4ERMtLfTvf/Xfm8IPWa0
6f7s3+//vKzs090kopIMihsU4GwSQyCRQJFImwFUdA/l0QMF3F+f9NBFOO9RTH38kQAIMwoEoC/OISoyMLgrHgmy7/QB6qIiLcxjafAOKAm2EFz+TtAzT+391xIPmSOrgQdMxIZk4oyuUlmOs1IN869ng37kFIABQkQszwBEGtt2dRV3TpMy7opy20xnBw/fFtne0Dab6HXQxAOAD6aEgEGCn4zUKEeSkGghIIJ8ndOCpQGUn7pPIMbXAR6iYK6rgQYCHUxsNVGnAu7IAIoTpyTrpFWhIWdTkxdlxAIkGCJhF2SJs5axXLaVf/n8y+syNNmC2X7MzePLhgAPEDoWQWJHtiThoikAXaxYG+/bolzBDtRLGSHV25JTxTXHowFAxzepHArAZ0jA79mhjeufuXKf1a8+dlI/ylffHn+abfZgqDKchHRWd6uFr1bM0az6/waezFGoPBPCJYnKF/IP0CkFsN9e1yhwYf8gfdlRG/hfQYEIIQs7IOA6AFR+sAfOJh85hgrS3C8AauSZ
},
"metadata": {}
},
{
"output_type": "stream",
"name": "stdout",
"text": [
"a man standing on top of a bridge over a city\n",
"Interrogating with ViT-B/32...\n",
"Interrogating with ViT-B/16...\n",
"Interrogating with RN101...\n",
"Interrogating with RN50x4...\n",
"Interrogating with RN50x16...\n"
]
},
{
"output_type": "display_data",
"data": {
"text/plain": [
" Model Movement Medium \\\n",
"0 ViT-B/32 Retrofuturism (11.7%) cyberpunk art (98.2%) \n",
"1 ViT-B/16 Panfuturism (21.9%) cyberpunk art (98.5%) \n",
"2 RN101 Computer art (23.3%) cyberpunk art (96.4%) \n",
"3 RN50x4 Retrofuturism (26.6%) cyberpunk art (97.9%) \n",
"4 RN50x16 Panfuturism (65.3%) cyberpunk art (99.7%) \n",
"\n",
" Artist Trending \\\n",
"0 by Vincent Lefevre (3.1%) cg society contest winner (13.0%) \n",
"1 by Syd Mead (0.7%) behance contest winner (27.9%) \n",
"2 by Reuben Tam (3.3%) featured on Artstation (15.9%) \n",
"3 by Reuben Tam (8.3%) cgsociety contest winner (18.8%) \n",
"4 by Reuben Tam (4.1%) Artstation contest winner (32.0%) \n",
"\n",
" Flavors \n",
"0 synthwave (18.4%), darksynth (15.1%), dystopia... \n",
"1 concept art (13.1%), dystopian art (9.0%), ret... \n",
"2 voxel art (14.7%), artstation hq (13.6%), synt... \n",
"3 artstation hq (20.5%), dystopian art (16.5%), ... \n",
"4 synthwave (45.5%), retrowave (19.6%), 2d game ... "
]
},
"metadata": {}
},
{
"output_type": "stream",
"name": "stdout",
"text": [
"\n",
"\n",
"Panfuturism cyberpunk art of a man standing on top of a bridge over a city, by Reuben Tam, Artstation contest winner, synthwave, retrowave, 2d game art\n"
]
}
],
"source": [
"#@title Interrogate!\n",
"\n",
"#@markdown \n",
"\n",
"#@markdown #####**Image:**\n",
"image_path_or_url = \"https://cdnb.artstation.com/p/assets/images/images/032/142/769/large/ignacio-bazan-lazcano-book-4-final.jpg\" #@param {type:\"string\"}\n",
"\n",
"#@markdown \n",
"\n",
"#@markdown #####**CLIP models:**\n",
"ViTB32 = True #@param{type:\"boolean\"}\n",
"ViTB16 = True #@param{type:\"boolean\"}\n",
"ViTL14 = False #@param{type:\"boolean\"}\n",
"ViTL14_336px = False #@param{type:\"boolean\"}\n",
"RN101 = True #@param{type:\"boolean\"}\n",
"RN50 = True #@param{type:\"boolean\"}\n",
"RN50x4 = True #@param{type:\"boolean\"}\n",
"RN50x16 = False #@param{type:\"boolean\"}\n",
"RN50x64 = False #@param{type:\"boolean\"}\n",
"\n",
"models = []\n",
"if ViTB32: models.append('ViT-B/32')\n",
"if ViTB16: models.append('ViT-B/16')\n",
"if ViTL14: models.append('ViT-L/14')\n",
"if ViTL14_336px: models.append('ViT-L/14@336px')\n",
"if RN101: models.append('RN101')\n",
"if RN50: models.append('RN50x4')\n",
"if RN50x4: models.append('RN50x16')\n",
"\n",
"if str(image_path_or_url).startswith('http://') or str(image_path_or_url).startswith('https://'):\n",
" image = Image.open(requests.get(image_path_or_url, stream=True).raw).convert('RGB')\n",
"else:\n",
" image = Image.open(image_path_or_url).convert('RGB')\n",
"\n",
"thumb = image.copy()\n",
"thumb.thumbnail([blip_image_eval_size, blip_image_eval_size])\n",
"display(thumb)\n",
"\n",
"interrogate(image, models=models)\n"
]
}
],
"metadata": {
"accelerator": "GPU",
"colab": {
"collapsed_sections": [],
"name": "clip-interrogator.ipynb",
"provenance": []
},
"gpuClass": "standard",
"kernelspec": {
"display_name": "Python 3.9.5 ('base')",
"language": "python",
"name": "python3"
},
"language_info": {
"name": "python",
"version": "3.9.5"
},
"orig_nbformat": 4,
"vscode": {
"interpreter": {
"hash": "2e35b1f3b2666f0e402b0693dd7493a583002c98361385482aa9f27d8f0f5c89"
}
}
},
"nbformat": 4,
"nbformat_minor": 0
}