{
"cells": [
{
"cell_type": "markdown",
"id": "8b0e11f2-9ea4-48c2-b8d2-d0a4ba967827",
"metadata": {},
"source": [
"# Gradio Day!\n",
"\n",
"Today we will build User Interfaces using the outrageously simple Gradio framework.\n",
"\n",
"Prepare for joy!\n",
"\n",
"Please note: your Gradio screens may appear in 'dark mode' or 'light mode' depending on your computer settings."
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "c44c5494-950d-4d2f-8d4f-b87b57c5b330",
"metadata": {},
"outputs": [],
"source": [
"# imports\n",
"\n",
"import os\n",
"import requests\n",
"from bs4 import BeautifulSoup\n",
"from typing import List\n",
"from dotenv import load_dotenv\n",
"from openai import OpenAI\n",
"import google.generativeai\n",
"import anthropic"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "d1715421-cead-400b-99af-986388a97aff",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Requirement already satisfied: gradio in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (5.15.0)\n",
"Requirement already satisfied: aiofiles<24.0,>=22.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (23.2.1)\n",
"Requirement already satisfied: anyio<5.0,>=3.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (4.8.0)\n",
"Requirement already satisfied: fastapi<1.0,>=0.115.2 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (0.115.8)\n",
"Requirement already satisfied: ffmpy in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (0.5.0)\n",
"Requirement already satisfied: gradio-client==1.7.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (1.7.0)\n",
"Requirement already satisfied: httpx>=0.24.1 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (0.28.1)\n",
"Requirement already satisfied: huggingface-hub>=0.28.1 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (0.28.1)\n",
"Requirement already satisfied: jinja2<4.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (3.1.5)\n",
"Requirement already satisfied: markupsafe~=2.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (2.1.5)\n",
"Requirement already satisfied: numpy<3.0,>=1.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (2.2.2)\n",
"Requirement already satisfied: orjson~=3.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (3.10.15)\n",
"Requirement already satisfied: packaging in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (24.2)\n",
"Requirement already satisfied: pandas<3.0,>=1.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (2.2.3)\n",
"Requirement already satisfied: pillow<12.0,>=8.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (11.1.0)\n",
"Requirement already satisfied: pydantic>=2.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (2.10.6)\n",
"Requirement already satisfied: pydub in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (0.25.1)\n",
"Requirement already satisfied: python-multipart>=0.0.18 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (0.0.20)\n",
"Requirement already satisfied: pyyaml<7.0,>=5.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (6.0.2)\n",
"Requirement already satisfied: ruff>=0.9.3 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (0.9.4)\n",
"Requirement already satisfied: safehttpx<0.2.0,>=0.1.6 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (0.1.6)\n",
"Requirement already satisfied: semantic-version~=2.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (2.10.0)\n",
"Requirement already satisfied: starlette<1.0,>=0.40.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (0.45.3)\n",
"Requirement already satisfied: tomlkit<0.14.0,>=0.12.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (0.13.2)\n",
"Requirement already satisfied: typer<1.0,>=0.12 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (0.15.1)\n",
"Requirement already satisfied: typing-extensions~=4.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (4.12.2)\n",
"Requirement already satisfied: uvicorn>=0.14.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio) (0.34.0)\n",
"Requirement already satisfied: fsspec in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio-client==1.7.0->gradio) (2025.2.0)\n",
"Requirement already satisfied: websockets<15.0,>=10.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from gradio-client==1.7.0->gradio) (14.2)\n",
"Requirement already satisfied: idna>=2.8 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from anyio<5.0,>=3.0->gradio) (3.10)\n",
"Requirement already satisfied: sniffio>=1.1 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from anyio<5.0,>=3.0->gradio) (1.3.1)\n",
"Requirement already satisfied: certifi in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from httpx>=0.24.1->gradio) (2025.1.31)\n",
"Requirement already satisfied: httpcore==1.* in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from httpx>=0.24.1->gradio) (1.0.7)\n",
"Requirement already satisfied: h11<0.15,>=0.13 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from httpcore==1.*->httpx>=0.24.1->gradio) (0.14.0)\n",
"Requirement already satisfied: filelock in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from huggingface-hub>=0.28.1->gradio) (3.17.0)\n",
"Requirement already satisfied: requests in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from huggingface-hub>=0.28.1->gradio) (2.32.3)\n",
"Requirement already satisfied: tqdm>=4.42.1 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from huggingface-hub>=0.28.1->gradio) (4.67.1)\n",
"Requirement already satisfied: python-dateutil>=2.8.2 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from pandas<3.0,>=1.0->gradio) (2.9.0.post0)\n",
"Requirement already satisfied: pytz>=2020.1 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from pandas<3.0,>=1.0->gradio) (2025.1)\n",
"Requirement already satisfied: tzdata>=2022.7 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from pandas<3.0,>=1.0->gradio) (2025.1)\n",
"Requirement already satisfied: annotated-types>=0.6.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from pydantic>=2.0->gradio) (0.7.0)\n",
"Requirement already satisfied: pydantic-core==2.27.2 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from pydantic>=2.0->gradio) (2.27.2)\n",
"Requirement already satisfied: click>=8.0.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from typer<1.0,>=0.12->gradio) (8.1.8)\n",
"Requirement already satisfied: shellingham>=1.3.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from typer<1.0,>=0.12->gradio) (1.5.4)\n",
"Requirement already satisfied: rich>=10.11.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from typer<1.0,>=0.12->gradio) (13.9.4)\n",
"Requirement already satisfied: six>=1.5 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from python-dateutil>=2.8.2->pandas<3.0,>=1.0->gradio) (1.17.0)\n",
"Requirement already satisfied: markdown-it-py>=2.2.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from rich>=10.11.0->typer<1.0,>=0.12->gradio) (3.0.0)\n",
"Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from rich>=10.11.0->typer<1.0,>=0.12->gradio) (2.19.1)\n",
"Requirement already satisfied: charset-normalizer<4,>=2 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from requests->huggingface-hub>=0.28.1->gradio) (3.4.1)\n",
"Requirement already satisfied: urllib3<3,>=1.21.1 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from requests->huggingface-hub>=0.28.1->gradio) (2.3.0)\n",
"Requirement already satisfied: mdurl~=0.1 in /Users/adang4/Documents/devwork/andersondang/llm_engineering/llms/lib/python3.11/site-packages (from markdown-it-py>=2.2.0->rich>=10.11.0->typer<1.0,>=0.12->gradio) (0.1.2)\n",
"Note: you may need to restart the kernel to use updated packages.\n"
]
}
],
"source": [
"%pip install gradio\n",
"import gradio as gr # oh yeah!"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "337d5dfc-0181-4e3b-8ab9-e78e0c3f657b",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"OpenAI API Key exists and begins sk-proj-\n"
]
}
],
"source": [
"# Load environment variables in a file called .env\n",
"# Print the key prefixes to help with any debugging\n",
"\n",
"load_dotenv()\n",
"openai_api_key = os.getenv('OPENAI_API_KEY')\n",
"#anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n",
"#google_api_key = os.getenv('GOOGLE_API_KEY')\n",
"\n",
"if openai_api_key:\n",
" print(f\"OpenAI API Key exists and begins {openai_api_key[:8]}\")\n",
"else:\n",
" print(\"OpenAI API Key not set\")\n",
" \n",
"#if anthropic_api_key:\n",
"# print(f\"Anthropic API Key exists and begins {anthropic_api_key[:7]}\")\n",
"#else:\n",
"# print(\"Anthropic API Key not set\")\n",
"\n",
"#if google_api_key:\n",
"# print(f\"Google API Key exists and begins {google_api_key[:8]}\")\n",
"#else:\n",
"# print(\"Google API Key not set\")"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "22586021-1795-4929-8079-63f5bb4edd4c",
"metadata": {},
"outputs": [],
"source": [
"# Connect to OpenAI, Anthropic and Google; comment out the Claude or Google lines if you're not using them\n",
"\n",
"openai = OpenAI()\n",
"\n",
"#claude = anthropic.Anthropic()\n",
"\n",
"#google.generativeai.configure()"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "b16e6021-6dc4-4397-985a-6679d6c8ffd5",
"metadata": {},
"outputs": [],
"source": [
"# A generic system message - no more snarky adversarial AIs!\n",
"\n",
"system_message = \"You are a helpful assistant\""
]
},
{
"cell_type": "code",
"execution_count": 8,
"id": "02ef9b69-ef31-427d-86d0-b8c799e1c1b1",
"metadata": {},
"outputs": [],
"source": [
"# Let's wrap a call to GPT-4o-mini in a simple function\n",
"\n",
"def message_gpt(prompt):\n",
" messages = [\n",
" {\"role\": \"system\", \"content\": system_message},\n",
" {\"role\": \"user\", \"content\": prompt}\n",
" ]\n",
" completion = openai.chat.completions.create(\n",
" model='gpt-4o-mini',\n",
" messages=messages,\n",
" )\n",
" return completion.choices[0].message.content"
]
},
{
"cell_type": "code",
"execution_count": 9,
"id": "aef7d314-2b13-436b-b02d-8de3b72b193f",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"\"Today's date is October 3, 2023.\""
]
},
"execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"message_gpt(\"What is today's date?\")"
]
},
{
"cell_type": "markdown",
"id": "f94013d1-4f27-4329-97e8-8c58db93636a",
"metadata": {},
"source": [
"## User Interface time!"
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "bc664b7a-c01d-4fea-a1de-ae22cdd5141a",
"metadata": {},
"outputs": [],
"source": [
"# here's a simple function\n",
"\n",
"def shout(text):\n",
" print(f\"Shout has been called with input {text}\")\n",
" return text.upper()"
]
},
{
"cell_type": "code",
"execution_count": 11,
"id": "083ea451-d3a0-4d13-b599-93ed49b975e4",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Shout has been called with input hello\n"
]
},
{
"data": {
"text/plain": [
"'HELLO'"
]
},
"execution_count": 11,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"shout(\"hello\")"
]
},
{
"cell_type": "code",
"execution_count": 12,
"id": "08f1f15a-122e-4502-b112-6ee2817dda32",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"* Running on local URL: http://127.0.0.1:7860\n",
"\n",
"To create a public link, set `share=True` in `launch()`.\n"
]
},
{
"data": {
"text/html": [
"<div><iframe src=\"http://127.0.0.1:7860/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": []
},
"execution_count": 12,
"metadata": {},
"output_type": "execute_result"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Shout has been called with input Hello\n",
"Shout has been called with input What is that?\n",
"Created dataset file at: .gradio/flagged/dataset1.csv\n"
]
}
],
"source": [
"# The simplicty of gradio. This might appear in \"light mode\" - I'll show you how to make this in dark mode later.\n",
"\n",
"gr.Interface(fn=shout, inputs=\"textbox\", outputs=\"textbox\").launch()"
]
},
{
"cell_type": "code",
"execution_count": 13,
"id": "c9a359a4-685c-4c99-891c-bb4d1cb7f426",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"* Running on local URL: http://127.0.0.1:7861\n",
"* Running on public URL: https://ad9f94902cc390abc2.gradio.live\n",
"\n",
"This share link expires in 72 hours. For free permanent hosting and GPU upgrades, run `gradio deploy` from the terminal in the working directory to deploy to Hugging Face Spaces (https://huggingface.co/spaces)\n"
]
},
{
"data": {
"text/html": [
"<div><iframe src=\"https://ad9f94902cc390abc2.gradio.live\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": []
},
"execution_count": 13,
"metadata": {},
"output_type": "execute_result"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Shout has been called with input testing\n"
]
}
],
"source": [
"# Adding share=True means that it can be accessed publically\n",
"# A more permanent hosting is available using a platform called Spaces from HuggingFace, which we will touch on next week\n",
"# NOTE: Some Anti-virus software and Corporate Firewalls might not like you using share=True. If you're at work on on a work network, I suggest skip this test.\n",
"\n",
"gr.Interface(fn=shout, inputs=\"textbox\", outputs=\"textbox\", flagging_mode=\"never\").launch(share=True)"
]
},
{
"cell_type": "code",
"execution_count": 14,
"id": "cd87533a-ff3a-4188-8998-5bedd5ba2da3",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"* Running on local URL: http://127.0.0.1:7862\n",
"\n",
"To create a public link, set `share=True` in `launch()`.\n"
]
},
{
"data": {
"text/html": [
"<div><iframe src=\"http://127.0.0.1:7862/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": []
},
"execution_count": 14,
"metadata": {},
"output_type": "execute_result"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Shout has been called with input This is a test\n"
]
}
],
"source": [
"# Adding inbrowser=True opens up a new browser window automatically\n",
"\n",
"gr.Interface(fn=shout, inputs=\"textbox\", outputs=\"textbox\", flagging_mode=\"never\").launch(inbrowser=True)"
]
},
{
"cell_type": "markdown",
"id": "b42ec007-0314-48bf-84a4-a65943649215",
"metadata": {},
"source": [
"## Forcing dark mode\n",
"\n",
"Gradio appears in light mode or dark mode depending on the settings of the browser and computer. There is a way to force gradio to appear in dark mode, but Gradio recommends against this as it should be a user preference (particularly for accessibility reasons). But if you wish to force dark mode for your screens, below is how to do it."
]
},
{
"cell_type": "code",
"execution_count": 15,
"id": "e8129afa-532b-4b15-b93c-aa9cca23a546",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"* Running on local URL: http://127.0.0.1:7863\n",
"\n",
"To create a public link, set `share=True` in `launch()`.\n"
]
},
{
"data": {
"text/html": [
"<div><iframe src=\"http://127.0.0.1:7863/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": []
},
"execution_count": 15,
"metadata": {},
"output_type": "execute_result"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Shout has been called with input this is darker\n"
]
}
],
"source": [
"# Define this variable and then pass js=force_dark_mode when creating the Interface\n",
"\n",
"force_dark_mode = \"\"\"\n",
"function refresh() {\n",
" const url = new URL(window.location);\n",
" if (url.searchParams.get('__theme') !== 'dark') {\n",
" url.searchParams.set('__theme', 'dark');\n",
" window.location.href = url.href;\n",
" }\n",
"}\n",
"\"\"\"\n",
"gr.Interface(fn=shout, inputs=\"textbox\", outputs=\"textbox\", flagging_mode=\"never\", js=force_dark_mode).launch()"
]
},
{
"cell_type": "code",
"execution_count": 16,
"id": "3cc67b26-dd5f-406d-88f6-2306ee2950c0",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"* Running on local URL: http://127.0.0.1:7864\n",
"\n",
"To create a public link, set `share=True` in `launch()`.\n"
]
},
{
"data": {
"text/html": [
"<div><iframe src=\"http://127.0.0.1:7864/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": []
},
"execution_count": 16,
"metadata": {},
"output_type": "execute_result"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Shout has been called with input interesting\n",
"\n"
]
}
],
"source": [
"# Inputs and Outputs\n",
"\n",
"view = gr.Interface(\n",
" fn=shout,\n",
" inputs=[gr.Textbox(label=\"Your message:\", lines=6)],\n",
" outputs=[gr.Textbox(label=\"Response:\", lines=8)],\n",
" flagging_mode=\"never\"\n",
")\n",
"view.launch()"
]
},
{
"cell_type": "code",
"execution_count": 17,
"id": "f235288e-63a2-4341-935b-1441f9be969b",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"* Running on local URL: http://127.0.0.1:7865\n",
"\n",
"To create a public link, set `share=True` in `launch()`.\n"
]
},
{
"data": {
"text/html": [
"<div><iframe src=\"http://127.0.0.1:7865/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": []
},
"execution_count": 17,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# And now - changing the function from \"shout\" to \"message_gpt\"\n",
"\n",
"view = gr.Interface(\n",
" fn=message_gpt,\n",
" inputs=[gr.Textbox(label=\"Your message:\", lines=6)],\n",
" outputs=[gr.Textbox(label=\"Response:\", lines=8)],\n",
" flagging_mode=\"never\"\n",
")\n",
"view.launch()"
]
},
{
"cell_type": "code",
"execution_count": 18,
"id": "af9a3262-e626-4e4b-80b0-aca152405e63",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"* Running on local URL: http://127.0.0.1:7866\n",
"\n",
"To create a public link, set `share=True` in `launch()`.\n"
]
},
{
"data": {
"text/html": [
"<div><iframe src=\"http://127.0.0.1:7866/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": []
},
"execution_count": 18,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Let's use Markdown\n",
"# Are you wondering why it makes any difference to set system_message when it's not referred to in the code below it?\n",
"# I'm taking advantage of system_message being a global variable, used back in the message_gpt function (go take a look)\n",
"# Not a great software engineering practice, but quite sommon during Jupyter Lab R&D!\n",
"\n",
"system_message = \"You are a helpful assistant that responds in markdown\"\n",
"\n",
"view = gr.Interface(\n",
" fn=message_gpt,\n",
" inputs=[gr.Textbox(label=\"Your message:\")],\n",
" outputs=[gr.Markdown(label=\"Response:\")],\n",
" flagging_mode=\"never\"\n",
")\n",
"view.launch()"
]
},
{
"cell_type": "code",
"execution_count": 43,
"id": "88c04ebf-0671-4fea-95c9-bc1565d4bb4f",
"metadata": {},
"outputs": [],
"source": [
"# Let's create a call that streams back results\n",
"# If you'd like a refresher on Generators (the \"yield\" keyword),\n",
"# Please take a look at the Intermediate Python notebook in week1 folder.\n",
"\n",
"def stream_gpt(sytem_message,prompt,temperature):\n",
" messages = [\n",
" {\"role\": \"system\", \"content\": system_message},\n",
" {\"role\": \"user\", \"content\": prompt}\n",
" ]\n",
" stream = openai.chat.completions.create(\n",
" model='gpt-4o-mini',\n",
" messages=messages,\n",
" stream=True,\n",
" temperature=temperature\n",
" )\n",
" result = \"\"\n",
" for chunk in stream:\n",
" result += chunk.choices[0].delta.content or \"\"\n",
" yield result"
]
},
{
"cell_type": "code",
"execution_count": 20,
"id": "0bb1f789-ff11-4cba-ac67-11b815e29d09",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"* Running on local URL: http://127.0.0.1:7867\n",
"\n",
"To create a public link, set `share=True` in `launch()`.\n"
]
},
{
"data": {
"text/html": [
"<div><iframe src=\"http://127.0.0.1:7867/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": []
},
"execution_count": 20,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"view = gr.Interface(\n",
" fn=stream_gpt,\n",
" inputs=[gr.Textbox(label=\"Your message:\")],\n",
" outputs=[gr.Markdown(label=\"Response:\")],\n",
" flagging_mode=\"never\"\n",
")\n",
"view.launch()"
]
},
{
"cell_type": "code",
"execution_count": 46,
"id": "bbc8e930-ba2a-4194-8f7c-044659150626",
"metadata": {},
"outputs": [],
"source": [
"def stream_claude(system_message,prompt,temperature):\n",
" result = claude.messages.stream(\n",
" model=\"claude-3-haiku-20240307\",\n",
" max_tokens=1000,\n",
" system=system_message,\n",
" messages=[\n",
" {\"role\": \"user\", \"content\": prompt},\n",
" ],\n",
" temperature=temperature\n",
" )\n",
" response = \"\"\n",
" with result as stream:\n",
" for text in stream.text_stream:\n",
" response += text or \"\"\n",
" yield response"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "a0066ffd-196e-4eaf-ad1e-d492958b62af",
"metadata": {},
"outputs": [],
"source": [
"view = gr.Interface(\n",
" fn=stream_claude,\n",
" inputs=[gr.Textbox(label=\"Your message:\")],\n",
" outputs=[gr.Markdown(label=\"Response:\")],\n",
" flagging_mode=\"never\"\n",
")\n",
"view.launch()"
]
},
{
"cell_type": "markdown",
"id": "bc5a70b9-2afe-4a7c-9bed-2429229e021b",
"metadata": {},
"source": [
"## Minor improvement\n",
"\n",
"I've made a small improvement to this code.\n",
"\n",
"Previously, it had these lines:\n",
"\n",
"```\n",
"for chunk in result:\n",
" yield chunk\n",
"```\n",
"\n",
"There's actually a more elegant way to achieve this (which Python people might call more 'Pythonic'):\n",
"\n",
"`yield from result`\n",
"\n",
"I cover this in more detail in the Intermediate Python notebook in the week1 folder - take a look if you'd like more."
]
},
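{
"cell_type": "code",
"execution_count": null,
"id": "yield-from-demo",
"metadata": {},
"outputs": [],
"source": [
"# A tiny illustrative sketch of the point above (toy generator names, just for demonstration):\n",
"# delegating with `yield from` produces exactly the same stream as the explicit for-loop.\n",
"\n",
"def chunks():\n",
"    yield \"Hel\"\n",
"    yield \"lo \"\n",
"    yield \"world\"\n",
"\n",
"def relay_with_loop():\n",
"    for chunk in chunks():\n",
"        yield chunk\n",
"\n",
"def relay_with_yield_from():\n",
"    yield from chunks()\n",
"\n",
"print(list(relay_with_loop()))        # ['Hel', 'lo ', 'world']\n",
"print(list(relay_with_yield_from()))  # ['Hel', 'lo ', 'world']"
]
},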
{
"cell_type": "code",
"execution_count": 48,
"id": "0087623a-4e31-470b-b2e6-d8d16fc7bcf5",
"metadata": {},
"outputs": [],
"source": [
"def stream_model(system_message,prompt,temperature,model):\n",
" if model==\"GPT\":\n",
" result = stream_gpt(system_message,prompt,temperature)\n",
" elif model==\"Claude\":\n",
" result = stream_claude(system_message,prompt,temperature)\n",
" else:\n",
" raise ValueError(\"Unknown model\")\n",
" yield from result"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3b80d2d5",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"* Running on local URL: http://127.0.0.1:7880\n",
"\n",
"To create a public link, set `share=True` in `launch()`.\n"
]
},
{
"data": {
"text/html": [
"<div><iframe src=\"http://127.0.0.1:7880/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": []
},
"execution_count": 50,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"view = gr.Interface(\n",
" fn=stream_model,\n",
" inputs=[gr.Textbox(label=\"AI Agent\",value=system_message),\n",
" gr.Textbox(label=\"Your question:\"), \n",
" gr.Slider(0,2,value=0.7,label=\"AI Temperature\",info=\"AI Creativity Level (higher = more creative)\"),\n",
" gr.Dropdown([\"GPT\", \"Claude\"], label=\"Select model\", value=\"GPT\")],\n",
" outputs=[gr.Markdown(label=\"Response:\")],\n",
" flagging_mode=\"never\"\n",
")\n",
"view.launch()"
]
},
{
"cell_type": "markdown",
"id": "d933865b-654c-4b92-aa45-cf389f1eda3d",
"metadata": {},
"source": [
"# Building a company brochure generator\n",
"\n",
"Now you know how - it's simple!"
]
},
{
"cell_type": "markdown",
"id": "92d7c49b-2e0e-45b3-92ce-93ca9f962ef4",
"metadata": {},
"source": [
"<table style=\"margin: 0; text-align: left;\">\n",
" <tr>\n",
" <td style=\"width: 150px; height: 150px; vertical-align: middle;\">\n",
" <img src=\"../important.jpg\" width=\"150\" height=\"150\" style=\"display: block;\" />\n",
" </td>\n",
" <td>\n",
" <h2 style=\"color:#900;\">Before you read the next few cells</h2>\n",
" <span style=\"color:#900;\">\n",
" Try to do this yourself - go back to the company brochure in week1, day5 and add a Gradio UI to the end. Then come and look at the solution.\n",
" </span>\n",
" </td>\n",
" </tr>\n",
"</table>"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "1626eb2e-eee8-4183-bda5-1591b58ae3cf",
"metadata": {},
"outputs": [],
"source": [
"# A class to represent a Webpage\n",
"\n",
"class Website:\n",
" url: str\n",
" title: str\n",
" text: str\n",
"\n",
" def __init__(self, url):\n",
" self.url = url\n",
" response = requests.get(url)\n",
" self.body = response.content\n",
" soup = BeautifulSoup(self.body, 'html.parser')\n",
" self.title = soup.title.string if soup.title else \"No title found\"\n",
" for irrelevant in soup.body([\"script\", \"style\", \"img\", \"input\"]):\n",
" irrelevant.decompose()\n",
" self.text = soup.body.get_text(separator=\"\\n\", strip=True)\n",
"\n",
" def get_contents(self):\n",
" return f\"Webpage Title:\\n{self.title}\\nWebpage Contents:\\n{self.text}\\n\\n\""
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c701ec17-ecd5-4000-9f68-34634c8ed49d",
"metadata": {},
"outputs": [],
"source": [
"# With massive thanks to Bill G. who noticed that a prior version of this had a bug! Now fixed.\n",
"\n",
"system_message = \"You are an assistant that analyzes the contents of a company website landing page \\\n",
"and creates a short brochure about the company for prospective customers, investors and recruits. Respond in markdown.\""
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5def90e0-4343-4f58-9d4a-0e36e445efa4",
"metadata": {},
"outputs": [],
"source": [
"def stream_brochure(company_name, url, model):\n",
" prompt = f\"Please generate a company brochure for {company_name}. Here is their landing page:\\n\"\n",
" prompt += Website(url).get_contents()\n",
" if model==\"GPT\":\n",
" result = stream_gpt(prompt)\n",
" elif model==\"Claude\":\n",
" result = stream_claude(prompt)\n",
" else:\n",
" raise ValueError(\"Unknown model\")\n",
" yield from result"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "66399365-5d67-4984-9d47-93ed26c0bd3d",
"metadata": {},
"outputs": [],
"source": [
"view = gr.Interface(\n",
" fn=stream_brochure,\n",
" inputs=[\n",
" gr.Textbox(label=\"Company name:\"),\n",
" gr.Textbox(label=\"Landing page URL including http:// or https://\"),\n",
" gr.Dropdown([\"GPT\", \"Claude\"], label=\"Select model\")],\n",
" outputs=[gr.Markdown(label=\"Brochure:\")],\n",
" flagging_mode=\"never\"\n",
")\n",
"view.launch()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ede97ca3-a0f8-4f6e-be17-d1de7fef9cc0",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "llms",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.11"
}
},
"nbformat": 4,
"nbformat_minor": 5
}