diff --git a/week2/community-contributions/Gemini-api.ipynb b/week2/community-contributions/Gemini-api.ipynb new file mode 100644 index 0000000..8782c1d --- /dev/null +++ b/week2/community-contributions/Gemini-api.ipynb @@ -0,0 +1,149 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 13, + "id": "147ce61d-b10e-478e-8300-2fb3101f617c", + "metadata": {}, + "outputs": [], + "source": [ + "# imports\n", + "\n", + "import os\n", + "from dotenv import load_dotenv\n", + "from openai import OpenAI\n", + "import anthropic\n", + "from IPython.display import Markdown, display, update_display" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "2dab29c1-3a8d-45bc-9f45-407419449ba9", + "metadata": {}, + "outputs": [], + "source": [ + "import google.generativeai" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "5fb5b749-d84b-4f8c-bfb9-2f5c4e8a2daa", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Google API Key exists and begins AIzaSyDU\n" + ] + } + ], + "source": [ + "load_dotenv()\n", + "# openai_api_key = os.getenv('OPENAI_API_KEY')\n", + "# anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n", + "google_api_key = os.getenv('GOOGLE_API_KEY')\n", + "\n", + "if google_api_key:\n", + " print(f\"Google API Key exists and begins {google_api_key[:8]}\")\n", + "else:\n", + " print(\"Google API Key not set\")" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "d34ee171-2647-47cf-9336-2d016480656f", + "metadata": {}, + "outputs": [], + "source": [ + "google.generativeai.configure()" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "856212a1-d07a-400b-9cef-a198e22f26ac", + "metadata": {}, + "outputs": [], + "source": [ + "system_message = \"You are an assistant that is great at telling jokes\"\n", + "user_prompt = \"Tell a light-hearted joke for an audience of Data Scientists\"" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + 
"id": "47289056-cc2b-4d2d-9e18-ecd65c0f3232", + "metadata": {}, + "outputs": [], + "source": [ + "prompts = [\n", + " {\"role\": \"system\", \"content\": system_message},\n", + " {\"role\": \"user\", \"content\": user_prompt}\n", + " ]" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "6d331aaf-162b-499e-af7e-5e097e84f1bd", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Why was the Data Scientist sad? \n", + "\n", + "Because they didn't get any arrays!\n", + "\n" + ] + } + ], + "source": [ + "# The API for Gemini has a slightly different structure.\n", + "# I've heard that on some PCs, this Gemini code causes the Kernel to crash.\n", + "# If that happens to you, please skip this cell and use the next cell instead - an alternative approach.\n", + "\n", + "gemini = google.generativeai.GenerativeModel(\n", + " model_name='gemini-1.5-flash',\n", + " system_instruction=system_message\n", + ")\n", + "response = gemini.generate_content(user_prompt)\n", + "print(response.text)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b727ee91-92b8-4d62-9a03-1b85a76b905c", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.11" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/week2/community-contributions/day2-gemini.ipynb b/week2/community-contributions/day2-gemini.ipynb new file mode 100644 index 0000000..408d125 --- /dev/null +++ b/week2/community-contributions/day2-gemini.ipynb @@ -0,0 +1,495 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": 
"1b89f103-fc49-487e-930e-14abff8bfab1", + "metadata": {}, + "outputs": [], + "source": [ + "# imports\n", + "\n", + "import os\n", + "import requests\n", + "from bs4 import BeautifulSoup\n", + "from typing import List\n", + "from dotenv import load_dotenv\n", + "from openai import OpenAI\n", + "import google.generativeai\n", + "import anthropic" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "1a40e64b-14c6-4589-a671-6817f9cb09f0", + "metadata": {}, + "outputs": [], + "source": [ + "import gradio as gr" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "c0990b15-313d-4cf8-bc5b-fc14d263ba27", + "metadata": {}, + "outputs": [], + "source": [ + "# Load environment variables in a file called .env\n", + "\n", + "load_dotenv()\n", + "os.environ['GOOGLE_API_KEY'] = os.getenv('GOOGLE_API_KEY', 'your-key-if-not-using-env')" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "66a38e1f-db7e-4697-aa9c-a303f9828531", + "metadata": {}, + "outputs": [], + "source": [ + "google.generativeai.configure()" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "beb9606e-9be9-4f2e-adfe-4e41fb99566e", + "metadata": {}, + "outputs": [], + "source": [ + "# A generic system message - no more snarky adversarial AIs!\n", + "\n", + "system_message = \"You are a helpful assistant\"" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "19ab23bc-59cf-48a3-8651-f7b1c52874db", + "metadata": {}, + "outputs": [], + "source": [ + "def message_gemini(prompt):\n", + " messages = [\n", + " {\"role\": \"system\", \"content\": system_message},\n", + " {\"role\": \"user\", \"content\": prompt}\n", + " ]\n", + " gemini = google.generativeai.GenerativeModel(\n", + " model_name='gemini-1.5-flash',\n", + " system_instruction=system_message\n", + ")\n", + " response = gemini.generate_content(prompt)\n", + " return response.text\n", + "\n", + "\n", + "# gemini = google.generativeai.GenerativeModel(\n", + "# 
model_name='gemini-1.5-flash',\n", + "# system_instruction=system_message\n", + "# )\n", + "# response = gemini.generate_content(user_prompt)\n", + "# print(response.text)" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "8fe3c66c-d25d-4627-a401-d84c7d6613e7", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Today is October 26, 2023.\\n'" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "message_gemini(\"What is today's date?\")\n", + "# message_gemini(\"tell me a funny machine learning joke\")" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "b27027ed-4bff-493c-a41e-8318003e0387", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "models/chat-bison-001\n", + "models/text-bison-001\n", + "models/embedding-gecko-001\n", + "models/gemini-1.0-pro-latest\n", + "models/gemini-1.0-pro\n", + "models/gemini-pro\n", + "models/gemini-1.0-pro-001\n", + "models/gemini-1.0-pro-vision-latest\n", + "models/gemini-pro-vision\n", + "models/gemini-1.5-pro-latest\n", + "models/gemini-1.5-pro-001\n", + "models/gemini-1.5-pro-002\n", + "models/gemini-1.5-pro\n", + "models/gemini-1.5-pro-exp-0801\n", + "models/gemini-1.5-pro-exp-0827\n", + "models/gemini-1.5-flash-latest\n", + "models/gemini-1.5-flash-001\n", + "models/gemini-1.5-flash-001-tuning\n", + "models/gemini-1.5-flash\n", + "models/gemini-1.5-flash-exp-0827\n", + "models/gemini-1.5-flash-002\n", + "models/gemini-1.5-flash-8b\n", + "models/gemini-1.5-flash-8b-001\n", + "models/gemini-1.5-flash-8b-latest\n", + "models/gemini-1.5-flash-8b-exp-0827\n", + "models/gemini-1.5-flash-8b-exp-0924\n", + "models/gemini-2.0-flash-exp\n", + "models/gemini-exp-1206\n", + "models/gemini-exp-1121\n", + "models/gemini-exp-1114\n", + "models/gemini-2.0-flash-thinking-exp\n", + "models/gemini-2.0-flash-thinking-exp-1219\n", + "models/learnlm-1.5-pro-experimental\n", + 
"models/embedding-001\n", + "models/text-embedding-004\n", + "models/aqa\n" + ] + } + ], + "source": [ + "import google.generativeai as genai\n", + "for model in genai.list_models():\n", + " print(model.name)" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "2f82d61b-a7cd-4bee-994d-2e83d0a01bfc", + "metadata": {}, + "outputs": [], + "source": [ + "# here's a simple function\n", + "\n", + "def shout(text):\n", + " print(f\"Shout has been called with input {text}\")\n", + " return text.upper()" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "5941fe3f-aab9-47ba-b29f-d99aa3b40aed", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Shout has been called with input hello\n" + ] + }, + { + "data": { + "text/plain": [ + "'HELLO'" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "shout(\"hello\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d6470847-1cce-4bf0-8364-199504a5335f", + "metadata": {}, + "outputs": [], + "source": [ + "# Define this variable and then pass js=force_dark_mode when creating the Interface\n", + "\n", + "force_dark_mode = \"\"\"\n", + "function refresh() {\n", + " const url = new URL(window.location);\n", + " if (url.searchParams.get('__theme') !== 'dark') {\n", + " url.searchParams.set('__theme', 'dark');\n", + " window.location.href = url.href;\n", + " }\n", + "}\n", + "\"\"\"\n", + "gr.Interface(fn=shout, inputs=\"textbox\", outputs=\"textbox\", flagging_mode=\"never\", js=force_dark_mode).launch()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "69715604-cc64-4563-967f-b5720462ac69", + "metadata": {}, + "outputs": [], + "source": [ + "gr.Interface(fn=shout, inputs=\"textbox\", outputs=\"textbox\", js=force_dark_mode).launch()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dede1d8c-fb7a-456a-923b-e221eaa30bd9", + 
"metadata": {}, + "outputs": [], + "source": [ + "gr.Interface(fn=shout, inputs=\"textbox\", outputs=\"textbox\", flagging_mode=\"never\").launch(share=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "55ae11b9-e7af-449f-b737-48dd7dc1a5b2", + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "view = gr.Interface(\n", + " fn=shout,\n", + " inputs=[gr.Textbox(label=\"Your message:\", lines=6)],\n", + " outputs=[gr.Textbox(label=\"Response:\", lines=8)],\n", + " flagging_mode=\"never\"\n", + ")\n", + "view.launch()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cba667cf-d270-426e-b940-a01083352ecb", + "metadata": {}, + "outputs": [], + "source": [ + "view = gr.Interface(\n", + " fn=message_gemini,\n", + " inputs=[gr.Textbox(label=\"Your message:\", lines=6)],\n", + " outputs=[gr.Textbox(label=\"Response:\", lines=8)],\n", + " flagging_mode=\"never\"\n", + ")\n", + "view.launch()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b8bb7885-740f-41f0-95e3-dabe864cea14", + "metadata": {}, + "outputs": [], + "source": [ + "# Let's use Markdown\n", + "# Are you wondering why it makes any difference to set system_message when it's not referred to in the code below it?\n", + "# I'm taking advantage of system_message being a global variable, used back in the message_gpt function (go take a look)\n", + "# Not a great software engineering practice, but quite common during Jupyter Lab R&D!\n", + "\n", + "system_message = \"You are a helpful assistant that responds in markdown\"\n", + "\n", + "view = gr.Interface(\n", + " fn=message_gemini,\n", + " inputs=[gr.Textbox(label=\"Your message:\")],\n", + " outputs=[gr.Markdown(label=\"Response:\")],\n", + " flagging_mode=\"never\"\n", + ")\n", + "view.launch()" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "id": "43d17b00-f4bc-45ad-a679-3112a170f5fb", + "metadata": {}, + "outputs": [], + "source": [ + "import 
google.generativeai as genai\n", + "\n", + "def stream_gemini(prompt):\n", + " gemini = genai.GenerativeModel(\n", + " model_name='gemini-1.5-flash',\n", + " safety_settings=None,\n", + " system_instruction=system_message\n", + " )\n", + "\n", + " response = gemini.generate_content(prompt, safety_settings=[\n", + " {\"category\": \"HARM_CATEGORY_DANGEROUS_CONTENT\", \"threshold\": \"BLOCK_NONE\"},\n", + " {\"category\": \"HARM_CATEGORY_SEXUALLY_EXPLICIT\", \"threshold\": \"BLOCK_NONE\"},\n", + " {\"category\": \"HARM_CATEGORY_HATE_SPEECH\", \"threshold\": \"BLOCK_NONE\"},\n", + " {\"category\": \"HARM_CATEGORY_HARASSMENT\", \"threshold\": \"BLOCK_NONE\"}], stream=True)\n", + " \n", + " result = \"\"\n", + " for chunk in response:\n", + " result += chunk.text\n", + " yield result\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "840f3d11-e66b-4b6b-9b98-70e0f02be9e6", + "metadata": {}, + "outputs": [], + "source": [ + "view = gr.Interface(\n", + " fn=stream_gemini,\n", + " inputs=[gr.Textbox(label=\"Your message:\")],\n", + " outputs=[gr.Markdown(label=\"Response:\")],\n", + " flagging_mode=\"never\"\n", + ")\n", + "view.launch()" + ] + }, + { + "cell_type": "markdown", + "id": "ea8a0081-8d2e-4960-b479-7c1ef346f524", + "metadata": {}, + "source": [ + "# Building a company brochure generator\n", + "\n", + "Now you know how - it's simple!" 
+ ] + }, + { + "cell_type": "code", + "execution_count": 32, + "id": "2d43360a-515e-4008-9eef-7a3c4e47cfba", + "metadata": {}, + "outputs": [], + "source": [ + "# A class to represent a Webpage\n", + "\n", + "class Website:\n", + " url: str\n", + " title: str\n", + " text: str\n", + "\n", + " def __init__(self, url):\n", + " self.url = url\n", + " response = requests.get(url)\n", + " self.body = response.content\n", + " soup = BeautifulSoup(self.body, 'html.parser')\n", + " self.title = soup.title.string if soup.title else \"No title found\"\n", + " for irrelevant in soup.body([\"script\", \"style\", \"img\", \"input\"]):\n", + " irrelevant.decompose()\n", + " self.text = soup.body.get_text(separator=\"\\n\", strip=True)\n", + "\n", + " def get_contents(self):\n", + " return f\"Webpage Title:\\n{self.title}\\nWebpage Contents:\\n{self.text}\\n\\n\"" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "id": "08a07e55-b05d-4360-8e05-61dd39cc019b", + "metadata": {}, + "outputs": [], + "source": [ + "def stream_brochure(company_name, url, model, response_tone):\n", + " prompt = f\"Please generate a {response_tone} company brochure for {company_name}. Here is their landing page:\\n\"\n", + " prompt += Website(url).get_contents()\n", + " if model==\"GPT\":\n", + " result = stream_gpt(prompt)\n", + " elif model==\"Claude\":\n", + " result = stream_claude(prompt)\n", + " elif model==\"Gemini\":\n", + " result = stream_gemini(prompt)\n", + " else:\n", + " raise ValueError(\"Unknown model\")\n", + " yield from result" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "id": "d9554211-c832-4558-90c8-fceab95fd23c", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "* Running on local URL: http://127.0.0.1:7871\n", + "\n", + "To create a public link, set `share=True` in `launch()`.\n" + ] + }, + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + "\n",
+ " ![]() | \n",
+ " \n",
+ " Business Applications\n", + " Conversational Assistants are of course a hugely common use case for Gen AI, and the latest frontier models are remarkably good at nuanced conversation. And Gradio makes it easy to have a user interface. Another crucial skill we covered is how to use prompting to provide context, information and examples.\n", + "\n", + "Consider how you could apply an AI Assistant to your business, and make yourself a prototype. Use the system prompt to give context on your business, and set the tone for the LLM.\n", + " | \n",
+ "