Jupyter notebook to ask a question and get a response from GPT, Claude, and Gemini
{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "5e6b6966-8689-4e2c-8607-a1c5d948296c",
   "metadata": {},
   "source": [
    "### With this interface you can ask a question and get an answer from GPT, Claude and Gemini"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 49,
   "id": "c44c5494-950d-4d2f-8d4f-b87b57c5b330",
   "metadata": {},
   "outputs": [],
   "source": [
    "# imports\n",
    "\n",
    "import os\n",
    "import requests\n",
    "from bs4 import BeautifulSoup\n",
    "from typing import List\n",
    "from dotenv import load_dotenv\n",
    "from openai import OpenAI\n",
    "import google.generativeai\n",
    "import anthropic\n",
    "import time"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "d1715421-cead-400b-99af-986388a97aff",
   "metadata": {},
   "outputs": [],
   "source": [
    "import gradio as gr # oh yeah!"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "337d5dfc-0181-4e3b-8ab9-e78e0c3f657b",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "OpenAI API Key exists and begins sk-proj-\n",
      "Anthropic API Key exists and begins sk-ant-\n",
      "Google API Key exists and begins AIzaSyAJ\n"
     ]
    }
   ],
   "source": [
    "# Load environment variables in a file called .env\n",
    "# Print the key prefixes to help with any debugging\n",
    "\n",
    "load_dotenv()\n",
    "openai_api_key = os.getenv('OPENAI_API_KEY')\n",
    "anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n",
    "google_api_key = os.getenv('GOOGLE_API_KEY')\n",
    "\n",
    "if openai_api_key:\n",
    "    print(f\"OpenAI API Key exists and begins {openai_api_key[:8]}\")\n",
    "else:\n",
    "    print(\"OpenAI API Key not set\")\n",
    "\n",
    "if anthropic_api_key:\n",
    "    print(f\"Anthropic API Key exists and begins {anthropic_api_key[:7]}\")\n",
    "else:\n",
    "    print(\"Anthropic API Key not set\")\n",
    "\n",
    "if google_api_key:\n",
    "    print(f\"Google API Key exists and begins {google_api_key[:8]}\")\n",
    "else:\n",
    "    print(\"Google API Key not set\")"
   ]
  },
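  {
   "cell_type": "markdown",
   "id": "env-file-example-sketch",
   "metadata": {},
   "source": [
    "A minimal sketch of what the `.env` file in the project root might look like. The variable names match the `os.getenv` calls above; the values are placeholders (only the prefixes printed above are real), so substitute your own keys:\n",
    "\n",
    "```\n",
    "OPENAI_API_KEY=sk-proj-...\n",
    "ANTHROPIC_API_KEY=sk-ant-...\n",
    "GOOGLE_API_KEY=AIzaSy...\n",
    "```"
   ]
  },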
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "22586021-1795-4929-8079-63f5bb4edd4c",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Connect to OpenAI, Anthropic and Google; comment out the Claude or Google lines if you're not using them\n",
    "\n",
    "openai = OpenAI()\n",
    "\n",
    "claude = anthropic.Anthropic()\n",
    "\n",
    "google.generativeai.configure()"
   ]
  },
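  {
   "cell_type": "code",
   "execution_count": null,
   "id": "explicit-api-keys-sketch",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Optional sketch: the three clients above pick their keys up from the environment.\n",
    "# If that doesn't work in your setup, you can pass the variables loaded earlier explicitly;\n",
    "# this is an alternative to the cell above, not an additional required step.\n",
    "\n",
    "openai = OpenAI(api_key=openai_api_key)\n",
    "claude = anthropic.Anthropic(api_key=anthropic_api_key)\n",
    "google.generativeai.configure(api_key=google_api_key)"
   ]
  },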
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "b16e6021-6dc4-4397-985a-6679d6c8ffd5",
   "metadata": {},
   "outputs": [],
   "source": [
    "# A generic system message - no more snarky adversarial AIs!\n",
    "\n",
    "system_message = \"You are a helpful assistant\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "88c04ebf-0671-4fea-95c9-bc1565d4bb4f",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Let's create a call that streams back results\n",
    "# If you'd like a refresher on Generators (the \"yield\" keyword),\n",
    "# please take a look at the Intermediate Python notebook in the week1 folder.\n",
    "\n",
    "def stream_gpt(prompt):\n",
    "    messages = [\n",
    "        {\"role\": \"system\", \"content\": system_message},\n",
    "        {\"role\": \"user\", \"content\": prompt}\n",
    "    ]\n",
    "    stream = openai.chat.completions.create(\n",
    "        model='gpt-4o-mini',\n",
    "        messages=messages,\n",
    "        stream=True\n",
    "    )\n",
    "    result = \"\"\n",
    "    for chunk in stream:\n",
    "        result += chunk.choices[0].delta.content or \"\"\n",
    "        yield result"
   ]
  },
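  {
   "cell_type": "code",
   "execution_count": null,
   "id": "generator-refresher-sketch",
   "metadata": {},
   "outputs": [],
   "source": [
    "# A tiny, self-contained illustration of the generator pattern stream_gpt uses above:\n",
    "# each yield hands back the text accumulated so far, which is what lets Gradio redraw\n",
    "# the output incrementally. The word list here is just a made-up example.\n",
    "\n",
    "def demo_stream(words):\n",
    "    result = \"\"\n",
    "    for word in words:\n",
    "        result += word + \" \"\n",
    "        yield result\n",
    "\n",
    "for partial in demo_stream([\"streaming\", \"builds\", \"up\", \"the\", \"reply\"]):\n",
    "    print(partial)"
   ]
  },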
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "bbc8e930-ba2a-4194-8f7c-044659150626",
   "metadata": {},
   "outputs": [],
   "source": [
    "def stream_claude(prompt):\n",
    "    result = claude.messages.stream(\n",
    "        model=\"claude-3-haiku-20240307\",\n",
    "        max_tokens=1000,\n",
    "        temperature=0.7,\n",
    "        system=system_message,\n",
    "        messages=[\n",
    "            {\"role\": \"user\", \"content\": prompt},\n",
    "        ],\n",
    "    )\n",
    "    response = \"\"\n",
    "    with result as stream:\n",
    "        for text in stream.text_stream:\n",
    "            response += text or \"\"\n",
    "            yield response"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "5e228aff-16d5-4141-bd04-ed9940ef7b3b",
   "metadata": {},
   "outputs": [],
   "source": [
    "def stream_gemini(prompt):\n",
    "    gemini = google.generativeai.GenerativeModel(\n",
    "        model_name='gemini-2.0-flash-exp',\n",
    "        system_instruction=system_message\n",
    "    )\n",
    "    result = \"\"\n",
    "    for response in gemini.generate_content(prompt, stream=True):\n",
    "        result += response.text or \"\"\n",
    "        yield result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 92,
   "id": "db99aaf1-fe0a-4e79-9057-8599d1ca0149",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Call the three models one after another, yielding the accumulated responses\n",
    "# as a tuple so Gradio can update all three output panels\n",
    "\n",
    "def stream_models(prompt):\n",
    "    response_gpt = \"\"\n",
    "    response_claude = \"\"\n",
    "    response_gemini = \"\"\n",
    "    for gpt in stream_gpt(prompt):\n",
    "        response_gpt = gpt\n",
    "        yield response_gpt, response_claude, response_gemini\n",
    "    for claude in stream_claude(prompt):\n",
    "        response_claude = claude\n",
    "        yield response_gpt, response_claude, response_gemini\n",
    "    for gemini in stream_gemini(prompt):\n",
    "        response_gemini = gemini\n",
    "        yield response_gpt, response_claude, response_gemini"
   ]
  },
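  {
   "cell_type": "code",
   "execution_count": null,
   "id": "stream-models-smoke-test",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Optional quick check of stream_models outside Gradio, using only the functions defined above.\n",
    "# The prompt is just an illustrative placeholder; each yielded tuple holds the text accumulated\n",
    "# so far for GPT, Claude and Gemini, in that order. Note this makes real API calls.\n",
    "\n",
    "for gpt_text, claude_text, gemini_text in stream_models(\"Say hello in one short sentence\"):\n",
    "    pass  # just consume the stream; the loop variables end up holding the full responses\n",
    "\n",
    "print(gpt_text)\n",
    "print(claude_text)\n",
    "print(gemini_text)"
   ]
  },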
  {
   "cell_type": "code",
   "execution_count": 113,
   "id": "3377f2fb-55f8-45cb-b713-d99d44748dad",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "* Running on local URL: http://127.0.0.1:7919\n",
      "\n",
      "To create a public link, set `share=True` in `launch()`.\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div><iframe src=\"http://127.0.0.1:7919/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
      ],
      "text/plain": [
       "<IPython.core.display.HTML object>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "text/plain": []
     },
     "execution_count": 113,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Gradio interface\n",
    "with gr.Blocks() as view:\n",
    "    user_input = gr.Textbox(label=\"What can the models help with?\", placeholder=\"Type your question here\")\n",
    "    ask_button = gr.Button(\"Ask\")\n",
    "    with gr.Row():\n",
    "        with gr.Column():\n",
    "            gr.HTML(value=\"<b>GPT response:</b>\")\n",
    "            gpt_stream = gr.Markdown()\n",
    "        with gr.Column():\n",
    "            gr.HTML(value=\"<b>Claude response:</b>\")\n",
    "            claude_stream = gr.Markdown()\n",
    "        with gr.Column():\n",
    "            gr.HTML(value=\"<b>Gemini response:</b>\")\n",
    "            gemini_stream = gr.Markdown()\n",
    "\n",
    "    ask_button.click(\n",
    "        fn=stream_models,  # Function that yields multiple outputs\n",
    "        inputs=user_input,\n",
    "        outputs=[gpt_stream, claude_stream, gemini_stream]  # Connect to multiple outputs\n",
    "    )\n",
    "\n",
    "view.launch()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.11"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}