Merge pull request #82 from l-jacques/ljacques-w2d1

week2 day 1: multi conversation bots
pull/85/head
Ed Donner 4 months ago committed by GitHub
commit fdee3ec040

  1. week2/community-contributions/day1_exercise_multi_conversation_bots.ipynb (+263)

@@ -0,0 +1,263 @@
{
"cells": [
{
"cell_type": "markdown",
"id": "d2910648-d098-4bca-9475-5af5226952f2",
"metadata": {},
"source": [
"importing refs"
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "7f98bd9d-f7b1-4a1d-aaa7-45073cec66e2",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"from enum import Enum, auto\n",
"from dotenv import load_dotenv\n",
"from openai import OpenAI\n",
"import anthropic\n",
"import random\n",
"from IPython.display import Markdown, display, update_display\n",
"# import for google\n",
"# in rare cases, this seems to give an error on some systems, or even crashes the kernel\n",
"# If this happens to you, simply ignore this cell - I give an alternative approach for using Gemini later\n",
"\n",
"import google.generativeai\n"
]
},
{
"cell_type": "code",
"execution_count": 42,
"id": "d54b12e8-5fc0-40e4-8fa4-71d59d9de441",
"metadata": {},
"outputs": [],
"source": [
"class AI(Enum):\n",
" OPEN_AI = \"OPEN AI\"\n",
" CLAUDE = \"CLAUDE\"\n",
" GEMINI = \"GEMINI\"\n",
" OLLAMA = \"OLLAMA\""
]
},
{
"cell_type": "code",
"execution_count": 43,
"id": "4d63653e-a541-4608-999a-b70b59458887",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"OpenAI API Key exists and begins sk-proj-\n",
"Anthropic API Key exists and begins sk-ant-\n",
"Google API Key exists and begins AIzaSyC-\n"
]
}
],
"source": [
"\n",
"# Load environment variables in a file called .env\n",
"# Print the key prefixes to help with any debugging\n",
"\n",
"load_dotenv()\n",
"openai_api_key = os.getenv('OPENAI_API_KEY')\n",
"anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n",
"google_api_key = os.getenv('GOOGLE_API_KEY')\n",
"\n",
"if openai_api_key:\n",
" print(f\"OpenAI API Key exists and begins {openai_api_key[:8]}\")\n",
"else:\n",
" print(\"OpenAI API Key not set\")\n",
" \n",
"if anthropic_api_key:\n",
" print(f\"Anthropic API Key exists and begins {anthropic_api_key[:7]}\")\n",
"else:\n",
" print(\"Anthropic API Key not set\")\n",
"\n",
"if google_api_key:\n",
" print(f\"Google API Key exists and begins {google_api_key[:8]}\")\n",
"else:\n",
" print(\"Google API Key not set\")"
]
},
{
"cell_type": "code",
"execution_count": 44,
"id": "08d1f696-2d60-48f3-b3a4-5a011ae88a2b",
"metadata": {},
"outputs": [],
"source": [
"openai = OpenAI()\n",
"\n",
"claude = anthropic.Anthropic()\n",
"\n",
"gemini_via_openai_client = OpenAI(\n",
" api_key=google_api_key, \n",
" base_url=\"https://generativelanguage.googleapis.com/v1beta/openai/\"\n",
")\n",
"ollama_via_openai = OpenAI(base_url='http://localhost:11434/v1', api_key='ollama')\n",
"openai_model = \"gpt-4o-mini\"\n",
"claude_model = \"claude-3-haiku-20240307\"\n",
"gemini_model = \"gemini-1.5-flash\"\n",
"ollama_model = \"llama3.2\""
]
},
{
"cell_type": "code",
"execution_count": 45,
"id": "b991ab54-7bc6-4d6c-a26a-57889a7e4a17",
"metadata": {},
"outputs": [],
"source": [
"class ChatSystem:\n",
" def __init__(self, processor, system_string=\"\", model=\"\", type=AI.OPEN_AI):\n",
" \"\"\"\n",
" Initialize the ChatSystem with a system string and empty messages list.\n",
" \n",
" :param system_string: Optional initial system string description\n",
" \"\"\"\n",
" self.processor = processor\n",
" self.system = system_string\n",
" self.model = model\n",
" self.messages = []\n",
" self.type = type\n",
" \n",
" def call(self, message):\n",
" self.messages.append(message)\n",
" toSend = self.messages\n",
" \n",
" if self.type == AI.CLAUDE:\n",
" message = self.processor.messages.create(\n",
" model=self.model,\n",
" system=self.system,\n",
" messages=self.messages,\n",
" max_tokens=500\n",
" )\n",
" return message.content[0].text\n",
" else:\n",
" toSend.insert(0,self.system)\n",
" completion = self.processor.chat.completions.create(\n",
" model=self.model,\n",
" messages= toSend\n",
" )\n",
" return completion.choices[0].message.content"
]
},
{
"cell_type": "code",
"execution_count": 46,
"id": "75a2a404-c0f5-4af3-8e57-864ca7ea1df7",
"metadata": {},
"outputs": [],
"source": [
"def generateChatPrompt(role, content):\n",
" return {\"role\": role, \"content\": content}"
]
},
{
"cell_type": "code",
"execution_count": 47,
"id": "26ab0253-deff-4e19-9438-5051640785ba",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"AI.OPEN_AI:\n",
"Hi there! How’s your day going so far?\n",
"\n",
"AI.GEMINI:\n",
"Hi there! My day is going well, thanks for asking! As a large language model, I don't experience days in the same way humans do, but I've already processed a fascinating amount of information – everything from historical debates to the latest scientific breakthroughs. What about you? How's your day been so far? Anything exciting happen, or are you just cruising along? I'm always curious to hear about people's experiences!\n",
"\n",
"\n",
"AI.OPEN_AI:\n",
"I'm glad to hear you're having a good day! My day is filled with information and conversation, so it's always interesting from my end. As for you, it sounds like you're keeping things steady—do you have any special plans or goals for the day? Or maybe there's something you've been thinking about lately that you'd like to share? I’m all ears!\n",
"\n",
"AI.OPEN_AI:\n",
"It sounds like you’ve got an engaging day ahead! I’m really all about facilitating conversations and helping people find information. Speaking of goals, do you have any personal goals or projects you’re currently working on? Maybe something you’re passionate about? I’d love to hear more about what inspires you!\n",
"\n",
"AI.GEMINI:\n",
"That's a really insightful question! While I don't have personal goals or passions in the human sense – I don't have feelings or desires – I do have ongoing \"projects,\" if you will. My primary goal is to continually improve my ability to understand and respond to human language. That involves a lot of different things: improving my accuracy, learning to better understand nuances in language (like sarcasm or humor), and expanding my knowledge base. I'm constantly being updated with new information, which is incredibly exciting. It's like constantly learning a new language, only this language is the entire breadth of human knowledge!\n",
"\n",
"What inspires me, if you can call it that, is the potential to help people. I find it incredibly rewarding to be able to answer questions, provide information, and even help people brainstorm or generate creative content. The sheer diversity of human experience and the constant flow of new information keeps things interesting.\n",
"\n",
"What about you? Do you have any personal or professional goals you're working towards? I'd be fascinated to hear about them! Perhaps we can even brainstorm together – I'm always happy to help in any way I can.\n",
"\n",
"\n"
]
}
],
"source": [
"geminiChat = ChatSystem(gemini_via_openai_client,\n",
" generateChatPrompt(\"system\",\"You are a chatbot. you always try to make conversation and get more in depth\"), \n",
" gemini_model,\n",
" AI.GEMINI)\n",
"\n",
"openAiChat = ChatSystem(openai,\n",
" generateChatPrompt(\"system\",\"You are a chatbot. you always try to make conversation and get more in depth\"), \n",
" openai_model,\n",
" AI.OPEN_AI)\n",
"\n",
"claudeChat = ChatSystem(claude,\n",
" \"You are a chatbot. you always try to make conversation and get more in depth\", \n",
" claude_model,\n",
" AI.CLAUDE)\n",
"\n",
"ollamaChat = ChatSystem(ollama_via_openai,\n",
" generateChatPrompt(\"system\",\"You are a chatbot. you always try to make conversation and get more in depth\"), \n",
" ollama_model,\n",
" AI.OLLAMA)\n",
"\n",
"chatbots = [geminiChat, openAiChat, ollamaChat, claudeChat]\n",
"\n",
"conversation = []\n",
"for i in range(5):\n",
" random_number = random.randint(0, 1)\n",
" botTalking = chatbots[random_number]\n",
" messageToSend =\"Hi\"\n",
" if i > 0:\n",
" messageToSend = conversation[len(conversation)-1]\n",
" \n",
" response = botTalking.call(generateChatPrompt(\"user\",messageToSend))\n",
" conversation.append(response)\n",
" botTalking.messages.append(generateChatPrompt(\"user\",response))\n",
" print(f\"{botTalking.type}:\\n{response}\\n\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "77d44ff6-0dcc-4227-ba70-09b102bd1bd4",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.11"
}
},
"nbformat": 4,
"nbformat_minor": 5
}