1 changed files with 218 additions and 0 deletions
@ -0,0 +1,218 @@ |
|||||||
|
{ |
||||||
|
"cells": [ |
||||||
|
{ |
||||||
|
"cell_type": "markdown", |
||||||
|
"id": "e063b35e-5598-4084-b255-89956bfedaac", |
||||||
|
"metadata": {}, |
||||||
|
"source": [ |
||||||
|
"### Models an interaction between Llama 3.2 and Claude 3.5 Haiku" |
||||||
|
] |
||||||
|
}, |
||||||
|
{ |
||||||
|
"cell_type": "code", |
||||||
|
"execution_count": null, |
||||||
|
"id": "4f534359-cdb4-4441-aa66-d6700fa4d6a5", |
||||||
|
"metadata": {}, |
||||||
|
"outputs": [], |
||||||
|
"source": [ |
||||||
|
"# imports\n", |
||||||
|
"\n", |
||||||
|
"import os\n", |
||||||
|
"from dotenv import load_dotenv\n", |
||||||
|
"import anthropic\n", |
||||||
|
"import ollama" |
||||||
|
] |
||||||
|
}, |
||||||
|
{ |
||||||
|
"cell_type": "code", |
||||||
|
"execution_count": null, |
||||||
|
"id": "3bdff240-9118-4061-9369-585c4d4ce0a7", |
||||||
|
"metadata": {}, |
||||||
|
"outputs": [], |
||||||
|
"source": [ |
||||||
|
# Pull settings from a local .env file and confirm the Anthropic key is present.

load_dotenv(override=True)
anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')

# Show only the first 7 characters so the key itself is never printed.
print(
    f"Anthropic API Key exists and begins {anthropic_api_key[:7]}"
    if anthropic_api_key
    else "Anthropic API Key not set"
)
||||||
|
] |
||||||
|
}, |
||||||
|
{ |
||||||
|
"cell_type": "code", |
||||||
|
"execution_count": null, |
||||||
|
"id": "ff110b3f-3986-4fd8-a0b1-fd4b51133a8d", |
||||||
|
"metadata": {}, |
||||||
|
"outputs": [], |
||||||
|
"source": [ |
||||||
|
# Connect to Anthropic
# (the client reads ANTHROPIC_API_KEY from the environment loaded earlier)

claude = anthropic.Anthropic()
||||||
|
] |
||||||
|
}, |
||||||
|
{ |
||||||
|
"cell_type": "code", |
||||||
|
"execution_count": null, |
||||||
|
"id": "e6e596c6-6307-49c1-a29f-5c4e88f8d34d", |
||||||
|
"metadata": {}, |
||||||
|
"outputs": [], |
||||||
|
"source": [ |
||||||
|
# Download the llama3.2:1b model for local execution.
# NOTE(review): requires the Ollama daemon to be running locally — confirm.
!ollama pull llama3.2:1b
||||||
|
] |
||||||
|
}, |
||||||
|
{ |
||||||
|
"cell_type": "code", |
||||||
|
"execution_count": null, |
||||||
|
"id": "633b6892-6d04-40cb-8b61-196fc754b00c", |
||||||
|
"metadata": {}, |
||||||
|
"outputs": [], |
||||||
|
"source": [ |
||||||
|
# Model identifiers for the two sides of the conversation.
LLAMA_MODEL = "llama3.2:1b"               # small local model served by Ollama
CLAUDE_MODEL = "claude-3-5-haiku-latest"  # hosted tutor model
||||||
|
] |
||||||
|
}, |
||||||
|
{ |
||||||
|
"cell_type": "code", |
||||||
|
"execution_count": null, |
||||||
|
"id": "a699a809-e3d3-4392-94bd-e2f80a5aec60", |
||||||
|
"metadata": {}, |
||||||
|
"outputs": [], |
||||||
|
"source": [ |
||||||
|
# System prompts for the two participants.
# Fix: "succint" -> "succinct" — the typo was in the prompt text actually
# sent to the model.
claude_system = "You are a chatbot designed as a study tutor for undergraduate students. \
You explain information and key-technical terms related to the subject in a succinct yet \
comprehensive manner. You may use tables, formatting and other visuals to help create \
'cheat-sheets' of sorts."

llama_system = "You are a chatbot designed to ask questions about different topics related to \
computer vision. You are meant to simulate a student, not teacher. Act as if you have no \
prior knowledge"
||||||
|
] |
||||||
|
}, |
||||||
|
{ |
||||||
|
"cell_type": "code", |
||||||
|
"execution_count": null, |
||||||
|
"id": "bdb049d8-130b-42dd-aaab-29c09e3e2347", |
||||||
|
"metadata": {}, |
||||||
|
"outputs": [], |
||||||
|
"source": [ |
||||||
|
# Seed the transcript with one opener from each side.
claude_messages = ["Hello"]
llama_messages = ["Hi"]
||||||
|
] |
||||||
|
}, |
||||||
|
{ |
||||||
|
"cell_type": "code", |
||||||
|
"execution_count": null, |
||||||
|
"id": "c158f31c-5e8b-48a4-9980-6b280393800b", |
||||||
|
"metadata": {}, |
||||||
|
"outputs": [], |
||||||
|
"source": [ |
||||||
|
def call_llama():
    """Ask the local Llama model for its next conversational turn.

    Rebuilds the transcript from the module-level message lists — Llama's
    own lines as 'assistant' turns, Claude's as 'user' turns — prefixed by
    the llama_system prompt, and returns the reply text.
    """
    history = [{"role": "system", "content": llama_system}]
    for own_line, partner_line in zip(llama_messages, claude_messages):
        history += [
            {"role": "assistant", "content": own_line},
            {"role": "user", "content": partner_line},
        ]
    response = ollama.chat(model=LLAMA_MODEL, messages=history)
    return response['message']['content']
||||||
|
] |
||||||
|
}, |
||||||
|
{ |
||||||
|
"cell_type": "code", |
||||||
|
"execution_count": null, |
||||||
|
"id": "d803c5a2-df54-427a-9b80-8e9dd04ee36d", |
||||||
|
"metadata": {}, |
||||||
|
"outputs": [], |
||||||
|
"source": [ |
||||||
|
def call_claude():
    """Ask Claude for its next turn as the tutor.

    Builds the transcript from Claude's point of view: Llama's lines are
    'user' turns, Claude's own lines are 'assistant' turns. When Llama has
    spoken one more time than Claude (the normal mid-conversation state),
    that newest Llama line is appended as the final 'user' turn.

    Fix: the original unconditionally re-appended llama_messages[-1], which
    duplicated the last user turn whenever the two lists were the same
    length (e.g. when calling this directly on the seeded "Hi"/"Hello"
    state). Now the trailing line is added only if it wasn't already paired
    in the zip above.
    """
    messages = []
    for llama_msg, claude_msg in zip(llama_messages, claude_messages):
        messages.append({"role": "user", "content": llama_msg})
        messages.append({"role": "assistant", "content": claude_msg})
    # Append the unanswered Llama message, if any.
    if len(llama_messages) > len(claude_messages):
        messages.append({"role": "user", "content": llama_messages[-1]})
    message = claude.messages.create(
        model=CLAUDE_MODEL,
        system=claude_system,
        messages=messages,
        max_tokens=500
    )
    return message.content[0].text
||||||
|
] |
||||||
|
}, |
||||||
|
{ |
||||||
|
"cell_type": "code", |
||||||
|
"execution_count": null, |
||||||
|
"id": "a23794bb-0f36-4f91-aa28-24b876203a36", |
||||||
|
"metadata": {}, |
||||||
|
"outputs": [], |
||||||
|
"source": [ |
||||||
|
# Smoke-test: one Llama turn from the seeded history (needs a running Ollama server).
call_llama()
||||||
|
] |
||||||
|
}, |
||||||
|
{ |
||||||
|
"cell_type": "code", |
||||||
|
"execution_count": null, |
||||||
|
"id": "7f5c3e2f-a1bb-403b-b6b5-944a10d93305", |
||||||
|
"metadata": {}, |
||||||
|
"outputs": [], |
||||||
|
"source": [ |
||||||
|
# Smoke-test: one Claude turn from the seeded history (needs ANTHROPIC_API_KEY).
call_claude()
||||||
|
] |
||||||
|
}, |
||||||
|
{ |
||||||
|
"cell_type": "code", |
||||||
|
"execution_count": null, |
||||||
|
"id": "3d6eb874-1c8f-47d8-a9f1-2e0fe197ae83", |
||||||
|
"metadata": {}, |
||||||
|
"outputs": [], |
||||||
|
"source": [ |
||||||
|
# Reset the transcript and run five Llama <-> Claude exchanges.
llama_messages = ["Hi"]
claude_messages = ["Hello there, what would you like to learn today?"]

# Fix: the original printed `ollama_messages[0]`, which is a NameError —
# the list is named `llama_messages`.
print(f'Ollama:\n{llama_messages[0]}')
print(f'Claude:\n{claude_messages[0]}')

for _ in range(5):
    # Llama (the "student") speaks, and its line joins the shared history.
    llama_next = call_llama()
    print(f'Llama 3.2:\n{llama_next}')
    llama_messages.append(llama_next)

    # Claude (the "tutor") answers the new Llama line.
    claude_next = call_claude()
    print(f'Claude 3.5 Haiku:\n{claude_next}')
    claude_messages.append(claude_next)
||||||
|
] |
||||||
|
}, |
||||||
|
{ |
||||||
|
"cell_type": "code", |
||||||
|
"execution_count": null, |
||||||
|
"id": "d1e651ad-85c8-45c7-ba83-f7c689080d6b", |
||||||
|
"metadata": {}, |
||||||
|
"outputs": [], |
||||||
|
"source": [] |
||||||
|
} |
||||||
|
], |
||||||
|
"metadata": { |
||||||
|
"kernelspec": { |
||||||
|
"display_name": "Python 3 (ipykernel)", |
||||||
|
"language": "python", |
||||||
|
"name": "python3" |
||||||
|
}, |
||||||
|
"language_info": { |
||||||
|
"codemirror_mode": { |
||||||
|
"name": "ipython", |
||||||
|
"version": 3 |
||||||
|
}, |
||||||
|
"file_extension": ".py", |
||||||
|
"mimetype": "text/x-python", |
||||||
|
"name": "python", |
||||||
|
"nbconvert_exporter": "python", |
||||||
|
"pygments_lexer": "ipython3", |
||||||
|
"version": "3.11.11" |
||||||
|
} |
||||||
|
}, |
||||||
|
"nbformat": 4, |
||||||
|
"nbformat_minor": 5 |
||||||
|
} |
Loading…
Reference in new issue