3 changed files with 4440 additions and 0 deletions
@@ -0,0 +1,274 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "41136d6f-07bc-4f6f-acba-784b8e5707b1",
"metadata": {},
"outputs": [],
"source": [
"# imports\n",
"\n",
"import requests\n",
"from bs4 import BeautifulSoup\n",
"from IPython.display import Markdown, display"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "8612b4f7-5c31-48f3-8423-261914509617",
"metadata": {},
"outputs": [],
"source": [
"# Constants\n",
"\n",
"OLLAMA_API = \"http://localhost:11434/api/chat\"\n",
"HEADERS = {\"Content-Type\": \"application/json\"}\n",
"MODEL = \"llama3.2\""
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "508bd442-7860-4215-b0f2-57f7adefd807",
"metadata": {},
"outputs": [],
"source": [
"# Create a messages list using the same format that we used for OpenAI\n",
"\n",
"messages = [\n",
"    {\"role\": \"user\", \"content\": \"Describe some of the business applications of Generative AI\"}\n",
"]"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "cc7e8ada-4f8d-4090-be64-4aa72e03ac58",
"metadata": {},
"outputs": [],
"source": [
"# Let's just make sure the model is loaded\n",
"\n",
"!ollama pull llama3.2"
]
},
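{
"cell_type": "code",
"execution_count": null,
"id": "2a1f0f63-9f7e-4d0a-9c1e-3b5a7f6d2c4e",
"metadata": {},
"outputs": [],
"source": [
"# A minimal sketch of calling the Ollama chat endpoint directly with the requests library,\n",
"# using the OLLAMA_API, HEADERS, MODEL and messages defined above.\n",
"# It assumes a local Ollama server is running; with stream set to False the endpoint\n",
"# returns a single JSON object with the reply under message -> content.\n",
"\n",
"payload = {\n",
"    \"model\": MODEL,\n",
"    \"messages\": messages,\n",
"    \"stream\": False\n",
"}\n",
"\n",
"response = requests.post(OLLAMA_API, json=payload, headers=HEADERS)\n",
"print(response.json()[\"message\"][\"content\"])"
]
},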
{
"cell_type": "code",
"execution_count": null,
"id": "4afd2e56-191a-4e31-949e-9b9376a39b5a",
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"# There's actually an alternative approach that some people might prefer\n", |
||||
"# You can use the OpenAI client python library to call Ollama:\n", |
||||
"\n", |
||||
"from openai import OpenAI\n", |
||||
"ollama_via_openai = OpenAI(base_url='http://localhost:11434/v1', api_key='ollama')\n", |
||||
"\n", |
||||
"response = ollama_via_openai.chat.completions.create(\n", |
||||
" model=MODEL,\n", |
||||
" messages=messages\n", |
||||
")\n", |
||||
"\n", |
||||
"print(response.choices[0].message.content)" |
||||
] |
||||
}, |
||||
{ |
||||
"cell_type": "code", |
||||
"execution_count": null, |
||||
"id": "365f3d83-2601-42fb-89cc-98a4e1f79e0d", |
||||
"metadata": {}, |
||||
"outputs": [], |
||||
"source": [ |
||||
"message = \"Hello, GPT! This is my first ever message to you! Hi!\"\n", |
||||
"response = ollama_via_openai.chat.completions.create(model=MODEL, messages=[{\"role\":\"user\", \"content\":message}])\n", |
||||
"print(response.choices[0].message.content)" |
||||
] |
||||
}, |
||||
{ |
||||
"cell_type": "code", |
||||
"execution_count": null, |
||||
"id": "29c383ae-bf5b-41bc-b5af-a22f851745dc", |
||||
"metadata": {}, |
||||
"outputs": [], |
||||
"source": [ |
||||
"# A class to represent a Webpage\n", |
||||
"# If you're not familiar with Classes, check out the \"Intermediate Python\" notebook\n", |
||||
"\n", |
||||
"# Some websites need you to use proper headers when fetching them:\n", |
||||
"headers = {\n", |
||||
" \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36\"\n", |
||||
"}\n", |
||||
"\n", |
||||
"class Website:\n", |
||||
"\n", |
||||
" def __init__(self, url):\n", |
||||
" \"\"\"\n", |
||||
" Create this Website object from the given url using the BeautifulSoup library\n", |
||||
" \"\"\"\n", |
||||
" self.url = url\n", |
||||
" response = requests.get(url, headers=headers)\n", |
||||
" soup = BeautifulSoup(response.content, 'html.parser')\n", |
||||
" self.title = soup.title.string if soup.title else \"No title found\"\n", |
||||
" for irrelevant in soup.body([\"script\", \"style\", \"img\", \"input\"]):\n", |
||||
" irrelevant.decompose()\n", |
||||
" self.text = soup.body.get_text(separator=\"\\n\", strip=True)" |
||||
] |
||||
}, |
||||
{ |
||||
"cell_type": "code", |
||||
"execution_count": null, |
||||
"id": "dc61e30f-653f-4554-b1cd-6e61a0e2430a", |
||||
"metadata": { |
||||
"scrolled": true |
||||
}, |
||||
"outputs": [], |
||||
"source": [ |
||||
"ed = Website(\"https://edwarddonner.com\")\n", |
||||
"print(ed.title)\n", |
||||
"print(ed.text)" |
||||
] |
||||
}, |
||||
{ |
||||
"cell_type": "code", |
||||
"execution_count": null, |
||||
"id": "db2066fb-3079-4775-832a-dcc0f19beb6e", |
||||
"metadata": {}, |
||||
"outputs": [], |
||||
"source": [ |
||||
"\n", |
||||
"system_prompt = \"You are an assistant that analyzes the contents of a website \\\n", |
||||
"and provides a short summary, ignoring text that might be navigation related. \\\n", |
||||
"Respond in markdown.\"" |
||||
] |
||||
}, |
||||
{ |
||||
"cell_type": "code", |
||||
"execution_count": null, |
||||
"id": "af81b070-b6fe-4b18-aa0b-c03cd76a0adf", |
||||
"metadata": {}, |
||||
"outputs": [], |
||||
"source": [ |
||||
"def user_prompt_for(website):\n", |
||||
" user_prompt = f\"You are looking at a website titled {website.title}\"\n", |
||||
" user_prompt += \"\\nThe contents of this website is as follows; \\\n", |
||||
"please provide a short summary of this website in markdown. \\\n", |
||||
"If it includes news or announcements, then summarize these too.\\n\\n\"\n", |
||||
" user_prompt += website.text\n", |
||||
" return user_prompt" |
||||
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "4e66291b-23b1-4915-b6a3-11a4b6a4db66",
"metadata": {},
"outputs": [],
"source": [
"messages = [\n",
"    {\"role\": \"system\", \"content\": \"You are a snarky assistant\"},\n",
"    {\"role\": \"user\", \"content\": \"What is 2 + 2?\"}\n",
"]"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "67c92f47-4a3b-491f-af00-07fda470087e",
"metadata": {},
"outputs": [],
"source": [
"def messages_for(website):\n",
"    return [\n",
"        {\"role\": \"system\", \"content\": system_prompt},\n",
"        {\"role\": \"user\", \"content\": user_prompt_for(website)}\n",
"    ]"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "db1b9085-e5e7-4ec9-a264-acc389085ada",
"metadata": {},
"outputs": [],
"source": [
"messages_for(ed)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "677bfc2f-19ac-46a0-b67e-a2b2ddf9cf6b",
"metadata": {},
"outputs": [],
"source": [
"def summarize(url):\n",
"    website = Website(url)\n",
"    response = ollama_via_openai.chat.completions.create(\n",
"        model=MODEL,\n",
"        messages=messages_for(website)\n",
"    )\n",
"    return response.choices[0].message.content"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ee3242ba-b695-4b1e-8a91-2fdeb536c2e7",
"metadata": {},
"outputs": [],
"source": [
"summarize(\"https://edwarddonner.com\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "85142cb8-ce0c-4c31-8b26-bb1744cf99ec",
"metadata": {},
"outputs": [],
"source": [
"def display_summary(url):\n",
"    summary = summarize(url)\n",
"    display(Markdown(summary))"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "63db51a7-dd03-4514-8954-57156967f82c",
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"display_summary(\"https://app.daily.dev/posts/bregman-arie-devops-exercises-linux-jenkins-aws-sre-prometheus-docker-python-ansible-git-k-yli9wthnf\")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python [conda env:base] *",
"language": "python",
"name": "conda-base-py"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.7"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
File diff suppressed because one or more lines are too long
@@ -0,0 +1,290 @@
{
"cells": [
{
"cell_type": "markdown",
"id": "3f9b483c-f410-4ad3-8f3a-e33527f30f8a",
"metadata": {
"panel-layout": {
"height": 68.2639,
"visible": true,
"width": 100
}
},
"source": [
"# Project - Laptops Assistant\n",
"\n",
"A simple inventory tool integrated with Anthropic API" |
||||
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "cfaff08d-f6e5-4d2d-bfb8-76c154836f3d",
"metadata": {},
"outputs": [],
"source": [
"# imports\n",
"\n",
"import os\n",
"import json\n",
"from dotenv import load_dotenv\n",
"import anthropic\n",
"import gradio as gr"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "a04047ea-d01b-469b-93ce-ab4f4e36ca1e",
"metadata": {},
"outputs": [],
"source": [
"# Load environment variables in a file called .env\n",
"# Print the key prefixes to help with any debugging\n",
"\n",
"load_dotenv(override=True)\n",
"anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n",
"\n",
"if anthropic_api_key:\n",
"    print(f\"Anthropic API Key exists and begins {anthropic_api_key[:7]}\")\n",
"else:\n",
"    print(\"Anthropic API Key not set\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "f5e00ced-f47b-4713-8174-7901e1a69881",
"metadata": {},
"outputs": [],
"source": [
"# Connect to OpenAI, Anthropic and Google; comment out the Claude or Google lines if you're not using them\n", |
||||
"\n", |
||||
"claude = anthropic.Anthropic()" |
||||
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3c715efd-cebf-4dc2-8c99-798f3179dd21",
"metadata": {},
"outputs": [],
"source": [
"MODEL = \"claude-3-haiku-20240307\""
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "2b029d1d-9199-483a-94b7-893680af8ad1",
"metadata": {},
"outputs": [],
"source": [
"system_message = \"You are a helpful assistant for an Inventory Sales called InvAI. \"\n", |
||||
"system_message += \"Give short, courteous answers, no more than 1 sentence. \"\n", |
||||
"system_message += \"Always be accurate. If you don't know the answer, say so.\"" |
||||
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "8ca1197c-e6a1-4579-96c6-24e8e305cc72",
"metadata": {},
"outputs": [],
"source": [
"laptop_items = [\n",
"    {\n",
"        \"model\": \"Aspire 3 A315-59-570Z OPI Pure Silver\",\n",
"        \"brand\": \"Acer\",\n",
"        \"price\": \"$595.96\"\n",
"    },\n",
"    {\n",
"        \"model\": \"Aspire Lite 14 AL14-31P-36BE Pure Silver\",\n",
"        \"brand\": \"Acer\",\n",
"        \"price\": \"$463.52\"\n",
"    },\n",
"    {\n",
"        \"model\": \"Raider 18 HX\",\n",
"        \"brand\": \"MSI\",\n",
"        \"price\": \"$235.25\"\n",
"    }\n",
"]"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "1d2bc76b-c1d0-4b3d-a299-9972f7687e4c",
"metadata": {},
"outputs": [],
"source": [
"def get_laptop_price(model):\n",
"    print(f\"Tool get_laptop_price called for laptop model {model}\")\n",
"    laptop_model = model.lower()\n",
"    for item in laptop_items:\n",
"        if laptop_model in item.get(\"model\").lower():\n",
"            return item\n",
"    return \"Unknown\""
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "afc9b4a3-3a6f-4839-bebc-89bd598394fd",
"metadata": {},
"outputs": [],
"source": [
"\n",
"# get_laptop_price(\"Lite 14 AL14-31P-36BE Pure SilveR\")\n",
"\n",
"get_laptop_price(\"Aspire Lite 14\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "12190074-fad8-43f6-8be1-f96a08c16b59",
"metadata": {},
"outputs": [],
"source": [
"# There's a particular dictionary structure that's required to describe our function:\n",
"\n",
"price_function = {\n",
"    \"name\": \"get_laptop_price\",\n",
"    \"description\": (\n",
" \"Returns the laptop's price, brand, and exact model from a given query.\"\n", |
||||
" \"Use when the user asks about a laptop's price, e.g.,\"\n", |
||||
" \"'How much is this laptop?' → 'The Acer Aspire Lite 14 AL14-31P-36BE Pure Silver is priced at $463.52.'\"\n", |
||||
" ),\n", |
||||
" \"input_schema\": {\n", |
||||
" \"type\": \"object\",\n", |
||||
" \"properties\": {\n", |
||||
" \"model\": {\n", |
||||
" \"type\": \"string\",\n", |
||||
" \"description\": \"The model name of the laptop the customer is asking about.\"\n", |
||||
" }\n", |
||||
" },\n", |
||||
" \"required\": [\"model\"]\n", |
||||
" }\n", |
||||
"}" |
||||
] |
||||
}, |
||||
{ |
||||
"cell_type": "code", |
||||
"execution_count": null, |
||||
"id": "475195e1-dd78-45ba-af6d-16d7cf5c85ae", |
||||
"metadata": {}, |
||||
"outputs": [], |
||||
"source": [ |
||||
"# And this is included in a list of tools:\n", |
||||
"\n", |
||||
"tools = [price_function]" |
||||
] |
||||
}, |
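{
"cell_type": "code",
"execution_count": null,
"id": "7e3d9c2a-5b41-4f8e-9a6d-0c2f4e8b1a37",
"metadata": {},
"outputs": [],
"source": [
"# Optional sanity check (a minimal sketch, not part of the original flow): send one question\n",
"# together with the tools list and inspect the raw response, to see the structure that\n",
"# chat() and handle_tool_call() below rely on. When Claude decides to call the tool,\n",
"# stop_reason is \"tool_use\" and response.content holds the content blocks (typically a\n",
"# text block followed by a tool_use block). The question reuses a model from laptop_items.\n",
"\n",
"test_response = claude.messages.create(\n",
"    model=MODEL,\n",
"    system=system_message,\n",
"    messages=[{\"role\": \"user\", \"content\": \"How much is the Raider 18 HX?\"}],\n",
"    tools=tools,\n",
"    max_tokens=500\n",
")\n",
"\n",
"print(test_response.stop_reason)\n",
"for block in test_response.content:\n",
"    print(block.type, block)"
]
},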
{
"cell_type": "code",
"execution_count": null,
"id": "3834314d-fd37-4e27-9511-bd519389b31b",
"metadata": {},
"outputs": [],
"source": [
"def chat(message, history):\n", |
||||
" print(history)\n", |
||||
" messages = [{\"role\": \"user\", \"content\": message}]\n", |
||||
"\n", |
||||
" for history_message in history:\n", |
||||
" if history_message[\"role\"] == \"user\":\n", |
||||
" messages.append({\"role\": \"user\", \"content\": history_message[\"content\"]})\n", |
||||
" \n", |
||||
" response = claude.messages.create(model=MODEL, messages=messages, tools=tools, max_tokens=500)\n", |
||||
"\n", |
||||
" if len(response.content) > 1:\n", |
||||
" assistant, user, laptop_model = handle_tool_call(response)\n", |
||||
" messages.append(assistant)\n", |
||||
" messages.append(user)\n", |
||||
" response = claude.messages.create(model=MODEL, messages=messages, tools=tools, max_tokens=500)\n", |
||||
"\n", |
||||
"\n", |
||||
" return response.content[0].text" |
||||
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "745a9bf8-6ceb-4c1c-bfbf-b0d1f3d5d6fc",
"metadata": {},
"outputs": [],
"source": [
"# We have to write that function handle_tool_call:\n", |
||||
"\n", |
||||
"def handle_tool_call(message):\n", |
||||
" # laptop_model = message\n", |
||||
" laptop_model = message.content[1].input.get(\"model\")\n", |
||||
" laptop_item = get_laptop_price(laptop_model)\n", |
||||
" assistant = {\n", |
||||
" \"role\": \"assistant\",\n", |
||||
" \"content\": [\n", |
||||
" {\n", |
||||
" \"type\": \"text\",\n", |
||||
" \"text\": message.content[0].text\n", |
||||
" },\n", |
||||
" {\n", |
||||
" \"type\": \"tool_use\",\n", |
||||
" \"id\": message.content[1].id,\n", |
||||
" \"name\": message.content[1].name,\n", |
||||
" \"input\": message.content[1].input\n", |
||||
" }\n", |
||||
" ]\n", |
||||
" }\n", |
||||
" user = {\n", |
||||
" \"role\": \"user\",\n", |
||||
" \"content\": [\n", |
||||
" {\n", |
||||
" \"type\": \"tool_result\",\n", |
||||
" \"tool_use_id\": message.content[1].id,\n", |
||||
" # \"content\": laptop_item.get(\"price\")\n", |
||||
" \"content\": json.dumps(laptop_item)\n", |
||||
" }\n", |
||||
" ]\n", |
||||
" }\n", |
||||
" \n", |
||||
"\n", |
||||
" return assistant, user, laptop_model" |
||||
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "9408eeb4-d07b-4193-92cd-197610ed942e",
"metadata": {},
"outputs": [],
"source": [
"gr.ChatInterface(fn=chat, type=\"messages\").launch()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python [conda env:base] *",
"language": "python",
"name": "conda-base-py"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.7"
},
"panel-cell-order": [
"3f9b483c-f410-4ad3-8f3a-e33527f30f8a"
]
},
"nbformat": 4,
"nbformat_minor": 5
}