diff --git a/week1/day2 EXERCISE.ipynb b/week1/day2 EXERCISE.ipynb
index 2c079f1..36baca5 100644
--- a/week1/day2 EXERCISE.ipynb
+++ b/week1/day2 EXERCISE.ipynb
@@ -182,27 +182,6 @@
 "## Alternative approach - using OpenAI python library to connect to Ollama"
 ]
 },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "23057e00-b6fc-4678-93a9-6b31cb704bff",
- "metadata": {},
- "outputs": [],
- "source": [
- "# There's actually an alternative approach that some people might prefer\n",
- "# You can use the OpenAI client python library to call Ollama:\n",
- "\n",
- "from openai import OpenAI\n",
- "ollama_via_openai = OpenAI(base_url='http://localhost:11434/v1', api_key='ollama')\n",
- "\n",
- "response = ollama_via_openai.chat.completions.create(\n",
- "    model=MODEL,\n",
- "    messages=messages\n",
- ")\n",
- "\n",
- "print(response.choices[0].message.content)"
- ]
- },
 {
 "cell_type": "markdown",
 "id": "9f9e22da-b891-41f6-9ac9-bd0c0a5f4f44",
@@ -233,6 +212,27 @@
 "Then this will make the same endpoint calls, but to Ollama instead of OpenAI."
 ]
 },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "23057e00-b6fc-4678-93a9-6b31cb704bff",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# There's actually an alternative approach that some people might prefer\n",
+ "# You can use the OpenAI client python library to call Ollama:\n",
+ "\n",
+ "from openai import OpenAI\n",
+ "ollama_via_openai = OpenAI(base_url='http://localhost:11434/v1', api_key='ollama')\n",
+ "\n",
+ "response = ollama_via_openai.chat.completions.create(\n",
+ "    model=MODEL,\n",
+ "    messages=messages\n",
+ ")\n",
+ "\n",
+ "print(response.choices[0].message.content)"
+ ]
+ },
 {
 "cell_type": "markdown",
 "id": "bc7d1de3-e2ac-46ff-a302-3b4ba38c4c90",
@@ -275,7 +275,7 @@
 },
 {
 "cell_type": "markdown",
- "id": "1622d9bb-5c68-4d4e-9ca4-b492c751f898",
+ "id": "f160ab15-4206-4515-8751-0e5108ff577c",
 "metadata": {},
 "source": [
 "# NOW the exercise for you\n",
@@ -283,13 +283,215 @@
 "Take the code from day1 and incorporate it here, to build a website summarizer that uses Llama 3.2 running locally instead of OpenAI; use either of the above approaches."
 ]
 },
+ {
+ "cell_type": "markdown",
+ "id": "1622d9bb-5c68-4d4e-9ca4-b492c751f898",
+ "metadata": {},
+ "source": [
+ "## Imports for Llama using the ollama package\n",
+ "\n",
+ "We'll use the ollama Python package to make requests to the locally running model.\n",
+ "\n",
+ "Note: everything has already been imported earlier in this notebook."
+ ]
+ },
 {
 "cell_type": "code",
 "execution_count": null,
- "id": "6de38216-6d1c-48c4-877b-86d403f4e0f8",
+ "id": "9b89249a-4ae4-461d-a547-b7c13f98452e",
 "metadata": {},
 "outputs": [],
- "source": []
+ "source": [
+ "# Already imported earlier in this notebook - listed here as a reminder:\n",
+ "\n",
+ "# import os\n",
+ "# import requests\n",
+ "# from dotenv import load_dotenv\n",
+ "# from bs4 import BeautifulSoup\n",
+ "# from IPython.display import Markdown, display\n",
+ "# import ollama\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "a88db1c7-aeaf-4284-a3b2-55ad6f7cdbc7",
+ "metadata": {},
+ "source": [
+ "## Website class"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "87879ba5-a358-4e07-bac5-118f5b309b1a",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# A class to represent a Webpage\n",
+ "# If you're not familiar with Classes, check out the \"Intermediate Python\" notebook\n",
+ "\n",
+ "# Some websites need you to use proper headers when fetching them:\n",
+ "headers = {\n",
+ "    \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36\"\n",
+ "}\n",
+ "\n",
+ "class Website:\n",
+ "\n",
+ "    def __init__(self, url):\n",
+ "        \"\"\"\n",
+ "        Create this Website object from the given url using the BeautifulSoup library\n",
+ "        \"\"\"\n",
+ "        self.url = url\n",
+ "        response = requests.get(url, headers=headers)\n",
+ "        soup = BeautifulSoup(response.content, 'html.parser')\n",
+ "        self.title = soup.title.string if soup.title else \"No title found\"\n",
+ "        for irrelevant in soup.body([\"script\", \"style\", \"img\", \"input\"]):\n",
+ "            irrelevant.decompose()\n",
+ "        self.text = soup.body.get_text(separator=\"\\n\", strip=True)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "f7234905-4295-4f52-a786-79b742112422",
+ "metadata": {},
+ "source": [
+ "## System prompt"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "85d38054-88f8-4981-accd-b5e3d1e5e73e",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Define our system prompt - you can experiment with this later, changing the last sentence to 'Respond in markdown in Spanish.'\n",
+ "\n",
+ "system_prompt = \"You are an assistant that analyzes the contents of a website \\\n",
+ "and provides a short summary, ignoring text that might be navigation related. \\\n",
+ "Respond in markdown.\""
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "57b40e11-3567-4a25-ac6b-6bbcbc5e47e5",
+ "metadata": {},
+ "source": [
+ "## User prompt function"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "05e68137-e711-458e-ad11-c184b80a388b",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# A function that writes a User Prompt that asks for summaries of websites:\n",
+ "\n",
+ "user_prompt_content = \"\\nThe contents of this website is as follows; \\\n",
+ "please provide a short summary of this website in markdown. \\\n",
+ "If it includes news or announcements, then summarize these too.\\n\\n\"\n",
+ "\n",
+ "def user_prompt_for(website):\n",
+ "    user_prompt = f\"You are looking at a website titled {website.title}\"\n",
+ "    user_prompt += user_prompt_content\n",
+ "    user_prompt += website.text\n",
+ "    return user_prompt"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "d7ba76f3-6ef9-4773-ac96-66c758d85e3d",
+ "metadata": {},
+ "source": [
+ "## Messages for website function"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "e2008550-d50d-4ea1-81a9-2a317068cda3",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# See how this function creates exactly the format above\n",
+ "\n",
+ "def messages_for(website):\n",
+ "    return [\n",
+ "        {\"role\": \"system\", \"content\": system_prompt},\n",
+ "        {\"role\": \"user\", \"content\": user_prompt_for(website)}\n",
+ "    ]"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "921bef16-e40c-412b-9fcd-4841ff02655e",
+ "metadata": {},
+ "source": [
+ "## Summarize function"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f7f12198-a30e-4533-af5c-c12beeecbbce",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# And now: call Ollama (via the ollama package) to generate the summary\n",
+ "\n",
+ "def summarize_ollama(url):\n",
+ "    website = Website(url)\n",
+ "    response = ollama.chat(\n",
+ "        model=MODEL,\n",
+ "        messages=messages_for(website)\n",
+ "    )\n",
+ "    return response['message']['content']"
+ ]
+ },
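+ {
+ "cell_type": "markdown",
+ "id": "optional-openai-client-summarizer",
+ "metadata": {},
+ "source": [
+ "## Optional: the same summary via the OpenAI client library\n",
+ "\n",
+ "A minimal sketch of the second approach mentioned above, not part of the original exercise solution: it defines a new helper, `summarize_via_openai_client`, and assumes the earlier cell that created `ollama_via_openai` (the OpenAI client pointed at http://localhost:11434/v1) has been run and that `MODEL` is still set."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "optional-openai-client-summarizer-code",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# A sketch of the alternative: the same messages, sent through the\n",
+ "# OpenAI-compatible endpoint that Ollama exposes (see the ollama_via_openai\n",
+ "# client created earlier in this notebook).\n",
+ "\n",
+ "def summarize_via_openai_client(url):\n",
+ "    website = Website(url)\n",
+ "    response = ollama_via_openai.chat.completions.create(\n",
+ "        model=MODEL,\n",
+ "        messages=messages_for(website)\n",
+ "    )\n",
+ "    return response.choices[0].message.content"
+ ]
+ },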
\\\n", + "If it includes news or announcements, then summarize these too.\\n\\n\"\n", + " user_prompt += website.text\n", + " return user_prompt" + ] + }, + { + "cell_type": "markdown", + "id": "d7ba76f3-6ef9-4773-ac96-66c758d85e3d", + "metadata": {}, + "source": [ + "## Messages for website function" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e2008550-d50d-4ea1-81a9-2a317068cda3", + "metadata": {}, + "outputs": [], + "source": [ + "# See how this function creates exactly the format above\n", + "\n", + "def messages_for(website):\n", + " return [\n", + " {\"role\": \"system\", \"content\": system_prompt},\n", + " {\"role\": \"user\", \"content\": user_prompt_for(website)}\n", + " ]" + ] + }, + { + "cell_type": "markdown", + "id": "921bef16-e40c-412b-9fcd-4841ff02655e", + "metadata": {}, + "source": [ + "## Summarize function" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f7f12198-a30e-4533-af5c-c12beeecbbce", + "metadata": {}, + "outputs": [], + "source": [ + "# And now: call the OpenAI API. You will get very familiar with this!\n", + "\n", + "def summarize_ollama(url):\n", + " website = Website(url)\n", + " response = ollama.chat(\n", + " model=MODEL,\n", + " messages = messages_for(website)\n", + " )\n", + " return response['message']['content']" + ] + }, + { + "cell_type": "markdown", + "id": "91639e0a-e4a3-43bb-9f38-ed7bea91be72", + "metadata": {}, + "source": [ + "## Display Summary function" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b11a6240-65e8-4453-809d-ee51dde5fc25", + "metadata": {}, + "outputs": [], + "source": [ + "# A function to display this nicely in the Jupyter output, using markdown\n", + "\n", + "def display_summary_ollama(url):\n", + " summary = summarize_ollama(url)\n", + " display(Markdown(summary))" + ] + }, + { + "cell_type": "markdown", + "id": "874fad51-bd82-48d1-ad6f-a63f3db90c19", + "metadata": {}, + "source": [ + "## Test the function" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7b45d561-0cd0-4d21-b50b-e61edb932498", + "metadata": {}, + "outputs": [], + "source": [ + "test_url = \"https://edwarddonner.com\"\n", + "display_summary_ollama(test_url)" + ] } ], "metadata": { @@ -308,7 +510,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.11" + "version": "3.11.12" } }, "nbformat": 4,