From fe1a0d79acd8d3e4c918df27dfb00a551e99851e Mon Sep 17 00:00:00 2001
From: 266367 <266367@nttdata.com>
Date: Mon, 27 Jan 2025 19:57:40 -0500
Subject: [PATCH 1/2] Wk1 Day2 Exercise Ollama solution

---
 .../wk1-day1-deepseek-stream-summarize.ipynb | 250 +++++++++---------
 1 file changed, 128 insertions(+), 122 deletions(-)

diff --git a/week1/community-contributions/wk1-day1-deepseek-stream-summarize.ipynb b/week1/community-contributions/wk1-day1-deepseek-stream-summarize.ipynb
index 2e615ed..0e7a226 100644
--- a/week1/community-contributions/wk1-day1-deepseek-stream-summarize.ipynb
+++ b/week1/community-contributions/wk1-day1-deepseek-stream-summarize.ipynb
@@ -1,125 +1,131 @@
 {
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "a767b6bc-65fe-42b2-988f-efd54125114f",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import os\n",
-    "import requests\n",
-    "from dotenv import load_dotenv\n",
-    "from bs4 import BeautifulSoup\n",
-    "from IPython.display import Markdown, display, clear_output\n",
-    "from openai import OpenAI\n",
-    "\n",
-    "load_dotenv(override=True)\n",
-    "api_key = os.getenv('DEEPSEEK_API_KEY')\n",
-    "base_url=os.getenv('DEEPSEEK_BASE_URL')\n",
-    "MODEL = \"deepseek-chat\"\n",
-    "\n",
-    "system_prompt = \"You are an assistant that analyzes the contents of a website \\\n",
-    "and provides a short summary, ignoring text that might be navigation related. \\\n",
-    "Respond in markdown.\"\n",
-    "\n",
-    "messages = [\n",
-    "    {\"role\": \"system\", \"content\": \"You are a snarky assistant\"},\n",
-    "    {\"role\": \"user\", \"content\": \"What is 2 + 2?\"}\n",
-    "]\n",
-    " \n",
-    "# Check the key\n",
-    "if not api_key:\n",
-    "    print(\"No API key was found - please head over to the troubleshooting notebook in this folder to identify & fix!\")\n",
-    "elif not api_key.startswith(\"sk-proj-\"):\n",
-    "    print(\"An API key was found, but it doesn't start sk-proj-; Looks like you are using DeepSeek (R1) model.\")\n",
-    "elif api_key.strip() != api_key:\n",
-    "    print(\"An API key was found, but it looks like it might have space or tab characters at the start or end - please remove them - see troubleshooting notebook\")\n",
-    "else:\n",
-    "    print(\"API key found and looks good so far!\")\n",
-    " \n",
-    "openai = OpenAI(api_key=api_key, base_url=base_url)\n",
-    "\n",
-    "headers = {\n",
-    "    \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36\"\n",
-    "}\n",
-    "\n",
-    "class Website:\n",
-    "\n",
-    "    def __init__(self, url):\n",
-    "        \"\"\"\n",
-    "        Create this Website object from the given url using the BeautifulSoup library\n",
-    "        \"\"\"\n",
-    "        self.url = url\n",
-    "        response = requests.get(url, headers=headers)\n",
-    "        soup = BeautifulSoup(response.content, 'html.parser')\n",
-    "        self.title = soup.title.string if soup.title else \"No title found\"\n",
-    "        for irrelevant in soup.body([\"script\", \"style\", \"img\", \"input\"]):\n",
-    "            irrelevant.decompose()\n",
-    "        self.text = soup.body.get_text(separator=\"\\\n\", strip=True)\n",
-    " \n",
-    "def user_prompt_for(website):\n",
-    "    user_prompt = f\"You are looking at a website titled {website.title}\"\n",
-    "    user_prompt += \"\\\nThe contents of this website is as follows; please provide a short summary of this website in markdown. If it includes news or announcements, then summarize these too.\\\n\\\n\"\n",
-    "    user_prompt += website.text\n",
-    "    return user_prompt\n",
-    "\n",
-    "def messages_for(website):\n",
-    "    return [\n",
-    "        {\"role\": \"system\", \"content\": system_prompt},\n",
-    "        {\"role\": \"user\", \"content\": user_prompt_for(website)}\n",
-    "    ]\n",
-    " \n",
-    "def summarize(url):\n",
-    "    website = Website(url)\n",
-    "    response = openai.chat.completions.create(\n",
-    "        model=MODEL,\n",
-    "        messages=messages_for(website),\n",
-    "        stream=True\n",
-    "    )\n",
-    "    print(\"Streaming response:\")\n",
-    "    accumulated_content = \"\" # Accumulate the content here\n",
-    "    for chunk in response:\n",
-    "        if chunk.choices[0].delta.content: # Check if there's content in the chunk\n",
-    "            accumulated_content += chunk.choices[0].delta.content # Append the chunk to the accumulated content\n",
-    "            clear_output(wait=True) # Clear the previous output\n",
-    "            display(Markdown(accumulated_content)) # Display the updated content\n",
-    "\n",
-    "def display_summary():\n",
-    "    url = str(input(\"Enter the URL of the website you want to summarize: \"))\n",
-    "    summarize(url)\n",
-    "\n",
-    "display_summary()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "01c9e5e7-7510-43ef-bb9c-aa44b15d39a7",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.11.11"
-  }
- },
- "nbformat": 4,
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a767b6bc-65fe-42b2-988f-efd54125114f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "import requests\n",
+    "from dotenv import load_dotenv\n",
+    "from bs4 import BeautifulSoup\n",
+    "from IPython.display import Markdown, display, clear_output\n",
+    "from openai import OpenAI\n",
+    "\n",
+    "load_dotenv(override=True)\n",
+    "# Deep seek API payload\n",
+    "# api_key = os.getenv('DEEPSEEK_API_KEY')\n",
+    "# base_url=os.getenv('DEEPSEEK_BASE_URL')\n",
+    "# MODEL = \"deepseek-chat\"\n",
+    "\n",
+    "# Day 2 Exercise with Ollama API\n",
+    "api_key = os.getenv('OLLAMA_API_KEY')\n",
+    "base_url = os.getenv('OLLAMA_BASE_URL')\n",
+    "MODEL = \"llama3.2\"\n",
+    "\n",
+    "system_prompt = \"You are an assistant that analyzes the contents of a website \\\n",
+    "and provides a short summary, ignoring text that might be navigation related. \\\n",
+    "Respond in markdown.\"\n",
+    "\n",
+    "messages = [\n",
+    "    {\"role\": \"system\", \"content\": \"You are a snarky assistant\"},\n",
+    "    {\"role\": \"user\", \"content\": \"What is 2 + 2?\"}\n",
+    "]\n",
+    " \n",
+    "# Check the key\n",
+    "if not api_key:\n",
+    "    print(\"No API key was found - please head over to the troubleshooting notebook in this folder to identify & fix!\")\n",
+    "elif not api_key.startswith(\"sk-proj-\"):\n",
+    "    print(\"An API key was found, but it doesn't start sk-proj-; Looks like you are using DeepSeek (R1) model.\")\n",
+    "elif api_key.strip() != api_key:\n",
+    "    print(\"An API key was found, but it looks like it might have space or tab characters at the start or end - please remove them - see troubleshooting notebook\")\n",
+    "else:\n",
+    "    print(\"API key found and looks good so far!\")\n",
+    " \n",
+    "openai = OpenAI(api_key=api_key, base_url=base_url)\n",
+    "\n",
+    "headers = {\n",
+    "    \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36\"\n",
+    "}\n",
+    "\n",
+    "class Website:\n",
+    "\n",
+    "    def __init__(self, url):\n",
+    "        \"\"\"\n",
+    "        Create this Website object from the given url using the BeautifulSoup library\n",
+    "        \"\"\"\n",
+    "        self.url = url\n",
+    "        response = requests.get(url, headers=headers)\n",
+    "        soup = BeautifulSoup(response.content, 'html.parser')\n",
+    "        self.title = soup.title.string if soup.title else \"No title found\"\n",
+    "        for irrelevant in soup.body([\"script\", \"style\", \"img\", \"input\"]):\n",
+    "            irrelevant.decompose()\n",
+    "        self.text = soup.body.get_text(separator=\"\\\n\", strip=True)\n",
+    " \n",
+    "def user_prompt_for(website):\n",
+    "    user_prompt = f\"You are looking at a website titled {website.title}\"\n",
+    "    user_prompt += \"\\\nThe contents of this website is as follows; please provide a short summary of this website in markdown. If it includes news or announcements, then summarize these too.\\\n\\\n\"\n",
+    "    user_prompt += website.text\n",
+    "    return user_prompt\n",
+    "\n",
+    "def messages_for(website):\n",
+    "    return [\n",
+    "        {\"role\": \"system\", \"content\": system_prompt},\n",
+    "        {\"role\": \"user\", \"content\": user_prompt_for(website)}\n",
+    "    ]\n",
+    " \n",
+    "def summarize(url):\n",
+    "    website = Website(url)\n",
+    "    response = openai.chat.completions.create(\n",
+    "        model=MODEL,\n",
+    "        messages=messages_for(website),\n",
+    "        stream=True\n",
+    "    )\n",
+    "    print(\"Streaming response:\")\n",
+    "    accumulated_content = \"\" # Accumulate the content here\n",
+    "    for chunk in response:\n",
+    "        if chunk.choices[0].delta.content: # Check if there's content in the chunk\n",
+    "            accumulated_content += chunk.choices[0].delta.content # Append the chunk to the accumulated content\n",
+    "            clear_output(wait=True) # Clear the previous output\n",
+    "            display(Markdown(accumulated_content)) # Display the updated content\n",
+    "\n",
+    "def display_summary():\n",
+    "    url = str(input(\"Enter the URL of the website you want to summarize: \"))\n",
+    "    summarize(url)\n",
+    "\n",
+    "display_summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "01c9e5e7-7510-43ef-bb9c-aa44b15d39a7",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.11.11"
+  }
+ },
+ "nbformat": 4,
  "nbformat_minor": 5
 }

From 26e2b74727c9380364d7a2e2965fb6ef54d9e0f8 Mon Sep 17 00:00:00 2001
From: 266367 <266367@nttdata.com>
Date: Wed, 29 Jan 2025 13:07:36 -0500
Subject: [PATCH 2/2] rebase and cleanup

---
 .../wk1-day1-deepseek-stream-summarize.ipynb | 250 +++++++++---------
 .../wk1-day2-ollama-exer.ipynb               | 118 +++++++++
 2 files changed, 240 insertions(+), 128 deletions(-)
 create mode 100644 week1/community-contributions/wk1-day2-ollama-exer.ipynb

diff --git a/week1/community-contributions/wk1-day1-deepseek-stream-summarize.ipynb b/week1/community-contributions/wk1-day1-deepseek-stream-summarize.ipynb
index 0e7a226..2e615ed 100644
--- a/week1/community-contributions/wk1-day1-deepseek-stream-summarize.ipynb
+++ b/week1/community-contributions/wk1-day1-deepseek-stream-summarize.ipynb
@@ -1,131 +1,125 @@
 {
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "a767b6bc-65fe-42b2-988f-efd54125114f",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import os\n",
-    "import requests\n",
-    "from dotenv import load_dotenv\n",
-    "from bs4 import BeautifulSoup\n",
-    "from IPython.display import Markdown, display, clear_output\n",
-    "from openai import OpenAI\n",
-    "\n",
-    "load_dotenv(override=True)\n",
-    "# Deep seek API payload\n",
-    "# api_key = os.getenv('DEEPSEEK_API_KEY')\n",
-    "# base_url=os.getenv('DEEPSEEK_BASE_URL')\n",
-    "# MODEL = \"deepseek-chat\"\n",
-    "\n",
-    "# Day 2 Exercise with Ollama API\n",
-    "api_key = os.getenv('OLLAMA_API_KEY')\n",
-    "base_url = os.getenv('OLLAMA_BASE_URL')\n",
-    "MODEL = \"llama3.2\"\n",
-    "\n",
-    "system_prompt = \"You are an assistant that analyzes the contents of a website \\\n",
-    "and provides a short summary, ignoring text that might be navigation related. \\\n",
-    "Respond in markdown.\"\n",
-    "\n",
-    "messages = [\n",
-    "    {\"role\": \"system\", \"content\": \"You are a snarky assistant\"},\n",
-    "    {\"role\": \"user\", \"content\": \"What is 2 + 2?\"}\n",
-    "]\n",
-    " \n",
-    "# Check the key\n",
-    "if not api_key:\n",
-    "    print(\"No API key was found - please head over to the troubleshooting notebook in this folder to identify & fix!\")\n",
-    "elif not api_key.startswith(\"sk-proj-\"):\n",
-    "    print(\"An API key was found, but it doesn't start sk-proj-; Looks like you are using DeepSeek (R1) model.\")\n",
-    "elif api_key.strip() != api_key:\n",
-    "    print(\"An API key was found, but it looks like it might have space or tab characters at the start or end - please remove them - see troubleshooting notebook\")\n",
-    "else:\n",
-    "    print(\"API key found and looks good so far!\")\n",
-    " \n",
-    "openai = OpenAI(api_key=api_key, base_url=base_url)\n",
-    "\n",
-    "headers = {\n",
-    "    \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36\"\n",
-    "}\n",
-    "\n",
-    "class Website:\n",
-    "\n",
-    "    def __init__(self, url):\n",
-    "        \"\"\"\n",
-    "        Create this Website object from the given url using the BeautifulSoup library\n",
-    "        \"\"\"\n",
-    "        self.url = url\n",
-    "        response = requests.get(url, headers=headers)\n",
-    "        soup = BeautifulSoup(response.content, 'html.parser')\n",
-    "        self.title = soup.title.string if soup.title else \"No title found\"\n",
-    "        for irrelevant in soup.body([\"script\", \"style\", \"img\", \"input\"]):\n",
-    "            irrelevant.decompose()\n",
-    "        self.text = soup.body.get_text(separator=\"\\\n\", strip=True)\n",
-    " \n",
-    "def user_prompt_for(website):\n",
-    "    user_prompt = f\"You are looking at a website titled {website.title}\"\n",
-    "    user_prompt += \"\\\nThe contents of this website is as follows; please provide a short summary of this website in markdown. If it includes news or announcements, then summarize these too.\\\n\\\n\"\n",
-    "    user_prompt += website.text\n",
-    "    return user_prompt\n",
-    "\n",
-    "def messages_for(website):\n",
-    "    return [\n",
-    "        {\"role\": \"system\", \"content\": system_prompt},\n",
-    "        {\"role\": \"user\", \"content\": user_prompt_for(website)}\n",
-    "    ]\n",
-    " \n",
-    "def summarize(url):\n",
-    "    website = Website(url)\n",
-    "    response = openai.chat.completions.create(\n",
-    "        model=MODEL,\n",
-    "        messages=messages_for(website),\n",
-    "        stream=True\n",
-    "    )\n",
-    "    print(\"Streaming response:\")\n",
-    "    accumulated_content = \"\" # Accumulate the content here\n",
-    "    for chunk in response:\n",
-    "        if chunk.choices[0].delta.content: # Check if there's content in the chunk\n",
-    "            accumulated_content += chunk.choices[0].delta.content # Append the chunk to the accumulated content\n",
-    "            clear_output(wait=True) # Clear the previous output\n",
-    "            display(Markdown(accumulated_content)) # Display the updated content\n",
-    "\n",
-    "def display_summary():\n",
-    "    url = str(input(\"Enter the URL of the website you want to summarize: \"))\n",
-    "    summarize(url)\n",
-    "\n",
-    "display_summary()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "01c9e5e7-7510-43ef-bb9c-aa44b15d39a7",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.11.11"
-  }
- },
- "nbformat": 4,
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a767b6bc-65fe-42b2-988f-efd54125114f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "import requests\n",
+    "from dotenv import load_dotenv\n",
+    "from bs4 import BeautifulSoup\n",
+    "from IPython.display import Markdown, display, clear_output\n",
+    "from openai import OpenAI\n",
+    "\n",
+    "load_dotenv(override=True)\n",
+    "api_key = os.getenv('DEEPSEEK_API_KEY')\n",
+    "base_url=os.getenv('DEEPSEEK_BASE_URL')\n",
+    "MODEL = \"deepseek-chat\"\n",
+    "\n",
+    "system_prompt = \"You are an assistant that analyzes the contents of a website \\\n",
+    "and provides a short summary, ignoring text that might be navigation related. \\\n",
+    "Respond in markdown.\"\n",
+    "\n",
+    "messages = [\n",
+    "    {\"role\": \"system\", \"content\": \"You are a snarky assistant\"},\n",
+    "    {\"role\": \"user\", \"content\": \"What is 2 + 2?\"}\n",
+    "]\n",
+    " \n",
+    "# Check the key\n",
+    "if not api_key:\n",
+    "    print(\"No API key was found - please head over to the troubleshooting notebook in this folder to identify & fix!\")\n",
+    "elif not api_key.startswith(\"sk-proj-\"):\n",
+    "    print(\"An API key was found, but it doesn't start sk-proj-; Looks like you are using DeepSeek (R1) model.\")\n",
+    "elif api_key.strip() != api_key:\n",
+    "    print(\"An API key was found, but it looks like it might have space or tab characters at the start or end - please remove them - see troubleshooting notebook\")\n",
+    "else:\n",
+    "    print(\"API key found and looks good so far!\")\n",
+    " \n",
+    "openai = OpenAI(api_key=api_key, base_url=base_url)\n",
+    "\n",
+    "headers = {\n",
+    "    \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36\"\n",
+    "}\n",
+    "\n",
+    "class Website:\n",
+    "\n",
+    "    def __init__(self, url):\n",
+    "        \"\"\"\n",
+    "        Create this Website object from the given url using the BeautifulSoup library\n",
+    "        \"\"\"\n",
+    "        self.url = url\n",
+    "        response = requests.get(url, headers=headers)\n",
+    "        soup = BeautifulSoup(response.content, 'html.parser')\n",
+    "        self.title = soup.title.string if soup.title else \"No title found\"\n",
+    "        for irrelevant in soup.body([\"script\", \"style\", \"img\", \"input\"]):\n",
+    "            irrelevant.decompose()\n",
+    "        self.text = soup.body.get_text(separator=\"\\\n\", strip=True)\n",
+    " \n",
+    "def user_prompt_for(website):\n",
+    "    user_prompt = f\"You are looking at a website titled {website.title}\"\n",
+    "    user_prompt += \"\\\nThe contents of this website is as follows; please provide a short summary of this website in markdown. If it includes news or announcements, then summarize these too.\\\n\\\n\"\n",
+    "    user_prompt += website.text\n",
+    "    return user_prompt\n",
+    "\n",
+    "def messages_for(website):\n",
+    "    return [\n",
+    "        {\"role\": \"system\", \"content\": system_prompt},\n",
+    "        {\"role\": \"user\", \"content\": user_prompt_for(website)}\n",
+    "    ]\n",
+    " \n",
+    "def summarize(url):\n",
+    "    website = Website(url)\n",
+    "    response = openai.chat.completions.create(\n",
+    "        model=MODEL,\n",
+    "        messages=messages_for(website),\n",
+    "        stream=True\n",
+    "    )\n",
+    "    print(\"Streaming response:\")\n",
+    "    accumulated_content = \"\" # Accumulate the content here\n",
+    "    for chunk in response:\n",
+    "        if chunk.choices[0].delta.content: # Check if there's content in the chunk\n",
+    "            accumulated_content += chunk.choices[0].delta.content # Append the chunk to the accumulated content\n",
+    "            clear_output(wait=True) # Clear the previous output\n",
+    "            display(Markdown(accumulated_content)) # Display the updated content\n",
+    "\n",
+    "def display_summary():\n",
+    "    url = str(input(\"Enter the URL of the website you want to summarize: \"))\n",
+    "    summarize(url)\n",
+    "\n",
+    "display_summary()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "01c9e5e7-7510-43ef-bb9c-aa44b15d39a7",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.11.11"
+  }
+ },
+ "nbformat": 4,
  "nbformat_minor": 5
 }
diff --git a/week1/community-contributions/wk1-day2-ollama-exer.ipynb b/week1/community-contributions/wk1-day2-ollama-exer.ipynb
new file mode 100644
index 0000000..ebedd97
--- /dev/null
+++ b/week1/community-contributions/wk1-day2-ollama-exer.ipynb
@@ -0,0 +1,118 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "import requests\n",
+    "from dotenv import load_dotenv\n",
+    "from bs4 import BeautifulSoup\n",
+    "from IPython.display import Markdown, display, clear_output\n",
+    "from openai import OpenAI\n",
+    "\n",
+    "load_dotenv(override=True)\n",
+    "\n",
+    "# Day 2 Exercise with Ollama API\n",
+    "api_key = os.getenv('OLLAMA_API_KEY')\n",
+    "base_url = os.getenv('OLLAMA_BASE_URL')\n",
+    "MODEL = \"llama3.2\"\n",
+    "\n",
+    "system_prompt = \"You are an assistant that analyzes the contents of a website \\\n",
+    "and provides a short summary, ignoring text that might be navigation related. \\\n",
+    "Respond in markdown.\"\n",
+    "\n",
+    "messages = [\n",
+    "    {\"role\": \"system\", \"content\": \"You are a snarky assistant\"},\n",
+    "    {\"role\": \"user\", \"content\": \"What is 2 + 2?\"}\n",
+    "]\n",
+    " \n",
+    "# Check the key\n",
+    "if not api_key:\n",
+    "    print(\"No API key was found - please head over to the troubleshooting notebook in this folder to identify & fix!\")\n",
+    "elif not api_key.startswith(\"sk-proj-\"):\n",
+    "    print(\"An API key was found, but it doesn't start sk-proj-; Looks like you are using DeepSeek (R1) model.\")\n",
+    "elif api_key.strip() != api_key:\n",
+    "    print(\"An API key was found, but it looks like it might have space or tab characters at the start or end - please remove them - see troubleshooting notebook\")\n",
+    "else:\n",
+    "    print(\"API key found and looks good so far!\")\n",
+    " \n",
+    "openai = OpenAI(api_key=api_key, base_url=base_url)\n",
+    "\n",
+    "headers = {\n",
+    "    \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36\"\n",
+    "}\n",
+    "\n",
+    "class Website:\n",
+    "\n",
+    "    def __init__(self, url):\n",
+    "        \"\"\"\n",
+    "        Create this Website object from the given url using the BeautifulSoup library\n",
+    "        \"\"\"\n",
+    "        self.url = url\n",
+    "        response = requests.get(url, headers=headers)\n",
+    "        soup = BeautifulSoup(response.content, 'html.parser')\n",
+    "        self.title = soup.title.string if soup.title else \"No title found\"\n",
+    "        for irrelevant in soup.body([\"script\", \"style\", \"img\", \"input\"]):\n",
+    "            irrelevant.decompose()\n",
+    "        self.text = soup.body.get_text(separator=\"\\\n\", strip=True)\n",
+    " \n",
+    "def user_prompt_for(website):\n",
+    "    user_prompt = f\"You are looking at a website titled {website.title}\"\n",
+    "    user_prompt += \"\\\nThe contents of this website is as follows; please provide a short summary of this website in markdown. If it includes news or announcements, then summarize these too.\\\n\\\n\"\n",
+    "    user_prompt += website.text\n",
+    "    return user_prompt\n",
+    "\n",
+    "def messages_for(website):\n",
+    "    return [\n",
+    "        {\"role\": \"system\", \"content\": system_prompt},\n",
+    "        {\"role\": \"user\", \"content\": user_prompt_for(website)}\n",
+    "    ]\n",
+    " \n",
+    "def summarize(url):\n",
+    "    website = Website(url)\n",
+    "    response = openai.chat.completions.create(\n",
+    "        model=MODEL,\n",
+    "        messages=messages_for(website),\n",
+    "        stream=True\n",
+    "    )\n",
+    "    print(\"Streaming response:\")\n",
+    "    accumulated_content = \"\" # Accumulate the content here\n",
+    "    for chunk in response:\n",
+    "        if chunk.choices[0].delta.content: # Check if there's content in the chunk\n",
+    "            accumulated_content += chunk.choices[0].delta.content # Append the chunk to the accumulated content\n",
+    "            clear_output(wait=True) # Clear the previous output\n",
+    "            display(Markdown(accumulated_content)) # Display the updated content\n",
+    "    " \n",
+    "def display_summary():\n",
+    "    url = str(input(\"Enter the URL of the website you want to summarize: \"))\n",
+    "    summarize(url)\n",
+    "\n",
+    "display_summary()"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.11.11"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
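
Note on running the Day 2 exercise locally: the notebook reads OLLAMA_API_KEY and OLLAMA_BASE_URL from .env. Ollama's OpenAI-compatible endpoint does not validate the key's value, but the OpenAI client requires a non-empty string, so a .env along these lines should work (the exact values are an assumption for a default local install; adjust to your setup):

    OLLAMA_API_KEY=ollama
    OLLAMA_BASE_URL=http://localhost:11434/v1

With that configuration the "doesn't start sk-proj-" branch of the key check will still print its DeepSeek warning, since that check predates the Ollama switch; it is harmless here. Below is a minimal sketch of the same client wiring outside the notebook, assuming `ollama serve` is running and the model has been pulled with `ollama pull llama3.2`:

    # minimal_ollama_stream.py - hypothetical standalone sketch, not part of the patch
    from openai import OpenAI

    # Ollama ignores the key's value; "ollama" is a common placeholder.
    client = OpenAI(api_key="ollama", base_url="http://localhost:11434/v1")

    # Stream a chat completion the same way summarize() does in the notebook.
    stream = client.chat.completions.create(
        model="llama3.2",
        messages=[{"role": "user", "content": "What is 2 + 2?"}],
        stream=True,
    )
    for chunk in stream:
        if chunk.choices[0].delta.content:  # skip empty keep-alive chunks
            print(chunk.choices[0].delta.content, end="")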