Browse Source

Day 2 exercise using Ollama to summarise websites.

pull/52/head
Madhul Sachdeva 5 months ago
parent
commit
fa06d38f47
  1. 345
      week1/day1.ipynb
  2. 225
      week1/day2 EXERCISE.ipynb

345
week1/day1.ipynb

File diff suppressed because one or more lines are too long

225
week1/day2 EXERCISE.ipynb

@ -203,6 +203,231 @@
"id": "402d5686-4e76-4110-b65a-b3906c35c0a4",
"metadata": {},
"outputs": [],
"source": [
"# imports\n",
"import os\n",
"import requests\n",
"from bs4 import BeautifulSoup\n",
"from IPython.display import Markdown, display\n",
"from openai import OpenAI\n",
"from requests.exceptions import RequestException\n",
"# If you get an error running this cell, then please head over to the troubleshooting notebook!"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "9cfeb2c1-a2e1-47af-bd62-253b703d8130",
"metadata": {},
"outputs": [],
"source": [
"# Constants\n",
"\n",
"OLLAMA_API = \"http://localhost:11434/api/chat\"\n",
"HEADERS = {\"Content-Type\": \"application/json\"}\n",
"MODEL = \"llama3.2\"\n",
"ollama_via_openai = OpenAI(base_url='http://localhost:11434/v1', api_key='ollama')"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "76991489-946f-492c-9f62-9f73a9e53b43",
"metadata": {},
"outputs": [],
"source": [
"# A class to represent a Webpage\n",
"# If you're not familiar with Classes, check out the \"Intermediate Python\" notebook\n",
"\n",
"# Some websites need you to use proper headers when fetching them:\n",
"\n",
"class Website:\n",
" def __init__(self, url):\n",
" \"\"\"\n",
" Create this Website object from the given url using the BeautifulSoup library\n",
" \"\"\"\n",
" self.url = url\n",
" self.title = \"No title found\"\n",
" self.text = \"No content found\"\n",
" \n",
" headers = {\n",
" 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'\n",
" }\n",
" \n",
" try:\n",
" response = requests.get(url, headers=headers, timeout=10)\n",
" response.raise_for_status() # Raises an HTTPError for bad responses\n",
" \n",
" soup = BeautifulSoup(response.content, 'html.parser')\n",
" self.title = soup.title.string if soup.title else \"No title found\"\n",
" \n",
" # Get text content\n",
" self.text = ' '.join([p.get_text() for p in soup.find_all('p')])\n",
" if not self.text:\n",
" self.text = \"No content found\"\n",
"\n",
" for irrelevant in soup.body([\"script\", \"style\", \"img\", \"input\"]):\n",
" irrelevant.decompose()\n",
" self.text = soup.body.get_text(separator=\"\\n\", strip=True)\n",
" \n",
" except requests.exceptions.SSLError:\n",
" raise ConnectionError(f\"SSL Certificate verification failed for {url}\")\n",
" except requests.exceptions.ConnectionError:\n",
" raise ConnectionError(f\"Failed to connect to {url}. Please check if the URL is correct and accessible.\")\n",
" except requests.exceptions.Timeout:\n",
" raise ConnectionError(f\"Connection timed out while trying to access {url}\")\n",
" except requests.exceptions.RequestException as e:\n",
" raise ConnectionError(f\"An error occurred while fetching the website: {str(e)}\")\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "61d58b43-5ec5-4580-b963-8336aee8681e",
"metadata": {},
"outputs": [],
"source": [
"# A class to represent a Webpage\n",
"# If you're not familiar with Classes, check out the \"Intermediate Python\" notebook\n",
"\n",
"# Some websites need you to use proper headers when fetching them:\n",
"headers = {\n",
" \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36\"\n",
"}\n",
"\n",
"class Website:\n",
"\n",
" def __init__(self, url):\n",
" \"\"\"\n",
" Create this Website object from the given url using the BeautifulSoup library\n",
" \"\"\"\n",
" self.url = url\n",
" response = requests.get(url, headers=headers)\n",
" soup = BeautifulSoup(response.content, 'html.parser')\n",
" self.title = soup.title.string if soup.title else \"No title found\"\n",
" for irrelevant in soup.body([\"script\", \"style\", \"img\", \"input\"]):\n",
" irrelevant.decompose()\n",
" self.text = soup.body.get_text(separator=\"\\n\", strip=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d5fe4e6a-24f4-483a-ae24-1e7ed1bf2a6f",
"metadata": {},
"outputs": [],
"source": [
"# A function that writes a User Prompt that asks for summaries of websites:\n",
"\n",
"def user_prompt_for(website):\n",
" user_prompt = f\"You are looking at a website titled {website.title}\"\n",
" user_prompt += \"\\nThe contents of this website is as follows; \\\n",
"please provide a short summary of this website in markdown. \\\n",
"If it includes news or announcements, then summarise these too. Ignoring text that might be navigation related. \\n --- \\n\"\n",
" user_prompt += website.text\n",
" return user_prompt"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e9c9876a-45ff-43ef-8315-b10acfd4b872",
"metadata": {},
"outputs": [],
"source": [
"ms=Website(\"https://technicallysimple.me\")\n",
"print(user_prompt_for(ms))"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "f2369302-7b8c-465e-8606-b9cc0a51cb78",
"metadata": {},
"outputs": [],
"source": [
"# Create a messages list using the same format that we used for OpenAI\n",
"def messages_for(website):\n",
" return [\n",
" {\"role\": \"user\", \"content\": user_prompt_for(website)}\n",
" ]"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "a73d69c1-87fb-4e82-94f1-50dd76fb5e60",
"metadata": {},
"outputs": [],
"source": [
"me=Website(\"https://technicallysimple.me\")\n",
"print(messages_for(me))"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "16ae9026-1684-4cc3-9859-f2cd7d22fb52",
"metadata": {},
"outputs": [],
"source": [
"def summarise(url):\n",
" try:\n",
" aWebsite = Website(url)\n",
" response = ollama_via_openai.chat.completions.create(\n",
" model=MODEL,\n",
" messages=messages_for(aWebsite)\n",
" )\n",
" return response.choices[0].message.content\n",
" except ConnectionError as e:\n",
" return f\"Error: {str(e)}\"\n",
" except Exception as e:\n",
" return f\"An unexpected error occurred: {str(e)}\""
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "4e0f7264-9622-4751-83b9-a31c3c0c4589",
"metadata": {},
"outputs": [],
"source": [
"def display_summary(url):\n",
" try:\n",
" summary = summarise(url)\n",
" display(Markdown(summary))\n",
" except Exception as e:\n",
" print(f\"Failed to display summary: {str(e)}\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "40aba8f2-577f-4003-bf46-377cc815f243",
"metadata": {},
"outputs": [],
"source": [
"address=input(\"Enter URL: \")\n",
"summarise(address)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d8659653-d3a1-4cc1-bbd8-a003fb22041f",
"metadata": {},
"outputs": [],
"source": [
"address=input(\"Enter URL: \")\n",
"display_summary(address)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "aff32906-2401-4d92-b377-c91ee572e208",
"metadata": {},
"outputs": [],
"source": []
}
],

Loading…
Cancel
Save