1 changed file with 230 additions and 0 deletions
@@ -0,0 +1,230 @@
{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "56c86bae-1d3c-4c01-b5d6-c8879fec1954",
   "metadata": {},
   "source": [
"# Wiki Summarizer\n", |
||||
"\n", |
||||
"This Project takes the name of a topic as input, and checks if the corresponding wiki-page exists. If it does, it parses the web page, and outputs a summary created using the GPT-4o-mini model. \n", |
||||
"\n", |
||||
"Concepts used: \n", |
||||
"- Web Scraping via Beautiful Soup\n", |
||||
"- User and System Prompts\n", |
||||
"- Use of Open AI GPT-4o-mini via API key" |
||||
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "4820830e-b3b4-426e-b1a2-518e7c7f6c1a",
   "metadata": {},
   "outputs": [],
   "source": [
    "# imports\n",
    "\n",
    "import os\n",
    "import requests\n",
    "from dotenv import load_dotenv\n",
    "from bs4 import BeautifulSoup\n",
    "from IPython.display import Markdown, display\n",
    "from openai import OpenAI"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "2cd7ad51-396c-45c5-9089-f7b21a19da50",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load environment variables from a file called .env\n",
    "\n",
    "load_dotenv(override=True)\n",
    "api_key = os.getenv('OPENAI_API_KEY')\n",
    "\n",
    "# Check the key\n",
    "\n",
    "if not api_key:\n",
    "    print(\"No API key was found - please head over to the troubleshooting notebook in this folder to identify & fix!\")\n",
    "elif not api_key.startswith(\"sk-proj-\"):\n",
    "    print(\"An API key was found, but it doesn't start with sk-proj-; please check you're using the right key - see the troubleshooting notebook\")\n",
    "elif api_key.strip() != api_key:\n",
    "    print(\"An API key was found, but it looks like it might have space or tab characters at the start or end - please remove them - see the troubleshooting notebook\")\n",
    "else:\n",
    "    print(\"API key found and looks good so far!\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "689421a0-20a1-428b-a8b8-fa239fa6f633",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Create an OpenAI client instance; it picks up OPENAI_API_KEY from the environment\n",
    "openai = OpenAI()"
   ]
  },
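  {
   "cell_type": "code",
   "execution_count": null,
   "id": "3c9f1d2a-6b4e-4f8a-9c0d-2e7b5a1f8d36",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Optional sanity check, a minimal sketch: send one trivial request to confirm\n",
    "# the client and key work before building the summarizer.\n",
    "# Assumes the key loaded above is valid; the prompt text here is arbitrary.\n",
    "reply = openai.chat.completions.create(\n",
    "    model=\"gpt-4o-mini\",\n",
    "    messages=[{\"role\": \"user\", \"content\": \"Say hello in one short sentence.\"}]\n",
    ")\n",
    "print(reply.choices[0].message.content)"
   ]
  },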
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "401901ae-7639-4190-98fd-e69374084723",
   "metadata": {},
   "outputs": [],
   "source": [
    "def isWiki(url):\n",
    "    \"\"\"\n",
    "    Check whether a Wikipedia page exists at the given URL and\n",
    "    return a Boolean.\n",
    "    \"\"\"\n",
    "    response = requests.get(url)\n",
    "    return response.status_code == 200"
   ]
  },
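  {
   "cell_type": "code",
   "execution_count": null,
   "id": "8e2a7c44-5d1b-4e9f-a6c3-0b9d4f7e2a15",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Quick check of isWiki (optional, assumes network access):\n",
    "# a well-known article should print True, a made-up title False.\n",
    "print(isWiki(\"https://en.wikipedia.org/wiki/Alan_Turing\"))\n",
    "print(isWiki(\"https://en.wikipedia.org/wiki/Thispagedoesnotexist12345\"))"
   ]
  },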
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "7cdb14d3-05ea-4de2-a475-d49a5731692e",
   "metadata": {},
   "outputs": [],
   "source": [
    "# A class to represent a Webpage\n",
    "\n",
    "# Some websites need you to use proper headers when fetching them:\n",
    "headers = {\n",
    "    \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36\"\n",
    "}\n",
    "\n",
    "class Website:\n",
    "\n",
    "    def __init__(self, url):\n",
    "        \"\"\"\n",
    "        Create this Website object from the given url using the BeautifulSoup library\n",
    "        \"\"\"\n",
    "        self.url = url\n",
    "        response = requests.get(url, headers=headers)\n",
    "        soup = BeautifulSoup(response.content, 'html.parser')\n",
    "        self.title = soup.title.string if soup.title else \"No title found\"\n",
    "        # Drop elements that carry no useful text before extracting the page body\n",
    "        for irrelevant in soup.body([\"script\", \"style\", \"img\", \"input\"]):\n",
    "            irrelevant.decompose()\n",
    "        self.text = soup.body.get_text(separator=\"\\n\", strip=True)"
   ]
  },
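  {
   "cell_type": "code",
   "execution_count": null,
   "id": "f0b5c9e1-2a6d-4c83-b7e4-9d1a3f6c8b27",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Optional: scrape a single page to preview the text the model will receive.\n",
    "# Assumes network access; any existing Wikipedia URL works here.\n",
    "sample = Website(\"https://en.wikipedia.org/wiki/Alan_Turing\")\n",
    "print(sample.title)\n",
    "print(sample.text[:500])"
   ]
  },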
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "7f6ed50e-0fb5-479e-9845-f62cf25980f7",
   "metadata": {},
   "outputs": [],
   "source": [
    "system_prompt = \"You are an educational assistant tasked with helping users understand topics \\\n",
    "by providing succinct and clear summaries of requested data. Ignore navigation-related text \\\n",
    "and provide answers in markdown format\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b2d77dd9-a94f-49c1-a1be-11d157bd37fb",
   "metadata": {},
   "outputs": [],
   "source": [
    "# A function that writes a user prompt asking for a summary of a wiki page:\n",
    "\n",
    "def user_prompt_for(wiki):\n",
    "    user_prompt = f\"You are looking at a Wikipedia page titled {wiki.title}\"\n",
    "    user_prompt += \"\\nThe contents of this page are as follows; \\\n",
    "please provide a short summary of this page in markdown.\\n\"\n",
    "    user_prompt += wiki.text\n",
    "    return user_prompt"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "0d23bcc4-1d89-4bd4-9809-d3a1819aa919",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Build the messages list in the format the Chat Completions API expects\n",
    "\n",
    "def messages_for(wiki):\n",
    "    return [\n",
    "        {\"role\": \"system\", \"content\": system_prompt},\n",
    "        {\"role\": \"user\", \"content\": user_prompt_for(wiki)}\n",
    "    ]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "971bd7fb-2ff8-4494-b386-de69a39c24ff",
   "metadata": {},
   "outputs": [],
   "source": [
    "def summarize(url):\n",
    "    website = Website(url)\n",
    "    response = openai.chat.completions.create(\n",
    "        model=\"gpt-4o-mini\",\n",
    "        messages=messages_for(website)\n",
    "    )\n",
    "    return response.choices[0].message.content"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "a8fdf9f2-f49e-4d06-ac9e-dfcb8da33d60",
   "metadata": {},
   "outputs": [],
   "source": [
    "def display_summary(topic):\n",
    "    url = f\"https://en.wikipedia.org/wiki/{topic}\"\n",
    "    if isWiki(url):\n",
    "        summary = summarize(url)\n",
    "        display(Markdown(summary))\n",
    "    else:\n",
    "        print('A Wikipedia page does not exist for this topic')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "f4758ef0-9b7c-4d3e-9131-e3284dc76b6b",
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "topic = input('Enter the name of the Wikipedia page you would like summarized: ').strip()\n",
    "display_summary(topic)"
   ]
  },
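  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c4d8e2f6-7a1b-4d5e-8f3a-6b2c9e0d4a71",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Example run with a fixed topic, as an optional alternative to the interactive cell above.\n",
    "# 'Alan_Turing' is just a sample title; this makes one API call.\n",
    "display_summary('Alan_Turing')"
   ]
  }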
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.11"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}