# imports

import os
import requests
import json
from typing import List
from dotenv import load_dotenv
from bs4 import BeautifulSoup
from IPython.display import Markdown, display, update_display
from openai import OpenAI

# constants

MODEL_GPT = 'gpt-4o-mini'
MODEL_LLAMA = 'llama3.2'

# set up environment

load_dotenv()
api_key = os.getenv('OPENAI_API_KEY')

if api_key and api_key.startswith('sk-proj-') and len(api_key) > 10:
    print("API key looks good so far")
else:
    print("There might be a problem with your API key? Please visit the troubleshooting notebook!")

# create the OpenAI client used by the calls below
openai = OpenAI()

# A class to represent a Webpage

class Website:
    """
    A utility class to represent a Website that we have scraped, now with links
    """

    def __init__(self, url):
        self.url = url
        response = requests.get(url)
        self.body = response.content
        soup = BeautifulSoup(self.body, 'html.parser')
        self.title = soup.title.string if soup.title else "No title found"
        if soup.body:
            for irrelevant in soup.body(["script", "style", "img", "input"]):
                irrelevant.decompose()
            self.text = soup.body.get_text(separator="\n", strip=True)
        else:
            self.text = ""
        links = [link.get('href') for link in soup.find_all('a')]
        self.links = [link for link in links if link]

    def get_contents(self):
        return f"Webpage Title:\n{self.title}\nWebpage Contents:\n{self.text}\n\n"

dr = Website("https://www.drbruceforciea.com")
print(dr.get_contents())
print(dr.links)

link_system_prompt = "You are provided with a list of links found on a webpage. \
You are able to decide which of the links would be most relevant to learn anatomy and physiology, \
such as links to an Anatomy or Physiology page, Learning Page, or Book Page.\n"
link_system_prompt += "You should respond in JSON as in this example:"
link_system_prompt += """
{
    "links": [
        {"type": "anatomy and physiology page", "url": "https://full.url/goes/here/anatomy-and-physiology"},
        {"type": "learning page", "url": "https://another.full.url/learning"}
    ]
}
"""

def get_links_user_prompt(website):
    user_prompt = f"Here is the list of links on the website of {website.url} - "
    user_prompt += "please decide which of these are relevant web links to learn anatomy and physiology, \
respond with the full https URL in JSON format. Do not include Terms of Service, Privacy, or email links.\n"
    user_prompt += "Links (some might be relative links):\n"
    user_prompt += "\n".join(website.links)
    return user_prompt

print(get_links_user_prompt(dr))

def get_links(url):
    website = Website(url)
    response = openai.chat.completions.create(
        model=MODEL_GPT,
        messages=[
            {"role": "system", "content": link_system_prompt},
            {"role": "user", "content": get_links_user_prompt(website)}
        ],
        response_format={"type": "json_object"}
    )
    result = response.choices[0].message.content
    return json.loads(result)

# Give a medicine-related website link.

nationalcancerinstitute = Website("https://training.seer.cancer.gov/modules_reg_surv.html")
nationalcancerinstitute.links

get_links("https://training.seer.cancer.gov/modules_reg_surv.html")

def get_all_details(url):
    result = "Landing page:\n"
    result += Website(url).get_contents()
    links = get_links(url)
    print("Found links:", links)
    for link in links["links"]:
        result += f"\n\n{link['type']}\n"
        result += Website(link["url"]).get_contents()
    return result

# here is the question; type over this to ask something new

question = """
Please explain what this code does and why:
yield from {book.get("author") for book in books if book.get("author")}
"""

# Get gpt-4o-mini to answer, with streaming (see the sketch below)

# Get Llama 3.2 to answer (see the sketch after that)
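
# A minimal sketch for the "Get gpt-4o-mini to answer, with streaming" step above.
# It reuses the `openai` client, MODEL_GPT, and `question` defined earlier, and streams
# the reply into a single Markdown display that is updated as chunks arrive.
# The `system_prompt` wording here is an illustrative assumption, not part of the original.

system_prompt = "You are a helpful technical tutor who explains code clearly."  # assumed wording

stream = openai.chat.completions.create(
    model=MODEL_GPT,
    messages=[
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": question}
    ],
    stream=True
)

# accumulate streamed chunks and refresh one Markdown cell output in place
reply = ""
display_handle = display(Markdown(""), display_id=True)
for chunk in stream:
    reply += chunk.choices[0].delta.content or ""
    update_display(Markdown(reply), display_id=display_handle.display_id)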
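
# A minimal sketch for the "Get Llama 3.2 to answer" step above. It assumes Llama 3.2 is
# served by a local Ollama instance exposing its OpenAI-compatible endpoint at
# http://localhost:11434/v1; the api_key value is just a placeholder required by the client.

ollama_via_openai = OpenAI(base_url="http://localhost:11434/v1", api_key="ollama")

response = ollama_via_openai.chat.completions.create(
    model=MODEL_LLAMA,
    messages=[
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": question}
    ]
)

display(Markdown(response.choices[0].message.content))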