From 38c2317c0e93a4542267fe45e62d5a354f4dd979 Mon Sep 17 00:00:00 2001
From: Edward Donner
Date: Tue, 10 Dec 2024 22:18:25 -0500
Subject: [PATCH] Minor refinements

---
 requirements.txt | 2 +-
 week1/day1.ipynb | 7 ++++++-
 week1/day5.ipynb | 7 ++++++-
 week6/day3.ipynb | 2 +-
 week6/day5.ipynb | 2 +-
 5 files changed, 15 insertions(+), 5 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index 5dd33ad..5d110bf 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -23,7 +23,7 @@ langchain[docarray]
 datasets
 sentencepiece
 matplotlib
-google.generativeai
+google-generativeai
 anthropic
 scikit-learn
 unstructured
diff --git a/week1/day1.ipynb b/week1/day1.ipynb
index 2c2e1c2..0756a2f 100644
--- a/week1/day1.ipynb
+++ b/week1/day1.ipynb
@@ -184,6 +184,11 @@
     "# A class to represent a Webpage\n",
     "# If you're not familiar with Classes, check out the \"Intermediate Python\" notebook\n",
     "\n",
+    "# Some websites need you to use proper headers when fetching them:\n",
+    "headers = {\n",
+    " \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36\"\n",
+    "}\n",
+    "\n",
     "class Website:\n",
     "\n",
     "    def __init__(self, url):\n",
@@ -191,7 +196,7 @@
     "        Create this Website object from the given url using the BeautifulSoup library\n",
     "        \"\"\"\n",
     "        self.url = url\n",
-    "        response = requests.get(url)\n",
+    "        response = requests.get(url, headers=headers)\n",
     "        soup = BeautifulSoup(response.content, 'html.parser')\n",
     "        self.title = soup.title.string if soup.title else \"No title found\"\n",
     "        for irrelevant in soup.body([\"script\", \"style\", \"img\", \"input\"]):\n",
diff --git a/week1/day5.ipynb b/week1/day5.ipynb
index 3cdc54a..8ddd23a 100644
--- a/week1/day5.ipynb
+++ b/week1/day5.ipynb
@@ -70,6 +70,11 @@
    "source": [
     "# A class to represent a Webpage\n",
     "\n",
+    "# Some websites need you to use proper headers when fetching them:\n",
+    "headers = {\n",
+    " \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36\"\n",
+    "}\n",
+    "\n",
     "class Website:\n",
     "    \"\"\"\n",
     "    A utility class to represent a Website that we have scraped, now with links\n",
@@ -77,7 +82,7 @@
     "\n",
     "    def __init__(self, url):\n",
     "        self.url = url\n",
-    "        response = requests.get(url)\n",
+    "        response = requests.get(url, headers=headers)\n",
     "        self.body = response.content\n",
     "        soup = BeautifulSoup(self.body, 'html.parser')\n",
     "        self.title = soup.title.string if soup.title else \"No title found\"\n",
diff --git a/week6/day3.ipynb b/week6/day3.ipynb
index 4132ae3..62345ac 100644
--- a/week6/day3.ipynb
+++ b/week6/day3.ipynb
@@ -893,7 +893,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.11.10"
+   "version": "3.11.11"
   }
  },
  "nbformat": 4,
diff --git a/week6/day5.ipynb b/week6/day5.ipynb
index 2c4d61e..1886310 100644
--- a/week6/day5.ipynb
+++ b/week6/day5.ipynb
@@ -547,7 +547,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.11.10"
+   "version": "3.11.11"
   }
  },
  "nbformat": 4,
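
Below is a minimal, standalone sketch of the fetch-with-headers pattern this patch applies in week1/day1.ipynb and week1/day5.ipynb. It is illustrative only: the example URL is hypothetical, and it assumes the requests and beautifulsoup4 packages listed in requirements.txt are installed.

import requests
from bs4 import BeautifulSoup

# Some websites refuse requests that lack a browser-like User-Agent,
# so we send one explicitly (same idea as the patched notebooks).
headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36"
}

class Website:
    def __init__(self, url):
        self.url = url
        response = requests.get(url, headers=headers)
        soup = BeautifulSoup(response.content, "html.parser")
        self.title = soup.title.string if soup.title else "No title found"
        # Drop tags that carry no readable text before extracting it
        for irrelevant in soup.body(["script", "style", "img", "input"]):
            irrelevant.decompose()
        self.text = soup.body.get_text(separator="\n", strip=True)

# Example usage (hypothetical URL):
site = Website("https://example.com")
print(site.title)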