From 252e43adad3a04cd09b9106efcc87c8ca3b15250 Mon Sep 17 00:00:00 2001 From: Rakesh H K Date: Fri, 17 Jan 2025 18:29:40 +0530 Subject: [PATCH] Completed Day2 exercise. Included selenium chrome webdriver support to read webpages. --- .../day2 Exercise_RHK.ipynb | 383 ++++++++++++++++++ 1 file changed, 383 insertions(+) create mode 100644 week1/community-contributions/day2 Exercise_RHK.ipynb diff --git a/week1/community-contributions/day2 Exercise_RHK.ipynb b/week1/community-contributions/day2 Exercise_RHK.ipynb new file mode 100644 index 0000000..ffd2f4f --- /dev/null +++ b/week1/community-contributions/day2 Exercise_RHK.ipynb @@ -0,0 +1,383 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "d15d8294-3328-4e07-ad16-8a03e9bbfdb9", + "metadata": {}, + "source": [ + "# Welcome to your first assignment!\n", + "\n", + "Instructions are below. Please give this a try, and look in the solutions folder if you get stuck (or feel free to ask me!)" + ] + }, + { + "cell_type": "markdown", + "id": "ada885d9-4d42-4d9b-97f0-74fbbbfe93a9", + "metadata": {}, + "source": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + "

Just before we get to the assignment --

\n", + " I thought I'd take a second to point you at this page of useful resources for the course. This includes links to all the slides.
\n", + " https://edwarddonner.com/2024/11/13/llm-engineering-resources/
\n", + " Please keep this bookmarked, and I'll continue to add more useful links there over time.\n", + "
\n", + "
" + ] + }, + { + "cell_type": "markdown", + "id": "6e9fa1fc-eac5-4d1d-9be4-541b3f2b3458", + "metadata": {}, + "source": [ + "# HOMEWORK EXERCISE ASSIGNMENT\n", + "\n", + "Upgrade the day 1 project to summarize a webpage to use an Open Source model running locally via Ollama rather than OpenAI\n", + "\n", + "You'll be able to use this technique for all subsequent projects if you'd prefer not to use paid APIs.\n", + "\n", + "**Benefits:**\n", + "1. No API charges - open-source\n", + "2. Data doesn't leave your box\n", + "\n", + "**Disadvantages:**\n", + "1. Significantly less power than Frontier Model\n", + "\n", + "## Recap on installation of Ollama\n", + "\n", + "Simply visit [ollama.com](https://ollama.com) and install!\n", + "\n", + "Once complete, the ollama server should already be running locally. \n", + "If you visit: \n", + "[http://localhost:11434/](http://localhost:11434/)\n", + "\n", + "You should see the message `Ollama is running`. \n", + "\n", + "If not, bring up a new Terminal (Mac) or Powershell (Windows) and enter `ollama serve` \n", + "And in another Terminal (Mac) or Powershell (Windows), enter `ollama pull llama3.2` \n", + "Then try [http://localhost:11434/](http://localhost:11434/) again.\n", + "\n", + "If Ollama is slow on your machine, try using `llama3.2:1b` as an alternative. 
Run `ollama pull llama3.2:1b` from a Terminal or Powershell, and change the code below from `MODEL = \"llama3.2\"` to `MODEL = \"llama3.2:1b\"`" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4e2a9393-7767-488e-a8bf-27c12dca35bd", + "metadata": {}, + "outputs": [], + "source": [ + "# imports\n", + "\n", + "import requests\n", + "from bs4 import BeautifulSoup\n", + "from IPython.display import Markdown, display" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "29ddd15d-a3c5-4f4e-a678-873f56162724", + "metadata": {}, + "outputs": [], + "source": [ + "# Constants\n", + "\n", + "OLLAMA_API = \"http://localhost:11434/api/chat\"\n", + "HEADERS = {\"Content-Type\": \"application/json\"}\n", + "MODEL = \"llama3.2\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dac0a679-599c-441f-9bf2-ddc73d35b940", + "metadata": {}, + "outputs": [], + "source": [ + "# Create a messages list using the same format that we used for OpenAI\n", + "\n", + "messages = [\n", + " {\"role\": \"user\", \"content\": \"Describe some of the business applications of Generative AI\"}\n", + "]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7bb9c624-14f0-4945-a719-8ddb64f66f47", + "metadata": {}, + "outputs": [], + "source": [ + "payload = {\n", + " \"model\": MODEL,\n", + " \"messages\": messages,\n", + " \"stream\": False\n", + " }" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "479ff514-e8bd-4985-a572-2ea28bb4fa40", + "metadata": {}, + "outputs": [], + "source": [ + "# Let's just make sure the model is loaded\n", + "\n", + "!ollama pull llama3.2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "42b9f644-522d-4e05-a691-56e7658c0ea9", + "metadata": {}, + "outputs": [], + "source": [ + "# If this doesn't work for any reason, try the 2 versions in the following cells\n", + "# And double check the instructions in the 'Recap on installation of Ollama' at the top 
of this lab\n", + "# And if none of that works - contact me!\n", + "\n", + "response = requests.post(OLLAMA_API, json=payload, headers=HEADERS)\n", + "print(response.json()['message']['content'])" + ] + }, + { + "cell_type": "markdown", + "id": "6a021f13-d6a1-4b96-8e18-4eae49d876fe", + "metadata": {}, + "source": [ + "# Introducing the ollama package\n", + "\n", + "And now we'll do the same thing, but using the elegant ollama python package instead of a direct HTTP call.\n", + "\n", + "Under the hood, it's making the same call as above to the ollama server running at localhost:11434" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7745b9c4-57dc-4867-9180-61fa5db55eb8", + "metadata": {}, + "outputs": [], + "source": [ + "import ollama\n", + "\n", + "response = ollama.chat(model=MODEL, messages=messages)\n", + "print(response['message']['content'])" + ] + }, + { + "cell_type": "markdown", + "id": "a4704e10-f5fb-4c15-a935-f046c06fb13d", + "metadata": {}, + "source": [ + "## Alternative approach - using OpenAI python library to connect to Ollama" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "23057e00-b6fc-4678-93a9-6b31cb704bff", + "metadata": {}, + "outputs": [], + "source": [ + "# There's actually an alternative approach that some people might prefer\n", + "# You can use the OpenAI client python library to call Ollama:\n", + "\n", + "from openai import OpenAI\n", + "ollama_via_openai = OpenAI(base_url='http://localhost:11434/v1', api_key='ollama')\n", + "\n", + "response = ollama_via_openai.chat.completions.create(\n", + " model=MODEL,\n", + " messages=messages\n", + ")\n", + "\n", + "print(response.choices[0].message.content)" + ] + }, + { + "cell_type": "markdown", + "id": "1622d9bb-5c68-4d4e-9ca4-b492c751f898", + "metadata": {}, + "source": [ + "# NOW the exercise for you\n", + "\n", + "Take the code from day1 and incorporate it here, to build a website summarizer that uses Llama 3.2 running locally instead of 
OpenAI; use either of the above approaches." + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "402d5686-4e76-4110-b65a-b3906c35c0a4", + "metadata": {}, + "outputs": [], + "source": [ + "# imports\n", + "\n", + "import ollama\n", + "import requests\n", + "from bs4 import BeautifulSoup\n", + "from IPython.display import Markdown, display\n", + "from selenium import webdriver\n", + "from selenium.webdriver.chrome.service import Service\n", + "from selenium.webdriver.common.by import By\n", + "from selenium.webdriver.chrome.options import Options\n", + "from openai import OpenAI\n", + "\n", + "#!ollama pull llama3.2\n", + "MODEL = \"llama3.2\"\n", + "openai = OpenAI(base_url=\"http://localhost:11434/v1\", api_key=\"ollama\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "cca8ae91-ad1e-4239-951f-e1376a5ec934", + "metadata": {}, + "outputs": [ + { + "name": "stdin", + "output_type": "stream", + "text": [ + "Please complete the verification in the browser and press Enter to continue... \n" + ] + }, + { + "data": { + "text/markdown": [ + "This article discussing Serverless architecture is a comprehensive overview of the concept, its benefits, and challenges. Here's a summary of the main points:\n", + "\n", + "**What is Serverless Architecture?**\n", + "\n", + "Serverless computing allows developers to run their applications without managing servers or provisioning resources upfront. Instead, they pay only for the compute time used.\n", + "\n", + "**Key Techniques:**\n", + "\n", + "1. **Function as a Service (FaaS)**: Ephemeral function instances that can be executed with specific inputs and outputs.\n", + "2. **Background Services**: Tightly integrate third-party remote application services directly into the frontend of an app.\n", + "\n", + "**Benefits:**\n", + "\n", + "1. Reduced operational and development costs\n", + "2. Easier management and scaling\n", + "3. Reduced environmental impact\n", + "4. 
Faster time-to-market\n", "\n", "**Challenges:**\n", "\n", "1. Debugging and monitoring complexity\n", "2. Limited control over server-side code execution\n", "3. High dependencies on cloud providers\n", "\n", "**Serverless Landscape:**\n", "\n", "The author expects the Serverless community to grow, with upcoming conferences, meetups, and online groups.\n", "\n", "**Conclusion:**\n", "\n", "Serverless architecture offers significant advantages but also presents challenges. It's essential to weigh the pros and cons carefully before adopting a Serverless approach. Despite its \"slightly awkward teenage years,\" Serverless is expected to continue evolving and maturing in the near future.\n", "\n", "Key Takeaways:\n", "\n", "1. Understand the basics of Serverless computing.\n", "2. Be aware of the trade-offs between scalability, control, and cost.\n", "3. Consider your use case before adopting a Serverless architecture.\n", "4. Stay updated with the latest developments and best practices in the Serverless community."
+ ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "headers = {\n", + " \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36\"\n", + "}\n", + "\n", + "PATH_TO_CHROME_DRIVER = \"/Users/rakeshhk/Rakesh/softwares/chromedriver-mac-arm64/chromedriver\"\n", + "\n", + "class Website:\n", + "\n", + " def __init__(self, url):\n", + " self.url = url\n", + "\n", + " options = Options()\n", + "\n", + " options.add_argument(\"--no-sandbox\")\n", + " options.add_argument(\"--disable-dev-shm-usage\")\n", + "\n", + " service = Service(PATH_TO_CHROME_DRIVER)\n", + " driver = webdriver.Chrome(service=service, options=options)\n", + " driver.get(url)\n", + "\n", + " input(\"Please complete the verification in the browser and press Enter to continue...\")\n", + " page_source = driver.page_source\n", + " driver.quit()\n", + " \n", + " soup = BeautifulSoup(page_source, 'html.parser')\n", + " self.title = soup.title.string if soup.title else \"No title found\"\n", + " for irrelevant in soup.body([\"script\", \"style\", \"img\", \"input\"]):\n", + " irrelevant.decompose()\n", + " self.text = soup.get_text(separator=\"\\n\", strip=True)\n", + "\n", + "def messages_for(website):\n", + " return [{\"role\":\"system\", \"content\": \"You are a technology trainer, please read the content provided and highlight the key points in less than 200 words.\"},\n", + " {\"role\":\"user\", \"content\":website.text}]\n", + "\n", + "def summarize(url):\n", + " website = Website(url)\n", + " response = openai.chat.completions.create(\n", + " model = MODEL,\n", + " messages = messages_for(website)\n", + " )\n", + " return response.choices[0].message.content\n", + "\n", + "def display_summary(url):\n", + " summary = summarize(url)\n", + " display(Markdown(summary))\n", + " \n", + "display_summary(\"https://martinfowler.com/articles/serverless.html\")" + ] + }, + { + 
"cell_type": "code", + "execution_count": null, + "id": "09f59679-22ff-46c4-a736-7309a6ca4365", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.11" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +}