{ "cells": [ { "cell_type": "markdown", "id": "1c14de02-8bd2-4f75-bcd8-d4f2e58e2a24", "metadata": {}, "source": [ "# Hi everyone\n", "I wanted to be able to use Llama3.2 in streaming mode with all the other paid frontier models, so as a demonstration, here's the Company Brochure Generator with Gradio, enhanched with Llama3.2 (using ollama library)!" ] }, { "cell_type": "code", "execution_count": null, "id": "2e02ac9c-7034-4aa1-9626-a7049168f096", "metadata": {}, "outputs": [], "source": [ "import os\n", "import requests\n", "from bs4 import BeautifulSoup\n", "from typing import List\n", "from dotenv import load_dotenv\n", "from openai import OpenAI\n", "import google.generativeai\n", "import anthropic\n", "import ollama\n", "import gradio as gr\n", "\n", "load_dotenv()\n", "openai_api_key = os.getenv('OPENAI_API_KEY')\n", "anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n", "\n", "openai = OpenAI()\n", "claude = anthropic.Anthropic()\n", "\n", "system_message = \"You are an assistant that analyzes the contents of a company website landing page \\\n", "and creates a short brochure about the company for prospective customers, investors and recruits. Respond in markdown.\"\n", "\n", "class Website:\n", " url: str\n", " title: str\n", " text: str\n", "\n", " def __init__(self, url):\n", " self.url = url\n", " response = requests.get(url)\n", " self.body = response.content\n", " soup = BeautifulSoup(self.body, 'html.parser')\n", " self.title = soup.title.string if soup.title else \"No title found\"\n", " for irrelevant in soup.body([\"script\", \"style\", \"img\", \"input\"]):\n", " irrelevant.decompose()\n", " self.text = soup.body.get_text(separator=\"\\n\", strip=True)\n", "\n", " def get_contents(self):\n", " return f\"Webpage Title:\\n{self.title}\\nWebpage Contents:\\n{self.text}\\n\\n\"\n", "\n", "\n", "def stream_gpt(prompt):\n", " messages = [\n", " {\"role\": \"system\", \"content\": system_message},\n", " {\"role\": \"user\", \"content\": prompt}\n", " ]\n", " stream = openai.chat.completions.create(\n", " model='gpt-4o-mini',\n", " messages=messages,\n", " stream=True\n", " )\n", " result = \"\"\n", " for chunk in stream:\n", " result += chunk.choices[0].delta.content or \"\"\n", " yield result\n", "\n", "def stream_claude(prompt):\n", " result = claude.messages.stream(\n", " model=\"claude-3-haiku-20240307\",\n", " max_tokens=1000,\n", " temperature=0.7,\n", " system=system_message,\n", " messages=[\n", " {\"role\": \"user\", \"content\": prompt},\n", " ],\n", " )\n", " response = \"\"\n", " with result as stream:\n", " for text in stream.text_stream:\n", " response += text or \"\"\n", " yield response\n", "\n", "def stream_llama(prompt):\n", " messages = [\n", " {\"role\": \"user\", \"content\": prompt}\n", " ]\n", " response = \"\"\n", " for chunk in ollama.chat(\n", " model=\"llama3.2\", \n", " messages=messages, \n", " stream=True\n", " ):\n", " # Check if the chunk contains text\n", " if chunk.get('message', {}).get('content'):\n", " # Append the new text to the response\n", " response += chunk['message']['content']\n", " # Yield the incrementally built response\n", " yield response\n", "\n", "def stream_brochure(company_name, url, model):\n", " prompt = f\"Please generate a company brochure for {company_name}. Here is their landing page:\\n\"\n", " prompt += Website(url).get_contents()\n", " if model==\"GPT\":\n", " result = stream_gpt(prompt)\n", " elif model==\"Claude\":\n", " result = stream_claude(prompt)\n", " elif model==\"Llama\":\n", " result = stream_llama(prompt)\n", " else:\n", " raise ValueError(\"Unknown model\")\n", " yield from result\n", "\n", "view = gr.Interface(\n", " fn=stream_brochure,\n", " inputs=[\n", " gr.Textbox(label=\"Company name:\"),\n", " gr.Textbox(label=\"Landing page URL including http:// or https://\"),\n", " gr.Dropdown([\"GPT\", \"Claude\", \"Llama\"], label=\"Select model\")],\n", " outputs=[gr.Markdown(label=\"Brochure:\")],\n", " flagging_mode=\"never\"\n", ")\n", "view.launch()" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.11.10" } }, "nbformat": 4, "nbformat_minor": 5 }