
week1 day5 update

pull/232/head
Phi-Li-Ne 2 months ago
commit a0d68a5454

1 changed file: week1/day5.ipynb

week1/day5.ipynb (151 lines changed)

@@ -106,8 +106,8 @@
"metadata": {},
"outputs": [],
"source": [
"ed = Website(\"https://edwarddonner.com\")\n",
"ed.links"
"# ed = Website(\"https://edwarddonner.com\")\n",
"# ed.links"
]
},
{
@@ -195,8 +195,8 @@
"metadata": {},
"outputs": [],
"source": [
"print(f\"Oneshot system prompt:\\n{oneshot_system_prompt}\")\n",
"print(f\"\\n\\n\\nFewshot system prompt:\\n{fewshot_system_prompt}\")"
"# print(f\"Oneshot system prompt:\\n{oneshot_system_prompt}\")\n",
"# print(f\"\\n\\n\\nFewshot system prompt:\\n{fewshot_system_prompt}\")"
]
},
{
@@ -222,7 +222,7 @@
"metadata": {},
"outputs": [],
"source": [
"print(get_links_user_prompt(ed))"
"# print(get_links_user_prompt(ed))"
]
},
{
@@ -245,7 +245,7 @@
" )\n",
" \n",
" result = response.choices[0].message.content \n",
" print(f\"Response: {result}\")\n",
" #print(f\"Response: {result}\")\n",
" return json.loads(result)"
]
},
@@ -256,7 +256,7 @@
"metadata": {},
"outputs": [],
"source": [
"get_links(ed_url)"
"# get_links(ed_url)"
]
},
{
@@ -267,10 +267,10 @@
"outputs": [],
"source": [
"# Anthropic has made their site harder to scrape, so I'm using HuggingFace..\n",
"hf = \"https://huggingface.co\"\n",
"# hf = \"https://huggingface.co\"\n",
"\n",
"huggingface = Website(hf)\n",
"huggingface.links"
"# huggingface = Website(hf)\n",
"# huggingface.links"
]
},
{
@@ -280,21 +280,21 @@
"metadata": {},
"outputs": [],
"source": [
"ed_url = \"https://edwarddonner.com\"\n",
"hf_url = \"https://huggingface.co\"\n",
"# ed_url = \"https://edwarddonner.com\"\n",
"# hf_url = \"https://huggingface.co\"\n",
"\n",
"print(f\"Links generated with oneshot prompt for {ed_url}:\\n\")\n",
"get_links(ed_url)\n",
"# print(f\"Links generated with oneshot prompt for {ed_url}:\\n\")\n",
"# get_links(ed_url)\n",
"\n",
"print(f\"\\n\\nLinks generated with fewshot prompt for {ed_url}:\\n\")\n",
"get_links(ed_url, fewshot_system_prompt)\n",
"# print(f\"\\n\\nLinks generated with fewshot prompt for {ed_url}:\\n\")\n",
"# get_links(ed_url, fewshot_system_prompt)\n",
"\n",
"print(50*\"*\")\n",
"print(f\"\\nLinks generated with oneshot prompt for {hf_url}:\\n\")\n",
"get_links(hf_url)\n",
"# print(50*\"*\")\n",
"# print(f\"\\nLinks generated with oneshot prompt for {hf_url}:\\n\")\n",
"# get_links(hf_url)\n",
"\n",
"print(f\"\\n\\nLinks generated with fewshot prompt for {hf_url}:\\n\")\n",
"get_links(hf_url, fewshot_system_prompt)"
"# print(f\"\\n\\nLinks generated with fewshot prompt for {hf_url}:\\n\")\n",
"# get_links(hf_url, fewshot_system_prompt)"
]
},
{
@@ -319,10 +319,13 @@
" result += Website(url).get_contents()\n",
"\n",
" links = get_links(url, type)\n",
" print(\"Found links:\", links)\n",
" #print(\"Found links:\", links)\n",
" for link in links[\"links\"]:\n",
" if link[\"url\"].startswith(\"http://\"):\n",
" result += f\"\\n\\n{link['type']}\\n\"\n",
" result += Website(link[\"url\"]).get_contents()\n",
" else:\n",
" print(f\"Skipping failed link {link}\")\n",
" return result"
]
},
@@ -333,7 +336,7 @@
"metadata": {},
"outputs": [],
"source": [
"print(get_all_details(ed_url))"
"# print(get_all_details(ed_url))"
]
},
{
@@ -415,7 +418,7 @@
"metadata": {},
"outputs": [],
"source": [
"brochure_ed = create_brochure(\"Edward Donner\", ed_url)"
"# brochure_ed = create_brochure(\"Edward Donner\", ed_url)"
]
},
{
@@ -425,7 +428,7 @@
"metadata": {},
"outputs": [],
"source": [
"brochure_hf = create_brochure(\"HuggingFace\", \"https://huggingface.co\")"
"# brochure_hf = create_brochure(\"HuggingFace\", \"https://huggingface.co\")"
]
},
{
@@ -435,7 +438,7 @@
"metadata": {},
"outputs": [],
"source": [
"display(Markdown(brochure_ed))"
"# display(Markdown(brochure_ed))"
]
},
{
@@ -445,7 +448,7 @@
"metadata": {},
"outputs": [],
"source": [
"display(Markdown(brochure_hf))"
"# display(Markdown(brochure_hf))"
]
},
{
@@ -476,8 +479,8 @@
"metadata": {},
"outputs": [],
"source": [
"translation = translate_brochure(brochure_ed, language=\"German\")\n",
"display(Markdown(translation))"
"# translation = translate_brochure(brochure_ed, language=\"German\")\n",
"# display(Markdown(translation))"
]
},
{
@@ -487,8 +490,8 @@
"metadata": {},
"outputs": [],
"source": [
"translation = translate_brochure(brochure_hf, language=\"German\")\n",
"display(Markdown(translation))"
"# translation = translate_brochure(brochure_hf, language=\"German\")\n",
"# display(Markdown(translation))"
]
},
{
@@ -513,7 +516,7 @@
" stream = openai.chat.completions.create(\n",
" model=MODEL,\n",
" messages=[\n",
" {\"role\": \"system\", \"content\": system_prompt},\n",
" {\"role\": \"system\", \"content\": fewshot_system_prompt},\n",
" {\"role\": \"user\", \"content\": get_brochure_user_prompt(company_name, url)}\n",
" ],\n",
" stream=True\n",
@@ -544,9 +547,89 @@
"metadata": {},
"outputs": [],
"source": [
"# Try changing the system prompt to the humorous version when you make the Brochure for Hugging Face:\n",
"# Extending to Gradio UI\n",
"\n",
"stream_brochure(\"HuggingFace\", \"https://huggingface.co\")"
"import gradio as gr\n",
"import ollama\n",
"import anthropic\n",
"\n",
"anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n",
"claude = anthropic.Anthropic()\n",
"ollama_via_openai = OpenAI(base_url='http://localhost:11434/v1', api_key='ollama')\n",
"\n",
"def stream_claude(systemp, userp):\n",
" result = claude.messages.stream(\n",
" model=\"claude-3-haiku-20240307\",\n",
" max_tokens=1000,\n",
" temperature=0.7,\n",
" system=systemp,\n",
" messages=[\n",
" {\"role\": \"user\", \"content\": userp},\n",
" ],\n",
" )\n",
" response = \"\"\n",
" with result as stream:\n",
" for text in stream.text_stream:\n",
" response += text or \"\"\n",
" yield response\n",
"\n",
"def stream_openai(model, systemp, userp):\n",
" messages = [\n",
" {\"role\": \"system\", \"content\": systemp},\n",
" {\"role\": \"user\", \"content\": userp}\n",
" ]\n",
" if model == \"GPT\":\n",
" model_client = openai\n",
" model = 'gpt-4o-mini'\n",
" else:\n",
" model_client = ollama_via_openai\n",
" model = 'llama3.2'\n",
" \n",
" stream = model_client.chat.completions.create(\n",
" model=model,\n",
" messages=messages,\n",
" stream=True\n",
" )\n",
" result = \"\"\n",
" for chunk in stream:\n",
" result += chunk.choices[0].delta.content or \"\"\n",
" yield result\n",
"\n",
" \n",
"def stream_brochure_in_gradio(company_name, url, model):\n",
" user_pr = get_brochure_user_prompt(company_name, url)\n",
"\n",
" if model==\"GPT\" or \"Llama\":\n",
" result = stream_openai(model, fewshot_system_prompt, user_pr)\n",
" elif model==\"Claude\":\n",
" result = stream_claude(fewshot_system_prompt, user_pr)\n",
" elif model==\"Llama\":\n",
" result = stream_llama(fewshot_system_prompt, user_pr)\n",
" else:\n",
" raise ValueError(\"Unknown model\")\n",
" yield from result\n",
" \n",
"\n",
" \n",
"view = gr.Interface(\n",
" fn=stream_brochure_in_gradio,\n",
" inputs=[gr.Textbox(label=\"Company name:\"),\n",
" gr.Textbox(label=\"Landing page URL including http:// or https://\"),\n",
" gr.Dropdown([\"GPT\", \"Llama\", \"Claude\"], label=\"Select Model\")],\n",
" outputs=[gr.Markdown(label=\"Brochure:\")],\n",
" flagging_mode=\"never\"\n",
")\n",
"view.launch(inbrowser=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "1687a8fb-b999-495f-a2b0-288a0e39c33d",
"metadata": {},
"outputs": [],
"source": [
"stream_brochure_in_gradio(\"https://be-able.info/de/be-able/\")"
]
},
{
