
Week 1 Anatomy translator

pull/61/head
Yifan Wei 5 months ago
parent
commit
c78c579f53
  1. 204   .virtual_documents/week1/day1.ipynb
  2. 133   .virtual_documents/week1/week1 EXERCISE.ipynb
  3. 155   week1/Guide to Jupyter.ipynb
  4. 298   week1/day1.ipynb
  5. 105   week1/day2 EXERCISE.ipynb
  6. 2432  week1/day5.ipynb
  7. 2     week1/troubleshooting.ipynb
  8. 1093  week1/week1 EXERCISE.ipynb
  9. 87    week2/day1.ipynb

204
.virtual_documents/week1/day1.ipynb

@ -0,0 +1,204 @@
# imports
import os
import requests
from dotenv import load_dotenv
from bs4 import BeautifulSoup
from IPython.display import Markdown, display
from openai import OpenAI
# If you get an error running this cell, then please head over to the troubleshooting notebook!
# Load environment variables in a file called .env
load_dotenv()
api_key = os.getenv('OPENAI_API_KEY')
# Check the key
if not api_key:
    print("No API key was found - please head over to the troubleshooting notebook in this folder to identify & fix!")
elif not api_key.startswith("sk-proj-"):
    print("An API key was found, but it doesn't start sk-proj-; please check you're using the right key - see troubleshooting notebook")
elif api_key.strip() != api_key:
    print("An API key was found, but it looks like it might have space or tab characters at the start or end - please remove them - see troubleshooting notebook")
else:
    print("API key found and looks good so far!")
openai = OpenAI()
# If this doesn't work, try Kernel menu >> Restart Kernel and Clear Outputs Of All Cells, then run the cells from the top of this notebook down.
# If it STILL doesn't work (horrors!) then please see the troubleshooting notebook, or try the below line instead:
# openai = OpenAI(api_key="your-key-here-starting-sk-proj-")
# To give you a preview -- calling OpenAI with these messages is this easy:
message = "Hello, GPT! This is my first ever message to you! Hi!"
response = openai.chat.completions.create(model="gpt-4o-mini", messages=[{"role":"user", "content":message}])
print(response.choices[0].message.content)
# A class to represent a Webpage
# If you're not familiar with Classes, check out the "Intermediate Python" notebook
class Website:

    def __init__(self, url):
        """
        Create this Website object from the given url using the BeautifulSoup library
        """
        self.url = url
        response = requests.get(url)
        soup = BeautifulSoup(response.content, 'html.parser')
        self.title = soup.title.string if soup.title else "No title found"
        for irrelevant in soup.body(["script", "style", "img", "input"]):
            irrelevant.decompose()
        self.text = soup.body.get_text(separator="\n", strip=True)
# Let's try one out. Change the website and add print statements to follow along.
ed = Website("https://edwarddonner.com")
print(ed.title)
print(ed.text)
# Define our system prompt - you can experiment with this later, changing the last sentence to 'Respond in markdown in Spanish.'
system_prompt = "You are an assistant that analyzes the contents of a website \
and provides a short summary, ignoring text that might be navigation related. \
Respond in markdown."
# A function that writes a User Prompt that asks for summaries of websites:
def user_prompt_for(website):
    user_prompt = f"You are looking at a website titled {website.title}"
    user_prompt += "\nThe contents of this website is as follows; \
please provide a short summary of this website in markdown. \
If it includes news or announcements, then summarize these too.\n\n"
    user_prompt += website.text
    return user_prompt
print(user_prompt_for(ed))
messages = [
    {"role": "system", "content": "You are a snarky assistant"},
    {"role": "user", "content": "What is 2 + 2?"}
]
# To give you a preview -- calling OpenAI with system and user messages:
response = openai.chat.completions.create(model="gpt-4o-mini", messages=messages)
print(response.choices[0].message.content)
# See how this function creates exactly the format above
def messages_for(website):
    return [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": user_prompt_for(website)}
    ]
# Try this out, and then try for a few more websites
messages_for(ed)
# And now: call the OpenAI API. You will get very familiar with this!
def summarize(url):
    website = Website(url)
    response = openai.chat.completions.create(
        model = "gpt-4o-mini",
        messages = messages_for(website)
    )
    return response.choices[0].message.content
summarize("https://edwarddonner.com")
# A function to display this nicely in the Jupyter output, using markdown
def display_summary(url):
    summary = summarize(url)
    display(Markdown(summary))
display_summary("https://edwarddonner.com")
display_summary("https://cnn.com")
display_summary("https://anthropic.com")
# Step 1: Create your prompts
system_prompt = "something here"
user_prompt = """
Lots of text
Can be pasted here
"""
# Step 2: Make the messages list
messages = [] # fill this in
# Step 3: Call OpenAI
response = None  # fill this in with a call to openai.chat.completions.create(...)
# Step 4: print the result
print(response)
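# One possible completion of the exercise above (a sketch, not the committed solution);
# it assumes the placeholder system_prompt / user_prompt and the openai client created earlier.
messages = [
    {"role": "system", "content": system_prompt},
    {"role": "user", "content": user_prompt}
]
response = openai.chat.completions.create(model="gpt-4o-mini", messages=messages)
print(response.choices[0].message.content)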

133
.virtual_documents/week1/week1 EXERCISE.ipynb

@ -0,0 +1,133 @@
# imports
import os
import requests
import json
from typing import List
from dotenv import load_dotenv
from bs4 import BeautifulSoup
from IPython.display import Markdown, display, update_display
from openai import OpenAI
# constants
MODEL_GPT = 'gpt-4o-mini'
MODEL_LLAMA = 'llama3.2'
# set up environment
load_dotenv()
api_key = os.getenv('OPENAI_API_KEY')
if api_key and api_key.startswith('sk-proj-') and len(api_key) > 10:
    print("API key looks good so far")
else:
    print("There might be a problem with your API key? Please visit the troubleshooting notebook!")
# create the OpenAI client used below
openai = OpenAI()
# A class to represent a Webpage
class Website:
    """
    A utility class to represent a Website that we have scraped, now with links
    """

    def __init__(self, url):
        self.url = url
        response = requests.get(url)
        self.body = response.content
        soup = BeautifulSoup(self.body, 'html.parser')
        self.title = soup.title.string if soup.title else "No title found"
        if soup.body:
            for irrelevant in soup.body(["script", "style", "img", "input"]):
                irrelevant.decompose()
            self.text = soup.body.get_text(separator="\n", strip=True)
        else:
            self.text = ""
        links = [link.get('href') for link in soup.find_all('a')]
        self.links = [link for link in links if link]

    def get_contents(self):
        return f"Webpage Title:\n{self.title}\nWebpage Contents:\n{self.text}\n\n"
dr = Website("https://www.drbruceforciea.com")
print(dr.get_contents())
print(dr.links)
link_system_prompt = "You are provided with a list of links found on a webpage. \
You are able to decide which of the links would be most relevant to learn anatomy and physiology, \
such as links to an Anatomy or Physiology page, Learning page, or Book page.\n"
link_system_prompt += "You should respond in JSON as in this example:"
link_system_prompt += """
{
    "links": [
        {"type": "anatomy and physiology page", "url": "https://full.url/goes/here/anatomy-and-physiology"},
        {"type": "learning page", "url": "https://another.full.url/learning"}
    ]
}
"""
def get_links_user_prompt(website):
    user_prompt = f"Here is the list of links on the website of {website.url} - "
    user_prompt += "please decide which of these are relevant web links to learn anatomy and physiology, respond with the full https URL in JSON format. \
Do not include Terms of Service, Privacy, email links.\n"
    user_prompt += "Links (some might be relative links):\n"
    user_prompt += "\n".join(website.links)
    return user_prompt
print(get_links_user_prompt(dr))
def get_links(url):
    website = Website(url)
    response = openai.chat.completions.create(
        model=MODEL_GPT,
        messages=[
            {"role": "system", "content": link_system_prompt},
            {"role": "user", "content": get_links_user_prompt(website)}
        ],
        response_format={"type": "json_object"}
    )
    result = response.choices[0].message.content
    return json.loads(result)
# Give a medicine related website link.
nationalcancerinstitute = Website("https://training.seer.cancer.gov/modules_reg_surv.html")
nationalcancerinstitute.links
get_links("https://training.seer.cancer.gov/modules_reg_surv.html")
def get_all_details(url):
    result = "Landing page:\n"
    result += Website(url).get_contents()
    links = get_links(url)
    print("Found links:", links)
    for link in links["links"]:
        result += f"\n\n{link['type']}\n"
        result += Website(link["url"]).get_contents()
    return result
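# Quick check of the pipeline (a sketch - expect a long printout, since every selected link is fetched)
print(get_all_details("https://www.drbruceforciea.com"))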
# here is the question; type over this to ask something new
question = """
Please explain what this code does and why:
yield from {book.get("author") for book in books if book.get("author")}
"""
# Get gpt-4o-mini to answer, with streaming
# Get Llama 3.2 to answer
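# A sketch of one way to answer with both models (not the committed solution); it assumes the
# openai client and the MODEL_GPT / MODEL_LLAMA constants above, plus a locally running Ollama.
messages = [{"role": "user", "content": question}]

# gpt-4o-mini, streaming the tokens into a live Markdown display
stream = openai.chat.completions.create(model=MODEL_GPT, messages=messages, stream=True)
reply = ""
display_handle = display(Markdown(""), display_id=True)
for chunk in stream:
    reply += chunk.choices[0].delta.content or ""
    update_display(Markdown(reply), display_id=display_handle.display_id)

# Llama 3.2 via the ollama python package
import ollama
llama_reply = ollama.chat(model=MODEL_LLAMA, messages=messages)
print(llama_reply['message']['content'])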

155
week1/Guide to Jupyter.ipynb

@ -32,10 +32,21 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 1,
"id": "33d37cd8-55c9-4e03-868c-34aa9cab2c80", "id": "33d37cd8-55c9-4e03-868c-34aa9cab2c80",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"data": {
"text/plain": [
"4"
]
},
"execution_count": 1,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [ "source": [
"# Click anywhere in this cell and press Shift + Return\n", "# Click anywhere in this cell and press Shift + Return\n",
"\n", "\n",
@ -54,7 +65,7 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 2,
"id": "585eb9c1-85ee-4c27-8dc2-b4d8d022eda0", "id": "585eb9c1-85ee-4c27-8dc2-b4d8d022eda0",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
@ -66,10 +77,21 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 3,
"id": "07792faa-761d-46cb-b9b7-2bbf70bb1628", "id": "07792faa-761d-46cb-b9b7-2bbf70bb1628",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"data": {
"text/plain": [
"'bananas'"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [ "source": [
"# The result of the last statement is shown after you run it\n", "# The result of the last statement is shown after you run it\n",
"\n", "\n",
@ -78,10 +100,18 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 4,
"id": "a067d2b1-53d5-4aeb-8a3c-574d39ff654a", "id": "a067d2b1-53d5-4aeb-8a3c-574d39ff654a",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"My favorite fruit is bananas\n"
]
}
],
"source": [ "source": [
"# Use the variable\n", "# Use the variable\n",
"\n", "\n",
@ -90,7 +120,7 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 6,
"id": "4c5a4e60-b7f4-4953-9e80-6d84ba4664ad", "id": "4c5a4e60-b7f4-4953-9e80-6d84ba4664ad",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
@ -116,10 +146,18 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 8,
"id": "8e5ec81d-7c5b-4025-bd2e-468d67b581b6", "id": "8e5ec81d-7c5b-4025-bd2e-468d67b581b6",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"My favorite fruit is apples\n"
]
}
],
"source": [ "source": [
"# Then run this cell twice, and see if you understand what's going on\n", "# Then run this cell twice, and see if you understand what's going on\n",
"\n", "\n",
@ -144,10 +182,18 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 9,
"id": "84b1e410-5eda-4e2c-97ce-4eebcff816c5", "id": "84b1e410-5eda-4e2c-97ce-4eebcff816c5",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"My favorite fruit is apples\n"
]
}
],
"source": [ "source": [
"print(f\"My favorite fruit is {favorite_fruit}\")" "print(f\"My favorite fruit is {favorite_fruit}\")"
] ]
@ -245,10 +291,21 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 10,
"id": "82042fc5-a907-4381-a4b8-eb9386df19cd", "id": "82042fc5-a907-4381-a4b8-eb9386df19cd",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Guide to Jupyter.ipynb day2 EXERCISE.ipynb troubleshooting.ipynb\n",
"Intermediate Python.ipynb day5.ipynb week1 EXERCISE.ipynb\n",
"\u001b[34mcommunity-contributions\u001b[m\u001b[m diagnostics.py\n",
"day1.ipynb \u001b[34msolutions\u001b[m\u001b[m\n"
]
}
],
"source": [ "source": [
"# list the current directory\n", "# list the current directory\n",
"\n", "\n",
@ -257,10 +314,40 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 11,
"id": "4fc3e3da-8a55-40cc-9706-48bf12a0e20e", "id": "4fc3e3da-8a55-40cc-9706-48bf12a0e20e",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"PING cnn.com (151.101.195.5): 56 data bytes\n",
"64 bytes from 151.101.195.5: icmp_seq=0 ttl=58 time=9.569 ms\n",
"64 bytes from 151.101.195.5: icmp_seq=1 ttl=58 time=15.249 ms\n",
"64 bytes from 151.101.195.5: icmp_seq=2 ttl=58 time=17.790 ms\n",
"64 bytes from 151.101.195.5: icmp_seq=3 ttl=58 time=14.748 ms\n",
"64 bytes from 151.101.195.5: icmp_seq=4 ttl=58 time=17.198 ms\n",
"64 bytes from 151.101.195.5: icmp_seq=5 ttl=58 time=16.242 ms\n",
"64 bytes from 151.101.195.5: icmp_seq=6 ttl=58 time=14.943 ms\n",
"64 bytes from 151.101.195.5: icmp_seq=7 ttl=58 time=16.258 ms\n",
"64 bytes from 151.101.195.5: icmp_seq=8 ttl=58 time=13.901 ms\n",
"64 bytes from 151.101.195.5: icmp_seq=9 ttl=58 time=12.729 ms\n",
"64 bytes from 151.101.195.5: icmp_seq=10 ttl=58 time=17.548 ms\n",
"64 bytes from 151.101.195.5: icmp_seq=11 ttl=58 time=32.210 ms\n",
"64 bytes from 151.101.195.5: icmp_seq=12 ttl=58 time=14.898 ms\n",
"64 bytes from 151.101.195.5: icmp_seq=13 ttl=58 time=12.431 ms\n",
"64 bytes from 151.101.195.5: icmp_seq=14 ttl=58 time=16.906 ms\n",
"64 bytes from 151.101.195.5: icmp_seq=15 ttl=58 time=15.539 ms\n",
"64 bytes from 151.101.195.5: icmp_seq=16 ttl=58 time=15.169 ms\n",
"^C\n",
"\n",
"--- cnn.com ping statistics ---\n",
"17 packets transmitted, 17 packets received, 0.0% packet loss\n",
"round-trip min/avg/max/stddev = 9.569/16.078/32.210/4.506 ms\n"
]
}
],
"source": [ "source": [
"# ping cnn.com - press the stop button in the toolbar when you're bored\n", "# ping cnn.com - press the stop button in the toolbar when you're bored\n",
"\n", "\n",
@ -295,7 +382,7 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 12,
"id": "2646a4e5-3c23-4aee-a34d-d623815187d2", "id": "2646a4e5-3c23-4aee-a34d-d623815187d2",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
@ -313,10 +400,19 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 13,
"id": "6e96be3d-fa82-42a3-a8aa-b81dd20563a5", "id": "6e96be3d-fa82-42a3-a8aa-b81dd20563a5",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
"00%|███████████████████████████████████████| 1000/1000 [00:12<00:00, 81.81it/s]"
]
}
],
"source": [ "source": [
"# And now, with a nice little progress bar:\n", "# And now, with a nice little progress bar:\n",
"\n", "\n",
@ -331,10 +427,27 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 14,
"id": "63c788dd-4618-4bb4-a5ce-204411a38ade", "id": "63c788dd-4618-4bb4-a5ce-204411a38ade",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"data": {
"text/markdown": [
"# This is a big heading!\n",
"\n",
"- And this is a bullet-point\n",
"- So is this\n",
"- Me, too!"
],
"text/plain": [
"<IPython.core.display.Markdown object>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [ "source": [
"# On a different topic, here's a useful way to print output in markdown\n", "# On a different topic, here's a useful way to print output in markdown\n",
"\n", "\n",

298
week1/day1.ipynb

@ -69,7 +69,7 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 1,
"id": "4e2a9393-7767-488e-a8bf-27c12dca35bd", "id": "4e2a9393-7767-488e-a8bf-27c12dca35bd",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
@ -108,10 +108,18 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 2,
"id": "7b87cadb-d513-4303-baee-a37b6f938e4d", "id": "7b87cadb-d513-4303-baee-a37b6f938e4d",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"API key found and looks good so far!\n"
]
}
],
"source": [ "source": [
"# Load environment variables in a file called .env\n", "# Load environment variables in a file called .env\n",
"\n", "\n",
@ -132,7 +140,7 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 4,
"id": "019974d9-f3ad-4a8a-b5f9-0a3719aea2d3", "id": "019974d9-f3ad-4a8a-b5f9-0a3719aea2d3",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
@ -154,10 +162,18 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 5,
"id": "a58394bf-1e45-46af-9bfd-01e24da6f49a", "id": "a58394bf-1e45-46af-9bfd-01e24da6f49a",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Hello! Welcome! I'm glad to be chatting with you. How can I assist you today?\n"
]
}
],
"source": [ "source": [
"# To give you a preview -- calling OpenAI with these messages is this easy:\n", "# To give you a preview -- calling OpenAI with these messages is this easy:\n",
"\n", "\n",
@ -176,7 +192,7 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 6,
"id": "c5e793b2-6775-426a-a139-4848291d0463", "id": "c5e793b2-6775-426a-a139-4848291d0463",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
@ -206,10 +222,63 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 7,
"id": "2ef960cf-6dc2-4cda-afb3-b38be12f4c97", "id": "2ef960cf-6dc2-4cda-afb3-b38be12f4c97",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Home - Edward Donner\n",
"Home\n",
"Outsmart\n",
"An arena that pits LLMs against each other in a battle of diplomacy and deviousness\n",
"About\n",
"Posts\n",
"Well, hi there.\n",
"I’m Ed. I like writing code and experimenting with LLMs, and hopefully you’re here because you do too. I also enjoy DJing (but I’m badly out of practice), amateur electronic music production (\n",
"very\n",
"amateur) and losing myself in\n",
"Hacker News\n",
", nodding my head sagely to things I only half understand.\n",
"I’m the co-founder and CTO of\n",
"Nebula.io\n",
". We’re applying AI to a field where it can make a massive, positive impact: helping people discover their potential and pursue their reason for being. Recruiters use our product today to source, understand, engage and manage talent. I’m previously the founder and CEO of AI startup untapt,\n",
"acquired in 2021\n",
".\n",
"We work with groundbreaking, proprietary LLMs verticalized for talent, we’ve\n",
"patented\n",
"our matching model, and our award-winning platform has happy customers and tons of press coverage.\n",
"Connect\n",
"with me for more!\n",
"November 13, 2024\n",
"Mastering AI and LLM Engineering – Resources\n",
"October 16, 2024\n",
"From Software Engineer to AI Data Scientist – resources\n",
"August 6, 2024\n",
"Outsmart LLM Arena – a battle of diplomacy and deviousness\n",
"June 26, 2024\n",
"Choosing the Right LLM: Toolkit and Resources\n",
"Navigation\n",
"Home\n",
"Outsmart\n",
"An arena that pits LLMs against each other in a battle of diplomacy and deviousness\n",
"About\n",
"Posts\n",
"Get in touch\n",
"ed [at] edwarddonner [dot] com\n",
"www.edwarddonner.com\n",
"Follow me\n",
"LinkedIn\n",
"Twitter\n",
"Facebook\n",
"Subscribe to newsletter\n",
"Type your email…\n",
"Subscribe\n"
]
}
],
"source": [ "source": [
"# Let's try one out. Change the website and add print statements to follow along.\n", "# Let's try one out. Change the website and add print statements to follow along.\n",
"\n", "\n",
@ -238,7 +307,7 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 8,
"id": "abdb8417-c5dc-44bc-9bee-2e059d162699", "id": "abdb8417-c5dc-44bc-9bee-2e059d162699",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
@ -252,7 +321,7 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 9,
"id": "f0275b1b-7cfe-4f9d-abfa-7650d378da0c", "id": "f0275b1b-7cfe-4f9d-abfa-7650d378da0c",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
@ -270,10 +339,65 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 10,
"id": "26448ec4-5c00-4204-baec-7df91d11ff2e", "id": "26448ec4-5c00-4204-baec-7df91d11ff2e",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"You are looking at a website titled Home - Edward Donner\n",
"The contents of this website is as follows; please provide a short summary of this website in markdown. If it includes news or announcements, then summarize these too.\n",
"\n",
"Home\n",
"Outsmart\n",
"An arena that pits LLMs against each other in a battle of diplomacy and deviousness\n",
"About\n",
"Posts\n",
"Well, hi there.\n",
"I’m Ed. I like writing code and experimenting with LLMs, and hopefully you’re here because you do too. I also enjoy DJing (but I’m badly out of practice), amateur electronic music production (\n",
"very\n",
"amateur) and losing myself in\n",
"Hacker News\n",
", nodding my head sagely to things I only half understand.\n",
"I’m the co-founder and CTO of\n",
"Nebula.io\n",
". We’re applying AI to a field where it can make a massive, positive impact: helping people discover their potential and pursue their reason for being. Recruiters use our product today to source, understand, engage and manage talent. I’m previously the founder and CEO of AI startup untapt,\n",
"acquired in 2021\n",
".\n",
"We work with groundbreaking, proprietary LLMs verticalized for talent, we’ve\n",
"patented\n",
"our matching model, and our award-winning platform has happy customers and tons of press coverage.\n",
"Connect\n",
"with me for more!\n",
"November 13, 2024\n",
"Mastering AI and LLM Engineering – Resources\n",
"October 16, 2024\n",
"From Software Engineer to AI Data Scientist – resources\n",
"August 6, 2024\n",
"Outsmart LLM Arena – a battle of diplomacy and deviousness\n",
"June 26, 2024\n",
"Choosing the Right LLM: Toolkit and Resources\n",
"Navigation\n",
"Home\n",
"Outsmart\n",
"An arena that pits LLMs against each other in a battle of diplomacy and deviousness\n",
"About\n",
"Posts\n",
"Get in touch\n",
"ed [at] edwarddonner [dot] com\n",
"www.edwarddonner.com\n",
"Follow me\n",
"LinkedIn\n",
"Twitter\n",
"Facebook\n",
"Subscribe to newsletter\n",
"Type your email…\n",
"Subscribe\n"
]
}
],
"source": [ "source": [
"print(user_prompt_for(ed))" "print(user_prompt_for(ed))"
] ]
@ -299,7 +423,7 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 11,
"id": "f25dcd35-0cd0-4235-9f64-ac37ed9eaaa5", "id": "f25dcd35-0cd0-4235-9f64-ac37ed9eaaa5",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
@ -312,10 +436,18 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 12,
"id": "21ed95c5-7001-47de-a36d-1d6673b403ce", "id": "21ed95c5-7001-47de-a36d-1d6673b403ce",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Oh, a real brain teaser! The answer is 4. But if you’re looking for something more challenging, I’m all ears!\n"
]
}
],
"source": [ "source": [
"# To give you a preview -- calling OpenAI with system and user messages:\n", "# To give you a preview -- calling OpenAI with system and user messages:\n",
"\n", "\n",
@ -333,7 +465,7 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 13,
"id": "0134dfa4-8299-48b5-b444-f2a8c3403c88", "id": "0134dfa4-8299-48b5-b444-f2a8c3403c88",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
@ -349,10 +481,24 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 14,
"id": "36478464-39ee-485c-9f3f-6a4e458dbc9c", "id": "36478464-39ee-485c-9f3f-6a4e458dbc9c",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"data": {
"text/plain": [
"[{'role': 'system',\n",
" 'content': 'You are an assistant that analyzes the contents of a website and provides a short summary, ignoring text that might be navigation related. Respond in markdown.'},\n",
" {'role': 'user',\n",
" 'content': 'You are looking at a website titled Home - Edward Donner\\nThe contents of this website is as follows; please provide a short summary of this website in markdown. If it includes news or announcements, then summarize these too.\\n\\nHome\\nOutsmart\\nAn arena that pits LLMs against each other in a battle of diplomacy and deviousness\\nAbout\\nPosts\\nWell, hi there.\\nI’m Ed. I like writing code and experimenting with LLMs, and hopefully you’re here because you do too. I also enjoy DJing (but I’m badly out of practice), amateur electronic music production (\\nvery\\namateur) and losing myself in\\nHacker News\\n, nodding my head sagely to things I only half understand.\\nI’m the co-founder and CTO of\\nNebula.io\\n. We’re applying AI to a field where it can make a massive, positive impact: helping people discover their potential and pursue their reason for being. Recruiters use our product today to source, understand, engage and manage talent. I’m previously the founder and CEO of AI startup untapt,\\nacquired in 2021\\n.\\nWe work with groundbreaking, proprietary LLMs verticalized for talent, we’ve\\npatented\\nour matching model, and our award-winning platform has happy customers and tons of press coverage.\\nConnect\\nwith me for more!\\nNovember 13, 2024\\nMastering AI and LLM Engineering – Resources\\nOctober 16, 2024\\nFrom Software Engineer to AI Data Scientist – resources\\nAugust 6, 2024\\nOutsmart LLM Arena – a battle of diplomacy and deviousness\\nJune 26, 2024\\nChoosing the Right LLM: Toolkit and Resources\\nNavigation\\nHome\\nOutsmart\\nAn arena that pits LLMs against each other in a battle of diplomacy and deviousness\\nAbout\\nPosts\\nGet in touch\\ned [at] edwarddonner [dot] com\\nwww.edwarddonner.com\\nFollow me\\nLinkedIn\\nTwitter\\nFacebook\\nSubscribe to newsletter\\nType your email…\\nSubscribe'}]"
]
},
"execution_count": 14,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [ "source": [
"# Try this out, and then try for a few more websites\n", "# Try this out, and then try for a few more websites\n",
"\n", "\n",
@ -369,7 +515,7 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 16,
"id": "905b9919-aba7-45b5-ae65-81b3d1d78e34", "id": "905b9919-aba7-45b5-ae65-81b3d1d78e34",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
@ -387,17 +533,28 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 17,
"id": "05e38d41-dfa4-4b20-9c96-c46ea75d9fb5", "id": "05e38d41-dfa4-4b20-9c96-c46ea75d9fb5",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"data": {
"text/plain": [
"'# Summary of Edward Donner\\'s Website\\n\\nEdward Donner\\'s website features content focused on his interests in coding, experimenting with large language models (LLMs), and his professional work in AI. As the co-founder and CTO of Nebula.io, he emphasizes the positive impact of AI in helping individuals discover their potential and improve talent management. He previously founded and led an AI startup, untapt, which was acquired in 2021.\\n\\n## Recent News and Announcements\\n\\n- **November 13, 2024**: Shared resources for mastering AI and LLM engineering.\\n- **October 16, 2024**: Provided resources for transitioning from a software engineer to an AI data scientist.\\n- **August 6, 2024**: Announced the \"Outsmart LLM Arena,\" an initiative to engage LLMs in a competitive format.\\n- **June 26, 2024**: Offered a toolkit and resources for selecting the right LLM. \\n\\nThe website invites visitors to connect with Edward Donner for further engagement.'"
]
},
"execution_count": 17,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [ "source": [
"summarize(\"https://edwarddonner.com\")" "summarize(\"https://edwarddonner.com\")"
] ]
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 18,
"id": "3d926d59-450e-4609-92ba-2d6f244f1342", "id": "3d926d59-450e-4609-92ba-2d6f244f1342",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
@ -411,10 +568,38 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 19,
"id": "3018853a-445f-41ff-9560-d925d1774b2f", "id": "3018853a-445f-41ff-9560-d925d1774b2f",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"data": {
"text/markdown": [
"# Summary of Edward Donner's Website\n",
"\n",
"The website is a personal platform belonging to Edward Donner, a co-founder and CTO of Nebula.io, a company focused on utilizing AI to assist individuals in discovering their potential and managing talent. Edward expresses his interests in coding, experimenting with large language models (LLMs), and DJing, along with occasional contributions to Hacker News.\n",
"\n",
"## Key Features:\n",
"- **Outsmart**: A unique arena designed to challenge LLMs in a competition of diplomacy and strategy.\n",
"- **About**: Edward shares his background in tech and his experiences, including his previous venture, untapt, which was acquired in 2021.\n",
"- **Posts**: The blog section features various resources and announcements related to AI and LLMs.\n",
"\n",
"## Recent Announcements:\n",
"1. **November 13, 2024**: Post on \"Mastering AI and LLM Engineering – Resources\".\n",
"2. **October 16, 2024**: Article titled \"From Software Engineer to AI Data Scientist – resources\".\n",
"3. **August 6, 2024**: Introduction of \"Outsmart LLM Arena – a battle of diplomacy and deviousness\".\n",
"4. **June 26, 2024**: Guide on \"Choosing the Right LLM: Toolkit and Resources\". \n",
"\n",
"Overall, the website serves as a hub for Edward's thoughts and contributions in the field of AI and LLMs, offering resources for those interested in these topics."
],
"text/plain": [
"<IPython.core.display.Markdown object>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [ "source": [
"display_summary(\"https://edwarddonner.com\")" "display_summary(\"https://edwarddonner.com\")"
] ]
@ -437,20 +622,73 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 20,
"id": "45d83403-a24c-44b5-84ac-961449b4008f", "id": "45d83403-a24c-44b5-84ac-961449b4008f",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"data": {
"text/markdown": [
"## Summary of CNN\n",
"\n",
"CNN is an extensive news platform offering breaking news, video content, and in-depth analysis across a variety of categories such as US and world news, politics, business, health, entertainment, sports, and lifestyle topics. \n",
"\n",
"### Key News Highlights\n",
"- **Ukraine-Russia War:** Continuous updates and analyses concerning the conflict.\n",
"- **Israel-Hamas War:** Ongoing coverage on developments related to the conflict.\n",
"- **US Politics:** Coverage includes the actions of former President Trump in relation to immigration and election matters, and discussions surrounding the current political landscape.\n",
"- **Global Events:** Notable stories include the aftermath of the Syrian civil war and the implications of the regime change in Syria.\n",
"\n",
"### Noteworthy Headlines\n",
"- **Strikes in Damascus:** Reports indicate strikes heard as rebel forces gain control in Syria.\n",
"- **Juan Soto's Contract:** Sports news highlights the record-breaking contract signed by baseball player Juan Soto.\n",
"- **Health Insurance CEO's Death:** Coverage includes public reactions to the death of a prominent health insurance CEO.\n",
"- **Natural Disasters:** Reports about extreme weather conditions affecting various regions, particularly in Southern California.\n",
"\n",
"CNN also emphasizes reader engagement, providing options for feedback on its advertisements and service quality, showcasing its commitment to user experience. The platform offers a wide array of resources including live updates, videos, and podcasts, making it a comprehensive source for current events."
],
"text/plain": [
"<IPython.core.display.Markdown object>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [ "source": [
"display_summary(\"https://cnn.com\")" "display_summary(\"https://cnn.com\")"
] ]
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 21,
"id": "75e9fd40-b354-4341-991e-863ef2e59db7", "id": "75e9fd40-b354-4341-991e-863ef2e59db7",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"data": {
"text/markdown": [
"# Summary of Anthropic Website\n",
"\n",
"Anthropic is an AI research company focused on developing reliable and safe AI systems. The site showcases the company's commitment to building AI models that align with human intentions and values. Key features of the website include:\n",
"\n",
"- **Mission and Values**: Anthropic emphasizes its dedication to research that prioritizes safety and alignment in artificial intelligence development.\n",
"- **AI Models**: The company highlights its work on advanced AI models, detailing their capabilities and the ethical considerations involved in their deployment.\n",
"- **Research Publications**: Anthropic shares insights from its research efforts, offering access to various papers and findings relating to AI safety and alignment methodologies.\n",
"\n",
"### News and Announcements\n",
"- The website may feature recent developments or updates in AI research, partnerships, or new model releases. Specific announcements may include ongoing initiatives or collaborations in the AI safety field, as well as insights into future projects aimed at enhancing AI's alignment with human values.\n",
"\n",
"For specific news items and detailed announcements, further exploration of the website is suggested."
],
"text/plain": [
"<IPython.core.display.Markdown object>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [ "source": [
"display_summary(\"https://anthropic.com\")" "display_summary(\"https://anthropic.com\")"
] ]
@ -564,7 +802,7 @@
"name": "python", "name": "python",
"nbconvert_exporter": "python", "nbconvert_exporter": "python",
"pygments_lexer": "ipython3", "pygments_lexer": "ipython3",
"version": "3.11.11" "version": "3.12.7"
} }
}, },
"nbformat": 4, "nbformat": 4,

105
week1/day2 EXERCISE.ipynb

@ -68,7 +68,7 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 1,
"id": "4e2a9393-7767-488e-a8bf-27c12dca35bd", "id": "4e2a9393-7767-488e-a8bf-27c12dca35bd",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
@ -82,7 +82,7 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 2,
"id": "29ddd15d-a3c5-4f4e-a678-873f56162724", "id": "29ddd15d-a3c5-4f4e-a678-873f56162724",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
@ -96,7 +96,7 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 3,
"id": "dac0a679-599c-441f-9bf2-ddc73d35b940", "id": "dac0a679-599c-441f-9bf2-ddc73d35b940",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
@ -110,7 +110,7 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 4,
"id": "7bb9c624-14f0-4945-a719-8ddb64f66f47", "id": "7bb9c624-14f0-4945-a719-8ddb64f66f47",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
@ -124,10 +124,39 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 5,
"id": "42b9f644-522d-4e05-a691-56e7658c0ea9", "id": "42b9f644-522d-4e05-a691-56e7658c0ea9",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Generative AI has numerous business applications across various industries, including:\n",
"\n",
"1. **Content Creation**: AI-powered tools can generate high-quality content such as articles, blog posts, social media posts, and product descriptions, saving time and resources for writers and marketers.\n",
"2. **Visual Content Generation**: Generative AI can create images, videos, and 3D models, revolutionizing the way businesses produce visual content for marketing, advertising, and branding purposes.\n",
"3. **Chatbots and Virtual Assistants**: AI-powered chatbots can interact with customers, provide customer support, and answer frequently asked questions, improving customer experience and reducing support costs.\n",
"4. **Product Design and Development**: Generative AI can help designers and engineers create new product designs, prototypes, and models, streamlining the design process and reducing costs.\n",
"5. **Marketing Automation**: AI-powered tools can analyze customer data, behavior, and preferences to generate targeted marketing campaigns, personalized emails, and offers, increasing campaign effectiveness and ROI.\n",
"6. **Predictive Analytics**: Generative AI can analyze historical data, identify patterns, and make predictions about future market trends, helping businesses anticipate and prepare for changes in the market.\n",
"7. **Financial Modeling and Analysis**: AI-powered tools can generate financial models, forecasts, and scenarios, enabling businesses to optimize investment strategies, predict revenue growth, and mitigate risks.\n",
"8. **Data Analysis and Insights**: Generative AI can analyze large datasets, identify insights, and provide recommendations, helping businesses make data-driven decisions and drive business outcomes.\n",
"9. **Cybersecurity**: AI-powered tools can detect anomalies, predict threats, and generate security alerts, enhancing the effectiveness of cybersecurity measures and protecting against cyber-attacks.\n",
"10. **Supply Chain Optimization**: Generative AI can analyze supply chain data, identify bottlenecks, and optimize logistics and inventory management, reducing costs, improving efficiency, and increasing customer satisfaction.\n",
"\n",
"Some specific industries where Generative AI has already shown promise include:\n",
"\n",
"* E-commerce (product recommendations, personalized content)\n",
"* Advertising (ad creative generation, targeting optimization)\n",
"* Healthcare (medical imaging analysis, disease diagnosis)\n",
"* Education (personalized learning plans, adaptive assessments)\n",
"* Finance (risk analysis, portfolio optimization)\n",
"\n",
"These are just a few examples of the many business applications of Generative AI. As the technology continues to evolve, we can expect to see even more innovative uses across various industries and domains.\n"
]
}
],
"source": [ "source": [
"response = requests.post(OLLAMA_API, json=payload, headers=HEADERS)\n", "response = requests.post(OLLAMA_API, json=payload, headers=HEADERS)\n",
"print(response.json()['message']['content'])" "print(response.json()['message']['content'])"
@ -147,10 +176,39 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 6,
"id": "7745b9c4-57dc-4867-9180-61fa5db55eb8", "id": "7745b9c4-57dc-4867-9180-61fa5db55eb8",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Generative AI has numerous business applications across various industries. Here are some examples:\n",
"\n",
"1. **Content Creation**: Generative AI can be used to create personalized content, such as articles, social media posts, and product descriptions. This can help businesses automate their content creation process and reduce the cost of creating high-quality content.\n",
"2. **Product Design**: Generative AI can be used to design new products, such as furniture, clothing, and electronics. This can help businesses create innovative products with unique designs that appeal to customers.\n",
"3. **Marketing Automation**: Generative AI can be used to automate marketing campaigns, such as email marketing and social media advertising. This can help businesses personalize their marketing messages and improve the effectiveness of their campaigns.\n",
"4. **Customer Service**: Generative AI can be used to create chatbots that provide customer support and answer common questions. This can help businesses improve their customer service experience and reduce the cost of providing support.\n",
"5. **Predictive Maintenance**: Generative AI can be used to analyze data from sensors and machines to predict when maintenance is needed. This can help businesses reduce downtime, improve equipment efficiency, and extend equipment lifespan.\n",
"6. **Recommendation Systems**: Generative AI can be used to create personalized product recommendations based on customer behavior and preferences. This can help businesses increase sales and improve customer satisfaction.\n",
"7. **Data Analysis**: Generative AI can be used to analyze large datasets and identify patterns and trends that may not be visible to human analysts. This can help businesses make data-driven decisions and improve their operations.\n",
"8. **Financial Modeling**: Generative AI can be used to create financial models that simulate different scenarios and predict outcomes. This can help businesses make informed investment decisions and reduce the risk of losses.\n",
"9. **Supply Chain Optimization**: Generative AI can be used to optimize supply chain logistics, such as predicting demand, managing inventory, and optimizing shipping routes. This can help businesses improve their supply chain efficiency and reduce costs.\n",
"10. **Cybersecurity**: Generative AI can be used to detect and respond to cyber threats in real-time. This can help businesses protect their data and systems from attacks.\n",
"\n",
"Some examples of companies that are using generative AI include:\n",
"\n",
"* **Netflix**: Using generative AI to create personalized movie recommendations\n",
"* **Amazon**: Using generative AI to personalize product recommendations and improve customer service\n",
"* **Google**: Using generative AI to improve search results and provide more accurate information\n",
"* **BMW**: Using generative AI to design new car models and optimize production processes\n",
"* **Airbnb**: Using generative AI to create personalized travel experiences for customers\n",
"\n",
"These are just a few examples of the many business applications of generative AI. As the technology continues to evolve, we can expect to see even more innovative uses in various industries.\n"
]
}
],
"source": [ "source": [
"import ollama\n", "import ollama\n",
"\n", "\n",
@ -168,10 +226,37 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 7,
"id": "23057e00-b6fc-4678-93a9-6b31cb704bff", "id": "23057e00-b6fc-4678-93a9-6b31cb704bff",
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Generative AI has numerous business applications across various industries, including:\n",
"\n",
"1. **Content Creation**: Generative AI can be used to generate high-quality content such as images, videos, articles, and social media posts automatically.\n",
"2. **Personalization**: Generative AI can be used to create personalized experiences for customers by generating customized products or services based on their preferences, behavior, and demographics.\n",
"3. **Marketing Automation**: Generative AI can automate marketing tasks such as lead generation, email campaigns, and ad creative optimization.\n",
"4. **Product Design**: Generative AI can be used to generate designs for new products, packaging, and branding materials in various industries like fashion, furniture, and consumer goods.\n",
"5. **Sales Predictive Analytics**: Generative AI can analyze historical sales data and generate predictive models to forecast demand, identify potential customers, and optimize sales strategies.\n",
"6. **Finance and Banking**: Generative AI can be used for portfolio optimization, risk analysis, and credit scoring.\n",
"7. **Healthcare**: Generative AI can be used to identify patterns in patient data, diagnose diseases more accurately, and generate personalized treatment plans.\n",
"8. **Education**: Generative AI can create personalized learning materials, adaptive curricula, and virtual teaching assistants.\n",
"9. **Supply Chain Optimization**: Generative AI can optimize logistics and supply chain management by analyzing traffic patterns, predicting demand, and identifying opportunities for cost savings.\n",
"10. **Customer Service Chatbots**: Generative AI can generate conversational flows, respond to customer inquiries, and provide basic support.\n",
"\n",
"Some specific examples of business applications include:\n",
"\n",
"* **Google's Imagining Assistant**: uses Generative AI to create customized search results and suggestions based on a user's context.\n",
"* **Facebook's Custom Content Creation Tool**: uses Generative AI to create personalized news articles and social media posts based on a user's interests.\n",
"* **IBM Watson**: uses Generative AI to analyze vast amounts of customer data, identify patterns, and generate predictive models.\n",
"\n",
"These are just a few examples of the many business applications of Generative AI. As the technology continues to evolve, we can expect to see even more innovative solutions across various industries.\n"
]
}
],
"source": [ "source": [
"# There's actually an alternative approach that some people might prefer\n", "# There's actually an alternative approach that some people might prefer\n",
"# You can use the OpenAI client python library to call Ollama:\n", "# You can use the OpenAI client python library to call Ollama:\n",

2432
week1/day5.ipynb

File diff suppressed because one or more lines are too long

2
week1/troubleshooting.ipynb

@ -365,7 +365,7 @@
"name": "python", "name": "python",
"nbconvert_exporter": "python", "nbconvert_exporter": "python",
"pygments_lexer": "ipython3", "pygments_lexer": "ipython3",
"version": "3.11.11" "version": "3.12.7"
} }
}, },
"nbformat": 4, "nbformat": 4,

1093
week1/week1 EXERCISE.ipynb

File diff suppressed because one or more lines are too long

87
week2/day1.ipynb

File diff suppressed because one or more lines are too long