From 735e29fd30ce190cc01f289070a6368251bd0d9f Mon Sep 17 00:00:00 2001
From: Phi-Li-Ne
Date: Thu, 20 Feb 2025 19:07:18 +0100
Subject: [PATCH] Implement week1 exercise

---
 week1/week1 EXERCISE.ipynb | 123 +++++++++++++++++++++++++++++++++----
 1 file changed, 110 insertions(+), 13 deletions(-)

diff --git a/week1/week1 EXERCISE.ipynb b/week1/week1 EXERCISE.ipynb
index f3486fe..673f5a1 100644
--- a/week1/week1 EXERCISE.ipynb
+++ b/week1/week1 EXERCISE.ipynb
@@ -18,7 +18,12 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "# imports"
+    "# imports\n",
+    "import os\n",
+    "from dotenv import load_dotenv\n",
+    "\n",
+    "from IPython.display import Markdown, display, update_display\n",
+    "from openai import OpenAI"
    ]
   },
   {
@@ -41,22 +46,79 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "# set up environment"
+    "# set up environment\n",
+    "load_dotenv(override=True)\n",
+    "api_key = os.getenv(\"OPENAI_API_KEY\")\n",
+    "\n",
+    "# set up clients\n",
+    "openai = OpenAI()\n",
+    "ollama = OpenAI(base_url=\"http://localhost:11434/v1\" , api_key=\"ollama\")\n",
+    "\n",
+    "# set up system prompt\n",
+    "system_prompt = \"You are a coding tutor. If the user asks you a question, answer it to the point. If you are asked to create a code snippet, generate the code in Python and then explain it shortly.\""
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": null,
-   "id": "3f0d0137-52b0-47a8-81a8-11a90a010798",
+   "execution_count": 53,
+   "id": "58f098cb-4b4e-4394-b0b5-29db88e9101c",
    "metadata": {},
    "outputs": [],
    "source": [
-    "# here is the question; type over this to ask something new\n",
+    "def send_request(user_prompt, model=MODEL_LLAMA, stream=False):\n",
+    "    message = [{\"role\": \"system\", \"content\": system_prompt}, {\"role\": \"user\", \"content\": user_prompt}]\n",
+    "    if model.startswith(\"gpt\"):\n",
+    "        model_client = openai\n",
+    "    else:\n",
+    "        model_client = ollama\n",
+    "\n",
+    "    \n",
+    "    response = model_client.chat.completions.create(\n",
+    "        model=model,\n",
+    "        messages=message,\n",
+    "        stream=stream\n",
+    "    )\n",
     "\n",
-    "question = \"\"\"\n",
-    "Please explain what this code does and why:\n",
-    "yield from {book.get(\"author\") for book in books if book.get(\"author\")}\n",
-    "\"\"\""
+    "    if stream:\n",
+    "        streaming = \"\"\n",
+    "        display_handle = display(Markdown(\"\"), display_id=True)\n",
+    "        for chunk in response:\n",
+    "            streaming += chunk.choices[0].delta.content or ''\n",
+    "            streaming = streaming.replace(\"```\",\"\").replace(\"markdown\", \"\")\n",
+    "            update_display(Markdown(streaming), display_id=display_handle.display_id)\n",
+    "\n",
+    "    else:\n",
+    "        return display(Markdown(response.choices[0].message.content))\n",
+    "\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 49,
+   "id": "3f0d0137-52b0-47a8-81a8-11a90a010798",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdin",
+     "output_type": "stream",
+     "text": [
+      " How can I display python code properly while streaming the answer from openai? Create a code snippet for this. The streaming should happen in the code canvas.\n"
+     ]
+    }
+   ],
+   "source": [
+    "# here is the question; type over this to ask something new\n",
+    "question = input()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 48,
+   "id": "2bc093fa-b2ff-47e9-8ea8-e41499385116",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# question = \"\"\"How can I display python code properly while streaming the answer from openai? Create a code snippet for this. The streaming should happen in the code canvas.\"\"\""
    ]
   },
   {
@@ -66,17 +128,52 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "# Get gpt-4o-mini to answer, with streaming"
+    "# Get gpt-4o-mini to answer, with streaming\n",
+    "send_request(model=MODEL_GPT, user_prompt=question, stream=True)"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 54,
    "id": "8f7c8ea8-4082-4ad0-8751-3301adcf6538",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "data": {
+      "text/markdown": [
+       "To display Python code properly with OpenAI's chat interface, you'll need to use the `code` formatting in the response format provided by the API endpoint. \n",
+       "\n",
+       "Here's an example of how you can modify the API request URL to include the formatted code:\n",
+       "\n",
+       "```python\n",
+       "import requests\n",
+       "import json\n",
+       "\n",
+       "query = {\n",
+       "    \"text\": \"{\\n} # Python code here\\n}\"\n",
+       "\n",
+       "headers = {\n",
+       "    'Content-Type': 'application/json'\n",
+       "}\n",
+       "\n",
+       "response = requests.post('https://api.openai.com/v1/answers', data=json.dumps(query), headers=headers)\n",
+       "\n",
+       "answer = response.json()\n",
+       "```\n",
+       "\n",
+       "However, the most convenient way to display the code is by using the `code` directive directly in your chat prompt. OpenAI will automatically format and highlight your code."
+      ],
+      "text/plain": [
+       "<IPython.core.display.Markdown object>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
    "source": [
-    "# Get Llama 3.2 to answer"
+    "# Get Llama 3.2 to answer\n",
+    "send_request(user_prompt=question)"
    ]
   }
 ],