From c2e9f8a88d9714deec4697493478a4fa4ebe49f7 Mon Sep 17 00:00:00 2001
From: emmanuel <emmanuel.babala@dame-prevoyance.com>
Date: Sun, 19 Jan 2025 20:29:30 +0100
Subject: [PATCH] day5-homework: fix title cell, trim docstring prompt, raise max_new_tokens, launch UI in browser

---
 .../day5-homework.ipynb                       | 26 ++++++-------------
 1 file changed, 8 insertions(+), 18 deletions(-)
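
Reviewer context (not part of the commit message): the main behavioural change
is the larger streaming budget in comment_code(). A minimal sketch of the
affected call path, assuming model_url, hf_token and the chat-templated prompt
`text` are prepared as in the surrounding notebook cells:

    import os
    from huggingface_hub import InferenceClient

    model_url = "https://api-inference.huggingface.co/models/..."  # placeholder; the notebook builds this per selected model
    hf_token = os.environ["HF_TOKEN"]                              # assumed to be set in the environment
    text = "..."                                                   # output of tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)

    client = InferenceClient(model_url, token=hf_token)
    result = ""
    # stream=True with details=True yields token objects; accumulate their text into the full completion
    for r in client.text_generation(text, stream=True, details=True, max_new_tokens=5000):
        result += r.token.text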

diff --git a/week4/community-contributions/day5-homework.ipynb b/week4/community-contributions/day5-homework.ipynb
index c34be7b..7503266 100644
--- a/week4/community-contributions/day5-homework.ipynb
+++ b/week4/community-contributions/day5-homework.ipynb
@@ -1,22 +1,11 @@
 {
  "cells": [
   {
-   "cell_type": "code",
-   "execution_count": 1,
-   "id": "6d67dba5-38ec-459a-9132-4a56c6a814cd",
+   "cell_type": "markdown",
+   "id": "ff022957-2e81-4ea9-84d3-e52d5753e133",
    "metadata": {},
-   "outputs": [
-    {
-     "ename": "SyntaxError",
-     "evalue": "invalid syntax (2447672335.py, line 1)",
-     "output_type": "error",
-     "traceback": [
-      "\u001b[1;36m  Cell \u001b[1;32mIn[1], line 1\u001b[1;36m\u001b[0m\n\u001b[1;33m    Comment and Unit Test Generater\u001b[0m\n\u001b[1;37m                     ^\u001b[0m\n\u001b[1;31mSyntaxError\u001b[0m\u001b[1;31m:\u001b[0m invalid syntax\n"
-     ]
-    }
-   ],
    "source": [
-    "Comment and Unit Test Generater \n",
+    "### Comment and Unit Test Generater \n",
     "\n",
     "The requirement: \n",
     "* use an LLM to generate docstring and comments for Python code\n",
@@ -164,7 +153,6 @@
     "        - All parameters with types and descriptions\n",
     "        - Return values with types\n",
     "        - Exceptions that may be raised\n",
-    "        - At least one usage example\n",
     "        - Any important notes or limitations\n",
     "        \n",
     "        2. Strategic inline comments for:\n",
@@ -415,7 +403,7 @@
     "    messages = messages_for_comment(code)\n",
     "    text = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)\n",
     "    client = InferenceClient(model_url, token=hf_token)\n",
-    "    stream = client.text_generation(text, stream=True, details=True, max_new_tokens=3000)\n",
+    "    stream = client.text_generation(text, stream=True, details=True, max_new_tokens=5000)\n",
     "    result = \"\"\n",
     "    for r in stream:\n",
     "        #print(r.token.text, end = \"\")\n",
@@ -522,7 +510,9 @@
    "cell_type": "code",
    "execution_count": null,
    "id": "ee27cc91-81e6-42c8-ae3c-c04161229d8c",
-   "metadata": {},
+   "metadata": {
+    "scrolled": true
+   },
    "outputs": [],
    "source": [
     "with gr.Blocks(css=css) as ui:\n",
@@ -539,7 +529,7 @@
     "        \n",
     "    comment_button.click(comment_code, inputs=[python, model], outputs=[result])\n",
     "    unit_test_button.click(get_unit_test, inputs=[python, model], outputs=[result])\n",
-    "ui.launch(inbrowser=False)"
+    "ui.launch(inbrowser=True)"
    ]
   },
   {