From a67e3046328d6b6236b55156d4eb0e18347a684e Mon Sep 17 00:00:00 2001
From: shanmuganathan120
 <137673464+Shanmuganathan2004@users.noreply.github.com>
Date: Sat, 8 Feb 2025 22:45:07 +0530
Subject: [PATCH] Add my project to week1/community-contributions

---
 .../Text-Summarizer.ipynb                     | 76 +++++++++++++++++++
 1 file changed, 76 insertions(+)
 create mode 100644 week1/community-contributions/Text-Summarizer.ipynb

diff --git a/week1/community-contributions/Text-Summarizer.ipynb b/week1/community-contributions/Text-Summarizer.ipynb
new file mode 100644
index 0000000..822cc8c
--- /dev/null
+++ b/week1/community-contributions/Text-Summarizer.ipynb
@@ -0,0 +1,76 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# 1) Import Required Libraries \n",
+    "\n",
+    "import requests\n",
+    "import gradio as gr\n",
+    "\n",
+    "# DeepSeek-R1 performs abstractive (not extractive) summarization\n",
+    "# This tool talks to a locally running Ollama server's API endpoint\n",
+    "\n",
+    "# 2) Define the local Ollama API endpoint (which serves the DeepSeek model)\n",
+    "\n",
+    "OLLAMA_URL = \"http://localhost:11434/api/generate\"\n",
+    "\n",
+    "# 3) Define the summarization function that queries the model\n",
+    "\n",
+    "def summarize_text(text):\n",
+    "    # Build the Ollama /api/generate payload asking for a 5-bullet summary.\n",
+    "    payload = {\n",
+    "        \"model\": \"deepseek-r1\",  # any locally pulled Ollama tag works (e.g. deepseek-r1:1.5b, 7b, 8b, 14b)\n",
+    "        \"prompt\": f\"Summarize the following text in **5 bullet points**:\\n\\n{text}\",  # instruction prompt for the LLM\n",
+    "        \"stream\": False  # Ensures the response is returned as a whole, not streamed\n",
+    "    }\n",
+    "\n",
+    "    # timeout keeps the UI from hanging forever if the local Ollama server is down or slow;\n",
+    "    # local LLM generation is slow, so allow a generous 300s before giving up.\n",
+    "    response = requests.post(OLLAMA_URL, json=payload, timeout=300)  # send the request to the Ollama server\n",
+    "    if response.status_code == 200:  # success: extract the generated summary, otherwise surface the error\n",
+    "        return response.json().get(\"response\", \"No summary generated.\")\n",
+    "\n",
+    "# 4) Create the Gradio interface\n",
+    "interface = gr.Interface(\n",
+    "    fn=summarize_text,\n",
+    "    inputs=gr.Textbox(lines=10, placeholder=\"Enter text to summarize\"),\n",
+    "    outputs=gr.Textbox(label=\"Summarized Text\"),\n",
+    "    #theme='NoCrypt/miku', #Theme for the Interface I used Hatsune Miku from HF \n",
+    "    title=\"AI-Powered Text Summarizer\",\n",
+    "    description=\"Enter a long text and DeepSeek AI will generate a concise summary.\"\n",
+    ")\n",
+    "\n",
+    "# Launch the web app\n",
+    "if __name__ == \"__main__\":\n",
+    "    interface.launch()\n",
+    "\n",
+    "\n"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "base",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.12.4"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}