diff --git a/week3/community-contributions/anime_audio_translator.colab.ipynb b/week3/community-contributions/anime_audio_translator.colab.ipynb new file mode 100644 index 0000000..734d9b0 --- /dev/null +++ b/week3/community-contributions/anime_audio_translator.colab.ipynb @@ -0,0 +1 @@ +{"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"provenance":[],"gpuType":"T4","authorship_tag":"ABX9TyO+HrhlkaVchpoGIfmYAHdf"},"kernelspec":{"name":"python3","display_name":"Python 3"},"language_info":{"name":"python"},"accelerator":"GPU"},"cells":[{"cell_type":"code","execution_count":null,"metadata":{"id":"kayiMLgsBnVt"},"outputs":[],"source":["!pip install -q requests torch bitsandbytes transformers sentencepiece accelerate openai gradio"]},{"cell_type":"code","source":["import os\n","import requests\n","from IPython.display import Markdown, display, update_display\n","from openai import OpenAI\n","from google.colab import drive, userdata\n","from huggingface_hub import login\n","from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig, TextStreamer\n","import torch\n","import gradio as gr"],"metadata":{"id":"ByKEQHyhiLl7","executionInfo":{"status":"ok","timestamp":1744678358807,"user_tz":-480,"elapsed":15255,"user":{"displayName":"Kenneth Andales","userId":"04047926009324958530"}}},"execution_count":2,"outputs":[]},{"cell_type":"code","source":["AUDIO_MODEL = 'whisper-1'\n","LLAMA = \"meta-llama/Meta-Llama-3.1-8B-Instruct\""],"metadata":{"id":"9tzK_t3jiOo1","executionInfo":{"status":"ok","timestamp":1744678358815,"user_tz":-480,"elapsed":2,"user":{"displayName":"Kenneth Andales","userId":"04047926009324958530"}}},"execution_count":3,"outputs":[]},{"cell_type":"code","source":["hf_token = userdata.get('HF_TOKEN')\n","login(hf_token, add_to_git_credential=True)"],"metadata":{"id":"PYNmGaQniW73","executionInfo":{"status":"ok","timestamp":1744678360474,"user_tz":-480,"elapsed":737,"user":{"displayName":"Kenneth Andales","userId":"04047926009324958530"}}},"execution_count":4,"outputs":[]},{"cell_type":"code","source":["openai_api_key = userdata.get(\"OPENAI_API_KEY\")\n","openai = OpenAI(api_key=openai_api_key)"],"metadata":{"id":"yGjVTeMEig-b","executionInfo":{"status":"ok","timestamp":1744678362522,"user_tz":-480,"elapsed":555,"user":{"displayName":"Kenneth Andales","userId":"04047926009324958530"}}},"execution_count":5,"outputs":[]},{"cell_type":"code","source":["def message_prompt(transcription):\n","    system_message = \"\"\"\n","    You are an assistant that translates Japanese text into two languages: 'English' and 'Filipino'.\n","    Please display the translated text in markdown and include the original Japanese text converted to 'Romaji'.\n","    The sample format would be - original text (converted to romaji): original_romaji_text_here \\n\\n translated to english: translated_english_text_here \\n\\n translated to filipino: translated_filipino_text_here\n","    \"\"\"\n","\n","    user_prompt = f\"Here is the transcribed Japanese audio; translate it into the two languages: '{transcription}'. 
No explanation, just the translations.\"\n","\n","    messages = [\n","        {\"role\": \"system\", \"content\": system_message},\n","        {\"role\": \"user\", \"content\": user_prompt}\n","    ]\n","\n","    return messages"],"metadata":{"id":"6jboyASHilLz","executionInfo":{"status":"ok","timestamp":1744679561600,"user_tz":-480,"elapsed":9,"user":{"displayName":"Kenneth Andales","userId":"04047926009324958530"}}},"execution_count":36,"outputs":[]},{"cell_type":"code","source":["quant_config = BitsAndBytesConfig(\n","    load_in_4bit=True,\n","    bnb_4bit_use_double_quant=True,\n","    bnb_4bit_quant_type=\"nf4\",\n","    bnb_4bit_compute_dtype=torch.bfloat16\n",")"],"metadata":{"id":"nYrf_wKmmoUs","executionInfo":{"status":"ok","timestamp":1744678366113,"user_tz":-480,"elapsed":7,"user":{"displayName":"Kenneth Andales","userId":"04047926009324958530"}}},"execution_count":7,"outputs":[]},{"cell_type":"code","source":["def translation(messages):\n","    tokenizer = AutoTokenizer.from_pretrained(LLAMA)\n","    tokenizer.pad_token = tokenizer.eos_token\n","    inputs = tokenizer.apply_chat_template(messages, return_tensors=\"pt\").to(\"cuda\")\n","    streamer = TextStreamer(tokenizer)\n","    model = AutoModelForCausalLM.from_pretrained(LLAMA, device_map=\"auto\", quantization_config=quant_config)\n","    outputs = model.generate(inputs, max_new_tokens=2000, streamer=streamer)\n","\n","    return tokenizer.decode(outputs[0])"],"metadata":{"id":"ESlOaRGioqUQ","executionInfo":{"status":"ok","timestamp":1744678367778,"user_tz":-480,"elapsed":7,"user":{"displayName":"Kenneth Andales","userId":"04047926009324958530"}}},"execution_count":8,"outputs":[]},{"cell_type":"code","source":["def translate_text(file):\n","    try:\n","        with open(file, \"rb\") as audio_file:\n","            transcription = openai.audio.transcriptions.create(\n","                model=AUDIO_MODEL,\n","                file=audio_file,\n","                response_format=\"text\",\n","                language=\"ja\"\n","            )\n","\n","        messages = message_prompt(transcription)\n","        response = translation(messages)\n","\n","        return response\n","    except Exception as e:\n","        return f\"Unexpected error: {str(e)}\""],"metadata":{"id":"FSGFTvIEys0j","executionInfo":{"status":"ok","timestamp":1744679567326,"user_tz":-480,"elapsed":6,"user":{"displayName":"Kenneth Andales","userId":"04047926009324958530"}}},"execution_count":37,"outputs":[]},{"cell_type":"code","source":["with gr.Blocks() as demo:\n","    gr.Markdown(\"# 🎙️ Anime Audio Translator\")\n","    with gr.Row():\n","        with gr.Column():\n","            audio_file = gr.Audio(type=\"filepath\", label=\"Upload Audio\")\n","            button = gr.Button(\"Translate\", variant=\"primary\")\n","\n","        with gr.Column():\n","            gr.Label(value=\"Result of the text translated to 'English' and 'Filipino'\", label=\"Character\")\n","            output_text = gr.Markdown()\n","\n","    button.click(\n","        fn=translate_text,\n","        inputs=audio_file,\n","        outputs=output_text,\n","        trigger_mode=\"once\"\n","    )\n","demo.launch(\n","    # share=True\n",")"],"metadata":{"id":"bexgSsWuvUmU"},"execution_count":null,"outputs":[]}]} \ No newline at end of file diff --git a/week4/community-contributions/code_conversion.ipynb b/week4/community-contributions/code_conversion.ipynb index db41fef..c718abe 100644 --- a/week4/community-contributions/code_conversion.ipynb +++ b/week4/community-contributions/code_conversion.ipynb @@ -298,7 +298,6 @@ "outputs": [], "source": [ "def execute_cpp(code):\n", - " write_output(code, \"cpp\")\n", " try:\n", " compile_cmd = [\"clang++\", \"-Ofast\", \"-std=c++17\", \"-o\", \"optimized\", \"optimized.cpp\"]\n", " compile_result = 
subprocess.run(compile_cmd, shell=True, text=True, capture_output=True)\n", @@ -306,25 +305,53 @@ " run_result = subprocess.run(run_cmd, check=True, text=True, capture_output=True)\n", " return run_result.stdout\n", " except subprocess.CalledProcessError as e:\n", - " return f\"An error occurred:\\n{e.stderr}\"\n", - "\n", + " return f\"An error occurred:\\n{e.stderr}\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "91ba8a3c-8686-4636-bf21-efc861f3a2b7", + "metadata": {}, + "outputs": [], + "source": [ "def execute_js(code):\n", - " write_output(code, \"js\")\n", " try:\n", - " run_result = subprocess.run([\"node\", \"optimized.js\"], shell=True, text=True, capture_output=True)\n", + " run_result = subprocess.run([\"node\", \"optimized.js\"], check=True, text=True, capture_output=True)\n", " return run_result.stdout\n", " except subprocess.CalledProcessError as e:\n", - " return f\"An error occurred:\\n{e.stderr}\"\n", - "\n", + " return f\"An error occurred:\\n{e.stderr}\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b9006f67-f631-4ad4-bf45-b9366c822a04", + "metadata": {}, + "outputs": [], + "source": [ "def execute_php(code):\n", - " write_output(code, \"php\")\n", " try:\n", - " run_result = subprocess.run([\"php\", \"optimized.php\"], shell=True, text=True, capture_output=True)\n", - " return run_result.stdout or run_result.stderr\n", + " run_result = subprocess.run([\"php\", \"optimized.php\"], check=True, text=True, capture_output=True)\n", + " return run_result.stdout\n", " except subprocess.CalledProcessError as e:\n", " return f\"An error occurred:\\n{e.stderr}\"\n" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "b3991a09-f60d-448a-8e92-2561296d05cf", + "metadata": {}, + "outputs": [], + "source": [ + "def handle_execution(code, prog_lang):\n", + " write_output(code, prog_lang)\n", + "\n", + " index = next((i for i, lang in enumerate(programming_languages) if lang[\"extension\"] == prog_lang), -1)\n", + " return programming_languages[index][\"fn\"](code)" + ] + }, { "cell_type": "code", "execution_count": null, @@ -363,17 +390,10 @@ " current_selected = gr.Dropdown([extension], value=extension, visible=False)\n", " \n", " convert.click(optimize, inputs=[python, model, current_selected], outputs=[converted_code])\n", - " \n", - " match extension:\n", - " case \"cpp\":\n", - " prog_run.click(execute_cpp, inputs=[converted_code], outputs=[prog_out])\n", - " case \"js\":\n", - " prog_run.click(execute_js, inputs=[converted_code], outputs=[prog_out])\n", - " case \"php\":\n", - " prog_run.click(execute_php, inputs=[converted_code], outputs=[prog_out])\n", + " prog_run.click(handle_execution, inputs=[converted_code, current_selected], outputs=[prog_out])\n", "\n", "with gr.Blocks(css=css) as ui:\n", - " gr.Markdown(\"## Convert code from Python to selected Programming Language\")\n", + " gr.Markdown(\"# Convert code from Python to any Programming Language\")\n", " with gr.Row():\n", " with gr.Column():\n", " python = gr.Textbox(label=\"Python code:\", value=python_hard, lines=10)\n", @@ -391,7 +411,7 @@ " create_prog_lang_ui(lang, model)\n", "\n", "ui.launch(\n", - " # inbrowser=True\n", + " inbrowser=True\n", ")" ] }