
Merge pull request #110 from KaushikML/main

Added Java Support WEEK 4 DAY3
Ed Donner 4 months ago committed by GitHub
parent
commit
7fc832b85f
795
week4/community-contributions/Code_Converter(Added_Java_Support)(Open_Ai_Only).ipynb

@@ -0,0 +1,795 @@
{
"cells": [
{
"cell_type": "markdown",
"id": "4a6ab9a2-28a2-445d-8512-a0dc8d1b54e9",
"metadata": {},
"source": [
"# Code Generator\n",
"\n",
"The requirement: use a Frontier model to generate high performance C++ code from Python code\n"
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "e610bf56-a46e-4aff-8de1-ab49d62b1ad3",
"metadata": {},
"outputs": [],
"source": [
"# imports\n",
"\n",
"import os\n",
"import io\n",
"import sys\n",
"from dotenv import load_dotenv\n",
"from openai import OpenAI\n",
"import google.generativeai\n",
"import anthropic\n",
"from IPython.display import Markdown, display, update_display\n",
"import gradio as gr\n",
"import subprocess"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "4f672e1c-87e9-4865-b760-370fa605e614",
"metadata": {},
"outputs": [],
"source": [
"# environment\n",
"\n",
"load_dotenv()\n",
"os.environ['OPENAI_API_KEY'] = os.getenv('OPENAI_API_KEY', 'your-key-if-not-using-env')\n",
"os.environ['ANTHROPIC_API_KEY'] = os.getenv('ANTHROPIC_API_KEY', 'your-key-if-not-using-env')"
]
},
{
"cell_type": "code",
"execution_count": 28,
"id": "8aa149ed-9298-4d69-8fe2-8f5de0f667da",
"metadata": {},
"outputs": [],
"source": [
"# initialize\n",
"# NOTE - option to use ultra-low cost models by uncommenting last 2 lines\n",
"\n",
"openai = OpenAI()\n",
"# claude = anthropic.Anthropic()\n",
"OPENAI_MODEL = \"gpt-4o\"\n",
"# CLAUDE_MODEL = \"claude-3-5-sonnet-20240620\"\n",
"\n",
"# Want to keep costs ultra-low? Uncomment these lines:\n",
"# OPENAI_MODEL = \"gpt-4o-mini\"\n",
"# CLAUDE_MODEL = \"claude-3-haiku-20240307\""
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "6896636f-923e-4a2c-9d6c-fac07828a201",
"metadata": {},
"outputs": [],
"source": [
"# system_message = \"You are an assistant that reimplements Python code in high-performance C++ and Java for an M1 Mac. \"\n",
"# system_message += \"Respond only with C++ and Java code; use comments sparingly and do not provide any explanation other than occasional comments. \"\n",
"# system_message += \"The C++ and Java responses need to produce identical output in the fastest possible time.\"\n"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "8e7b3546-57aa-4c29-bc5d-f211970d04eb",
"metadata": {},
"outputs": [],
"source": [
"# def user_prompt_for(python):\n",
"# user_prompt = \"Rewrite this Python code in C++ and Java with the fastest possible implementation that produces identical output in the least time. \"\n",
"# user_prompt += \"Respond only with C++ and Java code; do not explain your work other than a few comments. \"\n",
"# user_prompt += \"Pay attention to number types to ensure no int overflows. Remember to #include all necessary C++ packages such as iomanip for C++, and import required packages for Java.\\n\\n\"\n",
"# user_prompt += python\n",
"# return user_prompt\n"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "c6190659-f54c-4951-bef4-4960f8e51cc4",
"metadata": {},
"outputs": [],
"source": [
"# def messages_for(python):\n",
"# return [\n",
"# {\"role\": \"system\", \"content\": system_message}, # Includes the updated system message with C++ and Java\n",
"# {\"role\": \"user\", \"content\": user_prompt_for(python)} # Calls the updated user prompt function for C++ and Java\n",
"# ]\n"
]
},
{
"cell_type": "code",
"execution_count": 29,
"id": "71e1ba8c-5b05-4726-a9f3-8d8c6257350b",
"metadata": {},
"outputs": [],
"source": [
"def write_output(code, file_name):\n",
" \"\"\"Write the generated code to a file.\"\"\"\n",
" with open(file_name, \"w\") as f:\n",
" f.write(code)\n"
]
},
{
"cell_type": "code",
"execution_count": 30,
"id": "e7d2fea8-74c6-4421-8f1e-0e76d5b201b9",
"metadata": {},
"outputs": [],
"source": [
"def system_message_for(language):\n",
" \"\"\"Create a system message tailored for the requested language.\"\"\"\n",
" return (\n",
" f\"You are an assistant that reimplements Python code in high-performance {language.upper()} for an M1 Mac. \"\n",
" f\"Respond only with {language.upper()} code; do not explain your work other than occasional comments. \"\n",
" \"Pay attention to number types to ensure no overflows and include all necessary packages.\\n\\n\"\n",
" )\n",
"\n",
"def user_prompt_for(python):\n",
" \"\"\"Generate the user prompt.\"\"\"\n",
" return (\n",
" \"Rewrite this Python code in the requested language with the fastest possible implementation that produces \"\n",
" \"identical output in the least time. Use appropriate syntax for the language.\\n\\n\" + python\n",
" )\n",
"\n",
"def messages_for(python, language):\n",
" \"\"\"Generate the messages for GPT.\"\"\"\n",
" return [\n",
" {\"role\": \"system\", \"content\": system_message_for(language)},\n",
" {\"role\": \"user\", \"content\": user_prompt_for(python)},\n",
" ]\n",
" "
]
},
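{
"cell_type": "code",
"execution_count": null,
"id": "prompt-sanity-check",
"metadata": {},
"outputs": [],
"source": [
"# A minimal sanity check (added for illustration, not part of the original contribution):\n",
"# it uses only the helpers defined above and prints the messages that would be sent\n",
"# to the model for a tiny Python snippet, once for each supported target language.\n",
"\n",
"sample = \"print('hello')\"\n",
"for lang in [\"cpp\", \"java\"]:\n",
"    for message in messages_for(sample, lang):\n",
"        print(message[\"role\"].upper(), \"->\", message[\"content\"][:80], \"...\")\n",
"    print()"
]
},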
{
"cell_type": "code",
"execution_count": 31,
"id": "3ec5a816-7bf4-4daa-b0c9-f04edb1c0140",
"metadata": {},
"outputs": [],
"source": [
"def optimize_gpt(python, language=\"cpp\"):\n",
" \"\"\"Optimize the given Python code and generate C++ or Java output.\"\"\"\n",
" code = \"\"\n",
" for chunk in stream_gpt(python, language):\n",
" print(chunk, end=\"\") # Stream the output\n",
" code = chunk # Store the final code\n",
" \n",
" file_name = f\"optimized.{language}\"\n",
" write_output(code, file_name)\n",
" print(f\"\\nCode written to {file_name}.\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "8adf0436-cf0e-429c-bd35-c3d551631b27",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "7cd84ad8-d55c-4fe0-9eeb-1895c95c4a9d",
"metadata": {},
"outputs": [],
"source": [
"# def optimize_claude(python):\n",
"# result = claude.messages.stream(\n",
"# model=CLAUDE_MODEL,\n",
"# max_tokens=2000,\n",
"# system=system_message,\n",
"# messages=[{\"role\": \"user\", \"content\": user_prompt_for(python)}],\n",
"# )\n",
"# reply = \"\"\n",
"# with result as stream:\n",
"# for text in stream.text_stream:\n",
"# reply += text\n",
"# print(text, end=\"\", flush=True)\n",
"# write_output(reply)"
]
},
{
"cell_type": "code",
"execution_count": 32,
"id": "a1cbb778-fa57-43de-b04b-ed523f396c38",
"metadata": {},
"outputs": [],
"source": [
"pi = \"\"\"\n",
"import time\n",
"\n",
"def calculate(iterations, param1, param2):\n",
" result = 1.0\n",
" for i in range(1, iterations+1):\n",
" j = i * param1 - param2\n",
" result -= (1/j)\n",
" j = i * param1 + param2\n",
" result += (1/j)\n",
" return result\n",
"\n",
"start_time = time.time()\n",
"result = calculate(100_000_000, 4, 1) * 4\n",
"end_time = time.time()\n",
"\n",
"print(f\"Result: {result:.12f}\")\n",
"print(f\"Execution Time: {(end_time - start_time):.6f} seconds\")\n",
"\"\"\""
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "7fe1cd4b-d2c5-4303-afed-2115a3fef200",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Result: 3.141592658589\n",
"Execution Time: 46.954224 seconds\n"
]
}
],
"source": [
"exec(pi)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "57ea7f4b-a862-4805-a074-2019314cbd4a",
"metadata": {},
"outputs": [],
"source": [
"optimize_gpt(python_code, language=\"java\")\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "105db6f9-343c-491d-8e44-3a5328b81719",
"metadata": {},
"outputs": [],
"source": [
"optimize_gpt(python_code, language=\"cpp\")\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "bf26ee95-0c77-491d-9a91-579a1e96a8a3",
"metadata": {},
"outputs": [],
"source": [
"exec(pi)"
]
},
{
"cell_type": "markdown",
"id": "bf8f8018-f64d-425c-a0e1-d7862aa9592d",
"metadata": {},
"source": [
"# Compiling C++ and executing\n",
"\n",
"You can use any platform now (Windows,Mac,Linux) i have added compatiblity in this"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "4194e40c-04ab-4940-9d64-b4ad37c5bb40",
"metadata": {},
"outputs": [],
"source": [
"import subprocess\n",
"import platform\n",
"\n",
"def compile_and_run(language=\"cpp\"):\n",
" \"\"\"Compile and run the generated code.\"\"\"\n",
" is_windows = platform.system() == \"Windows\"\n",
" \n",
" if language == \"cpp\":\n",
" if is_windows:\n",
" # Windows: Use g++ (requires MinGW or equivalent installed)\n",
" compile_command = [\"g++\", \"-O3\", \"-std=c++17\", \"-o\", \"optimized.exe\", \"optimized.cpp\"]\n",
" execute_command = [\"optimized.exe\"]\n",
" else:\n",
" # Non-Windows: Use clang++\n",
" compile_command = [\n",
" \"clang++\", \"-O3\", \"-std=c++17\", \"-march=armv8.3-a\", \"-o\", \"optimized\", \"optimized.cpp\"\n",
" ]\n",
" execute_command = [\"./optimized\"]\n",
" elif language == \"java\":\n",
" # Both Windows and non-Windows use the same Java commands\n",
" compile_command = [\"javac\", \"optimized.java\"]\n",
" execute_command = [\"java\", \"optimized\"]\n",
" else:\n",
" raise ValueError(\"Unsupported language. Choose 'cpp' or 'java'.\")\n",
"\n",
" # Compile\n",
" try:\n",
" subprocess.run(compile_command, check=True, shell=is_windows)\n",
" print(f\"{language.upper()} compilation successful.\")\n",
" except subprocess.CalledProcessError as e:\n",
" print(f\"{language.upper()} compilation failed:\\n{e}\")\n",
" return\n",
"\n",
" # Run\n",
" try:\n",
" output = subprocess.run(\n",
" execute_command, capture_output=True, text=True, shell=is_windows\n",
" )\n",
" print(f\"{language.upper()} execution output:\\n{output.stdout}\")\n",
" except subprocess.CalledProcessError as e:\n",
" print(f\"{language.upper()} execution failed:\\n{e.stderr}\")\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "240ae457-ec5a-4268-9e7d-e782c2113f02",
"metadata": {},
"outputs": [],
"source": [
"compile_and_run(language=\"cpp\")\n"
]
},
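{
"cell_type": "code",
"execution_count": null,
"id": "compile-and-run-java-example",
"metadata": {},
"outputs": [],
"source": [
"# Optional follow-up (illustrative): the same helper also handles Java.\n",
"# It assumes optimize_gpt(..., language=\"java\") has already written optimized.java\n",
"# and that the generated public class is named to match that file.\n",
"\n",
"# compile_and_run(language=\"java\")"
]
},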
{
"cell_type": "code",
"execution_count": null,
"id": "983a11fe-e24d-4c65-8269-9802c5ef3ae6",
"metadata": {},
"outputs": [],
"source": [
"# optimize_claude(pi)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d5a766f9-3d23-4bb4-a1d4-88ec44b61ddf",
"metadata": {},
"outputs": [],
"source": [
"# Repeat for Claude - again, use the right approach for your platform\n",
"\n",
"# !clang++ -O3 -std=c++17 -march=armv8.3-a -o optimized optimized.cpp\n",
"# !./optimized"
]
},
{
"cell_type": "code",
"execution_count": 26,
"id": "c3b497b3-f569-420e-b92e-fb0f49957ce0",
"metadata": {},
"outputs": [],
"source": [
"python_hard = \"\"\"# Be careful to support large number sizes\n",
"\n",
"def lcg(seed, a=1664525, c=1013904223, m=2**32):\n",
" value = seed\n",
" while True:\n",
" value = (a * value + c) % m\n",
" yield value\n",
" \n",
"def max_subarray_sum(n, seed, min_val, max_val):\n",
" lcg_gen = lcg(seed)\n",
" random_numbers = [next(lcg_gen) % (max_val - min_val + 1) + min_val for _ in range(n)]\n",
" max_sum = float('-inf')\n",
" for i in range(n):\n",
" current_sum = 0\n",
" for j in range(i, n):\n",
" current_sum += random_numbers[j]\n",
" if current_sum > max_sum:\n",
" max_sum = current_sum\n",
" return max_sum\n",
"\n",
"def total_max_subarray_sum(n, initial_seed, min_val, max_val):\n",
" total_sum = 0\n",
" lcg_gen = lcg(initial_seed)\n",
" for _ in range(20):\n",
" seed = next(lcg_gen)\n",
" total_sum += max_subarray_sum(n, seed, min_val, max_val)\n",
" return total_sum\n",
"\n",
"# Parameters\n",
"n = 10000 # Number of random numbers\n",
"initial_seed = 42 # Initial seed for the LCG\n",
"min_val = -10 # Minimum value of random numbers\n",
"max_val = 10 # Maximum value of random numbers\n",
"\n",
"# Timing the function\n",
"import time\n",
"start_time = time.time()\n",
"result = total_max_subarray_sum(n, initial_seed, min_val, max_val)\n",
"end_time = time.time()\n",
"\n",
"print(\"Total Maximum Subarray Sum (20 runs):\", result)\n",
"print(\"Execution Time: {:.6f} seconds\".format(end_time - start_time))\n",
"\"\"\""
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "dab5e4bc-276c-4555-bd4c-12c699d5e899",
"metadata": {},
"outputs": [],
"source": [
"exec(python_hard)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e8d24ed5-2c15-4f55-80e7-13a3952b3cb8",
"metadata": {},
"outputs": [],
"source": [
"# optimize_gpt(python_hard)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e0b3d073-88a2-40b2-831c-6f0c345c256f",
"metadata": {},
"outputs": [],
"source": [
"# # Replace this with the right C++ compile + execute command for your platform\n",
"\n",
"# !clang++ -O3 -std=c++17 -march=armv8.3-a -o optimized optimized.cpp\n",
"# !./optimized"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e9305446-1d0c-4b51-866a-b8c1e299bf5c",
"metadata": {},
"outputs": [],
"source": [
"# optimize_claude(python_hard)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "0c181036-8193-4fdd-aef3-fc513b218d43",
"metadata": {},
"outputs": [],
"source": [
"# Replace this with the right C++ compile + execute command for your platform\n",
"\n",
"# !clang++ -O3 -std=c++17 -march=armv8.3-a -o optimized optimized.cpp\n",
"# !./optimized"
]
},
{
"cell_type": "code",
"execution_count": 19,
"id": "0be9f47d-5213-4700-b0e2-d444c7c738c0",
"metadata": {},
"outputs": [],
"source": [
"def stream_gpt(python, language=\"cpp\"):\n",
" \"\"\"Stream the GPT output for the requested language.\"\"\"\n",
" if language not in [\"cpp\", \"java\"]:\n",
" raise ValueError(\"Invalid language specified. Choose 'cpp' or 'java'.\")\n",
" \n",
" # Stream response\n",
" stream = openai.ChatCompletion.create(\n",
" model=OPENAI_MODEL, messages=messages_for(python, language), stream=True\n",
" )\n",
" reply = \"\"\n",
" code_block = f\"```{language}\\n\" # Detect code block for the language\n",
"\n",
" for chunk in stream:\n",
" fragment = chunk.choices[0].delta.content or \"\"\n",
" reply += fragment\n",
" \n",
" # Clean the streamed reply\n",
" cleaned_reply = reply.replace(code_block, \"\").replace(\"```\", \"\")\n",
" yield cleaned_reply\n"
]
},
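{
"cell_type": "code",
"execution_count": null,
"id": "stream-gpt-usage-sketch",
"metadata": {},
"outputs": [],
"source": [
"# Illustrative usage sketch (assumes the `pi` example defined above and a valid OpenAI key):\n",
"# stream_gpt yields the cumulative cleaned reply, so the last value yielded is the full program.\n",
"\n",
"# final_code = \"\"\n",
"# for partial in stream_gpt(pi, language=\"java\"):\n",
"#     final_code = partial\n",
"# print(final_code[:300])"
]
},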
{
"cell_type": "code",
"execution_count": 20,
"id": "8669f56b-8314-4582-a167-78842caea131",
"metadata": {},
"outputs": [],
"source": [
"# def stream_claude(python):\n",
"# result = claude.messages.stream(\n",
"# model=CLAUDE_MODEL,\n",
"# max_tokens=2000,\n",
"# system=system_message,\n",
"# messages=[{\"role\": \"user\", \"content\": user_prompt_for(python)}],\n",
"# )\n",
"# reply = \"\"\n",
"# with result as stream:\n",
"# for text in stream.text_stream:\n",
"# reply += text\n",
"# yield reply.replace('```cpp\\n','').replace('```','')"
]
},
{
"cell_type": "code",
"execution_count": 21,
"id": "2f1ae8f5-16c8-40a0-aa18-63b617df078d",
"metadata": {},
"outputs": [],
"source": [
"def optimize(python, model=\"GPT\", language=\"cpp\"):\n",
" \"\"\"\n",
" Optimize the given Python code using the specified model (GPT or Claude) and generate the output\n",
" in the requested programming language.\n",
"\n",
" Args:\n",
" python (str): The Python code to optimize.\n",
" model (str): The model to use (\"GPT\" or \"Claude\").\n",
" language (str): The target programming language (\"cpp\" or \"java\").\n",
"\n",
" Yields:\n",
" str: The streamed output of the generated code.\n",
" \"\"\"\n",
" if model == \"GPT\":\n",
" result = stream_gpt(python, language=language)\n",
" \n",
" else:\n",
" raise ValueError(\"Unknown model. Please choose 'GPT' or 'Claude'.\")\n",
"\n",
" for stream_so_far in result:\n",
" yield stream_so_far\n"
]
},
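{
"cell_type": "code",
"execution_count": null,
"id": "optimize-usage-sketch",
"metadata": {},
"outputs": [],
"source": [
"# Illustrative usage sketch of the wrapper above (assumes `python_hard` from the earlier cell):\n",
"# optimize is a generator, so iterate it to stream the conversion and keep the final result.\n",
"\n",
"# converted = \"\"\n",
"# for so_far in optimize(python_hard, model=\"GPT\", language=\"cpp\"):\n",
"#     converted = so_far\n",
"# print(converted[:300])"
]
},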
{
"cell_type": "code",
"execution_count": null,
"id": "f1ddb38e-6b0a-4c37-baa4-ace0b7de887a",
"metadata": {},
"outputs": [],
"source": [
"# import gradio as gr\n",
"\n",
"# # Assuming `optimize` is already defined and imported\n",
"# # python_hard should be a pre-defined Python code snippet for the default value\n",
"\n",
"# with gr.Blocks() as ui:\n",
"# with gr.Row():\n",
"# python = gr.Textbox(label=\"Python code:\", lines=10, value=\"\") # Default value can be set here\n",
"# cpp = gr.Textbox(label=\"Converted code:\", lines=10, interactive=False) # Output box\n",
"\n",
"# with gr.Row():\n",
"# model = gr.Dropdown([\"GPT\", \"Claude\"], label=\"Select model\", value=\"GPT\") # Default is GPT\n",
"# language = gr.Dropdown([\"cpp\", \"java\"], label=\"Target language\", value=\"cpp\") # Default is C++\n",
"# convert = gr.Button(\"Convert code\")\n",
"\n",
"# # Connect the button to the optimize function\n",
"# def convert_code(python, model, language):\n",
"# result = \"\"\n",
"# for output in optimize(python, model=model, language=language):\n",
"# result = output # Collect the last streamed result\n",
"# return result\n",
"\n",
"# convert.click(\n",
"# fn=convert_code,\n",
"# inputs=[python, model, language], # Inputs from UI\n",
"# outputs=[cpp], # Output to the C++/Java box\n",
"# )\n",
"\n",
"# ui.launch(inbrowser=True)\n"
]
},
{
"cell_type": "code",
"execution_count": 22,
"id": "19bf2bff-a822-4009-a539-f003b1651383",
"metadata": {},
"outputs": [],
"source": [
"import io\n",
"import sys\n",
"\n",
"def execute_python(code):\n",
" \"\"\"\n",
" Execute Python code dynamically and capture its output.\n",
"\n",
" Args:\n",
" code (str): The Python code to execute.\n",
"\n",
" Returns:\n",
" str: The captured standard output of the executed code.\n",
"\n",
" Raises:\n",
" Exception: If the execution of the code raises an error.\n",
" \"\"\"\n",
" output = io.StringIO()\n",
" try:\n",
" sys.stdout = output # Redirect standard output to the StringIO object\n",
" exec(code, {}) # Execute code with an empty global context for safety\n",
" except Exception as e:\n",
" return f\"Error during execution: {str(e)}\"\n",
" finally:\n",
" sys.stdout = sys.__stdout__ # Restore standard output\n",
"\n",
" return output.getvalue()\n"
]
},
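{
"cell_type": "code",
"execution_count": null,
"id": "execute-python-smoke-test",
"metadata": {},
"outputs": [],
"source": [
"# Quick smoke test (illustrative): capture the output of a trivial snippet\n",
"# to confirm the stdout redirection works before wiring it into the UI below.\n",
"\n",
"print(execute_python(\"print(2 + 2)\"))"
]
},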
{
"cell_type": "code",
"execution_count": 23,
"id": "77f3ab5d-fcfb-4d3f-8728-9cacbf833ea6",
"metadata": {},
"outputs": [],
"source": [
"# You'll need to change the code in the try block to compile the C++ code for your platform\n",
"# I pasted this into Claude's chat UI with a request for it to give me a version for an Intel PC,\n",
"# and it responded with something that looks perfect - you can try a similar approach for your platform.\n",
"\n",
"# M1 Mac version to compile and execute optimized C++ code:\n",
"\n",
"def execute_cpp(code):\n",
" write_output(code)\n",
" try:\n",
" compile_cmd = [\"clang++\", \"-Ofast\", \"-std=c++17\", \"-march=armv8.5-a\", \"-mtune=apple-m1\", \"-mcpu=apple-m1\", \"-o\", \"optimized\", \"optimized.cpp\"]\n",
" compile_result = subprocess.run(compile_cmd, check=True, text=True, capture_output=True)\n",
" run_cmd = [\"./optimized\"]\n",
" run_result = subprocess.run(run_cmd, check=True, text=True, capture_output=True)\n",
" return run_result.stdout\n",
" except subprocess.CalledProcessError as e:\n",
" return f\"An error occurred:\\n{e.stderr}\""
]
},
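{
"cell_type": "code",
"execution_count": null,
"id": "execute-cpp-intel-sketch",
"metadata": {},
"outputs": [],
"source": [
"# Rough sketch of the Intel/Windows variant mentioned in the comment above.\n",
"# It assumes g++ (e.g. MinGW on Windows) is on the PATH; the function name, flags and\n",
"# file names are illustrative, not output from Claude, so adjust them for your machine.\n",
"\n",
"# import platform\n",
"#\n",
"# def execute_cpp_intel(code):\n",
"#     write_output(code, \"optimized.cpp\")\n",
"#     exe = \"optimized.exe\" if platform.system() == \"Windows\" else \"./optimized\"\n",
"#     try:\n",
"#         compile_cmd = [\"g++\", \"-O3\", \"-std=c++17\", \"-march=native\", \"-o\", \"optimized\", \"optimized.cpp\"]\n",
"#         subprocess.run(compile_cmd, check=True, text=True, capture_output=True)\n",
"#         run_result = subprocess.run([exe], check=True, text=True, capture_output=True)\n",
"#         return run_result.stdout\n",
"#     except subprocess.CalledProcessError as e:\n",
"#         return f\"An error occurred:\\n{e.stderr}\""
]
},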
{
"cell_type": "code",
"execution_count": 36,
"id": "9645b5c4-41a1-4a88-a5e6-cf618864af04",
"metadata": {},
"outputs": [],
"source": [
"\n",
"def execute_java(code):\n",
" \"\"\"Compile and execute Java code dynamically.\"\"\"\n",
" write_output(code, \"Optimized.java\")\n",
" try:\n",
" # Compile the Java code\n",
" compile_cmd = [\"javac\", \"Optimized.java\"]\n",
" subprocess.run(compile_cmd, check=True, text=True, capture_output=True)\n",
" \n",
" # Run the compiled Java program\n",
" run_cmd = [\"java\", \"Optimized\"]\n",
" run_result = subprocess.run(run_cmd, check=True, text=True, capture_output=True)\n",
" return run_result.stdout # Return the output\n",
" except subprocess.CalledProcessError as e:\n",
" return f\"Error during compilation or execution:\\n{e.stderr}\""
]
},
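{
"cell_type": "code",
"execution_count": null,
"id": "execute-java-smoke-test",
"metadata": {},
"outputs": [],
"source": [
"# Optional smoke test (illustrative): compile and run a tiny hand-written Java program\n",
"# to check that javac/java are on the PATH before converting real code.\n",
"\n",
"# hello_java = \"\"\"\n",
"# public class Optimized {\n",
"#     public static void main(String[] args) {\n",
"#         System.out.println(\"Hello from Java!\");\n",
"#     }\n",
"# }\n",
"# \"\"\"\n",
"# print(execute_java(hello_java))"
]
},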
{
"cell_type": "code",
"execution_count": 24,
"id": "9a2274f1-d03b-42c0-8dcc-4ce159b18442",
"metadata": {},
"outputs": [],
"source": [
"# css = \"\"\"\n",
"# .python {background-color: #306998;}\n",
"# .cpp {background-color: #050;}\n",
"# \"\"\""
]
},
{
"cell_type": "code",
"execution_count": 40,
"id": "f1303932-160c-424b-97a8-d28c816721b2",
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div><iframe src=\"http://127.0.0.1:7864/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": []
},
"execution_count": 40,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"css = \"\"\"\n",
".python {background-color: #306998;}\n",
".cpp {background-color: #050;}\n",
".java {background-color: #b07219;}\n",
"\"\"\"\n",
"\n",
"with gr.Blocks(css=css) as ui:\n",
" gr.Markdown(\"## Convert code from Python to C++, Java, or Run Directly\")\n",
" \n",
" with gr.Row():\n",
" python = gr.Textbox(label=\"Python code:\", value=\"print('Hello from Python!')\", lines=10)\n",
" cpp = gr.Textbox(label=\"C++ code:\", lines=10)\n",
" java = gr.Textbox(label=\"Java code:\", lines=10)\n",
" \n",
" with gr.Row():\n",
" model = gr.Dropdown([\"GPT\"], label=\"Select model\", value=\"GPT\")\n",
" \n",
" with gr.Row():\n",
" convert_cpp = gr.Button(\"Convert to C++\")\n",
" convert_java = gr.Button(\"Convert to Java\")\n",
" \n",
" with gr.Row():\n",
" python_run = gr.Button(\"Run Python\")\n",
" cpp_run = gr.Button(\"Run C++\")\n",
" java_run = gr.Button(\"Run Java\")\n",
" \n",
" with gr.Row():\n",
" python_out = gr.TextArea(label=\"Python result:\", elem_classes=[\"python\"])\n",
" cpp_out = gr.TextArea(label=\"C++ result:\", elem_classes=[\"cpp\"])\n",
" java_out = gr.TextArea(label=\"Java result:\", elem_classes=[\"java\"])\n",
"\n",
" # Add C++ and Java conversion\n",
" convert_cpp.click(optimize, inputs=[python, model], outputs=[cpp])\n",
" convert_java.click(optimize, inputs=[python, model], outputs=[java])\n",
" \n",
" # Add execution buttons for each language\n",
" python_run.click(execute_python, inputs=[python], outputs=[python_out])\n",
" cpp_run.click(execute_cpp, inputs=[cpp], outputs=[cpp_out])\n",
" java_run.click(execute_java, inputs=[java], outputs=[java_out])\n",
"\n",
"ui.launch(inbrowser=True)\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "1e910bc5-b343-48e8-9da5-2ce8e2ab888e",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.11"
}
},
"nbformat": 4,
"nbformat_minor": 5
}