Browse Source

Create week1-coderesearcher.py

pull/142/head
Daniel Emakporuena 3 months ago committed by GitHub
parent
commit
ee2e39c2b6
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
  1. 45
      week1/community-contributions/week1-coderesearcher.py

45
week1/community-contributions/week1-coderesearcher.py

@ -0,0 +1,45 @@
import ollama, os
from openai import OpenAI
from dotenv import load_dotenv
from IPython.display import Markdown, display
# Load API credentials from a local .env file (expects OPENAI_API_KEY).
load_dotenv()
open_key = os.getenv("OPENAI_API_KEY")

# Model identifiers for the two backends.
OPEN_MODEL = "gpt-4-turbo"
ollama_model = "llama3.2"

# Fix: pass the key explicitly instead of leaving `open_key` unused and
# relying on the client's implicit environment lookup — a missing key now
# surfaces at construction time rather than on the first API call.
openai = OpenAI(api_key=open_key)

# Shared system prompt for both backends. (Typo fix: "markdown down" -> "markdown".)
system_prompt = "You are an assistant that focuses on the reason for each code, analysing and interpreting what the code does and how it could be improved, \
Give your answer in markdown with two different topics namely: Explanation and Code Improvement. However if you think there is no possible improvement \
to said code, simply state 'no possible improvement '"
def user_prompt():
    """Read a prompt message from the user on stdin and return it."""
    return input("Write your prompt message: ")
def explain():
    """Ask the OpenAI model to explain and suggest improvements for user code.

    Reads the code snippet interactively via ``user_prompt()``, sends it to
    the OpenAI chat-completions API with the shared ``system_prompt``,
    renders the Markdown answer, and returns the raw response text.

    Returns:
        str: the assistant's Markdown answer (fix: the original computed
        ``result`` but never returned it, so callers could not reuse it).
    """
    response = openai.chat.completions.create(
        model=OPEN_MODEL,
        messages=[
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_prompt()},
        ],
    )
    result = response.choices[0].message.content
    display(Markdown(result))
    return result
# explain()  # run this to get the OpenAI output with personalized input
# With ollama
# Local Ollama REST endpoint. Fix: the server listens on plain HTTP at
# localhost:11434; the original "https://" scheme would fail the TLS handshake.
ollama_api = "http://localhost:11434/api/chat"
def explainer_with_ollama():
    """Same contract as ``explain()``, but against a locally running Ollama model.

    Reads the code snippet interactively via ``user_prompt()``, sends it to
    the local Ollama chat API with the shared ``system_prompt``, renders the
    Markdown answer, and returns the raw response text.

    Returns:
        str: the model's Markdown answer (fix: the original computed
        ``result`` but never returned it, so callers could not reuse it).
    """
    response = ollama.chat(
        model=ollama_model,
        messages=[
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_prompt()},
        ],
    )
    result = response["message"]["content"]
    display(Markdown(result))
    return result
# explainer_with_ollama()  # run for Ollama output with the same personalized input
Loading…
Cancel
Save