From the uDemy course on LLM engineering.
https://www.udemy.com/course/llm-engineering-master-ai-and-large-language-models
You cannot select more than 25 topics.
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
41 lines
1.3 KiB
import openai # type: ignore |
|
import ollama # type: ignore |
|
from utils.config import Config |
|
import requests # type: ignore |
|
|
|
|
|
# Provider handles, created once at import time.
openai_client = openai.Client(api_key=Config.OPENAI_API_KEY)  # OpenAI SDK client
ollama_api_url = Config.OLLAMA_API_URL  # endpoint used for raw HTTP calls to Ollama
|
def call_llm(messages, model="gpt-4", provider="openai"):
    """
    Dispatch a chat-completion request to the selected LLM provider.

    Args:
        messages: Chat history as a list of dicts, e.g.
            [{"role": "user", "content": "..."}].
        model: Model identifier understood by the chosen provider.
        provider: One of "openai" (OpenAI SDK), "ollama_lib" (the `ollama`
            Python package), or "ollama_api" (raw HTTP call to the Ollama
            server at `ollama_api_url`).

    Returns:
        The assistant's reply text (a string).

    Raises:
        ValueError: If `provider` is not one of the supported values.
        requests.RequestException: On timeout or HTTP error for "ollama_api".
    """
    if provider == "openai":
        response = openai_client.chat.completions.create(
            model=model,
            messages=messages,
        )
        return response.choices[0].message.content

    if provider == "ollama_lib":
        response = ollama.chat(model=model, messages=messages)
        return response['message']['content']

    if provider == "ollama_api":
        payload = {
            "model": model,
            "messages": messages,
            "stream": False,  # Set to True for streaming responses
        }
        # Bound the call so a hung Ollama server cannot block the caller
        # forever, and surface HTTP errors instead of parsing an error body.
        response = requests.post(ollama_api_url, json=payload, timeout=60)
        response.raise_for_status()
        response_data = response.json()
        return response_data.get('message', {}).get('content', 'No summary generated')

    # NOTE: the original message listed 'deepseek' and 'llama', providers
    # this function never implemented; keep it in sync with the branches above.
    raise ValueError(
        "Unsupported provider. Choose 'openai', 'ollama_lib', or 'ollama_api'."
    )