Browse Source

Added support for local models

model_as_env_variable
Jonathan Dunn 1 year ago
parent
commit
0ce5ed24c2
  1. 8
      installer/client/cli/fabric.py
  2. 81
      installer/client/cli/utils.py

8
installer/client/cli/fabric.py

@@ -43,6 +43,8 @@ def main():
parser.add_argument( parser.add_argument(
"--setup", help="Set up your fabric instance", action="store_true" "--setup", help="Set up your fabric instance", action="store_true"
) )
parser.add_argument(
'--local', '-L', help="Use local LLM. Default is llama2", action="store_true")
parser.add_argument( parser.add_argument(
"--model", "-m", help="Select the model to use (GPT-4 by default)", default="gpt-4-turbo-preview" "--model", "-m", help="Select the model to use (GPT-4 by default)", default="gpt-4-turbo-preview"
) )
@@ -90,7 +92,11 @@ def main():
if not os.path.exists(os.path.join(config, "context.md")): if not os.path.exists(os.path.join(config, "context.md")):
print("Please create a context.md file in ~/.config/fabric") print("Please create a context.md file in ~/.config/fabric")
sys.exit() sys.exit()
standalone = Standalone(args, args.pattern) standalone = None
if args.local:
standalone = Standalone(args, args.pattern, local=True)
else:
standalone = Standalone(args, args.pattern)
if args.list: if args.list:
try: try:
direct = sorted(os.listdir(config_patterns_directory)) direct = sorted(os.listdir(config_patterns_directory))

81
installer/client/cli/utils.py

@@ -1,6 +1,7 @@
import requests import requests
import os import os
from openai import OpenAI from openai import OpenAI
import asyncio
import pyperclip import pyperclip
import sys import sys
import platform import platform
@@ -15,7 +16,7 @@ env_file = os.path.join(config_directory, ".env")
class Standalone: class Standalone:
def __init__(self, args, pattern="", env_file="~/.config/fabric/.env"): def __init__(self, args, pattern="", env_file="~/.config/fabric/.env", local=False):
""" Initialize the class with the provided arguments and environment file. """ Initialize the class with the provided arguments and environment file.
Args: Args:
@@ -44,10 +45,24 @@ class Standalone:
except FileNotFoundError: except FileNotFoundError:
print("No API key found. Use the --apikey option to set the key") print("No API key found. Use the --apikey option to set the key")
sys.exit() sys.exit()
self.local = local
self.config_pattern_directory = config_directory self.config_pattern_directory = config_directory
self.pattern = pattern self.pattern = pattern
self.args = args self.args = args
self.model = args.model self.model = args.model
if self.local:
if self.args.model == 'gpt-4-turbo-preview':
self.args.model = 'llama2'
async def localChat(self, messages):
    """Send *messages* to a locally running Ollama model and print the reply.

    Uses the model named in ``self.args.model`` (set to ``llama2`` by default
    when --local is given). The import is deferred so the ``ollama`` package
    is only required when local mode is actually used.

    Args:
        messages: Chat history as a list of {"role": ..., "content": ...} dicts.
    """
    from ollama import AsyncClient
    client = AsyncClient()
    reply = await client.chat(model=self.args.model, messages=messages)
    print(reply['message']['content'])
async def localStream(self, messages):
    """Stream a reply for *messages* from a local Ollama model.

    Chunks are printed as they arrive (no trailing newline), flushing stdout
    after each one so output appears immediately. The model comes from
    ``self.args.model``; the ``ollama`` import is deferred to keep the
    dependency optional.

    Args:
        messages: Chat history as a list of {"role": ..., "content": ...} dicts.
    """
    from ollama import AsyncClient
    stream = await AsyncClient().chat(
        model=self.args.model, messages=messages, stream=True)
    async for chunk in stream:
        print(chunk['message']['content'], end='', flush=True)
def streamMessage(self, input_data: str, context=""): def streamMessage(self, input_data: str, context=""):
""" Stream a message and handle exceptions. """ Stream a message and handle exceptions.
@@ -87,26 +102,29 @@ class Standalone:
else: else:
messages = [user_message] messages = [user_message]
try: try:
stream = self.client.chat.completions.create( if self.local:
model=self.model, asyncio.run(self.localStream(messages))
messages=messages, else:
temperature=0.0, stream = self.client.chat.completions.create(
top_p=1, model=self.model,
frequency_penalty=0.1, messages=messages,
presence_penalty=0.1, temperature=0.0,
stream=True, top_p=1,
) frequency_penalty=0.1,
for chunk in stream: presence_penalty=0.1,
if chunk.choices[0].delta.content is not None: stream=True,
char = chunk.choices[0].delta.content )
buffer += char for chunk in stream:
if char not in ["\n", " "]: if chunk.choices[0].delta.content is not None:
print(char, end="") char = chunk.choices[0].delta.content
elif char == " ": buffer += char
print(" ", end="") # Explicitly handle spaces if char not in ["\n", " "]:
elif char == "\n": print(char, end="")
print() # Handle newlines elif char == " ":
sys.stdout.flush() print(" ", end="") # Explicitly handle spaces
elif char == "\n":
print() # Handle newlines
sys.stdout.flush()
except Exception as e: except Exception as e:
print(f"Error: {e}") print(f"Error: {e}")
print(e) print(e)
@@ -153,15 +171,18 @@ class Standalone:
else: else:
messages = [user_message] messages = [user_message]
try: try:
response = self.client.chat.completions.create( if self.local:
model=self.model, asyncio.run(self.localChat(messages))
messages=messages, else:
temperature=0.0, response = self.client.chat.completions.create(
top_p=1, model=self.model,
frequency_penalty=0.1, messages=messages,
presence_penalty=0.1, temperature=0.0,
) top_p=1,
print(response.choices[0].message.content) frequency_penalty=0.1,
presence_penalty=0.1,
)
print(response.choices[0].message.content)
except Exception as e: except Exception as e:
print(f"Error: {e}") print(f"Error: {e}")
print(e) print(e)

Loading…
Cancel
Save