
Added support for local models

Branch: model_as_env_variable
Jonathan Dunn committed 1 year ago
parent commit 0ce5ed24c2

2 changed files:
  1. installer/client/cli/fabric.py (6 lines changed)
  2. installer/client/cli/utils.py (23 lines changed)

installer/client/cli/fabric.py

@@ -43,6 +43,8 @@ def main():
     parser.add_argument(
         "--setup", help="Set up your fabric instance", action="store_true"
     )
+    parser.add_argument(
+        '--local', '-L', help="Use local LLM. Default is llama2", action="store_true")
     parser.add_argument(
         "--model", "-m", help="Select the model to use (GPT-4 by default)", default="gpt-4-turbo-preview"
     )
@@ -90,6 +92,10 @@ def main():
     if not os.path.exists(os.path.join(config, "context.md")):
        print("Please create a context.md file in ~/.config/fabric")
        sys.exit()
-    standalone = Standalone(args, args.pattern)
+    standalone = None
+    if args.local:
+        standalone = Standalone(args, args.pattern, local=True)
+    else:
+        standalone = Standalone(args, args.pattern)
     if args.list:
         try:
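
Taken together, the two fabric.py hunks gate construction of Standalone on the new flag. The following is a minimal, self-contained sketch of that control flow; the Standalone stub and the --pattern flag are illustrative stand-ins, not the real definitions from this commit:

    # Sketch of the new CLI flow; Standalone is stubbed for illustration.
    import argparse

    class Standalone:  # stand-in for installer/client/cli/utils.Standalone
        def __init__(self, args, pattern="", local=False):
            # In the real class, local=True also swaps the default OpenAI
            # model for llama2 (see the utils.py diff below).
            self.local = local

    parser = argparse.ArgumentParser()
    parser.add_argument('--local', '-L', help="Use local LLM. Default is llama2",
                        action="store_true")
    parser.add_argument("--model", "-m", default="gpt-4-turbo-preview",
                        help="Select the model to use (GPT-4 by default)")
    parser.add_argument("--pattern", "-p", default="")  # assumed flag; the diff reads args.pattern
    args = parser.parse_args()

    if args.local:
        standalone = Standalone(args, args.pattern, local=True)
    else:
        standalone = Standalone(args, args.pattern)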

installer/client/cli/utils.py

@@ -1,6 +1,7 @@
 import requests
 import os
 from openai import OpenAI
+import asyncio
 import pyperclip
 import sys
 import platform
@@ -15,7 +16,7 @@ env_file = os.path.join(config_directory, ".env")

 class Standalone:
-    def __init__(self, args, pattern="", env_file="~/.config/fabric/.env"):
+    def __init__(self, args, pattern="", env_file="~/.config/fabric/.env", local=False):
         """ Initialize the class with the provided arguments and environment file.
         Args:
@@ -44,10 +45,24 @@ class Standalone:
         except FileNotFoundError:
             print("No API key found. Use the --apikey option to set the key")
             sys.exit()
+        self.local = local
         self.config_pattern_directory = config_directory
         self.pattern = pattern
         self.args = args
         self.model = args.model
+        if self.local:
+            if self.args.model == 'gpt-4-turbo-preview':
+                self.args.model = 'llama2'
+
+    async def localChat(self, messages):
+        from ollama import AsyncClient
+        response = await AsyncClient().chat(model=self.args.model, messages=messages)
+        print(response['message']['content'])
+
+    async def localStream(self, messages):
+        from ollama import AsyncClient
+        async for part in await AsyncClient().chat(model=self.args.model, messages=messages, stream=True):
+            print(part['message']['content'], end='', flush=True)

     def streamMessage(self, input_data: str, context=""):
         """ Stream a message and handle exceptions.
@@ -87,6 +102,9 @@ class Standalone:
         else:
             messages = [user_message]
         try:
+            if self.local:
+                asyncio.run(self.localStream(messages))
+            else:
             stream = self.client.chat.completions.create(
                 model=self.model,
                 messages=messages,
@@ -153,6 +171,9 @@ class Standalone:
         else:
             messages = [user_message]
         try:
+            if self.local:
+                asyncio.run(self.localChat(messages))
+            else:
             response = self.client.chat.completions.create(
                 model=self.model,
                 messages=messages,
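
The new localChat and localStream coroutines follow the ollama Python package's AsyncClient API. For reference, here is a self-contained sketch of the same pattern outside the class; it assumes the ollama package is installed and a local Ollama server is running with the llama2 model pulled:

    import asyncio
    from ollama import AsyncClient

    async def local_chat(messages, model='llama2'):
        # Non-streaming: await the full completion, then print it.
        response = await AsyncClient().chat(model=model, messages=messages)
        print(response['message']['content'])

    async def local_stream(messages, model='llama2'):
        # Streaming: print each chunk as it arrives.
        async for part in await AsyncClient().chat(model=model, messages=messages, stream=True):
            print(part['message']['content'], end='', flush=True)

    if __name__ == '__main__':
        asyncio.run(local_stream([{'role': 'user', 'content': 'Why is the sky blue?'}]))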
