Browse Source

Got rid of --claude and --local; everything is now handled by --model.

pull/174/head
jad2121 9 months ago
parent
commit
d7fb8fe92d
  1. 9
      README.md
  2. 24
      installer/client/cli/fabric.py
  3. 56
      installer/client/cli/utils.py

9
README.md

@@ -197,12 +197,11 @@ Once you have it all set up, here's how to use it.
`fabric -h`
```bash
fabric -h
usage: fabric [-h] [--text TEXT] [--copy] [--agents {trip_planner,ApiKeys}]
fabric [-h] [--text TEXT] [--copy] [--agents {trip_planner,ApiKeys}]
[--output [OUTPUT]] [--stream] [--list] [--update]
[--pattern PATTERN] [--setup]
[--changeDefaultModel CHANGEDEFAULTMODEL] [--local] [--claude]
[--model MODEL] [--listmodels] [--context]
[--changeDefaultModel CHANGEDEFAULTMODEL] [--model MODEL]
[--listmodels] [--context]
An open source framework for augmenting humans using AI.
@@ -228,8 +227,6 @@ options:
Change the default model. Your choice will be saved in
~/.config/fabric/.env). For a list of available
models, use the --listmodels flag.
--local, -L Use local LLM. Default is llama2
--claude Use Claude AI
--model MODEL, -m MODEL
Select the model to use (GPT-4 by default for chatGPT
and llama2 for Ollama)

24
installer/client/cli/fabric.py

@@ -45,11 +45,6 @@ def main():
)
parser.add_argument('--changeDefaultModel',
help="Change the default model. Your choice will be saved in ~/.config/fabric/.env). For a list of available models, use the --listmodels flag.")
parser.add_argument(
'--local', '-L', help="Use local LLM. Default is llama2", action="store_true")
parser.add_argument(
"--claude", help="Use Claude AI", action="store_true")
parser.add_argument(
"--model", "-m", help="Select the model to use (GPT-4 by default for chatGPT and llama2 for Ollama)", default="gpt-4-turbo-preview"
@@ -81,6 +76,7 @@ def main():
sys.exit()
if args.changeDefaultModel:
Setup().default_model(args.changeDefaultModel)
print(f"Default model changed to {args.changeDefaultModel}")
sys.exit()
if args.agents:
# Handle the agents logic
@@ -101,12 +97,6 @@ def main():
if not os.path.exists(os.path.join(config, "context.md")):
print("Please create a context.md file in ~/.config/fabric")
sys.exit()
standalone = None
if args.local:
standalone = Standalone(args, args.pattern, local=True)
elif args.claude:
standalone = Standalone(args, args.pattern, claude=True)
else:
standalone = Standalone(args, args.pattern)
if args.list:
try:
@@ -118,9 +108,15 @@ def main():
print("No patterns found")
sys.exit()
if args.listmodels:
setup = Setup()
allmodels = setup.fetch_available_models()
for model in allmodels:
gptmodels, localmodels, claudemodels = standalone.fetch_available_models()
print("GPT Models:")
for model in gptmodels:
print(model)
print("\nLocal Models:")
for model in localmodels:
print(model)
print("\nClaude Models:")
for model in claudemodels:
print(model)
sys.exit()
if args.text is not None:

56
installer/client/cli/utils.py

@@ -17,7 +17,7 @@ env_file = os.path.join(config_directory, ".env")
class Standalone:
def __init__(self, args, pattern="", env_file="~/.config/fabric/.env", local=False, claude=False):
def __init__(self, args, pattern="", env_file="~/.config/fabric/.env"):
""" Initialize the class with the provided arguments and environment file.
Args:
@@ -46,21 +46,19 @@ class Standalone:
except FileNotFoundError:
print("No API key found. Use the --apikey option to set the key")
sys.exit()
self.local = local
self.local = False
self.config_pattern_directory = config_directory
self.pattern = pattern
self.args = args
self.model = args.model
self.claude = claude
self.claude = False
sorted_gpt_models, ollamaList, claudeList = self.fetch_available_models()
try:
self.model = os.environ["DEFAULT_MODEL"]
except:
if self.local:
if self.args.model == 'gpt-4-turbo-preview':
self.model = 'llama2'
if self.claude:
if self.args.model == 'gpt-4-turbo-preview':
self.model = 'claude-3-opus-20240229'
pass
self.local = self.model.strip() in ollamaList
self.claude = self.model.strip() in claudeList
async def localChat(self, messages):
from ollama import AsyncClient
@@ -259,6 +257,9 @@ class Standalone:
f.write(response.choices[0].message.content)
def fetch_available_models(self):
gptlist = []
fullOllamaList = []
claudeList = ['claude-3-opus-20240229']
headers = {
"Authorization": f"Bearer {self.client.api_key}"
}
@@ -267,25 +268,27 @@
"https://api.openai.com/v1/models", headers=headers)
if response.status_code == 200:
print("OpenAI GPT models:\n")
models = response.json().get("data", [])
# Filter only gpt models
gpt_models = [model for model in models if model.get(
"id", "").startswith(("gpt"))]
# Sort the models alphabetically by their ID
sorted_gpt_models = sorted(gpt_models, key=lambda x: x.get("id"))
sorted_gpt_models = sorted(
gpt_models, key=lambda x: x.get("id"))
for model in sorted_gpt_models:
print(model.get("id"))
print("\nLocal Ollama models:")
import ollama
ollamaList = ollama.list()['models']
for model in ollamaList:
print(model['name'].rstrip(":latest"))
print("\nClaude models:")
print("claude-3-opus-20240229")
gptlist.append(model.get("id"))
else:
print(f"Failed to fetch models: HTTP {response.status_code}")
sys.exit()
import ollama
try:
default_modelollamaList = ollama.list()['models']
for model in default_modelollamaList:
fullOllamaList.append(model['name'].rstrip(":latest"))
except:
fullOllamaList = []
return gptlist, fullOllamaList, claudeList
def get_cli_input(self):
""" aided by ChatGPT; uses platform library
@@ -520,17 +523,14 @@ class Setup:
def update_fabric_command(self, line, model):
fabric_command_regex = re.compile(
r"(alias.*fabric --pattern\s+\S+.*?)( --claude| --local)?'")
r"(alias.*fabric --pattern\s+\S+.*?)( --model.*)?'")
match = fabric_command_regex.search(line)
if match:
base_command = match.group(1)
# Provide a default value for current_flag
current_flag = match.group(2) if match.group(2) else ""
new_flag = ""
if model in self.claudeList:
new_flag = " --claude"
elif model in self.fullOllamaList:
new_flag = " --local"
new_flag = f" --model {model}"
# Update the command if the new flag is different or to remove an existing flag.
# Ensure to add the closing quote that was part of the original regex
return f"{base_command}{new_flag}'\n"
@@ -539,15 +539,11 @@ class Setup:
def update_fabric_alias(self, line, model):
fabric_alias_regex = re.compile(
r"(alias fabric='[^']+?)( --claude| --local)?'")
r"(alias fabric='[^']+?)( --model.*)?'")
match = fabric_alias_regex.search(line)
if match:
base_command, current_flag = match.groups()
new_flag = ""
if model in self.claudeList:
new_flag = " --claude"
elif model in self.fullOllamaList:
new_flag = " --local"
new_flag = f" --model {model}"
# Update the alias if the new flag is different or to remove an existing flag.
return f"{base_command}{new_flag}'\n"
else:

Loading…
Cancel
Save