fixed some stuff

pull/174/head
jad2121, 9 months ago
commit 341c358260
  1. README.md (9 changed lines)
  2. installer/client/cli/fabric.py (8 changed lines)
  3. installer/client/cli/utils.py (42 changed lines)

README.md (9 changed lines)

@@ -198,7 +198,7 @@ Once you have it all set up, here's how to use it.
 ```bash
 fabric [-h] [--text TEXT] [--copy] [--agents {trip_planner,ApiKeys}]
-       [--output [OUTPUT]] [--stream] [--list] [--update]
+       [--output [OUTPUT]] [--stream] [--list] [--clear] [--update]
        [--pattern PATTERN] [--setup]
        [--changeDefaultModel CHANGEDEFAULTMODEL] [--model MODEL]
        [--listmodels] [--context]
@@ -219,6 +219,8 @@ options:
                         realtime. NOTE: You will not be able to pipe the
                         output into another command.
   --list, -l            List available patterns
+  --clear               Clears your persistent model choice so that you can
+                        once again use the --model flag
   --update, -u          Update patterns
   --pattern PATTERN, -p PATTERN
                         The pattern (prompt) to use
@@ -228,8 +230,9 @@ options:
                         ~/.config/fabric/.env). For a list of available
                         models, use the --listmodels flag.
   --model MODEL, -m MODEL
-                        Select the model to use (GPT-4 by default for chatGPT
-                        and llama2 for Ollama)
+                        Select the model to use. NOTE: Will not work if you
+                        have set a default model. Please use --clear to clear
+                        persistence before using this flag.
   --listmodels          List all available models
   --context, -c         Use Context file (context.md) to add context to your
                         pattern

installer/client/cli/fabric.py (8 changed lines)

@@ -37,6 +37,8 @@ def main():
     parser.add_argument(
         "--list", "-l", help="List available patterns", action="store_true"
     )
+    parser.add_argument('--clear', help="Clears your persistent model choice so that you can once again use the --model flag",
+                        action="store_true")
     parser.add_argument(
         "--update", "-u", help="Update patterns", action="store_true")
     parser.add_argument("--pattern", "-p", help="The pattern (prompt) to use")
@@ -47,7 +49,7 @@ def main():
         help="Change the default model. Your choice will be saved in ~/.config/fabric/.env). For a list of available models, use the --listmodels flag.")
     parser.add_argument(
-        "--model", "-m", help="Select the model to use (GPT-4 by default for chatGPT and llama2 for Ollama)", default="gpt-4-turbo-preview"
+        "--model", "-m", help="Select the model to use. NOTE: Will not work if you have set a default model. Please use --clear to clear persistence before using this flag.", default="gpt-4-turbo-preview"
     )
     parser.add_argument(
         "--listmodels", help="List all available models", action="store_true"
@@ -97,6 +99,10 @@ def main():
     if not os.path.exists(os.path.join(config, "context.md")):
         print("Please create a context.md file in ~/.config/fabric")
         sys.exit()
+    if args.clear:
+        Setup().clean_env()
+        print("Model choice cleared. Please restart your session to use the --model flag.")
+        sys.exit()
     standalone = Standalone(args, args.pattern)
     if args.list:
         try:
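
Taken together, the fabric.py changes register the new --clear flag and handle it before the Standalone client is constructed, so `fabric --clear` cleans up the saved choice and exits without running a pattern. Below is a minimal, self-contained sketch of that control flow; `Setup().clean_env()` is replaced by a comment because the real implementation is the utils.py change further down, and the final print is only a stand-in for handing off to Standalone.

```python
import argparse
import sys


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--clear", action="store_true",
                        help="Clears your persistent model choice")
    parser.add_argument("--model", "-m", default="gpt-4-turbo-preview")
    args = parser.parse_args()

    if args.clear:
        # The real CLI calls Setup().clean_env() here to strip the saved
        # " --model ..." suffix from the fabric alias in ~/.bashrc or ~/.zshrc.
        print("Model choice cleared. Please restart your session to use the --model flag.")
        sys.exit()

    # Only reached when --clear was not given, so --model takes effect.
    print(f"Would run with model: {args.model}")


if __name__ == "__main__":
    main()
```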

installer/client/cli/utils.py (42 changed lines)

@@ -537,6 +537,48 @@ class Setup:
         else:
             return line  # Return the line unmodified if no match is found.
 
+    def clear_env_line(self, line):
+        fabric_command_regex = re.compile(
+            r"(alias.*fabric --pattern\s+\S+.*?)( --model.*)?'")
+        match = fabric_command_regex.search(line)
+        if match:
+            base_command = match.group(1)
+            return f"{base_command}'\n"
+        else:
+            return line  # Return the line unmodified if no match is found.
+
+    def clean_env(self):
+        """Clear the DEFAULT_MODEL from the environment file.
+
+        Returns:
+            None
+        """
+        user_home = os.path.expanduser("~")
+        sh_config = None
+        # Check for shell configuration files
+        if os.path.exists(os.path.join(user_home, ".bashrc")):
+            sh_config = os.path.join(user_home, ".bashrc")
+        elif os.path.exists(os.path.join(user_home, ".zshrc")):
+            sh_config = os.path.join(user_home, ".zshrc")
+        else:
+            print("No environment file found.")
+        if sh_config:
+            with open(sh_config, "r") as f:
+                lines = f.readlines()
+            with open(sh_config, "w") as f:
+                for line in lines:
+                    modified_line = line
+                    # Update existing fabric commands
+                    if "fabric --pattern" in line:
+                        modified_line = self.clear_env_line(
+                            modified_line)
+                    elif "fabric=" in line:
+                        modified_line = self.clear_env_line(
+                            modified_line)
+                    f.write(modified_line)
+        else:
+            print("No shell configuration file found.")
+
     def update_fabric_alias(self, line, model):
         fabric_alias_regex = re.compile(
             r"(alias fabric='[^']+?)( --model.*)?'")

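The core of the utils.py addition is `clear_env_line`, whose regex keeps everything in a saved fabric alias up to a trailing ` --model ...`, drops that suffix if present, and re-appends the closing quote; `clean_env` then rewrites whichever of ~/.bashrc or ~/.zshrc exists, line by line, passing only alias-looking lines through that function. The snippet below exercises the same regex against invented rc-file lines (the alias name and PATH line are illustrative, not from the repository):

```python
import re

# Same pattern as clear_env_line in installer/client/cli/utils.py.
fabric_command_regex = re.compile(
    r"(alias.*fabric --pattern\s+\S+.*?)( --model.*)?'")


def clear_env_line(line):
    """Strip a trailing ' --model ...' from a saved fabric alias line."""
    match = fabric_command_regex.search(line)
    if match:
        return f"{match.group(1)}'\n"
    return line  # unchanged if the regex does not match


rc_lines = [
    "export PATH=$PATH:~/bin\n",
    "alias summarize='fabric --pattern summarize --model gpt-4-turbo-preview'\n",
]

for line in rc_lines:
    # clean_env applies the rewrite only to lines that look like fabric aliases.
    if "fabric --pattern" in line or "fabric=" in line:
        line = clear_env_line(line)
    print(line, end="")
# export PATH=$PATH:~/bin
# alias summarize='fabric --pattern summarize'
```

After `fabric --clear` runs this cleanup, restarting the shell picks up the rewritten alias, and the --model flag is honored again on the next invocation.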