
merging

pull/63/head
agu3rra committed 1 year ago · commit 0b16836ad5
  1. client/fabric/fabric.py (+9)
  2. client/fabric/utils.py (+32)

client/fabric/fabric.py (+9)

@@ -39,6 +39,12 @@ def main():
     parser.add_argument(
         "--setup", help="Set up your fabric instance", action="store_true"
     )
+    parser.add_argument(
+        "--model", "-m", help="Select the model to use (GPT-4 by default)", default="gpt-4"
+    )
+    parser.add_argument(
+        "--listmodels", help="List all available models", action="store_true"
+    )
     args = parser.parse_args()
     home_holder = os.path.expanduser("~")
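The two new flags can be exercised on their own. The sketch below reuses only the argparse definitions added in this hunk; the rest of fabric's CLI and its other arguments are omitted.

import argparse

# Minimal stand-in parser with just the two flags added above.
parser = argparse.ArgumentParser()
parser.add_argument(
    "--model", "-m", help="Select the model to use (GPT-4 by default)", default="gpt-4"
)
parser.add_argument(
    "--listmodels", help="List all available models", action="store_true"
)

args = parser.parse_args(["-m", "gpt-3.5-turbo"])
print(args.model, args.listmodels)   # gpt-3.5-turbo False

args = parser.parse_args(["--listmodels"])
print(args.model, args.listmodels)   # gpt-4 True (model falls back to the default)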
@@ -70,6 +76,9 @@ def main():
     except FileNotFoundError:
         print("No patterns found")
         sys.exit()
+    if args.listmodels:
+        Update.list_models()
+        sys.exit()
     if args.text is not None:
         text = args.text
     else:

client/fabric/utils.py (+32)

@@ -12,6 +12,7 @@ config_directory = os.path.expanduser("~/.config/fabric")
 env_file = os.path.join(config_directory, ".env")
 class Standalone:
     def __init__(self, args, pattern="", env_file="~/.config/fabric/.env"):
         """ Initialize the class with the provided arguments and environment file.
@@ -45,6 +46,7 @@ class Standalone:
         self.config_pattern_directory = config_directory
         self.pattern = pattern
         self.args = args
+        self.model = args.model
     def streamMessage(self, input_data: str):
         """ Stream a message and handle exceptions.
@@ -78,7 +80,11 @@ class Standalone:
         messages = [user_message]
         try:
             stream = self.client.chat.completions.create(
+<<<<<<< HEAD:client/fabric/utils.py
                 model="gpt-4-turbo-preview",
+=======
+                model=self.model,
+>>>>>>> 086cfbc (add model and list-model to args):client/utils.py
                 messages=messages,
                 temperature=0.0,
                 top_p=1,
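Note that this hunk commits the conflict markers themselves, so streamMessage ends up containing both model= lines. A sketch of what the call presumably looks like once the conflict is resolved in favour of the incoming branch (086cfbc), i.e. keeping model=self.model. It is written with an explicit OpenAI client because self.client is configured elsewhere in Standalone, and stream=True is an assumption based on the method's name.

from openai import OpenAI

client = OpenAI()            # assumes OPENAI_API_KEY is set in the environment
model = "gpt-4"              # stands in for self.model
messages = [{"role": "user", "content": "Hello"}]

stream = client.chat.completions.create(
    model=model,             # the resolved line: self.model instead of a hard-coded name
    messages=messages,
    temperature=0.0,
    top_p=1,
    stream=True,             # assumption: streamMessage consumes a streamed response
)
for chunk in stream:
    delta = chunk.choices[0].delta.content
    if delta is not None:
        print(delta, end="")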
@@ -137,7 +143,11 @@ class Standalone:
         messages = [user_message]
         try:
             response = self.client.chat.completions.create(
+<<<<<<< HEAD:client/fabric/utils.py
                 model="gpt-4-turbo-preview",
+=======
+                model=self.model,
+>>>>>>> 086cfbc (add model and list-model to args):client/utils.py
                 messages=messages,
                 temperature=0.0,
                 top_p=1,
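The same conflict, with the same intended resolution, appears in the non-streaming path. A correspondingly hedged sketch, again with an explicit client standing in for self.client:

from openai import OpenAI

client = OpenAI()            # assumes OPENAI_API_KEY is set in the environment
response = client.chat.completions.create(
    model="gpt-4",           # stands in for self.model
    messages=[{"role": "user", "content": "Hello"}],
    temperature=0.0,
    top_p=1,
)
print(response.choices[0].message.content)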
@@ -273,6 +283,28 @@ class Update:
                 self.progress_bar.close()  # Ensure the progress bar is cleaned up properly
             else:
                 print(f"Failed to fetch directory contents due to an HTTP error: {e}")
+
+    def list_models():
+        AVAILABLE_MODELS = [
+            "gpt-4-0125-preview",
+            "gpt-4-turbo-preview",
+            "gpt-4-1106-preview",
+            "gpt-4-vision-preview",
+            "gpt-4",
+            "gpt-4-0613",
+            "gpt-4-32k",
+            "gpt-4-32k-0613",
+            "gpt-3.5-turbo-0125",
+            "gpt-3.5-turbo",
+            "gpt-3.5-turbo-1106",
+            "gpt-3.5-turbo-instruct",
+            "gpt-3.5-turbo-16k",
+            "gpt-3.5-turbo-0613",
+            "gpt-3.5-turbo-16k-0613"
+        ]
+        print("Available models:")
+        for model in AVAILABLE_MODELS:
+            print(model)

 class Setup:
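For context on the wiring: list_models() is defined without self, and fabric.py calls it on the class as Update.list_models(), which is valid in Python 3 because a function looked up on the class is just a plain function. A trimmed, runnable stand-in (model names shortened for illustration):

class Update:
    def list_models():                                   # no self: only ever called on the class
        AVAILABLE_MODELS = ["gpt-4", "gpt-3.5-turbo"]    # trimmed list for illustration
        print("Available models:")
        for model in AVAILABLE_MODELS:
            print(model)

Update.list_models()   # mirrors the "if args.listmodels:" branch added in fabric.py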
