diff --git a/client/fabric b/client/fabric
index b1fe682..1c0ff2e 100755
--- a/client/fabric
+++ b/client/fabric
@@ -39,6 +39,12 @@ if __name__ == "__main__":
     parser.add_argument(
         "--setup", help="Set up your fabric instance", action="store_true"
     )
+    parser.add_argument(
+        "--model", "-m", help="Select the model to use (GPT-4 by default)", default="gpt-4"
+    )
+    parser.add_argument(
+        "--listmodels", help="List all available models", action="store_true"
+    )
     args = parser.parse_args()
     home_holder = os.path.expanduser("~")
 
@@ -70,6 +76,9 @@ if __name__ == "__main__":
         except FileNotFoundError:
             print("No patterns found")
             sys.exit()
+    if args.listmodels:
+        Update.list_models()
+        sys.exit()
     if args.text is not None:
         text = args.text
     else:
diff --git a/client/utils.py b/client/utils.py
index c2d3db5..c9ab83d 100644
--- a/client/utils.py
+++ b/client/utils.py
@@ -12,6 +12,7 @@
 config_directory = os.path.expanduser("~/.config/fabric")
 env_file = os.path.join(config_directory, ".env")
 
+
 class Standalone:
     def __init__(self, args, pattern="", env_file="~/.config/fabric/.env"):
         # Expand the tilde to the full path
@@ -30,6 +31,7 @@ class Standalone:
         self.config_pattern_directory = config_directory
         self.pattern = pattern
         self.args = args
+        self.model = args.model
 
     def streamMessage(self, input_data: str):
         wisdomFilePath = os.path.join(
@@ -51,7 +53,7 @@ class Standalone:
         messages = [user_message]
         try:
             stream = self.client.chat.completions.create(
-                model="gpt-4",
+                model=self.model,
                 messages=messages,
                 temperature=0.0,
                 top_p=1,
@@ -98,7 +100,7 @@ class Standalone:
         messages = [user_message]
         try:
             response = self.client.chat.completions.create(
-                model="gpt-4",
+                model=self.model,
                 messages=messages,
                 temperature=0.0,
                 top_p=1,
@@ -181,6 +183,28 @@ class Update:
             self.progress_bar.close()  # Ensure the progress bar is cleaned up properly
         else:
             print(f"Failed to fetch directory contents due to an HTTP error: {e}")
+    def list_models():
+        AVAILABLE_MODELS = [
+            "gpt-4-0125-preview",
+            "gpt-4-turbo-preview",
+            "gpt-4-1106-preview",
+            "gpt-4-vision-preview",
+            "gpt-4",
+            "gpt-4-0613",
+            "gpt-4-32k",
+            "gpt-4-32k-0613",
+            "gpt-3.5-turbo-0125",
+            "gpt-3.5-turbo",
+            "gpt-3.5-turbo-1106",
+            "gpt-3.5-turbo-instruct",
+            "gpt-3.5-turbo-16k",
+            "gpt-3.5-turbo-0613",
+            "gpt-3.5-turbo-16k-0613"
+        ]
+
+        print("Available models:")
+        for model in AVAILABLE_MODELS:
+            print(model)
 
 
 class Setup: