""" Initialize the class with the provided arguments and environment file.
@@ -37,7 +40,7 @@ class Standalone:
env_file=os.path.expanduser(env_file)
load_dotenv(env_file)
if"OPENAI_API_KEY"notinos.environ:
print("Error: OPENAI_API_KEY not found in environment variables.")
eprint("Error: OPENAI_API_KEY not found in environment variables.")
self.client=OpenAI()
else:
api_key=os.environ['OPENAI_API_KEY']
@@ -141,7 +144,7 @@ class Standalone:
system_message={"role":"system","content":system}
messages=[system_message,user_message]
exceptFileNotFoundError:
print("pattern not found")
eprint("pattern not found")
return
else:
ifcontext:
@@ -181,17 +184,17 @@ class Standalone:
sys.stdout.flush()
exceptExceptionase:
if"All connection attempts failed"instr(e):
print(
eprint(
"Error: cannot connect to llama2. If you have not already, please visit https://ollama.com for installation instructions")
if"CLAUDE_API_KEY"instr(e):
print(
eprint(
"Error: CLAUDE_API_KEY not found in environment variables. Please run --setup and add the key")
if"overloaded_error"instr(e):
print(
eprint(
"Error: Fabric is working fine, but claude is overloaded. Please try again later.")
else:
print(f"Error: {e}")
print(e)
eprint(f"Error: {e}")
eprint(e)
ifself.args.copy:
pyperclip.copy(buffer)
ifself.args.output:
@@ -227,7 +230,7 @@ class Standalone:
system_message={"role":"system","content":system}
messages=[system_message,user_message]
exceptFileNotFoundError:
print("pattern not found")
eprint("pattern not found")
return
else:
ifcontext:
@@ -260,19 +263,19 @@ class Standalone:
f.write(response.choices[0].message.content)
exceptExceptionase:
if"All connection attempts failed"instr(e):
print(
eprint(
"Error: cannot connect to llama2. If you have not already, please visit https://ollama.com for installation instructions")
if"CLAUDE_API_KEY"instr(e):
print(
eprint(
"Error: CLAUDE_API_KEY not found in environment variables. Please run --setup and add the key")
if"overloaded_error"instr(e):
print(
eprint(
"Error: Fabric is working fine, but claude is overloaded. Please try again later.")
if"Attempted to call a sync iterator on an async stream"instr(e):
print("Error: There is a problem connecting fabric with your local ollama installation. Please visit https://ollama.com for installation instructions. It is possible that you have chosen the wrong model. Please run fabric --listmodels to see the available models and choose the right one with fabric --model <model> or fabric --changeDefaultModel. If this does not work. Restart your computer (always a good idea) and try again. If you are still having problems, please visit https://ollama.com for installation instructions.")
eprint("Error: There is a problem connecting fabric with your local ollama installation. Please visit https://ollama.com for installation instructions. It is possible that you have chosen the wrong model. Please run fabric --listmodels to see the available models and choose the right one with fabric --model <model> or fabric --changeDefaultModel. If this does not work. Restart your computer (always a good idea) and try again. If you are still having problems, please visit https://ollama.com for installation instructions.")
else:
print(f"Error: {e}")
print(e)
eprint(f"Error: {e}")
eprint(e)
deffetch_available_models(self):
@@ -299,10 +302,10 @@ class Standalone:
formodelinsorted_gpt_models:
gptlist.append(model.get("id"))
else:
print(f"Failed to fetch models: HTTP {response.status_code}")
eprint(f"Failed to fetch models: HTTP {response.status_code}")
sys.exit()
except:
print('No OpenAI API key found. Please run fabric --setup and add the key if you wish to interact with openai')
eprint('No OpenAI API key found. Please run fabric --setup and add the key if you wish to interact with openai')
importollama
try:
default_modelollamaList=ollama.list()['models']
@@ -341,7 +344,7 @@ class Update:
self.pattern_directory=os.path.join(
self.config_directory,"patterns")
os.makedirs(self.pattern_directory,exist_ok=True)
print("Updating patterns...")
eprint("Updating patterns...")
self.update_patterns()# Start the update process immediately