|
|
@@ -12,6 +12,7 @@ config_directory = os.path.expanduser("~/.config/fabric")
|
|
|
env_file = os.path.join(config_directory, ".env") |
|
|
|
env_file = os.path.join(config_directory, ".env") |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class Standalone: |
|
|
|
class Standalone: |
|
|
|
def __init__(self, args, pattern="", env_file="~/.config/fabric/.env"): |
|
|
|
def __init__(self, args, pattern="", env_file="~/.config/fabric/.env"): |
|
|
|
""" Initialize the class with the provided arguments and environment file. |
|
|
|
""" Initialize the class with the provided arguments and environment file. |
|
|
@@ -45,6 +46,7 @@ class Standalone:
|
|
|
self.config_pattern_directory = config_directory |
|
|
|
self.config_pattern_directory = config_directory |
|
|
|
self.pattern = pattern |
|
|
|
self.pattern = pattern |
|
|
|
self.args = args |
|
|
|
self.args = args |
|
|
|
|
|
|
|
self.model = args.model |
|
|
|
|
|
|
|
|
|
|
|
def streamMessage(self, input_data: str): |
|
|
|
def streamMessage(self, input_data: str): |
|
|
|
""" Stream a message and handle exceptions. |
|
|
|
""" Stream a message and handle exceptions. |
|
|
@@ -78,7 +80,7 @@ class Standalone:
|
|
|
messages = [user_message] |
|
|
|
messages = [user_message] |
|
|
|
try: |
|
|
|
try: |
|
|
|
stream = self.client.chat.completions.create( |
|
|
|
stream = self.client.chat.completions.create( |
|
|
|
model="gpt-4-turbo-preview", |
|
|
|
model=self.model, |
|
|
|
messages=messages, |
|
|
|
messages=messages, |
|
|
|
temperature=0.0, |
|
|
|
temperature=0.0, |
|
|
|
top_p=1, |
|
|
|
top_p=1, |
|
|
@@ -137,7 +139,7 @@ class Standalone:
|
|
|
messages = [user_message] |
|
|
|
messages = [user_message] |
|
|
|
try: |
|
|
|
try: |
|
|
|
response = self.client.chat.completions.create( |
|
|
|
response = self.client.chat.completions.create( |
|
|
|
model="gpt-4-turbo-preview", |
|
|
|
model=self.model, |
|
|
|
messages=messages, |
|
|
|
messages=messages, |
|
|
|
temperature=0.0, |
|
|
|
temperature=0.0, |
|
|
|
top_p=1, |
|
|
|
top_p=1, |
|
|
@@ -154,6 +156,25 @@ class Standalone:
|
|
|
with open(self.args.output, "w") as f: |
|
|
|
with open(self.args.output, "w") as f: |
|
|
|
f.write(response.choices[0].message.content) |
|
|
|
f.write(response.choices[0].message.content) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def fetch_available_models(self):
    """Fetch the OpenAI model list and print the available GPT model IDs.

    Queries the OpenAI `/v1/models` endpoint using the API key held by
    ``self.client``, keeps only models whose ID starts with "gpt", and
    prints them one per line in alphabetical order. On an HTTP error or a
    network failure, prints a diagnostic message instead of raising.

    Returns:
        None. Output goes to stdout.
    """
    headers = {
        "Authorization": f"Bearer {self.client.api_key}"
    }
    try:
        # timeout prevents an unresponsive endpoint from hanging the CLI forever
        response = requests.get(
            "https://api.openai.com/v1/models", headers=headers, timeout=10
        )
    except requests.RequestException as e:
        # Keep the method's print-an-error contract rather than raising.
        print(f"Failed to fetch models: {e}")
        return
    if response.status_code == 200:
        models = response.json().get("data", [])
        # Filter only gpt models (fixed: ("gpt") was a plain string, not a tuple)
        gpt_models = [
            model for model in models if model.get("id", "").startswith("gpt")
        ]
        # Sort the models alphabetically by their ID for stable, scannable output
        sorted_gpt_models = sorted(gpt_models, key=lambda x: x.get("id"))
        for model in sorted_gpt_models:
            print(model.get("id"))
    else:
        print(f"Failed to fetch models: HTTP {response.status_code}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class Update: |
|
|
|
class Update: |
|
|
|
def __init__(self): |
|
|
|
def __init__(self): |
|
|
@@ -274,7 +295,6 @@ class Update:
|
|
|
else: |
|
|
|
else: |
|
|
|
print(f"Failed to fetch directory contents due to an HTTP error: {e}") |
|
|
|
print(f"Failed to fetch directory contents due to an HTTP error: {e}") |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class Setup: |
|
|
|
class Setup: |
|
|
|
def __init__(self): |
|
|
|
def __init__(self): |
|
|
|
""" Initialize the object. |
|
|
|
""" Initialize the object. |
|
|
|