Daniel Miessler
9 months ago
21 changed files with 4509 additions and 0 deletions
@ -0,0 +1,81 @@
# The `fabric` client

This is the primary `fabric` client, which has multiple modes of operation.

## Client modes

You can use the client in three different modes:

1. **Local Only:** You can use the client without a server, and it will use the patterns it has downloaded from this repository, or ones that you specify.
2. **Local Server:** You can run your own version of a Fabric Mill locally (on a private IP), which you can then connect to and use.
3. **Remote Server:** You can specify a remote server that your client commands will then call.

## Client features

1. Standalone Mode: Run without needing a server.
2. Clipboard Integration: Copy responses to the clipboard.
3. File Output: Save responses to files for later reference.
4. Pattern Module: Utilize specific patterns for different types of analysis.
5. Server Mode: Operate the tool in server mode to control your own patterns and let your other apps access it.

## Installation

1. If you have this repository downloaded, you already have the client. If not, clone it:

   `git clone git@github.com:danielmiessler/fabric.git`

2. Navigate to the client's directory:

   `cd client`

3. Install poetry (if you don't have it already):

   `pip3 install poetry`

4. Install the required packages:

   `poetry install`

5. Activate the virtual environment:

   `poetry shell`

6. Add the client directory to your PATH:

   `echo export PATH=$PATH:$(pwd) >> ~/.bashrc` # or .zshrc

7. Put your OpenAI API key in the `.env` file in your `~/.config/fabric/` directory (create the file if it doesn't exist):

   `OPENAI_API_KEY=[Your_API_Key]`
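As an alternative to creating the `.env` file by hand in step 7, the client also has a `--setup` flag (defined in the client script later in this commit) that prompts for your OpenAI API key and downloads the patterns. A minimal sketch, assuming steps 1 through 6 are done and the `poetry shell` virtual environment is active:

```bash
# Let the client write ~/.config/fabric/.env and download the patterns for you
fabric --setup

# Verify the install by listing the downloaded patterns
fabric --list
```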
## Usage

To use `fabric`, call it with your desired options (remember to activate the virtual environment with `poetry shell` - step 5 above):

fabric [options]

Options include (see the argument definitions in the client script later in this commit for the full list):

--text, -t: Provide the input text directly on the command line instead of piping it in.
--pattern, -p: Select the pattern (prompt) to use for the analysis.
--stream, -s: Stream the results in real time. NOTE: streamed output cannot be piped into another command.
--output, -o: Save the response to a file.
--copy, -c: Copy the response to the clipboard.
--model, -m: Select the model to use (`gpt-4-turbo-preview` by default).
--listmodels: List all available models.
--list, -l: List available patterns.
--update, -u: Update patterns.
--setup: Set up your fabric instance (store your API key and download patterns).

Example:

```bash
# Pasting in an article about LLMs
pbpaste | fabric --pattern extract_wisdom --output wisdom.txt | fabric --pattern summarize --stream
```
Example output:

```markdown
ONE SENTENCE SUMMARY:

- The content covered the basics of LLMs and how they are used in everyday practice.

MAIN POINTS:

1. LLMs are large language models, and typically use the transformer architecture.
2. LLMs used to be used for story generation, but they're now used for many AI applications.
3. They are vulnerable to hallucination if not configured correctly, so be careful.

TAKEAWAYS:

1. It's possible to use LLMs for multiple AI use cases.
2. It's important to validate that the results you're receiving are correct.
3. The field of AI is moving faster than ever as a result of GenAI breakthroughs.
```
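The client can also target a specific model. Since the argument parser (shown later in this commit) exposes `--model` and `--listmodels`, you can check which GPT models your API key can reach and then run a pattern against one of them; the model id below is only illustrative:

```bash
# List the GPT models available to your API key
fabric --listmodels

# Run a pattern with an explicitly chosen model instead of the default
pbpaste | fabric --pattern summarize --model gpt-3.5-turbo --stream
```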
## Contributing

We welcome contributions to Fabric, including improvements and feature additions to this client.

## Credits

The `fabric` client was created by Jonathan Dunn and Daniel Miessler.
@ -0,0 +1,89 @@
|
||||
from utils import Standalone, Update, Setup |
||||
import argparse |
||||
import sys |
||||
import os |
||||
|
||||
|
||||
script_directory = os.path.dirname(os.path.realpath(__file__)) |
||||
|
||||
def main(): |
||||
parser = argparse.ArgumentParser( |
||||
description="An open source framework for augmenting humans using AI." |
||||
) |
||||
parser.add_argument("--text", "-t", help="Text to extract summary from") |
||||
parser.add_argument( |
||||
"--copy", "-c", help="Copy the response to the clipboard", action="store_true" |
||||
) |
||||
parser.add_argument( |
||||
"--output", |
||||
"-o", |
||||
help="Save the response to a file", |
||||
nargs="?", |
||||
const="analyzepaper.txt", |
||||
default=None, |
||||
) |
||||
parser.add_argument( |
||||
"--stream", |
||||
"-s", |
||||
help="Use this option if you want to see the results in realtime. NOTE: You will not be able to pipe the output into another command.", |
||||
action="store_true", |
||||
) |
||||
parser.add_argument( |
||||
"--list", "-l", help="List available patterns", action="store_true" |
||||
) |
||||
parser.add_argument("--update", "-u", help="Update patterns", action="store_true") |
||||
parser.add_argument("--pattern", "-p", help="The pattern (prompt) to use") |
||||
parser.add_argument( |
||||
"--setup", help="Set up your fabric instance", action="store_true" |
||||
) |
||||
parser.add_argument( |
||||
"--model", "-m", help="Select the model to use (GPT-4 by default)", default="gpt-4-turbo-preview" |
||||
) |
||||
parser.add_argument( |
||||
"--listmodels", help="List all available models", action="store_true" |
||||
) |
||||
|
||||
args = parser.parse_args() |
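    # All client state lives under ~/.config/fabric: the .env file holding the API key and the downloaded patterns.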
||||
home_holder = os.path.expanduser("~") |
||||
config = os.path.join(home_holder, ".config", "fabric") |
||||
config_patterns_directory = os.path.join(config, "patterns") |
||||
env_file = os.path.join(config, ".env") |
||||
if not os.path.exists(config): |
||||
os.makedirs(config) |
||||
if args.setup: |
||||
Setup().run() |
||||
sys.exit() |
||||
if not os.path.exists(env_file) or not os.path.exists(config_patterns_directory): |
||||
print("Please run --setup to set up your API key and download patterns.") |
||||
sys.exit() |
||||
if not os.path.exists(config_patterns_directory): |
||||
Update() |
||||
sys.exit() |
||||
if args.update: |
||||
Update() |
||||
print("Your Patterns have been updated.") |
||||
sys.exit() |
||||
standalone = Standalone(args, args.pattern) |
||||
if args.list: |
||||
try: |
||||
direct = os.listdir(config_patterns_directory) |
||||
for d in direct: |
||||
print(d) |
||||
sys.exit() |
||||
except FileNotFoundError: |
||||
print("No patterns found") |
||||
sys.exit() |
||||
if args.listmodels: |
||||
standalone.fetch_available_models() |
||||
sys.exit() |
||||
if args.text is not None: |
||||
text = args.text |
||||
else: |
||||
text = standalone.get_cli_input() |
||||
if args.stream: |
||||
standalone.streamMessage(text) |
||||
else: |
||||
standalone.sendMessage(text) |
||||
|
||||
if __name__ == "__main__": |
||||
main() |
@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env python3 |
||||
|
||||
import pyperclip |
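# Print whatever is currently on the clipboard, so this script can stand in for
# macOS's pbpaste in the README pipelines (e.g. `python3 <this script> | fabric -p summarize`).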
||||
|
||||
pasted_text = pyperclip.paste() |
||||
print(pasted_text) |
@ -0,0 +1,403 @@
|
||||
import requests |
||||
import os |
||||
from openai import OpenAI |
||||
import pyperclip |
||||
import sys |
||||
import platform |
||||
from dotenv import load_dotenv |
||||
from requests.exceptions import HTTPError |
||||
from tqdm import tqdm
from youtube_transcript_api import YouTubeTranscriptApi  # required by Transcribe.youtube below (package: youtube-transcript-api)
||||
|
||||
current_directory = os.path.dirname(os.path.realpath(__file__)) |
||||
config_directory = os.path.expanduser("~/.config/fabric") |
||||
env_file = os.path.join(config_directory, ".env") |
||||
|
||||
|
||||
|
||||
class Standalone: |
||||
def __init__(self, args, pattern="", env_file="~/.config/fabric/.env"): |
||||
""" Initialize the class with the provided arguments and environment file. |
||||
|
||||
Args: |
||||
args: The arguments for initialization. |
||||
pattern: The pattern to be used (default is an empty string). |
||||
env_file: The path to the environment file (default is "~/.config/fabric/.env"). |
||||
|
||||
Returns: |
||||
None |
||||
|
||||
Raises: |
||||
KeyError: If the "OPENAI_API_KEY" is not found in the environment variables. |
||||
FileNotFoundError: If no API key is found in the environment variables. |
||||
""" |
||||
|
||||
# Expand the tilde to the full path |
||||
env_file = os.path.expanduser(env_file) |
||||
load_dotenv(env_file) |
||||
try: |
||||
apikey = os.environ["OPENAI_API_KEY"] |
||||
self.client = OpenAI() |
||||
self.client.api_key = apikey |
||||
except KeyError: |
||||
print("OPENAI_API_KEY not found in environment variables.") |
||||
|
||||
except FileNotFoundError: |
||||
print("No API key found. Use the --apikey option to set the key") |
||||
sys.exit() |
||||
self.config_pattern_directory = config_directory |
||||
self.pattern = pattern |
||||
self.args = args |
||||
self.model = args.model |
||||
|
||||
def streamMessage(self, input_data: str): |
||||
""" Stream a message and handle exceptions. |
||||
|
||||
Args: |
||||
input_data (str): The input data for the message. |
||||
|
||||
Returns: |
||||
None: If the pattern is not found. |
||||
|
||||
Raises: |
||||
FileNotFoundError: If the pattern file is not found. |
||||
""" |
||||
|
||||
wisdomFilePath = os.path.join( |
||||
config_directory, f"patterns/{self.pattern}/system.md" |
||||
) |
||||
user_message = {"role": "user", "content": f"{input_data}"} |
||||
wisdom_File = os.path.join(current_directory, wisdomFilePath) |
||||
buffer = "" |
||||
if self.pattern: |
||||
try: |
||||
with open(wisdom_File, "r") as f: |
||||
system = f.read() |
||||
system_message = {"role": "system", "content": system} |
||||
messages = [system_message, user_message] |
||||
except FileNotFoundError: |
||||
print("pattern not found") |
||||
return |
||||
else: |
||||
messages = [user_message] |
||||
try: |
||||
stream = self.client.chat.completions.create( |
||||
model=self.model, |
||||
messages=messages, |
||||
temperature=0.0, |
||||
top_p=1, |
||||
frequency_penalty=0.1, |
||||
presence_penalty=0.1, |
||||
stream=True, |
||||
) |
||||
for chunk in stream: |
||||
if chunk.choices[0].delta.content is not None: |
||||
char = chunk.choices[0].delta.content |
||||
buffer += char |
||||
if char not in ["\n", " "]: |
||||
print(char, end="") |
||||
elif char == " ": |
||||
print(" ", end="") # Explicitly handle spaces |
||||
elif char == "\n": |
||||
print() # Handle newlines |
||||
sys.stdout.flush() |
||||
except Exception as e: |
||||
print(f"Error: {e}") |
||||
print(e) |
||||
if self.args.copy: |
||||
pyperclip.copy(buffer) |
||||
if self.args.output: |
||||
with open(self.args.output, "w") as f: |
||||
f.write(buffer) |
||||
|
||||
def sendMessage(self, input_data: str): |
||||
""" Send a message using the input data and generate a response. |
||||
|
||||
Args: |
||||
input_data (str): The input data to be sent as a message. |
||||
|
||||
Returns: |
||||
None |
||||
|
||||
Raises: |
||||
FileNotFoundError: If the specified pattern file is not found. |
||||
""" |
||||
|
||||
wisdomFilePath = os.path.join( |
||||
config_directory, f"patterns/{self.pattern}/system.md" |
||||
) |
||||
user_message = {"role": "user", "content": f"{input_data}"} |
||||
wisdom_File = os.path.join(current_directory, wisdomFilePath) |
||||
if self.pattern: |
||||
try: |
||||
with open(wisdom_File, "r") as f: |
||||
system = f.read() |
||||
system_message = {"role": "system", "content": system} |
||||
messages = [system_message, user_message] |
||||
except FileNotFoundError: |
||||
print("pattern not found") |
||||
return |
||||
else: |
||||
messages = [user_message] |
||||
try: |
||||
response = self.client.chat.completions.create( |
||||
model=self.model, |
||||
messages=messages, |
||||
temperature=0.0, |
||||
top_p=1, |
||||
frequency_penalty=0.1, |
||||
presence_penalty=0.1, |
||||
) |
||||
print(response.choices[0].message.content) |
||||
except Exception as e: |
||||
print(f"Error: {e}") |
||||
print(e) |
||||
if self.args.copy: |
||||
pyperclip.copy(response.choices[0].message.content) |
||||
if self.args.output: |
||||
with open(self.args.output, "w") as f: |
||||
f.write(response.choices[0].message.content) |
||||
|
||||
def fetch_available_models(self): |
||||
headers = { |
||||
"Authorization": f"Bearer { self.client.api_key }" |
||||
} |
||||
|
||||
response = requests.get("https://api.openai.com/v1/models", headers=headers) |
||||
|
||||
if response.status_code == 200: |
||||
models = response.json().get("data", []) |
||||
# Filter only gpt models |
||||
gpt_models = [model for model in models if model.get("id", "").startswith(("gpt"))] |
||||
# Sort the models alphabetically by their ID |
||||
sorted_gpt_models = sorted(gpt_models, key=lambda x: x.get("id")) |
||||
|
||||
for model in sorted_gpt_models: |
||||
print(model.get("id")) |
||||
else: |
||||
print(f"Failed to fetch models: HTTP {response.status_code}") |
||||
|
||||
def get_cli_input(self): |
||||
""" aided by ChatGPT; uses platform library |
||||
accepts either piped input or console input |
||||
from either Windows or Linux |
||||
|
||||
Args: |
||||
none |
||||
Returns: |
||||
string from either user or pipe |
||||
""" |
||||
system = platform.system() |
||||
if system == 'Windows': |
||||
if not sys.stdin.isatty(): # Check if input is being piped |
||||
return sys.stdin.readline().strip() # Read piped input |
||||
else: |
||||
return input("Enter Question: ") # Prompt user for input from console |
||||
else: |
||||
return sys.stdin.read() |
||||
|
||||
|
||||
class Update: |
||||
def __init__(self): |
||||
""" Initialize the object with default values and update patterns. |
||||
|
||||
This method initializes the object with default values for root_api_url, config_directory, and pattern_directory. |
||||
It then creates the pattern_directory if it does not exist and calls the update_patterns method to update the patterns. |
||||
|
||||
Raises: |
||||
OSError: If there is an issue creating the pattern_directory. |
||||
""" |
||||
|
||||
self.root_api_url = "https://api.github.com/repos/danielmiessler/fabric/contents/patterns?ref=main" |
||||
self.config_directory = os.path.expanduser("~/.config/fabric") |
||||
self.pattern_directory = os.path.join(self.config_directory, "patterns") |
||||
os.makedirs(self.pattern_directory, exist_ok=True) |
||||
self.update_patterns() # Call the update process from a method. |
||||
|
||||
def update_patterns(self): |
||||
""" Update the patterns by downloading from the GitHub directory. |
||||
|
||||
Raises: |
||||
HTTPError: If there is an HTTP error while downloading patterns. |
||||
""" |
||||
|
||||
try: |
||||
self.progress_bar = tqdm(desc="Downloading Patterns…", unit="file") |
||||
self.get_github_directory_contents( |
||||
self.root_api_url, self.pattern_directory |
||||
) |
||||
# Close progress bar on success before printing the message. |
||||
self.progress_bar.close() |
||||
except HTTPError as e: |
||||
# Ensure progress bar is closed on HTTPError as well. |
||||
self.progress_bar.close() |
||||
if e.response.status_code == 403: |
||||
print( |
||||
"GitHub API rate limit exceeded. Please wait before trying again." |
||||
) |
||||
sys.exit() |
||||
else: |
||||
print(f"Failed to download patterns due to an HTTP error: {e}") |
||||
sys.exit() # Exit after handling the error. |
||||
|
||||
def download_file(self, url, local_path): |
||||
""" Download a file from the given URL and save it to the local path. |
||||
|
||||
Args: |
||||
url (str): The URL of the file to be downloaded. |
||||
local_path (str): The local path where the file will be saved. |
||||
|
||||
Raises: |
||||
HTTPError: If an HTTP error occurs during the download process. |
||||
""" |
||||
|
||||
try: |
||||
response = requests.get(url) |
||||
response.raise_for_status() |
||||
with open(local_path, "wb") as f: |
||||
f.write(response.content) |
||||
self.progress_bar.update(1) |
||||
except HTTPError as e: |
||||
print(f"Failed to download file {url}. HTTP error: {e}") |
||||
sys.exit() |
||||
|
||||
def process_item(self, item, local_dir): |
||||
""" Process the given item and save it to the local directory. |
||||
|
||||
Args: |
||||
item (dict): The item to be processed, containing information about the type, download URL, name, and URL. |
||||
local_dir (str): The local directory where the item will be saved. |
||||
|
||||
Returns: |
||||
None |
||||
|
||||
Raises: |
||||
OSError: If there is an issue creating the new directory using os.makedirs. |
||||
""" |
||||
|
||||
if item["type"] == "file": |
||||
self.download_file( |
||||
item["download_url"], os.path.join(local_dir, item["name"]) |
||||
) |
||||
elif item["type"] == "dir": |
||||
new_dir = os.path.join(local_dir, item["name"]) |
||||
os.makedirs(new_dir, exist_ok=True) |
||||
self.get_github_directory_contents(item["url"], new_dir) |
||||
|
||||
def get_github_directory_contents(self, api_url, local_dir): |
||||
""" Get the contents of a directory from GitHub API and process each item. |
||||
|
||||
Args: |
||||
api_url (str): The URL of the GitHub API endpoint for the directory. |
||||
local_dir (str): The local directory where the contents will be processed. |
||||
|
||||
Returns: |
||||
None |
||||
|
||||
Raises: |
||||
HTTPError: If an HTTP error occurs while fetching the directory contents. |
||||
If the status code is 403, it prints a message about GitHub API rate limit exceeded |
||||
and closes the progress bar. For any other status code, it prints a message |
||||
about failing to fetch directory contents due to an HTTP error. |
||||
""" |
||||
|
||||
try: |
||||
response = requests.get(api_url) |
||||
response.raise_for_status() |
||||
jsonList = response.json() |
||||
for item in jsonList: |
||||
self.process_item(item, local_dir) |
||||
except HTTPError as e: |
||||
if e.response.status_code == 403: |
||||
print( |
||||
"GitHub API rate limit exceeded. Please wait before trying again." |
||||
) |
||||
self.progress_bar.close() # Ensure the progress bar is cleaned up properly |
||||
else: |
||||
print(f"Failed to fetch directory contents due to an HTTP error: {e}") |
||||
|
||||
class Setup: |
||||
def __init__(self): |
||||
""" Initialize the object. |
||||
|
||||
Raises: |
||||
OSError: If there is an error in creating the pattern directory. |
||||
""" |
||||
|
||||
self.config_directory = os.path.expanduser("~/.config/fabric") |
||||
self.pattern_directory = os.path.join(self.config_directory, "patterns") |
||||
os.makedirs(self.pattern_directory, exist_ok=True) |
||||
self.env_file = os.path.join(self.config_directory, ".env") |
||||
|
||||
def api_key(self, api_key): |
||||
""" Set the OpenAI API key in the environment file. |
||||
|
||||
Args: |
||||
api_key (str): The API key to be set. |
||||
|
||||
Returns: |
||||
None |
||||
|
||||
Raises: |
||||
OSError: If the environment file does not exist or cannot be accessed. |
||||
""" |
||||
|
||||
if not os.path.exists(self.env_file): |
||||
with open(self.env_file, "w") as f: |
||||
f.write(f"OPENAI_API_KEY={api_key}") |
||||
print(f"OpenAI API key set to {api_key}") |
||||
|
||||
def patterns(self): |
||||
""" Method to update patterns and exit the system. |
||||
|
||||
Returns: |
||||
None |
||||
""" |
||||
|
||||
Update() |
||||
sys.exit() |
||||
|
||||
def run(self): |
||||
""" Execute the Fabric program. |
||||
|
||||
This method prompts the user for their OpenAI API key, sets the API key in the Fabric object, and then calls the patterns method. |
||||
|
||||
Returns: |
||||
None |
||||
""" |
||||
|
||||
print("Welcome to Fabric. Let's get started.") |
||||
apikey = input("Please enter your OpenAI API key\n") |
||||
self.api_key(apikey.strip()) |
||||
self.patterns() |
||||
|
||||
|
||||
class Transcribe: |
||||
def youtube(video_id): |
||||
""" |
||||
        This method gets the transcription
||||
of a YouTube video designated with the video_id |
||||
|
||||
Input: |
||||
            the video id specifying a YouTube video
||||
an example url for a video: https://www.youtube.com/watch?v=vF-MQmVxnCs&t=306s |
||||
            the video id is vF-MQmVxnCs (the "&t=306s" part is a timestamp parameter, not part of the id)
||||
|
||||
Output: |
||||
a transcript for the video |
||||
|
||||
Raises: |
||||
an exception and prints error |
||||
|
||||
|
||||
""" |
||||
try: |
||||
transcript_list = YouTubeTranscriptApi.get_transcript(video_id) |
||||
transcript = "" |
||||
for segment in transcript_list: |
||||
transcript += segment['text'] + " " |
||||
return transcript.strip() |
||||
except Exception as e: |
||||
print("Error:", e) |
||||
return None |
||||
|
||||
|
@ -0,0 +1,21 @@
Fabric is a step toward integrating the power of GPT prompts into your digital life. With Fabric, you can create a personal API that brings GPT capabilities into various parts of your digital environment, whether you want to use GPT prompts in command line operations or extend them to a wider network through that personal API. The goal is to augment your digital interactions, enhance productivity, and enable a more intelligent, GPT-powered experience across your online presence.

## Features

1. Text Analysis: Easily extract summaries from texts.
2. Clipboard Integration: Conveniently copy responses to the clipboard.
3. File Output: Save responses to files for later reference.
4. Pattern Module: Utilize specific modules for different types of analysis.
5. Server Mode: Operate the tool in server mode for expanded capabilities.
6. Remote & Standalone Modes: Choose between remote and standalone operations.

## Installation

1. Install dependencies:

   `npm install`

2. Start the application:

   `npm start`

## Contributing

We welcome contributions to Fabric! For details on our code of conduct and the process for submitting pull requests, please read CONTRIBUTING.md.
@ -0,0 +1,45 @@
|
||||
const { OpenAI } = require("openai"); |
||||
require("dotenv").config({ |
||||
path: require("os").homedir() + "/.config/fabric/.env", |
||||
}); |
||||
|
||||
let openaiClient = null; |
||||
|
||||
// Function to initialize and get the OpenAI client
|
||||
function getOpenAIClient() { |
||||
if (!process.env.OPENAI_API_KEY) { |
||||
throw new Error( |
||||
"The OPENAI_API_KEY environment variable is missing or empty." |
||||
); |
||||
} |
||||
return new OpenAI({ apiKey: process.env.OPENAI_API_KEY }); |
||||
} |
||||
|
||||
async function queryOpenAI(system, user, callback) { |
||||
const openai = getOpenAIClient(); // Ensure the client is initialized here
|
||||
const messages = [ |
||||
{ role: "system", content: system }, |
||||
{ role: "user", content: user }, |
||||
]; |
||||
try { |
||||
const stream = await openai.chat.completions.create({ |
||||
model: "gpt-4-1106-preview", // Adjust the model as necessary.
|
||||
messages: messages, |
||||
temperature: 0.0, |
||||
top_p: 1, |
||||
frequency_penalty: 0.1, |
||||
presence_penalty: 0.1, |
||||
stream: true, |
||||
}); |
||||
|
||||
for await (const chunk of stream) { |
||||
const message = chunk.choices[0]?.delta?.content || ""; |
||||
callback(message); // Process each chunk of data
|
||||
} |
||||
} catch (error) { |
||||
console.error("Error querying OpenAI:", error); |
||||
callback("Error querying OpenAI. Please try again."); |
||||
} |
||||
} |
||||
|
||||
module.exports = { queryOpenAI }; |
@ -0,0 +1,70 @@
|
||||
<!DOCTYPE html> |
||||
<html lang="en"> |
||||
<head> |
||||
<meta charset="UTF-8" /> |
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" /> |
||||
<title>Fabric</title> |
||||
<link rel="stylesheet" href="static/stylesheet/bootstrap.min.css" /> |
||||
<link rel="stylesheet" href="static/stylesheet/style.css" /> |
||||
</head> |
||||
<body> |
||||
<nav class="navbar navbar-expand-md navbar-dark fixed-top bg-dark"> |
||||
<a class="navbar-brand" href="#"> |
||||
<img |
||||
src="static/images/fabric-logo-gif.gif" |
||||
alt="Fabric Logo" |
||||
height="40" |
||||
/> |
||||
</a> |
||||
<button id="configButton" class="btn btn-outline-success my-2 my-sm-0"> |
||||
Config |
||||
</button> |
||||
<button |
||||
class="navbar-toggler" |
||||
type="button" |
||||
data-toggle="collapse" |
||||
        data-target="#navbarCollapse"
||||
aria-controls="navbarCollapse" |
||||
aria-expanded="false" |
||||
aria-label="Toggle navigation" |
||||
> |
||||
<span class="navbar-toggler-icon"></span> |
||||
</button> |
||||
<button |
||||
id="updatePatternsButton" |
||||
class="btn btn-outline-success my-2 my-sm-0" |
||||
> |
||||
Update Patterns |
||||
</button> |
||||
<div class="collapse navbar-collapse" id="navbarCollapse"></div> |
||||
<div class="m1-auto"> |
||||
<a class="navbar-brand" id="themeChanger" href="#">Dark</a> |
||||
</div> |
||||
</nav> |
||||
<main> |
||||
<div class="container" id="my-form"> |
||||
<select class="form-control" id="patternSelector"></select> |
||||
<textarea |
||||
rows="5" |
||||
class="form-control" |
||||
id="userInput" |
||||
placeholder="start typing or drag a file (.txt, .svg, .pdf and .doc are currently supported)" |
||||
></textarea> |
||||
<button class="btn btn-primary" id="submit">Submit</button> |
||||
</div> |
||||
<div id="configSection" class="container hidden"> |
||||
<input |
||||
type="text" |
||||
id="apiKeyInput" |
||||
placeholder="Enter OpenAI API Key" |
||||
class="form-control" |
||||
/> |
||||
<button id="saveApiKey" class="btn btn-primary">Save API Key</button> |
||||
</div> |
||||
<div class="container hidden" id="responseContainer"></div> |
||||
</main> |
||||
<script src="static/js/jquery-3.0.0.slim.min.js"></script> |
||||
<script src="static/js/bootstrap.min.js"></script> |
||||
<script src="static/js/index.js"></script> |
||||
</body> |
||||
</html> |
@ -0,0 +1,300 @@
|
||||
const { app, BrowserWindow, ipcMain, dialog } = require("electron"); |
||||
const pdfParse = require("pdf-parse"); |
||||
const mammoth = require("mammoth"); |
||||
const fs = require("fs"); |
||||
const path = require("path"); |
||||
const os = require("os"); |
||||
const { queryOpenAI } = require("./chatgpt.js"); |
||||
const axios = require("axios"); |
||||
const fsExtra = require("fs-extra"); |
||||
|
||||
let fetch; |
||||
import("node-fetch").then((module) => { |
||||
fetch = module.default; |
||||
}); |
||||
const unzipper = require("unzipper"); |
||||
|
||||
let win; |
||||
|
||||
function promptUserForApiKey() { |
||||
// Create a new window to prompt the user for the API key
|
||||
const promptWindow = new BrowserWindow({ |
||||
// Window configuration for the prompt
|
||||
width: 500, |
||||
height: 200, |
||||
webPreferences: { |
||||
nodeIntegration: true, |
||||
contextIsolation: false, // Consider security implications
|
||||
}, |
||||
}); |
||||
|
||||
// Handle the API key submission from the prompt window
|
||||
ipcMain.on("submit-api-key", (event, apiKey) => { |
||||
if (apiKey) { |
||||
saveApiKey(apiKey); |
||||
promptWindow.close(); |
||||
createWindow(); // Proceed to create the main window
|
||||
} else { |
||||
// Handle invalid input or user cancellation
|
||||
promptWindow.close(); |
||||
} |
||||
}); |
||||
} |
||||
|
||||
function loadApiKey() { |
||||
const configPath = path.join(os.homedir(), ".config", "fabric", ".env"); |
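  // Reads the key from the same ~/.config/fabric/.env file that the Python client uses.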
||||
if (fs.existsSync(configPath)) { |
||||
const envContents = fs.readFileSync(configPath, { encoding: "utf8" }); |
||||
const matches = envContents.match(/^OPENAI_API_KEY=(.*)$/m); |
||||
if (matches && matches[1]) { |
||||
return matches[1]; |
||||
} |
||||
} |
||||
return null; |
||||
} |
||||
|
||||
function saveApiKey(apiKey) { |
||||
const configPath = path.join(os.homedir(), ".config", "fabric"); |
||||
const envFilePath = path.join(configPath, ".env"); |
||||
|
||||
if (!fs.existsSync(configPath)) { |
||||
fs.mkdirSync(configPath, { recursive: true }); |
||||
} |
||||
|
||||
fs.writeFileSync(envFilePath, `OPENAI_API_KEY=${apiKey}`); |
||||
process.env.OPENAI_API_KEY = apiKey; // Set for current session
|
||||
} |
||||
|
||||
function ensureFabricFoldersExist() { |
||||
return new Promise(async (resolve, reject) => { |
||||
const fabricPath = path.join(os.homedir(), ".config", "fabric"); |
||||
const patternsPath = path.join(fabricPath, "patterns"); |
||||
|
||||
try { |
||||
if (!fs.existsSync(fabricPath)) { |
||||
fs.mkdirSync(fabricPath, { recursive: true }); |
||||
} |
||||
|
||||
if (!fs.existsSync(patternsPath)) { |
||||
fs.mkdirSync(patternsPath, { recursive: true }); |
||||
await downloadAndUpdatePatterns(patternsPath); |
||||
} |
||||
resolve(); // Resolve the promise once everything is set up
|
||||
} catch (error) { |
||||
console.error("Error ensuring fabric folders exist:", error); |
||||
reject(error); // Reject the promise if an error occurs
|
||||
} |
||||
}); |
||||
} |
||||
|
||||
async function downloadAndUpdatePatterns(patternsPath) { |
||||
try { |
||||
const response = await axios({ |
||||
method: "get", |
||||
url: "https://github.com/danielmiessler/fabric/archive/refs/heads/main.zip", |
||||
responseType: "arraybuffer", |
||||
}); |
||||
|
||||
const zipPath = path.join(os.tmpdir(), "fabric.zip"); |
||||
fs.writeFileSync(zipPath, response.data); |
||||
console.log("Zip file written to:", zipPath); |
||||
|
||||
const tempExtractPath = path.join(os.tmpdir(), "fabric_extracted"); |
||||
fsExtra.emptyDirSync(tempExtractPath); |
||||
|
||||
await fsExtra.remove(patternsPath); // Delete the existing patterns directory
|
||||
|
||||
await fs |
||||
.createReadStream(zipPath) |
||||
.pipe(unzipper.Extract({ path: tempExtractPath })) |
||||
.promise(); |
||||
|
||||
console.log("Extraction complete"); |
||||
|
||||
const extractedPatternsPath = path.join( |
||||
tempExtractPath, |
||||
"fabric-main", |
||||
"patterns" |
||||
); |
||||
|
||||
await fsExtra.copy(extractedPatternsPath, patternsPath); |
||||
console.log("Patterns successfully updated"); |
||||
|
||||
// Inform the renderer process that the patterns have been updated
|
||||
win.webContents.send("patterns-updated"); |
||||
} catch (error) { |
||||
console.error("Error downloading or updating patterns:", error); |
||||
} |
||||
} |
||||
|
||||
function checkApiKeyExists() { |
||||
const configPath = path.join(os.homedir(), ".config", "fabric", ".env"); |
||||
return fs.existsSync(configPath); |
||||
} |
||||
|
||||
function getPatternFolders() { |
||||
const patternsPath = path.join(os.homedir(), ".config", "fabric", "patterns"); |
||||
return fs |
||||
.readdirSync(patternsPath, { withFileTypes: true }) |
||||
.filter((dirent) => dirent.isDirectory()) |
||||
.map((dirent) => dirent.name); |
||||
} |
||||
|
||||
function getPatternContent(patternName) { |
||||
const patternPath = path.join( |
||||
os.homedir(), |
||||
".config", |
||||
"fabric", |
||||
"patterns", |
||||
patternName, |
||||
"system.md" |
||||
); |
||||
try { |
||||
return fs.readFileSync(patternPath, "utf8"); |
||||
} catch (error) { |
||||
console.error("Error reading pattern file:", error); |
||||
return ""; |
||||
} |
||||
} |
||||
|
||||
function createWindow() { |
||||
win = new BrowserWindow({ |
||||
width: 800, |
||||
height: 600, |
||||
webPreferences: { |
||||
contextIsolation: true, |
||||
nodeIntegration: false, |
||||
preload: path.join(__dirname, "preload.js"), |
||||
}, |
||||
}); |
||||
|
||||
win.loadFile("index.html"); |
||||
|
||||
win.on("closed", () => { |
||||
win = null; |
||||
}); |
||||
} |
||||
ipcMain.on("process-complex-file", (event, filePath) => { |
||||
const extension = path.extname(filePath).toLowerCase(); |
||||
let fileProcessPromise; |
||||
|
||||
if (extension === ".pdf") { |
||||
const dataBuffer = fs.readFileSync(filePath); |
||||
fileProcessPromise = pdfParse(dataBuffer).then((data) => data.text); |
||||
} else if (extension === ".docx") { |
||||
fileProcessPromise = mammoth |
||||
.extractRawText({ path: filePath }) |
||||
.then((result) => result.value) |
||||
.catch((err) => { |
||||
console.error("Error processing DOCX file:", err); |
||||
throw new Error("Error processing DOCX file."); |
||||
}); |
||||
} else { |
||||
event.reply("file-response", "Error: Unsupported file type"); |
||||
return; |
||||
} |
||||
|
||||
fileProcessPromise |
||||
.then((extractedText) => { |
||||
// Sending the extracted text back to the frontend.
|
||||
event.reply("file-response", extractedText); |
||||
}) |
||||
.catch((error) => { |
||||
// Handling any errors during file processing and sending them back to the frontend.
|
||||
event.reply("file-response", `Error processing file: ${error.message}`); |
||||
}); |
||||
}); |
||||
|
||||
ipcMain.on("start-query-openai", async (event, system, user) => { |
||||
if (system == null || user == null) { |
||||
console.error("Received null for system or user message"); |
||||
event.reply("openai-response", "Error: System or user message is null."); |
||||
return; |
||||
} |
||||
try { |
||||
await queryOpenAI(system, user, (message) => { |
||||
event.reply("openai-response", message); |
||||
}); |
||||
} catch (error) { |
||||
console.error("Error querying OpenAI:", error); |
||||
event.reply("no-api-key", "Error querying OpenAI."); |
||||
} |
||||
}); |
||||
|
||||
// Example of using ipcMain.handle for asynchronous operations
|
||||
ipcMain.handle("get-patterns", async (event) => { |
||||
try { |
||||
return getPatternFolders(); |
||||
} catch (error) { |
||||
console.error("Failed to get patterns:", error); |
||||
return []; |
||||
} |
||||
}); |
||||
|
||||
ipcMain.on("update-patterns", () => { |
||||
const patternsPath = path.join(os.homedir(), ".config", "fabric", "patterns"); |
||||
downloadAndUpdatePatterns(patternsPath); |
||||
}); |
||||
|
||||
ipcMain.handle("get-pattern-content", async (event, patternName) => { |
||||
try { |
||||
return getPatternContent(patternName); |
||||
} catch (error) { |
||||
console.error("Failed to get pattern content:", error); |
||||
return ""; |
||||
} |
||||
}); |
||||
|
||||
ipcMain.handle("save-api-key", async (event, apiKey) => { |
||||
try { |
||||
const configPath = path.join(os.homedir(), ".config", "fabric"); |
||||
if (!fs.existsSync(configPath)) { |
||||
fs.mkdirSync(configPath, { recursive: true }); |
||||
} |
||||
|
||||
const envFilePath = path.join(configPath, ".env"); |
||||
fs.writeFileSync(envFilePath, `OPENAI_API_KEY=${apiKey}`); |
||||
process.env.OPENAI_API_KEY = apiKey; |
||||
|
||||
return "API Key saved successfully."; |
||||
} catch (error) { |
||||
console.error("Error saving API key:", error); |
||||
throw new Error("Failed to save API Key."); |
||||
} |
||||
}); |
||||
|
||||
app.whenReady().then(async () => {
  try {
    const apiKey = loadApiKey();
    if (!apiKey) {
      promptUserForApiKey();
    } else {
      process.env.OPENAI_API_KEY = apiKey;
      createWindow();
    }
    await ensureFabricFoldersExist(); // Ensure fabric folders exist
    if (!win) {
      createWindow(); // Create the application window if it is not already open
    }

    // After window creation, check if the API key exists
    if (!checkApiKeyExists()) {
      console.log("API key is missing. Prompting user to input API key.");
      // Ask the renderer process to show its API key prompt
      win.webContents.send("request-api-key");
    }
  } catch (error) {
    console.error("Failed to initialize fabric folders:", error);
    // Handle initialization failure (e.g., close the app or show an error message)
  }
});
||||
|
||||
app.on("window-all-closed", () => { |
||||
if (process.platform !== "darwin") { |
||||
app.quit(); |
||||
} |
||||
}); |
||||
|
||||
app.on("activate", () => { |
||||
if (win === null) { |
||||
createWindow(); |
||||
} |
||||
}); |
@ -0,0 +1,23 @@
|
||||
{ |
||||
"name": "fabric_electron", |
||||
"version": "1.0.0", |
||||
"description": "a fabric electron app", |
||||
"main": "main.js", |
||||
"scripts": { |
||||
"start": "electron ." |
||||
}, |
||||
"author": "", |
||||
"license": "ISC", |
||||
"devDependencies": { |
||||
"dotenv": "^16.4.1", |
||||
"electron": "^28.2.2", |
||||
"openai": "^4.27.0" |
||||
}, |
||||
"dependencies": { |
||||
"axios": "^1.6.7", |
||||
"mammoth": "^1.6.0", |
||||
"node-fetch": "^2.6.7", |
||||
"pdf-parse": "^1.1.1", |
||||
"unzipper": "^0.10.14" |
||||
} |
||||
} |
@ -0,0 +1,9 @@
|
||||
const { contextBridge, ipcRenderer } = require("electron"); |
||||
|
||||
contextBridge.exposeInMainWorld("electronAPI", { |
||||
invoke: (channel, ...args) => ipcRenderer.invoke(channel, ...args), |
||||
send: (channel, ...args) => ipcRenderer.send(channel, ...args), |
||||
on: (channel, func) => { |
||||
ipcRenderer.on(channel, (event, ...args) => func(...args)); |
||||
}, |
||||
}); |
@ -0,0 +1,267 @@
|
||||
document.addEventListener("DOMContentLoaded", async function () { |
||||
const patternSelector = document.getElementById("patternSelector"); |
||||
const userInput = document.getElementById("userInput"); |
||||
const submitButton = document.getElementById("submit"); |
||||
const responseContainer = document.getElementById("responseContainer"); |
||||
const themeChanger = document.getElementById("themeChanger"); |
||||
const configButton = document.getElementById("configButton"); |
||||
const configSection = document.getElementById("configSection"); |
||||
const saveApiKeyButton = document.getElementById("saveApiKey"); |
||||
const apiKeyInput = document.getElementById("apiKeyInput"); |
||||
const originalPlaceholder = userInput.placeholder; |
||||
const updatePatternsButton = document.getElementById("updatePatternsButton"); |
||||
const copyButton = document.createElement("button"); |
||||
|
||||
window.electronAPI.on("patterns-ready", () => { |
||||
console.log("Patterns are ready. Refreshing the pattern list."); |
||||
loadPatterns(); |
||||
}); |
||||
window.electronAPI.on("request-api-key", () => { |
||||
// Show the API key input section or modal to the user
|
||||
configSection.classList.remove("hidden"); // Assuming 'configSection' is your API key input area
|
||||
}); |
||||
copyButton.textContent = "Copy"; |
||||
copyButton.id = "copyButton"; |
||||
document.addEventListener("click", function (e) { |
||||
if (e.target && e.target.id === "copyButton") { |
||||
// Your copy to clipboard function
|
||||
copyToClipboard(); |
||||
} |
||||
}); |
||||
window.electronAPI.on("no-api-key", () => { |
||||
alert("API key is missing. Please enter your OpenAI API key."); |
||||
}); |
||||
|
||||
window.electronAPI.on("patterns-updated", () => { |
||||
alert("Patterns updated. Refreshing the pattern list."); |
||||
loadPatterns(); |
||||
}); |
||||
|
||||
function htmlToPlainText(html) { |
||||
// Create a temporary div element to hold the HTML
|
||||
var tempDiv = document.createElement("div"); |
||||
tempDiv.innerHTML = html; |
||||
|
||||
// Replace <br> tags with newline characters
|
||||
tempDiv.querySelectorAll("br").forEach((br) => br.replaceWith("\n")); |
||||
|
||||
// Replace block elements like <p> and <div> with newline characters
|
||||
tempDiv.querySelectorAll("p, div").forEach((block) => { |
||||
block.prepend("\n"); // Add a newline before the block element's content
|
||||
block.replaceWith(...block.childNodes); // Replace the block element with its own contents
|
||||
}); |
||||
|
||||
// Return the text content, trimming leading and trailing newlines
|
||||
return tempDiv.textContent.trim(); |
||||
} |
||||
|
||||
async function submitQuery(userInputValue) { |
||||
userInput.value = ""; // Clear the input after submitting
|
||||
    const systemCommand = await window.electronAPI.invoke(
||||
"get-pattern-content", |
||||
patternSelector.value |
||||
); |
||||
responseContainer.innerHTML = ""; // Clear previous responses
|
||||
if (responseContainer.classList.contains("hidden")) { |
||||
console.log("contains hidden"); |
||||
responseContainer.classList.remove("hidden"); |
||||
responseContainer.appendChild(copyButton); |
||||
} |
||||
window.electronAPI.send( |
||||
"start-query-openai", |
||||
systemCommand, |
||||
userInputValue |
||||
); |
||||
} |
||||
|
||||
function copyToClipboard() { |
||||
const containerClone = responseContainer.cloneNode(true); |
||||
// Remove the copy button from the clone
|
||||
const copyButtonClone = containerClone.querySelector("#copyButton"); |
||||
if (copyButtonClone) { |
||||
copyButtonClone.parentNode.removeChild(copyButtonClone); |
||||
} |
||||
|
||||
// Convert HTML to plain text, preserving newlines
|
||||
const plainText = htmlToPlainText(containerClone.innerHTML); |
||||
|
||||
// Use a temporary textarea for copying
|
||||
const textArea = document.createElement("textarea"); |
||||
textArea.style.position = "absolute"; |
||||
textArea.style.left = "-9999px"; |
||||
textArea.setAttribute("aria-hidden", "true"); |
||||
textArea.value = plainText; |
||||
document.body.appendChild(textArea); |
||||
textArea.select(); |
||||
|
||||
try { |
||||
document.execCommand("copy"); |
||||
console.log("Text successfully copied to clipboard"); |
||||
} catch (err) { |
||||
console.error("Failed to copy text: ", err); |
||||
} |
||||
|
||||
document.body.removeChild(textArea); |
||||
} |
||||
async function loadPatterns() { |
||||
try { |
||||
const patterns = await window.electronAPI.invoke("get-patterns"); |
||||
patternSelector.innerHTML = ""; // Clear existing options first
|
||||
patterns.forEach((pattern) => { |
||||
const option = document.createElement("option"); |
||||
option.value = pattern; |
||||
option.textContent = pattern; |
||||
patternSelector.appendChild(option); |
||||
}); |
||||
} catch (error) { |
||||
console.error("Failed to load patterns:", error); |
||||
} |
||||
} |
||||
loadPatterns(); |
||||
|
||||
function fallbackCopyTextToClipboard(text) { |
||||
const textArea = document.createElement("textarea"); |
||||
textArea.value = text; |
||||
document.body.appendChild(textArea); |
||||
textArea.focus(); |
||||
textArea.select(); |
||||
|
||||
try { |
||||
const successful = document.execCommand("copy"); |
||||
const msg = successful ? "successful" : "unsuccessful"; |
||||
console.log("Fallback: Copying text command was " + msg); |
||||
} catch (err) { |
||||
console.error("Fallback: Oops, unable to copy", err); |
||||
} |
||||
|
||||
document.body.removeChild(textArea); |
||||
} |
||||
|
||||
updatePatternsButton.addEventListener("click", () => { |
||||
window.electronAPI.send("update-patterns"); |
||||
}); |
||||
|
||||
  // Patterns are loaded on startup by the loadPatterns() call above, so they
  // are not appended to the selector a second time here.
||||
|
||||
// Listen for OpenAI responses
|
||||
window.electronAPI.on("openai-response", (message) => { |
||||
const formattedMessage = message.replace(/\n/g, "<br>"); |
||||
responseContainer.innerHTML += formattedMessage; // Append new data as it arrives
|
||||
}); |
||||
|
||||
window.electronAPI.on("file-response", (message) => { |
||||
if (message.startsWith("Error")) { |
||||
alert(message); |
||||
return; |
||||
} |
||||
submitQuery(message); |
||||
}); |
||||
|
||||
// Submit button click handler
|
||||
submitButton.addEventListener("click", async () => { |
||||
const userInputValue = userInput.value; |
||||
submitQuery(userInputValue); |
||||
}); |
||||
|
||||
// Theme changer click handler
|
||||
themeChanger.addEventListener("click", function (e) { |
||||
e.preventDefault(); |
||||
document.body.classList.toggle("light-theme"); |
||||
themeChanger.innerText = |
||||
themeChanger.innerText === "Dark" ? "Light" : "Dark"; |
||||
}); |
||||
|
||||
// Config button click handler - toggles the config section visibility
|
||||
configButton.addEventListener("click", function (e) { |
||||
e.preventDefault(); |
||||
configSection.classList.toggle("hidden"); |
||||
}); |
||||
|
||||
// Save API Key button click handler
|
||||
saveApiKeyButton.addEventListener("click", () => { |
||||
const apiKey = apiKeyInput.value; |
||||
window.electronAPI |
||||
.invoke("save-api-key", apiKey) |
||||
.then(() => { |
||||
alert("API Key saved successfully."); |
||||
// Optionally hide the config section and clear the input after saving
|
||||
configSection.classList.add("hidden"); |
||||
apiKeyInput.value = ""; |
||||
}) |
||||
.catch((err) => { |
||||
console.error("Error saving API key:", err); |
||||
alert("Failed to save API Key."); |
||||
}); |
||||
}); |
||||
|
||||
// Handler for pattern selection change
|
||||
patternSelector.addEventListener("change", async () => { |
||||
const selectedPattern = patternSelector.value; |
||||
const systemCommand = await window.electronAPI.invoke( |
||||
"get-pattern-content", |
||||
selectedPattern |
||||
); |
||||
// Use systemCommand as part of the input for querying OpenAI
|
||||
}); |
||||
|
||||
// drag and drop
|
||||
userInput.addEventListener("dragover", (event) => { |
||||
event.stopPropagation(); |
||||
event.preventDefault(); |
||||
// Add some visual feedback
|
||||
userInput.classList.add("drag-over"); |
||||
userInput.placeholder = "Drop file here"; |
||||
}); |
||||
|
||||
userInput.addEventListener("dragleave", (event) => { |
||||
event.stopPropagation(); |
||||
event.preventDefault(); |
||||
// Remove visual feedback
|
||||
userInput.classList.remove("drag-over"); |
||||
userInput.placeholder = originalPlaceholder; |
||||
}); |
||||
|
||||
userInput.addEventListener("drop", (event) => { |
||||
event.stopPropagation(); |
||||
event.preventDefault(); |
||||
const file = event.dataTransfer.files[0]; |
||||
userInput.classList.remove("drag-over"); |
||||
userInput.placeholder = originalPlaceholder; |
||||
processFile(file); |
||||
}); |
||||
|
||||
function processFile(file) { |
||||
const fileType = file.type; |
||||
const reader = new FileReader(); |
||||
let content = ""; |
||||
|
||||
reader.onload = (event) => { |
||||
content = event.target.result; |
||||
userInput.value = content; |
||||
submitQuery(content); |
||||
}; |
||||
|
||||
if (fileType === "text/plain" || fileType === "image/svg+xml") { |
||||
reader.readAsText(file); |
||||
} else if ( |
||||
fileType === "application/pdf" || |
||||
fileType.match(/wordprocessingml/) |
||||
) { |
||||
// For PDF and DOCX, we need to handle them in the main process due to complexity
|
||||
window.electronAPI.send("process-complex-file", file.path); |
||||
} else { |
||||
console.error("Unsupported file type"); |
||||
} |
||||
} |
||||
}); |
@ -0,0 +1,160 @@
|
||||
body { |
||||
font-family: "Segoe UI", Arial, sans-serif; |
||||
margin: 0; |
||||
padding: 0; |
||||
background-color: #2b2b2b; |
||||
color: #e0e0e0; |
||||
} |
||||
|
||||
.container { |
||||
max-width: 90%; |
||||
margin: 50px auto; |
||||
padding: 15px; |
||||
background: #333333; |
||||
box-shadow: 0 2px 4px rgba(255, 255, 255, 0.1); |
||||
border-radius: 5px; |
||||
} |
||||
|
||||
#responseContainer { |
||||
margin-top: 15px; |
||||
border: 1px solid #444; |
||||
padding: 10px; |
||||
min-height: 100px; |
||||
background-color: #3a3a3a; |
||||
color: #e0e0e0; |
||||
} |
||||
|
||||
.btn-primary { |
||||
background-color: #007bff; |
||||
color: white; |
||||
border: none; |
||||
} |
||||
|
||||
#userInput { |
||||
margin-bottom: 10px; |
||||
background-color: #424242; /* Darker shade for textarea */ |
||||
color: #e0e0e0; /* Light text for readability */ |
||||
border: 1px solid #555; /* Adjusted border color */ |
||||
padding: 10px; /* Added padding for better text visibility */ |
||||
} |
||||
#patternSelector { |
||||
margin-bottom: 10px; |
||||
background-color: #424242; /* Darker shade for textarea */ |
||||
color: #e0e0e0; /* Light text for readability */ |
||||
border: 1px solid #555; /* Adjusted border color */ |
||||
padding: 10px; /* Added padding for better text visibility */ |
||||
height: 40px; |
||||
} |
||||
|
||||
@media (min-width: 768px) { |
||||
.container { |
||||
max-width: 80%; |
||||
} |
||||
} |
||||
|
||||
.light-theme { |
||||
background-color: #fff; |
||||
color: #333; |
||||
} |
||||
|
||||
.light-theme .container { |
||||
background: #f0f0f0; |
||||
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1); |
||||
} |
||||
|
||||
.light-theme #responseContainer, |
||||
.light-theme #userInput, |
||||
.light-theme #patternSelector { |
||||
background-color: #fff; |
||||
color: #333; |
||||
border: 1px solid #ddd; |
||||
} |
||||
|
||||
.light-theme .btn-primary { |
||||
background-color: #0066cc; |
||||
color: white; |
||||
} |
||||
|
||||
.hidden { |
||||
display: none; |
||||
} |
||||
.drag-over { |
||||
background-color: #505050; /* Slightly lighter than the regular background for visibility */ |
||||
border: 2px dashed #007bff; /* Dashed border with the primary button color for emphasis */ |
||||
box-shadow: 0 0 10px #007bff; /* Soft glow effect to highlight the area */ |
||||
color: #e0e0e0; /* Maintaining the light text color for readability */ |
||||
transition: background-color 0.3s ease, box-shadow 0.3s ease; /* Smooth transition for background and shadow changes */ |
||||
} |
||||
|
||||
.light-theme .drag-over { |
||||
background-color: #e6e6e6; /* Lighter background for light theme */ |
||||
border: 2px dashed #0066cc; /* Adjusted border color for light theme */ |
||||
box-shadow: 0 0 10px #0066cc; /* Soft glow effect for light theme */ |
||||
color: #333; /* Darker text for contrast in light theme */ |
||||
} |
||||
|
||||
/* Existing dark theme styles for reference */ |
||||
.navbar-dark.bg-dark { |
||||
background-color: #343a40 !important; |
||||
} |
||||
|
||||
/* Light theme styles */ |
||||
body.light-theme .navbar-dark.bg-dark { |
||||
background-color: #e2e6ea !important; /* Slightly darker shade for better visibility */ |
||||
color: #000 !important; /* Keep dark text color for contrast */ |
||||
} |
||||
|
||||
body.light-theme .navbar-dark .navbar-brand, |
||||
body.light-theme .navbar-dark .btn-outline-success { |
||||
color: #0056b3 !important; /* Darker color for better visibility and contrast */ |
||||
} |
||||
|
||||
body.light-theme .navbar-toggler-icon { |
||||
background-image: url("data:image/svg+xml,<svg xmlns='http://www.w3.org/2000/svg' width='30' height='30' viewBox='0 0 30 30'><path stroke='rgba(0, 0, 0, 0.75)' stroke-linecap='round' stroke-miterlimit='10' stroke-width='2' d='M4 7h22M4 15h22M4 23h22'/></svg>") !important; |
||||
/* Slightly darker stroke for the navbar-toggler-icon for better visibility */ |
||||
} |
||||
|
||||
@media (max-width: 768px) { |
||||
.navbar-brand img { |
||||
height: 20px; /* Smaller logo for smaller screens */ |
||||
} |
||||
|
||||
.navbar-dark .navbar-toggler { |
||||
padding: 0.25rem 0.5rem; /* Adjust padding for the toggle button */ |
||||
} |
||||
} |
||||
#responseContainer { |
||||
position: relative; /* Needed for absolute positioning of the child button */ |
||||
} |
||||
|
||||
#copyButton { |
||||
position: absolute; |
||||
top: 10px; /* Adjust as needed */ |
||||
right: 10px; /* Adjust as needed */ |
||||
background-color: rgba( |
||||
0, |
||||
123, |
||||
255, |
||||
0.5 |
||||
); /* Bootstrap primary color with transparency */ |
||||
color: white; |
||||
border: none; |
||||
border-radius: 5px; |
||||
padding: 5px 10px; |
||||
font-size: 0.8rem; |
||||
cursor: pointer; |
||||
transition: background-color 0.3s ease; |
||||
} |
||||
|
||||
#copyButton:hover { |
||||
background-color: rgba( |
||||
0, |
||||
123, |
||||
255, |
||||
0.8 |
||||
); /* Slightly less transparent on hover */ |
||||
} |
||||
|
||||
#copyButton:focus { |
||||
outline: none; |
||||
} |
@ -0,0 +1,39 @@
|
||||
[tool.poetry] |
||||
name = "fabric" |
||||
version = "0.1.0" |
||||
description = "Fabric client" |
||||
authors = [ |
||||
"Daniel Miessler <https://github.com/danielmiessler>", |
||||
"Jonathan Dunn <https://github.com/xssdoctor>", |
||||
"Scott Behrens <https://github.com/sbehrens>", |
||||
] |
||||
readme = "README.md" |
||||
|
||||
[tool.poetry.dependencies] |
||||
python = "^3.10" |
||||
pyyaml = "^6.0.1" |
||||
requests = "^2.31.0" |
||||
pyperclip = "^1.8.2" |
||||
python-socketio = "^5.11.0" |
||||
websocket-client = "^1.7.0" |
||||
flask = "^3.0.2" |
||||
flask-sqlalchemy = "^3.1.1" |
||||
flask-login = "^0.6.3" |
||||
flask-jwt-extended = "^4.6.0" |
||||
python-dotenv = "^1.0.1" |
||||
openai = "^1.11.0" |
||||
flask-socketio = "^5.3.6" |
||||
flask-sock = "^0.7.0" |
||||
gunicorn = "^21.2.0" |
||||
gevent = "^23.9.1" |
||||
httpx = "^0.26.0" |
||||
tqdm = "^4.66.1" |
||||
|
||||
|
||||
[build-system] |
||||
requires = ["poetry-core"] |
||||
build-backend = "poetry.core.masonry.api" |
||||
|
||||
# Exposes `fabric` as a console command after `poetry install`
||||
[tool.poetry.scripts] |
||||
fabric = 'fabric:main' |