Browse Source

Updating the readme.

pull/33/head
Daniel Miessler 1 year ago
parent
commit
c3e80f12b6
  1. 10
      README.md
  2. 54
      client/fabric
  3. 65
      client/utils.py

10
README.md

@ -22,9 +22,13 @@
Since the start of 2023 and GenAI we've seen a massive number of AI applications for accomplishing tasks. It's powerful, but **it's not easy to integrate this functionality into our lives.**
_In other words, AI doesn't have a capabilities problem—it has an **integration** problem._
```markdown
<p align="center">
<h4>_In other words, AI doesn't have a capabilities problem—it has an **integration** problem._</h4>
</p>
```
Fabric was created to address that problem by allowing everyone to leverage AI throughout our life and work.
Fabric was created to address this by enabling everyone to granularly apply AI to everyday challenges.
### Too many prompts
@ -198,6 +202,7 @@ One of the coolest parts of the project is that it's **command-line native**!
Each pattern (prompt) you see in the `/patterns` directory can be used in any AI application you use, but you can also set up your own server using the `/server` code and then call APIs directly!
Once you're set up, you can do things like:
```bash
# Take any idea from `stdin` and send it to the `/write_essay` API!
cat "An idea that coding is like speaking with rules." | write_essay
@ -213,7 +218,6 @@ Fabric is themed off of, well… _fabric_—as in…woven materials. So, think b
- The optional server-side functionality of `fabric` is called the **Mill**.
- The optional client-side scripts within `fabric` are called **Looms**.
## More Documentation
> [!IMPORTANT]\

54
client/fabric

@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/Users/daniel/Cloud/Development/fabric/client/.venv/bin/python3
from utils import Standalone, Update
import argparse
@ -11,42 +11,54 @@ script_directory = os.path.dirname(os.path.realpath(__file__))
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description='An open source framework for augmenting humans using AI.')
parser.add_argument('--text', '-t', help='Text to extract summary from')
description="An open source framework for augmenting humans using AI."
)
parser.add_argument("--text", "-t", help="Text to extract summary from")
parser.add_argument(
'--copy', '-c', help='Copy the response to the clipboard', action='store_true')
parser.add_argument('--output', '-o', help='Save the response to a file',
nargs='?', const='analyzepaper.txt', default=None)
"--copy", "-c", help="Copy the response to the clipboard", action="store_true"
)
parser.add_argument(
'--stream', '-s', help='Use this option if you are piping output to another app. The output will not be streamed', action='store_true')
"--output",
"-o",
help="Save the response to a file",
nargs="?",
const="analyzepaper.txt",
default=None,
)
parser.add_argument(
'--list', '-l', help='List available patterns', action='store_true')
"--stream",
"-s",
help="Use this option if you are piping output to another app. The output will not be streamed",
action="store_true",
)
parser.add_argument(
'--update', '-u', help='Update patterns', action='store_true')
parser.add_argument('--pattern', '-p', help='The pattern (prompt) to use')
parser.add_argument('--apikey', '-a', help='Add an OpenAI key')
"--list", "-l", help="List available patterns", action="store_true"
)
parser.add_argument("--update", "-u", help="Update patterns", action="store_true")
parser.add_argument("--pattern", "-p", help="The pattern (prompt) to use")
parser.add_argument("--apikey", "-a", help="Add an OpenAI key")
args = parser.parse_args()
home_holder = os.path.expanduser('~')
config = os.path.join(home_holder, '.config', 'fabric')
config_patterns_directory = os.path.join(config, 'patterns')
env_file = os.path.join(config, '.env')
home_holder = os.path.expanduser("~")
config = os.path.join(home_holder, ".config", "fabric")
config_patterns_directory = os.path.join(config, "patterns")
env_file = os.path.join(config, ".env")
if not os.path.exists(config):
os.makedirs(config)
if args.apikey:
with open(env_file, 'w') as f:
f.write(f'OPENAI_API_KEY={args.apikey}')
print(f'OpenAI API key set to {args.apikey}')
with open(env_file, "w") as f:
f.write(f"OPENAI_API_KEY={args.apikey}")
print(f"OpenAI API key set to {args.apikey}")
sys.exit()
if not os.path.exists(env_file):
print('No API key found. Use the --apikey option to set the key')
print("No API key found. Use the --apikey option to set the key")
sys.exit()
if not os.path.exists(config_patterns_directory):
Update()
sys.exit()
if args.update:
Update()
print('patterns updated:')
print("patterns updated:")
sys.exit()
standalone = Standalone(args, args.pattern)
if args.list:
@ -56,7 +68,7 @@ if __name__ == "__main__":
print(d)
sys.exit()
except FileNotFoundError:
print('No patterns found')
print("No patterns found")
sys.exit()
if args.text is not None:
text = args.text

65
client/utils.py

@ -6,11 +6,11 @@ import sys
current_directory = os.path.dirname(os.path.realpath(__file__))
config_directory = os.path.expanduser("~/.config/fabric")
env_file = os.path.join(config_directory, '.env')
env_file = os.path.join(config_directory, ".env")
class Standalone:
def __init__(self, args, pattern=''):
def __init__(self, args, pattern=""):
try:
with open(env_file, "r") as f:
apikey = f.read().split("=")[1]
@ -24,11 +24,11 @@ class Standalone:
def streamMessage(self, input_data: str):
wisdomFilePath = os.path.join(
config_directory, f"patterns/{self.pattern}/system.md")
config_directory, f"patterns/{self.pattern}/system.md"
)
user_message = {"role": "user", "content": f"{input_data}"}
wisdom_File = os.path.join(
current_directory, wisdomFilePath)
buffer = ''
wisdom_File = os.path.join(current_directory, wisdomFilePath)
buffer = ""
if self.pattern:
try:
with open(wisdom_File, "r") as f:
@ -36,29 +36,29 @@ class Standalone:
system_message = {"role": "system", "content": system}
messages = [system_message, user_message]
except FileNotFoundError:
print('pattern not found')
print("pattern not found")
return
else:
messages = [user_message]
try:
stream = self.client.chat.completions.create(
model="gpt-4-1106-preview",
model="gpt-4-turbo-preview",
messages=messages,
temperature=0.0,
top_p=1,
frequency_penalty=0.1,
presence_penalty=0.1,
stream=True
stream=True,
)
for chunk in stream:
if chunk.choices[0].delta.content is not None:
char = chunk.choices[0].delta.content
buffer += char
if char not in ['\n', ' ']:
print(char, end='')
elif char == ' ':
print(' ', end='') # Explicitly handle spaces
elif char == '\n':
if char not in ["\n", " "]:
print(char, end="")
elif char == " ":
print(" ", end="") # Explicitly handle spaces
elif char == "\n":
print() # Handle newlines
sys.stdout.flush()
except Exception as e:
@ -66,15 +66,15 @@ class Standalone:
if self.args.copy:
pyperclip.copy(buffer)
if self.args.output:
with open(self.args.output, 'w') as f:
with open(self.args.output, "w") as f:
f.write(buffer)
def sendMessage(self, input_data: str):
wisdomFilePath = os.path.join(
config_directory, f"patterns/{self.pattern}/system.md")
config_directory, f"patterns/{self.pattern}/system.md"
)
user_message = {"role": "user", "content": f"{input_data}"}
wisdom_File = os.path.join(
current_directory, wisdomFilePath)
wisdom_File = os.path.join(current_directory, wisdomFilePath)
if self.pattern:
try:
with open(wisdom_File, "r") as f:
@ -82,18 +82,18 @@ class Standalone:
system_message = {"role": "system", "content": system}
messages = [system_message, user_message]
except FileNotFoundError:
print('pattern not found')
print("pattern not found")
return
else:
messages = [user_message]
try:
response = self.client.chat.completions.create(
model="gpt-4-1106-preview",
model="gpt-4-turbo-preview",
messages=messages,
temperature=0.0,
top_p=1,
frequency_penalty=0.1,
presence_penalty=0.1
presence_penalty=0.1,
)
print(response.choices[0].message.content)
except Exception as e:
@ -101,7 +101,7 @@ class Standalone:
if self.args.copy:
pyperclip.copy(response.choices[0].message.content)
if self.args.output:
with open(self.args.output, 'w') as f:
with open(self.args.output, "w") as f:
f.write(response.choices[0].message.content)
@ -110,12 +110,10 @@ class Update:
# Initialize with the root API URL
self.root_api_url = "https://api.github.com/repos/danielmiessler/fabric/contents/patterns?ref=main"
self.config_directory = os.path.expanduser("~/.config/fabric")
self.pattern_directory = os.path.join(
self.config_directory, 'patterns')
self.pattern_directory = os.path.join(self.config_directory, "patterns")
# Ensure local directory exists
os.makedirs(self.pattern_directory, exist_ok=True)
self.get_github_directory_contents(
self.root_api_url, self.pattern_directory)
self.get_github_directory_contents(self.root_api_url, self.pattern_directory)
def download_file(self, url, local_path):
"""
@ -123,21 +121,22 @@ class Update:
"""
response = requests.get(url)
response.raise_for_status() # This will raise an exception for HTTP error codes
with open(local_path, 'wb') as f:
with open(local_path, "wb") as f:
f.write(response.content)
def process_item(self, item, local_dir):
"""
Process an individual item, downloading if it's a file, or processing further if it's a directory.
"""
if item['type'] == 'file':
if item["type"] == "file":
print(f"Downloading file: {item['name']} to {local_dir}")
self.download_file(item['download_url'],
os.path.join(local_dir, item['name']))
elif item['type'] == 'dir':
new_dir = os.path.join(local_dir, item['name'])
self.download_file(
item["download_url"], os.path.join(local_dir, item["name"])
)
elif item["type"] == "dir":
new_dir = os.path.join(local_dir, item["name"])
os.makedirs(new_dir, exist_ok=True)
self.get_github_directory_contents(item['url'], new_dir)
self.get_github_directory_contents(item["url"], new_dir)
def get_github_directory_contents(self, api_url, local_dir):
"""

Loading…
Cancel
Save