Browse Source

added options to set temperature, top_p, frequency_penalty, presence_penalty

pull/325/head
xssdoctor 8 months ago
parent
commit
f56cf9ff70
  1. 8
      installer/client/cli/fabric.py
  2. 20
      installer/client/cli/utils.py

8
installer/client/cli/fabric.py

@@ -39,6 +39,14 @@ def main():
parser.add_argument(
"--list", "-l", help="List available patterns", action="store_true"
)
parser.add_argument(
'--temp', help="set the temperature for the model. Default is 0", default=0, type=float)
parser.add_argument(
'--top_p', help="set the top_p for the model. Default is 1", default=1, type=float)
parser.add_argument(
'--frequency_penalty', help="set the frequency penalty for the model. Default is 0.1", default=0.1, type=float)
parser.add_argument(
'--presence_penalty', help="set the presence penalty for the model. Default is 0.1", default=0.1, type=float)
parser.add_argument(
"--update", "-u", help="Update patterns. NOTE: This will revert the default model to gpt4-turbo. please run --changeDefaultModel to once again set default model", action="store_true")
parser.add_argument("--pattern", "-p", help="The pattern (prompt) to use")

20
installer/client/cli/utils.py

@@ -87,7 +87,7 @@ class Standalone:
max_tokens=4096,
system=system,
messages=[user],
model=self.model, temperature=0.0, top_p=1.0
model=self.model, temperature=self.args.temp, top_p=self.args.top_p
) as stream:
async for text in stream.text_stream:
print(text, end="", flush=True)
@@ -104,7 +104,7 @@ class Standalone:
system=system,
messages=[user],
model=self.model,
temperature=0.0, top_p=1.0
temperature=self.args.temp, top_p=self.args.top_p
)
print(message.content[0].text)
copy = self.args.copy
@@ -162,10 +162,10 @@ class Standalone:
stream = self.client.chat.completions.create(
model=self.model,
messages=messages,
temperature=0.0,
top_p=1,
frequency_penalty=0.1,
presence_penalty=0.1,
temperature=self.args.temp,
top_p=self.args.top_p,
frequency_penalty=self.args.frequency_penalty,
presence_penalty=self.args.presence_penalty,
stream=True,
)
for chunk in stream:
@@ -247,10 +247,10 @@ class Standalone:
response = self.client.chat.completions.create(
model=self.model,
messages=messages,
temperature=0.0,
top_p=1,
frequency_penalty=0.1,
presence_penalty=0.1,
temperature=self.args.temp,
top_p=self.args.top_p,
frequency_penalty=self.args.frequency_penalty,
presence_penalty=self.args.presence_penalty,
)
print(response.choices[0].message.content)
if self.args.copy:

Loading…
Cancel
Save