Browse Source

Pass the temperature and top_p parameters to the ollama backend correctly

pull/424/head^2
Henry 10 months ago
parent
commit
1147b50a0c
  1. 21
      installer/client/cli/utils.py

21
installer/client/cli/utils.py

@@ -63,12 +63,16 @@ class Standalone:
self.google = self.model in googleList self.google = self.model in googleList
async def localChat(self, messages, host=''): async def localChat(self, messages, host=''):
from ollama import AsyncClient from ollama import AsyncClient, Options
response = None response = None
if host: if host:
response = await AsyncClient(host=host).chat(model=self.model, messages=messages) response = await AsyncClient(host=host).chat(model=self.model, messages=messages,
options=Options(temperature=self.args.temp,
top_p=self.args.top_p))
else: else:
response = await AsyncClient().chat(model=self.model, messages=messages) response = await AsyncClient().chat(model=self.model, messages=messages,
options=Options(temperature=self.args.temp,
top_p=self.args.top_p))
print(response['message']['content']) print(response['message']['content'])
copy = self.args.copy copy = self.args.copy
if copy: if copy:
@@ -78,14 +82,19 @@ class Standalone:
f.write(response['message']['content']) f.write(response['message']['content'])
async def localStream(self, messages, host=''): async def localStream(self, messages, host=''):
from ollama import AsyncClient from ollama import AsyncClient, Options
buffer = "" buffer = ""
if host: if host:
async for part in await AsyncClient(host=host).chat(model=self.model, messages=messages, stream=True): # local ollama with stream and spec host
async for part in await AsyncClient(host=host).chat(model=self.model, messages=messages, stream=True,
options=Options(temperature=self.args.temp,
top_p=self.args.top_p)):
buffer += part['message']['content'] buffer += part['message']['content']
print(part['message']['content'], end='', flush=True) print(part['message']['content'], end='', flush=True)
else: else:
async for part in await AsyncClient().chat(model=self.model, messages=messages, stream=True): async for part in await AsyncClient().chat(model=self.model, messages=messages, stream=True,
options=Options(temperature=self.args.temp,
top_p=self.args.top_p)):
buffer += part['message']['content'] buffer += part['message']['content']
print(part['message']['content'], end='', flush=True) print(part['message']['content'], end='', flush=True)
if self.args.output: if self.args.output:

Loading…
Cancel
Save