Merge pull request #315 from ksylvan/main

Get OLLAMA models to work in Windows (both native and WSL).
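
Background for the diff below: the ollama Python client takes the server address on the client constructor rather than on each chat() call, and the remote server comes in through self.args.remoteOllamaServer, presumably a --remoteOllamaServer CLI flag. A hypothetical WSL invocation against Ollama running natively on the Windows side, with all values placeholders:

echo "summarize this" | fabric --remoteOllamaServer http://<windows-host-ip>:11434 --model llama2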
xssdoctor committed via GitHub 7 months ago (commit f93d8bb3c0)
--- a/installer/client/cli/utils.py
+++ b/installer/client/cli/utils.py
@@ -9,8 +9,8 @@ from dotenv import load_dotenv
 import zipfile
 import tempfile
 import subprocess
-import re
+import shutil
 from youtube_transcript_api import YouTubeTranscriptApi
 current_directory = os.path.dirname(os.path.realpath(__file__))
 config_directory = os.path.expanduser("~/.config/fabric")
@@ -64,7 +64,7 @@ class Standalone:
         from ollama import AsyncClient
         response = None
         if host:
-            response = await AsyncClient(host=host).chat(model=self.model, messages=messages, host=host)
+            response = await AsyncClient(host=host).chat(model=self.model, messages=messages)
         else:
             response = await AsyncClient().chat(model=self.model, messages=messages)
         print(response['message']['content'])
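
This hunk fixes the core bug: ollama's chat() does not accept a host keyword, so passing host=host there breaks calls to a remote server; the host belongs on the AsyncClient constructor alone. A minimal sketch of the corrected non-streaming call, assuming a reachable server and an already-pulled model (both placeholder values):

import asyncio
from ollama import AsyncClient

async def main():
    # The host is configured once on the client; chat() itself takes only
    # model, messages, and related options, so the extra host=host kwarg had to go.
    client = AsyncClient(host="http://localhost:11434")  # placeholder host
    response = await client.chat(
        model="llama2",  # placeholder model name
        messages=[{"role": "user", "content": "Say hello."}],
    )
    print(response["message"]["content"])

asyncio.run(main())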
@@ -75,7 +75,7 @@ class Standalone:
     async def localStream(self, messages, host=''):
         from ollama import AsyncClient
         if host:
-            async for part in await AsyncClient(host=host).chat(model=self.model, messages=messages, stream=True, host=host):
+            async for part in await AsyncClient(host=host).chat(model=self.model, messages=messages, stream=True):
                 print(part['message']['content'], end='', flush=True)
         else:
             async for part in await AsyncClient().chat(model=self.model, messages=messages, stream=True):
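
The streaming path gets the same correction; with stream=True, chat() yields partial responses that can be consumed with async for. A sketch under the same placeholder assumptions:

import asyncio
from ollama import AsyncClient

async def stream_reply():
    client = AsyncClient(host="http://localhost:11434")  # placeholder host
    # stream=True makes chat() return an async iterator of partial messages,
    # printed here as they arrive.
    async for part in await client.chat(
        model="llama2",  # placeholder model name
        messages=[{"role": "user", "content": "Say hello."}],
        stream=True,
    ):
        print(part["message"]["content"], end="", flush=True)
    print()

asyncio.run(stream_reply())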
@@ -301,6 +301,10 @@ class Standalone:
         import ollama
         try:
-            default_modelollamaList = ollama.list()['models']
+            if self.args.remoteOllamaServer:
+                client = ollama.Client(host=self.args.remoteOllamaServer)
+                default_modelollamaList = client.list()['models']
+            else:
+                default_modelollamaList = ollama.list()['models']
             for model in default_modelollamaList:
                 fullOllamaList.append(model['name'])
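
This last hunk is what makes model listing work against a remote server, for example Ollama running natively on Windows while fabric runs inside WSL. Standalone, the pattern looks roughly like this (the host value is a placeholder; from WSL it would be the Windows host's address):

import ollama

# Placeholder address; from WSL 2 this would be the Windows host,
# e.g. the nameserver IP in /etc/resolv.conf, with Ollama's default port.
remote_host = "http://172.29.96.1:11434"

client = ollama.Client(host=remote_host)
names = [model["name"] for model in client.list()["models"]]
print(names)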
