Merge pull request #315 from ksylvan/main

Get OLLAMA models to work in Windows (both native and WSL).
commit f93d8bb3c0
xssdoctor committed 2024-04-07 06:22:16 -04:00 (via GitHub)

@@ -9,8 +9,8 @@ from dotenv import load_dotenv
 import zipfile
 import tempfile
 import subprocess
-import re
+import shutil
 from youtube_transcript_api import YouTubeTranscriptApi

 current_directory = os.path.dirname(os.path.realpath(__file__))
 config_directory = os.path.expanduser("~/.config/fabric")
@@ -64,7 +64,7 @@ class Standalone:
         from ollama import AsyncClient
         response = None
         if host:
-            response = await AsyncClient(host=host).chat(model=self.model, messages=messages, host=host)
+            response = await AsyncClient(host=host).chat(model=self.model, messages=messages)
         else:
             response = await AsyncClient().chat(model=self.model, messages=messages)
         print(response['message']['content'])
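For context: in the ollama Python client, the server address belongs on the client object, not on the call itself, so the stray host=host keyword removed above would be rejected by chat(). A minimal sketch of the corrected pattern, assuming a reachable Ollama endpoint (the URL and model name below are placeholders, not part of this commit):

import asyncio
from ollama import AsyncClient

async def main():
    # The host is fixed when the client is constructed; chat() then only
    # needs the model and the message list.
    client = AsyncClient(host='http://localhost:11434')  # placeholder endpoint
    response = await client.chat(
        model='llama2',  # placeholder model name
        messages=[{'role': 'user', 'content': 'Say hello.'}],
    )
    # chat() returns a mapping with the reply under message/content,
    # matching the response['message']['content'] access in the diff.
    print(response['message']['content'])

asyncio.run(main())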
@@ -75,7 +75,7 @@ class Standalone:
     async def localStream(self, messages, host=''):
         from ollama import AsyncClient
         if host:
-            async for part in await AsyncClient(host=host).chat(model=self.model, messages=messages, stream=True, host=host):
+            async for part in await AsyncClient(host=host).chat(model=self.model, messages=messages, stream=True):
                 print(part['message']['content'], end='', flush=True)
         else:
             async for part in await AsyncClient().chat(model=self.model, messages=messages, stream=True):
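The streaming path follows the same rule: host goes to AsyncClient, and stream=True goes to chat(), which then yields chunks as an async iterator. A minimal sketch under the same placeholder assumptions (endpoint URL and model name are illustrative only):

import asyncio
from ollama import AsyncClient

async def stream_demo():
    client = AsyncClient(host='http://localhost:11434')  # placeholder endpoint
    # With stream=True, awaiting chat() produces an async iterator of
    # partial responses; each part carries a fragment of the reply.
    async for part in await client.chat(
        model='llama2',  # placeholder model name
        messages=[{'role': 'user', 'content': 'Say hello.'}],
        stream=True,
    ):
        print(part['message']['content'], end='', flush=True)
    print()

asyncio.run(stream_demo())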
@@ -301,7 +301,11 @@ class Standalone:
         import ollama
         try:
-            default_modelollamaList = ollama.list()['models']
+            if self.args.remoteOllamaServer:
+                client = ollama.Client(host=self.args.remoteOllamaServer)
+                default_modelollamaList = client.list()['models']
+            else:
+                default_modelollamaList = ollama.list()['models']
             for model in default_modelollamaList:
                 fullOllamaList.append(model['name'])
         except:
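The model-listing change applies the same idea to the synchronous API: module-level ollama.list() always queries the default local server, so reaching a remote server requires an explicit ollama.Client with the host set, as the new branch does. A minimal sketch, with the remote URL again a placeholder (e.g. a native-Windows or WSL endpoint):

import ollama

remote = 'http://localhost:11434'  # placeholder remote Ollama server URL

# A Client constructed with host queries the remote server; each entry in
# list()['models'] carries the model's name, as the diff's loop assumes.
client = ollama.Client(host=remote)
for model in client.list()['models']:
    print(model['name'])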