Added support for remote Ollama instances via --remoteOllamaServer

Jonathan Dunn 2024-03-12 12:59:57 -04:00
parent a95aabe1ac
commit 6c50ee4845
2 changed files with 43 additions and 13 deletions


@@ -54,6 +54,8 @@ def main():
     parser.add_argument(
         "--listmodels", help="List all available models", action="store_true"
     )
+    parser.add_argument('--remoteOllamaServer',
+                        help='The URL of the remote ollama server to use. ONLY USE THIS if you are using a local ollama server in a non-default location or port')
     parser.add_argument('--context', '-c',
                         help="Use Context file (context.md) to add context to your pattern", action="store_true")
@@ -130,20 +132,34 @@ def main():
     else:
         text = standalone.get_cli_input()
     if args.stream and not args.context:
-        standalone.streamMessage(text)
+        if args.remoteOllamaServer:
+            standalone.streamMessage(text, host=args.remoteOllamaServer)
+        else:
+            standalone.streamMessage(text)
         sys.exit()
     if args.stream and args.context:
         with open(config_context, "r") as f:
             context = f.read()
-        standalone.streamMessage(text, context=context)
+        if args.remoteOllamaServer:
+            standalone.streamMessage(
+                text, context=context, host=args.remoteOllamaServer)
+        else:
+            standalone.streamMessage(text, context=context)
         sys.exit()
     elif args.context:
         with open(config_context, "r") as f:
             context = f.read()
-        standalone.sendMessage(text, context=context)
+        if args.remoteOllamaServer:
+            standalone.sendMessage(
+                text, context=context, host=args.remoteOllamaServer)
+        else:
+            standalone.sendMessage(text, context=context)
         sys.exit()
     else:
-        standalone.sendMessage(text)
+        if args.remoteOllamaServer:
+            standalone.sendMessage(text, host=args.remoteOllamaServer)
+        else:
+            standalone.sendMessage(text)
         sys.exit()
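Since streamMessage and sendMessage (changed below) default host to an empty string, the four branches above could in principle collapse into a single dispatch path. A sketch of that simplification, where dispatch is a hypothetical helper and not part of this commit:

def dispatch(standalone, text, args, context=""):
    # Hypothetical helper: resolve the remote host once instead of branching
    # at every call site. host='' keeps the stock local client, matching the
    # method defaults below.
    host = args.remoteOllamaServer or ''
    if args.stream:
        standalone.streamMessage(text, context=context, host=host)
    else:
        standalone.sendMessage(text, context=context, host=host)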


@@ -52,15 +52,23 @@ class Standalone:
         self.local = self.model.strip() in ollamaList
         self.claude = self.model.strip() in claudeList

-    async def localChat(self, messages):
+    async def localChat(self, messages, host=''):
         from ollama import AsyncClient
-        response = await AsyncClient().chat(model=self.model, messages=messages)
+        response = None
+        if host:
+            response = await AsyncClient(host=host).chat(model=self.model, messages=messages)
+        else:
+            response = await AsyncClient().chat(model=self.model, messages=messages)
         print(response['message']['content'])

-    async def localStream(self, messages):
+    async def localStream(self, messages, host=''):
         from ollama import AsyncClient
-        async for part in await AsyncClient().chat(model=self.model, messages=messages, stream=True):
-            print(part['message']['content'], end='', flush=True)
+        if host:
+            async for part in await AsyncClient(host=host).chat(model=self.model, messages=messages, stream=True):
+                print(part['message']['content'], end='', flush=True)
+        else:
+            async for part in await AsyncClient().chat(model=self.model, messages=messages, stream=True):
+                print(part['message']['content'], end='', flush=True)

     async def claudeStream(self, system, user):
         from anthropic import AsyncAnthropic
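Both methods above construct ollama's AsyncClient with an explicit host only when one was supplied. A self-contained sketch of the same pattern, assuming the ollama Python package is installed and a server is reachable; the model name and URL are placeholders:

import asyncio
from ollama import AsyncClient

async def demo(host=''):
    # Same branch as localStream: an explicit host targets a remote server,
    # while the bare AsyncClient() talks to the local default instance.
    client = AsyncClient(host=host) if host else AsyncClient()
    async for part in await client.chat(
            model='llama2',
            messages=[{'role': 'user', 'content': 'Hello'}],
            stream=True):
        print(part['message']['content'], end='', flush=True)

asyncio.run(demo(host='http://192.168.1.10:11434'))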
@@ -91,7 +99,7 @@ class Standalone:
         )
         print(message.content[0].text)

-    def streamMessage(self, input_data: str, context=""):
+    def streamMessage(self, input_data: str, context="", host=''):
         """ Stream a message and handle exceptions.

         Args:
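The docstring's Args section (truncated in this view) does not yet mention the new parameter. A possible entry, with wording suggested here rather than taken from the commit:

        host (str): Optional URL of a remote ollama server; the empty-string
            default keeps the stock local client.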
@@ -131,7 +139,10 @@ class Standalone:
         messages = [user_message]
         try:
             if self.local:
-                asyncio.run(self.localStream(messages))
+                if host:
+                    asyncio.run(self.localStream(messages, host=host))
+                else:
+                    asyncio.run(self.localStream(messages))
             elif self.claude:
                 from anthropic import AsyncAnthropic
                 asyncio.run(self.claudeStream(system, user_message))
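Note that localStream itself defaults host to '' and branches on its truthiness, so the outer if/else here is behavior-preserving but not strictly necessary. A sketch of the collapsed form, not part of the commit:

# Equivalent: localStream(messages, host='') behaves exactly like
# localStream(messages), so the flag could be forwarded unconditionally.
asyncio.run(self.localStream(messages, host=host))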
@@ -175,7 +186,7 @@ class Standalone:
         with open(self.args.output, "w") as f:
             f.write(buffer)

-    def sendMessage(self, input_data: str, context=""):
+    def sendMessage(self, input_data: str, context="", host=''):
         """ Send a message using the input data and generate a response.

         Args:
@@ -214,7 +225,10 @@ class Standalone:
         messages = [user_message]
         try:
             if self.local:
-                asyncio.run(self.localChat(messages))
+                if host:
+                    asyncio.run(self.localChat(messages, host=host))
+                else:
+                    asyncio.run(self.localChat(messages))
             elif self.claude:
                 asyncio.run(self.claudeChat(system, user_message))
             else:
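Taken together, the new flag flows from the CLI entry point down to the ollama client. A conceptual trace of one streaming invocation; the script name and URL are illustrative placeholders:

# python fabric.py --stream --remoteOllamaServer http://192.168.1.10:11434
#   -> main(): standalone.streamMessage(text, host=args.remoteOllamaServer)
#   -> streamMessage(): asyncio.run(self.localStream(messages, host=host))
#   -> localStream(): AsyncClient(host=host).chat(
#          model=self.model, messages=messages, stream=True)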