mirror of https://github.com/danielmiessler/fabric
synced 2024-11-08 07:11:06 +00:00
added --changeDefaultModel to persistently change default model
This commit is contained in:
parent b84451114c
commit 2f295974e8
README.md (22 changed lines)
@@ -197,10 +197,8 @@ Once you have it all set up, here's how to use it.
 `fabric -h`
 
 ```bash
-fabric [-h] [--text TEXT] [--copy] [--agents {trip_planner,ApiKeys}]
-       [--output [OUTPUT]] [--stream] [--list] [--update]
-       [--pattern PATTERN] [--setup] [--local] [--claude]
-       [--model MODEL] [--listmodels] [--context]
+fabric [-h] [--text TEXT] [--copy] [--agents {trip_planner,ApiKeys}] [--output [OUTPUT]] [--stream] [--list] [--update] [--pattern PATTERN] [--setup] [--changeDefaultModel CHANGEDEFAULTMODEL] [--local]
+       [--claude] [--model MODEL] [--listmodels] [--context]
 
 An open source framework for augmenting humans using AI.
 
@@ -209,27 +207,23 @@ options:
   --text TEXT, -t TEXT  Text to extract summary from
   --copy, -C            Copy the response to the clipboard
   --agents {trip_planner,ApiKeys}, -a {trip_planner,ApiKeys}
-                        Use an AI agent to help you with a task. Acceptable
-                        values are 'trip_planner' or 'ApiKeys'. This option
-                        cannot be used with any other flag.
+                        Use an AI agent to help you with a task. Acceptable values are 'trip_planner' or 'ApiKeys'. This option cannot be used with any other flag.
   --output [OUTPUT], -o [OUTPUT]
                         Save the response to a file
-  --stream, -s          Use this option if you want to see the results in
-                        realtime. NOTE: You will not be able to pipe the
-                        output into another command.
+  --stream, -s          Use this option if you want to see the results in realtime. NOTE: You will not be able to pipe the output into another command.
   --list, -l            List available patterns
   --update, -u          Update patterns
   --pattern PATTERN, -p PATTERN
                         The pattern (prompt) to use
   --setup               Set up your fabric instance
+  --changeDefaultModel CHANGEDEFAULTMODEL
+                        Change the default model. Your choice will be saved in ~/.config/fabric/.env. For a list of available models, use the --listmodels flag.
   --local, -L           Use local LLM. Default is llama2
   --claude              Use Claude AI
   --model MODEL, -m MODEL
-                        Select the model to use (GPT-4 by default for chatGPT
-                        and llama2 for Ollama)
+                        Select the model to use (GPT-4 by default for chatGPT and llama2 for Ollama)
   --listmodels          List all available models
-  --context, -c         Use Context file (context.md) to add context to your
-                        pattern
+  --context, -c         Use Context file (context.md) to add context to your pattern
 ```
 
 #### Example commands
@@ -43,6 +43,8 @@ def main():
     parser.add_argument(
         "--setup", help="Set up your fabric instance", action="store_true"
     )
+    parser.add_argument('--changeDefaultModel',
+                        help="Change the default model. Your choice will be saved in ~/.config/fabric/.env. For a list of available models, use the --listmodels flag.")
     parser.add_argument(
         '--local', '-L', help="Use local LLM. Default is llama2", action="store_true")
@@ -77,6 +79,10 @@ def main():
         Update()
         Alias()
         sys.exit()
+    if args.changeDefaultModel:
+        Setup().default_model(args.changeDefaultModel)
+        print(f"Default model changed to {args.changeDefaultModel}")
+        sys.exit()
     if args.agents:
         # Handle the agents logic
         if args.agents == 'trip_planner':
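With the argument registered and the handler wired into main(), an invocation looks like this (the model name is only an illustration; run `fabric --listmodels` for the real choices):

```bash
fabric --changeDefaultModel llama2
# prints "Default model changed to llama2" and exits;
# the choice is persisted as DEFAULT_MODEL in ~/.config/fabric/.env
```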
@@ -51,21 +51,24 @@ class Standalone:
         self.args = args
         self.model = args.model
         self.claude = claude
-        if self.local:
-            if self.args.model == 'gpt-4-turbo-preview':
-                self.args.model = 'llama2'
-        if self.claude:
-            if self.args.model == 'gpt-4-turbo-preview':
-                self.model = 'claude-3-opus-20240229'
+        try:
+            self.model = os.environ["DEFAULT_MODEL"]
+        except:
+            if self.local:
+                if self.args.model == 'gpt-4-turbo-preview':
+                    self.model = 'llama2'
+            if self.claude:
+                if self.args.model == 'gpt-4-turbo-preview':
+                    self.model = 'claude-3-opus-20240229'
 
     async def localChat(self, messages):
         from ollama import AsyncClient
-        response = await AsyncClient().chat(model=self.args.model, messages=messages)
+        response = await AsyncClient().chat(model=self.model, messages=messages)
         print(response['message']['content'])
 
     async def localStream(self, messages):
         from ollama import AsyncClient
-        async for part in await AsyncClient().chat(model=self.args.model, messages=messages, stream=True):
+        async for part in await AsyncClient().chat(model=self.model, messages=messages, stream=True):
             print(part['message']['content'], end='', flush=True)
 
     async def claudeStream(self, system, user):
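Read on its own, the rewritten constructor resolves the model in a fixed order. Here is a standalone sketch of that order (the function name is mine; the values come from the diff above):

```python
import os

def resolve_model(cli_model: str, local: bool, claude: bool) -> str:
    # A DEFAULT_MODEL saved by --changeDefaultModel wins outright.
    try:
        return os.environ["DEFAULT_MODEL"]
    except KeyError:
        pass
    # Otherwise swap in a backend default, but only while the CLI value
    # is still the stock gpt-4-turbo-preview.
    model = cli_model
    if local and cli_model == 'gpt-4-turbo-preview':
        model = 'llama2'
    if claude and cli_model == 'gpt-4-turbo-preview':
        model = 'claude-3-opus-20240229'
    return model
```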
@@ -243,6 +246,8 @@ class Standalone:
             if "overloaded_error" in str(e):
                 print(
                     "Error: Fabric is working fine, but claude is overloaded. Please try again later.")
+            if "Attempted to call a sync iterator on an async stream" in str(e):
+                print("Error: There is a problem connecting fabric with your local ollama installation. Please visit https://ollama.com for installation instructions. It is possible that you have chosen the wrong model; run fabric --listmodels to see the available models and choose the right one with fabric --model <model> or fabric --changeDefaultModel. If this does not work, restart your computer (always a good idea) and try again.")
             else:
                 print(f"Error: {e}")
             print(e)
@@ -261,6 +266,7 @@ class Standalone:
             "https://api.openai.com/v1/models", headers=headers)
 
         if response.status_code == 200:
+            print("OpenAI GPT models:\n")
             models = response.json().get("data", [])
             # Filter only gpt models
             gpt_models = [model for model in models if model.get(
@@ -270,6 +276,13 @@ class Standalone:
 
             for model in sorted_gpt_models:
                 print(model.get("id"))
+            print("\nLocal Ollama models:")
+            import ollama
+            ollamaList = ollama.list()['models']
+            for model in ollamaList:
+                print(model['name'].rstrip(":latest"))
+            print("\nClaude models:")
+            print("claude-3-opus-20240229")
         else:
             print(f"Failed to fetch models: HTTP {response.status_code}")
 
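One caveat in the added Ollama listing: `str.rstrip(":latest")` strips trailing characters drawn from the set `:alest`, not the literal suffix, so a name such as `mistral:latest` comes out as `mistr`. A suffix-safe trim would look like this (a sketch, not part of the commit):

```python
def strip_latest_tag(name: str) -> str:
    # Drop the ':latest' tag only when it is actually the suffix;
    # rstrip would also eat trailing letters of the bare name.
    suffix = ":latest"
    return name[:-len(suffix)] if name.endswith(suffix) else name

print(strip_latest_tag("mistral:latest"))   # mistral
print("mistral:latest".rstrip(":latest"))   # mistr
```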
@@ -461,6 +474,33 @@ class Setup:
             with open(self.env_file, "w") as f:
                 f.write(f"CLAUDE_API_KEY={claude_key}")
 
+    def default_model(self, model):
+        """ Set the default model in the environment file.
+
+        Args:
+            model (str): The model to be set.
+        """
+
+        model = model.strip()
+        if os.path.exists(self.env_file) and model:
+            with open(self.env_file, "r") as f:
+                lines = f.readlines()
+            with open(self.env_file, "w") as f:
+                for line in lines:
+                    if "DEFAULT_MODEL" not in line:
+                        f.write(line)
+                f.write(f"DEFAULT_MODEL={model}")
+        elif model:
+            with open(self.env_file, "w") as f:
+                f.write(f"DEFAULT_MODEL={model}")
+        else:
+            with open(self.env_file, "r") as f:
+                lines = f.readlines()
+            with open(self.env_file, "w") as f:
+                for line in lines:
+                    if "DEFAULT_MODEL" not in line:
+                        f.write(line)
+
     def patterns(self):
         """ Method to update patterns and exit the system.
 
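The first branch of `default_model` rewrites the env file, dropping any previous `DEFAULT_MODEL` line and appending the new one. A minimal, self-contained mirror of that behaviour (using a scratch file and a hypothetical key line rather than the real ~/.config/fabric/.env):

```python
from pathlib import Path

# Scratch stand-in for ~/.config/fabric/.env so the demo is harmless.
env_file = Path("env.demo")
env_file.write_text(
    "OPENAI_API_KEY=sk-example\n"          # hypothetical existing entry
    "DEFAULT_MODEL=gpt-4-turbo-preview\n"  # old default to be replaced
)

# Keep every line except DEFAULT_MODEL, then append the new choice.
lines = env_file.read_text().splitlines(keepends=True)
kept = [ln for ln in lines if "DEFAULT_MODEL" not in ln]
env_file.write_text("".join(kept) + "DEFAULT_MODEL=llama2")

print(env_file.read_text())
# OPENAI_API_KEY=sk-example
# DEFAULT_MODEL=llama2
```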
@@ -486,6 +526,7 @@ class Setup:
         print("Please enter your claude API key. If you do not have one, or if you have already entered it, press enter.\n")
         claudekey = input()
         self.claude_key(claudekey.strip())
+        print("Please enter your default model. Press enter to choose the default gpt-4-turbo-preview\n")
         self.patterns()