mirror of
https://github.com/danielmiessler/fabric
synced 2024-11-10 07:10:31 +00:00
Merge pull request #43 from Gilgamesh555/cli-model-version
CLI Model - ModelList New Args
This commit is contained in:
commit
c222d7a220
@ -39,6 +39,12 @@ if __name__ == "__main__":
|
|||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--setup", help="Set up your fabric instance", action="store_true"
|
"--setup", help="Set up your fabric instance", action="store_true"
|
||||||
)
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--model", "-m", help="Select the model to use (GPT-4 by default)", default="gpt-4-turbo-preview"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--listmodels", help="List all available models", action="store_true"
|
||||||
|
)
|
||||||
|
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
home_holder = os.path.expanduser("~")
|
home_holder = os.path.expanduser("~")
|
||||||
@ -70,6 +76,9 @@ if __name__ == "__main__":
|
|||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
print("No patterns found")
|
print("No patterns found")
|
||||||
sys.exit()
|
sys.exit()
|
||||||
|
if args.listmodels:
|
||||||
|
standalone.fetch_available_models()
|
||||||
|
sys.exit()
|
||||||
if args.text is not None:
|
if args.text is not None:
|
||||||
text = args.text
|
text = args.text
|
||||||
else:
|
else:
|
||||||
|
@ -12,6 +12,7 @@ config_directory = os.path.expanduser("~/.config/fabric")
|
|||||||
env_file = os.path.join(config_directory, ".env")
|
env_file = os.path.join(config_directory, ".env")
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class Standalone:
|
class Standalone:
|
||||||
def __init__(self, args, pattern="", env_file="~/.config/fabric/.env"):
|
def __init__(self, args, pattern="", env_file="~/.config/fabric/.env"):
|
||||||
""" Initialize the class with the provided arguments and environment file.
|
""" Initialize the class with the provided arguments and environment file.
|
||||||
@ -45,6 +46,7 @@ class Standalone:
|
|||||||
self.config_pattern_directory = config_directory
|
self.config_pattern_directory = config_directory
|
||||||
self.pattern = pattern
|
self.pattern = pattern
|
||||||
self.args = args
|
self.args = args
|
||||||
|
self.model = args.model
|
||||||
|
|
||||||
def streamMessage(self, input_data: str):
|
def streamMessage(self, input_data: str):
|
||||||
""" Stream a message and handle exceptions.
|
""" Stream a message and handle exceptions.
|
||||||
@ -78,7 +80,7 @@ class Standalone:
|
|||||||
messages = [user_message]
|
messages = [user_message]
|
||||||
try:
|
try:
|
||||||
stream = self.client.chat.completions.create(
|
stream = self.client.chat.completions.create(
|
||||||
model="gpt-4-turbo-preview",
|
model=self.model,
|
||||||
messages=messages,
|
messages=messages,
|
||||||
temperature=0.0,
|
temperature=0.0,
|
||||||
top_p=1,
|
top_p=1,
|
||||||
@ -137,7 +139,7 @@ class Standalone:
|
|||||||
messages = [user_message]
|
messages = [user_message]
|
||||||
try:
|
try:
|
||||||
response = self.client.chat.completions.create(
|
response = self.client.chat.completions.create(
|
||||||
model="gpt-4-turbo-preview",
|
model=self.model,
|
||||||
messages=messages,
|
messages=messages,
|
||||||
temperature=0.0,
|
temperature=0.0,
|
||||||
top_p=1,
|
top_p=1,
|
||||||
@ -154,6 +156,25 @@ class Standalone:
|
|||||||
with open(self.args.output, "w") as f:
|
with open(self.args.output, "w") as f:
|
||||||
f.write(response.choices[0].message.content)
|
f.write(response.choices[0].message.content)
|
||||||
|
|
||||||
|
def fetch_available_models(self):
    """Fetch the list of models from the OpenAI API and print the GPT ones.

    Queries ``https://api.openai.com/v1/models`` using the API key already
    held by ``self.client``, filters the result down to model IDs starting
    with ``gpt``, and prints them sorted alphabetically — one per line.
    On a non-200 response (or an unparsable body) it prints an error
    message instead of raising, matching the CLI's best-effort style.

    Returns:
        None. Output is written to stdout.
    """
    headers = {
        "Authorization": f"Bearer { self.client.api_key }"
    }

    # timeout prevents the CLI from hanging forever if the API is unreachable
    response = requests.get(
        "https://api.openai.com/v1/models", headers=headers, timeout=10
    )

    if response.status_code == 200:
        try:
            models = response.json().get("data", [])
        except ValueError:
            # 200 with a non-JSON body — report rather than traceback
            print("Failed to fetch models: invalid response body")
            return
        # Keep only GPT-family models (ids like "gpt-4", "gpt-3.5-turbo", ...)
        gpt_models = [model for model in models
                      if model.get("id", "").startswith("gpt")]
        # Sort alphabetically by model ID for stable, readable output
        sorted_gpt_models = sorted(gpt_models, key=lambda x: x.get("id"))

        for model in sorted_gpt_models:
            print(model.get("id"))
    else:
        print(f"Failed to fetch models: HTTP {response.status_code}")
|
||||||
|
|
||||||
|
|
||||||
class Update:
|
class Update:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
@ -274,7 +295,6 @@ class Update:
|
|||||||
else:
|
else:
|
||||||
print(f"Failed to fetch directory contents due to an HTTP error: {e}")
|
print(f"Failed to fetch directory contents due to an HTTP error: {e}")
|
||||||
|
|
||||||
|
|
||||||
class Setup:
|
class Setup:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
""" Initialize the object.
|
""" Initialize the object.
|
||||||
|
Loading…
Reference in New Issue
Block a user