diff --git a/client/fabric b/client/fabric index b1fe682..82d6719 100755 --- a/client/fabric +++ b/client/fabric @@ -39,6 +39,12 @@ if __name__ == "__main__": parser.add_argument( "--setup", help="Set up your fabric instance", action="store_true" ) + parser.add_argument( + "--model", "-m", help="Select the model to use (gpt-4-turbo-preview by default)", default="gpt-4-turbo-preview" + ) + parser.add_argument( + "--listmodels", help="List all available models", action="store_true" + ) args = parser.parse_args() home_holder = os.path.expanduser("~") @@ -70,6 +76,9 @@ if __name__ == "__main__": except FileNotFoundError: print("No patterns found") sys.exit() + if args.listmodels: + standalone.fetch_available_models() + sys.exit() if args.text is not None: text = args.text else: diff --git a/client/utils.py b/client/utils.py index 893c714..c60ffc6 100644 --- a/client/utils.py +++ b/client/utils.py @@ -12,6 +12,7 @@ config_directory = os.path.expanduser("~/.config/fabric") env_file = os.path.join(config_directory, ".env") + class Standalone: def __init__(self, args, pattern="", env_file="~/.config/fabric/.env"): """ Initialize the class with the provided arguments and environment file. @@ -45,6 +46,7 @@ class Standalone: self.config_pattern_directory = config_directory self.pattern = pattern self.args = args + self.model = args.model def streamMessage(self, input_data: str): """ Stream a message and handle exceptions. 
@@ -78,7 +80,7 @@ class Standalone: messages = [user_message] try: stream = self.client.chat.completions.create( - model="gpt-4-turbo-preview", + model=self.model, messages=messages, temperature=0.0, top_p=1, @@ -137,7 +139,7 @@ class Standalone: messages = [user_message] try: response = self.client.chat.completions.create( - model="gpt-4-turbo-preview", + model=self.model, messages=messages, temperature=0.0, top_p=1, @@ -154,6 +156,25 @@ class Standalone: with open(self.args.output, "w") as f: f.write(response.choices[0].message.content) + def fetch_available_models(self): + headers = { + "Authorization": f"Bearer {self.client.api_key}" + } + + response = requests.get("https://api.openai.com/v1/models", headers=headers) + + if response.status_code == 200: + models = response.json().get("data", []) + # Filter only gpt models + gpt_models = [model for model in models if model.get("id", "").startswith("gpt")] + # Sort the models alphabetically by their ID + sorted_gpt_models = sorted(gpt_models, key=lambda x: x.get("id")) + + for model in sorted_gpt_models: + print(model.get("id")) + else: + print(f"Failed to fetch models: HTTP {response.status_code}") + class Update: def __init__(self): @@ -274,7 +295,6 @@ class Update: else: print(f"Failed to fetch directory contents due to an HTTP error: {e}") - class Setup: def __init__(self): """ Initialize the object.