Merge pull request #72 from fastdaima/dev

Use optional dependency group for local LLMs #54
This commit is contained in:
Saryev Rustam 2024-08-23 14:44:30 +03:00 committed by GitHub
commit a8b2e91aa3
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 26 additions and 9 deletions

View File

@ -10,8 +10,6 @@ keywords = ["chatgpt", "openai", "cli"]
[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
fire = "^0.5.0"
openai = "^0.27.7"
tiktoken = "^0.4.0"
faiss-cpu = "^1.7.4"
halo = "^0.0.31"
urllib3 = "1.26.18"
@ -19,9 +17,16 @@ gitpython = "^3.1.31"
questionary = "^1.10.0"
sentence-transformers = "^2.2.2"
unstructured = "^0.6.10"
gpt4all = "^1.0.1"
langchain = ">=0.0.223,<0.1.12"
llama-cpp-python = "0.1.68"
llama-cpp-python = { version = "^0.1.68", optional = true }
gpt4all = { version = "^1.0.1", optional = true }
openai = { version = "^0.27.7", optional = true }
tiktoken = { version = "^0.4.0", optional = true }
[tool.poetry.extras]
local = ["gpt4all", "llama-cpp-python"]
all = ["gpt4all", "llama-cpp-python", "openai", "tiktoken"]
[tool.poetry.group.dev.dependencies]
setuptools = "^68.0.0"

View File

@ -1,12 +1,21 @@
import os
openai_flag = True
try:
import openai
except:
openai_flag = False
import gpt4all
import openai
import questionary
import yaml
from talk_codebase.consts import MODEL_TYPES
config_path = os.path.join(os.path.expanduser("~"), ".talk_codebase_config.yaml")
@ -142,13 +151,16 @@ def remove_model_type():
def configure_model_type(config):
    """Prompt the user to pick a model type and persist it in the config.

    No-op when ``config`` already has a ``model_type`` entry. The "OpenAI"
    option is offered only when the optional ``openai`` package imported
    successfully (``openai_flag`` set at module import time); "Local" is
    always available.

    :param config: mutable configuration dict; ``model_type`` is written
        into it and the whole dict is saved via ``save_config``.
    """
    if config.get("model_type"):
        # Already configured — don't re-prompt.
        return
    # Local models are always selectable; OpenAI only if the optional
    # dependency is installed (see the try/except import at module top).
    choices = [{"name": "Local", "value": MODEL_TYPES["LOCAL"]}]
    if openai_flag:
        choices.append({"name": "OpenAI", "value": MODEL_TYPES["OPENAI"]})
    model_type = questionary.select(
        "🤖 Select model type:",
        choices=choices
    ).ask()
    config["model_type"] = model_type
    save_config(config)