Mirror of https://github.com/rsaryev/talk-codebase (synced 2024-11-16 06:15:20 +00:00)

use default gpt-3.5-turbo-0613

This commit is contained in:
parent c092d2d0d8
commit 703b98ea82
poetry.lock (generated, 1036 lines changed)
File diff suppressed because it is too large
pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "talk-codebase"
-version = "0.1.28"
+version = "0.1.29"
 description = "talk-codebase is a powerful tool for querying and analyzing codebases."
 authors = ["Saryev Rustam <rustam1997@gmail.com>"]
 readme = "README.md"
@@ -8,8 +8,8 @@ packages = [{ include = "talk_codebase" }]
 keywords = ["chatgpt", "openai", "cli"]
 
 [tool.poetry.dependencies]
-python = "^3.9"
-langchain = "^0.0.181"
+python = ">=3.8.1,<4.0"
+langchain = "^0.0.200"
 fire = "^0.5.0"
 openai = "^0.27.7"
 tiktoken = "^0.4.0"
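The Python constraint is widened from ^3.9 to >=3.8.1,<4.0. As a rough illustration only (a hypothetical check, not code shipped by talk-codebase), the new range can be expressed against the running interpreter like this:

import sys

# Hypothetical guard mirroring python = ">=3.8.1,<4.0" from pyproject.toml;
# illustrative only, not part of this commit.
if not ((3, 8, 1) <= sys.version_info[:3] < (4, 0, 0)):
    raise RuntimeError(
        f"talk-codebase 0.1.29 expects Python >=3.8.1,<4.0, got {sys.version.split()[0]}"
    )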
@@ -38,7 +38,7 @@ def configure():
     config["model_type"] = model_type
     if model_type == "openai":
         api_key = input("🤖 Enter your OpenAI API key: ")
-        model_name = input("🤖 Enter your model name (default: gpt-3.5-turbo): ")
+        model_name = input(f"🤖 Enter your model name (default: {DEFAULT_CONFIG['model_name']}): ")
         config["model_name"] = model_name if model_name else DEFAULT_CONFIG["model_name"]
         config["api_key"] = api_key
     elif model_type == "local":
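The prompt now reads its default from DEFAULT_CONFIG instead of a hard-coded string, so the displayed default and the stored fallback stay in sync. A minimal standalone sketch of that fallback behavior (an assumed reproduction, not the project's actual module):

DEFAULT_CONFIG = {"model_name": "gpt-3.5-turbo-0613"}  # new default after this commit

config = {}
model_name = input(f"🤖 Enter your model name (default: {DEFAULT_CONFIG['model_name']}): ")
# Pressing Enter leaves model_name empty, so the shared default is used;
# the prompt text can no longer drift from the value actually stored.
config["model_name"] = model_name if model_name else DEFAULT_CONFIG["model_name"]
print(config)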
@@ -16,7 +16,7 @@ DEFAULT_CONFIG = {
     "chunk_size": "500",
     "chunk_overlap": "50",
     "k": "4",
-    "model_name": "gpt-3.5-turbo",
+    "model_name": "gpt-3.5-turbo-0613",
     "model_path": "models/ggml-gpt4all-j-v1.3-groovy.bin",
     "model_type": MODEL_TYPES["OPENAI"],
 }
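The default model moves from gpt-3.5-turbo to the pinned gpt-3.5-turbo-0613 snapshot. Assuming the usual defaults-then-overrides merge pattern (an illustration, not the project's exact code), only installations that never set model_name explicitly pick up the new default:

DEFAULT_CONFIG = {
    "chunk_size": "500",
    "chunk_overlap": "50",
    "k": "4",
    "model_name": "gpt-3.5-turbo-0613",  # pinned snapshot is now the default
}

def effective_config(user_config):
    # Assumed merge order: defaults first, then user-provided keys on top.
    merged = dict(DEFAULT_CONFIG)
    merged.update(user_config)
    return merged

print(effective_config({}))                       # picks up gpt-3.5-turbo-0613
print(effective_config({"model_name": "gpt-4"}))  # an explicit user choice wins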