wip: LLM helpers: neoai/codegpt/chatgpt

master
blob42 2 months ago
parent b8417bfb82
commit ba813c1781

@@ -7,6 +7,8 @@ end
local M = {}
M.setup = function()
    vim.g["codegpt_chat_completions_url"] = "http://localai.srvlan:8080/v1/chat/completions"
    vim.g["codegpt_commands"] = {
        ["q4"] = {
            callback_type = "code_popup",

@@ -0,0 +1,107 @@
local ok, neoai = pcall(require, "neoai")
if not ok then
    vim.notify("missing module neoai", vim.log.levels.WARN)
    return
end
local config = {
    -- Below are the default options; feel free to override what you would like changed
    ui = {
        output_popup_text = "NeoAI",
        input_popup_text = "Prompt",
        width = 30, -- As a percentage, e.g. 30%
        output_popup_height = 80, -- As a percentage, e.g. 80%
        submit = "<Enter>", -- Key binding to submit the prompt
    },
    models = {
        -- {
        --     name = "openai",
        --     model = "gpt-3.5-turbo",
        --     params = nil,
        -- },
        {
            name = "openai",
            model = "dolphin-mixtral",
            params = {
                temperature = 0.2
            },
        }
    },
    register_output = {
        ["g"] = function(output)
            return output
        end,
        ["c"] = require("neoai.utils").extract_code_snippets,
    },
    inject = {
        cutoff_width = 75,
    },
    prompts = {
        context_prompt = function(context)
            return "Hey, I'd like to provide some context for future "
                .. "messages. Here is the code/text that I want to refer "
                .. "to in our upcoming conversations:\n\n"
                .. context
        end,
    },
    mappings = {
        ["select_up"] = "<C-k>",
        ["select_down"] = "<C-j>",
    },
    open_ai = {
        api_base = "http://localai.srvlan:8080/v1",
        api_key = {
            env = "OPENAI_API_KEY",
            value = nil,
            -- `get` is a function that retrieves an API key; it can be used to override the default method.
            -- get = function() ... end
            -- Here is some code for a function that retrieves an API key. You can use it with
            -- the Linux 'pass' application.
            -- get = function()
            --     local key = vim.fn.system("pass show openai/mytestkey")
            --     key = string.gsub(key, "\n", "")
            --     return key
            -- end,
        },
    },
    shortcuts = {
        {
            name = "textify",
            key = "<leader>as",
            desc = "fix text with AI",
            use_context = true,
            prompt = [[
                Please rewrite the text to make it more readable, clear,
                concise, and fix any grammatical, punctuation, or spelling
                errors
            ]],
            modes = { "v" },
            strip_function = nil,
        },
        {
            name = "gitcommit",
            key = "<leader>ag",
            desc = "generate git commit message",
            use_context = false,
            prompt = function()
                return [[
                    Using the following git diff, generate a concise and
                    clear git commit message, with a short title summary
                    that is 75 characters or less:
                ]] .. vim.fn.system("git diff --cached")
            end,
            modes = { "n" },
            strip_function = nil,
        },
    },
}
local M = {}
M.setup = function()
    neoai.setup(config)
end
return M
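
For illustration, a further entry could be appended to the `shortcuts` list above by following the same schema (`name`, `key`, `desc`, `use_context`, `prompt`, `modes`, `strip_function`). The `summarize` shortcut and its `<leader>az` mapping below are hypothetical and not part of this commit:

-- Hypothetical extra shortcut; follows the same schema as "textify" above.
{
    name = "summarize",
    key = "<leader>az", -- hypothetical mapping, not defined in this commit
    desc = "summarize selection with AI",
    use_context = true,
    prompt = [[
        Please summarize the following text in a few short bullet points.
    ]],
    modes = { "v" },
    strip_function = nil,
},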

@@ -152,17 +152,17 @@ return {
    -- AI/Deep Learning Helpers
    -- Github Copilot
    ["zbirenbaum/copilot.lua"] = {
        opt = true,
        cmd = "Copilot",
        keys = {"<leader>ghp"},
        setup = function()
            require("core.utils").load_mappings "copilot"
        end,
        config = function()
            require("custom.plugins.configs.copilot").setup()
        end,
    },
    -- ["zbirenbaum/copilot.lua"] = {
    --     opt = true,
    --     cmd = "Copilot",
    --     keys = {"<leader>ghp"},
    --     setup = function()
    --         require("core.utils").load_mappings "copilot"
    --     end,
    --     config = function()
    --         require("custom.plugins.configs.copilot").setup()
    --     end,
    -- },
    -- ["github/copilot.vim"] = {
    --     opt = true,
@@ -188,22 +188,14 @@ return {
            require("custom.plugins.configs.code-gpt").setup()
        end
    },
    ["jackMort/ChatGPT.nvim"] = {
        -- lock = true,
        opt = true,
        keys = {"<leader>gpt"},
        module_pattern = {"chatgpt*"},
        after = {"nui.nvim", "telescope.nvim"},
        setup = function()
            require("custom.plugins.configs.chat-gpt").load_api_key()
        end,
    ["Bryley/neoai.nvim"] = {
        cmd = {"NeoAI*"},
        config = function()
            require("custom.plugins.configs.chat-gpt").setup()
            require("custom.plugins.configs.neoai").setup()
        end,
        requires = {
            "MunifTanjim/nui.nvim",
            "nvim-lua/plenary.nvim",
            "nvim-telescope/telescope.nvim"
            "MunifTanjim/nui.nvim"
        }
    },
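
A usage sketch for the new entry: the `cmd = {"NeoAI*"}` pattern makes packer load the plugin the first time any `:NeoAI*` command runs, at which point `neoai.setup(config)` takes effect along with the `<leader>as` and `<leader>ag` shortcuts from the config above. The command names below assume neoai.nvim's standard interface and are not shown in this diff:

-- Usage sketch; command names are an assumption about neoai.nvim's stock commands.
-- :NeoAI                -- open the chat window (triggers the lazy load)
-- :'<,'>NeoAIContext    -- open the chat with the visual selection as context (see context_prompt above)
vim.keymap.set("n", "<leader>an", "<cmd>NeoAI<CR>", { desc = "open NeoAI chat" }) -- hypothetical extra mapping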
