- CodeGPT
- rust-tools > rustaceanvim
master
blob42 2 weeks ago
parent 61710809ff
commit fe252663bb

@@ -288,8 +288,8 @@ M.general = { --{{{
["<leader>tf"] = { "<cmd> set foldmethod=expr<CR>|<cmd> set foldexpr=nvim_treesitter#foldexpr()<CR>",
"enable Treesitter folding" },
["<leader>ts"] = { "<cmd> TSEnable highlight <CR>", "enable treesitter" },
["<leader>tS"] = { "<cmd> TSDisable highlight <CR>", "enable treesitter" },
["<leader>ts"] = { "<cmd> TSEnable highlight <CR>", "enable treesitter highlights" },
["<leader>tS"] = { "<cmd> TSDisable highlight <CR>", "disable treesitter higlights" },
["<leader>tp"] = {
@@ -347,8 +347,8 @@ M.general = { --{{{
["[e"] = { "<cmd> cp <CR>", "quickfix previous error" },
-- loclist
["]l"] = { "<cmd> lne <CR>", "quickfix next error" },
["[l"] = { "<cmd> lp <CR>", "quickfix previous error" },
["]l"] = { "<cmd> lne <CR>", "loclist next error" },
["[l"] = { "<cmd> lp <CR>", "loclist previous error" },
-- Tabularize mappings
@@ -411,8 +411,22 @@ M.general = { --{{{
-- TODO: move to lspconfig section
-- ["<leader>lsp"] = { "<cmd> lua require('custom.plugins.configs.navigator').enable()<CR>", "lsp enable"},
["<leader>lsp"] = { "<cmd> LspStart<CR>", "lsp enable" },
["<M-s><M-s>"] = { "<cmd> LspStart<CR>", "lsp enable" },
["<leader>lsp"] = { function()
if vim.o.filetype == "rust" then
require('rustaceanvim.lsp').start()
else
vim.cmd("LspStart")
end
end, "lsp enable" },
-- ["<M-s><M-s>"] = { "<cmd> LspStart<CR>", "lsp enable" },
["<M-s><M-s>"] = { function()
if vim.o.filetype == "rust" then
require('rustaceanvim.lsp').start()
else
vim.cmd("LspStart")
end
end, "lsp enable" },
["<M-t><M-t>"] = {function()
local bufnr = vim.api.nvim_get_current_buf()
-- get all clients for buffer
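A note on this hunk: the rust check is now duplicated in the <leader>lsp and <M-s><M-s> mappings. A minimal sketch of how it could be shared through one local helper; the name start_lsp is hypothetical and not part of this commit:

local function start_lsp()
  -- rustaceanvim manages its own client for rust buffers, everything else goes through :LspStart
  if vim.o.filetype == "rust" then
    require('rustaceanvim.lsp').start()
  else
    vim.cmd("LspStart")
  end
end

-- both mappings would then reduce to:
-- ["<leader>lsp"] = { start_lsp, "lsp enable" },
-- ["<M-s><M-s>"] = { start_lsp, "lsp enable" },
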
@@ -615,10 +629,9 @@ M.dap = { -- {{{
if vim.o.filetype == "go" then
mydap.go_debug()
-- TODO!: use rustaceanvim
elseif vim.o.filetype == "rust" then
local rt = require("rust-tools")
-- make sure lsp is running ?
rt.debuggables.debuggables()
vim.cmd("RustLsp debug")
else
dap.continue()
end
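The removed rust-tools branch carried a "make sure lsp is running ?" note; a hedged sketch of such a guard before handing off to :RustLsp debug, using the stock client query (the helper name and warning text are illustrative, not part of this commit):

local function rust_debug()
  -- only defer to rustaceanvim when an LSP client is attached to the current buffer
  local clients = vim.lsp.get_active_clients({ bufnr = vim.api.nvim_get_current_buf() })
  if #clients > 0 then
    vim.cmd("RustLsp debug")
  else
    vim.notify("no LSP client attached, start it first", vim.log.levels.WARN)
  end
end
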
@@ -1059,38 +1072,44 @@ M.gitsigns = {
M.grapple = {
plugin = true,
n = {
["<leader>J"] = { "<cmd> lua require'grapple'.cycle_forward()<CR>" },
["<CR>"] = { "<cmd> lua require'grapple'.cycle_forward()<CR>" },
["<Down>"] = { "<cmd> lua require'grapple'.cycle_forward()<CR>" },
["<leader>K"] = { "<cmd> lua require'grapple'.cycle_backward()<CR>" },
["<S-Tab>"] = { "<cmd> lua require'grapple'.cycle_backward()<CR>" },
["<Up>"] = { "<cmd> lua require'grapple'.cycle_backward()<CR>" },
["<leader>T"] = { "<cmd> GrappleTag<CR>"},
["<leader>U"] = { "<cmd> GrappleUntag<CR>"},
-- ["<leader>J"] = { "<cmd> lua require'grapple'.cycle_forward()<CR>" },
-- "<cmd>Grapple cycle forward<CR>"
["<CR>"] = { function()
if vim.o.filetype == "qf" then
vim.api.nvim_feedkeys(termcodes('<CR>'), 'n', false)
else
vim.cmd("Grapple cycle forward")
end
end, "grapple cycle forward"},
["<Down>"] = { "<cmd>Grapple cycle forward scope=global <CR>" },
-- ["<leader>K"] = { "<cmd> lua require'grapple'.cycle_backward()<CR>" },
["<S-Tab>"] = { "<cmd> Grapple cycle backward<CR>" },
["<Up>"] = { "<cmd>Grapple cycle backward scope=global<CR>" },
["<leader>T"] = { "<cmd> Grapple tag<CR>"},
["<leader>U"] = { "<cmd> Grapple untag<CR>"},
["<leader>GT"] = { function()
vim.ui.input({ prompt = "tag: " }, function(input)
require("grapple").tag({scope="global"})
end)
require("grapple").tag({ scope="global"})
end, "grapple global tag" },
["<leader>N"] = { function()
vim.ui.input({ prompt = "tag: " }, function(input)
require("grapple").tag({ key = input })
require("grapple").tag({ name = input })
end)
end, "grapple tag with name" },
["<leader>GN"] = { function()
vim.ui.input({ prompt = "tag: " }, function(input)
require("grapple").tag({scope="global", key = input})
require("grapple").tag({scope="global", name = input})
end)
end, "grapple global tag with name" },
--TODO: keybind for popup select names
-- ["<leader><leader>m"] = { "<cmd> lua require'grapple'.scope_select('global', 'mappings')<CR>" },
["<leader><leader>m"] = { "<cmd> lua require'grapple'.select {key='mappings', scope='global'}<CR>" },
["<leader><leader>p"] = { "<cmd> lua require'grapple'.select {key='plugins', scope='global'}<CR>" },
["<leader><leader>b"] = { "<cmd> lua require'grapple'.select {key='bonzai', scope='global'}<CR>" },
["<leader><leader>P"] = { "<cmd> lua require'grapple'.select({key='Plugins'})<CR>" },
["<leader><leader>o"] = { "<cmd> lua require'grapple'.select {key='options', scope='global'}<CR>" },
["<leader><leader>g"] = { "<cmd> lua require'grapple'.popup_tags()<CR>" },
["<leader><leader>G"] = { "<cmd> lua require'grapple'.popup_tags('global')<CR>" },
["<leader><leader>m"] = { "<cmd> lua require'grapple'.select {name='mappings', scope='global'}<CR>" },
["<leader><leader>p"] = { "<cmd> lua require'grapple'.select {name='plugins', scope='global'}<CR>" },
["<leader><leader>b"] = { "<cmd> lua require'grapple'.select {name='bonzai', scope='global'}<CR>" },
["<leader><leader>P"] = { "<cmd> lua require'grapple'.select({name='Plugins', scope='global'})<CR>" },
["<leader><leader>o"] = { "<cmd> lua require'grapple'.select {name='options', scope='global'}<CR>" },
["<leader><leader>ar"] = { "<cmd> lua require'grapple'.select {name='aichat-roles', scope='global'}<CR>" },
["<leader><leader>g"] = { "<cmd> Grapple open_tags<CR>" },
["<leader><leader>G"] = { "<cmd> Grapple open_tags scope=global <CR>" },
}
}
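
The TODO above asks for a popup to pick named tags; a minimal sketch built on vim.ui.select, reusing the global tag names hard-coded in the mappings (the name list is only illustrative, not part of this commit):

local tag_names = { "mappings", "plugins", "bonzai", "Plugins", "options", "aichat-roles" }
vim.ui.select(tag_names, { prompt = "grapple tag: " }, function(choice)
  if choice then
    -- jump to the chosen globally scoped tag, same call as the per-name mappings above
    require("grapple").select({ name = choice, scope = "global" })
  end
end)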

@@ -1,12 +1,5 @@
local M = {}
M.load_api_key = function()
local openai_api_key_path = vim.fn.expand('$XDG_CONFIG_HOME') .. '/openai/token'
local openai_api_key = vim.fn.readfile(openai_api_key_path, '', 1)
vim.fn.setenv('OPENAI_API_KEY', openai_api_key[1])
end
local config = {
-- welcome_message = WELCOME_MESSAGE,

@@ -1,3 +1,4 @@
local ok, codegpt = pcall(require, "codegpt")
if not ok then
vim.notify("missing module codegpt", vim.log.levels.WARN)
@@ -7,13 +8,42 @@ end
local M = {}
M.setup = function()
-- vim.g["codegpt_write_response_to_err_log"] = true
require("spike.utils.openai").load_api_key("localai")
vim.g["codegpt_openai_api_key"] = vim.fn.getenv("OPENAI_API_KEY")
vim.g["codegpt_chat_completions_url"] = "http://localai.srvlan:8080/v1/chat/completions"
vim.g["codegpt_global_commands_defaults"] = {
-- model = "dolphin-mixtral",
model = "llama3-8b-inst",
max_tokens = 8192,
temperature = 0.4,
-- extra_params = {
-- presence_penalty = 0,
-- frequency_penalty= 0
-- }
}
vim.g["codegpt_commands"] = {
["q4"] = {
["question"] = {
callback_type = "text_popup",
system_message_template = "You are a helpful {{filetype}} programming assistant. Analyze the question and any provided sample code and give thourough detailed explanations.",
user_message_template = "I have a question about the following {{language}} code: ```{{filetype}}\n{{text_selection}}```\n {{command_args}}",
},
["explain"] = {
system_message_template = "You are a helpful {{filetype}} pair programming assistant. Help the user understand source code. Explain as if you were explaining to an other developer.",
user_message_template = "Explain the following {{language}} code: ```{{filetype}}\n{{text_selection}}```\n {{command_args}}",
callback_type = "text_popup",
temperature = 0.6
},
["implement"] = {
callback_type = "code_popup",
system_message_template = "You are a {{filetype}} software pair assistant AI. Answer my questions. Think step by step out loud.",
user_message_template = "I have a question about the following {{language}} code: ```{{filetype}} {{text_selection}}``` {{command_args}}"
system_message_template = "You are a {{filetype}} programming assistant. Complete or implement the feature from the provided description. Think step by step before answering. Use {{filetype}} best practicies. Only output code snippets.",
user_message_template = "I have the following specification for a {{language}} project: ```{{filetype}} {{text_selection}}``` {{command_args}}"
},
["tests"] = {
@@ -21,19 +51,21 @@ M.setup = function()
python = "Use pytest framework."
}
},
["code4"] = {
system_message_template = "You are a Programming pair Assistant AI. You are helpful with improving and optimizing source code using the best idiomatic practicies.",
model = "gpt-4",
doc = {
system_message_template = "You are a {{language}} programming assistant specialized in documenting source code.",
user_message_template = "I have the following {{language}} code:\n```{{filetype}}\n{{text_selection}}\n```\nWrite good idiomatic documentation using the target language docstring. {{language_instructions}} {{command_args}}",
},
["completion"] = {
system_message_template = "You are a Programming pair Assistant AI. You are helpful with improving and optimizing source code using the idiomatic practicies.",
user_message_template = "I have the following {{language}} code: ```{{filetype}}\n{{text_selection}}```\n{{command_args}}. {{language_instructions}} Think step by step then only return the code snippet and nothing else."
},
["docu4"] = {
["pydoc"] = {
language_instructions = {
python = "Use docstings to document the code. This project uses Sphinx. Use the google style python docstrings. Add sphinx directives if needed."
},
system_message_template = "You are a technical documentation assistant to a software developer. Help the user write clean detailed and easy to read project documentation.",
user_message_template = "Create or improve the documentation for: ```{{text_selection}}```\n. Use a professional tone. {{language_instructions}} {{command_args}}",
model = "gpt-4"
},
}
}
end
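Assuming CodeGPT's usual :Chat entry point (not shown in this diff), the custom commands above run on a visual selection, e.g. :'<,'>Chat question followed by the question text, or :'<,'>Chat doc. A couple of illustrative visual-mode mappings; the keys and descriptions are assumptions, not part of this commit:

-- hypothetical visual-mode shortcuts for the commands defined above
vim.keymap.set("v", "<leader>ce", ":Chat explain<CR>", { desc = "codegpt: explain selection" })
vim.keymap.set("v", "<leader>cd", ":Chat doc<CR>", { desc = "codegpt: document selection" })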

@@ -0,0 +1,20 @@
local M = {}
local config = {
  server = {
    cmd = {"run-rust-analyzer"},
    auto_attach = false,
    on_attach = function(client, bufnr)
      require('navigator.lspclient.mapping').setup({client=client, bufnr=bufnr}) -- setup navigator keymaps here
      require("navigator.dochighlight").documentHighlight(bufnr)
      require('navigator.codeAction').code_action_prompt(bufnr)
      -- otherwise, you can define your own commands to call navigator functions
    end
  }
}
function M.setup()
  vim.g.rustaceanvim = config
end
return M
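
The server cmd is pinned to a run-rust-analyzer wrapper script; a hedged variant that falls back to the plain binary when the wrapper is not on $PATH (the fallback behaviour is an assumption, not part of this commit):

local analyzer_cmd = { "run-rust-analyzer" }
if vim.fn.executable("run-rust-analyzer") ~= 1 then
  -- assumed fallback: use the stock rust-analyzer binary when the wrapper script is missing
  analyzer_cmd = { "rust-analyzer" }
end
-- then: config.server.cmd = analyzer_cmd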

@@ -20,6 +20,7 @@ return {
"yaml",
"toml",
"vue",
"xml"
},
highlight = {

@@ -215,6 +215,7 @@ return {
-- module_pattern = {"chatgpt*"},
-- after = {"nui.nvim", "telescope.nvim"},
-- setup = function()
-- this func moved to utils/openai
-- require("custom.plugins.configs.chat-gpt").load_api_key()
-- end,
-- config = function()
@@ -377,6 +378,7 @@ return {
},
-- User Interface / UX
["stevearc/dressing.nvim"] = {
lock = true,
config = function()
@@ -877,15 +879,21 @@ return {
}, -- }}}
-- Rust dev
["simrat39/rust-tools.nvim"] = { -- {{{
lock = false,
ft = { "rust" },
opt = true,
config = function()
require("custom.plugins.configs.rust-tools").setup()
end
}, -- }}}
-- ["simrat39/rust-tools.nvim"] = { -- {{{
-- lock = false,
-- ft = { "rust" },
-- opt = true,
-- config = function()
-- require("custom.plugins.configs.rust-tools").setup()
-- end
-- }, -- }}}
["mrcjkb/rustaceanvim"] = {
tag = "4.22.8",
setup = function()
require("custom.plugins.configs.rustaceanvim").setup()
end,
},
-- PlantUML
["aklt/plantuml-syntax"] = {

@@ -143,8 +143,9 @@ function M.start()
end
function M.stop()
if M.layer ~= nil then
M.layer:exit()
if M.layer ~= nil and
M.layer:is_active() then
M.layer:exit()
end
end

@@ -0,0 +1,10 @@
-- openai api helpers
local M = {}
M.load_api_key = function(provider)
  local openai_api_key_path = vim.fn.expand('$XDG_CONFIG_HOME') .. '/openai/token-' .. provider
  local openai_api_key = vim.fn.readfile(openai_api_key_path, '', 1)
  vim.fn.setenv('OPENAI_API_KEY', openai_api_key[1])
end
return M
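
The helper assumes the per-provider token file exists; a defensive sketch (the _safe suffix is hypothetical) that warns instead of erroring when the file is missing, keeping the same path layout. The CodeGPT setup above calls the original as require("spike.utils.openai").load_api_key("localai").

M.load_api_key_safe = function(provider)
  local path = vim.fn.expand('$XDG_CONFIG_HOME') .. '/openai/token-' .. provider
  if vim.fn.filereadable(path) ~= 1 then
    vim.notify("missing openai token file: " .. path, vim.log.levels.WARN)
    return
  end
  -- readfile with max=1 reads only the first line of the token file
  vim.fn.setenv('OPENAI_API_KEY', vim.fn.readfile(path, '', 1)[1])
end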

@@ -1 +1,2 @@
iabbrev .-
cabbrev chx !chmod +x % <CR>

@@ -7,7 +7,7 @@ local function dwm()
group = group,
pattern = '*/suckless/*/{*.c,*.h}',
callback = function()
local make_cmd = 'make && doas make install'
local make_cmd = 'make && make install'
-- if vim.env.STREAMING ~= nil then
-- make_cmd = 'make && make install'
-- end
