diff --git a/.DS_Store b/.DS_Store
new file mode 100644
index 0000000..f9dedd3
Binary files /dev/null and b/.DS_Store differ
diff --git a/lua/.DS_Store b/lua/.DS_Store
new file mode 100644
index 0000000..a2ad8f7
Binary files /dev/null and b/lua/.DS_Store differ
diff --git a/lua/shelbybark/.DS_Store b/lua/shelbybark/.DS_Store
new file mode 100644
index 0000000..ee66cf2
Binary files /dev/null and b/lua/shelbybark/.DS_Store differ
diff --git a/lua/shelbybark/plugins/codecompanion.lua b/lua/shelbybark/plugins/codecompanion.lua
new file mode 100644
index 0000000..c2890c1
--- /dev/null
+++ b/lua/shelbybark/plugins/codecompanion.lua
@@ -0,0 +1,77 @@
+return {
+	{
+		"olimorris/codecompanion.nvim",
+		dependencies = {
+			"nvim-lua/plenary.nvim",
+			"nvim-treesitter/nvim-treesitter",
+			"j-hui/fidget.nvim",
+		},
+		opts = {
+			strategies = {
+				chat = {
+					adapter = "openai",
+				},
+				inline = {
+					adapter = "openai",
+				},
+			},
+			adapters = {},
+		},
+		config = function(_, opts)
+			local api_key = ""
+			local handle = io.popen("pass openai/api_key")
+			if handle then
+				api_key = handle:read("*a"):gsub("%s+$", "")
+				handle:close()
+			end
+
+			opts.adapters.openai = function()
+				return require("codecompanion.adapters").extend("openai", {
+					env = {
+						api_key = api_key,
+					},
+				})
+			end
+			-- opts.adapters.openai = require("codecompanion.adapters").extend("openai", {
+			-- 	env = {
+			-- 		api_key = api_key,
+			-- 	},
+			-- })
+
+			require("codecompanion").setup(opts)
+
+			local progress = require("fidget.progress")
+			local handles = {}
+			local group = vim.api.nvim_create_augroup("CodeCompanionFidget", {})
+
+			vim.api.nvim_create_autocmd("User", {
+				pattern = "CodeCompanionRequestStarted",
+				group = group,
+				callback = function(e)
+					handles[e.data.id] = progress.handle.create({
+						title = "CodeCompanion",
+						message = "Thinking...",
+						lsp_client = { name = e.data.adapter.formatted_name },
+					})
+				end,
+			})
+
+			vim.api.nvim_create_autocmd("User", {
+				pattern = "CodeCompanionRequestFinished",
+				group = group,
+				callback = function(e)
+					local h = handles[e.data.id]
+					if h then
+						h.message = e.data.status == "success" and "Done" or "Failed"
+						h:finish()
+						handles[e.data.id] = nil
+					end
+				end,
+			})
+		end,
+	},
+	{
+		"MeanderingProgrammer/render-markdown.nvim",
+		ft = { "markdown", "codecompanion" },
+	},
+}
diff --git a/lua/shelbybark/plugins/avante.lua b/lua/shelbybark/unused-plugins/avante.lua
similarity index 97%
rename from lua/shelbybark/plugins/avante.lua
rename to lua/shelbybark/unused-plugins/avante.lua
index 66f85b4..cd28712 100644
--- a/lua/shelbybark/plugins/avante.lua
+++ b/lua/shelbybark/unused-plugins/avante.lua
@@ -9,7 +9,7 @@ return {
 		provider = "openai",
 		providers = {
 			openai = {
-				endpoint = "https://api.openai.com/v1",
+				endpoint = "https://api.openai.com/v1/responses",
 				model = "gpt-4o", -- your desired model (or use gpt-4o, etc.)
 				extra_request_body = {
 					timeout = 30000, -- Timeout in milliseconds, increase this for reasoning models
diff --git a/lua/shelbybark/unused-plugins/chatgpt.lua b/lua/shelbybark/unused-plugins/chatgpt.lua
new file mode 100644
index 0000000..10cc77a
--- /dev/null
+++ b/lua/shelbybark/unused-plugins/chatgpt.lua
@@ -0,0 +1,24 @@
+return {
+	"jackMort/ChatGPT.nvim",
+	event = "VeryLazy",
+	config = function()
+		require("chatgpt").setup({
+			-- this config assumes you have OPENAI_API_KEY environment variable set
+			openai_params = {
+				model = "gpt-4o",
+				frequency_penalty = 0,
+				presence_penalty = 0,
+				max_tokens = 4095,
+				temperature = 0.2,
+				top_p = 0.1,
+				n = 1,
+			},
+		})
+	end,
+	dependencies = {
+		"MunifTanjim/nui.nvim",
+		"nvim-lua/plenary.nvim",
+		"folke/trouble.nvim", -- optional
+		"nvim-telescope/telescope.nvim",
+	},
+}
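A possible refinement of the key lookup in codecompanion.lua, sketched below rather than included in the patch: the io.popen("pass openai/api_key") call silently falls back to an empty api_key when pass is missing or the entry does not exist, so the adapter would end up making unauthenticated requests. Wrapping the lookup in a small helper that warns on failure makes that case visible. The helper name read_secret and the vim.notify messages are illustrative assumptions, not anything codecompanion.nvim requires.

-- Hypothetical helper (not part of the diff above): reads a secret from pass
-- and warns instead of silently returning an empty string.
local function read_secret(entry)
	local handle = io.popen("pass " .. entry)
	if not handle then
		vim.notify("pass is not available; no API key loaded", vim.log.levels.WARN)
		return ""
	end
	local output = handle:read("*a") or ""
	handle:close()
	local key = output:gsub("%s+$", "")
	if key == "" then
		vim.notify("pass returned no value for " .. entry, vim.log.levels.WARN)
	end
	return key
end

-- Usage inside the config function, in place of the inline io.popen block:
-- local api_key = read_secret("openai/api_key")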