This commit is contained in:
2025-08-01 20:40:44 -05:00
parent f2228527e5
commit 621ed0d00e
6 changed files with 102 additions and 1 deletions

BIN
.DS_Store vendored Normal file

Binary file not shown.

BIN
lua/.DS_Store vendored Normal file

Binary file not shown.

BIN
lua/shelbybark/.DS_Store vendored Normal file

Binary file not shown.

View File

@@ -0,0 +1,77 @@
return {
  {
    -- AI pair-programming chat/inline assistant, backed by the OpenAI adapter.
    "olimorris/codecompanion.nvim",
    dependencies = {
      "nvim-lua/plenary.nvim",
      "nvim-treesitter/nvim-treesitter",
      "j-hui/fidget.nvim",
    },
    opts = {
      strategies = {
        chat = {
          adapter = "openai",
        },
        inline = {
          adapter = "openai",
        },
      },
      -- Populated in config() once the API key has been retrieved.
      adapters = {},
    },
    config = function(_, opts)
      -- Read the OpenAI API key from the `pass` password store so it never
      -- lives in the dotfiles. `read` may return nil on a failed pipe read,
      -- so default to "" before stripping trailing whitespace/newline.
      local api_key = ""
      local handle = io.popen("pass openai/api_key")
      if handle then
        api_key = (handle:read("*a") or ""):gsub("%s+$", "")
        handle:close()
      end
      if api_key == "" then
        -- Surface the failure instead of leaving the adapter silently broken.
        vim.notify(
          "codecompanion: could not read OpenAI API key from `pass openai/api_key`",
          vim.log.levels.WARN
        )
      end

      -- Lazily extend the built-in openai adapter with the retrieved key.
      opts.adapters.openai = function()
        return require("codecompanion.adapters").extend("openai", {
          env = {
            api_key = api_key,
          },
        })
      end
      require("codecompanion").setup(opts)

      -- Show request progress via fidget.nvim: one spinner per request id,
      -- created on RequestStarted and resolved on RequestFinished.
      local progress = require("fidget.progress")
      local handles = {}
      local group = vim.api.nvim_create_augroup("CodeCompanionFidget", {})
      vim.api.nvim_create_autocmd("User", {
        pattern = "CodeCompanionRequestStarted",
        group = group,
        callback = function(e)
          handles[e.data.id] = progress.handle.create({
            title = "CodeCompanion",
            message = "Thinking...",
            lsp_client = { name = e.data.adapter.formatted_name },
          })
        end,
      })
      vim.api.nvim_create_autocmd("User", {
        pattern = "CodeCompanionRequestFinished",
        group = group,
        callback = function(e)
          local h = handles[e.data.id]
          if h then
            h.message = e.data.status == "success" and "Done" or "Failed"
            h:finish()
            handles[e.data.id] = nil
          end
        end,
      })
    end,
  },
  {
    -- Markdown rendering for chat buffers.
    "MeanderingProgrammer/render-markdown.nvim",
    ft = { "markdown", "codecompanion" },
  },
}

View File

@@ -9,7 +9,7 @@ return {
provider = "openai",
providers = {
openai = {
endpoint = "https://api.openai.com/v1",
endpoint = "https://api.openai.com/v1/responses",
model = "gpt-4o", -- your desired model (or use gpt-4o, etc.)
extra_request_body = {
timeout = 30000, -- Timeout in milliseconds, increase this for reasoning models

View File

@@ -0,0 +1,24 @@
-- ChatGPT.nvim: OpenAI chat UI inside Neovim, loaded lazily on VeryLazy.
return {
  "jackMort/ChatGPT.nvim",
  event = "VeryLazy",
  dependencies = {
    "MunifTanjim/nui.nvim",
    "nvim-lua/plenary.nvim",
    "folke/trouble.nvim", -- optional
    "nvim-telescope/telescope.nvim",
  },
  config = function()
    -- this config assumes you have OPENAI_API_KEY environment variable set
    local request_params = {
      model = "gpt-4o",
      frequency_penalty = 0,
      presence_penalty = 0,
      max_tokens = 4095,
      temperature = 0.2,
      top_p = 0.1,
      n = 1,
    }
    require("chatgpt").setup({ openai_params = request_params })
  end,
}