AI and Copilot Plugins
AI-assisted coding has become a standard part of modern development workflows. Neovim has best-in-class integrations for all major AI coding tools.
1. GitHub Copilot — copilot.lua (Official-quality)
The fastest Copilot integration — async, native Lua, no Node.js overhead in the editor:
lua/plugins/ai.lua
-- Lazy.nvim plugin spec: the GitHub Copilot engine plus an optional
-- nvim-cmp bridge that surfaces Copilot results as completion items.
return {
  -- Core Copilot engine
  {
    "zbirenbaum/copilot.lua",
    cmd = "Copilot",       -- also load when the :Copilot command is used
    event = "InsertEnter", -- lazy-load on first entry into insert mode
    config = function()
      require("copilot").setup({
        -- Side panel that lists several alternative suggestions (:Copilot panel).
        panel = {
          enabled = true,
          auto_refresh = false, -- refresh manually with `gr` rather than on every edit
          keymap = {
            jump_prev = "[[",
            jump_next = "]]",
            accept = "<CR>",
            refresh = "gr",
            open = "<M-CR>",
          },
          layout = {
            position = "bottom", -- top | left | right | bottom
            ratio = 0.4,         -- fraction of the screen given to the panel
          },
        },
        -- Inline ghost-text suggestions shown while typing.
        suggestion = {
          enabled = true,
          auto_trigger = true,           -- show suggestions automatically
          hide_during_completion = true, -- hide ghost text while a completion popup is open
          debounce = 75,                 -- ms of idle typing before requesting a suggestion
          keymap = {
            accept = "<M-l>",      -- Alt+l to accept
            accept_word = false,   -- word-wise accept disabled
            accept_line = "<M-L>", -- Alt+Shift+l to accept line
            next = "<M-]>",        -- Alt+] next suggestion
            prev = "<M-[>",        -- Alt+[ prev suggestion
            dismiss = "<C-]>",     -- Ctrl+] dismiss
          },
        },
        -- Per-filetype toggle map: false disables Copilot for that filetype.
        filetypes = {
          yaml = false,
          markdown = false,
          help = false,
          gitcommit = false,
          gitrebase = false,
          ["."] = false, -- disable for dotfiles by default
          -- Explicitly enable for:
          lua = true,
          python = true,
          javascript = true,
          typescript = true,
          php = true,
          bash = true,
          sh = true,
        },
        copilot_node_command = "node", -- or path to node binary (Copilot's server runs on Node.js)
        server_opts_overrides = {},    -- raw overrides passed to the underlying LSP server
      })
    end,
  },
  -- Copilot as nvim-cmp source (optional: shows as completion items)
  {
    "zbirenbaum/copilot-cmp",
    dependencies = { "zbirenbaum/copilot.lua" },
    config = function()
      require("copilot_cmp").setup()
    end,
  },
}
Authentication
# First time setup
nvim
:Copilot auth → opens browser to authenticate (copilot.lua uses "auth"; ":Copilot setup" is copilot.vim's command)
:Copilot status → verify it's enabled
:Copilot enable → enable for current session
:Copilot disable → disable for current session
Key Bindings
| Key | Action |
|---|---|
| Alt+l | Accept full suggestion |
| Alt+Shift+l | Accept one line |
| Alt+] | Next suggestion |
| Alt+[ | Previous suggestion |
| Ctrl+] | Dismiss suggestion |
2. Avante.nvim — Cursor-style AI Chat in Neovim
Avante is an AI chat panel that reads your code, understands context, and can directly apply AI-suggested changes to your buffer:
-- Lazy.nvim plugin spec for avante.nvim: a Cursor-style AI chat panel
-- that can show suggested edits as a diff and apply them to the buffer.
{
  "yetone/avante.nvim",
  event = "VeryLazy",
  version = false, -- track the default branch instead of a pinned release
  build = "make",  -- builds native modules for performance
  dependencies = {
    "nvim-treesitter/nvim-treesitter",
    "stevearc/dressing.nvim",
    "nvim-lua/plenary.nvim",
    "MunifTanjim/nui.nvim",
    "nvim-tree/nvim-web-devicons",
    "zbirenbaum/copilot.lua", -- optional: only needed when provider = "copilot"
    {
      "HakonHarnes/img-clip.nvim", -- paste images into chat
      event = "VeryLazy",
      opts = {
        default = {
          embed_image_as_base64 = false,
          prompt_for_file_name = false,
          drag_and_drop = { insert_mode = true },
          use_absolute_path = true,
        },
      },
    },
    {
      "MeanderingProgrammer/render-markdown.nvim", -- render markdown in buffer
      opts = { file_types = { "markdown", "Avante" } },
      ft = { "markdown", "Avante" },
    },
  },
  opts = {
    -- Choose AI provider:
    provider = "copilot", -- copilot | claude | openai | azure | gemini | ollama
    -- Provider configs (only the provider selected above is used at runtime)
    copilot = {
      model = "gpt-4o",
    },
    openai = {
      endpoint = "https://api.openai.com/v1",
      model = "gpt-4o",
      timeout = 30000,  -- request timeout in ms
      temperature = 0,  -- deterministic output, preferred for code edits
      max_tokens = 4096,
    },
    claude = {
      endpoint = "https://api.anthropic.com",
      model = "claude-3-7-sonnet-20250219",
      timeout = 30000,
      temperature = 0,
      max_tokens = 8000,
    },
    ollama = {
      endpoint = "http://127.0.0.1:11434", -- local Ollama server (offline use)
      model = "llama3.1",
    },
    -- Behaviour
    behaviour = {
      auto_suggestions = false, -- experimental
      auto_set_highlight_group = true,
      auto_set_keymaps = true,              -- let avante register the mappings below
      auto_apply_diff_after_generation = false, -- keep manual control over applying diffs
      support_paste_from_clipboard = false,
    },
    -- Keymaps
    mappings = {
      -- Hunk-resolution keys inside a generated diff (git-merge style).
      diff = {
        ours = "co",
        theirs = "ct",
        all_theirs = "ca",
        both = "cb",
        cursor = "cc",
        next = "]x",
        prev = "[x",
      },
      suggestion = {
        accept_all = "<M-a>",
        accept_cursor = "<M-l>",
        reject_all = "<M-r>",
      },
      jump = { next = "]]", prev = "[[" },
      submit = { normal = "<CR>", insert = "<C-s>" },
      -- NOTE(review): <leader>ae and <leader>at are also bound by the
      -- ChatGPT.nvim section of this guide — pick one plugin per key.
      ask = "<leader>aa",
      edit = "<leader>ae",
      refresh = "<leader>ar",
      focus = "<leader>af",
      toggle = {
        default = "<leader>at",
        debug = "<leader>ad",
        hint = "<leader>ah",
        suggestion = "<leader>as",
        repomap = "<leader>aR",
      },
      -- Add files to the chat context.
      files = {
        add_current = "<leader>ac",
        add_all_buffers = "<leader>aA",
      },
    },
  },
},
Avante Workflow
<leader>aa → open Avante chat panel with current file context
→ ask: "Refactor this function to be more readable"
→ Avante shows a diff of suggested changes
→ press co/ct to accept/reject hunks (like git merge)
<leader>ae → ask about selected code (visual mode)
<leader>at → toggle the Avante panel
3. Codeium — Free GitHub Copilot Alternative
Codeium is a free AI completion tool:
-- Lazy.nvim plugin spec for codeium.nvim: free Copilot-style completions,
-- surfaced through nvim-cmp rather than ghost text.
{
  "Exafunction/codeium.nvim",
  dependencies = {
    "nvim-lua/plenary.nvim",
    "hrsh7th/nvim-cmp", -- completions are delivered as a cmp source
  },
  event = "InsertEnter", -- lazy-load on first entry into insert mode
  config = function()
    require("codeium").setup({
      enable_chat = false, -- completions only; no chat window
    })
    -- Add to cmp sources:
    -- { name = "codeium", group_index = 1, priority = 100 },
  end,
},
Setup: :Codeium Auth → paste your API key from codeium.com (free account).
4. gen.nvim — Ollama Local AI
Use local LLM models with Ollama, no internet required:
-- Lazy.nvim plugin spec for gen.nvim: prompts a local Ollama model,
-- so everything runs on-machine with no API key or internet access.
{
  "David-Kunz/gen.nvim",
  cmd = "Gen", -- lazy-load on the :Gen command
  keys = {
    { "<leader>ai", "<cmd>Gen<cr>", mode = { "n", "v" }, desc = "Open AI (gen.nvim)" },
    { "<leader>ac", "<cmd>Gen Chat<cr>", desc = "AI Chat" },
    { "<leader>ag", "<cmd>Gen Generate<cr>", desc = "AI Generate" },
    { "<leader>as", "<cmd>Gen Summarize<cr>", mode = "v", desc = "AI Summarize" },
  },
  opts = {
    model = "llama3.1", -- your Ollama model
    host = "localhost", -- where the Ollama server listens
    port = "11434",
    display_mode = "float", -- float | split
    show_prompt = true,     -- echo the prompt in the result window
    show_model = true,      -- show which model answered
    no_auto_close = false,  -- close the window when generation finishes
    quit_map = "q",
    retry_map = "<c-r>",
    accept_map = "<c-cr>",
    -- Best-effort: start the Ollama server in the background if it is not
    -- already running. pcall + io.popen discards the handle and ignores
    -- failures by design (fire-and-forget).
    init = function(options)
      pcall(io.popen, "ollama serve > /dev/null 2>&1 &")
    end,
    -- Shell command used to talk to Ollama; gen.nvim substitutes $body
    -- with the JSON request payload.
    command = function(options)
      return "curl --silent --no-buffer -X POST http://" ..
        options.host .. ":" .. options.port .. "/api/chat -d $body"
    end,
    result_filetype = "markdown", -- render responses as markdown
    debug = false,
  },
},
Prerequisites
# Install Ollama
curl -fsSL https://ollama.com/install.sh | sh
# Pull a model
ollama pull llama3.1
ollama pull codellama
ollama pull deepseek-coder-v2
# Verify
ollama list
5. ChatGPT.nvim — Direct ChatGPT Integration
-- Lazy.nvim plugin spec for ChatGPT.nvim: chat sessions and code actions
-- backed directly by the OpenAI API.
{
  "jackMort/ChatGPT.nvim",
  event = "VeryLazy",
  dependencies = {
    "MunifTanjim/nui.nvim",
    "nvim-lua/plenary.nvim",
    "folke/trouble.nvim",          -- used by show_quickfixes_cmd below
    "nvim-telescope/telescope.nvim",
  },
  config = function()
    require("chatgpt").setup({
      -- The API key is read by running a shell command, so it never lives
      -- in your dotfiles in plain text.
      api_key_cmd = "cat ~/.config/openai_key", -- read key from file
      -- or: api_key_cmd = "pass openai/api-key" -- from password manager
      yank_register = "+", -- yank answers straight to the system clipboard
      extra_curl_params = {},
      -- Parameters for chat requests.
      openai_params = {
        model = "gpt-4o",
        frequency_penalty = 0,
        presence_penalty = 0,
        max_tokens = 300, -- NOTE(review): small cap; raise for longer answers
        temperature = 0,  -- deterministic output
        top_p = 1,
        n = 1,
      },
      -- Parameters for "edit with instruction" requests.
      openai_edit_params = {
        model = "gpt-4o",
        frequency_penalty = 0,
        presence_penalty = 0,
        temperature = 0,
        top_p = 1,
        n = 1,
      },
      use_openai_functions_for_edits = false,
      actions_paths = {}, -- extra custom-action definition files
      show_quickfixes_cmd = "Trouble quickfix",
      predefined_chat_gpt_prompts = "https://raw.githubusercontent.com/f/awesome-chatgpt-prompts/main/prompts.csv",
      -- In-window keymaps; same key may repeat across different sub-windows
      -- (e.g. "d" deletes a session in the session list and a message in chat).
      keymaps = {
        close = "<C-c>",
        yank_last = "<C-y>",
        yank_last_code = "<C-k>",
        scroll_up = "<C-u>",
        scroll_down = "<C-d>",
        new_session = "<C-n>",
        cycle_windows = "<Tab>",
        cycle_modes = "<C-f>",
        next_message = "<C-j>",
        prev_message = "<C-k>",
        select_session = "<Space>",
        rename_session = "r",
        delete_session = "d",
        draft_message = "<C-r>",
        edit_message = "e",
        delete_message = "d",
        toggle_settings = "<C-o>",
        toggle_sessions = "<C-p>",
        toggle_help = "<C-h>",
        toggle_message_role = "<C-r>",
        toggle_system_role_open = "<C-s>",
        stop_generating = "<C-x>",
      },
    })
    -- NOTE(review): <leader>ac, <leader>ae, <leader>at, <leader>as/<leader>aS
    -- overlap with the avante.nvim and gen.nvim sections of this guide —
    -- install only one of these plugins, or remap to avoid conflicts.
    vim.keymap.set("n", "<leader>ac", "<cmd>ChatGPT<CR>", { desc = "ChatGPT" })
    vim.keymap.set({"n","v"}, "<leader>ae", "<cmd>ChatGPTEditWithInstruction<CR>", { desc = "Edit with AI" })
    vim.keymap.set({"n","v"}, "<leader>ag", "<cmd>ChatGPTRun grammar_correction<CR>", { desc = "Grammar fix" })
    vim.keymap.set({"n","v"}, "<leader>at", "<cmd>ChatGPTRun translate<CR>", { desc = "AI Translate" })
    vim.keymap.set({"n","v"}, "<leader>aD", "<cmd>ChatGPTRun docstring<CR>", { desc = "Add docstring" })
    vim.keymap.set({"n","v"}, "<leader>aA", "<cmd>ChatGPTRun add_tests<CR>", { desc = "Add tests" })
    vim.keymap.set({"n","v"}, "<leader>ao", "<cmd>ChatGPTRun optimize_code<CR>", { desc = "Optimize code" })
    vim.keymap.set({"n","v"}, "<leader>aS", "<cmd>ChatGPTRun summarize<CR>", { desc = "Summarize" })
    vim.keymap.set({"n","v"}, "<leader>af", "<cmd>ChatGPTRun fix_bugs<CR>", { desc = "Fix bugs" })
    vim.keymap.set({"n","v"}, "<leader>ax", "<cmd>ChatGPTRun explain_code<CR>", { desc = "Explain code" })
  end,
},
AI Plugin Comparison
| Plugin | Provider | Cost | Offline | Code Apply |
|---|---|---|---|---|
| copilot.lua | GitHub Copilot | $10/mo | ❌ | Via suggestion |
| avante.nvim | Copilot/Claude/OpenAI | Varies | Via Ollama | ✅ Diff apply |
| codeium.nvim | Codeium | Free | ❌ | Via suggestion |
| gen.nvim | Ollama (local) | Free | ✅ | Manual |
| ChatGPT.nvim | OpenAI | Pay-per-use | ❌ | Via instruction |