A neovim plugin for no frills LLM-assisted programming.
llm.nvim-showcase.mp4
Before using the plugin, set the GROQ_API_KEY and/or the OPENAI_API_KEY environment variables with your API keys.
lazy.nvim
{
"melbaldove/llm.nvim",
dependencies = { "nvim-neotest/nvim-nio" }
}

setup()
Configure the plugin. This can be omitted to use the default configuration.
require('llm').setup({
-- How long to wait for the request to start returning data.
timeout_ms = 10000,
-- Extra OpenAI-compatible services to add
services = {
other_provider = {
url = "https://example.com/other-provider/v1/chat/completions",
model = "llama3",
api_key_name = "OTHER_PROVIDER_API_KEY",
}
}
})

prompt()
Triggers the LLM assistant. You can pass an optional replace flag to replace the current selection with the LLM's response. The prompt is either the visually selected text or the file content up to the cursor if no selection is made.
create_llm_md()
Creates a new llm.md file in the current working directory, where you can write questions or prompts for the LLM.
Example Bindings
vim.keymap.set("n", "<leader>m", function() require("llm").create_llm_md() end)
-- keybinds for prompting with groq
vim.keymap.set("n", "<leader>,", function() require("llm").prompt({ replace = false, service = "groq" }) end)
vim.keymap.set("v", "<leader>,", function() require("llm").prompt({ replace = false, service = "groq" }) end)
vim.keymap.set("v", "<leader>.", function() require("llm").prompt({ replace = true, service = "groq" }) end)
-- keybinds for prompting with openai
vim.keymap.set("n", "<leader>g,", function() require("llm").prompt({ replace = false, service = "openai" }) end)
vim.keymap.set("v", "<leader>g,", function() require("llm").prompt({ replace = false, service = "openai" }) end)
vim.keymap.set("v", "<leader>g.", function() require("llm").prompt({ replace = true, service = "openai" }) end)

- ollama support
- Special thanks to yacine and his ask.md vscode plugin for inspiration!