added genvim llm
parent 03afba17b5
commit 4292c2f215
2 changed files with 37 additions and 0 deletions
@@ -89,3 +89,6 @@ map("n", "<leader>L", ":LoremIpsum ")
 -- window management
 map("n", "<leader>sv", "<C-w>v", { desc = "Split window vertically" }) -- split window vertically
 map("n", "<leader>sh", "<C-w>s", { desc = "Split window horizontally" }) -- split window horizontally
+
+-- ─< genvim keymaps (ai enhanced, coding!) >───────────────────────────────────────────
+map({ "n", "v" }, "<leader>a", ":Gen<CR>")
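For context, the added `<leader>a` mapping opens gen.nvim's prompt menu in both normal and visual mode. A minimal sketch (not part of the commit) of the same mapping with a `desc` attached so it shows up in keymap listings, assuming the `map` helper above wraps `vim.keymap.set` like the surrounding mappings suggest:

-- sketch only: equivalent mapping with a description
vim.keymap.set({ "n", "v" }, "<leader>a", ":Gen<CR>", { desc = "gen.nvim: open prompt menu" })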
lua/pika/plugins/genvim.lua (Normal file, 34 lines)
@@ -0,0 +1,34 @@
+return {
+  {
+    "David-Kunz/gen.nvim",
+    opts = {
+      model = "mistral", -- The default model to use.
+      host = "10.0.0.4", -- The host running the Ollama service.
+      port = "11434", -- The port on which the Ollama service is listening.
+      quit_map = "q", -- set keymap to close the response window
+      retry_map = "<c-r>", -- set keymap to re-send the current prompt
+      init = function(options)
+        pcall(io.popen, "ollama serve > /dev/null 2>&1 &")
+      end,
+      -- Function to initialize Ollama
+      command = function(options)
+        local body = { model = options.model, stream = true }
+        return "curl --silent --no-buffer -X POST http://"
+          .. options.host
+          .. ":"
+          .. options.port
+          .. "/api/chat -d $body"
+      end,
+      -- The command for the Ollama service. You can use placeholders $prompt, $model and $body (shellescaped).
+      -- This can also be a command string.
+      -- The executed command must return a JSON object with { response, context }
+      -- (context property is optional).
+      -- list_models = '<omitted lua function>', -- Retrieves a list of model names
+      display_mode = "float", -- The display mode. Can be "float" or "split" or "horizontal-split".
+      show_prompt = false, -- Shows the prompt submitted to Ollama.
+      show_model = true, -- Displays which model you are using at the beginning of your chat session.
+      no_auto_close = false, -- Never closes the window automatically.
+      debug = false, -- Prints errors and the command which is run.
+    },
+  },
+}
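For illustration, a minimal standalone sketch (not part of the commit) of what the `command` function above evaluates to with these opts; gen.nvim later substitutes the `$body` placeholder with the shell-escaped JSON request body:

-- sketch only: reproducing the command string with the values from this diff
local options = { model = "mistral", host = "10.0.0.4", port = "11434" }
local cmd = "curl --silent --no-buffer -X POST http://"
  .. options.host
  .. ":"
  .. options.port
  .. "/api/chat -d $body"
print(cmd) --> curl --silent --no-buffer -X POST http://10.0.0.4:11434/api/chat -d $body

Note that the `init` callback wraps `io.popen` in `pcall` so a failed attempt to background `ollama serve` never raises; it also runs on the Neovim host, which only helps when Ollama lives on the same machine as the editor, whereas the configured host here is 10.0.0.4.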