2025-08-17 11:55:32 -04:00
parent 91bf7b150f
commit 5bac05e0de
5 changed files with 65 additions and 86 deletions


@@ -1,32 +1,28 @@
 return {
-  -- {
-  --   'milanglacier/minuet-ai.nvim',
-  --   dependencies = {
-  --     'nvim-lua/plenary.nvim',
-  --     'Saghen/blink.cmp'
-  --   },
-  --   config = function()
-  --     require('minuet').setup {
-  --       provider = 'openai_fim_compatible',
-  --       n_completions = 1, -- recommended for local models to save resources
-  --       -- I recommend beginning with a small context window size and incrementally
-  --       -- expanding it, depending on your local computing power. A context window
-  --       -- of 512 serves as a good starting point to estimate your computing
-  --       -- power. Once you have a reliable estimate of your local computing power,
-  --       -- you should adjust the context window to a larger value.
-  --       context_window = 8192,
-  --       provider_options = {
-  --         openai_fim_compatible = {
-  --           -- For Windows users, TERM may not be present in environment variables.
-  --           -- Consider using APPDATA instead.
-  --           api_key = 'TERM',
-  --           name = 'Llama.cpp',
-  --           end_point = 'http://172.16.10.19:1234/v1/completions',
-  --           model = 'qwen/qwen3-coder-30b',
-  --           optional = {
-  --             max_tokens = 512,
-  --             top_p = 0.9,
-  --           },
+  {
+    'milanglacier/minuet-ai.nvim',
+    dependencies = {
+      'nvim-lua/plenary.nvim',
+      'Saghen/blink.cmp'
+    },
+    config = function()
+      require('minuet').setup {
+        provider = 'openai_fim_compatible',
+        n_completions = 1, -- recommended for local models to save resources
+        -- I recommend beginning with a small context window size and incrementally
+        -- expanding it, depending on your local computing power. A context window
+        -- of 512 serves as a good starting point to estimate your computing
+        -- power. Once you have a reliable estimate of your local computing power,
+        -- you should adjust the context window to a larger value.
+        context_window = 512,
+        provider_options = {
+          openai_fim_compatible = {
+            -- For Windows users, TERM may not be present in environment variables.
+            -- Consider using APPDATA instead.
+            api_key = 'TERM',
+            name = 'Llama.cpp',
+            end_point = 'http://127.0.0.1:1234/v1/completions',
+            model = 'codellama-7b-instruct',
             -- template = {
             --   prompt = function(context_before_cursor, context_after_cursor, _)
             --     return '<|fim_prefix|>'
@@ -37,11 +33,11 @@ return {
             --   end,
             --   suffix = false,
             -- },
-  --         },
-  --       },
-  --     }
-  --   end,
-  -- },
+          },
+        },
+      }
+    end,
+  },
   {
     'saghen/blink.cmp',
     -- optional: provides snippets for the snippet source
@@ -82,7 +78,7 @@ return {
       -- Default list of enabled providers defined so that you can extend it
       -- elsewhere in your config, without redefining it, due to `opts_extend`
       sources = {
-        default = { 'lsp', 'path', 'snippets', 'buffer' },
+        default = { 'lsp', 'path', 'snippets', 'buffer', 'minuet' },
       },
       -- (Default) Rust fuzzy matcher for typo resistance and significantly better performance
       -- You may use a lua implementation instead by using `implementation = "lua"` or fallback to the lua implementation,
@@ -91,24 +87,23 @@ return {
       -- See the fuzzy documentation for more information
       fuzzy = { implementation = "prefer_rust_with_warning" }
     },
     opts_extend = { "sources.default" },
-    -- config = function()
-    --   require('blink-cmp').setup {
-    --     sources = {
-    --       default = { 'lsp', 'path', 'buffer', 'snippets', 'minuet' },
-    --       providers = {
-    --         minuet = {
-    --           name = 'minuet',
-    --           module = 'minuet.blink',
-    --           async = true,
-    --           -- Should match minuet.config.request_timeout * 1000,
-    --           -- since minuet.config.request_timeout is in seconds
-    --           timeout_ms = 3000,
-    --           score_offset = 50, -- Gives minuet higher priority among suggestions
-    --         },
-    --       },
-    --     },
-    --   }
-    -- end
+    config = function()
+      require('blink-cmp').setup {
+        sources = {
+          default = { 'lsp', 'path', 'buffer', 'snippets', 'minuet' },
+          providers = {
+            minuet = {
+              name = 'minuet',
+              module = 'minuet.blink',
+              async = true,
+              -- Should match minuet.config.request_timeout * 1000,
+              -- since minuet.config.request_timeout is in seconds
+              timeout_ms = 3000,
+              score_offset = 50, -- Gives minuet higher priority among suggestions
+            },
+          },
+        },
+      }
+    end
   }
 }
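
In the minuet block above, api_key = 'TERM' works because the openai_fim_compatible provider only needs the name of an environment variable that exists and is non-empty; a local Llama.cpp-style server never validates the value. Below is a minimal sketch of an alternative that does not depend on TERM (Unix) or APPDATA (Windows) being present. LLAMA_API_KEY and its dummy value are made-up names, and the sketch assumes the local server ignores the Authorization header and that minuet reads the variable from the running Neovim process.

-- Sketch: set a throwaway environment variable from inside Neovim and
-- point minuet's api_key at it, instead of borrowing TERM or APPDATA.
-- LLAMA_API_KEY is a hypothetical name; any non-empty variable should do,
-- assuming the local server does not check the key.
vim.env.LLAMA_API_KEY = 'sk-no-key-required'

require('minuet').setup {
  provider = 'openai_fim_compatible',
  provider_options = {
    openai_fim_compatible = {
      api_key = 'LLAMA_API_KEY',
      name = 'Llama.cpp',
      end_point = 'http://127.0.0.1:1234/v1/completions',
      model = 'codellama-7b-instruct',
    },
  },
}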
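
The timeout_ms = 3000 value in the blink.cmp provider is, per the comment, supposed to equal minuet.config.request_timeout * 1000. One way to keep the two from drifting is to define the timeout once and derive both values from it. This is only a sketch: it assumes request_timeout (in seconds) is passed at the top level of require('minuet').setup, as the comment implies, and in the lazy.nvim layout above the shared value would need to live somewhere both config functions can see it, for example a small module of your own.

-- Sketch: one source of truth for the completion timeout.
local request_timeout = 3 -- seconds; mirrors minuet.config.request_timeout

require('minuet').setup {
  provider = 'openai_fim_compatible',
  request_timeout = request_timeout,
  -- provider_options as in the diff above
}

require('blink-cmp').setup {
  sources = {
    default = { 'lsp', 'path', 'buffer', 'snippets', 'minuet' },
    providers = {
      minuet = {
        name = 'minuet',
        module = 'minuet.blink',
        async = true,
        timeout_ms = request_timeout * 1000, -- stays in sync with minuet
        score_offset = 50,
      },
    },
  },
}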