Replies: 3 comments 9 replies
-
My custom openai_compatible adapter also stopped working, with a different error message:

```lua
aliyun_qwen = function()
  return require("codecompanion.adapters").extend("openai_compatible", {
    name = "aliyun_qwen",
    env = {
      url = "https://dashscope.aliyuncs.com", -- optional: default value is the Ollama url http://127.0.0.1:11434
      api_key = function()
        return os.getenv("DEEPSEEK_API_ALIYUN")
      end,
      chat_url = "/compatible-mode/v1/chat/completions", -- optional: default value, override if different
    },
    schema = {
      model = {
        default = "qwen-omni-turbo-latest",
      },
    },
  })
end,
```
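Since the adapter reads its key from the environment, one cheap check worth doing first (my own sketch, not from the adapter docs) is to confirm the variable is actually visible to Neovim; `os.getenv` returns `nil` for an unset variable, which the `api_key` function passes straight through:

```lua
-- Minimal sanity check: warn if the environment variable the adapter reads
-- is missing or empty in the environment Neovim was started from.
local key = os.getenv("DEEPSEEK_API_ALIYUN")
if not key or key == "" then
  vim.notify("DEEPSEEK_API_ALIYUN is not set; the aliyun_qwen adapter will fail", vim.log.levels.WARN)
end
```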
-
Even my custom DeepSeek SiliconFlow adapter is not working inline (screen recording: 2025-02-20.12-09-06.mp4).
Here is my full config:

````lua
local code = require("codecompanion")
code.setup({
  adapters = {
    deepseek = function()
      return require("codecompanion.adapters").extend("deepseek", {
        name = "deepseek",
        env = {
          api_key = function()
            return os.getenv("DEEPSEEK_API_KEY")
          end,
        },
        schema = {
          model = {
            default = "deepseek-coder",
          },
        },
      })
    end,
    siliconflow_r1 = function()
      return require("codecompanion.adapters").extend("deepseek", {
        name = "siliconflow_r1",
        url = "https://api.siliconflow.cn/v1/chat/completions",
        env = {
          api_key = function()
            return os.getenv("DEEPSEEK_API_KEY_S")
          end,
        },
        schema = {
          model = {
            default = "deepseek-ai/DeepSeek-R1",
            choices = {
              ["deepseek-ai/DeepSeek-R1"] = { opts = { can_reason = true } },
              "deepseek-ai/DeepSeek-V3",
            },
          },
        },
      })
    end,
    siliconflow_v3 = function()
      return require("codecompanion.adapters").extend("deepseek", {
        name = "siliconflow_v3",
        url = "https://api.siliconflow.cn/v1/chat/completions",
        env = {
          api_key = function()
            return os.getenv("DEEPSEEK_API_KEY_S")
          end,
        },
        schema = {
          model = {
            default = "deepseek-ai/DeepSeek-V3",
            choices = {
              "deepseek-ai/DeepSeek-V3",
              ["deepseek-ai/DeepSeek-R1"] = { opts = { can_reason = true } },
            },
          },
        },
      })
    end,
    aliyun_deepseek = function()
      return require("codecompanion.adapters").extend("deepseek", {
        name = "aliyun_deepseek",
        url = "https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions",
        env = {
          api_key = function()
            return os.getenv("DEEPSEEK_API_ALIYUN")
          end,
        },
        schema = {
          model = {
            default = "deepseek-r1",
            choices = {
              ["deepseek-r1"] = { opts = { can_reason = true } },
            },
          },
        },
      })
    end,
    -- Aliyun Qwen
    aliyun_qwen = function()
      return require("codecompanion.adapters").extend("openai_compatible", {
        name = "aliyun_qwen",
        env = {
          url = "https://dashscope.aliyuncs.com", -- optional: default value is the Ollama url http://127.0.0.1:11434
          api_key = function()
            return os.getenv("DEEPSEEK_API_ALIYUN")
          end,
          chat_url = "/compatible-mode/v1/chat/completions", -- optional: default value, override if different
        },
        schema = {
          model = {
            default = "qwen-omni-turbo-latest",
          },
        },
      })
    end,
    opts = {
      show_defaults = false,
    },
  },
  strategies = {
    chat = { adapter = "siliconflow_r1" },
    inline = { adapter = "siliconflow_v3" },
  },
  opts = {
    language = "Chinese",
  },
  prompt_library = {
    -- https://github.com/olimorris/codecompanion.nvim/blob/a0a82a50f5e607187e4b978ef83c804b053dc455/lua/codecompanion/config.lua#L556
    ["DeepSeek Explain In Chinese"] = {
      strategy = "chat",
      description = "中文解释代码", -- "Explain code in Chinese"
      opts = {
        index = 5,
        is_default = true,
        is_slash_cmd = false,
        modes = { "v" },
        short_name = "explain in chinese",
        auto_submit = true,
        user_prompt = false,
        stop_context_insertion = true,
        adapter = {
          name = "aliyun_deepseek",
          model = "deepseek-r1",
        },
      },
      prompts = {
        {
          role = "system",
          -- (In Chinese) "When asked to explain code, follow these steps:
          -- 1. identify the programming language; 2. describe the code's purpose,
          -- citing core concepts of that language; 3. explain each function or
          -- significant block, including parameters and return values;
          -- 4. highlight any specific functions or methods used and what they do;
          -- 5. where applicable, explain how the code fits into a larger application."
          content = [[当被要求解释代码时,请遵循以下步骤:
1. 识别编程语言。
2. 描述代码的目的,并引用该编程语言的核心概念。
3. 解释每个函数或重要的代码块,包括参数和返回值。
4. 突出说明使用的任何特定函数或方法及其作用。
5. 如果适用,提供该代码如何融入更大应用程序的上下文。]],
          opts = {
            visible = false,
          },
        },
        {
          role = "user",
          content = function(context)
            local input = require("codecompanion.helpers.actions").get_code(context.start_line, context.end_line)
            -- (In Chinese) "Please explain this code from buffer %d:"
            return string.format(
              [[请解释 buffer %d 中的这段代码:
```%s
%s
```]],
              context.bufnr,
              context.filetype,
              input
            )
          end,
        },
      },
    },
  },
})
````
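As an aside, a prompt-library entry like the one above is reachable through the action palette; a minimal sketch of a mapping for it (the keybinding is my own choice, not part of the config above):

```lua
-- Open the CodeCompanion action palette, where prompt-library entries
-- such as "DeepSeek Explain In Chinese" are listed.
vim.keymap.set({ "n", "v" }, "<leader>ca", "<cmd>CodeCompanionActions<cr>",
  { desc = "CodeCompanion actions" })
```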
-
This isn't the place for issues; please create an actual issue and follow the guide in the issue template. You'll need to create a minimal.lua file that allows me to recreate this issue. As for the error you're seeing: that's an issue with the LLM, not CodeCompanion.
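For reference, a minimal.lua along these lines is what the issue template asks for; this is a rough sketch assuming lazy.nvim as the plugin manager (the actual template in the repo may differ):

```lua
-- minimal.lua: start with `nvim --clean -u minimal.lua` to reproduce an
-- issue without the rest of your config.
local lazypath = vim.fn.stdpath("data") .. "/lazy/lazy.nvim"
if not (vim.uv or vim.loop).fs_stat(lazypath) then
  vim.fn.system({
    "git", "clone", "--filter=blob:none",
    "https://github.com/folke/lazy.nvim.git", lazypath,
  })
end
vim.opt.rtp:prepend(lazypath)

require("lazy").setup({
  {
    "olimorris/codecompanion.nvim",
    dependencies = { "nvim-lua/plenary.nvim", "nvim-treesitter/nvim-treesitter" },
    opts = {}, -- add the smallest config that still reproduces the problem
  },
})
```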
Beta Was this translation helpful? Give feedback.
-
(Screen recording: 2025-02-20.11-46-21.mp4)
How can I add the copilot adapter?
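CodeCompanion ships a built-in copilot adapter, so pointing a strategy at it should be enough; a minimal sketch, assuming you are already authenticated with Copilot (e.g. via copilot.lua or copilot.vim):

```lua
-- Use the built-in copilot adapter for both chat and inline strategies.
require("codecompanion").setup({
  strategies = {
    chat = { adapter = "copilot" },
    inline = { adapter = "copilot" },
  },
})
```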