添加基础的 LLM 支持
This commit is contained in:
40
konabot/plugins/llm_test.py
Normal file
40
konabot/plugins/llm_test.py
Normal file
@ -0,0 +1,40 @@
|
||||
"""
肥肠危险注意:本文件仅用于开发环境测试 LLM 模块能否正常工作!

请不要在生产环境启用它!
"""
|
||||
import nonebot
|
||||
from nonebot_plugin_alconna import Alconna, Args, on_alconna
|
||||
from pydantic import BaseModel
|
||||
|
||||
from konabot.common.llm import get_llm
|
||||
from konabot.common.longtask import DepLongTaskTarget
|
||||
|
||||
|
||||
class LLMTestConfig(BaseModel):
    """Plugin configuration parsed from the NoneBot environment config."""

    # Opt-in switch: the `debug-ask-llm` command is only registered when True.
    # Defaults to False so the debug command never appears unless explicitly enabled.
    debug_enable_llm_test: bool = False
||||
# Extract this plugin's settings from the global NoneBot configuration.
config = nonebot.get_plugin_config(LLMTestConfig)
||||
if config.debug_enable_llm_test:
    # Development-only command: `debug-ask-llm <prompt>` forwards a raw prompt
    # to the configured LLM and echoes the reply back to the caller.
    cmd = on_alconna(Alconna(
        "debug-ask-llm",
        Args["prompt", str],
    ))

    @cmd.handle()
    async def _(prompt: str, target: DepLongTaskTarget):
        # Single-turn conversation: the user's prompt with no system message.
        conversation = [
            {"role": "user", "content": prompt}
        ]
        llm = get_llm()
        reply = await llm.chat(
            conversation,
            timeout=None,  # no client-side deadline — acceptable for debug use
            max_tokens=1024,
        )
        # The model may return an empty/None content; fall back to "".
        text = reply.content or ""
        await target.send_message(text)
Reference in New Issue
Block a user