This commit is contained in:
@ -11,6 +11,7 @@ from nonebot.adapters.onebot.v11.message import Message as OB11Message
|
||||
from konabot.common.apis.ali_content_safety import AlibabaGreen
|
||||
from konabot.common.longtask import DepLongTaskTarget
|
||||
from konabot.plugins.handle_text.base import PipelineRunner, TextHandlerEnvironment, register_text_handlers
|
||||
from konabot.plugins.handle_text.handlers.ai_handlers import THQwen
|
||||
from konabot.plugins.handle_text.handlers.encoding_handlers import THAlign, THAlphaConv, THB64Hex, THBase64, THBaseConv, THCaesar, THMorse, THReverse
|
||||
from konabot.plugins.handle_text.handlers.random_handlers import THShuffle, THSorted
|
||||
from konabot.plugins.handle_text.handlers.unix_handlers import THCat, THEcho, THReplace, THRm
|
||||
@ -79,6 +80,7 @@ async def _():
|
||||
THAlign(),
|
||||
THSorted(),
|
||||
THMorse(),
|
||||
THQwen(),
|
||||
)
|
||||
logger.info(f"注册了 TextHandler:{PipelineRunner.get_runner().handlers}")
|
||||
|
||||
|
||||
44
konabot/plugins/handle_text/handlers/ai_handlers.py
Normal file
44
konabot/plugins/handle_text/handlers/ai_handlers.py
Normal file
@ -0,0 +1,44 @@
|
||||
from typing import Any, cast
|
||||
from konabot.common.llm import get_llm
|
||||
from konabot.plugins.handle_text.base import TextHandler, TextHandlerEnvironment, TextHandleResult
|
||||
|
||||
|
||||
class THQwen(TextHandler):
    """Text handler that forwards the user's text to the qwen3-max LLM and returns its reply."""

    name = "qwen"

    async def handle(self, env: TextHandlerEnvironment, istream: str | None, args: list[str]) -> TextHandleResult:
        """Build a chat request from piped input and/or arguments and query the LLM.

        Args:
            env: handler execution environment (not used by this handler).
            istream: text piped in from the previous pipeline stage, if any.
            args: command arguments; joined with spaces into an extra user prompt.

        Returns:
            TextHandleResult — code 0 with the model's reply on success,
            code 1 with a usage message when there is no input at all,
            code 500 when the model returns no content.
        """
        llm = get_llm("qwen3-max")

        # Collect up to two user messages: the piped text and the inline prompt.
        messages: list[dict[str, str]] = []
        if istream is not None:
            messages.append({"role": "user", "content": istream})
        if args:
            messages.append({"role": "user", "content": " ".join(args)})

        # Nothing was piped in and no arguments were given — nothing to ask.
        if not messages:
            return TextHandleResult(
                code=1,
                ostream="使用方法:qwen <提示词>",
            )

        # Steer the model toward short, plain-text answers (the chat
        # environment cannot render Markdown).
        messages.insert(0, {
            "role": "system",
            "content": "除非用户要求,请尽可能短点回答。另外,当前环境不支持 Markdown 语法,如果可以,请使用纯文本回答",
        })

        result = await llm.chat(cast(Any, messages))
        content = result.content
        if content is None:
            return TextHandleResult(
                code=500,
                ostream="问 AI 的时候发生了未知的错误",
            )
        return TextHandleResult(
            code=0,
            ostream=content,
        )
|
||||
Reference in New Issue
Block a user