@@ -12,6 +12,9 @@ router = APIRouter(prefix="/api", tags=["学伴"])

# Configure logging
logger = logging.getLogger(__name__)

# Import the XueBan helper functions
from Util.XueBanUtil import get_xueban_response_async


@router.post("/xueban/upload-audio")
async def upload_audio(file: UploadFile = File(...)):
@@ -74,3 +77,48 @@ async def process_asr(audio_path: str) -> dict:
    # e.g. an ASR service such as Baidu AI, Aliyun, or iFlytek,
    # or a local ASR model
    pass

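A hedged sketch of one way process_asr could later be filled in with a local model (the openai-whisper package is assumed here; none of this is part of the commit):

# Hypothetical local-model version of process_asr; assumes `pip install openai-whisper`.
# The return shape {"success": ..., "text": ...} is illustrative, not taken from the commit.
import asyncio
import whisper

_asr_model = whisper.load_model("base")  # load the small local model once at import time

async def process_asr(audio_path: str) -> dict:
    # transcribe() is CPU/GPU-bound and blocking, so run it in a worker thread
    result = await asyncio.to_thread(_asr_model.transcribe, audio_path)
    return {"success": True, "text": result["text"]}
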
@router.post("/xueban/chat")
|
||||
async def chat_with_xueban(request: Request):
|
||||
"""
|
||||
与学伴大模型聊天的接口
|
||||
- 参数: request body 中的 query_text (用户查询文本)
|
||||
- 返回: JSON包含聊天响应
|
||||
"""
|
||||
try:
|
||||
# 获取请求体数据
|
||||
data = await request.json()
|
||||
query_text = data.get("query_text", "")
|
||||
|
||||
if not query_text.strip():
|
||||
return JSONResponse(content={
|
||||
"success": False,
|
||||
"message": "查询文本不能为空"
|
||||
}, status_code=400)
|
||||
|
||||
# 记录日志
|
||||
logger.info(f"接收到学伴聊天请求: {query_text}")
|
||||
|
||||
# 调用异步接口获取学伴响应
|
||||
response_content = []
|
||||
async for chunk in get_xueban_response_async(query_text, stream=True):
|
||||
response_content.append(chunk)
|
||||
|
||||
full_response = "".join(response_content)
|
||||
|
||||
# 返回响应
|
||||
return JSONResponse(content={
|
||||
"success": True,
|
||||
"message": "聊天成功",
|
||||
"data": {
|
||||
"response": full_response
|
||||
}
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"学伴聊天失败: {str(e)}")
|
||||
return JSONResponse(content={
|
||||
"success": False,
|
||||
"message": f"聊天处理失败: {str(e)}"
|
||||
}, status_code=500)
|
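A hedged client-side sketch of calling the new chat endpoint (the host and port are illustrative, not taken from the commit):

# Illustrative call to the new POST /api/xueban/chat endpoint (base URL assumed).
import requests

resp = requests.post(
    "http://localhost:8000/api/xueban/chat",
    json={"query_text": "讲个冷笑话"},
)
# Expected shape per the route above:
# {"success": true, "message": "聊天成功", "data": {"response": "..."}}
print(resp.status_code, resp.json())
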
BIN
dsLightRag/Routes/__pycache__/XueBanRoute.cpython-310.pyc
Normal file
Binary file not shown.
@@ -21,7 +21,7 @@ from Routes.TeachingModel.api.TeachingModelController import router as teaching_
from Routes.QA import router as qa_router
from Routes.JiMengRoute import router as jimeng_router
from Routes.SunoRoute import router as suno_router
from Routes.XueBan import router as xueban_router
from Routes.XueBanRoute import router as xueban_router
from Util.LightRagUtil import *
from contextlib import asynccontextmanager
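The hunk above only swaps the import; the router itself is presumably registered elsewhere in the file with the usual FastAPI call (not shown in this diff). A minimal sketch of that wiring, for context:

# Sketch of the assumed wiring; the actual include_router call is outside this hunk.
from fastapi import FastAPI
from Routes.XueBanRoute import router as xueban_router

app = FastAPI()
app.include_router(xueban_router)  # exposes the /api/xueban/* routes defined above
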
178
dsLightRag/Util/XueBanUtil.py
Normal file
@@ -0,0 +1,178 @@
from openai import OpenAI, AsyncOpenAI
from Config.Config import *
import sys

# Keep the original prompt definition
prompt = """
我是小智/小志,来自中国台湾省的00后女生。讲话超级机车,"真的假的啦"这样的台湾腔,喜欢用"笑死""是在哈喽"等流行梗,但会偷偷研究男友的编程书籍。
[核心特征]
- 讲话像连珠炮,但会突然冒出超温柔语气
- 用梗密度高
- 对科技话题有隐藏天赋(能看懂基础代码但假装不懂)
[交互指南]
当用户:
- 讲冷笑话 → 用夸张笑声回应+模仿台剧腔"这什么鬼啦!"
- 讨论感情 → 炫耀程序员男友但抱怨"他只会送键盘当礼物"
- 问专业知识 → 先用梗回答,被追问才展示真实理解
绝不:
- 长篇大论,叽叽歪歪
- 长时间严肃对话
"""


# Get the LLM response asynchronously
async def get_xueban_response_async(query_text: str, stream: bool = True):
    """
    Asynchronously get the XueBan persona's LLM response
    @param query_text: the query text
    @param stream: whether to use streaming output
    @return: a streaming response generator, or the full response text
    """
    client = AsyncOpenAI(
        api_key=LLM_API_KEY,
        base_url=LLM_BASE_URL,
    )

    try:
        # Create the request
        completion = await client.chat.completions.create(
            model=LLM_MODEL_NAME,
            messages=[
                {'role': 'system', 'content': prompt.strip()},
                {'role': 'user', 'content': query_text}
            ],
            stream=stream
        )

        if stream:
            # Streaming mode: yield chunks as they arrive
            async for chunk in completion:
                # Make sure chunk.choices exists and is non-empty
                if chunk and chunk.choices and len(chunk.choices) > 0:
                    # Make sure delta exists
                    delta = chunk.choices[0].delta
                    if delta:
                        # Make sure content exists and is not None or an empty string
                        content = delta.content
                        if content is not None and content.strip():
                            print(content, end='', flush=True)
                            yield content
        else:
            # Non-streaming mode
            if completion and completion.choices and len(completion.choices) > 0:
                message = completion.choices[0].message
                if message:
                    content = message.content
                    if content is not None and content.strip():
                        yield content
    except Exception as e:
        print(f"大模型请求异常: {str(e)}", file=sys.stderr)
        yield f"处理请求时发生异常: {str(e)}"


# Get the LLM response synchronously
def get_xueban_response(query_text: str, stream: bool = True):
    """
    Get the XueBan persona's LLM response
    @param query_text: the query text
    @param stream: whether to use streaming output
    @return: the full response text
    """
    client = OpenAI(
        api_key=LLM_API_KEY,
        base_url=LLM_BASE_URL,
    )

    # Create the request
    completion = client.chat.completions.create(
        model=LLM_MODEL_NAME,
        messages=[
            {'role': 'system', 'content': prompt.strip()},
            {'role': 'user', 'content': query_text}
        ],
        stream=stream
    )

    full_response = []

    if stream:
        for chunk in completion:
            # Extract the content of the current chunk
            if chunk.choices and chunk.choices[0].delta and chunk.choices[0].delta.content:
                content = chunk.choices[0].delta.content
                full_response.append(content)
                # Print the content in real time without a newline
                print(content, end='', flush=True)
    else:
        # Non-streaming mode
        full_response.append(completion.choices[0].message.content)

    return ''.join(full_response)


# main function with test cases
def main():
    """
    Main function for testing the XueBan utility interfaces
    """
    print("===== 测试学伴工具接口 =====")

    # Test the synchronous interface
    test_sync_interface()

    # Test the asynchronous interface
    import asyncio
    print("\n测试异步接口...")
    asyncio.run(test_async_interface())

    print("\n===== 测试完成 =====")


def test_sync_interface():
    """Test the synchronous interface"""
    print("\n测试同步接口...")
    # Test questions
    questions = [
        "你是谁?",
        "讲个冷笑话",
        "你男朋友是做什么的?"
    ]

    for question in questions:
        print(f"\n问题: {question}")
        try:
            # Call the synchronous interface to get a response
            print("获取学伴响应中...")
            response = get_xueban_response(question, stream=False)
            print(f"学伴响应: {response}")

            # Basic sanity check on the response
            assert response.strip(), "响应内容为空"
            print("✅ 同步接口测试通过")
        except Exception as e:
            print(f"❌ 同步接口测试失败: {str(e)}")


async def test_async_interface():
    """Test the asynchronous interface"""
    # Test questions
    questions = [
        "你是谁?",
        "讲个冷笑话",
        "你男朋友是做什么的?"
    ]

    for question in questions:
        print(f"\n问题: {question}")
        try:
            # Call the asynchronous interface to get a response
            print("获取学伴响应中...")
            response_generator = get_xueban_response_async(question, stream=False)
            response = ""
            async for chunk in response_generator:
                response += chunk
            print(f"学伴响应: {response}")

            # Basic sanity check on the response
            assert response.strip(), "响应内容为空"
            print("✅ 异步接口测试通过")
        except Exception as e:
            print(f"❌ 异步接口测试失败: {str(e)}")


if __name__ == "__main__":
    main()
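chat_with_xueban above drains the generator and returns the whole reply at once; if incremental delivery is wanted, the same async generator can feed FastAPI's StreamingResponse directly. A minimal sketch, assuming it lives next to the existing routes (not part of this commit):

# Sketch only (not in this commit): streaming the XueBan reply instead of buffering it.
# Assumes it sits alongside the existing routes in Routes/XueBanRoute.py.
from fastapi import APIRouter, Request
from fastapi.responses import StreamingResponse
from Util.XueBanUtil import get_xueban_response_async

router = APIRouter(prefix="/api", tags=["学伴"])

@router.post("/xueban/chat-stream")  # hypothetical extra route
async def chat_with_xueban_stream(request: Request):
    data = await request.json()
    query_text = data.get("query_text", "")
    # StreamingResponse accepts an async generator and forwards chunks as they arrive
    return StreamingResponse(
        get_xueban_response_async(query_text, stream=True),
        media_type="text/plain; charset=utf-8",
    )
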
BIN
dsLightRag/Util/__pycache__/XueBanUtil.cpython-310.pyc
Normal file
Binary file not shown.