from fastapi import APIRouter, Request, HTTPException
import json
import logging

from Util import LlmUtil
from sse_starlette.sse import EventSourceResponse

# Create the API router
router = APIRouter(prefix="/api", tags=["General LLM answers"])

# Configure logging
logger = logging.getLogger(__name__)

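# Assumed contract (not defined in this file): LlmUtil.get_llm_response_async(prompt)
# is treated below as an async generator that yields incremental str chunks of the
# model's answer.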
@router.post("/llm")
async def llm(request: Request):
    try:
        # Read the request parameters: accept a JSON body, fall back to form data
        try:
            data = await request.json()
            question = data.get("question")
        except Exception:
            form = await request.form()
            question = form.get("question")

        # Validate the parameters
        if not question:
            raise HTTPException(status_code=400, detail="Missing required parameter: question")

        async def generate_llm_response():
            try:
                query_text = f"You are a professional K12 teacher, skilled at answering students' questions. Here is the question: {question}"
                full_response = []
                async for chunk in LlmUtil.get_llm_response_async(query_text):
                    # Forward only non-None, non-empty chunks; accumulate the full answer
                    if chunk is not None and chunk.strip():
                        full_response.append(chunk)
                        yield {"data": json.dumps({'step': 'review', 'content': chunk}, ensure_ascii=False)}

                # Send the completion signal
                yield {"data": json.dumps({'step': 'review', 'content': 'Answer complete', 'done': True}, ensure_ascii=False)}
                yield "DONE"
            except Exception as e:
                logger.error(f"Exception: {str(e)}")
                yield {"data": json.dumps({'error': f'Processing error: {str(e)}'}, ensure_ascii=False)}
                yield "DONE"

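        # Illustrative wire format produced by EventSourceResponse for the yields
        # above (a dict yield's "data" value becomes a `data: ...` frame, and so
        # does a bare str yield):
        #   data: {"step": "review", "content": "..."}
        #   data: {"step": "review", "content": "Answer complete", "done": true}
        #   data: DONE
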
        # Return the streaming SSE (text/event-stream) response
        return EventSourceResponse(generate_llm_response())
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Endpoint exception: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Internal server error: {str(e)}")
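
# ---------------------------------------------------------------------------
# Usage sketch (not part of the original file): a minimal async client that
# consumes this endpoint's SSE stream with httpx. The host/port and the sample
# question are assumptions for illustration only.
#
#   import asyncio
#   import httpx
#
#   async def main():
#       async with httpx.AsyncClient(timeout=None) as client:
#           async with client.stream(
#               "POST",
#               "http://localhost:8000/api/llm",   # assumed server address
#               json={"question": "Why is the sky blue?"},
#           ) as resp:
#               async for line in resp.aiter_lines():
#                   # SSE frames arrive as lines of the form `data: {...}`
#                   if line.startswith("data:"):
#                       print(line[len("data:"):].strip())
#
#   asyncio.run(main())
# ---------------------------------------------------------------------------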