main
HuangHai 3 weeks ago
parent 329439f433
commit 32798cbaff

@@ -133,25 +133,7 @@ async def rag_stream(request: fastapi.Request):
    search_results = queryByEs(query, query_tags, logger)
    # Call the LLM in streaming mode
    return StreamingResponse(
        callLLM(request, query, search_results, logger, True),
        media_type="text/event-stream"
    )
    data = await request.json()
    query = data.get('query', '')
    query_tags = data.get('tags', [])
    # Hybrid search via Elasticsearch
    search_results = queryByEs(query, query_tags, logger)
    # Call the LLM
    markdown_content = callLLM(request, query, search_results, logger, False)
    # If a valid result was returned
    if markdown_content:
        return {"data": markdown_content, "format": "markdown"}
    return {"data": "No relevant information was found in the knowledge base, so this question cannot be answered."}
    return callLLM(request, query, search_results, logger, True)
if __name__ == "__main__":
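
For context: after this change rag_stream hands the request straight to callLLM with the stream flag set, and the traceback added below places the StreamingResponse plumbing in Util/SearchUtil.py, where a nested generate() iterates aliyun_util.chat_stream(prompt). A minimal sketch of what that streaming branch might look like follows; the function name call_llm_stream_sketch, the aliyun_util parameter, and the prompt assembly are assumptions, not code from this repository.

from fastapi.responses import StreamingResponse

def call_llm_stream_sketch(aliyun_util, query, search_results):
    # Placeholder prompt assembly; the real prompt construction in callLLM is not shown in this diff.
    prompt = f"Question: {query}\nContext: {search_results}"

    async def generate():
        # Forward each chunk from the model to the client as it arrives.
        async for chunk in aliyun_util.chat_stream(prompt):
            yield chunk

    return StreamingResponse(generate(), media_type="text/event-stream")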

@@ -25,4 +25,25 @@ class ALiYunUtil:
            )
            return completion.choices[0].message.content
        except Exception as e:
            return f"An error occurred: {str(e)}"
    async def chat_stream(self, prompt, model=None):
        """
        Streamed chat with the Aliyun LLM
        :param prompt: the question entered by the user
        :param model: optionally override the model to use
        :return: an async generator yielding the model's streamed response
        """
        try:
            stream = await self.client.chat.completions.create(
                model=model or self.model_name,
                messages=[
                    {'role': 'user', 'content': prompt}
                ],
                stream=True
            )
            async for chunk in stream:
                if chunk.choices[0].delta.content:
                    yield chunk.choices[0].delta.content
        except Exception as e:
            yield f"An error occurred: {str(e)}"
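
The `async for` over the awaited result of chat.completions.create(stream=True) matches the openai AsyncOpenAI client, so ALiYunUtil presumably holds one. ALiYunUtil.__init__ is not part of this diff; the constructor below (the ALiYunUtilSketch name, the DashScope compatible-mode base_url, and the default model name) is an assumption sketched only to show how the new chat_stream method would be wired up and consumed.

from openai import AsyncOpenAI

class ALiYunUtilSketch:
    def __init__(self, api_key: str, model_name: str = "qwen-plus"):
        self.model_name = model_name
        # Assumed: an async OpenAI-compatible client pointed at DashScope's compatible-mode endpoint.
        self.client = AsyncOpenAI(
            api_key=api_key,
            base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
        )

    async def chat_stream(self, prompt, model=None):
        # Same body as the method added in the hunk above.
        stream = await self.client.chat.completions.create(
            model=model or self.model_name,
            messages=[{'role': 'user', 'content': prompt}],
            stream=True,
        )
        async for chunk in stream:
            if chunk.choices[0].delta.content:
                yield chunk.choices[0].delta.content

# Example consumption (requires a valid DashScope API key):
# import asyncio
# async def demo():
#     util = ALiYunUtilSketch(api_key="sk-...")
#     async for piece in util.chat_stream("你好"):
#         print(piece, end="", flush=True)
# asyncio.run(demo())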

@@ -0,0 +1,47 @@
2025-06-29 19:39:37,205 - __main__ - INFO - Calling the Aliyun LLM to generate an answer...
INFO: 127.0.0.1:63054 - "POST /api/rag_stream HTTP/1.1" 200 OK
ERROR: Exception in ASGI application
Traceback (most recent call last):
File "D:\anaconda3\envs\rag\lib\site-packages\uvicorn\protocols\http\h11_impl.py", line 403, in run_asgi
result = await app( # type: ignore[func-returns-value]
File "D:\anaconda3\envs\rag\lib\site-packages\uvicorn\middleware\proxy_headers.py", line 60, in __call__
return await self.app(scope, receive, send)
File "D:\anaconda3\envs\rag\lib\site-packages\fastapi\applications.py", line 1054, in __call__
await super().__call__(scope, receive, send)
File "D:\anaconda3\envs\rag\lib\site-packages\starlette\applications.py", line 112, in __call__
await self.middleware_stack(scope, receive, send)
File "D:\anaconda3\envs\rag\lib\site-packages\starlette\middleware\errors.py", line 187, in __call__
raise exc
File "D:\anaconda3\envs\rag\lib\site-packages\starlette\middleware\errors.py", line 165, in __call__
await self.app(scope, receive, _send)
File "D:\anaconda3\envs\rag\lib\site-packages\starlette\middleware\exceptions.py", line 62, in __call__
await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send)
File "D:\anaconda3\envs\rag\lib\site-packages\starlette\_exception_handler.py", line 53, in wrapped_app
raise exc
File "D:\anaconda3\envs\rag\lib\site-packages\starlette\_exception_handler.py", line 42, in wrapped_app
await app(scope, receive, sender)
File "D:\anaconda3\envs\rag\lib\site-packages\starlette\routing.py", line 714, in __call__
await self.middleware_stack(scope, receive, send)
File "D:\anaconda3\envs\rag\lib\site-packages\starlette\routing.py", line 734, in app
await route.handle(scope, receive, send)
File "D:\anaconda3\envs\rag\lib\site-packages\starlette\routing.py", line 288, in handle
await self.app(scope, receive, send)
File "D:\anaconda3\envs\rag\lib\site-packages\starlette\routing.py", line 76, in app
await wrap_app_handling_exceptions(app, request)(scope, receive, send)
File "D:\anaconda3\envs\rag\lib\site-packages\starlette\_exception_handler.py", line 53, in wrapped_app
raise exc
File "D:\anaconda3\envs\rag\lib\site-packages\starlette\_exception_handler.py", line 42, in wrapped_app
await app(scope, receive, sender)
File "D:\anaconda3\envs\rag\lib\site-packages\starlette\routing.py", line 74, in app
await response(scope, receive, send)
File "D:\anaconda3\envs\rag\lib\site-packages\starlette\responses.py", line 263, in __call__
async with anyio.create_task_group() as task_group:
File "D:\anaconda3\envs\rag\lib\site-packages\anyio\_backends\_asyncio.py", line 597, in __aexit__
raise exceptions[0]
File "D:\anaconda3\envs\rag\lib\site-packages\starlette\responses.py", line 266, in wrap
await func()
File "D:\anaconda3\envs\rag\lib\site-packages\starlette\responses.py", line 246, in stream_response
async for chunk in self.body_iterator:
File "D:\dsWork\dsProject\dsRag\Util\SearchUtil.py", line 171, in generate
async for chunk in aliyun_util.chat_stream(prompt):
AttributeError: 'ALiYunUtil' object has no attribute 'chat_stream'
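
The AttributeError at the end of this log means the worker that served the request was still running a copy of ALiYunUtil without the chat_stream method added above, so the stream died after the 200 response line; restarting the service picks up the new method. A quick way to re-check the endpoint afterwards is sketched below; httpx as the test client and port 8000 are assumptions, while the /api/rag_stream path and the query/tags payload keys come from this diff.

import asyncio
import httpx

async def main():
    payload = {"query": "test question", "tags": []}
    async with httpx.AsyncClient(timeout=None) as client:
        # Stream the response body so chunks are printed as the server yields them.
        async with client.stream("POST", "http://127.0.0.1:8000/api/rag_stream", json=payload) as resp:
            async for chunk in resp.aiter_text():
                print(chunk, end="", flush=True)

if __name__ == "__main__":
    asyncio.run(main())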