@@ -1,6 +1,21 @@
+import logging
+import os
+import subprocess
+import tempfile
 import urllib.parse
+import uuid
+import warnings
+from io import BytesIO
+from logging.handlers import RotatingFileHandler
+
+import fastapi
+import uvicorn
+from fastapi import FastAPI, HTTPException
+from starlette.staticfiles import StaticFiles
+
+from Util.ALiYunUtil import ALiYunUtil
 from Util.SearchUtil import *
 
 # Initialize logging
 logger = logging.getLogger(__name__)
 logger.setLevel(logging.INFO)
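
The construction of file_handler and console_handler sits between the hunks and is not part of the diff. Below is a minimal sketch of how that wiring typically looks with RotatingFileHandler; the log path, rotation limits, and format string are assumptions, and only the handler names and the INFO level come from the surrounding lines.

import logging
from logging.handlers import RotatingFileHandler

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

# Shared format for both handlers (format string is an assumption).
formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(name)s: %(message)s")

# Rotate the log file at roughly 5 MB, keeping a few backups (assumed values).
file_handler = RotatingFileHandler("app.log", maxBytes=5 * 1024 * 1024,
                                   backupCount=3, encoding="utf-8")
file_handler.setFormatter(formatter)

# Mirror the same records to the console for local runs.
console_handler = logging.StreamHandler()
console_handler.setFormatter(formatter)

logger.addHandler(file_handler)
logger.addHandler(console_handler)
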
@@ -24,7 +39,6 @@ logger.addHandler(file_handler)
 logger.addHandler(console_handler)
 
 async def lifespan(app: FastAPI):
     # Initialize the Aliyun LLM utility
     app.state.aliyun_util = ALiYunUtil()
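
The decorator on lifespan and the point where it is registered with the application are outside the visible lines. The sketch below shows the usual pattern, assuming the standard asynccontextmanager-based lifespan and that nothing needs explicit teardown; the app.mount call is taken from the next hunk's header.

from contextlib import asynccontextmanager

from fastapi import FastAPI
from starlette.staticfiles import StaticFiles

from Util.ALiYunUtil import ALiYunUtil


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Create the Aliyun LLM client once at startup and share it via app.state.
    app.state.aliyun_util = ALiYunUtil()
    yield
    # Nothing to release in this sketch; close clients here if ALiYunUtil needs it.


app = FastAPI(lifespan=lifespan)
app.mount("/static", StaticFiles(directory="Static"), name="static")
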
@@ -42,7 +56,7 @@ app.mount("/static", StaticFiles(directory="Static"), name="static")
 
 
 @app.post("/api/save-word")
-async def save_to_word(request: Request):
+async def save_to_word(request: fastapi.Request):
     output_file = None
     try:
         # Parse request data
@@ -91,18 +105,17 @@ async def save_to_word(request: Request):
             logger.warning(f"Failed to clean up temp files: {str(e)}")
 
 
-@app.post("/api/rag", response_model=None)
-async def rag(request: fastapi.Request):
-
+@app.post("/api/rag")
+async def rag(request: Request):
     data = await request.json()
     query = data.get('query', '')
     query_tags = data.get('tags', [])
 
     # Call ES for hybrid search
-    search_results = queryByEs(query, query_tags)
+    search_results = queryByEs(query, query_tags, logger)
 
     # Call the LLM
-    markdown_content = callLLM(request, query, search_results)
+    markdown_content = callLLM(request, query, search_results, logger)
 
     # If a valid result was returned
     if markdown_content:
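
Taken together, the /api/rag changes leave the endpoint looking roughly like the sketch below: read the query and tags from the JSON body, run the Elasticsearch hybrid search, and pass the hits to the large model, handing the module-level logger to both helpers. The Request import and the shape of the return values are assumptions; only the calls shown in the diff are taken from it, and it relies on the module-level app, logger, queryByEs, and callLLM of the file being patched.

from fastapi import Request  # assumed; the visible import line only brings in FastAPI and HTTPException


@app.post("/api/rag")
async def rag(request: Request):
    data = await request.json()
    query = data.get('query', '')
    query_tags = data.get('tags', [])

    # Hybrid search against Elasticsearch.
    search_results = queryByEs(query, query_tags, logger)

    # Ask the large model to compose an answer from the retrieved context.
    markdown_content = callLLM(request, query, search_results, logger)

    # If the model produced an answer, return it; otherwise report no match (assumed shape).
    if markdown_content:
        return {"success": True, "data": markdown_content}
    return {"success": False, "message": "No relevant content found"}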