|
|
|
@ -3,6 +3,7 @@ import json
|
|
|
|
|
import re
|
|
|
|
|
from CommonUtil import *
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# 流式生成数据的函数
|
|
|
|
|
async def generate_stream_markdown(course_name: str):
|
|
|
|
|
"""
|
|
|
|
@ -36,11 +37,34 @@ async def generate_stream_markdown(course_name: str):
|
|
|
|
|
print("\n完整的 Markdown 内容:")
|
|
|
|
|
print(full_markdown)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# 测试函数
|
|
|
|
|
async def test_generate_stream_markdown():
    """Smoke-test the streaming generator.

    Drives ``generate_stream_markdown`` for one course name and echoes every
    decoded chunk to stdout as soon as it arrives (no buffering), so the
    streaming behavior can be observed interactively.
    """
    # Chunks arrive as bytes; decode and flush immediately to show streaming.
    async for piece in generate_stream_markdown("三角形面积"):
        print(piece.decode("utf-8"), end="", flush=True)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def extract_level1_title(markdown_content):
    """Return the text of the first level-1 heading in a Markdown string.

    Scans for the first line of the form ``# Title`` (a single ``#`` followed
    by whitespace) and returns the heading text.

    Args:
        markdown_content: The full Markdown document as a string.

    Returns:
        The level-1 heading text, or ``None`` if no level-1 heading exists.
    """
    # MULTILINE makes ^/$ anchor each line; '#\s+' rejects '##' headings
    # because the character after the first '#' must be whitespace.
    found = re.search(r'^#\s+(.+)$', markdown_content, re.MULTILINE)
    return found.group(1) if found else None
|
|
|
|
|
|
|
|
|
|
def extract_level2_titles(markdown_content):
    """Collect the texts of all level-2 headings in a Markdown string.

    Finds every line of the form ``## Title`` and returns the heading texts
    de-duplicated, keeping first-appearance order.

    Args:
        markdown_content: The full Markdown document as a string.

    Returns:
        A list of unique level-2 heading texts (possibly empty).
    """
    # MULTILINE so ^/$ match per line; capture the text after '## '.
    ordered = {}
    for title in re.findall(r'^##\s+(.+)$', markdown_content, re.MULTILINE):
        # dict keys are insertion-ordered, so this dedups while keeping
        # the first occurrence's position.
        ordered.setdefault(title, None)
    return list(ordered)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# 运行应用
|
|
|
|
|
if __name__ == "__main__":
|
|
|
|
|
# 使用 asyncio.run 运行异步函数
|
|
|
|
@ -50,5 +74,12 @@ if __name__ == "__main__":
|
|
|
|
|
    # Read the sample Markdown document used to drive the extraction demo.
    # NOTE(review): assumes Sample.md exists in the current working directory
    # — confirm; this raises FileNotFoundError otherwise.
    with open("Sample.md", "r", encoding="utf-8") as file:
        markdown_content = file.read()

    print("markdown_content:", markdown_content)
    # Level-1 heading -> emitted as a "cover" record (title only, empty text).
    level1_title = extract_level1_title(markdown_content)
    ret = {"type": "cover", "data": {"title": level1_title, "text": ""}}
    # ensure_ascii=False keeps non-ASCII (e.g. Chinese) characters readable.
    print(json.dumps(ret, ensure_ascii=False))

    # Level-2 headings -> emitted as a "contents" (table-of-contents) record.
    contents = extract_level2_titles(markdown_content)
    ret = {"type": "contents", "data": {"items": contents}}
    print(json.dumps(ret, ensure_ascii=False))
|
|
|
|
|