main
HuangHai 1 month ago
parent f0294aa1a5
commit 3638caaed6

@ -5,7 +5,6 @@ import com.dsideal.base.Util.LocalMysqlConnectUtil;
import com.jfinal.plugin.activerecord.Db;
import com.jfinal.plugin.activerecord.Record;
import com.dsideal.base.Util.CallDeepSeek;
import java.util.List;
import java.util.Map;
import java.util.Set;
@ -14,9 +13,7 @@ import java.util.ArrayList;
import java.util.concurrent.CountDownLatch;
import java.text.SimpleDateFormat;
import java.util.Date;
import cn.hutool.core.io.FileUtil;
import java.io.File;
public class TestMax32K {
@ -35,7 +32,7 @@ public class TestMax32K {
/**
*
*/
private static List<String> splitLargeTable(Set<String> fieldNames,
private static List<String> splitLargeTable(String tableName, Set<String> fieldNames,
List<Record> allTableData, int maxSize) {
List<String> chunks = new ArrayList<>();
StringBuilder currentTableChunk = new StringBuilder();
@ -60,7 +57,7 @@ public class TestMax32K {
// 检查是否超过限制
if (currentTableChunk.length() + rowData.length() > maxSize) {
if (!currentTableChunk.isEmpty()) {
if (currentTableChunk.length() > 0) {
chunks.add(currentTableChunk.toString());
currentTableChunk = new StringBuilder();
}
@ -68,14 +65,24 @@ public class TestMax32K {
currentTableChunk.append(rowData);
}
if (!currentTableChunk.isEmpty()) {
if (currentTableChunk.length() > 0) {
chunks.add(currentTableChunk.toString());
}
return chunks;
}
/**
 * Placeholder that returns sample data chunks for analysis.
 *
 * NOTE(review): the report-generation code in this file invokes a
 * two-argument overload, getDataChunks(regions, tableList); this no-arg
 * stub returning hard-coded sample strings appears to be an unused
 * placeholder — confirm before relying on it.
 *
 * @return a fixed array of two sample data-chunk strings
 */
public static String[] getDataChunks() {
// The logic from the main method could be extracted here to return the chunked data
// For simplicity, this is just an example
return new String[]{"示例数据块1", "示例数据块2"};
}
public static String generateComprehensiveReport() {
LocalMysqlConnectUtil.Init();
String[] regions = {"文山州", "楚雄州"};
String sql = "select table_name as TABLE_NAME from core_dataset_table where dataset_group_id in (select id from core_dataset_group where pid='1036317909951057920')";
@ -83,74 +90,46 @@ public class TestMax32K {
// 获取分块数据
String[] dataChunks = getDataChunks(regions, tableList);
List<String> chunkAnalyses = new ArrayList<>();
System.out.println("开始分析 " + dataChunks.length + " 个数据块...");
System.out.println("开始逐步提交 " + dataChunks.length + " 个数据块...");
final StringBuilder finalReport = new StringBuilder();
final CountDownLatch finalLatch = new CountDownLatch(1);
// 第一阶段:流式分析各个数据块
// 逐步提交数据块
for (int i = 0; i < dataChunks.length; i++) {
final int chunkIndex = i;
final StringBuilder chunkResult = new StringBuilder();
final CountDownLatch latch = new CountDownLatch(1);
String prompt = "请对以下教育数据进行简要分析重点关注关键指标和趋势控制在500字以内\n" + dataChunks[i];
System.out.println("\n=== 正在分析第 " + (i + 1) + " 个数据块 ===");
final boolean isLastChunk = (i == dataChunks.length - 1);
CallDeepSeek.callDeepSeekStream(prompt, new CallDeepSeek.SSEListener() {
@Override
public void onData(String data) {
System.out.print(data);
chunkResult.append(data);
}
@Override
public void onComplete(String fullResponse) {
System.out.println("\n--- 第 " + (chunkIndex + 1) + " 个数据块分析完成 ---\n");
chunkAnalyses.add(chunkResult.toString());
latch.countDown();
}
@Override
public void onError(String error) {
System.err.println("分析第 " + (chunkIndex + 1) + " 个数据块时出错: " + error);
chunkAnalyses.add("分析失败: " + error);
latch.countDown();
}
});
try {
// 等待当前块分析完成
latch.await();
Thread.sleep(1000); // 稍微延迟避免API调用过于频繁
} catch (InterruptedException e) {
System.err.println("等待分析结果时被中断: " + e.getMessage());
}
String prompt;
if (isLastChunk) {
// 最后一个数据块:要求返回完整分析报告
prompt = "这是最后一部分教育数据请基于之前提交的所有数据生成一份完整的综合分析报告3000字以内\n" + dataChunks[i];
} else {
// 中间数据块:只提交数据,不要求返回分析
prompt = "这是第" + (i + 1) + "部分教育数据,共" + dataChunks.length + "部分,请接收并记录,暂不需要分析:\n" + dataChunks[i];
}
// 第二阶段:流式生成综合报告
System.out.println("\n=== 开始生成综合分析报告 ===");
StringBuilder combinedAnalysis = new StringBuilder();
combinedAnalysis.append("基于以下分块分析结果请生成一份完整的教育数据综合分析报告3000字以内\n\n");
for (int i = 0; i < chunkAnalyses.size(); i++) {
combinedAnalysis.append("数据块").append(i + 1).append("分析:\n");
combinedAnalysis.append(chunkAnalyses.get(i)).append("\n\n");
}
System.out.println("\n=== 提交第 " + (i + 1) + "/" + dataChunks.length + " 个数据块 ===");
final StringBuilder finalReport = new StringBuilder();
final CountDownLatch finalLatch = new CountDownLatch(1);
final CountDownLatch chunkLatch = new CountDownLatch(1);
CallDeepSeek.callDeepSeekStream(combinedAnalysis.toString(), new CallDeepSeek.SSEListener() {
CallDeepSeek.callDeepSeekStream(prompt, new CallDeepSeek.SSEListener() {
@Override
public void onData(String data) {
if (isLastChunk) {
// 只有最后一个数据块才显示和保存返回内容
System.out.print(data);
finalReport.append(data);
} else {
// 中间数据块的响应不显示(或只显示确认信息)
// System.out.print("."); // 可选:显示进度点
}
}
@Override
public void onComplete(String fullResponse) {
if (isLastChunk) {
System.out.println("\n\n=== 综合分析报告生成完成 ===");
// 保存报告到文件
@ -166,16 +145,34 @@ public class TestMax32K {
}
finalLatch.countDown();
} else {
System.out.println("第 " + (chunkIndex + 1) + " 个数据块已提交");
}
chunkLatch.countDown();
}
@Override
public void onError(String error) {
System.err.println("生成综合报告时出错: " + error);
System.err.println("提交第 " + (chunkIndex + 1) + " 个数据块时出错: " + error);
if (isLastChunk) {
finalReport.append("生成失败: ").append(error);
finalLatch.countDown();
}
chunkLatch.countDown();
}
});
try {
// 等待当前块处理完成
chunkLatch.await();
if (!isLastChunk) {
Thread.sleep(1000); // 中间块之间稍微延迟
}
} catch (InterruptedException e) {
System.err.println("等待数据块处理时被中断: " + e.getMessage());
}
}
try {
finalLatch.await();
} catch (InterruptedException e) {
@ -257,7 +254,7 @@ public class TestMax32K {
// 如果单个表数据超过限制,需要进一步分割
if (tableDataStr.length() > MAX_CHUNK_SIZE - header.length()) {
List<String> tableChunks = splitLargeTable(fieldNames, allTableData, MAX_CHUNK_SIZE - header.length());
List<String> tableChunks = splitLargeTable(tableName, fieldNames, allTableData, MAX_CHUNK_SIZE - header.length());
for (int i = 0; i < tableChunks.size(); i++) {
StringBuilder chunkBuilder = new StringBuilder();
chunkBuilder.append(header);

Loading…
Cancel
Save