main
黄海 7 months ago
parent c98d85d5e9
commit 27d55cf264

@ -1,87 +0,0 @@
package UnitTest;
import com.jfinal.plugin.activerecord.ActiveRecordPlugin;
import com.jfinal.plugin.activerecord.Db;
import com.jfinal.plugin.activerecord.Record;
import com.jfinal.plugin.activerecord.dialect.PostgreSqlDialect;
import com.jfinal.plugin.druid.DruidPlugin;
import java.util.ArrayList;
import java.util.List;
import static com.dsideal.QingLong.Const.DbConst.MASTER;
import static com.dsideal.QingLong.Const.DbConst.SLAVE;
public class OpenGaussReadWriteSplit {
public static DruidPlugin createDruidPlugin(String url, String username, String password, String driverClass) {
DruidPlugin druidPlugin = new DruidPlugin(url, username, password, driverClass);
// Maximum number of active connections in the pool
druidPlugin.setMaxActive(100);
// Minimum number of idle connections kept in the pool
druidPlugin.setMinIdle(50);
// Number of physical connections created at initialization (default 0)
druidPlugin.setInitialSize(1);
// Maximum wait time for a connection, in milliseconds. Once maxWait is set, a fair lock is used by default, which lowers throughput under concurrency; set useUnfairLock=true if an unfair lock is preferred.
druidPlugin.setMaxWait(60000);
// Interval of the eviction thread; a physical connection idle for at least minEvictableIdleTimeMillis is closed.
druidPlugin.setTimeBetweenEvictionRunsMillis(60000);
// Minimum time a connection may stay idle before it becomes eligible for eviction
druidPlugin.setMinEvictableIdleTimeMillis(300000);
// Recommended true: no performance cost and safer. When a connection is borrowed and has been idle longer than timeBetweenEvictionRunsMillis, validationQuery is run to check that it is still valid.
druidPlugin.setTestWhileIdle(true);
// Run validationQuery every time a connection is borrowed; this lowers performance. Default is true.
druidPlugin.setTestOnBorrow(true);
// Run validationQuery every time a connection is returned; this lowers performance. Default is true.
druidPlugin.setTestOnReturn(true);
// Monitoring filter (optional)
//druidPlugin.addFilter(new StatFilter());
return druidPlugin;
}
public static void main(String[] args) {
String masterUrl = "jdbc:postgresql://10.10.14.63:15400/test_db";
String passWord = "DsideaL147258369";
String slaveUrl = "jdbc:postgresql://10.10.14.61:15400,10.10.14.62:15400/test_db?loadBalanceHosts=true&targetServerType=preferSlave";
String driverClass = "org.postgresql.Driver";
String userName = "postgres";
DruidPlugin druidMaster = createDruidPlugin(masterUrl, userName, passWord, driverClass);
druidMaster.start();
DruidPlugin druidSlave = createDruidPlugin(slaveUrl, userName, passWord, driverClass);
druidSlave.start();
ActiveRecordPlugin arpMaster = new ActiveRecordPlugin(MASTER, druidMaster);
arpMaster.setDevMode(false);
arpMaster.setDialect(new PostgreSqlDialect());
arpMaster.start();
ActiveRecordPlugin arpSlave = new ActiveRecordPlugin(SLAVE, druidSlave);
arpSlave.setDevMode(false);
arpSlave.setDialect(new PostgreSqlDialect());
arpSlave.start();
String sql = "truncate table test";
Db.use(MASTER).update(sql);
long start = System.currentTimeMillis();
List<String> list = new ArrayList<>();
for (int i = 1; i <= 1000; i++) {
sql = "insert into test(id,txt) values(" + i + ",'黄海')";
list.add(sql);
}
Db.use(MASTER).batch(list, 500);
long end = System.currentTimeMillis();
long elapsed = end - start;
System.out.println("Elapsed time: " + elapsed + " ms");
sql = "select * from test where id=?";
List<Record> list2 = Db.use(SLAVE).find(sql, 100000);
System.out.println(list2);
}
}
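These read/write-splitting tests assume a test(id, txt) table already exists in test_db on the primary (and, through replication, on the standbys). A minimal setup sketch using the same JFinal Db API; the column types and sizes are assumptions, not taken from the repository:

// Hypothetical one-time setup for the tests above; call it after arpMaster.start().
public static void createTestTable() {
    String ddl = "create table if not exists test(id integer primary key, txt varchar(200))";
    Db.use(MASTER).update(ddl);
}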

@ -1,181 +0,0 @@
package UnitTest;
import com.alibaba.fastjson.JSONObject;
import com.jfinal.plugin.activerecord.ActiveRecordPlugin;
import com.jfinal.plugin.activerecord.Db;
import com.jfinal.plugin.activerecord.Record;
import com.jfinal.plugin.activerecord.dialect.PostgreSqlDialect;
import com.jfinal.plugin.druid.DruidPlugin;
import com.jfinal.plugin.redis.Redis;
import com.jfinal.plugin.redis.RedisPlugin;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.postgresql.PGProperty;
import org.postgresql.jdbc.PgConnection;
import org.postgresql.replication.LogSequenceNumber;
import org.postgresql.replication.PGReplicationStream;
import java.nio.ByteBuffer;
import java.sql.DriverManager;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
public class OpenGaussReplicationToKafka {
public static String SOURCE_URL = "jdbc:postgresql://10.10.14.61:15400/test_db";
public static String USER = "postgres";
public static String PASSWD = "DsideaL147258369";
public static String DRIVER_CLASS = "org.postgresql.Driver";
public static String TOPIC = "pg_test"; // Kafka topic name
//KAFKA
public static final String BROKERS_ADDRESS = "10.10.14.67:9092";
//REDIS
public static final String REDIS_HOST = "10.10.14.199";
public static final int REDIS_PORT = 18890;
public static final int EXPIRE_SECONDS = 3600 * 24 * 3;
// Replication slot name
public static final String slotName = "slot_huanghai";
public static void Init() {
// Source database
DruidPlugin druid = new DruidPlugin(SOURCE_URL, USER, PASSWD, DRIVER_CLASS);
druid.start();
ActiveRecordPlugin arp = new ActiveRecordPlugin(druid);
arp.setDialect(new PostgreSqlDialect());
arp.start();
// Redis service used by the cache module
RedisPlugin redis = new RedisPlugin("myRedis", REDIS_HOST, REDIS_PORT, 10 * 1000);
redis.start();
}
public static void CreateSlot(String slotName) {
String sql = "select * from pg_create_logical_replication_slot(?, 'mppdb_decoding')";
Db.find(sql, slotName);
}
public static void DeleteSlot(String slotName) {
try {
String sql = "select pg_drop_replication_slot(?)";
Db.find(sql, slotName);
} catch (Exception err) {
System.out.println(err);
}
}
public static void ListSlot() {
String sql = "select * from pg_replication_slots";
List<Record> list = Db.find(sql);
System.out.println(list);
}
public static String GetRestartLsn(String slotName) {
String sql = "select restart_lsn from pg_replication_slots where slot_name=?";
List<Record> list = Db.find(sql, slotName);
if (list.size() > 0) return list.get(0).getStr("restart_lsn");
return null;
}
public static void InsertTestData() {
String sql = "select max(id) as maxid from test";
int maxId = 0;
Record record = Db.findFirst(sql);
if (record.get("maxid") != null) maxId = record.getInt("maxid");
maxId = maxId + 1;
sql = "insert into test(id,txt) values(?,?)";
Db.update(sql, maxId, "黄海的测试数据:" + maxId);
}
public static void TruncateTable() {
String sql = "truncate table test";
Db.update(sql);
}
public static void main(String[] args) throws Exception {
// Initialize the database connection
Init();
// Drop the slot
// DeleteSlot(slotName);
// Create the slot
// CreateSlot(slotName);
// List existing slots
// ListSlot();
// TruncateTable();
// Insert test data
// InsertTestData();
// Get the last confirmed read position (restart LSN)
String lsn = GetRestartLsn(slotName);
Properties props = new Properties();
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BROKERS_ADDRESS);
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(props);
Properties properties = new Properties();
PGProperty.USER.set(properties, USER);
PGProperty.PASSWORD.set(properties, PASSWD);
PGProperty.ASSUME_MIN_SERVER_VERSION.set(properties, "9.4");
PGProperty.REPLICATION.set(properties, "database");
PGProperty.PREFER_QUERY_MODE.set(properties, "simple");
Class.forName(DRIVER_CLASS);
PgConnection conn = (PgConnection) DriverManager.getConnection(SOURCE_URL, properties);
System.out.println("connection success!");
LogSequenceNumber waitLSN = LogSequenceNumber.valueOf(lsn);
PGReplicationStream stream = conn
.getReplicationAPI()
.replicationStream()
.logical()
.withSlotName(slotName)
.withStatusInterval(10, TimeUnit.SECONDS)
.withSlotOption("include-xids", false)
.withSlotOption("skip-empty-xacts", true)
.withStartPosition(waitLSN)
.start();
System.out.println("本轮LSN起始位置" + lsn);
try {
while (true) {
ByteBuffer byteBuffer = stream.readPending();
if (byteBuffer == null) {
TimeUnit.MILLISECONDS.sleep(10L);
continue;
}
LogSequenceNumber nowLsn = stream.getLastReceiveLSN();
String key = "LSN_" + nowLsn.asLong();
if (Redis.use().exists(key)) {
System.out.println("发现已经处理完成的LSN=" + key + ",将放过此记录不再加入kafka!");
stream.setAppliedLSN(nowLsn);
stream.setFlushedLSN(nowLsn);
continue;
}
int offset = byteBuffer.arrayOffset();
byte[] source = byteBuffer.array();
int length = source.length - offset;
String res = new String(source, offset, length);
if (res.equals("BEGIN")) continue;
if (res.startsWith("COMMIT")) continue;
JSONObject jo = JSONObject.parseObject(res);
jo.put("LSN", key);
res = jo.toString();
ProducerRecord<String, String> record = new ProducerRecord<>(TOPIC, res);
kafkaProducer.send(record);
System.out.println("send ok ==> " + res);
stream.setAppliedLSN(nowLsn);
stream.setFlushedLSN(nowLsn);
stream.forceUpdateStatus();
//write to redis
Redis.use().setex(key, EXPIRE_SECONDS, 1);
}
} catch (Exception err) {
System.out.println(err);
}
}
}
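To check what actually lands on the pg_test topic, the producer above can be paired with a plain Kafka consumer. A minimal sketch, assuming the broker at 10.10.14.67:9092 is reachable; the class name and consumer group are made up for the example:

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
public class PgTestTopicTail {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "10.10.14.67:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "pg_test_check"); // hypothetical consumer group
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList("pg_test"));
            while (true) {
                // Each record value is the JSON built above, with the extra "LSN" field.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(500));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.value());
                }
            }
        }
    }
}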

@ -1,70 +0,0 @@
package UnitTest;
import com.jfinal.plugin.activerecord.ActiveRecordPlugin;
import com.jfinal.plugin.activerecord.Db;
import com.jfinal.plugin.activerecord.dialect.PostgreSqlDialect;
import com.jfinal.plugin.druid.DruidPlugin;
import java.util.ArrayList;
import java.util.List;
import static com.dsideal.QingLong.Const.DbConst.MASTER;
public class OpenGaussWriteBatch {
public static DruidPlugin createDruidPlugin(String url, String username, String password, String driverClass) {
DruidPlugin druidPlugin = new DruidPlugin(url, username, password, driverClass);
// Maximum number of active connections in the pool
druidPlugin.setMaxActive(100);
// Minimum number of idle connections kept in the pool
druidPlugin.setMinIdle(50);
// Number of physical connections created at initialization (default 0)
druidPlugin.setInitialSize(1);
// Maximum wait time for a connection, in milliseconds. Once maxWait is set, a fair lock is used by default, which lowers throughput under concurrency; set useUnfairLock=true if an unfair lock is preferred.
druidPlugin.setMaxWait(60000);
// Interval of the eviction thread; a physical connection idle for at least minEvictableIdleTimeMillis is closed.
druidPlugin.setTimeBetweenEvictionRunsMillis(60000);
// Minimum time a connection may stay idle before it becomes eligible for eviction
druidPlugin.setMinEvictableIdleTimeMillis(300000);
// Recommended true: no performance cost and safer. When a connection is borrowed and has been idle longer than timeBetweenEvictionRunsMillis, validationQuery is run to check that it is still valid.
druidPlugin.setTestWhileIdle(true);
// Run validationQuery every time a connection is borrowed; this lowers performance. Default is true.
druidPlugin.setTestOnBorrow(true);
// Run validationQuery every time a connection is returned; this lowers performance. Default is true.
druidPlugin.setTestOnReturn(true);
return druidPlugin;
}
public static void main(String[] args) {
String masterUrl = "jdbc:postgresql://10.10.14.63:15400/test_db";
String passWord = "DsideaL147258369";
String driverClass = "org.postgresql.Driver";
String userName = "postgres";
DruidPlugin druidMaster = createDruidPlugin(masterUrl, userName, passWord, driverClass);
druidMaster.start();
ActiveRecordPlugin arpMaster = new ActiveRecordPlugin(MASTER, druidMaster);
arpMaster.setDevMode(false);
arpMaster.setDialect(new PostgreSqlDialect());
arpMaster.start();
String sql = "truncate table test";
Db.update(sql);
long start = System.currentTimeMillis();
List<String> list=new ArrayList<>();
for (int i = 1; i <= 100000; i++) {
sql = "insert into test(id,txt) values("+i+",'黄海')";
list.add(sql);
}
Db.batch(list,500);
long end = System.currentTimeMillis();
long elapsed = end - start;
System.out.println("程序耗时:" + elapsed + " 毫秒");
}
}

@ -1,75 +0,0 @@
package UnitTest;
import java.sql.*;
public class OpenGaussWriteShiWu_JDBC {
// JDBC connection parameters
private static final String DB_DRIVER = "org.postgresql.Driver";
private static final String DB_CONNECTION = "jdbc:postgresql://10.10.14.63:15400/test_db";
private static final String DB_USER = "postgres";
private static final String DB_PASSWORD = "DsideaL147258369";
// Batch-insert data
public static void batchInsertData() {
// JDBC connection
Connection conn = null;
// JDBC statement
Statement stmt = null;
try {
// Load the JDBC driver
Class.forName(DB_DRIVER);
// Open the connection
conn = DriverManager.getConnection(DB_CONNECTION, DB_USER, DB_PASSWORD);
// Disable auto-commit so each batch is committed explicitly
conn.setAutoCommit(false);
// Create a statement (only closed in finally; the inserts below use a PreparedStatement)
stmt = conn.createStatement();
// SQL for the batch insert
String sql = "INSERT INTO test(id, txt) VALUES (?, ?)";
// Prepare the statement and add the rows batch by batch
long start = System.currentTimeMillis();
int cnt = 0;
for (int k = 1; k <= 10; k++) {
PreparedStatement pstmt = conn.prepareStatement(sql);
for (int i = 1; i <= 10000; i++) {
cnt++;
pstmt.setInt(1, cnt);
pstmt.setString(2, "黄海_" + i);
pstmt.addBatch();
}
// Execute the batch
pstmt.executeBatch();
// Commit this batch
conn.commit();
pstmt.close();
}
long end = System.currentTimeMillis();
long elapsed = end - start;
System.out.println("Elapsed time: " + elapsed + " ms");
} catch (SQLException | ClassNotFoundException e) {
e.printStackTrace();
try {
if (conn != null) {
conn.rollback();
}
} catch (SQLException ex) {
ex.printStackTrace();
}
} finally {
try {
if (stmt != null) {
stmt.close();
}
if (conn != null) {
conn.close();
}
} catch (SQLException ex) {
ex.printStackTrace();
}
}
}
public static void main(String[] args) {
batchInsertData();
}
}
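The same batch insert can also be written with try-with-resources, so the connection and prepared statement are always closed even when an exception is thrown. A sketch of that variant, reusing the connection constants defined above:

// Hypothetical variant of batchInsertData() using try-with-resources.
public static void batchInsertWithResources() throws Exception {
    Class.forName(DB_DRIVER);
    String sql = "INSERT INTO test(id, txt) VALUES (?, ?)";
    try (Connection conn = DriverManager.getConnection(DB_CONNECTION, DB_USER, DB_PASSWORD);
         PreparedStatement pstmt = conn.prepareStatement(sql)) {
        conn.setAutoCommit(false);
        int cnt = 0;
        for (int k = 1; k <= 10; k++) {
            for (int i = 1; i <= 10000; i++) {
                cnt++;
                pstmt.setInt(1, cnt);
                pstmt.setString(2, "黄海_" + i);
                pstmt.addBatch();
            }
            // Execute and commit one batch of 10000 rows.
            pstmt.executeBatch();
            conn.commit();
        }
    }
}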

@ -1,93 +0,0 @@
package UnitTest;
import com.jfinal.plugin.activerecord.ActiveRecordPlugin;
import com.jfinal.plugin.activerecord.Db;
import com.jfinal.plugin.activerecord.DbKit;
import com.jfinal.plugin.activerecord.dialect.PostgreSqlDialect;
import com.jfinal.plugin.druid.DruidPlugin;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import static com.dsideal.QingLong.Const.DbConst.MASTER;
public class OpenGaussWriteShiWu_Jfinal {
public static DruidPlugin createDruidPlugin(String url, String username, String password, String driverClass) {
DruidPlugin druidPlugin = new DruidPlugin(url, username, password, driverClass);
// Maximum number of active connections in the pool
druidPlugin.setMaxActive(100);
// Minimum number of idle connections kept in the pool
druidPlugin.setMinIdle(50);
// Number of physical connections created at initialization (default 0)
druidPlugin.setInitialSize(1);
// Maximum wait time for a connection, in milliseconds. Once maxWait is set, a fair lock is used by default, which lowers throughput under concurrency; set useUnfairLock=true if an unfair lock is preferred.
druidPlugin.setMaxWait(60000);
// Interval of the eviction thread; a physical connection idle for at least minEvictableIdleTimeMillis is closed.
druidPlugin.setTimeBetweenEvictionRunsMillis(60000);
// Minimum time a connection may stay idle before it becomes eligible for eviction
druidPlugin.setMinEvictableIdleTimeMillis(300000);
// Recommended true: no performance cost and safer. When a connection is borrowed and has been idle longer than timeBetweenEvictionRunsMillis, validationQuery is run to check that it is still valid.
druidPlugin.setTestWhileIdle(true);
// Run validationQuery every time a connection is borrowed; this lowers performance. Default is true.
druidPlugin.setTestOnBorrow(true);
// Run validationQuery every time a connection is returned; this lowers performance. Default is true.
druidPlugin.setTestOnReturn(true);
return druidPlugin;
}
public static void main(String[] args) throws SQLException {
String masterUrl = "jdbc:postgresql://10.10.14.63:15400/test_db";
String passWord = "DsideaL147258369";
String driverClass = "org.postgresql.Driver";
String userName = "postgres";
DruidPlugin druidMaster = createDruidPlugin(masterUrl, userName, passWord, driverClass);
druidMaster.start();
ActiveRecordPlugin arpMaster = new ActiveRecordPlugin(MASTER, druidMaster);
arpMaster.setDevMode(false);
arpMaster.setDialect(new PostgreSqlDialect());
arpMaster.start();
String truncateSql = "truncate table test";
Db.use(MASTER).update(truncateSql);
// Record the start time
long start = System.currentTimeMillis();
// Take a raw connection from the pool registered under MASTER
Connection conn = null;
try {
conn = DbKit.getConfig(MASTER).getDataSource().getConnection();
conn.setAutoCommit(false);
String sql = "INSERT INTO test(id, txt) VALUES (?, ?)";
PreparedStatement pstmt = conn.prepareStatement(sql);
for (int i = 1; i <= 100000; i++) {
pstmt.setInt(1, i);
pstmt.setString(2, "黄海_" + i);
pstmt.addBatch();
}
// Execute the batch
pstmt.executeBatch();
conn.commit();
pstmt.close();
System.out.println("Transaction committed");
} catch (Exception e) {
System.err.println("Transaction failed");
try {
if (conn != null) {
conn.rollback();
}
} catch (SQLException eSql) {
System.err.println("Connection rollback failed");
}
} finally {
if (null != conn) {
conn.close();
}
}
long end = System.currentTimeMillis();
long elapsed = end - start;
System.out.println("程序耗时:" + elapsed + " 毫秒");
}
}

@ -1,52 +0,0 @@
package UnitTest;
import com.dsideal.QingLong.Util.CommonUtil;
import java.util.Arrays;
public class TestC {
// LeetCode 1031: maximum sum of two non-overlapping subarrays with lengths firstLen and secondLen.
public static int maxSumTwoNoOverlap(int[] nums, int firstLen, int secondLen) {
int n = nums.length;
int[] prefixSum = new int[n + 1];
for (int i = 1; i <= n; i++) {
prefixSum[i] = prefixSum[i - 1] + nums[i - 1];
}
// Try both orders: the firstLen block before the secondLen block, and the reverse.
return Math.max(maxSumOrdered(prefixSum, firstLen, secondLen),
maxSumOrdered(prefixSum, secondLen, firstLen));
}
// Best total when a block of length aLen ends no later than the start of a block of length bLen.
private static int maxSumOrdered(int[] prefixSum, int aLen, int bLen) {
int n = prefixSum.length - 1;
int best = 0, bestA = 0;
for (int j = aLen + bLen; j <= n; j++) {
// Best aLen block ending at or before prefix index j - bLen.
bestA = Math.max(bestA, prefixSum[j - bLen] - prefixSum[j - bLen - aLen]);
best = Math.max(best, bestA + prefixSum[j] - prefixSum[j - bLen]);
}
return best;
}
public static void main(String[] args) {
int[] nums1 = { 0, 6, 5, 2, 2, 5, 1, 9, 4 };
int firstLen1 = 1;
int secondLen1 = 2;
System.out.println(maxSumTwoNoOverlap(nums1, firstLen1, secondLen1)); // expect 20
}
}

@ -1,105 +0,0 @@
package UnitTest;
import com.aspose.words.*;
import com.aspose.words.Font;
import java.awt.*;
public class TestDocx {
public static void main(String[] args) throws Exception {
// Create a new document
Document doc = new Document();
DocumentBuilder builder = new DocumentBuilder(doc);
Font font = builder.getFont();
font.setSize(20);
// Bold
font.setBold(true);
// Font name
font.setName("黑体");
// Move to the end of the document before inserting text
builder.moveToDocumentEnd();
// Center the paragraph
builder.getParagraphFormat().setAlignment(ParagraphAlignment.CENTER);
// Write the centered title
builder.write("XXX评审会议专家抽取结果");
Table table = builder.startTable();
builder.insertCell();
table.setLeftIndent(3.0); // left indent
// Row height
builder.getRowFormat().setHeight(20.0);
builder.getRowFormat().setHeightRule(HeightRule.AT_LEAST);
// Header row background color
builder.getCellFormat().getShading().setBackgroundPatternColor(new Color((198), (217), (241)));
builder.getParagraphFormat().setAlignment(ParagraphAlignment.CENTER);
builder.getFont().setSize(14.0);
builder.getFont().setName("微软雅黑");
builder.getFont().setBold(true);
builder.getCellFormat().setWidth(100.0);
builder.write("序号");
builder.insertCell();
builder.getCellFormat().setWidth(200.0);
builder.write("类型");
builder.insertCell();
builder.getCellFormat().setWidth(200.0);
builder.write("单位");
builder.insertCell();
builder.getCellFormat().setWidth(200.0);
builder.write("姓名");
builder.endRow();
builder.getCellFormat().getShading().setBackgroundPatternColor(Color.WHITE);
builder.getCellFormat().setWidth(100.0);
builder.getCellFormat().setVerticalAlignment(CellVerticalAlignment.CENTER);
builder.getRowFormat().setHeight(30.0);
builder.getRowFormat().setHeightRule(HeightRule.AUTO);
builder.getFont().setSize(11.0);
builder.getFont().setName("微软雅黑");
builder.getFont().setBold(false);
for (int i = 1; i <= 5; i++) {
builder.insertCell();
builder.getCellFormat().setWidth(100.0);
builder.write("Row 1, Cell 1 Content");
builder.insertCell();
builder.getCellFormat().setWidth(200.0);
builder.write("Row 1, Cell 2 Content");
builder.insertCell();
builder.getCellFormat().setWidth(200.0);
builder.write("Row 1, Cell 3 Content");
builder.insertCell();
builder.getCellFormat().setWidth(200.0);
builder.write("Row 1, Cell 4 Content");
builder.endRow();
}
builder.endTable();
// Iterate over all sections and set the page margins
for (Section sect : doc.getSections()) {
// Page setup for this section
PageSetup pageSetup = sect.getPageSetup();
// Set the margins (values are in points; adjust as needed)
pageSetup.setLeftMargin(50);
pageSetup.setRightMargin(50);
pageSetup.setTopMargin(50);
pageSetup.setBottomMargin(50);
}
// Save the document
doc.save("d:/table.docx");
}
}

@ -1,32 +0,0 @@
package UnitTest;
import com.jfinal.plugin.activerecord.ActiveRecordPlugin;
import com.jfinal.plugin.activerecord.Db;
import com.jfinal.plugin.activerecord.Record;
import com.jfinal.plugin.activerecord.dialect.PostgreSqlDialect;
import com.jfinal.plugin.hikaricp.HikariCpPlugin;
import org.apache.poi.ss.usermodel.*;
import org.apache.poi.xssf.streaming.SXSSFWorkbook;
import org.apache.poi.xssf.usermodel.DefaultIndexedColorMap;
import org.apache.poi.xssf.usermodel.XSSFColor;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class TestExportExcel {
public static void main(String[] args) throws IOException {
// Source database
HikariCpPlugin hp = new HikariCpPlugin("jdbc:postgresql://10.10.14.71:5432/szjz_db", "postgres",
"DsideaL147258369", "org.postgresql.Driver");
hp.start();
ActiveRecordPlugin arp = new ActiveRecordPlugin(hp);
arp.setDialect(new PostgreSqlDialect());
arp.start();
}
}
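TestExportExcel only brings the data source up; the unused POI imports suggest the export itself was still to be written. A hedged sketch of how main might continue after arp.start() — the SQL, sheet name, column and output path are assumptions, not taken from the repository:

// Hypothetical continuation: stream query results into an .xlsx file with SXSSFWorkbook.
List<Record> rows = Db.find("select * from t_base_organization limit 1000"); // assumed query
try (SXSSFWorkbook wb = new SXSSFWorkbook(100); // keep 100 rows in memory, flush the rest to disk
     FileOutputStream fos = new FileOutputStream("d:/export.xlsx")) {        // assumed output path
    Sheet sheet = wb.createSheet("export");
    for (int i = 0; i < rows.size(); i++) {
        Row row = sheet.createRow(i);
        row.createCell(0).setCellValue(rows.get(i).getStr("org_name")); // assumed column
    }
    wb.write(fos);
    wb.dispose(); // clean up the temporary files SXSSF wrote to disk
}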

@ -1,11 +0,0 @@
package UnitTest;
import cn.hutool.core.io.FileUtil;
public class TestGrid {
public static void main(String[] args) {
String path = "D:\\dsWork\\QingLong\\src\\main\\java\\UnitTest\\TestGrid.json";
String content = FileUtil.readUtf8String(path);
}
}

@ -1,96 +0,0 @@
[
{
"id": "input_6",
"index": 0,
"label": "单行文本",
"tag": "input",
"tagIcon": "input",
"placeholder": "请输入",
"defaultValue": null,
"labelWidth": 110,
"width": "100%",
"clearable": true,
"maxlength": null,
"showWordLimit": false,
"readonly": false,
"disabled": false,
"required": true,
"expression": "",
"document": ""
},
{
"id": "grid_8",
"index": 1,
"tag": "grid",
"span": 2,
"columns": [
{
"span": 12,
"list": [
{
"id": "input_9",
"index": 0,
"label": "单行文本",
"tag": "input",
"tagIcon": "input",
"placeholder": "请输入",
"defaultValue": null,
"labelWidth": 110,
"width": "100%",
"clearable": true,
"maxlength": null,
"showWordLimit": false,
"readonly": false,
"disabled": false,
"required": true,
"expression": "",
"document": ""
},
{
"id": "input_11",
"index": 1,
"label": "单行文本",
"tag": "input",
"tagIcon": "input",
"placeholder": "请输入",
"defaultValue": null,
"labelWidth": 110,
"width": "100%",
"clearable": true,
"maxlength": null,
"showWordLimit": false,
"readonly": false,
"disabled": false,
"required": true,
"expression": "",
"document": ""
}
]
},
{
"span": 12,
"list": [
{
"id": "input_10",
"index": 0,
"label": "单行文本",
"tag": "input",
"tagIcon": "input",
"placeholder": "请输入",
"defaultValue": null,
"labelWidth": 110,
"width": "100%",
"clearable": true,
"maxlength": null,
"showWordLimit": false,
"readonly": false,
"disabled": false,
"required": true,
"expression": "",
"document": ""
}
]
}
]
}
]

@ -1,12 +0,0 @@
<div>
实验室建设年份: <input type="text" name="xx_kxsysjsnf1" value="2024"> 实验室面积: <input type="text" name="xx_kxsysmj1" value="200">
</div>
<div>
实验室建设年份: <input type="text" name="xx_kxsysjsnf2" value="2021"> 实验室面积: <input type="text" name="xx_kxsysmj2" value="140">
</div>
<div>
实验室建设年份: <input type="text" name="xx_kxsysjsnf3" value="2000"> 实验室面积: <input type="text" name="xx_kxsysmj3" value="230">
</div>

@ -1,60 +0,0 @@
package UnitTest;
import cn.hutool.core.io.FileUtil;
import com.jfinal.kit.Kv;
import com.jfinal.kit.StrKit;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import com.jfinal.plugin.activerecord.Record;
import java.util.HashMap;
import java.util.Map;
import org.jsoup.select.Elements;
public class TestHtml {
public static Record CkSave(String html) {
Document doc = Jsoup.parseBodyFragment(html);
Element body = doc.body();
Elements elements = body.getAllElements().getFirst().getElementsByTag("input");
for (int i = 0; i < elements.size(); i += 3) {
/*
Example input names and their values as parsed below:
xx_ldjy_jsfl
xx_ldjy_js               666
xx_ldjy_jsfl_VOtzywtcmC  BBB
xx_ldjy_js_VOtzywtcmC    999
*/
Element input = elements.get(i);
if (input.attr("value").trim().endsWith("其他")) {
Element input2 = elements.get(i + 1);
String v2 = input2.attr("value");
Element input3 = elements.get(i + 2);
String v3 = input3.attr("value");
System.out.println("lx_name=" + v2 + ",js=" + v3);
} else {
Element input3 = elements.get(i + 2);
String v3 = input3.attr("value");
System.out.println("lx_name=" + input.attr("value").trim() + ",js=" + v3);
}
}
return null;
}
public static void main(String[] args) {
// Start parsing
String html = FileUtil.readUtf8String("C:\\Users\\Administrator\\Desktop\\Html\\劳动.txt");
CkSave(html);
}
}

@ -1,59 +0,0 @@
package UnitTest;
import com.dsideal.QingLong.Start;
import com.jfinal.kit.Kv;
import com.jfinal.kit.PropKit;
import com.jfinal.plugin.activerecord.ActiveRecordPlugin;
import com.jfinal.plugin.activerecord.CaseInsensitiveContainerFactory;
import com.jfinal.plugin.activerecord.Db;
import com.jfinal.plugin.activerecord.SqlPara;
import com.jfinal.plugin.activerecord.Record;
import com.jfinal.plugin.activerecord.dialect.PostgreSqlDialect;
import com.jfinal.plugin.hikaricp.HikariCpPlugin;
import java.io.File;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
public class TestIn {
public static void main(String[] args) throws URISyntaxException {
// Point PropKit at the configuration file
PropKit.use("application.properties");
HikariCpPlugin hp = new HikariCpPlugin(PropKit.get("jdbcUrl"), PropKit.get("user"),
PropKit.get("password").trim(), PropKit.get("driverClassName"));
hp.start();
// Configure the ActiveRecord plugin
ActiveRecordPlugin arp = new ActiveRecordPlugin(hp);
// Return column names in lower case by default
arp.setContainerFactory(new CaseInsensitiveContainerFactory(true));
arp.setDialect(new PostgreSqlDialect());
// Load every SQL template file under the Sql directory
File sqlDir;
String basePath = Start.class.getClassLoader().getResource(".").getPath();
sqlDir = new File(basePath + "/Sql");
File[] sqlFiles = sqlDir.listFiles();
for (File sqlFile : sqlFiles != null ? sqlFiles : new File[0]) {
// Only load .sql files
if (sqlFile.getName().indexOf(".sql") > 0) {
arp.addSqlTemplate("/Sql/" + sqlFile.getName());
}
}
arp.start();
// JFinal's "in" query support via SQL templates makes this straightforward
List<String> idList = new ArrayList<>();
idList.add("A36E0132-625C-4C43-A1B9-8312DD8CCF07");
idList.add("11716B1E-CD95-4E36-9857-296ABE6E4B96");
Kv kv = Kv.by("idList", idList);
SqlPara sqlPara = Db.getSqlPara("Test.huanghai_in", kv);
List<Record> list = Db.find(sqlPara);
System.out.println(list);
kv = Kv.by("org_name", "第八中学");
sqlPara = Db.getSqlPara("Test.huanghai_like", kv);
list = Db.find(sqlPara);
System.out.println(list.get(0).getStr("org_name"));
}
}

@ -1,44 +0,0 @@
package UnitTest;
import com.dsideal.QingLong.Util.RedisKit;
import com.jfinal.kit.PropKit;
import com.jfinal.plugin.redis.RedisPlugin;
import redis.clients.jedis.resps.Tuple;
import java.util.List;
public class TestJedisRangeScore {
public static void main(String[] args) throws InterruptedException {
PropKit.use("application.properties");
// Redis service used by the cache module
RedisPlugin redis = new RedisPlugin("myRedis", PropKit.get("redis_ip"), PropKit.getInt("redis_port"), 10 * 1000);
redis.start();
String scoreSet = "my_set";
// Add members with their scores to the sorted set
long ts = System.currentTimeMillis() / 1000; // current timestamp in seconds
long ots = ts - 2; // simulate members added 2 seconds ago
RedisKit.zadd(scoreSet, ots, "member1");
RedisKit.zadd(scoreSet, ots, "member2");
// members added right now
RedisKit.zadd(scoreSet, ts, "member3");
RedisKit.zadd(scoreSet, ts, "member4");
List<Tuple> members = RedisKit.zrangeByScoreWithScores(scoreSet, "-inf", String.valueOf(ts));
int validCount = 0;
for (Tuple member : members) {
double score = member.getScore();
String memberValue = member.getElement();
if (score < ts - 1) { // entries older than one second count as expired and are cleaned up
RedisKit.zrem(scoreSet, memberValue); // remove the expired member
} else {
System.out.println("Valid member: " + memberValue); // print the valid member
validCount++;
}
}
System.out.println("Valid member count: " + validCount); // print the number of valid members
}
}

@ -1,30 +0,0 @@
package UnitTest;
import com.dsideal.QingLong.Util.RedisKit;
import com.jfinal.kit.PropKit;
import com.jfinal.plugin.redis.RedisPlugin;
import java.util.Set;
public class TestJedisSet {
public static void main(String[] args) {
PropKit.use("application.properties");
// Redis service used by the cache module
RedisPlugin redis = new RedisPlugin("myRedis", PropKit.get("redis_ip"), PropKit.getInt("redis_port"), 10 * 1000);
redis.start();
String SYS_JRXT="SYS_JRXT";
RedisKit.SAdd(SYS_JRXT,"a");
RedisKit.SAdd(SYS_JRXT,"b");
RedisKit.SAdd(SYS_JRXT,"c");
Set<String> s=RedisKit.SMembers(SYS_JRXT);
for (String string : s) {
System.out.println(string);
}
System.out.println("===================================");
RedisKit.SRem(SYS_JRXT,"b");
s=RedisKit.SMembers(SYS_JRXT);
for (String string : s) {
System.out.println(string);
}
}
}

@ -1,39 +0,0 @@
package UnitTest;
import com.jfinal.plugin.activerecord.ActiveRecordPlugin;
import com.jfinal.plugin.activerecord.Db;
import com.jfinal.plugin.activerecord.Record;
import com.jfinal.plugin.activerecord.dialect.PostgreSqlDialect;
import com.jfinal.plugin.druid.DruidPlugin;
import java.util.ArrayList;
import java.util.List;
public class TestOpenGauss {
public static void main(String[] args) {
// Source database
DruidPlugin druid = new DruidPlugin("jdbc:postgresql://10.10.14.62:5432/ccdjzswd_db", "dsideal",
"DsideaL147258369", "org.postgresql.Driver");
druid.start();
ActiveRecordPlugin arp = new ActiveRecordPlugin(druid);
arp.setDialect(new PostgreSqlDialect());
arp.start();
String sql = "truncate table t1";
Db.update(sql);
List<Record> aList = new ArrayList<>();
for (int i = 1; i <= 1000; i++) {
Record record = new Record();
record.set("id", i);
record.set("name", "黄海" + i);
aList.add(record);
}
Db.batchSave("t1", aList, 100);
sql = "select * from t1";
List<Record> list = Db.find(sql);
System.out.println(list);
}
}

@ -1,33 +0,0 @@
package UnitTest;
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
import redis.clients.jedis.HostAndPort;
import redis.clients.jedis.JedisCluster;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
public class TestRedisCluster {
public static void main(String[] args) throws IOException {
// Nodes of the cluster
Set<HostAndPort> nodes = new HashSet<>();
nodes.add(new HostAndPort("10.10.14.61", 8890));
nodes.add(new HostAndPort("10.10.14.61", 8891));
nodes.add(new HostAndPort("10.10.14.61", 8892));
nodes.add(new HostAndPort("10.10.14.61", 8893));
nodes.add(new HostAndPort("10.10.14.61", 8894));
nodes.add(new HostAndPort("10.10.14.61", 8895));
// Redis pool configuration
int maxTotal = Integer.parseInt("100");
int maxIdle = Integer.parseInt("100");
int minIdle = Integer.parseInt("100");
GenericObjectPoolConfig poolConfig = new GenericObjectPoolConfig();
poolConfig.setMaxTotal(maxTotal);
poolConfig.setMaxIdle(maxIdle);
poolConfig.setMinIdle(minIdle);
// Build the cluster connection pool from the nodes and poolConfig; jedisCluster then supports the usual Jedis operations
JedisCluster jedisCluster = new JedisCluster(nodes, poolConfig);
System.out.println(jedisCluster.set("name", "beyond"));
System.out.println(jedisCluster.get("name"));
// Release the connections
jedisCluster.close();
}
}

@ -1,20 +1,19 @@
package UnitTest;
import com.dsideal.QingLong.Const.DbConst;
import com.jfinal.plugin.activerecord.ActiveRecordPlugin;
import com.jfinal.plugin.activerecord.Db;
import com.jfinal.plugin.activerecord.Record;
import com.jfinal.plugin.activerecord.dialect.PostgreSqlDialect;
import com.jfinal.plugin.hikaricp.HikariCpPlugin;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.dsideal.QingLong.Const.DbConst.MASTER;
public class ZhuangJaiLingYu {
public static HikariCpPlugin createDruidPlugin(String url, String username, String password, String driverClass) {
@ -38,7 +37,7 @@ public class ZhuangJaiLingYu {
HikariCpPlugin druidMaster = createDruidPlugin(masterUrl, userName, passWord, driverClass);
druidMaster.start();
ActiveRecordPlugin arpMaster = new ActiveRecordPlugin(MASTER, druidMaster);
ActiveRecordPlugin arpMaster = new ActiveRecordPlugin(DbConst.SZJZ, druidMaster);
arpMaster.setDevMode(false);
arpMaster.setDialect(new PostgreSqlDialect());
arpMaster.start();

@ -1,28 +0,0 @@
package UnitTest;
public class test01 {
/* Coin change: greedy */
private static int coinChangeGreedy(int[] coins, int amt) {
// Assumes coins is sorted in ascending order
int i = coins.length - 1;
int count = 0;
// Greedily pick coins until no amount remains
while (amt > 0) {
// Find the largest coin that does not exceed the remaining amount
while (i > 0 && coins[i] > amt) {
i--;
}
// Take coins[i]
amt -= coins[i];
System.out.println(coins[i]);
count++;
}
// Return -1 if no feasible combination was found
return amt == 0 ? count : -1;
}
public static void main(String[] args) {
int[] coins = {1,5,10,20,50,100};
System.out.println(coinChangeGreedy(coins,1257));
}
}
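Worth noting: greedy selection is only guaranteed to be optimal for canonical coin systems such as the {1, 5, 10, 20, 50, 100} set used above; for an arbitrary coin set it can return a far-from-minimal count. A small sketch with a hypothetical coin set that shows the gap:

public class test01GreedyLimit {
    public static void main(String[] args) {
        // For coins {1, 20, 50} and amount 60, greedy picks 50 and then ten 1s (11 coins),
        // while the optimal answer is three 20s (3 coins).
        System.out.println(greedy(new int[]{1, 20, 50}, 60)); // prints 11, not 3
    }
    // Same greedy routine as coinChangeGreedy above, duplicated so the sketch is self-contained.
    private static int greedy(int[] coins, int amt) {
        int i = coins.length - 1;
        int count = 0;
        while (amt > 0) {
            while (i > 0 && coins[i] > amt) {
                i--;
            }
            amt -= coins[i];
            count++;
        }
        return amt == 0 ? count : -1;
    }
}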

@ -1,20 +0,0 @@
package UnitTest;
public class test1 {
public static void main(String[] args) {
MultiThreadTool multiThreadTool = new MultiThreadTool(20);
for (int i = 0; i < 100; i++) {
int taskIndex = i;
multiThreadTool.execute(() -> {
System.out.println("Task " + taskIndex + " is running.");
try {
Thread.sleep(1000); // simulate the task doing work
} catch (InterruptedException e) {
e.printStackTrace();
}
System.out.println("Task " + taskIndex + " is completed.");
});
}
}
}

@ -1,6 +1,6 @@
package com.dsideal.QingLong.Const;
public class DbConst {
public static String MASTER="master";
public static String SLAVE="slave";
public static String SZJZ="SZJZ";
public static String MAXKB="MAXKB";
}

@ -1,23 +1,34 @@
package com.dsideal.QingLong.MaxKB.Controller;
import com.dsideal.QingLong.MaxKB.Model.MaxKbModel;
import com.dsideal.QingLong.Util.CommonUtil;
import com.dsideal.QingLong.Util.SessionKit;
import com.jfinal.aop.Before;
import com.jfinal.core.Controller;
import com.jfinal.kit.Kv;
import com.jfinal.plugin.activerecord.Record;
import com.jfinal.ext.interceptor.GET;
import com.jfinal.kit.StrKit;
public class MaxKbController extends Controller {
MaxKbModel mkm = new MaxKbModel();
/**
*
*/
@Before(GET.class)
public void hasZsk() {
public void getApplication() {
String person_id = SessionKit.get(getRequest(), getResponse(), "person_id");
if (StrKit.isBlank(person_id)) {
renderJson(CommonUtil.returnMessageJson(false, "未登录,无法获取是否完成知识库的创建"));
renderJson(CommonUtil.returnMessageJson(false, "未登录"));
}
renderJson(CommonUtil.returnMessageJson(true, "已登录,并且已完成知识库的创建!"));
Record personApplicationRecord = mkm.getPersonApplication(person_id);
if (personApplicationRecord == null) {
renderJson(CommonUtil.returnMessageJson(false, "未完成应用程序与知识库的创建!"));
}
Kv kv = Kv.by("success", true);
kv.set("message", "已完成应用程序与知识库的创建!");
kv.set("data", personApplicationRecord);
renderJson(kv);
}
}

@ -1,5 +1,6 @@
package com.dsideal.QingLong.MaxKB.Model;
import com.dsideal.QingLong.Const.DbConst;
import com.jfinal.plugin.activerecord.Db;
import com.jfinal.plugin.activerecord.Record;
@ -14,7 +15,7 @@ public class MaxKbModel {
*/
public List<Record> getZskList(String zskName) {
String sql = "select * from dataset where name=?";
return Db.find(sql, zskName);
return Db.use(DbConst.MAXKB).find(sql, zskName);
}
/**
@ -25,7 +26,7 @@ public class MaxKbModel {
*/
public List<Record> getDocumentListByZskId(String zskId) {
String sql = "select * from document where dataset_id=?::uuid order by create_time desc";
return Db.find(sql, zskId);
return Db.use(DbConst.MAXKB).find(sql, zskId);
}
/**
@ -36,15 +37,15 @@ public class MaxKbModel {
public void delDocumentProblem(String documentId) {
//Find the problem ids linked to this document and delete those problems
String sql = "select * from problem_paragraph_mapping where document_id=?::uuid";
List<Record> recordList = Db.find(sql, documentId);
List<Record> recordList = Db.use(DbConst.MAXKB).find(sql, documentId);
for (Record record : recordList) {
String problem_id = record.getStr("problem_id");
sql = "delete from problem where id=?::uuid";
Db.update(sql, problem_id);
Db.use(DbConst.MAXKB).update(sql, problem_id);
}
//With the problems gone, remove the problem-paragraph mappings for the document
sql = "delete from problem_paragraph_mapping where document_id=?::uuid";
Db.update(sql, documentId);
Db.use(DbConst.MAXKB).update(sql, documentId);
}
/**
@ -55,7 +56,7 @@ public class MaxKbModel {
*/
public List<Record> getModel(String modelName) {
String sql = "select * from model where name=?";
return Db.find(sql, modelName);
return Db.use(DbConst.MAXKB).find(sql, modelName);
}
/**
@ -66,7 +67,7 @@ public class MaxKbModel {
*/
public Record getApplication(String applicationName) {
String sql = "select * from application where name=?";
return Db.findFirst(sql, applicationName);
return Db.use(DbConst.MAXKB).findFirst(sql, applicationName);
}
/**
@ -83,7 +84,7 @@ public class MaxKbModel {
System.exit(0);
}
//Clone the record
record = new com.jfinal.plugin.activerecord.Record().setColumns(record.getColumns());
record = new Record().setColumns(record.getColumns());
//Generate a UUID
record.set("id", UUID.randomUUID());
//Name
@ -91,7 +92,7 @@ public class MaxKbModel {
//Description
record.set("desc", "【代码生成】");
//Insert
Db.save("application", "id", record);
Db.use(DbConst.MAXKB).save("application", "id", record);
return record.getStr("id");
}
@ -104,7 +105,7 @@ public class MaxKbModel {
*/
public Record getZsk(String zskName) {
String sql = "select * from dataset where name=?";
return Db.findFirst(sql, zskName);
return Db.use(DbConst.MAXKB).findFirst(sql, zskName);
}
/**
@ -128,7 +129,7 @@ public class MaxKbModel {
//Description
record.set("desc", "【代码生成】");
//Insert
Db.save("dataset", "id", record);
Db.use(DbConst.MAXKB).save("dataset", "id", record);
return record.getStr("id");
}
@ -149,7 +150,7 @@ public class MaxKbModel {
record.set("white_active", false);
record.set("white_list", new String[]{""});
record.set("show_source", false);
Db.save("application_access_token", "application_id", record);
Db.use(DbConst.MAXKB).save("application_access_token", "application_id", record);
return record.getStr("access_token");
}
@ -159,34 +160,34 @@ public class MaxKbModel {
public void delCodeGenerateApplicationZsk() {
//Query all rows whose desc = '【代码生成】'
String sql = "select * from application where \"desc\"='【代码生成】'";
List<Record> list = Db.find(sql);
List<Record> list = Db.use(DbConst.MAXKB).find(sql);
for (Record record : list) {
String application_id = record.getStr("id");
//Remove the application-dataset mappings
sql = "delete from application_dataset_mapping where application_id=?";
Db.update(sql, UUID.fromString(application_id));
Db.use(DbConst.MAXKB).update(sql, UUID.fromString(application_id));
sql = "delete from application_access_token where application_id=?";
Db.update(sql, UUID.fromString(application_id));
Db.use(DbConst.MAXKB).update(sql, UUID.fromString(application_id));
sql = "select * from application_chat where application_id=?";
List<Record> list1 = Db.find(sql, UUID.fromString(application_id));
for (Record record1 : list1) {
String chat_id = record1.getStr("id");
sql = "delete from application_chat_record where chat_id=?";
Db.update(sql, UUID.fromString(chat_id));
Db.use(DbConst.MAXKB).update(sql, UUID.fromString(chat_id));
}
sql = "delete from application_chat where application_id=?";
Db.update(sql, UUID.fromString(application_id));
Db.use(DbConst.MAXKB).update(sql, UUID.fromString(application_id));
sql = "delete from application_api_key where application_id=?";
Db.update(sql, UUID.fromString(application_id));
Db.use(DbConst.MAXKB).update(sql, UUID.fromString(application_id));
//Delete the application
sql = "delete from application where id=?";
Db.update(sql, UUID.fromString(application_id));
Db.use(DbConst.MAXKB).update(sql, UUID.fromString(application_id));
}
sql = "delete from dataset where \"desc\"='【代码生成】'";
Db.update(sql);
Db.use(DbConst.MAXKB).update(sql);
}
/**
@ -197,7 +198,7 @@ public class MaxKbModel {
*/
public Record getDocument(String documentId) {
String sql = "select * from document where id=?";
return Db.findFirst(sql, UUID.fromString(documentId));
return Db.use(DbConst.MAXKB).findFirst(sql, UUID.fromString(documentId));
}
/**
@ -211,4 +212,14 @@ public class MaxKbModel {
String status = record.getStr("status");
return status.equals("nn2");
}
/**
* Get the application record bound to a person.
* @param person_id person ID
* @return the matching record, or null if none exists
*/
public Record getPersonApplication(String person_id) {
String sql = "select * from t_ai_person_application where person_id=?";
return Db.findFirst(sql, person_id);
}
}

@ -4,6 +4,7 @@ import cn.hutool.core.io.FileUtil;
import com.dsideal.QingLong.Base.Controller.BaseController;
import com.dsideal.QingLong.Classes.Controller.ClassController;
import com.dsideal.QingLong.Collect.Controller.CollectController;
import com.dsideal.QingLong.Const.DbConst;
import com.dsideal.QingLong.Const.RedisKeyConst;
import com.dsideal.QingLong.DataShare.Controller.DataShareController;
import com.dsideal.QingLong.Dm.Controller.DmController;
@ -131,29 +132,28 @@ public class Start extends JFinalConfig {
@Override
public void configPlugin(Plugins me) {
HikariCpPlugin masterPlugin = new HikariCpPlugin(PropKit.get("jdbcUrl"), PropKit.get("user"), PropKit.get("password").trim(), PropKit.get("driverClassName"));
masterPlugin.setConnectionTestQuery(this.connectionTestQuery);
masterPlugin.setConnectionTimeout(this.connectionTimeoutMs);
masterPlugin.setIdleTimeout(this.idleTimeoutMs);
masterPlugin.setMaxLifetime(this.maxLifetimeMs);
masterPlugin.setMaximumPoolSize(this.maxPoolSize);
String jdbcUrlSlave = PropKit.get("jdbcUrlSlave");
if (StrKit.isBlank(jdbcUrlSlave)) {
jdbcUrlSlave = PropKit.get("jdbcUrl");
}
HikariCpPlugin hp = new HikariCpPlugin(PropKit.get("jdbcUrl"), PropKit.get("user"), PropKit.get("password").trim(), PropKit.get("driverClassName"));
hp.setConnectionTestQuery(this.connectionTestQuery);
hp.setConnectionTimeout(this.connectionTimeoutMs);
hp.setIdleTimeout(this.idleTimeoutMs);
hp.setMaxLifetime(this.maxLifetimeMs);
hp.setMaximumPoolSize(this.maxPoolSize);
me.add(hp);
HikariCpPlugin hpMaxKB = new HikariCpPlugin(PropKit.get("MaxKB_jdbcUrl"), PropKit.get("MaxKB_user"), PropKit.get("MaxKB_password").trim(),
PropKit.get("driverClassName"));
hpMaxKB.setConnectionTestQuery(this.connectionTestQuery);
hpMaxKB.setConnectionTimeout(this.connectionTimeoutMs);
hpMaxKB.setIdleTimeout(this.idleTimeoutMs);
hpMaxKB.setMaxLifetime(this.maxLifetimeMs);
hpMaxKB.setMaximumPoolSize(this.maxPoolSize);
me.add(hpMaxKB);
HikariCpPlugin slavePlugin = new HikariCpPlugin(jdbcUrlSlave, PropKit.get("user"), PropKit.get("password").trim(), PropKit.get("driverClassName"));
slavePlugin.setConnectionTestQuery(this.connectionTestQuery);
slavePlugin.setConnectionTimeout(this.connectionTimeoutMs);
slavePlugin.setIdleTimeout(this.idleTimeoutMs);
slavePlugin.setMaxLifetime(this.maxLifetimeMs);
slavePlugin.setMaximumPoolSize(this.maxPoolSize);
me.add(masterPlugin);
me.add(slavePlugin);
ActiveRecordPlugin masterArp = new ActiveRecordPlugin("master", masterPlugin);
masterArp.setDialect(new PostgreSqlDialect());
ActiveRecordPlugin slaveArp = new ActiveRecordPlugin("slave", slavePlugin);
ActiveRecordPlugin arp = new ActiveRecordPlugin(DbConst.SZJZ, hp);
arp.setDialect(new PostgreSqlDialect());
ActiveRecordPlugin slaveArp = new ActiveRecordPlugin(DbConst.MAXKB, hpMaxKB);
slaveArp.setDialect(new PostgreSqlDialect());
String basePath = Start.class.getClassLoader().getResource(".").getPath();
File sqlDir = new File(basePath + "/Sql");
File[] sqlFiles = sqlDir.listFiles();
@ -163,12 +163,12 @@ public class Start extends JFinalConfig {
for (int i = 0; i < var11; ++i) {
File sqlFile = var10[i];
if (sqlFile.getName().indexOf(".sql") > 0) {
masterArp.addSqlTemplate("/Sql/" + sqlFile.getName());
arp.addSqlTemplate("/Sql/" + sqlFile.getName());
slaveArp.addSqlTemplate("/Sql/" + sqlFile.getName());
}
}
me.add(masterArp);
me.add(arp);
me.add(slaveArp);
RedisPlugin redis = new RedisPlugin("Redis", PropKit.get("redis_ip"), PropKit.getInt("redis_port"), 10000);
me.add(redis);
