提交 9bfa0c8e 编写于 作者: Z zyyang

[TD-2052]<feature>: common connection pool demos

上级 9e663ecd
......@@ -5,14 +5,13 @@
<groupId>com.taosdata.jdbc</groupId>
<artifactId>taos-jdbcdriver</artifactId>
<version>2.0.9</version>
<version>2.0.10</version>
<packaging>jar</packaging>
<name>JDBCDriver</name>
<url>https://github.com/taosdata/TDengine/tree/master/src/connector/jdbc</url>
<description>TDengine JDBC Driver</description>
<licenses>
<license>
<name>GNU AFFERO GENERAL PUBLIC LICENSE Version 3</name>
......
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.taosdata.demo</groupId>
<artifactId>connectionPools</artifactId>
<version>1.0-SNAPSHOT</version>
<dependencies>
<dependency>
<groupId>com.taosdata.jdbc</groupId>
<artifactId>taos-jdbcdriver</artifactId>
<version>2.0.10</version>
</dependency>
<!-- druid -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
<version>1.1.17</version>
</dependency>
<!-- HikariCP -->
<dependency>
<groupId>com.zaxxer</groupId>
<artifactId>HikariCP</artifactId>
<version>3.2.0</version>
</dependency>
<!-- dbcp Connection Pool -->
<dependency>
<groupId>commons-pool</groupId>
<artifactId>commons-pool</artifactId>
<version>1.5.4</version>
</dependency>
<dependency>
<groupId>commons-dbcp</groupId>
<artifactId>commons-dbcp</artifactId>
<version>1.4</version>
</dependency>
<!-- log4j -->
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
</dependencies>
</project>
\ No newline at end of file
package com.taosdata.demo;
import com.taosdata.demo.common.InsertTask;
import com.taosdata.demo.pool.DbcpBuilder;
import com.taosdata.demo.pool.DruidPoolBuilder;
import com.taosdata.demo.pool.HikariCpBuilder;
import org.apache.log4j.Logger;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
public class ConnectionPoolDemo {
    // FIX: was Logger.getLogger(DruidPoolBuilder.class), which attributed every
    // log event from this class to DruidPoolBuilder.
    private static final Logger logger = Logger.getLogger(ConnectionPoolDemo.class);

    private static int batchSize = 10;     // rows per insert statement, per table
    private static int sleep = 1000;       // ms to pause between task submissions; <= 0 disables
    private static int poolSize = 50;      // connection pool size
    private static int tableSize = 1000;   // number of child tables t_1 .. t_tableSize
    private static int threadCount = 50;   // worker threads in the executor
    private static final String dbName = "pool_test";

    /**
     * Entry point: parses CLI flags, builds a connection pool (HikariCP by
     * default — swap the commented lines to try DBCP or Druid), recreates the
     * test database and tables, then submits {@link InsertTask} jobs forever.
     *
     * @param args -host (required), -batchSize, -sleep, -poolSize, -tableSize, -threadCount
     * @throws InterruptedException if the submission loop is interrupted while sleeping
     */
    public static void main(String[] args) throws InterruptedException {
        String host = null;
        for (int i = 0; i < args.length; i++) {
            if ("-host".equalsIgnoreCase(args[i]) && i < args.length - 1) {
                host = args[++i];
            }
            if ("-batchSize".equalsIgnoreCase(args[i]) && i < args.length - 1) {
                batchSize = Integer.parseInt(args[++i]);
            }
            if ("-sleep".equalsIgnoreCase(args[i]) && i < args.length - 1) {
                sleep = Integer.parseInt(args[++i]);
            }
            if ("-poolSize".equalsIgnoreCase(args[i]) && i < args.length - 1) {
                poolSize = Integer.parseInt(args[++i]);
            }
            if ("-tableSize".equalsIgnoreCase(args[i]) && i < args.length - 1) {
                tableSize = Integer.parseInt(args[++i]);
            }
            // FIX: threadCount was previously hard-coded with no CLI override.
            if ("-threadCount".equalsIgnoreCase(args[i]) && i < args.length - 1) {
                threadCount = Integer.parseInt(args[++i]);
            }
        }
        if (host == null) {
            System.out.println("Usage: java -jar XXX.jar " +
                    "-host <hostname> " +
                    "-batchSize <batchSize> " +
                    "-sleep <sleep> " +
                    "-poolSize <poolSize> " +
                    "-tableSize <tableSize> " +
                    "-threadCount <threadCount>");
            return;
        }
        // Pick exactly one pool implementation:
        // DataSource dataSource = DbcpBuilder.getDataSource(host, poolSize);
        // DataSource dataSource = DruidPoolBuilder.getDataSource(host, poolSize);
        DataSource dataSource = HikariCpBuilder.getDataSource(host, poolSize);
        init(dataSource);
        ExecutorService executor = Executors.newFixedThreadPool(threadCount);
        // Intentionally infinite: this demo produces load until the process is killed.
        while (true) {
            executor.execute(new InsertTask(dataSource, dbName, tableSize, batchSize));
            if (sleep > 0)
                TimeUnit.MILLISECONDS.sleep(sleep);
        }
    }

    /**
     * Drops and recreates the test database, the {@code weather} super table,
     * and {@code tableSize} child tables tagged with ('beijing', groupId).
     */
    private static void init(DataSource dataSource) {
        try (Connection conn = dataSource.getConnection()) {
            execute(conn, "drop database if exists " + dbName);
            execute(conn, "create database if not exists " + dbName);
            execute(conn, "use " + dbName);
            execute(conn, "create table weather(ts timestamp, temperature float, humidity int) tags(location nchar(64), groupId int)");
            for (int tb_ind = 1; tb_ind <= tableSize; tb_ind++) {
                execute(conn, "create table t_" + tb_ind + " using weather tags('beijing'," + (tb_ind + 1) + ")");
            }
            System.out.println(">>>>>>>>>>>>>>>>>>>>>>>>>>>> init finished.");
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /** Runs one DDL/DML statement on the given connection and logs it. */
    private static void execute(Connection con, String sql) {
        try (Statement stmt = con.createStatement()) {
            stmt.executeUpdate(sql);
            logger.info("SQL >>> " + sql);
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
}
package com.taosdata.demo.common;
import org.apache.log4j.Logger;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Random;
/**
 * A runnable that inserts one batch of random weather rows into every child
 * table of the test database, borrowing a connection from the supplied pool.
 */
public class InsertTask implements Runnable {
    private static final Logger logger = Logger.getLogger(InsertTask.class);

    private final Random random = new Random(System.currentTimeMillis());
    private final DataSource ds;   // pool to borrow connections from
    private final String dbName;   // target database
    private final int tableSize;   // child tables t_1 .. t_tableSize
    private final int batchSize;   // rows per insert statement

    /**
     * @param ds        connection pool
     * @param dbName    database containing the child tables
     * @param tableSize number of child tables to insert into
     * @param batchSize rows per insert statement, per table
     */
    public InsertTask(DataSource ds, String dbName, int tableSize, int batchSize) {
        this.ds = ds;
        this.dbName = dbName;
        this.tableSize = tableSize;
        this.batchSize = batchSize;
    }

    @Override
    public void run() {
        int affectedRows = 0;
        long start = System.currentTimeMillis();
        // FIX: try-with-resources replaces the manual finally-block cleanup.
        // The Statement and Connection are now always closed (the connection is
        // returned to the pool), and close() failures are no longer silently
        // swallowed by an empty catch block.
        try (Connection conn = ds.getConnection();
             Statement stmt = conn.createStatement()) {
            for (int tbIndex = 1; tbIndex <= tableSize; tbIndex++) {
                StringBuilder sb = new StringBuilder();
                sb.append("insert into " + dbName + ".t_" + tbIndex + "(ts, temperature, humidity) values ");
                for (int i = 0; i < batchSize; i++) {
                    // Timestamps start+0 .. start+batchSize-1 keep rows unique per batch.
                    sb.append("(" + (start + i) + ", " + (random.nextFloat() * 30) + ", " + (random.nextInt(70)) + ") ");
                }
                logger.info("SQL >>> " + sb.toString());
                affectedRows += stmt.executeUpdate(sb.toString());
            }
        } catch (SQLException e) {
            // FIX: was e.printStackTrace(); route through the logger so the
            // failure lands in the configured log files.
            logger.error("insert task failed", e);
        }
        logger.info(">>> affectedRows:" + affectedRows + " TimeCost:" + (System.currentTimeMillis() - start) + " ms");
    }
}
package com.taosdata.demo.pool;
import org.apache.commons.dbcp.BasicDataSource;
import javax.sql.DataSource;
/** Factory for an Apache Commons DBCP pool pointed at a TDengine server. */
public class DbcpBuilder {

    /**
     * Builds a {@link BasicDataSource} for the given host with a fixed-size
     * pool: initial, minimum-idle and maximum-active are all {@code poolSize}.
     *
     * @param host     TDengine server hostname
     * @param poolSize number of pooled connections
     * @return a configured, ready-to-use data source
     */
    public static DataSource getDataSource(String host, int poolSize) {
        final String jdbcUrl = "jdbc:TAOS://" + host + ":6030";
        BasicDataSource dataSource = new BasicDataSource();
        dataSource.setDriverClassName("com.taosdata.jdbc.TSDBDriver");
        dataSource.setUrl(jdbcUrl);
        dataSource.setUsername("root");
        dataSource.setPassword("taosdata");
        // Open every connection up front and keep the pool at a constant size.
        dataSource.setInitialSize(poolSize);
        dataSource.setMinIdle(poolSize);
        dataSource.setMaxActive(poolSize);
        return dataSource;
    }
}
package com.taosdata.demo.pool;
import com.alibaba.druid.pool.DruidDataSource;
import javax.sql.DataSource;
/** Factory for an Alibaba Druid pool pointed at a TDengine server. */
public class DruidPoolBuilder {

    /**
     * Builds a {@link DruidDataSource} for the given host. Initial, minimum-idle
     * and maximum-active sizes are all {@code poolSize}, so the pool stays at a
     * constant size; callers wait at most 2 seconds for a free connection.
     *
     * @param host     TDengine server hostname
     * @param poolSize number of pooled connections
     * @return a configured, ready-to-use data source
     */
    public static DataSource getDataSource(String host, int poolSize) {
        DruidDataSource ds = new DruidDataSource();
        ds.setUrl("jdbc:TAOS://" + host + ":6030");
        ds.setDriverClassName("com.taosdata.jdbc.TSDBDriver");
        ds.setUsername("root");
        ds.setPassword("taosdata");
        ds.setInitialSize(poolSize); // connections created at startup (Druid default: 0)
        ds.setMaxActive(poolSize);   // upper bound on pooled connections (Druid default: 8)
        ds.setMinIdle(poolSize);     // minimum idle connections kept alive
        ds.setMaxWait(2000);         // max wait for a connection, in milliseconds
        return ds;
    }
}
package com.taosdata.demo.pool;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import javax.sql.DataSource;
/** Factory for a HikariCP pool pointed at a TDengine server. */
public class HikariCpBuilder {

    /**
     * Builds a {@link HikariDataSource} for the given host with a fixed pool
     * size ({@code maximumPoolSize == minimumIdle == poolSize}).
     *
     * @param host     TDengine server hostname
     * @param poolSize number of pooled connections
     * @return a configured, ready-to-use data source
     */
    public static DataSource getDataSource(String host, int poolSize) {
        final HikariConfig poolConfig = new HikariConfig();
        poolConfig.setDriverClassName("com.taosdata.jdbc.TSDBDriver");
        poolConfig.setJdbcUrl("jdbc:TAOS://" + host + ":6030");
        poolConfig.setUsername("root");
        poolConfig.setPassword("taosdata");
        poolConfig.setMinimumIdle(poolSize);
        poolConfig.setMaximumPoolSize(poolSize);
        return new HikariDataSource(poolConfig);
    }
}
app.insertBatchSize=50
\ No newline at end of file
### 设置###
log4j.rootLogger=debug,stdout,DebugLog,ErrorLog
### 输出信息到控制台 ###
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Target=System.out
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%-5p] %d{yyyy-MM-dd HH:mm:ss,SSS} method:%l%n%m%n
### 输出DEBUG 级别以上的日志到=logs/debug.log
log4j.appender.DebugLog=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DebugLog.File=logs/debug.log
log4j.appender.DebugLog.Append=true
log4j.appender.DebugLog.Threshold=DEBUG
log4j.appender.DebugLog.layout=org.apache.log4j.PatternLayout
log4j.appender.DebugLog.layout.ConversionPattern=%-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n
### 输出ERROR 级别以上的日志到=logs/error.log
log4j.appender.ErrorLog=org.apache.log4j.DailyRollingFileAppender
log4j.appender.ErrorLog.File=logs/error.log
log4j.appender.ErrorLog.Append=true
log4j.appender.ErrorLog.Threshold=ERROR
log4j.appender.ErrorLog.layout=org.apache.log4j.PatternLayout
log4j.appender.ErrorLog.layout.ConversionPattern=%-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n
\ No newline at end of file
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册