Bug fixed: make MonitorClient.collectSql() public, add AUTO_INCREMENT to the druid_sql id column, and update MonitorDaoJdbcImplTest accordingly.

This commit is contained in:
wenshao 2013-08-14 20:19:07 +08:00
parent 7a7f77182b
commit 7f255fa494
3 changed files with 33 additions and 37 deletions

View File

@@ -1,33 +1,5 @@
INSERT INTO druid_sql (domain, app, cluster, host, pid
, collectTime, sqlHash, dataSource, lastStartTime, batchTotal
, batchToMax, execSuccessCount, execNanoTotal, execNanoMax, running
, concurrentMax, rsHoldTime, execRsHoldTime, name, file
, dbType, execNanoMaxOccurTime, errorCount, errorLastMsg, errorLastClass
, errorLastStackTrace, errorLastTime, updateCount, updateCountMax, fetchRowCount
, fetchRowCountMax, inTxnCount, lastSlowParameters, clobOpenCount, blobOpenCount
, readStringLength, readBytesLength, inputStreamOpenCount, readerOpenCount, h1
, h10, h100, h1000, h10000, h100000
, h1000000, hmore, eh1, eh10, eh100
, eh1000, eh10000, eh100000, eh1000000, ehmore
, f1, f10, f100, f1000, f10000
, fmore, u1, u10, u100, u1000
, u10000, umore)
VALUES (?, ?, ?, ?, ?
, ?, ?, ?, ?, ?
, ?, ?, ?, ?, ?
, ?, ?, ?, ?, ?
, ?, ?, ?, ?, ?
, ?, ?, ?, ?, ?
, ?, ?, ?, ?, ?
, ?, ?, ?, ?, ?
, ?, ?, ?, ?, ?
, ?, ?, ?, ?, ?
, ?, ?, ?, ?, ?
, ?, ?, ?, ?, ?
, ?, ?, ?, ?, ?
, ?, ?)
CREATE TABLE druid_sql (
id bigint(20) NOT NULL,
id bigint(20) AUTO_INCREMENT NOT NULL,
domain varchar(45),
app varchar(45),
cluster varchar(45),

View File

@@ -89,7 +89,7 @@ public class MonitorClient {
}
@SuppressWarnings("resource")
private void collectSql() {
public void collectSql() {
Set<Object> dataSources = DruidDataSourceStatManager.getInstances().keySet();
List<DruidDataSourceStatValue> statValueList = new ArrayList<DruidDataSourceStatValue>(dataSources.size());

View File

@@ -1,31 +1,55 @@
package com.alibaba.druid.bvt.support.monitor;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import junit.framework.TestCase;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.stat.JdbcSqlStatValue;
import com.alibaba.druid.support.monitor.MonitorClient;
import com.alibaba.druid.support.monitor.dao.MonitorDaoJdbcImpl;
import com.alibaba.druid.support.monitor.dao.MonitorDaoJdbcImpl.BeanInfo;
import com.alibaba.druid.support.monitor.dao.MonitorDaoJdbcImpl.FieldInfo;
import com.alibaba.druid.util.JdbcConstants;
import com.alibaba.druid.util.JdbcUtils;
public class MonitorDaoJdbcImplTest extends TestCase {
private DruidDataSource dataSource;
@Override
protected void setUp() throws Exception {
dataSource = new DruidDataSource();
dataSource.setUrl("jdbc:h2:mem:test");
dataSource.setInitialSize(2);
dataSource.setMinIdle(2);
dataSource.setFilters("stat,log4j");
dataSource.init();
}
@Override
protected void tearDown() throws Exception {
JdbcUtils.close(dataSource);
}
public void testBuildSql() throws Exception {
MonitorDaoJdbcImpl dao = new MonitorDaoJdbcImpl();
dao.setDataSource(dataSource);
{
String sql = dao.buildInsertSql(new BeanInfo(JdbcSqlStatValue.class));
MonitorClient client = new MonitorClient();
client.setDao(dao);
System.out.println(SQLUtils.format(sql, JdbcConstants.MYSQL));
}
{
String sql = buildCreateSql(dao, new BeanInfo(JdbcSqlStatValue.class));
System.out.println(SQLUtils.format(sql, JdbcConstants.MYSQL));
JdbcUtils.execute(dataSource, sql, Collections.emptyList());
}
client.collectSql();
}
public String buildCreateSql(MonitorDaoJdbcImpl dao, BeanInfo beanInfo) {
@@ -34,7 +58,7 @@ public class MonitorDaoJdbcImplTest extends TestCase {
buf.append("CREATE TABLE ") //
.append(dao.getTableName(beanInfo));
buf.append("( id bigint(20) NOT NULL");
buf.append("( id bigint(20) NOT NULL AUTO_INCREMENT");
buf.append(", domain varchar(45)");
buf.append(", app varchar(45)");
buf.append(", cluster varchar(45)");