提交 94f2b669 编写于 作者: T tristaZero

Merge branch 'dev' of ssh://github.com/shardingjdbc/sharding-jdbc into dev

# Conflicts:
#	pom.xml
#	sharding-core/src/main/java/io/shardingsphere/core/yaml/proxy/YamlProxyConfiguration.java
#	sharding-proxy/src/main/java/io/shardingsphere/proxy/config/RuleRegistry.java
......@@ -35,10 +35,10 @@
- [x] COM_INIT_DB Packet
- [x] COM_QUERY Packet
- [x] COM_FIELD_LIST Packet
- [ ] COM_STMT_PREPARE
- [ ] COM_STMT_EXECUTE
- [ ] COM_STMT_CLOSE
- [ ] COM_STMT_RESET
- [x] COM_STMT_PREPARE
- [x] COM_STMT_EXECUTE
- [x] COM_STMT_CLOSE
- [x] COM_STMT_RESET
- [ ] Oracle
- [ ] SQLServer
- [ ] PostgreSQL
......@@ -62,9 +62,9 @@
- [x] Aggregation Functions
- [x] LIMIT, rownum, TOP
- [x] Simple Sub Query
- [x] OR
- [ ] DISTINCT
- [ ] HAVING
- [ ] OR
- [ ] UNION, UNION ALL
- [ ] Calculate Expression, eg: SUM(pv) / COUNT(uv)
- [ ] Complicated Sub Query
......@@ -74,7 +74,7 @@
- [x] INSERT SET
- [x] UPDATE
- [x] DELETE
- [ ] INSERT INTO VALUES (xxx), (xxx)
- [x] INSERT INTO VALUES (xxx), (xxx)
- [ ] UPDATE Multiple Tables
- [ ] DELETE Multiple Tables
- [x] DDL
......
......@@ -79,10 +79,12 @@
<coveralls-maven-plugin.version>4.1.0</coveralls-maven-plugin.version>
<docker-maven-plugin.version>0.4.14</docker-maven-plugin.version>
<commons-codec.version>1.10</commons-codec.version>
<atomikos.version>4.0.4</atomikos.version>
<javax.transaction.version>1.1</javax.transaction.version>
<javadocExecutable>${java.home}/../bin/javadoc</javadocExecutable>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
......@@ -321,6 +323,26 @@
<artifactId>commons-codec</artifactId>
<version>${commons-codec.version}</version>
</dependency>
<dependency>
<groupId>com.atomikos</groupId>
<artifactId>transactions</artifactId>
<version>${atomikos.version}</version>
</dependency>
<dependency>
<groupId>com.atomikos</groupId>
<artifactId>transactions-jta</artifactId>
<version>${atomikos.version}</version>
</dependency>
<dependency>
<groupId>com.atomikos</groupId>
<artifactId>transactions-jdbc</artifactId>
<version>${atomikos.version}</version>
</dependency>
<dependency>
<groupId>javax.transaction</groupId>
<artifactId>jta</artifactId>
<version>${javax.transaction.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
......
/*
* Copyright 2016-2018 shardingsphere.io.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* </p>
*/
package io.shardingsphere.core.constant;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
/**
 * Enumeration of the transaction modes supported by ShardingSphere.
 *
 * @author zhaojun
 */
public enum TransactionType {
    
    /**
     * Default local transaction.
     */
    NONE(""),
    
    /**
     * XA distributed transaction provided by the RDBMS vendor.
     */
    XA("XA"),
    
    /**
     * TCC (Try-Confirm-Cancel) distributed transaction mode.
     */
    TCC("TCC");
    
    private final String type;
    
    TransactionType(final String type) {
        this.type = type;
    }
    
    /**
     * Get the type value bound to this transaction type.
     *
     * @return type value
     */
    public String getType() {
        return type;
    }
    
    /**
     * Find enum by type value.
     *
     * @param type property type
     * @return matched enum, {@code NONE} when no constant carries the given value
     */
    public static TransactionType findByValue(final String type) {
        TransactionType result = NONE;
        for (TransactionType candidate : values()) {
            if (candidate.getType().equals(type)) {
                result = candidate;
                break;
            }
        }
        return result;
    }
}
......@@ -60,7 +60,7 @@ public abstract class ShardingMetaData {
public ShardingMetaData(final ListeningExecutorService executorService) {
this.executorService = executorService;
}
/**
* Initialize sharding metadata.
*
......@@ -109,7 +109,7 @@ public abstract class ShardingMetaData {
/**
 * Refresh table metadata for one table rule without any pre-opened connections.
 *
 * @param each table rule whose metadata should be refreshed
 * @param shardingRule sharding rule the table rule belongs to
 */
public void refresh(final TableRule each, final ShardingRule shardingRule) {
    // Delegate to the connection-aware overload with an empty connection map,
    // forcing fresh connections to be obtained per data source.
    refresh(each, shardingRule, Collections.<String, Connection>emptyMap());
}
/**
* Refresh each tableMetaData by TableRule.
*
......@@ -120,7 +120,7 @@ public abstract class ShardingMetaData {
public void refresh(final TableRule each, final ShardingRule shardingRule, final Map<String, Connection> connectionMap) {
    // Recompute the logic table's uniformed metadata from all of its actual data nodes,
    // reusing the supplied connections, and replace any cached entry for that logic table.
    tableMetaDataMap.put(each.getLogicTable(), getFinalTableMetaData(each.getLogicTable(), each.getActualDataNodes(), shardingRule.getShardingDataSourceNames(), connectionMap));
}
private TableMetaData getFinalTableMetaData(final String logicTableName, final List<DataNode> actualDataNodes,
final ShardingDataSourceNames shardingDataSourceNames, final Map<String, Connection> connectionMap) {
List<TableMetaData> actualTableMetaDataList = getAllActualTableMetaData(actualDataNodes, shardingDataSourceNames, connectionMap);
......@@ -145,7 +145,7 @@ public abstract class ShardingMetaData {
}
}));
}
try {
return Futures.allAsList(result).get();
} catch (final InterruptedException | ExecutionException ex) {
......@@ -163,7 +163,7 @@ public abstract class ShardingMetaData {
* @throws SQLException SQL exception
*/
public abstract TableMetaData getTableMetaData(DataNode dataNode, ShardingDataSourceNames shardingDataSourceNames, Map<String, Connection> connectionMap) throws SQLException;
private String getErrorMsgOfTableMetaData(final String logicTableName, final TableMetaData oldTableMetaData, final TableMetaData newTableMetaData) {
return String.format("Cannot get uniformed table structure for %s. The different metadata of actual tables is as follows:\n%s\n%s.",
logicTableName, oldTableMetaData.toString(), newTableMetaData.toString());
......
......@@ -58,6 +58,8 @@ public final class YamlProxyConfiguration {
private ProxyAuthority proxyAuthority = new ProxyAuthority();
private String transactionMode;
/**
* Unmarshal yaml sharding configuration from yaml file.
*
......
......@@ -17,6 +17,7 @@
package io.shardingsphere.core.parsing.integrate.jaxb.root;
import com.google.common.base.Splitter;
import io.shardingsphere.core.parsing.integrate.jaxb.condition.ExpectedOrCondition;
import io.shardingsphere.core.parsing.integrate.jaxb.groupby.ExpectedGroupByColumn;
import io.shardingsphere.core.parsing.integrate.jaxb.item.ExpectedAggregationSelectItem;
......@@ -32,7 +33,7 @@ import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
......@@ -45,8 +46,7 @@ public final class ParserResult {
private String sqlCaseId;
@XmlAttribute
@XmlList
private List<String> parameters = new LinkedList<>();
private String parameters;
@XmlElementWrapper
@XmlElement(name = "table")
......@@ -72,4 +72,13 @@ public final class ParserResult {
@XmlElement
private ExpectedLimit limit;
/**
 * Get parameters.
 *
 * <p>The XML attribute stores all parameters as one comma separated string;
 * this splits it into individual trimmed values.</p>
 *
 * @return parameter literals parsed from the attribute, or an empty list when the attribute is absent; never {@code null}
 */
public List<String> getParameters() {
    return null == parameters ? Collections.<String>emptyList() : Splitter.on(",").trimResults().splitToList(parameters);
}
}
<?xml version="1.0" encoding="UTF-8"?>
<parser-result-sets>
<parser-result sql-case-id="assertDeleteWithShardingValue" parameters="1000 1001 'init'">
<parser-result sql-case-id="assertDeleteWithShardingValue" parameters="1000, 1001, 'init'">
<tables>
<table name="t_order"/>
</tables>
......
<?xml version="1.0" encoding="UTF-8"?>
<parser-result-sets>
<parser-result sql-case-id="assertInsertWithAllPlaceholders" parameters="1 1 'init'">
<parser-result sql-case-id="assertInsertWithAllPlaceholders" parameters="1, 1, 'init'">
<tables>
<table name="t_order" />
</tables>
......@@ -20,7 +20,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertInsertWithPartialPlaceholder" parameters="1 1">
<parser-result sql-case-id="assertInsertWithPartialPlaceholder" parameters="1, 1">
<tables>
<table name="t_order" />
</tables>
......@@ -40,7 +40,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertInsertWithGenerateKeyColumn" parameters="10000 1000 10">
<parser-result sql-case-id="assertInsertWithGenerateKeyColumn" parameters="10000, 1000, 10">
<tables>
<table name="t_order_item" />
</tables>
......@@ -63,7 +63,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertInsertWithoutGenerateKeyColumn" parameters="1000 10">
<parser-result sql-case-id="assertInsertWithoutGenerateKeyColumn" parameters="1000, 10">
<tables>
<table name="t_order_item" />
</tables>
......@@ -88,7 +88,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertInsertSetWithAllPlaceholders" parameters="1 1 'init'">
<parser-result sql-case-id="assertInsertSetWithAllPlaceholders" parameters="1, 1, 'init'">
<tables>
<table name="t_order" />
</tables>
......@@ -108,7 +108,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertInsertSetWithPartialPlaceholder" parameters="1 1">
<parser-result sql-case-id="assertInsertSetWithPartialPlaceholder" parameters="1, 1">
<tables>
<table name="t_order" />
</tables>
......@@ -128,7 +128,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertInsertSetWithGenerateKeyColumn" parameters="10000 1000 10">
<parser-result sql-case-id="assertInsertSetWithGenerateKeyColumn" parameters="10000, 1000, 10">
<tables>
<table name="t_order_item" />
</tables>
......@@ -151,7 +151,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertInsertSetWithoutGenerateKeyColumn" parameters="1000 10">
<parser-result sql-case-id="assertInsertSetWithoutGenerateKeyColumn" parameters="1000, 10">
<tables>
<table name="t_order_item" />
</tables>
......@@ -171,7 +171,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertInsertOnDuplicateKeyUpdate" parameters="1 1 'init'">
<parser-result sql-case-id="assertInsertOnDuplicateKeyUpdate" parameters="1, 1, 'init'">
<tables>
<table name="t_order" />
</tables>
......@@ -191,7 +191,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertInsertWithTableIdentifier" parameters="1 1 'init'">
<parser-result sql-case-id="assertInsertWithTableIdentifier" parameters="1, 1, 'init'">
<tables>
<table name="t_order" />
</tables>
......@@ -214,7 +214,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertInsertSetWithTableIdentifier" parameters="1 1 'init'">
<parser-result sql-case-id="assertInsertSetWithTableIdentifier" parameters="1, 1, 'init'">
<tables>
<table name="t_order" />
</tables>
......@@ -237,7 +237,7 @@
</or-condition>
</parser-result>
<!--<parser-result sql-case-id="assertInsertOnDuplicateKeyUpdateWithTableIdentifier" parameters="1 1 'init'">-->
<!--<parser-result sql-case-id="assertInsertOnDuplicateKeyUpdateWithTableIdentifier" parameters="1, 1, 'init'">-->
<!--<tables>-->
<!--<table name="t_order" />-->
<!--</tables>-->
......@@ -262,7 +262,7 @@
<!--</or-condition>-->
<!--</parser-result>-->
<parser-result sql-case-id="assertBatchInsertWithGenerateKeyColumn" parameters="10000 1000 10 10010 1001 10">
<parser-result sql-case-id="assertBatchInsertWithGenerateKeyColumn" parameters="10000, 1000, 10, 10010, 1001, 10">
<tables>
<table name="t_order_item" />
</tables>
......@@ -296,7 +296,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertBatchInsertWithoutGenerateKeyColumn" parameters="1000 10 1001 10">
<parser-result sql-case-id="assertBatchInsertWithoutGenerateKeyColumn" parameters="1000, 10, 1001, 10">
<tables>
<table name="t_order_item" />
</tables>
......@@ -329,7 +329,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertInsertWithJsonAndGeo" parameters="7 200 100 200 '{&quot;rule&quot;:&quot;null&quot;}'">
<parser-result sql-case-id="assertInsertWithJsonAndGeo" parameters="7, 200, 100, 200, '{&quot;rule&quot;:&quot;null&quot;}'">
<tables>
<table name="t_place" />
</tables>
......@@ -349,7 +349,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertInsertWithoutColumnsWithAllPlaceholders" parameters="1 1 'init'">
<parser-result sql-case-id="assertInsertWithoutColumnsWithAllPlaceholders" parameters="1, 1, 'init'">
<tables>
<table name="t_order" />
</tables>
......@@ -377,7 +377,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertInsertWithoutColumnsWithPartialPlaceholder" parameters="1 1">
<parser-result sql-case-id="assertInsertWithoutColumnsWithPartialPlaceholder" parameters="1, 1">
<tables>
<table name="t_order" />
</tables>
......@@ -405,7 +405,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertInsertWithoutColumnsWithGenerateKeyColumn" parameters="10000 1000 10">
<parser-result sql-case-id="assertInsertWithoutColumnsWithGenerateKeyColumn" parameters="10000, 1000, 10">
<tables>
<table name="t_order_item" />
</tables>
......@@ -439,7 +439,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertInsertWithoutColumnsWithoutGenerateKeyColumn" parameters="1000 10">
<parser-result sql-case-id="assertInsertWithoutColumnsWithoutGenerateKeyColumn" parameters="1000, 10">
<tables>
<table name="t_order_item" />
</tables>
......
<?xml version="1.0" encoding="UTF-8"?>
<parser-result-sets>
<parser-result sql-case-id="assertSelectOne" />
<parser-result sql-case-id="select_constant_without_table" />
<parser-result sql-case-id="assertSelectNotEqualsWithSingleTable" parameters="1">
<parser-result sql-case-id="select_not_equal_with_single_table" parameters="1">
<tables>
<table name="t_order_item" />
</tables>
......@@ -14,7 +14,7 @@
</order-by-columns>
</parser-result>
<parser-result sql-case-id="assertSelectNotEqualsWithSingleTableForExclamationEqual" parameters="1">
<parser-result sql-case-id="select_exclamation_equal_with_single_table" parameters="1">
<tables>
<table name="t_order_item" />
</tables>
......@@ -26,7 +26,7 @@
</order-by-columns>
</parser-result>
<parser-result sql-case-id="assertSelectNotEqualsWithSingleTableForNotIn" parameters="100000 100001">
<parser-result sql-case-id="select_not_in_with_single_table" parameters="100000, 100001">
<tables>
<table name="t_order_item" />
</tables>
......@@ -38,7 +38,7 @@
</order-by-columns>
</parser-result>
<parser-result sql-case-id="assertSelectNotEqualsWithSingleTableForNotBetween" parameters="100000 100001">
<parser-result sql-case-id="select_not_between_with_single_table" parameters="100000, 100001">
<tables>
<table name="t_order_item" />
</tables>
......@@ -50,7 +50,7 @@
</order-by-columns>
</parser-result>
<parser-result sql-case-id="assertSelectEqualsWithSingleTable" parameters="1 1">
<parser-result sql-case-id="select_equal_with_single_table" parameters="1, 1">
<tables>
<table name="t_order" />
</tables>
......@@ -68,9 +68,8 @@
</and-condition>
</or-condition>
</parser-result>
<!-- // TODO 这里可优化,将两个字段AND = 替换为永false, 目前因为condition是map,同样的column会覆盖 -->
<parser-result sql-case-id="assertSelectEqualsWithSameShardingColumns" parameters="1 2">
<parser-result sql-case-id="select_equal_with_same_sharding_column" parameters="1, 2">
<tables>
<table name="t_order" />
</tables>
......@@ -89,7 +88,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertSelectBetweenWithSingleTable" parameters="1 10 2 5">
<parser-result sql-case-id="select_between_with_single_table" parameters="1, 10, 2, 5">
<tables>
<table name="t_order" />
</tables>
......@@ -114,7 +113,7 @@
</order-by-columns>
</parser-result>
<parser-result sql-case-id="assertSelectInWithSingleTable" parameters="1 2 3 9 10">
<parser-result sql-case-id="assertSelectInWithSingleTable" parameters="1, 2, 3, 9, 10">
<tables>
<table name="t_order" />
</tables>
......@@ -140,8 +139,7 @@
</order-by-columns>
</parser-result>
<!-- // TODO 目前对于IN多结果不同做到交集处理 仅仅是简单的加入, 目前因为condition是map,同样的column会覆盖 -->
<parser-result sql-case-id="assertSelectInWithSameShardingColumns" parameters="100 1001 1001 1002">
<parser-result sql-case-id="select_in_with_same_sharding_column" parameters="1000, 1001, 1001, 1002">
<tables>
<table name="t_order" />
</tables>
......@@ -151,7 +149,7 @@
<or-condition>
<and-condition>
<condition column-name="order_id" table-name="t_order" operator="IN">
<value index="0" literal="100" type="int" />
<value index="0" literal="1000" type="int" />
<value index="1" literal="1001" type="int" />
</condition>
<condition column-name="order_id" table-name="t_order" operator="IN">
......@@ -165,7 +163,7 @@
</order-by-columns>
</parser-result>
<parser-result sql-case-id="assertSelectIterator" parameters="1 2">
<parser-result sql-case-id="assertSelectIterator" parameters="1, 2">
<tables>
<table name="t_order_item" alias="t"/>
</tables>
......@@ -196,7 +194,7 @@
</order-by-columns>
</parser-result>
<parser-result sql-case-id="assertSelectLikeWithCount" parameters="'init' 1 2 9 10">
<parser-result sql-case-id="assertSelectLikeWithCount" parameters="'init', 1, 2, 9, 10">
<tables>
<table name="t_order" alias="o"/>
</tables>
......@@ -220,7 +218,7 @@
</aggregation-select-items>
</parser-result>
<parser-result sql-case-id="assertSelectWithBindingTable" parameters="1 2 9 10">
<parser-result sql-case-id="assertSelectWithBindingTable" parameters="1, 2, 9, 10">
<tables>
<table name="t_order" alias="o"/>
<table name="t_order_item" alias="i"/>
......@@ -246,7 +244,7 @@
</order-by-columns>
</parser-result>
<parser-result sql-case-id="assertSelectWithBindingTableAndConfigTable" parameters="1 2 9 10 'init'">
<parser-result sql-case-id="assertSelectWithBindingTableAndConfigTable" parameters="1, 2, 9, 10, 'init'">
<tables>
<table name="t_order" alias="o"/>
<table name="t_order_item" alias="i"/>
......@@ -272,7 +270,7 @@
</order-by-columns>
</parser-result>
<parser-result sql-case-id="assertSelectWithUpperCaseBindingTable" parameters="1 2 9 10">
<parser-result sql-case-id="assertSelectWithUpperCaseBindingTable" parameters="1, 2, 9, 10">
<tables>
<table name="T_ORDER" alias="o"/>
<table name="T_order_item" alias="i"/>
......@@ -298,7 +296,7 @@
</order-by-columns>
</parser-result>
<parser-result sql-case-id="assertSelectWithUpperCaseBindingTableAndConfigTable" parameters="1 2 9 10 'init'">
<parser-result sql-case-id="assertSelectWithUpperCaseBindingTableAndConfigTable" parameters="1, 2, 9, 10, 'init'">
<tables>
<table name="T_ORDER" alias="o"/>
<table name="T_order_item" alias="i"/>
......@@ -324,7 +322,7 @@
</order-by-columns>
</parser-result>
<parser-result sql-case-id="assertSelectCountWithBindingTable" parameters="1 2 9 10">
<parser-result sql-case-id="assertSelectCountWithBindingTable" parameters="1, 2, 9, 10">
<tables>
<table name="t_order" alias="o"/>
<table name="t_order_item" alias="i"/>
......@@ -350,7 +348,7 @@
</aggregation-select-items>
</parser-result>
<parser-result sql-case-id="assertSelectCountWithBindingTableWithJoin" parameters="1 2 9 10">
<parser-result sql-case-id="assertSelectCountWithBindingTableWithJoin" parameters="1, 2, 9, 10">
<tables>
<table name="t_order" alias="o"/>
<table name="t_order_item" alias="i"/>
......@@ -410,7 +408,7 @@
</and-condition>
</or-condition>
</parser-result>
<parser-result sql-case-id="assertSelectEqualsWithJsonAnGeo" parameters="'{&quot;rule2&quot;:&quot;null2&quot;}' 100 200 1">
<parser-result sql-case-id="assertSelectEqualsWithJsonAnGeo" parameters="'{&quot;rule2&quot;:&quot;null2&quot;}', 100, 200, 1">
<tables>
<table name="t_place" />
</tables>
......@@ -425,7 +423,7 @@
</and-condition>
</or-condition>
</parser-result>
<parser-result sql-case-id="assertSelectInWithJsonAnGeo" parameters="'{&quot;rule2&quot;:&quot;null2&quot;}' '{&quot;rule3&quot;:&quot;null3&quot;}' 100 200 1">
<parser-result sql-case-id="assertSelectInWithJsonAnGeo" parameters="'{&quot;rule2&quot;:&quot;null2&quot;}', '{&quot;rule3&quot;:&quot;null3&quot;}', 100, 200, 1">
<tables>
<table name="t_place" />
</tables>
......@@ -440,7 +438,7 @@
</and-condition>
</or-condition>
</parser-result>
<parser-result sql-case-id="assertSelectBetweenWithJsonAnGeo" parameters="'{&quot;rule2&quot;:&quot;null2&quot;}' '{&quot;rule3&quot;:&quot;null3&quot;}' 100 200 1">
<parser-result sql-case-id="assertSelectBetweenWithJsonAnGeo" parameters="'{&quot;rule2&quot;:&quot;null2&quot;}', '{&quot;rule3&quot;:&quot;null3&quot;}', 100, 200, 1">
<tables>
<table name="t_place" />
</tables>
......
......@@ -41,7 +41,7 @@
</aggregation-select-items>
</parser-result>
<parser-result sql-case-id="assertSelectRegexpWithSingleTable" parameters="'init' 1 2">
<parser-result sql-case-id="assertSelectRegexpWithSingleTable" parameters="'init', 1, 2">
<tables>
<table name="t_order_item" alias="t"/>
</tables>
......
<?xml version="1.0" encoding="UTF-8"?>
<parser-result-sets>
<parser-result sql-case-id="assertSelectDateFuncWithGroupBy" parameters="1000 1100">
<parser-result sql-case-id="assertSelectDateFuncWithGroupBy" parameters="1000, 1100">
<tables>
<table name="t_order_item" />
</tables>
......@@ -144,7 +144,7 @@
</order-by-columns>
</parser-result>
<parser-result sql-case-id="assertSelectCountWithoutGroupedColumn" parameters="1 2 9 10">
<parser-result sql-case-id="assertSelectCountWithoutGroupedColumn" parameters="1, 2, 9, 10">
<tables>
<table name="t_order" alias="o" />
<table name="t_order_item" alias="i" />
......@@ -182,7 +182,7 @@
</order-by-columns>
</parser-result>
<parser-result sql-case-id="assertSelectCountWithGroupByBindingTable" parameters="1 2 9 10">
<parser-result sql-case-id="assertSelectCountWithGroupByBindingTable" parameters="1, 2, 9, 10">
<tables>
<table name="t_order" alias="o" />
<table name="t_order_item" alias="i" />
......
<?xml version="1.0" encoding="UTF-8"?>
<parser-result-sets>
<parser-result sql-case-id="assertSelectInWithNullParameter" parameters="1 null">
<parser-result sql-case-id="assertSelectInWithNullParameter" parameters="1, null">
<tables>
<table name="t_order"/>
</tables>
......
<?xml version="1.0" encoding="UTF-8"?>
<parser-result-sets>
<parser-result sql-case-id="assertSelectOrWithSameShardingColumns" parameters="1 2">
<parser-result sql-case-id="assertSelectOrWithSameShardingColumns" parameters="1, 2">
<tables>
<table name="t_order"/>
</tables>
......@@ -21,7 +21,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertSelectOrWithDifferentShardingColumns" parameters="1 2">
<parser-result sql-case-id="assertSelectOrWithDifferentShardingColumns" parameters="1, 2">
<tables>
<table name="t_order"/>
</tables>
......@@ -42,7 +42,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertSelectOrWithShardingColumnsAndNoShardingColumns" parameters="1 'init'">
<parser-result sql-case-id="assertSelectOrWithShardingColumnsAndNoShardingColumns" parameters="1, 'init'">
<tables>
<table name="t_order"/>
</tables>
......@@ -51,7 +51,7 @@
</tokens>
</parser-result>
<parser-result sql-case-id="assertSelectOrWithSimpleParen" parameters="1 'init' 3">
<parser-result sql-case-id="assertSelectOrWithSimpleParen" parameters="1, 'init', 3">
<tables>
<table name="t_order"/>
</tables>
......@@ -75,7 +75,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertSelectOrWithComplexParen" parameters="'init' 1 2 3 4">
<parser-result sql-case-id="assertSelectOrWithComplexParen" parameters="'init', 1, 2, 3, 4">
<tables>
<table name="t_order"/>
</tables>
......@@ -118,7 +118,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertSelectOrWithBindingTable" parameters="1 2 3">
<parser-result sql-case-id="assertSelectOrWithBindingTable" parameters="1, 2, 3">
<tables>
<table name="t_order" alias="o"/>
<table name="t_order_item" alias="i"/>
......@@ -147,7 +147,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertSelectOrWithBindingTableAndConfigTable" parameters="1 2 3 'init'">
<parser-result sql-case-id="assertSelectOrWithBindingTableAndConfigTable" parameters="1, 2, 3, 'init'">
<tables>
<table name="t_order" alias="o"/>
<table name="t_order_item" alias="i"/>
......
<?xml version="1.0" encoding="UTF-8"?>
<parser-result-sets>
<parser-result sql-case-id="assertSelectPaginationWithOffset" parameters="1 2 9 10 5">
<parser-result sql-case-id="assertSelectPaginationWithOffset" parameters="1, 2, 9, 10, 5">
<tables>
<table name="t_order" alias="o" />
<table name="t_order_item" alias="i" />
......@@ -28,7 +28,7 @@
<limit offset="5" offset-index="4" />
</parser-result>
<parser-result sql-case-id="assertSelectPaginationWithRowCount" parameters="1 2 9 10 5">
<parser-result sql-case-id="assertSelectPaginationWithRowCount" parameters="1, 2, 9, 10, 5">
<tables>
<table name="t_order" alias="o" />
<table name="t_order_item" alias="i" />
......@@ -56,7 +56,7 @@
<limit row-count="5" row-count-index="4" />
</parser-result>
<parser-result sql-case-id="assertSelectPaginationWithLimit" parameters="1 2 9 10 5 3">
<parser-result sql-case-id="assertSelectPaginationWithLimit" parameters="1, 2, 9, 10, 5, 3">
<tables>
<table name="t_order" alias="o" />
<table name="t_order_item" alias="i" />
......@@ -85,7 +85,7 @@
<limit offset="5" offset-index="4" row-count="3" row-count-index="5" />
</parser-result>
<parser-result sql-case-id="assertSelectSingleTablePaginationWithLimit" parameters="5 3">
<parser-result sql-case-id="assertSelectSingleTablePaginationWithLimit" parameters="5, 3">
<tables>
<table name="t_order" alias="o" />
</tables>
......@@ -100,7 +100,7 @@
<limit offset="5" offset-index="0" row-count="3" row-count-index="1" />
</parser-result>
<parser-result sql-case-id="assertSelectPaginationWithTop" parameters="3 1 2 9 10">
<parser-result sql-case-id="assertSelectPaginationWithTop" parameters="3, 1, 2, 9, 10">
<tables>
<table name="t_order" alias="o" />
<table name="t_order_item" alias="i" />
......@@ -128,7 +128,7 @@
<limit row-count="3" row-count-index="0" />
</parser-result>
<parser-result sql-case-id="assertSelectPaginationWithOffsetAndLimit" parameters="1 2 9 10 5 3">
<parser-result sql-case-id="assertSelectPaginationWithOffsetAndLimit" parameters="1, 2, 9, 10, 5, 3">
<tables>
<table name="t_order" alias="o" />
<table name="t_order_item" alias="i" />
......@@ -157,7 +157,7 @@
<limit offset="5" offset-index="4" row-count="3" row-count-index="5" />
</parser-result>
<parser-result sql-case-id="assertSelectPaginationWithTopAndRange" parameters="3 1 2 9 10 6">
<parser-result sql-case-id="assertSelectPaginationWithTopAndRange" parameters="3, 1, 2, 9, 10, 6">
<tables>
<table name="t_order" alias="o" />
<table name="t_order_item" alias="i" />
......@@ -186,7 +186,7 @@
<limit offset="6" offset-index="5" row-count-index="0" row-count="3" />
</parser-result>
<parser-result sql-case-id="assertSelectPaginationWithTopAndRangeWithEqual" parameters="3 1 2 9 10 6">
<parser-result sql-case-id="assertSelectPaginationWithTopAndRangeWithEqual" parameters="3, 1, 2, 9, 10, 6">
<tables>
<table name="t_order" alias="o" />
<table name="t_order_item" alias="i" />
......@@ -215,7 +215,7 @@
<limit offset="6" offset-index="5" row-count-index="0" row-count="3" />
</parser-result>
<parser-result sql-case-id="assertSelectSingleTablePaginationWithTopAndRange" parameters="3 1">
<parser-result sql-case-id="assertSelectSingleTablePaginationWithTopAndRange" parameters="3, 1">
<tables>
<table name="t_order" alias="o" />
</tables>
......@@ -235,7 +235,7 @@
<limit offset="1" offset-index="1" row-count-index="0" row-count="3" />
</parser-result>
<parser-result sql-case-id="assertSelectPaginationWithRowNumber" parameters="1 2 9 10 3">
<parser-result sql-case-id="assertSelectPaginationWithRowNumber" parameters="1, 2, 9, 10, 3">
<tables>
<table name="t_order" alias="order0_" />
<table name="t_order_item" alias="i" />
......@@ -268,7 +268,7 @@
<limit row-count="3" row-count-index="4" />
</parser-result>
<parser-result sql-case-id="assertSelectPaginationWithRowNumberAndRange" parameters="1 2 9 10 5 3">
<parser-result sql-case-id="assertSelectPaginationWithRowNumberAndRange" parameters="1, 2, 9, 10, 5, 3">
<tables>
<table name="t_order" alias="order0_" />
<table name="t_order_item" alias="i" />
......@@ -302,7 +302,7 @@
<limit offset="3" offset-index="5" row-count-index="4" row-count="5" />
</parser-result>
<parser-result sql-case-id="assertSelectPaginationWithRowNumberAndRangeWithEqual" parameters="1 2 9 10 5 3">
<parser-result sql-case-id="assertSelectPaginationWithRowNumberAndRangeWithEqual" parameters="1, 2, 9, 10, 5, 3">
<tables>
<table name="t_order" alias="order0_" />
<table name="t_order_item" alias="i" />
......@@ -336,7 +336,7 @@
<limit offset="3" offset-index="5" row-count-index="4" row-count="5" />
</parser-result>
<parser-result sql-case-id="assertSelectSingleTablePaginationWithRowNumberAndRange" parameters="4 1">
<parser-result sql-case-id="assertSelectSingleTablePaginationWithRowNumberAndRange" parameters="4, 1">
<tables>
<table name="t_order"/>
</tables>
......@@ -351,7 +351,7 @@
<limit offset="1" offset-index="1" row-count-index="0" row-count="4" />
</parser-result>
<parser-result sql-case-id="assertSelectSingleTablePaginationWithRowNumberAndRangeWithEqual" parameters="1000 4 1">
<parser-result sql-case-id="assertSelectSingleTablePaginationWithRowNumberAndRangeWithEqual" parameters="1000, 4, 1">
<tables>
<table name="t_order" />
</tables>
......@@ -366,7 +366,7 @@
<limit offset="1" offset-index="2" row-count-index="1" row-count="4" />
</parser-result>
<parser-result sql-case-id="assertSelectSingleTablePaginationWithRowNumberAndRangeWithEqualDiffOrder" parameters="4 1000 1">
<parser-result sql-case-id="assertSelectSingleTablePaginationWithRowNumberAndRangeWithEqualDiffOrder" parameters="4, 1000, 1">
<tables>
<table name="t_order" />
</tables>
......@@ -381,7 +381,7 @@
<limit offset="1" offset-index="2" row-count-index="0" row-count="4" />
</parser-result>
<parser-result sql-case-id="assertSelectSingleTablePaginationWithTopAndRangeWithEqual" parameters="4 1">
<parser-result sql-case-id="assertSelectSingleTablePaginationWithTopAndRangeWithEqual" parameters="4, 1">
<tables>
<table name="t_order" alias="o" />
</tables>
......@@ -401,7 +401,7 @@
<limit offset="1" offset-index="1" row-count-index="0" row-count="4" />
</parser-result>
<parser-result sql-case-id="assertSelectLeftJoinAndLimit" parameters="10 1000 0 20">
<parser-result sql-case-id="assertSelectLeftJoinAndLimit" parameters="10, 1000, 0, 20">
<tables>
<table name="t_order" alias="o" />
<table name="t_order_item" alias="i" />
......
<?xml version="1.0" encoding="UTF-8"?>
<parser-result-sets>
<parser-result sql-case-id="assertSelectPaginationWithGroupByAndOrderBy" parameters="1 2 9 10 5 3">
<parser-result sql-case-id="assertSelectPaginationWithGroupByAndOrderBy" parameters="1, 2, 9, 10, 5, 3">
<tables>
<table name="t_order" alias="o" />
<table name="t_order_item" alias="i" />
......@@ -38,7 +38,7 @@
<limit offset="5" offset-index="4" row-count="3" row-count-index="5" />
</parser-result>
<parser-result sql-case-id="assertSelectPaginationWithDiffGroupByAndOrderBy" parameters="1 2 9 10 5 3">
<parser-result sql-case-id="assertSelectPaginationWithDiffGroupByAndOrderBy" parameters="1, 2, 9, 10, 5, 3">
<tables>
<table name="t_order" alias="o" />
<table name="t_order_item" alias="i" />
......@@ -75,7 +75,7 @@
<limit offset="5" offset-index="4" row-count="3" row-count-index="5" />
</parser-result>
<parser-result sql-case-id="assertSelectPaginationWithTopAndGroupByAndOrderBy" parameters="3 1 2 9 10 6">
<parser-result sql-case-id="assertSelectPaginationWithTopAndGroupByAndOrderBy" parameters="3, 1, 2, 9, 10, 6">
<tables>
<table name="t_order" alias="o" />
<table name="t_order_item" alias="i" />
......@@ -107,7 +107,7 @@
<limit offset="6" offset-index="5" row-count-index="0" row-count="3" />
</parser-result>
<parser-result sql-case-id="assertSelectPaginationWithTopAndGroupByAndOrderByAndParentheses" parameters="3 1 2 9 10 6">
<parser-result sql-case-id="assertSelectPaginationWithTopAndGroupByAndOrderByAndParentheses" parameters="3, 1, 2, 9, 10, 6">
<tables>
<table name="t_order" alias="o" />
<table name="t_order_item" alias="i" />
......@@ -139,7 +139,7 @@
<limit offset="6" offset-index="5" row-count-index="0" row-count="3" />
</parser-result>
<parser-result sql-case-id="assertSelectPaginationWithTopAndDiffGroupByAndOrderBy" parameters="3 1 2 9 10 6">
<parser-result sql-case-id="assertSelectPaginationWithTopAndDiffGroupByAndOrderBy" parameters="3, 1, 2, 9, 10, 6">
<tables>
<table name="t_order" alias="o" />
<table name="t_order_item" alias="i" />
......@@ -176,7 +176,7 @@
<limit offset="6" offset-index="5" row-count-index="0" row-count="3" />
</parser-result>
<parser-result sql-case-id="assertSelectPaginationWithTopAndDiffGroupByAndOrderByAndParentheses" parameters="3 1 2 9 10 6">
<parser-result sql-case-id="assertSelectPaginationWithTopAndDiffGroupByAndOrderByAndParentheses" parameters="3, 1, 2, 9, 10, 6">
<tables>
<table name="t_order" alias="o" />
<table name="t_order_item" alias="i" />
......@@ -213,7 +213,7 @@
<limit offset="6" offset-index="5" row-count-index="0" row-count="3" />
</parser-result>
<parser-result sql-case-id="assertSelectPaginationWithRowNumberAndGroupByAndOrderBy" parameters="1 2 9 10 5 3">
<parser-result sql-case-id="assertSelectPaginationWithRowNumberAndGroupByAndOrderBy" parameters="1, 2, 9, 10, 5, 3">
<tables>
<table name="t_order" alias="order0_" />
<table name="t_order_item" alias="i" />
......@@ -251,7 +251,7 @@
<limit offset="3" offset-index="5" row-count-index="4" row-count="5" />
</parser-result>
<parser-result sql-case-id="assertSelectPaginationWithRowNumberAndDiffGroupByAndOrderBy" parameters="1 2 9 10 5 3">
<parser-result sql-case-id="assertSelectPaginationWithRowNumberAndDiffGroupByAndOrderBy" parameters="1, 2, 9, 10, 5, 3">
<tables>
<table name="t_order" alias="order0_" />
<table name="t_order_item" alias="i" />
......
<?xml version="1.0" encoding="UTF-8"?>
<parser-result-sets>
<parser-result sql-case-id="assertSelectSubQuerySingleTableWithParentheses" parameters="1 2">
<parser-result sql-case-id="assertSelectSubQuerySingleTableWithParentheses" parameters="1, 2">
<tables>
<table name="t_order" alias="o" />
</tables>
......@@ -18,7 +18,7 @@
</parser-result>
<!-- TODO cannot pass,add later
<parser-result sql-case-id="assertSelectSubQueryMultiTableWithParentheses" parameters="1 2">
<parser-result sql-case-id="assertSelectSubQueryMultiTableWithParentheses" parameters="1, 2">
<tables>
<table name="t_order" alias="o" />
<table name="t_order_item" alias="i" />
......
<?xml version="1.0" encoding="UTF-8"?>
<parser-result-sets>
<parser-result sql-case-id="assertUpdateWithAlias" parameters="'update' 1 1">
<parser-result sql-case-id="assertUpdateWithAlias" parameters="'update', 1, 1">
<tables>
<table name="t_order" alias="o" />
</tables>
......@@ -19,7 +19,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertUpdateWithoutAlias" parameters="'update' 1 1">
<parser-result sql-case-id="assertUpdateWithoutAlias" parameters="'update', 1, 1">
<tables>
<table name="t_order" />
</tables>
......@@ -38,7 +38,7 @@
</or-condition>
</parser-result>
<parser-result sql-case-id="assertUpdateWithJsonAndGeo" parameters="'2017-06-07' 100 200 '{&quot;rule2&quot;:&quot;null2&quot;}' 3 5 7 200">
<parser-result sql-case-id="assertUpdateWithJsonAndGeo" parameters="'2017-06-07', 100, 200, '{&quot;rule2&quot;:&quot;null2&quot;}', 3, 5, 7, 200">
<tables>
<table name="t_place" />
</tables>
......
......@@ -99,7 +99,7 @@ public final class IntegrateTestCasesLoader {
}
private Map<String, IntegrateTestCase> loadIntegrateTestCases(final String filePrefix) throws IOException, URISyntaxException, JAXBException {
URL url = IntegrateTestCasesLoader.class.getClassLoader().getResource("asserts/");
URL url = IntegrateTestCasesLoader.class.getClassLoader().getResource("asserts/cases/");
Preconditions.checkNotNull(url, "Cannot found integrate test cases.");
return new HashMap<>(loadIntegrateTestCases(url, filePrefix));
}
......
......@@ -18,7 +18,6 @@
package io.shardingsphere.dbtest.cases.assertion.root;
import com.google.common.base.Splitter;
import io.shardingsphere.dbtest.common.SQLValue;
import lombok.Getter;
import javax.xml.bind.annotation.XmlAccessType;
......
......@@ -15,7 +15,7 @@
* </p>
*/
package io.shardingsphere.dbtest.common;
package io.shardingsphere.dbtest.cases.assertion.root;
import lombok.Getter;
......
......@@ -15,7 +15,7 @@
* </p>
*/
package io.shardingsphere.dbtest.common;
package io.shardingsphere.dbtest.cases.assertion.root;
import io.shardingsphere.dbtest.cases.dataset.init.DataSetMetadata;
import lombok.Getter;
......
......@@ -21,9 +21,6 @@ import io.shardingsphere.core.api.yaml.YamlMasterSlaveDataSourceFactory;
import io.shardingsphere.core.api.yaml.YamlShardingDataSourceFactory;
import io.shardingsphere.core.constant.DatabaseType;
import io.shardingsphere.dbtest.cases.assertion.IntegrateTestCasesLoader;
import io.shardingsphere.dbtest.cases.assertion.ddl.DDLIntegrateTestCaseAssertion;
import io.shardingsphere.dbtest.cases.assertion.dml.DMLIntegrateTestCaseAssertion;
import io.shardingsphere.dbtest.cases.assertion.dql.DQLIntegrateTestCaseAssertion;
import io.shardingsphere.dbtest.cases.assertion.root.IntegrateTestCase;
import io.shardingsphere.dbtest.cases.assertion.root.IntegrateTestCaseAssertion;
import io.shardingsphere.dbtest.env.DatabaseTypeEnvironment;
......@@ -83,7 +80,7 @@ public abstract class BaseIntegrateTest {
this.caseType = caseType;
this.countInSameCase = countInSameCase;
sql = SQLCasesLoader.getInstance().getSupportedSQL(sqlCaseId);
expectedDataFile = path.substring(0, path.lastIndexOf(File.separator) + 1) + "asserts/" + getExpectedDataFileType() + "/" + assertion.getExpectedDataFile();
expectedDataFile = path.substring(0, path.lastIndexOf(File.separator) + 1) + "dataset/" + assertion.getExpectedDataFile();
if (databaseTypeEnvironment.isEnabled()) {
dataSourceMap = createDataSourceMap(assertion);
dataSource = createDataSource(dataSourceMap);
......@@ -93,19 +90,6 @@ public abstract class BaseIntegrateTest {
}
}
private String getExpectedDataFileType() {
if (assertion instanceof DDLIntegrateTestCaseAssertion) {
return "ddl";
}
if (assertion instanceof DMLIntegrateTestCaseAssertion) {
return "dml";
}
if (assertion instanceof DQLIntegrateTestCaseAssertion) {
return "dql";
}
throw new UnsupportedOperationException(String.format("Cannot support '%s'", assertion.getClass().getName()));
}
private Map<String, DataSource> createDataSourceMap(final IntegrateTestCaseAssertion assertion) throws IOException, JAXBException {
Collection<String> dataSourceNames = SchemaEnvironmentManager.getDataSourceNames(assertion.getShardingRuleType());
Map<String, DataSource> result = new HashMap<>(dataSourceNames.size(), 1);
......
......@@ -69,7 +69,7 @@ public final class DDLIntegrateTest extends BaseIntegrateTest {
this.assertion = assertion;
}
@Parameters(name = "{0}.{5} -> {2} -> {3} -> {4}")
@Parameters(name = "{0}[{5}] -> {2} -> {3} -> {4}")
public static Collection<Object[]> getParameters() {
// TODO sqlCasesLoader size should eq integrateTestCasesLoader size
// assertThat(sqlCasesLoader.countAllSupportedSQLCases(), is(integrateTestCasesLoader.countAllDataSetTestCases()));
......
......@@ -23,11 +23,11 @@ import io.shardingsphere.core.util.InlineExpressionParser;
import io.shardingsphere.dbtest.cases.assertion.IntegrateTestCasesLoader;
import io.shardingsphere.dbtest.cases.assertion.dml.DMLIntegrateTestCase;
import io.shardingsphere.dbtest.cases.assertion.dml.DMLIntegrateTestCaseAssertion;
import io.shardingsphere.dbtest.cases.assertion.root.SQLValue;
import io.shardingsphere.dbtest.cases.dataset.init.DataSetColumnMetadata;
import io.shardingsphere.dbtest.cases.dataset.init.DataSetMetadata;
import io.shardingsphere.dbtest.cases.dataset.init.DataSetRow;
import io.shardingsphere.dbtest.cases.dataset.init.DataSetsRoot;
import io.shardingsphere.dbtest.common.SQLValue;
import io.shardingsphere.dbtest.env.DatabaseTypeEnvironment;
import io.shardingsphere.dbtest.env.EnvironmentPath;
import io.shardingsphere.dbtest.env.dataset.DataSetEnvironmentManager;
......@@ -77,7 +77,7 @@ public final class DMLIntegrateTest extends BaseIntegrateTest {
this.assertion = assertion;
}
@Parameters(name = "{0}.{5} -> {2} -> {3} -> {4}")
@Parameters(name = "{0}[{5}] -> {2} -> {3} -> {4}")
public static Collection<Object[]> getParameters() {
// TODO sqlCasesLoader size should eq integrateTestCasesLoader size
// assertThat(sqlCasesLoader.countAllSupportedSQLCases(), is(integrateTestCasesLoader.countAllDataSetTestCases()));
......
......@@ -21,9 +21,9 @@ import io.shardingsphere.core.constant.DatabaseType;
import io.shardingsphere.dbtest.cases.assertion.IntegrateTestCasesLoader;
import io.shardingsphere.dbtest.cases.assertion.dql.DQLIntegrateTestCase;
import io.shardingsphere.dbtest.cases.assertion.dql.DQLIntegrateTestCaseAssertion;
import io.shardingsphere.dbtest.cases.assertion.root.SQLValue;
import io.shardingsphere.dbtest.cases.dataset.expected.dataset.ExpectedDataSetRow;
import io.shardingsphere.dbtest.cases.dataset.expected.dataset.ExpectedDataSetsRoot;
import io.shardingsphere.dbtest.common.SQLValue;
import io.shardingsphere.dbtest.env.DatabaseTypeEnvironment;
import io.shardingsphere.dbtest.env.EnvironmentPath;
import io.shardingsphere.dbtest.env.IntegrateTestEnvironment;
......@@ -79,7 +79,7 @@ public final class DQLIntegrateTest extends BaseIntegrateTest {
this.assertion = assertion;
}
@Parameters(name = "{0}.{5} -> {2} -> {3} -> {4}")
@Parameters(name = "{0}[{5}] -> {2} -> {3} -> {4}")
public static Collection<Object[]> getParameters() {
// TODO sqlCasesLoader size should eq integrateTestCasesLoader size
// assertThat(sqlCasesLoader.countAllSupportedSQLCases(), is(integrateTestCasesLoader.countAllDataSetTestCases()));
......@@ -97,7 +97,6 @@ public final class DQLIntegrateTest extends BaseIntegrateTest {
result.addAll(getParameters(databaseType, caseType, integrateTestCase));
}
}
return result;
}
......
......@@ -32,11 +32,11 @@ import static org.junit.Assert.assertNotNull;
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public final class EnvironmentPath {
private static final String DATABASE_ENVIRONMENT_RESOURCES_PATH = "integrate/dbtest/%s/schema.xml";
private static final String DATABASE_ENVIRONMENT_RESOURCES_PATH = "asserts/env/%s/schema.xml";
private static final String DATA_INITIALIZE_RESOURCES_PATH = "integrate/dbtest/%s/data-init.xml";
private static final String DATA_INITIALIZE_RESOURCES_PATH = "asserts/env/%s/data-init.xml";
private static final String SHARDING_RULE_RESOURCES_PATH = "integrate/dbtest/%s/sharding-rule.yaml";
private static final String SHARDING_RULE_RESOURCES_PATH = "asserts/env/%s/sharding-rule.yaml";
/**
* Get database environment resource File.
......
......@@ -20,12 +20,12 @@ package io.shardingsphere.dbtest.env.dataset;
import com.google.common.base.Joiner;
import io.shardingsphere.core.rule.DataNode;
import io.shardingsphere.core.util.InlineExpressionParser;
import io.shardingsphere.dbtest.cases.assertion.root.SQLValue;
import io.shardingsphere.dbtest.cases.assertion.root.SQLValueGroup;
import io.shardingsphere.dbtest.cases.dataset.init.DataSetColumnMetadata;
import io.shardingsphere.dbtest.cases.dataset.init.DataSetMetadata;
import io.shardingsphere.dbtest.cases.dataset.init.DataSetRow;
import io.shardingsphere.dbtest.cases.dataset.init.DataSetsRoot;
import io.shardingsphere.dbtest.common.SQLValue;
import io.shardingsphere.dbtest.common.SQLValueGroup;
import javax.sql.DataSource;
import javax.xml.bind.JAXBContext;
......
......@@ -22,7 +22,7 @@ import io.shardingsphere.core.api.algorithm.sharding.standard.PreciseShardingAlg
import java.util.Collection;
public class SingleAlgorithm implements PreciseShardingAlgorithm<Integer> {
public class PreciseModuloAlgorithm implements PreciseShardingAlgorithm<Integer> {
@Override
public String doSharding(final Collection<String> availableTargetNames, final PreciseShardingValue<Integer> shardingValue) {
......
/*
* Copyright 2016-2018 shardingsphere.io.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* </p>
*/
package io.shardingsphere.dbtest.fixture;
import com.google.common.collect.Range;
import io.shardingsphere.core.api.algorithm.sharding.RangeShardingValue;
import io.shardingsphere.core.api.algorithm.sharding.standard.RangeShardingAlgorithm;
import java.util.Collection;
import java.util.LinkedHashSet;
/**
 * Range sharding algorithm routing by value modulo 10.
 *
 * <p>Each value in the sharding range is reduced modulo 10 and matched against the
 * suffix of every available target name; any target whose name ends with that digit
 * is selected. Result order follows target insertion order ({@link LinkedHashSet}).</p>
 */
public final class RangeModuloAlgorithm implements RangeShardingAlgorithm<Integer> {
    
    /**
     * Route a closed range of sharding values to matching target names.
     *
     * @param availableTargetNames candidate data source or table names
     * @param shardingValue range sharding value whose closed value range is scanned
     * @return target names whose suffix matches {@code value % 10} for some value in the range
     */
    @Override
    public Collection<String> doSharding(final Collection<String> availableTargetNames, final RangeShardingValue<Integer> shardingValue) {
        Collection<String> result = new LinkedHashSet<>(availableTargetNames.size());
        Range<Integer> range = shardingValue.getValueRange();
        // Primitive int avoids Integer autoboxing on every loop iteration.
        for (int i = range.lowerEndpoint(); i <= range.upperEndpoint(); i++) {
            String suffix = String.valueOf(i % 10);
            for (String each : availableTargetNames) {
                if (each.endsWith(suffix)) {
                    result.add(each);
                }
            }
            // Every candidate already matched: scanning the rest of the range cannot
            // add targets or change insertion order, so stop early.
            if (result.size() == availableTargetNames.size()) {
                break;
            }
        }
        return result;
    }
}
<datasets>
<columns values="item_id, order_id, user_id, status" />
<dataset values="100002, 1000, 10, init" />
<dataset values="100101, 1001, 10, init" />
<dataset values="100102, 1001, 10, init" />
<dataset values="110001, 1100, 11, init" />
<dataset values="110002, 1100, 11, init" />
<dataset values="110101, 1101, 11, init" />
<dataset values="110102, 1101, 11, init" />
<dataset values="120001, 1200, 12, init" />
<dataset values="120002, 1200, 12, init" />
<dataset values="120101, 1201, 12, init" />
<dataset values="120102, 1201, 12, init" />
<dataset values="130001, 1300, 13, init" />
<dataset values="130002, 1300, 13, init" />
<dataset values="130101, 1301, 13, init" />
<dataset values="130102, 1301, 13, init" />
<dataset values="140001, 1400, 14, init" />
<dataset values="140002, 1400, 14, init" />
<dataset values="140101, 1401, 14, init" />
<dataset values="140102, 1401, 14, init" />
<dataset values="150001, 1500, 15, init" />
<dataset values="150002, 1500, 15, init" />
<dataset values="150101, 1501, 15, init" />
<dataset values="150102, 1501, 15, init" />
<dataset values="160001, 1600, 16, init" />
<dataset values="160002, 1600, 16, init" />
<dataset values="160101, 1601, 16, init" />
<dataset values="160102, 1601, 16, init" />
<dataset values="170001, 1700, 17, init" />
<dataset values="170002, 1700, 17, init" />
<dataset values="170101, 1701, 17, init" />
<dataset values="170102, 1701, 17, init" />
<dataset values="180001, 1800, 18, init" />
<dataset values="180002, 1800, 18, init" />
<dataset values="180101, 1801, 18, init" />
<dataset values="180102, 1801, 18, init" />
<dataset values="190001, 1900, 19, init" />
<dataset values="190002, 1900, 19, init" />
<dataset values="190101, 1901, 19, init" />
<dataset values="190102, 1901, 19, init" />
<dataset values="200001, 2000, 20, init" />
<dataset values="200002, 2000, 20, init" />
<dataset values="200101, 2001, 20, init" />
<dataset values="200102, 2001, 20, init" />
<dataset values="210001, 2100, 21, init" />
<dataset values="210002, 2100, 21, init" />
<dataset values="210101, 2101, 21, init" />
<dataset values="210102, 2101, 21, init" />
<dataset values="220001, 2200, 22, init" />
<dataset values="220002, 2200, 22, init" />
<dataset values="220101, 2201, 22, init" />
<dataset values="220102, 2201, 22, init" />
<dataset values="230001, 2300, 23, init" />
<dataset values="230002, 2300, 23, init" />
<dataset values="230101, 2301, 23, init" />
<dataset values="230102, 2301, 23, init" />
<dataset values="240001, 2400, 24, init" />
<dataset values="240002, 2400, 24, init" />
<dataset values="240101, 2401, 24, init" />
<dataset values="240102, 2401, 24, init" />
<dataset values="250001, 2500, 25, init" />
<dataset values="250002, 2500, 25, init" />
<dataset values="250101, 2501, 25, init" />
<dataset values="250102, 2501, 25, init" />
<dataset values="260001, 2600, 26, init" />
<dataset values="260002, 2600, 26, init" />
<dataset values="260101, 2601, 26, init" />
<dataset values="260102, 2601, 26, init" />
<dataset values="270001, 2700, 27, init" />
<dataset values="270002, 2700, 27, init" />
<dataset values="270101, 2701, 27, init" />
<dataset values="270102, 2701, 27, init" />
<dataset values="280001, 2800, 28, init" />
<dataset values="280002, 2800, 28, init" />
<dataset values="280101, 2801, 28, init" />
<dataset values="280102, 2801, 28, init" />
<dataset values="290001, 2900, 29, init" />
<dataset values="290002, 2900, 29, init" />
<dataset values="290101, 2901, 29, init" />
<dataset values="290102, 2901, 29, init" />
</datasets>
<datasets>
<columns values="item_id, order_id, user_id, status" />
<dataset values="100002, 1000, 10, init" />
<dataset values="100101, 1001, 10, init" />
<dataset values="100102, 1001, 10, init" />
<dataset values="110001, 1100, 11, init" />
<dataset values="110002, 1100, 11, init" />
<dataset values="110101, 1101, 11, init" />
<dataset values="110102, 1101, 11, init" />
<dataset values="120001, 1200, 12, init" />
<dataset values="120002, 1200, 12, init" />
<dataset values="120101, 1201, 12, init" />
<dataset values="120102, 1201, 12, init" />
<dataset values="130001, 1300, 13, init" />
<dataset values="130002, 1300, 13, init" />
<dataset values="130101, 1301, 13, init" />
<dataset values="130102, 1301, 13, init" />
<dataset values="140001, 1400, 14, init" />
<dataset values="140002, 1400, 14, init" />
<dataset values="140101, 1401, 14, init" />
<dataset values="140102, 1401, 14, init" />
<dataset values="150001, 1500, 15, init" />
<dataset values="150002, 1500, 15, init" />
<dataset values="150101, 1501, 15, init" />
<dataset values="150102, 1501, 15, init" />
<dataset values="160001, 1600, 16, init" />
<dataset values="160002, 1600, 16, init" />
<dataset values="160101, 1601, 16, init" />
<dataset values="160102, 1601, 16, init" />
<dataset values="170001, 1700, 17, init" />
<dataset values="170002, 1700, 17, init" />
<dataset values="170101, 1701, 17, init" />
<dataset values="170102, 1701, 17, init" />
<dataset values="180001, 1800, 18, init" />
<dataset values="180002, 1800, 18, init" />
<dataset values="180101, 1801, 18, init" />
<dataset values="180102, 1801, 18, init" />
<dataset values="190001, 1900, 19, init" />
<dataset values="190002, 1900, 19, init" />
<dataset values="190101, 1901, 19, init" />
<dataset values="190102, 1901, 19, init" />
<dataset values="200001, 2000, 20, init" />
<dataset values="200002, 2000, 20, init" />
<dataset values="200101, 2001, 20, init" />
<dataset values="200102, 2001, 20, init" />
<dataset values="210001, 2100, 21, init" />
<dataset values="210002, 2100, 21, init" />
<dataset values="210101, 2101, 21, init" />
<dataset values="210102, 2101, 21, init" />
<dataset values="220001, 2200, 22, init" />
<dataset values="220002, 2200, 22, init" />
<dataset values="220101, 2201, 22, init" />
<dataset values="220102, 2201, 22, init" />
<dataset values="230001, 2300, 23, init" />
<dataset values="230002, 2300, 23, init" />
<dataset values="230101, 2301, 23, init" />
<dataset values="230102, 2301, 23, init" />
<dataset values="240001, 2400, 24, init" />
<dataset values="240002, 2400, 24, init" />
<dataset values="240101, 2401, 24, init" />
<dataset values="240102, 2401, 24, init" />
<dataset values="250001, 2500, 25, init" />
<dataset values="250002, 2500, 25, init" />
<dataset values="250101, 2501, 25, init" />
<dataset values="250102, 2501, 25, init" />
<dataset values="260001, 2600, 26, init" />
<dataset values="260002, 2600, 26, init" />
<dataset values="260101, 2601, 26, init" />
<dataset values="260102, 2601, 26, init" />
<dataset values="270001, 2700, 27, init" />
<dataset values="270002, 2700, 27, init" />
<dataset values="270101, 2701, 27, init" />
<dataset values="270102, 2701, 27, init" />
<dataset values="280001, 2800, 28, init" />
<dataset values="280002, 2800, 28, init" />
<dataset values="280101, 2801, 28, init" />
<dataset values="280102, 2801, 28, init" />
<dataset values="290001, 2900, 29, init" />
<dataset values="290002, 2900, 29, init" />
<dataset values="290101, 2901, 29, init" />
<dataset values="290102, 2901, 29, init" />
</datasets>
<datasets>
<columns values="item_id, order_id, user_id, status" />
<dataset values="100000, 1000, 10, init" />
<dataset values="100100, 1001, 10, init" />
<dataset values="100101, 1001, 10, init" />
<dataset values="100200, 1002, 10, init" />
<dataset values="100201, 1002, 10, init" />
<dataset values="100300, 1003, 10, init" />
<dataset values="100301, 1003, 10, init" />
<dataset values="100400, 1004, 10, init" />
<dataset values="100401, 1004, 10, init" />
<dataset values="100500, 1005, 10, init" />
<dataset values="100501, 1005, 10, init" />
<dataset values="100600, 1006, 10, init" />
<dataset values="100601, 1006, 10, init" />
<dataset values="100700, 1007, 10, init" />
<dataset values="100701, 1007, 10, init" />
<dataset values="100800, 1008, 10, init" />
<dataset values="100801, 1008, 10, init" />
<dataset values="100900, 1009, 10, init" />
<dataset values="100901, 1009, 10, init" />
<dataset values="110000, 1100, 11, init" />
<dataset values="110001, 1100, 11, init" />
<dataset values="110100, 1101, 11, init" />
<dataset values="110101, 1101, 11, init" />
<dataset values="110200, 1102, 11, init" />
<dataset values="110201, 1102, 11, init" />
<dataset values="110300, 1103, 11, init" />
<dataset values="110301, 1103, 11, init" />
<dataset values="110400, 1104, 11, init" />
<dataset values="110401, 1104, 11, init" />
<dataset values="110500, 1105, 11, init" />
<dataset values="110501, 1105, 11, init" />
<dataset values="110600, 1106, 11, init" />
<dataset values="110601, 1106, 11, init" />
<dataset values="110700, 1107, 11, init" />
<dataset values="110701, 1107, 11, init" />
<dataset values="110800, 1108, 11, init" />
<dataset values="110801, 1108, 11, init" />
<dataset values="110900, 1109, 11, init" />
<dataset values="110901, 1109, 11, init" />
<dataset values="120000, 1200, 12, init" />
<dataset values="120001, 1200, 12, init" />
<dataset values="120100, 1201, 12, init" />
<dataset values="120101, 1201, 12, init" />
<dataset values="120200, 1202, 12, init" />
<dataset values="120201, 1202, 12, init" />
<dataset values="120300, 1203, 12, init" />
<dataset values="120301, 1203, 12, init" />
<dataset values="120400, 1204, 12, init" />
<dataset values="120401, 1204, 12, init" />
<dataset values="120500, 1205, 12, init" />
<dataset values="120501, 1205, 12, init" />
<dataset values="120600, 1206, 12, init" />
<dataset values="120601, 1206, 12, init" />
<dataset values="120700, 1207, 12, init" />
<dataset values="120701, 1207, 12, init" />
<dataset values="120800, 1208, 12, init" />
<dataset values="120801, 1208, 12, init" />
<dataset values="120900, 1209, 12, init" />
<dataset values="120901, 1209, 12, init" />
<dataset values="130000, 1300, 13, init" />
<dataset values="130001, 1300, 13, init" />
<dataset values="130100, 1301, 13, init" />
<dataset values="130101, 1301, 13, init" />
<dataset values="130200, 1302, 13, init" />
<dataset values="130201, 1302, 13, init" />
<dataset values="130300, 1303, 13, init" />
<dataset values="130301, 1303, 13, init" />
<dataset values="130400, 1304, 13, init" />
<dataset values="130401, 1304, 13, init" />
<dataset values="130500, 1305, 13, init" />
<dataset values="130501, 1305, 13, init" />
<dataset values="130600, 1306, 13, init" />
<dataset values="130601, 1306, 13, init" />
<dataset values="130700, 1307, 13, init" />
<dataset values="130701, 1307, 13, init" />
<dataset values="130800, 1308, 13, init" />
<dataset values="130801, 1308, 13, init" />
<dataset values="130900, 1309, 13, init" />
<dataset values="130901, 1309, 13, init" />
<dataset values="140000, 1400, 14, init" />
<dataset values="140001, 1400, 14, init" />
<dataset values="140100, 1401, 14, init" />
<dataset values="140101, 1401, 14, init" />
<dataset values="140200, 1402, 14, init" />
<dataset values="140201, 1402, 14, init" />
<dataset values="140300, 1403, 14, init" />
<dataset values="140301, 1403, 14, init" />
<dataset values="140400, 1404, 14, init" />
<dataset values="140401, 1404, 14, init" />
<dataset values="140500, 1405, 14, init" />
<dataset values="140501, 1405, 14, init" />
<dataset values="140600, 1406, 14, init" />
<dataset values="140601, 1406, 14, init" />
<dataset values="140700, 1407, 14, init" />
<dataset values="140701, 1407, 14, init" />
<dataset values="140800, 1408, 14, init" />
<dataset values="140801, 1408, 14, init" />
<dataset values="140900, 1409, 14, init" />
<dataset values="140901, 1409, 14, init" />
<dataset values="150000, 1500, 15, init" />
<dataset values="150001, 1500, 15, init" />
<dataset values="150100, 1501, 15, init" />
<dataset values="150101, 1501, 15, init" />
<dataset values="150200, 1502, 15, init" />
<dataset values="150201, 1502, 15, init" />
<dataset values="150300, 1503, 15, init" />
<dataset values="150301, 1503, 15, init" />
<dataset values="150400, 1504, 15, init" />
<dataset values="150401, 1504, 15, init" />
<dataset values="150500, 1505, 15, init" />
<dataset values="150501, 1505, 15, init" />
<dataset values="150600, 1506, 15, init" />
<dataset values="150601, 1506, 15, init" />
<dataset values="150700, 1507, 15, init" />
<dataset values="150701, 1507, 15, init" />
<dataset values="150800, 1508, 15, init" />
<dataset values="150801, 1508, 15, init" />
<dataset values="150900, 1509, 15, init" />
<dataset values="150901, 1509, 15, init" />
<dataset values="160000, 1600, 16, init" />
<dataset values="160001, 1600, 16, init" />
<dataset values="160100, 1601, 16, init" />
<dataset values="160101, 1601, 16, init" />
<dataset values="160200, 1602, 16, init" />
<dataset values="160201, 1602, 16, init" />
<dataset values="160300, 1603, 16, init" />
<dataset values="160301, 1603, 16, init" />
<dataset values="160400, 1604, 16, init" />
<dataset values="160401, 1604, 16, init" />
<dataset values="160500, 1605, 16, init" />
<dataset values="160501, 1605, 16, init" />
<dataset values="160600, 1606, 16, init" />
<dataset values="160601, 1606, 16, init" />
<dataset values="160700, 1607, 16, init" />
<dataset values="160701, 1607, 16, init" />
<dataset values="160800, 1608, 16, init" />
<dataset values="160801, 1608, 16, init" />
<dataset values="160900, 1609, 16, init" />
<dataset values="160901, 1609, 16, init" />
<dataset values="170000, 1700, 17, init" />
<dataset values="170001, 1700, 17, init" />
<dataset values="170100, 1701, 17, init" />
<dataset values="170101, 1701, 17, init" />
<dataset values="170200, 1702, 17, init" />
<dataset values="170201, 1702, 17, init" />
<dataset values="170300, 1703, 17, init" />
<dataset values="170301, 1703, 17, init" />
<dataset values="170400, 1704, 17, init" />
<dataset values="170401, 1704, 17, init" />
<dataset values="170500, 1705, 17, init" />
<dataset values="170501, 1705, 17, init" />
<dataset values="170600, 1706, 17, init" />
<dataset values="170601, 1706, 17, init" />
<dataset values="170700, 1707, 17, init" />
<dataset values="170701, 1707, 17, init" />
<dataset values="170800, 1708, 17, init" />
<dataset values="170801, 1708, 17, init" />
<dataset values="170900, 1709, 17, init" />
<dataset values="170901, 1709, 17, init" />
<dataset values="180000, 1800, 18, init" />
<dataset values="180001, 1800, 18, init" />
<dataset values="180100, 1801, 18, init" />
<dataset values="180101, 1801, 18, init" />
<dataset values="180200, 1802, 18, init" />
<dataset values="180201, 1802, 18, init" />
<dataset values="180300, 1803, 18, init" />
<dataset values="180301, 1803, 18, init" />
<dataset values="180400, 1804, 18, init" />
<dataset values="180401, 1804, 18, init" />
<dataset values="180500, 1805, 18, init" />
<dataset values="180501, 1805, 18, init" />
<dataset values="180600, 1806, 18, init" />
<dataset values="180601, 1806, 18, init" />
<dataset values="180700, 1807, 18, init" />
<dataset values="180701, 1807, 18, init" />
<dataset values="180800, 1808, 18, init" />
<dataset values="180801, 1808, 18, init" />
<dataset values="180900, 1809, 18, init" />
<dataset values="180901, 1809, 18, init" />
<dataset values="190000, 1900, 19, init" />
<dataset values="190001, 1900, 19, init" />
<dataset values="190100, 1901, 19, init" />
<dataset values="190101, 1901, 19, init" />
<dataset values="190200, 1902, 19, init" />
<dataset values="190201, 1902, 19, init" />
<dataset values="190300, 1903, 19, init" />
<dataset values="190301, 1903, 19, init" />
<dataset values="190400, 1904, 19, init" />
<dataset values="190401, 1904, 19, init" />
<dataset values="190500, 1905, 19, init" />
<dataset values="190501, 1905, 19, init" />
<dataset values="190600, 1906, 19, init" />
<dataset values="190601, 1906, 19, init" />
<dataset values="190700, 1907, 19, init" />
<dataset values="190701, 1907, 19, init" />
<dataset values="190800, 1908, 19, init" />
<dataset values="190801, 1908, 19, init" />
<dataset values="190900, 1909, 19, init" />
<dataset values="190901, 1909, 19, init" />
</datasets>
<datasets>
<columns values="item_id, order_id, user_id, status" />
<dataset values="100100, 1001, 10, init" />
<dataset values="100101, 1001, 10, init" />
<dataset values="100200, 1002, 10, init" />
<dataset values="100201, 1002, 10, init" />
<dataset values="100300, 1003, 10, init" />
<dataset values="100301, 1003, 10, init" />
<dataset values="100400, 1004, 10, init" />
<dataset values="100401, 1004, 10, init" />
<dataset values="100500, 1005, 10, init" />
<dataset values="100501, 1005, 10, init" />
<dataset values="100600, 1006, 10, init" />
<dataset values="100601, 1006, 10, init" />
<dataset values="100700, 1007, 10, init" />
<dataset values="100701, 1007, 10, init" />
<dataset values="100800, 1008, 10, init" />
<dataset values="100801, 1008, 10, init" />
<dataset values="100900, 1009, 10, init" />
<dataset values="100901, 1009, 10, init" />
<dataset values="110000, 1100, 11, init" />
<dataset values="110001, 1100, 11, init" />
<dataset values="110100, 1101, 11, init" />
<dataset values="110101, 1101, 11, init" />
<dataset values="110200, 1102, 11, init" />
<dataset values="110201, 1102, 11, init" />
<dataset values="110300, 1103, 11, init" />
<dataset values="110301, 1103, 11, init" />
<dataset values="110400, 1104, 11, init" />
<dataset values="110401, 1104, 11, init" />
<dataset values="110500, 1105, 11, init" />
<dataset values="110501, 1105, 11, init" />
<dataset values="110600, 1106, 11, init" />
<dataset values="110601, 1106, 11, init" />
<dataset values="110700, 1107, 11, init" />
<dataset values="110701, 1107, 11, init" />
<dataset values="110800, 1108, 11, init" />
<dataset values="110801, 1108, 11, init" />
<dataset values="110900, 1109, 11, init" />
<dataset values="110901, 1109, 11, init" />
<dataset values="120000, 1200, 12, init" />
<dataset values="120001, 1200, 12, init" />
<dataset values="120100, 1201, 12, init" />
<dataset values="120101, 1201, 12, init" />
<dataset values="120200, 1202, 12, init" />
<dataset values="120201, 1202, 12, init" />
<dataset values="120300, 1203, 12, init" />
<dataset values="120301, 1203, 12, init" />
<dataset values="120400, 1204, 12, init" />
<dataset values="120401, 1204, 12, init" />
<dataset values="120500, 1205, 12, init" />
<dataset values="120501, 1205, 12, init" />
<dataset values="120600, 1206, 12, init" />
<dataset values="120601, 1206, 12, init" />
<dataset values="120700, 1207, 12, init" />
<dataset values="120701, 1207, 12, init" />
<dataset values="120800, 1208, 12, init" />
<dataset values="120801, 1208, 12, init" />
<dataset values="120900, 1209, 12, init" />
<dataset values="120901, 1209, 12, init" />
<dataset values="130000, 1300, 13, init" />
<dataset values="130001, 1300, 13, init" />
<dataset values="130100, 1301, 13, init" />
<dataset values="130101, 1301, 13, init" />
<dataset values="130200, 1302, 13, init" />
<dataset values="130201, 1302, 13, init" />
<dataset values="130300, 1303, 13, init" />
<dataset values="130301, 1303, 13, init" />
<dataset values="130400, 1304, 13, init" />
<dataset values="130401, 1304, 13, init" />
<dataset values="130500, 1305, 13, init" />
<dataset values="130501, 1305, 13, init" />
<dataset values="130600, 1306, 13, init" />
<dataset values="130601, 1306, 13, init" />
<dataset values="130700, 1307, 13, init" />
<dataset values="130701, 1307, 13, init" />
<dataset values="130800, 1308, 13, init" />
<dataset values="130801, 1308, 13, init" />
<dataset values="130900, 1309, 13, init" />
<dataset values="130901, 1309, 13, init" />
<dataset values="140000, 1400, 14, init" />
<dataset values="140001, 1400, 14, init" />
<dataset values="140100, 1401, 14, init" />
<dataset values="140101, 1401, 14, init" />
<dataset values="140200, 1402, 14, init" />
<dataset values="140201, 1402, 14, init" />
<dataset values="140300, 1403, 14, init" />
<dataset values="140301, 1403, 14, init" />
<dataset values="140400, 1404, 14, init" />
<dataset values="140401, 1404, 14, init" />
<dataset values="140500, 1405, 14, init" />
<dataset values="140501, 1405, 14, init" />
<dataset values="140600, 1406, 14, init" />
<dataset values="140601, 1406, 14, init" />
<dataset values="140700, 1407, 14, init" />
<dataset values="140701, 1407, 14, init" />
<dataset values="140800, 1408, 14, init" />
<dataset values="140801, 1408, 14, init" />
<dataset values="140900, 1409, 14, init" />
<dataset values="140901, 1409, 14, init" />
<dataset values="150000, 1500, 15, init" />
<dataset values="150001, 1500, 15, init" />
<dataset values="150100, 1501, 15, init" />
<dataset values="150101, 1501, 15, init" />
<dataset values="150200, 1502, 15, init" />
<dataset values="150201, 1502, 15, init" />
<dataset values="150300, 1503, 15, init" />
<dataset values="150301, 1503, 15, init" />
<dataset values="150400, 1504, 15, init" />
<dataset values="150401, 1504, 15, init" />
<dataset values="150500, 1505, 15, init" />
<dataset values="150501, 1505, 15, init" />
<dataset values="150600, 1506, 15, init" />
<dataset values="150601, 1506, 15, init" />
<dataset values="150700, 1507, 15, init" />
<dataset values="150701, 1507, 15, init" />
<dataset values="150800, 1508, 15, init" />
<dataset values="150801, 1508, 15, init" />
<dataset values="150900, 1509, 15, init" />
<dataset values="150901, 1509, 15, init" />
<dataset values="160000, 1600, 16, init" />
<dataset values="160001, 1600, 16, init" />
<dataset values="160100, 1601, 16, init" />
<dataset values="160101, 1601, 16, init" />
<dataset values="160200, 1602, 16, init" />
<dataset values="160201, 1602, 16, init" />
<dataset values="160300, 1603, 16, init" />
<dataset values="160301, 1603, 16, init" />
<dataset values="160400, 1604, 16, init" />
<dataset values="160401, 1604, 16, init" />
<dataset values="160500, 1605, 16, init" />
<dataset values="160501, 1605, 16, init" />
<dataset values="160600, 1606, 16, init" />
<dataset values="160601, 1606, 16, init" />
<dataset values="160700, 1607, 16, init" />
<dataset values="160701, 1607, 16, init" />
<dataset values="160800, 1608, 16, init" />
<dataset values="160801, 1608, 16, init" />
<dataset values="160900, 1609, 16, init" />
<dataset values="160901, 1609, 16, init" />
<dataset values="170000, 1700, 17, init" />
<dataset values="170001, 1700, 17, init" />
<dataset values="170100, 1701, 17, init" />
<dataset values="170101, 1701, 17, init" />
<dataset values="170200, 1702, 17, init" />
<dataset values="170201, 1702, 17, init" />
<dataset values="170300, 1703, 17, init" />
<dataset values="170301, 1703, 17, init" />
<dataset values="170400, 1704, 17, init" />
<dataset values="170401, 1704, 17, init" />
<dataset values="170500, 1705, 17, init" />
<dataset values="170501, 1705, 17, init" />
<dataset values="170600, 1706, 17, init" />
<dataset values="170601, 1706, 17, init" />
<dataset values="170700, 1707, 17, init" />
<dataset values="170701, 1707, 17, init" />
<dataset values="170800, 1708, 17, init" />
<dataset values="170801, 1708, 17, init" />
<dataset values="170900, 1709, 17, init" />
<dataset values="170901, 1709, 17, init" />
<dataset values="180000, 1800, 18, init" />
<dataset values="180001, 1800, 18, init" />
<dataset values="180100, 1801, 18, init" />
<dataset values="180101, 1801, 18, init" />
<dataset values="180200, 1802, 18, init" />
<dataset values="180201, 1802, 18, init" />
<dataset values="180300, 1803, 18, init" />
<dataset values="180301, 1803, 18, init" />
<dataset values="180400, 1804, 18, init" />
<dataset values="180401, 1804, 18, init" />
<dataset values="180500, 1805, 18, init" />
<dataset values="180501, 1805, 18, init" />
<dataset values="180600, 1806, 18, init" />
<dataset values="180601, 1806, 18, init" />
<dataset values="180700, 1807, 18, init" />
<dataset values="180701, 1807, 18, init" />
<dataset values="180800, 1808, 18, init" />
<dataset values="180801, 1808, 18, init" />
<dataset values="180900, 1809, 18, init" />
<dataset values="180901, 1809, 18, init" />
<dataset values="190000, 1900, 19, init" />
<dataset values="190001, 1900, 19, init" />
<dataset values="190100, 1901, 19, init" />
<dataset values="190101, 1901, 19, init" />
<dataset values="190200, 1902, 19, init" />
<dataset values="190201, 1902, 19, init" />
<dataset values="190300, 1903, 19, init" />
<dataset values="190301, 1903, 19, init" />
<dataset values="190400, 1904, 19, init" />
<dataset values="190401, 1904, 19, init" />
<dataset values="190500, 1905, 19, init" />
<dataset values="190501, 1905, 19, init" />
<dataset values="190600, 1906, 19, init" />
<dataset values="190601, 1906, 19, init" />
<dataset values="190700, 1907, 19, init" />
<dataset values="190701, 1907, 19, init" />
<dataset values="190800, 1908, 19, init" />
<dataset values="190801, 1908, 19, init" />
<dataset values="190900, 1909, 19, init" />
<dataset values="190901, 1909, 19, init" />
</datasets>
<datasets>
<columns values="order_id, user_id, status" />
<dataset values="1009, 10, init_slave" />
<dataset values="1100, 11, init_slave" />
<dataset values="1101, 11, init_slave" />
<dataset values="1102, 11, init_slave" />
<dataset values="1103, 11, init_slave" />
<dataset values="1104, 11, init_slave" />
<dataset values="1105, 11, init_slave" />
<dataset values="1106, 11, init_slave" />
<dataset values="1107, 11, init_slave" />
<dataset values="1108, 11, init_slave" />
</datasets>
<datasets>
<columns values="item_id, order_id, user_id, status" />
<dataset values="100000, 1000, 10, init_slave" />
<dataset values="100100, 1001, 10, init_slave" />
<dataset values="100101, 1001, 10, init_slave" />
<dataset values="100200, 1002, 10, init_slave" />
<dataset values="100201, 1002, 10, init_slave" />
<dataset values="100300, 1003, 10, init_slave" />
<dataset values="100301, 1003, 10, init_slave" />
<dataset values="100400, 1004, 10, init_slave" />
<dataset values="100401, 1004, 10, init_slave" />
<dataset values="100500, 1005, 10, init_slave" />
<dataset values="100501, 1005, 10, init_slave" />
<dataset values="100600, 1006, 10, init_slave" />
<dataset values="100601, 1006, 10, init_slave" />
<dataset values="100700, 1007, 10, init_slave" />
<dataset values="100701, 1007, 10, init_slave" />
<dataset values="100800, 1008, 10, init_slave" />
<dataset values="100801, 1008, 10, init_slave" />
<dataset values="100900, 1009, 10, init_slave" />
<dataset values="100901, 1009, 10, init_slave" />
<dataset values="110000, 1100, 11, init_slave" />
<dataset values="110001, 1100, 11, init_slave" />
<dataset values="110100, 1101, 11, init_slave" />
<dataset values="110101, 1101, 11, init_slave" />
<dataset values="110200, 1102, 11, init_slave" />
<dataset values="110201, 1102, 11, init_slave" />
<dataset values="110300, 1103, 11, init_slave" />
<dataset values="110301, 1103, 11, init_slave" />
<dataset values="110400, 1104, 11, init_slave" />
<dataset values="110401, 1104, 11, init_slave" />
<dataset values="110500, 1105, 11, init_slave" />
<dataset values="110501, 1105, 11, init_slave" />
<dataset values="110600, 1106, 11, init_slave" />
<dataset values="110601, 1106, 11, init_slave" />
<dataset values="110700, 1107, 11, init_slave" />
<dataset values="110701, 1107, 11, init_slave" />
<dataset values="110800, 1108, 11, init_slave" />
<dataset values="110801, 1108, 11, init_slave" />
<dataset values="110900, 1109, 11, init_slave" />
<dataset values="110901, 1109, 11, init_slave" />
<dataset values="120000, 1200, 12, init_slave" />
<dataset values="120001, 1200, 12, init_slave" />
<dataset values="120100, 1201, 12, init_slave" />
<dataset values="120101, 1201, 12, init_slave" />
<dataset values="120200, 1202, 12, init_slave" />
<dataset values="120201, 1202, 12, init_slave" />
<dataset values="120300, 1203, 12, init_slave" />
<dataset values="120301, 1203, 12, init_slave" />
<dataset values="120400, 1204, 12, init_slave" />
<dataset values="120401, 1204, 12, init_slave" />
<dataset values="120500, 1205, 12, init_slave" />
<dataset values="120501, 1205, 12, init_slave" />
<dataset values="120600, 1206, 12, init_slave" />
<dataset values="120601, 1206, 12, init_slave" />
<dataset values="120700, 1207, 12, init_slave" />
<dataset values="120701, 1207, 12, init_slave" />
<dataset values="120800, 1208, 12, init_slave" />
<dataset values="120801, 1208, 12, init_slave" />
<dataset values="120900, 1209, 12, init_slave" />
<dataset values="120901, 1209, 12, init_slave" />
<dataset values="130000, 1300, 13, init_slave" />
<dataset values="130001, 1300, 13, init_slave" />
<dataset values="130100, 1301, 13, init_slave" />
<dataset values="130101, 1301, 13, init_slave" />
<dataset values="130200, 1302, 13, init_slave" />
<dataset values="130201, 1302, 13, init_slave" />
<dataset values="130300, 1303, 13, init_slave" />
<dataset values="130301, 1303, 13, init_slave" />
<dataset values="130400, 1304, 13, init_slave" />
<dataset values="130401, 1304, 13, init_slave" />
<dataset values="130500, 1305, 13, init_slave" />
<dataset values="130501, 1305, 13, init_slave" />
<dataset values="130600, 1306, 13, init_slave" />
<dataset values="130601, 1306, 13, init_slave" />
<dataset values="130700, 1307, 13, init_slave" />
<dataset values="130701, 1307, 13, init_slave" />
<dataset values="130800, 1308, 13, init_slave" />
<dataset values="130801, 1308, 13, init_slave" />
<dataset values="130900, 1309, 13, init_slave" />
<dataset values="130901, 1309, 13, init_slave" />
<dataset values="140000, 1400, 14, init_slave" />
<dataset values="140001, 1400, 14, init_slave" />
<dataset values="140100, 1401, 14, init_slave" />
<dataset values="140101, 1401, 14, init_slave" />
<dataset values="140200, 1402, 14, init_slave" />
<dataset values="140201, 1402, 14, init_slave" />
<dataset values="140300, 1403, 14, init_slave" />
<dataset values="140301, 1403, 14, init_slave" />
<dataset values="140400, 1404, 14, init_slave" />
<dataset values="140401, 1404, 14, init_slave" />
<dataset values="140500, 1405, 14, init_slave" />
<dataset values="140501, 1405, 14, init_slave" />
<dataset values="140600, 1406, 14, init_slave" />
<dataset values="140601, 1406, 14, init_slave" />
<dataset values="140700, 1407, 14, init_slave" />
<dataset values="140701, 1407, 14, init_slave" />
<dataset values="140800, 1408, 14, init_slave" />
<dataset values="140801, 1408, 14, init_slave" />
<dataset values="140900, 1409, 14, init_slave" />
<dataset values="140901, 1409, 14, init_slave" />
<dataset values="150000, 1500, 15, init_slave" />
<dataset values="150001, 1500, 15, init_slave" />
<dataset values="150100, 1501, 15, init_slave" />
<dataset values="150101, 1501, 15, init_slave" />
<dataset values="150200, 1502, 15, init_slave" />
<dataset values="150201, 1502, 15, init_slave" />
<dataset values="150300, 1503, 15, init_slave" />
<dataset values="150301, 1503, 15, init_slave" />
<dataset values="150400, 1504, 15, init_slave" />
<dataset values="150401, 1504, 15, init_slave" />
<dataset values="150500, 1505, 15, init_slave" />
<dataset values="150501, 1505, 15, init_slave" />
<dataset values="150600, 1506, 15, init_slave" />
<dataset values="150601, 1506, 15, init_slave" />
<dataset values="150700, 1507, 15, init_slave" />
<dataset values="150701, 1507, 15, init_slave" />
<dataset values="150800, 1508, 15, init_slave" />
<dataset values="150801, 1508, 15, init_slave" />
<dataset values="150900, 1509, 15, init_slave" />
<dataset values="150901, 1509, 15, init_slave" />
<dataset values="160000, 1600, 16, init_slave" />
<dataset values="160001, 1600, 16, init_slave" />
<dataset values="160100, 1601, 16, init_slave" />
<dataset values="160101, 1601, 16, init_slave" />
<dataset values="160200, 1602, 16, init_slave" />
<dataset values="160201, 1602, 16, init_slave" />
<dataset values="160300, 1603, 16, init_slave" />
<dataset values="160301, 1603, 16, init_slave" />
<dataset values="160400, 1604, 16, init_slave" />
<dataset values="160401, 1604, 16, init_slave" />
<dataset values="160500, 1605, 16, init_slave" />
<dataset values="160501, 1605, 16, init_slave" />
<dataset values="160600, 1606, 16, init_slave" />
<dataset values="160601, 1606, 16, init_slave" />
<dataset values="160700, 1607, 16, init_slave" />
<dataset values="160701, 1607, 16, init_slave" />
<dataset values="160800, 1608, 16, init_slave" />
<dataset values="160801, 1608, 16, init_slave" />
<dataset values="160900, 1609, 16, init_slave" />
<dataset values="160901, 1609, 16, init_slave" />
<dataset values="170000, 1700, 17, init_slave" />
<dataset values="170001, 1700, 17, init_slave" />
<dataset values="170100, 1701, 17, init_slave" />
<dataset values="170101, 1701, 17, init_slave" />
<dataset values="170200, 1702, 17, init_slave" />
<dataset values="170201, 1702, 17, init_slave" />
<dataset values="170300, 1703, 17, init_slave" />
<dataset values="170301, 1703, 17, init_slave" />
<dataset values="170400, 1704, 17, init_slave" />
<dataset values="170401, 1704, 17, init_slave" />
<dataset values="170500, 1705, 17, init_slave" />
<dataset values="170501, 1705, 17, init_slave" />
<dataset values="170600, 1706, 17, init_slave" />
<dataset values="170601, 1706, 17, init_slave" />
<dataset values="170700, 1707, 17, init_slave" />
<dataset values="170701, 1707, 17, init_slave" />
<dataset values="170800, 1708, 17, init_slave" />
<dataset values="170801, 1708, 17, init_slave" />
<dataset values="170900, 1709, 17, init_slave" />
<dataset values="170901, 1709, 17, init_slave" />
<dataset values="180000, 1800, 18, init_slave" />
<dataset values="180001, 1800, 18, init_slave" />
<dataset values="180100, 1801, 18, init_slave" />
<dataset values="180101, 1801, 18, init_slave" />
<dataset values="180200, 1802, 18, init_slave" />
<dataset values="180201, 1802, 18, init_slave" />
<dataset values="180300, 1803, 18, init_slave" />
<dataset values="180301, 1803, 18, init_slave" />
<dataset values="180400, 1804, 18, init_slave" />
<dataset values="180401, 1804, 18, init_slave" />
<dataset values="180500, 1805, 18, init_slave" />
<dataset values="180501, 1805, 18, init_slave" />
<dataset values="180600, 1806, 18, init_slave" />
<dataset values="180601, 1806, 18, init_slave" />
<dataset values="180700, 1807, 18, init_slave" />
<dataset values="180701, 1807, 18, init_slave" />
<dataset values="180800, 1808, 18, init_slave" />
<dataset values="180801, 1808, 18, init_slave" />
<dataset values="180900, 1809, 18, init_slave" />
<dataset values="180901, 1809, 18, init_slave" />
<dataset values="190000, 1900, 19, init_slave" />
<dataset values="190001, 1900, 19, init_slave" />
<dataset values="190100, 1901, 19, init_slave" />
<dataset values="190101, 1901, 19, init_slave" />
<dataset values="190200, 1902, 19, init_slave" />
<dataset values="190201, 1902, 19, init_slave" />
<dataset values="190300, 1903, 19, init_slave" />
<dataset values="190301, 1903, 19, init_slave" />
<dataset values="190400, 1904, 19, init_slave" />
<dataset values="190401, 1904, 19, init_slave" />
<dataset values="190500, 1905, 19, init_slave" />
<dataset values="190501, 1905, 19, init_slave" />
<dataset values="190600, 1906, 19, init_slave" />
<dataset values="190601, 1906, 19, init_slave" />
<dataset values="190700, 1907, 19, init_slave" />
<dataset values="190701, 1907, 19, init_slave" />
<dataset values="190800, 1908, 19, init_slave" />
<dataset values="190801, 1908, 19, init_slave" />
<dataset values="190900, 1909, 19, init_slave" />
<dataset values="190901, 1909, 19, init_slave" />
</datasets>
<datasets>
<columns values="item_id, order_id, user_id, status" />
<dataset values="100100, 1001, 10, init_slave" />
<dataset values="100101, 1001, 10, init_slave" />
<dataset values="100200, 1002, 10, init_slave" />
<dataset values="100201, 1002, 10, init_slave" />
<dataset values="100300, 1003, 10, init_slave" />
<dataset values="100301, 1003, 10, init_slave" />
<dataset values="100400, 1004, 10, init_slave" />
<dataset values="100401, 1004, 10, init_slave" />
<dataset values="100500, 1005, 10, init_slave" />
<dataset values="100501, 1005, 10, init_slave" />
<dataset values="100600, 1006, 10, init_slave" />
<dataset values="100601, 1006, 10, init_slave" />
<dataset values="100700, 1007, 10, init_slave" />
<dataset values="100701, 1007, 10, init_slave" />
<dataset values="100800, 1008, 10, init_slave" />
<dataset values="100801, 1008, 10, init_slave" />
<dataset values="100900, 1009, 10, init_slave" />
<dataset values="100901, 1009, 10, init_slave" />
<dataset values="110000, 1100, 11, init_slave" />
<dataset values="110001, 1100, 11, init_slave" />
<dataset values="110100, 1101, 11, init_slave" />
<dataset values="110101, 1101, 11, init_slave" />
<dataset values="110200, 1102, 11, init_slave" />
<dataset values="110201, 1102, 11, init_slave" />
<dataset values="110300, 1103, 11, init_slave" />
<dataset values="110301, 1103, 11, init_slave" />
<dataset values="110400, 1104, 11, init_slave" />
<dataset values="110401, 1104, 11, init_slave" />
<dataset values="110500, 1105, 11, init_slave" />
<dataset values="110501, 1105, 11, init_slave" />
<dataset values="110600, 1106, 11, init_slave" />
<dataset values="110601, 1106, 11, init_slave" />
<dataset values="110700, 1107, 11, init_slave" />
<dataset values="110701, 1107, 11, init_slave" />
<dataset values="110800, 1108, 11, init_slave" />
<dataset values="110801, 1108, 11, init_slave" />
<dataset values="110900, 1109, 11, init_slave" />
<dataset values="110901, 1109, 11, init_slave" />
<dataset values="120000, 1200, 12, init_slave" />
<dataset values="120001, 1200, 12, init_slave" />
<dataset values="120100, 1201, 12, init_slave" />
<dataset values="120101, 1201, 12, init_slave" />
<dataset values="120200, 1202, 12, init_slave" />
<dataset values="120201, 1202, 12, init_slave" />
<dataset values="120300, 1203, 12, init_slave" />
<dataset values="120301, 1203, 12, init_slave" />
<dataset values="120400, 1204, 12, init_slave" />
<dataset values="120401, 1204, 12, init_slave" />
<dataset values="120500, 1205, 12, init_slave" />
<dataset values="120501, 1205, 12, init_slave" />
<dataset values="120600, 1206, 12, init_slave" />
<dataset values="120601, 1206, 12, init_slave" />
<dataset values="120700, 1207, 12, init_slave" />
<dataset values="120701, 1207, 12, init_slave" />
<dataset values="120800, 1208, 12, init_slave" />
<dataset values="120801, 1208, 12, init_slave" />
<dataset values="120900, 1209, 12, init_slave" />
<dataset values="120901, 1209, 12, init_slave" />
<dataset values="130000, 1300, 13, init_slave" />
<dataset values="130001, 1300, 13, init_slave" />
<dataset values="130100, 1301, 13, init_slave" />
<dataset values="130101, 1301, 13, init_slave" />
<dataset values="130200, 1302, 13, init_slave" />
<dataset values="130201, 1302, 13, init_slave" />
<dataset values="130300, 1303, 13, init_slave" />
<dataset values="130301, 1303, 13, init_slave" />
<dataset values="130400, 1304, 13, init_slave" />
<dataset values="130401, 1304, 13, init_slave" />
<dataset values="130500, 1305, 13, init_slave" />
<dataset values="130501, 1305, 13, init_slave" />
<dataset values="130600, 1306, 13, init_slave" />
<dataset values="130601, 1306, 13, init_slave" />
<dataset values="130700, 1307, 13, init_slave" />
<dataset values="130701, 1307, 13, init_slave" />
<dataset values="130800, 1308, 13, init_slave" />
<dataset values="130801, 1308, 13, init_slave" />
<dataset values="130900, 1309, 13, init_slave" />
<dataset values="130901, 1309, 13, init_slave" />
<dataset values="140000, 1400, 14, init_slave" />
<dataset values="140001, 1400, 14, init_slave" />
<dataset values="140100, 1401, 14, init_slave" />
<dataset values="140101, 1401, 14, init_slave" />
<dataset values="140200, 1402, 14, init_slave" />
<dataset values="140201, 1402, 14, init_slave" />
<dataset values="140300, 1403, 14, init_slave" />
<dataset values="140301, 1403, 14, init_slave" />
<dataset values="140400, 1404, 14, init_slave" />
<dataset values="140401, 1404, 14, init_slave" />
<dataset values="140500, 1405, 14, init_slave" />
<dataset values="140501, 1405, 14, init_slave" />
<dataset values="140600, 1406, 14, init_slave" />
<dataset values="140601, 1406, 14, init_slave" />
<dataset values="140700, 1407, 14, init_slave" />
<dataset values="140701, 1407, 14, init_slave" />
<dataset values="140800, 1408, 14, init_slave" />
<dataset values="140801, 1408, 14, init_slave" />
<dataset values="140900, 1409, 14, init_slave" />
<dataset values="140901, 1409, 14, init_slave" />
<dataset values="150000, 1500, 15, init_slave" />
<dataset values="150001, 1500, 15, init_slave" />
<dataset values="150100, 1501, 15, init_slave" />
<dataset values="150101, 1501, 15, init_slave" />
<dataset values="150200, 1502, 15, init_slave" />
<dataset values="150201, 1502, 15, init_slave" />
<dataset values="150300, 1503, 15, init_slave" />
<dataset values="150301, 1503, 15, init_slave" />
<dataset values="150400, 1504, 15, init_slave" />
<dataset values="150401, 1504, 15, init_slave" />
<dataset values="150500, 1505, 15, init_slave" />
<dataset values="150501, 1505, 15, init_slave" />
<dataset values="150600, 1506, 15, init_slave" />
<dataset values="150601, 1506, 15, init_slave" />
<dataset values="150700, 1507, 15, init_slave" />
<dataset values="150701, 1507, 15, init_slave" />
<dataset values="150800, 1508, 15, init_slave" />
<dataset values="150801, 1508, 15, init_slave" />
<dataset values="150900, 1509, 15, init_slave" />
<dataset values="150901, 1509, 15, init_slave" />
<dataset values="160000, 1600, 16, init_slave" />
<dataset values="160001, 1600, 16, init_slave" />
<dataset values="160100, 1601, 16, init_slave" />
<dataset values="160101, 1601, 16, init_slave" />
<dataset values="160200, 1602, 16, init_slave" />
<dataset values="160201, 1602, 16, init_slave" />
<dataset values="160300, 1603, 16, init_slave" />
<dataset values="160301, 1603, 16, init_slave" />
<dataset values="160400, 1604, 16, init_slave" />
<dataset values="160401, 1604, 16, init_slave" />
<dataset values="160500, 1605, 16, init_slave" />
<dataset values="160501, 1605, 16, init_slave" />
<dataset values="160600, 1606, 16, init_slave" />
<dataset values="160601, 1606, 16, init_slave" />
<dataset values="160700, 1607, 16, init_slave" />
<dataset values="160701, 1607, 16, init_slave" />
<dataset values="160800, 1608, 16, init_slave" />
<dataset values="160801, 1608, 16, init_slave" />
<dataset values="160900, 1609, 16, init_slave" />
<dataset values="160901, 1609, 16, init_slave" />
<dataset values="170000, 1700, 17, init_slave" />
<dataset values="170001, 1700, 17, init_slave" />
<dataset values="170100, 1701, 17, init_slave" />
<dataset values="170101, 1701, 17, init_slave" />
<dataset values="170200, 1702, 17, init_slave" />
<dataset values="170201, 1702, 17, init_slave" />
<dataset values="170300, 1703, 17, init_slave" />
<dataset values="170301, 1703, 17, init_slave" />
<dataset values="170400, 1704, 17, init_slave" />
<dataset values="170401, 1704, 17, init_slave" />
<dataset values="170500, 1705, 17, init_slave" />
<dataset values="170501, 1705, 17, init_slave" />
<dataset values="170600, 1706, 17, init_slave" />
<dataset values="170601, 1706, 17, init_slave" />
<dataset values="170700, 1707, 17, init_slave" />
<dataset values="170701, 1707, 17, init_slave" />
<dataset values="170800, 1708, 17, init_slave" />
<dataset values="170801, 1708, 17, init_slave" />
<dataset values="170900, 1709, 17, init_slave" />
<dataset values="170901, 1709, 17, init_slave" />
<dataset values="180000, 1800, 18, init_slave" />
<dataset values="180001, 1800, 18, init_slave" />
<dataset values="180100, 1801, 18, init_slave" />
<dataset values="180101, 1801, 18, init_slave" />
<dataset values="180200, 1802, 18, init_slave" />
<dataset values="180201, 1802, 18, init_slave" />
<dataset values="180300, 1803, 18, init_slave" />
<dataset values="180301, 1803, 18, init_slave" />
<dataset values="180400, 1804, 18, init_slave" />
<dataset values="180401, 1804, 18, init_slave" />
<dataset values="180500, 1805, 18, init_slave" />
<dataset values="180501, 1805, 18, init_slave" />
<dataset values="180600, 1806, 18, init_slave" />
<dataset values="180601, 1806, 18, init_slave" />
<dataset values="180700, 1807, 18, init_slave" />
<dataset values="180701, 1807, 18, init_slave" />
<dataset values="180800, 1808, 18, init_slave" />
<dataset values="180801, 1808, 18, init_slave" />
<dataset values="180900, 1809, 18, init_slave" />
<dataset values="180901, 1809, 18, init_slave" />
<dataset values="190000, 1900, 19, init_slave" />
<dataset values="190001, 1900, 19, init_slave" />
<dataset values="190100, 1901, 19, init_slave" />
<dataset values="190101, 1901, 19, init_slave" />
<dataset values="190200, 1902, 19, init_slave" />
<dataset values="190201, 1902, 19, init_slave" />
<dataset values="190300, 1903, 19, init_slave" />
<dataset values="190301, 1903, 19, init_slave" />
<dataset values="190400, 1904, 19, init_slave" />
<dataset values="190401, 1904, 19, init_slave" />
<dataset values="190500, 1905, 19, init_slave" />
<dataset values="190501, 1905, 19, init_slave" />
<dataset values="190600, 1906, 19, init_slave" />
<dataset values="190601, 1906, 19, init_slave" />
<dataset values="190700, 1907, 19, init_slave" />
<dataset values="190701, 1907, 19, init_slave" />
<dataset values="190800, 1908, 19, init_slave" />
<dataset values="190801, 1908, 19, init_slave" />
<dataset values="190900, 1909, 19, init_slave" />
<dataset values="190901, 1909, 19, init_slave" />
</datasets>
<datasets>
<columns values="order_id, user_id, status" />
<dataset values="1009, 10, init" />
<dataset values="1100, 11, init" />
<dataset values="1101, 11, init" />
<dataset values="1102, 11, init" />
<dataset values="1103, 11, init" />
<dataset values="1104, 11, init" />
<dataset values="1105, 11, init" />
<dataset values="1106, 11, init" />
<dataset values="1107, 11, init" />
<dataset values="1108, 11, init" />
</datasets>
<datasets>
<columns values="order_id, user_id, status" />
<dataset values="1001, 10, init" />
</datasets>
<datasets>
<columns values="item_id, order_id, user_id, status, c_date" />
<dataset values="100001, 1000, 10, init, 2017-08-08" />
<dataset values="100000, 1000, 10, init, 2017-08-08" />
<dataset values="100100, 1001, 10, init, 2017-08-08" />
<dataset values="100101, 1001, 10, init, 2017-08-08" />
<dataset values="100200, 1002, 10, init, 2017-08-08" />
......
<integrate-test-cases>
<dql-test-case sql-case-id="select_constant_without_table">
<assertion sharding-rule-type="db" expected-data-file="select_constant_without_table.xml" />
<assertion sharding-rule-type="tbl" expected-data-file="select_constant_without_table.xml" />
<assertion sharding-rule-type="dbtbl" expected-data-file="select_constant_without_table.xml" />
<assertion sharding-rule-type="masterslave" expected-data-file="select_constant_without_table.xml" />
</dql-test-case>
<dql-test-case sql-case-id="select_not_equal_with_single_table">
<assertion sharding-rule-type="db" parameters="100001:int" expected-data-file="db/select_not_equal_with_single_table.xml" />
<assertion sharding-rule-type="tbl" parameters="100001:int" expected-data-file="tbl/select_not_equal_with_single_table.xml" />
<assertion sharding-rule-type="dbtbl" parameters="100001:int" expected-data-file="dbtbl/select_not_equal_with_single_table.xml" />
<assertion sharding-rule-type="masterslave" parameters="100001:int" expected-data-file="masterslave/select_not_equal_with_single_table.xml" />
</dql-test-case>
<dql-test-case sql-case-id="select_exclamation_equal_with_single_table">
<assertion sharding-rule-type="db" parameters="100001:int" expected-data-file="db/select_not_equal_with_single_table.xml" />
<assertion sharding-rule-type="tbl" parameters="100001:int" expected-data-file="tbl/select_not_equal_with_single_table.xml" />
<assertion sharding-rule-type="dbtbl" parameters="100001:int" expected-data-file="dbtbl/select_not_equal_with_single_table.xml" />
<assertion sharding-rule-type="masterslave" parameters="100001:int" expected-data-file="masterslave/select_not_equal_with_single_table.xml" />
</dql-test-case>
<dql-test-case sql-case-id="select_not_in_with_single_table">
<assertion sharding-rule-type="db" parameters="100000:int, 100001:int" expected-data-file="db/select_not_in_with_single_table.xml" />
<assertion sharding-rule-type="tbl" parameters="100000:int, 100001:int" expected-data-file="tbl/select_not_in_with_single_table.xml" />
<assertion sharding-rule-type="dbtbl" parameters="100000:int, 100001:int" expected-data-file="dbtbl/select_not_in_with_single_table.xml" />
<assertion sharding-rule-type="masterslave" parameters="100000:int, 100001:int" expected-data-file="masterslave/select_not_in_with_single_table.xml" />
</dql-test-case>
<dql-test-case sql-case-id="select_not_between_with_single_table">
<assertion sharding-rule-type="db" parameters="100000:int, 100001:int" expected-data-file="db/select_not_in_with_single_table.xml" />
<assertion sharding-rule-type="tbl" parameters="100000:int, 100001:int" expected-data-file="tbl/select_not_in_with_single_table.xml" />
<assertion sharding-rule-type="dbtbl" parameters="100000:int, 100001:int" expected-data-file="dbtbl/select_not_in_with_single_table.xml" />
<assertion sharding-rule-type="masterslave" parameters="100000:int, 100001:int" expected-data-file="masterslave/select_not_in_with_single_table.xml" />
</dql-test-case>
<dql-test-case sql-case-id="select_equal_with_single_table">
<assertion sharding-rule-type="db" parameters="12:int, 1201:int" expected-data-file="db/select_equal_with_single_table.xml" />
<assertion sharding-rule-type="tbl" parameters="10:int, 1000:int" expected-data-file="tbl/select_equal_with_single_table.xml" />
<assertion sharding-rule-type="dbtbl" parameters="12:int, 1201:int" expected-data-file="dbtbl/select_equal_with_single_table.xml" />
<assertion sharding-rule-type="masterslave" parameters="12:int, 1201:int" expected-data-file="masterslave/select_equal_with_single_table.xml" />
<assertion sharding-rule-type="tbl" parameters="12:int, 1000:int" expected-data-file="empty_result.xml" />
<assertion sharding-rule-type="db" parameters="12:int, 1000:int" expected-data-file="empty_result.xml" />
<assertion sharding-rule-type="dbtbl" parameters="12:int, 1000:int" expected-data-file="empty_result.xml" />
<assertion sharding-rule-type="masterslave" parameters="12:int, 1000:int" expected-data-file="empty_result.xml" />
</dql-test-case>
<dql-test-case sql-case-id="select_equal_with_same_sharding_column">
<assertion sharding-rule-type="db" parameters="1000:int, 1001:int" expected-data-file="empty_result.xml" />
<assertion sharding-rule-type="tbl" parameters="1000:int, 1001:int" expected-data-file="empty_result.xml" />
<assertion sharding-rule-type="dbtbl" parameters="1000:int, 1001:int" expected-data-file="empty_result.xml" />
<assertion sharding-rule-type="masterslave" parameters="1000:int, 1001:int" expected-data-file="empty_result.xml" />
</dql-test-case>
<dql-test-case sql-case-id="select_in_with_same_sharding_column">
<assertion sharding-rule-type="db" parameters="1000:int, 1001:int, 1001:int, 1100:int" expected-data-file="db/select_in_with_same_sharding_column.xml" />
<assertion sharding-rule-type="tbl" parameters="1000:int, 1001:int, 1001:int, 1100:int" expected-data-file="tbl/select_in_with_same_sharding_column.xml" />
<assertion sharding-rule-type="dbtbl" parameters="1000:int, 1001:int, 1001:int, 1100:int" expected-data-file="dbtbl/select_in_with_same_sharding_column.xml" />
<assertion sharding-rule-type="masterslave" parameters="1000:int, 1001:int, 1001:int, 1100:int" expected-data-file="masterslave/select_in_with_same_sharding_column.xml" />
</dql-test-case>
<dql-test-case sql-case-id="select_between_with_single_table">
<assertion sharding-rule-type="db" parameters="10:int, 12:int, 1001:int, 1200:int" expected-data-file="db/select_between_with_single_table.xml" />
<assertion sharding-rule-type="tbl" parameters="10:int, 12:int, 1009:int, 1108:int" expected-data-file="tbl/select_between_with_single_table.xml" />
<assertion sharding-rule-type="dbtbl" parameters="10:int, 12:int, 1009:int, 1108:int" expected-data-file="dbtbl/select_between_with_single_table.xml" />
<assertion sharding-rule-type="masterslave" parameters="10:int, 12:int, 1009:int, 1108:int" expected-data-file="masterslave/select_between_with_single_table.xml" />
<assertion sharding-rule-type="db" parameters="10:int, 12:int, 1309:int, 1408:int" expected-data-file="empty_result.xml" />
<assertion sharding-rule-type="tbl" parameters="10:int, 12:int, 1309:int, 1408:int" expected-data-file="empty_result.xml" />
<assertion sharding-rule-type="dbtbl" parameters="10:int, 12:int, 1309:int, 1408:int" expected-data-file="empty_result.xml" />
<assertion sharding-rule-type="masterslave" parameters="10:int, 12:int, 1309:int, 1408:int" expected-data-file="empty_result.xml" />
</dql-test-case>
</integrate-test-cases>
......@@ -5,11 +5,8 @@ shardingRule:
databaseStrategy:
standard:
shardingColumn: user_id
preciseAlgorithmClassName: io.shardingsphere.dbtest.fixture.SingleAlgorithm
tableStrategy:
inline:
shardingColumn: id
algorithmExpression: t_order
preciseAlgorithmClassName: io.shardingsphere.dbtest.fixture.PreciseModuloAlgorithm
rangeAlgorithmClassName: io.shardingsphere.dbtest.fixture.RangeModuloAlgorithm
t_order_item:
actualDataNodes: db_${0..9}.t_order_item
keyGeneratorColumnName: order_item_id
......@@ -17,12 +14,8 @@ shardingRule:
databaseStrategy:
standard:
shardingColumn: user_id
preciseAlgorithmClassName: io.shardingsphere.dbtest.fixture.SingleAlgorithm
rangeAlgorithmClassName: io.shardingsphere.core.fixture.RangeOrderShardingAlgorithm
tableStrategy:
inline:
shardingColumn: id
algorithmExpression: t_order_item
preciseAlgorithmClassName: io.shardingsphere.dbtest.fixture.PreciseModuloAlgorithm
rangeAlgorithmClassName: io.shardingsphere.dbtest.fixture.RangeModuloAlgorithm
bindingTables:
- t_order,t_order_item
defaultDataSourceName: db_0
......
......@@ -5,12 +5,13 @@ shardingRule:
databaseStrategy:
standard:
shardingColumn: user_id
preciseAlgorithmClassName: io.shardingsphere.dbtest.fixture.SingleAlgorithm
rangeAlgorithmClassName: io.shardingsphere.core.fixture.RangeOrderShardingAlgorithm
preciseAlgorithmClassName: io.shardingsphere.dbtest.fixture.PreciseModuloAlgorithm
rangeAlgorithmClassName: io.shardingsphere.dbtest.fixture.RangeModuloAlgorithm
tableStrategy:
inline:
standard:
shardingColumn: order_id
algorithmExpression: t_order_${order_id % 10}
preciseAlgorithmClassName: io.shardingsphere.dbtest.fixture.PreciseModuloAlgorithm
rangeAlgorithmClassName: io.shardingsphere.dbtest.fixture.RangeModuloAlgorithm
t_order_item:
actualDataNodes: dbtbl_${0..9}.t_order_item_${0..9}
keyGeneratorColumnName: order_item_id
......@@ -18,11 +19,13 @@ shardingRule:
databaseStrategy:
standard:
shardingColumn: user_id
preciseAlgorithmClassName: io.shardingsphere.dbtest.fixture.SingleAlgorithm
preciseAlgorithmClassName: io.shardingsphere.dbtest.fixture.PreciseModuloAlgorithm
rangeAlgorithmClassName: io.shardingsphere.dbtest.fixture.RangeModuloAlgorithm
tableStrategy:
inline:
standard:
shardingColumn: order_id
algorithmExpression: t_order_item_${order_id % 10}
preciseAlgorithmClassName: io.shardingsphere.dbtest.fixture.PreciseModuloAlgorithm
rangeAlgorithmClassName: io.shardingsphere.dbtest.fixture.RangeModuloAlgorithm
bindingTables:
- t_order,t_order_item
defaultDataSourceName: dbtbl_0
......
......@@ -7,11 +7,13 @@ shardingRule:
databaseStrategy:
standard:
shardingColumn: user_id
preciseAlgorithmClassName: io.shardingsphere.core.integrate.api.yaml.fixture.SingleAlgorithm
preciseAlgorithmClassName: io.shardingsphere.dbtest.fixture.PreciseModuloAlgorithm
rangeAlgorithmClassName: io.shardingsphere.dbtest.fixture.RangeModuloAlgorithm
tableStrategy:
inline:
standard:
shardingColumn: order_id
algorithmExpression: t_order_${order_id % 10}
preciseAlgorithmClassName: io.shardingsphere.dbtest.fixture.PreciseModuloAlgorithm
rangeAlgorithmClassName: io.shardingsphere.dbtest.fixture.RangeModuloAlgorithm
keyGeneratorColumnName: order_id
keyGeneratorClassName: io.shardingsphere.core.integrate.api.yaml.fixture.IncrementKeyGenerator
logicIndex: t_order_index
......@@ -20,19 +22,15 @@ shardingRule:
databaseStrategy:
standard:
shardingColumn: user_id
preciseAlgorithmClassName: io.shardingsphere.core.integrate.api.yaml.fixture.SingleAlgorithm
preciseAlgorithmClassName: io.shardingsphere.dbtest.fixture.PreciseModuloAlgorithm
rangeAlgorithmClassName: io.shardingsphere.dbtest.fixture.RangeModuloAlgorithm
tableStrategy:
inline:
standard:
shardingColumn: order_id
algorithmExpression: t_order_item_${order_id % 10}
preciseAlgorithmClassName: io.shardingsphere.dbtest.fixture.PreciseModuloAlgorithm
rangeAlgorithmClassName: io.shardingsphere.dbtest.fixture.RangeModuloAlgorithm
bindingTables:
- t_order,t_order_item
defaultDatabaseStrategy:
none:
defaultTableStrategy:
complex:
shardingColumns: id, order_id
algorithmClassName: io.shardingsphere.core.integrate.api.yaml.fixture.MultiAlgorithm
masterSlaveRules:
db_ms_0:
......
......@@ -3,17 +3,19 @@ shardingRule:
t_order:
actualDataNodes: tbl.t_order_${0..9}
tableStrategy:
inline:
standard:
shardingColumn: order_id
algorithmExpression: t_order_${order_id % 10}
preciseAlgorithmClassName: io.shardingsphere.dbtest.fixture.PreciseModuloAlgorithm
rangeAlgorithmClassName: io.shardingsphere.dbtest.fixture.RangeModuloAlgorithm
t_order_item:
actualDataNodes: tbl.t_order_item_${0..9}
keyGeneratorColumnName: order_item_id
keyGeneratorClassName: io.shardingsphere.dbtest.fixture.DecrementKeyGenerator
tableStrategy:
inline:
standard:
shardingColumn: order_id
algorithmExpression: t_order_item_${order_id % 10}
preciseAlgorithmClassName: io.shardingsphere.dbtest.fixture.PreciseModuloAlgorithm
rangeAlgorithmClassName: io.shardingsphere.dbtest.fixture.RangeModuloAlgorithm
bindingTables:
- t_order,t_order_item
defaultDataSourceName: tbl
......
<datasets>
<columns values="order_id, user_id, status" />
<dataset values="1201, 12, init" />
</datasets>
<integrate-test-cases>
<dql-test-case sql-case-id="assertSelectOne">
<assertion sharding-rule-type="db" expected-data-file="SelectOne.xml" />
<assertion sharding-rule-type="tbl" expected-data-file="SelectOne.xml" />
<assertion sharding-rule-type="dbtbl" expected-data-file="SelectOne.xml" />
<assertion sharding-rule-type="masterslave" expected-data-file="SelectOne.xml" />
</dql-test-case>
<dql-test-case sql-case-id="assertSelectNotEqualsWithSingleTable">
<assertion sharding-rule-type="tbl" parameters="100000:int" expected-data-file="SelectNotEqualsWithSingleTable.xml" />
</dql-test-case>
<dql-test-case sql-case-id="assertSelectNotEqualsWithSingleTableForExclamationEqual">
<assertion sharding-rule-type="tbl" parameters="100000:int" expected-data-file="SelectNotEqualsWithSingleTable.xml" />
</dql-test-case>
<dql-test-case sql-case-id="assertSelectNotEqualsWithSingleTableForNotIn">
<assertion sharding-rule-type="tbl" parameters="100000:int, 100001:int" expected-data-file="SelectNotWithSingleTable.xml" />
</dql-test-case>
<dql-test-case sql-case-id="assertSelectNotEqualsWithSingleTableForNotBetween">
<assertion sharding-rule-type="tbl" parameters="100000:int, 100001:int" expected-data-file="SelectNotWithSingleTable.xml" />
</dql-test-case>
<dql-test-case sql-case-id="assertSelectEqualsWithSameShardingColumns">
<assertion sharding-rule-type="tbl" parameters="1000:int, 1001:int" expected-data-file="SelectEqualsWithSameShardingColumns.xml" />
</dql-test-case>
<dql-test-case sql-case-id="assertSelectInWithSameShardingColumns">
<assertion sharding-rule-type="tbl" parameters="1000:int, 1001:int, 1001:int, 1100:int" expected-data-file="SelectInWithSameShardingColumns.xml" />
</dql-test-case>
<dql-test-case sql-case-id="assertSelectEqualsWithSingleTable">
<assertion sharding-rule-type="tbl" parameters="10:int, 1000:int" expected-data-file="SelectEqualsWithSingleTable_0.xml" />
<assertion sharding-rule-type="tbl" parameters="11:int, 1109:int" expected-data-file="SelectEqualsWithSingleTable_1.xml" />
<assertion sharding-rule-type="db" parameters="12:int, 1201:int" expected-data-file="SelectEqualsWithSingleTable_2.xml" />
<assertion sharding-rule-type="dbtbl" parameters="12:int, 1201:int" expected-data-file="SelectEqualsWithSingleTable_2.xml" />
<assertion sharding-rule-type="masterslave" parameters="12:int, 1201:int" expected-data-file="masterslave/SelectEqualsWithSingleTable_2.xml" />
<assertion sharding-rule-type="tbl" parameters="12:int, 1000:int" expected-data-file="Empty.xml" />
<assertion sharding-rule-type="db" parameters="12:int, 1000:int" expected-data-file="Empty.xml" />
<assertion sharding-rule-type="dbtbl" parameters="12:int, 1000:int" expected-data-file="Empty.xml" />
<assertion sharding-rule-type="masterslave" parameters="12:int, 1000:int" expected-data-file="Empty.xml" />
</dql-test-case>
<!--<dql-test-case sql-case-id="assertSelectBetweenWithSingleTable">
<assertion sharding-rule-type="tbl" parameters="10:int, 12:int, 1009:int, 1108:int" expected-data-file="SelectBetweenWithSingleTable.xml" />
<assertion sharding-rule-type="dbtbl" parameters="10:int, 12:int, 1009:int, 1108:int" expected-data-file="SelectBetweenWithSingleTable.xml" />
<assertion sharding-rule-type="db" parameters="10:int, 12:int, 1001:int, 1200:int" expected-data-file="SelectBetweenWithSingleTable.xml" />
<assertion sharding-rule-type="tbl" parameters="10:int, 12:int, 1309:int, 1408:int" expected-data-file="Empty.xml" />
<assertion sharding-rule-type="db" parameters="10:int, 12:int, 1309:int, 1408:int" expected-data-file="Empty.xml" />
<assertion sharding-rule-type="dbtbl" parameters="10:int, 12:int, 1309:int, 1408:int" expected-data-file="Empty.xml" />
</dql-test-case>-->
</integrate-test-cases>
<?xml version="1.0" encoding="UTF-8"?>
<sqls>
<sql id="assertSelectOne">
<sql id="select_constant_without_table">
<sharding-rule>
<data expected="select/SelectOne.xml" />
</sharding-rule>
</sql>
<sql id="assertSelectNotEqualsWithSingleTable">
<sql id="select_not_equal_with_single_table">
<sharding-rule value="tbl">
<data parameter="100000" expected="select/SelectNotEqualsWithSingleTable.xml" />
</sharding-rule>
</sql>
<sql id="assertSelectNotEqualsWithSingleTableForExclamationEqual">
<sql id="select_exclamation_equal_with_single_table">
<sharding-rule value="tbl">
<data parameter="100000" expected="select/SelectNotEqualsWithSingleTable.xml" />
</sharding-rule>
</sql>
<sql id="assertSelectNotEqualsWithSingleTableForNotIn">
<sql id="select_not_in_with_single_table">
<sharding-rule value="tbl">
<data parameter="100000,100001" expected="select/SelectNotWithSingleTable.xml" />
</sharding-rule>
</sql>
<sql id="assertSelectNotEqualsWithSingleTableForNotBetween">
<sql id="select_not_between_with_single_table">
<sharding-rule value="tbl">
<data parameter="100000,100001" expected="select/SelectNotWithSingleTable.xml" />
</sharding-rule>
</sql>
<sql id="assertSelectEqualsWithSameShardingColumns">
<sql id="select_equal_with_same_sharding_column">
<sharding-rule value="tbl">
<data parameter="1000,1001" />
</sharding-rule>
</sql>
<sql id="assertSelectInWithSameShardingColumns">
<sql id="select_in_with_same_sharding_column">
<sharding-rule value="tbl">
<data parameter="1000,1001,1001,1100" expected="select/SelectInWithSameShardingColumns.xml" />
</sharding-rule>
</sql>
<sql id="assertSelectEqualsWithSingleTable">
<sql id="select_equal_with_single_table">
<sharding-rule value="tbl">
<data parameter="10,1000" expected="select/SelectEqualsWithSingleTable_0.xml" />
<data parameter="11,1109" expected="select/SelectEqualsWithSingleTable_1.xml" />
......@@ -47,7 +47,7 @@
<data parameter="12,1000" />
</sharding-rule>
</sql>
<sql id="assertSelectBetweenWithSingleTable">
<sql id="select_between_with_single_table">
<sharding-rule value="tbl,masterslave,dbtbl">
<data parameter="10,12,1009,1108" expected="select/SelectBetweenWithSingleTable.xml" />
</sharding-rule>
......
......@@ -46,6 +46,23 @@
<artifactId>mysql-connector-java</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>com.atomikos</groupId>
<artifactId>transactions</artifactId>
</dependency>
<dependency>
<groupId>com.atomikos</groupId>
<artifactId>transactions-jta</artifactId>
</dependency>
<dependency>
<groupId>com.atomikos</groupId>
<artifactId>transactions-jdbc</artifactId>
</dependency>
<dependency>
<groupId>javax.transaction</groupId>
<artifactId>jta</artifactId>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
......
/*
* Copyright 2016-2018 shardingsphere.io.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* </p>
*/
package io.shardingsphere.proxy.config;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import io.shardingsphere.core.rule.DataSourceParameter;
import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;
/**
 * Create default proxy raw datasource using {@code HikariDataSource}.
 *
 * @author zhaojun
 */
public class DefaultProxyRawDataSource extends ProxyRawDataSource {
    
    public DefaultProxyRawDataSource(final Map<String, DataSourceParameter> dataSourceParameters) {
        super(dataSourceParameters);
    }
    
    /**
     * Build a single-entry map from data source name to a Hikari pooled data source.
     *
     * @param key data source name
     * @param dataSourceParameter connection and pool settings from YAML configuration
     * @return map containing exactly one entry: {@code key} to the new {@code HikariDataSource}
     */
    @Override
    protected Map<String, DataSource> buildInternal(final String key, final DataSourceParameter dataSourceParameter) {
        // The result always holds exactly one entry, so size it accordingly
        // instead of the previous fixed 128-bucket table.
        final Map<String, DataSource> result = new HashMap<>(1, 1);
        HikariConfig config = new HikariConfig();
        // MySQL is currently the only supported backend database for the proxy.
        config.setDriverClassName("com.mysql.jdbc.Driver");
        config.setJdbcUrl(dataSourceParameter.getUrl());
        config.setUsername(dataSourceParameter.getUsername());
        config.setPassword(dataSourceParameter.getPassword());
        config.setAutoCommit(dataSourceParameter.getAutoCommit());
        config.setConnectionTimeout(dataSourceParameter.getConnectionTimeout());
        config.setIdleTimeout(dataSourceParameter.getIdleTimeout());
        config.setMaxLifetime(dataSourceParameter.getMaxLifetime());
        config.setMaximumPoolSize(dataSourceParameter.getMaximumPoolSize());
        // Enable server-side prepared statements and statement caching for MySQL.
        config.addDataSourceProperty("useServerPrepStmts", "true");
        config.addDataSourceProperty("cachePrepStmts", "true");
        result.put(key, new HikariDataSource(config));
        return result;
    }
}
/*
* Copyright 2016-2018 shardingsphere.io.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* </p>
*/
package io.shardingsphere.proxy.config;
import io.shardingsphere.core.rule.DataSourceParameter;
import lombok.RequiredArgsConstructor;
import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;
/**
 * Abstract class for creating proxy raw datasource.
 *
 * @author zhaojun
 */
@RequiredArgsConstructor
public abstract class ProxyRawDataSource {
    
    private final Map<String, DataSourceParameter> dataSourceParameters;
    
    /**
     * Build data source map from YAML configuration.
     *
     * @return map from data source name to initialized {@code DataSource}
     */
    public Map<String, DataSource> build() {
        // Presize to the number of configured data sources instead of a fixed
        // 128-bucket table; typical deployments configure only a handful.
        Map<String, DataSource> result = new HashMap<>(dataSourceParameters.size() + 1, 1);
        for (Map.Entry<String, DataSourceParameter> entry : dataSourceParameters.entrySet()) {
            result.putAll(buildInternal(entry.getKey(), entry.getValue()));
        }
        return result;
    }
    
    /**
     * Build the data source map entry (or entries) for one configured data source.
     *
     * @param key data source name
     * @param dataSourceParameter connection and pool settings from YAML configuration
     * @return map entries to merge into the overall data source map
     */
    protected abstract Map<String, DataSource> buildInternal(String key, DataSourceParameter dataSourceParameter);
}
/*
* Copyright 2016-2018 shardingsphere.io.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* </p>
*/
package io.shardingsphere.proxy.config;
import io.shardingsphere.core.constant.TransactionType;
import io.shardingsphere.core.yaml.proxy.YamlProxyConfiguration;
import javax.sql.DataSource;
import java.util.Map;
/**
 * Create raw datasource map by transaction type.
 *
 * @author zhaojun
 */
public final class ProxyRawDataSourceFactory {
    
    // Static factory; never instantiated.
    private ProxyRawDataSourceFactory() {
    }
    
    /**
     * Create raw datasource map by transaction type.
     *
     * @param transactionType transaction type
     * @param yamlProxyConfiguration yaml proxy configuration
     * @return raw datasource map
     */
    public static Map<String, DataSource> create(final TransactionType transactionType, final YamlProxyConfiguration yamlProxyConfiguration) {
        switch (transactionType) {
            case XA:
                // XA transactions require Atomikos-managed XA-capable data sources.
                return new XaProxyRawDataSource(yamlProxyConfiguration.getDataSources()).build();
            default:
                // LOCAL (and any other mode) uses plain Hikari pooled data sources.
                return new DefaultProxyRawDataSource(yamlProxyConfiguration.getDataSources()).build();
        }
    }
}
......@@ -19,17 +19,15 @@ package io.shardingsphere.proxy.config;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import io.shardingsphere.core.constant.ShardingProperties;
import io.shardingsphere.core.constant.ShardingPropertiesConstant;
import io.shardingsphere.core.constant.TransactionType;
import io.shardingsphere.core.exception.ShardingException;
import io.shardingsphere.core.metadata.ShardingMetaData;
import io.shardingsphere.core.rule.MasterSlaveRule;
import io.shardingsphere.core.rule.ProxyAuthority;
import io.shardingsphere.core.rule.ShardingRule;
import io.shardingsphere.core.yaml.proxy.YamlProxyConfiguration;
import io.shardingsphere.core.rule.DataSourceParameter;
import io.shardingsphere.proxy.metadata.ProxyShardingMetaData;
import lombok.Getter;
......@@ -37,7 +35,6 @@ import javax.sql.DataSource;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Executors;
......@@ -48,32 +45,35 @@ import java.util.concurrent.Executors;
* @author zhangliang
* @author zhangyonglun
* @author panjuan
* @author zhaojun
*/
@Getter
public final class RuleRegistry {
private static final int MAX_EXECUTOR_THREADS = Runtime.getRuntime().availableProcessors() * 2;
private static final RuleRegistry INSTANCE = new RuleRegistry();
private final Map<String, DataSource> dataSourceMap;
private final ShardingRule shardingRule;
private final MasterSlaveRule masterSlaveRule;
private final ShardingMetaData shardingMetaData;
private final boolean isOnlyMasterSlave;
private final ListeningExecutorService executorService = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(MAX_EXECUTOR_THREADS));
private final String proxyMode;
private final boolean showSQL;
private final TransactionType transactionType;
private final ProxyAuthority proxyAuthority;
private RuleRegistry() {
YamlProxyConfiguration yamlProxyConfiguration;
try {
......@@ -81,11 +81,8 @@ public final class RuleRegistry {
} catch (final IOException ex) {
throw new ShardingException(ex);
}
dataSourceMap = new HashMap<>(128, 1);
Map<String, DataSourceParameter> dataSourceParameters = yamlProxyConfiguration.getDataSources();
for (Map.Entry<String, DataSourceParameter> entry : dataSourceParameters.entrySet()) {
dataSourceMap.put(entry.getKey(), getDataSource(entry.getValue()));
}
transactionType = TransactionType.findByValue(yamlProxyConfiguration.getTransactionMode());
dataSourceMap = ProxyRawDataSourceFactory.create(transactionType, yamlProxyConfiguration);
shardingRule = yamlProxyConfiguration.obtainShardingRule(Collections.<String>emptyList());
masterSlaveRule = yamlProxyConfiguration.obtainMasterSlaveRule();
isOnlyMasterSlave = shardingRule.getTableRules().isEmpty() && !masterSlaveRule.getMasterDataSourceName().isEmpty();
......@@ -100,22 +97,6 @@ public final class RuleRegistry {
proxyAuthority = yamlProxyConfiguration.getProxyAuthority();
}
private DataSource getDataSource(final DataSourceParameter dataSourceParameter) {
HikariConfig config = new HikariConfig();
config.setDriverClassName("com.mysql.jdbc.Driver");
config.setJdbcUrl(dataSourceParameter.getUrl());
config.setUsername(dataSourceParameter.getUsername());
config.setPassword(dataSourceParameter.getPassword());
config.setAutoCommit(dataSourceParameter.getAutoCommit());
config.setConnectionTimeout(dataSourceParameter.getConnectionTimeout());
config.setIdleTimeout(dataSourceParameter.getIdleTimeout());
config.setMaxLifetime(dataSourceParameter.getMaxLifetime());
config.setMaximumPoolSize(dataSourceParameter.getMaximumPoolSize());
config.addDataSourceProperty("useServerPrepStmts", "true");
config.addDataSourceProperty("cachePrepStmts", "true");
return new HikariDataSource(config);
}
/**
* Get instance of sharding rule registry.
*
......
/*
* Copyright 2016-2018 shardingsphere.io.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* </p>
*/
package io.shardingsphere.proxy.config;
import com.atomikos.jdbc.AtomikosDataSourceBean;
import com.google.common.base.Optional;
import io.shardingsphere.core.rule.DataSourceParameter;
import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
 * Create XA raw datasource map using {@code AtomikosDataSourceBean}.
 *
 * @author zhaojun
 */
public class XaProxyRawDataSource extends ProxyRawDataSource {
    
    public XaProxyRawDataSource(final Map<String, DataSourceParameter> dataSourceParameters) {
        super(dataSourceParameters);
    }
    
    /**
     * Build a single-entry map from data source name to an Atomikos XA data source.
     *
     * @param key data source name, also used as the Atomikos unique resource name
     * @param dataSourceParameter connection settings from YAML configuration
     * @return map containing exactly one entry: {@code key} to the new {@code AtomikosDataSourceBean}
     */
    @Override
    protected Map<String, DataSource> buildInternal(final String key, final DataSourceParameter dataSourceParameter) {
        // The result always holds exactly one entry, so size it accordingly
        // instead of the previous fixed 128-bucket table.
        final Map<String, DataSource> result = new HashMap<>(1, 1);
        AtomikosDataSourceBean dataSourceBean = new AtomikosDataSourceBean();
        dataSourceBean.setUniqueResourceName(key);
        dataSourceBean.setXaDataSourceClassName("com.mysql.jdbc.jdbc2.optional.MysqlXADataSource");
        Properties xaProperties = new Properties();
        xaProperties.setProperty("user", dataSourceParameter.getUsername());
        // Password may be omitted in YAML; Properties rejects null values, so default to "".
        xaProperties.setProperty("password", Optional.fromNullable(dataSourceParameter.getPassword()).or(""));
        xaProperties.setProperty("URL", dataSourceParameter.getUrl());
        // Keep each XA branch pinned to one physical connection, as MySQL XA requires.
        xaProperties.setProperty("pinGlobalTxToPhysicalConnection", "true");
        dataSourceBean.setXaProperties(xaProperties);
        result.put(key, dataSourceBean);
        return result;
    }
}
/*
* Copyright 2016-2018 shardingsphere.io.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* </p>
*/
package io.shardingsphere.proxy.frontend.mysql;
import io.netty.channel.ChannelId;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
 * Manage a dedicated single-thread executor for each channel.
 *
 * <p>Atomikos associates XA transaction state with the executing thread, so every
 * command of one channel must run on the same thread for the channel's lifetime.</p>
 *
 * @author zhaojun
 */
public final class ChannelThreadHolder {
    
    // Declared as ConcurrentHashMap (not Map) so the atomic putIfAbsent below resolves.
    private static final ConcurrentHashMap<ChannelId, ExecutorService> threadPoolMap = new ConcurrentHashMap<>();
    
    // Static holder; never instantiated.
    private ChannelThreadHolder() {
    }
    
    /**
     * Get the single-thread pool bound to the given channel, creating it on first use.
     *
     * @param channelId id of channel
     * @return executor service dedicated to the channel
     */
    public static ExecutorService get(final ChannelId channelId) {
        ExecutorService result = threadPoolMap.get(channelId);
        if (null == result) {
            // putIfAbsent closes the check-then-act race of the original get/put
            // sequence: two threads racing here would otherwise each create an
            // executor and leak one of them without shutdown.
            ExecutorService created = Executors.newSingleThreadExecutor();
            result = threadPoolMap.putIfAbsent(channelId, created);
            if (null == result) {
                result = created;
            } else {
                // Lost the race; discard the redundant executor.
                created.shutdown();
            }
        }
        return result;
    }
    
    /**
     * Remove and shut down the thread pool when the channel is closed.
     *
     * @param channelId id of channel
     */
    public static void remove(final ChannelId channelId) {
        // Remove first so a missing entry (channel closed before any command was
        // executed) is a no-op instead of a NullPointerException.
        ExecutorService threadPool = threadPoolMap.remove(channelId);
        if (null != threadPool) {
            threadPool.shutdown();
        }
    }
}
......@@ -72,7 +72,7 @@ public final class MySQLFrontendHandler extends FrontendHandler {
@Override
protected void executeCommand(final ChannelHandlerContext context, final ByteBuf message) {
eventLoopGroup.execute(new Runnable() {
ChannelThreadHolder.get(context.channel().id()).execute(new Runnable() {
@Override
public void run() {
......
......@@ -63,39 +63,36 @@
# # Meanwhile, the cost of the memory will be increased.
# proxy.mode: MEMORY_STRICTLY
# sql.show: false
#
#proxyAuthority:
# username: root
# password: root
# The example of master-slave rule.
#dataSources:
# ds_0:
# url: jdbc:mysql://127.0.0.1:3306/ds_0
# username: root
# password:
# autoCommit: true
# connectionTimeout: 30000
# idleTimeout: 60000
# maxLifetime: 1800000
# maximumPoolSize: 65
# ds_1:
# url: jdbc:mysql://127.0.0.1:3306/ds_1
# username: root
# password:
# autoCommit: true
# connectionTimeout: 30000
# idleTimeout: 60000
# maxLifetime: 1800000
# maximumPoolSize: 65
#masterSlaveRule:
# name: ds_ms
# masterDataSourceName: ds_0
# slaveDataSourceNames:
# - ds_1
#
#proxyAuthority:
# username: root
# password: root
dataSources:
ds_0:
url: jdbc:mysql://127.0.0.1:3306/demo_ds_0
username: root
password:
autoCommit: true
connectionTimeout: 30000
idleTimeout: 60000
maxLifetime: 1800000
maximumPoolSize: 65
ds_1:
url: jdbc:mysql://127.0.0.1:3306/demo_ds_1
username: root
password:
autoCommit: true
connectionTimeout: 30000
idleTimeout: 60000
maxLifetime: 1800000
maximumPoolSize: 65
masterSlaveRule:
name: ds_ms
masterDataSourceName: ds_0
slaveDataSourceNames:
- ds_1
proxyAuthority:
username: root
password: root
<?xml version="1.0" encoding="UTF-8"?>
<sql-cases>
<sql-case id="assertSelectOne" value="SELECT 1 as a" db-types="MySQL,PostgreSQL,SQLServer" />
<sql-case id="assertSelectNotEqualsWithSingleTable" value="SELECT * FROM t_order_item WHERE item_id &lt;&gt; %s ORDER BY item_id" />
<sql-case id="assertSelectNotEqualsWithSingleTableForExclamationEqual" value="SELECT * FROM t_order_item WHERE item_id != %s ORDER BY item_id" />
<sql-case id="assertSelectNotEqualsWithSingleTableForNotIn" value="SELECT * FROM t_order_item WHERE item_id IS NOT NULL AND item_id NOT IN (%s, %s) ORDER BY item_id" />
<sql-case id="assertSelectNotEqualsWithSingleTableForNotBetween" value="SELECT * FROM t_order_item WHERE item_id IS NOT NULL AND item_id NOT BETWEEN %s AND %s ORDER BY item_id" />
<sql-case id="assertSelectEqualsWithSingleTable" value="SELECT * FROM t_order WHERE user_id = %s AND order_id = %s" />
<sql-case id="assertSelectEqualsWithSameShardingColumns" value="SELECT * FROM t_order WHERE order_id = %s AND order_id = %s" />
<sql-case id="assertSelectBetweenWithSingleTable" value="SELECT * FROM t_order WHERE user_id BETWEEN %s AND %s AND order_id BETWEEN %s AND %s ORDER BY user_id, order_id" />
<sql-case id="select_constant_without_table" value="SELECT 1 as a" />
<sql-case id="select_not_equal_with_single_table" value="SELECT * FROM t_order_item WHERE item_id &lt;&gt; %s ORDER BY item_id" />
<sql-case id="select_exclamation_equal_with_single_table" value="SELECT * FROM t_order_item WHERE item_id != %s ORDER BY item_id" />
<sql-case id="select_not_in_with_single_table" value="SELECT * FROM t_order_item WHERE item_id IS NOT NULL AND item_id NOT IN (%s, %s) ORDER BY item_id" />
<sql-case id="select_not_between_with_single_table" value="SELECT * FROM t_order_item WHERE item_id IS NOT NULL AND item_id NOT BETWEEN %s AND %s ORDER BY item_id" />
<sql-case id="select_equal_with_single_table" value="SELECT * FROM t_order WHERE user_id = %s AND order_id = %s" />
<sql-case id="assertSelectInWithSingleTable" value="SELECT * FROM t_order WHERE user_id IN (%s, %s, %s) AND order_id IN (%s, %s) ORDER BY user_id, order_id" />
<sql-case id="assertSelectInWithSameShardingColumns" value="SELECT * FROM t_order WHERE order_id IN (%s, %s) AND order_id IN (%s, %s) ORDER BY order_id" />
<sql-case id="select_between_with_single_table" value="SELECT * FROM t_order WHERE user_id BETWEEN %s AND %s AND order_id BETWEEN %s AND %s ORDER BY user_id, order_id" />
<sql-case id="select_equal_with_same_sharding_column" value="SELECT * FROM t_order WHERE order_id = %s AND order_id = %s" />
<sql-case id="select_in_with_same_sharding_column" value="SELECT * FROM t_order WHERE order_id IN (%s, %s) AND order_id IN (%s, %s) ORDER BY order_id" />
<sql-case id="assertSelectIterator" value="SELECT t.* FROM t_order_item t WHERE t.item_id IN (%s, %s)" />
<sql-case id="assertSelectNoShardingTable" value="SELECT i.* FROM t_order o JOIN t_order_item i ON o.user_id = i.user_id AND o.order_id = i.order_id ORDER BY i.item_id" />
<sql-case id="assertSelectLikeWithCount" value="SELECT count(0) as orders_count FROM `t_order` o WHERE o.status LIKE CONCAT('%%', %s, '%%') AND o.`user_id` IN (%s, %s) AND o.`order_id` BETWEEN %s AND %s" db-types="MySQL,H2" />
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册