Unverified commit 784fde4a, authored by Liang Zhang, committed by GitHub

Refactor TableMetas to SchemaMetaData (#4772)

* add TableMetaDataLoader

* rename TableMetas to SchemaMetaData

* rename ShardingTableMetasLoader to ShardingMetaDataLoader

* rename TableMetaDataDecorator to MetaDataDecorator

* fix javadoc

* add SchemaMetaDataDecorator

* static SchemaMetaDataDecorator

* static SchemaMetaDataDecorator
Parent a51249a3
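For orientation, the sketch below illustrates how the renamed SchemaMetaData type (formerly TableMetas) is constructed and traversed by the loaders and decorators changed in this diff. The package, class, and method names are taken from the hunks that follow; the wrapper class, helper methods, and example table are hypothetical and shown only for illustration, not project code.

```java
// Illustrative sketch only: mirrors how this diff builds and walks SchemaMetaData.
// SchemaMetaData/TableMetaData names come from the hunks below; everything else is a stand-in.
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaData;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public final class SchemaMetaDataUsageSketch {

    // Build a SchemaMetaData from per-table meta data, as ShardingMetaDataLoader.load() does below.
    public static SchemaMetaData buildSchema() {
        Map<String, TableMetaData> tables = new HashMap<>();
        tables.put("t_account", new TableMetaData(Collections.emptyList(), Collections.emptyList()));
        return new SchemaMetaData(tables);
    }

    // Walk every table via getAllTableNames()/get(), the same loop shape the decorators in this diff use.
    public static SchemaMetaData copy(final SchemaMetaData schemaMetaData) {
        Map<String, TableMetaData> result = new HashMap<>(schemaMetaData.getAllTableNames().size(), 1);
        for (String each : schemaMetaData.getAllTableNames()) {
            result.put(each, schemaMetaData.get(each));
        }
        return new SchemaMetaData(result);
    }
}
```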
......@@ -20,28 +20,16 @@ package org.apache.shardingsphere.encrypt.metadata;
import org.apache.shardingsphere.encrypt.rule.EncryptRule;
import org.apache.shardingsphere.sql.parser.binder.metadata.column.ColumnMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.underlying.common.metadata.table.TableMetaDataDecorator;
import org.apache.shardingsphere.underlying.common.metadata.decorator.TableMetaDataDecorator;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
/**
* Table meta data decorator for encrypt.
*/
public final class EncryptTableMetaDataDecorator implements TableMetaDataDecorator<EncryptRule> {
@Override
public TableMetas decorate(final TableMetas tableMetas, final EncryptRule encryptRule) {
Map<String, TableMetaData> result = new HashMap<>(tableMetas.getAllTableNames().size(), 1);
for (String each : tableMetas.getAllTableNames()) {
result.put(each, decorate(tableMetas.get(each), each, encryptRule));
}
return new TableMetas(result);
}
@Override
public TableMetaData decorate(final TableMetaData tableMetaData, final String tableName, final EncryptRule encryptRule) {
return new TableMetaData(getEncryptColumnMetaDataList(tableName, tableMetaData.getColumns().values(), encryptRule), tableMetaData.getIndexes().values());
......
......@@ -21,7 +21,7 @@ import lombok.RequiredArgsConstructor;
import org.apache.shardingsphere.encrypt.merge.dal.impl.DecoratedEncryptColumnsMergedResult;
import org.apache.shardingsphere.encrypt.merge.dal.impl.MergedEncryptColumnsMergedResult;
import org.apache.shardingsphere.encrypt.rule.EncryptRule;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.sql.parser.sql.statement.SQLStatement;
import org.apache.shardingsphere.sql.parser.sql.statement.dal.dialect.mysql.DescribeStatement;
......@@ -40,13 +40,13 @@ public final class EncryptDALResultDecorator implements ResultDecorator {
private final EncryptRule encryptRule;
@Override
public MergedResult decorate(final QueryResult queryResult, final SQLStatementContext sqlStatementContext, final TableMetas tableMetas) {
public MergedResult decorate(final QueryResult queryResult, final SQLStatementContext sqlStatementContext, final SchemaMetaData schemaMetaData) {
return isNeedMergeEncryptColumns(sqlStatementContext.getSqlStatement())
? new MergedEncryptColumnsMergedResult(queryResult, sqlStatementContext, encryptRule) : new TransparentMergedResult(queryResult);
}
@Override
public MergedResult decorate(final MergedResult mergedResult, final SQLStatementContext sqlStatementContext, final TableMetas tableMetas) {
public MergedResult decorate(final MergedResult mergedResult, final SQLStatementContext sqlStatementContext, final SchemaMetaData schemaMetaData) {
return isNeedMergeEncryptColumns(sqlStatementContext.getSqlStatement()) ? new DecoratedEncryptColumnsMergedResult(mergedResult, sqlStatementContext, encryptRule) : mergedResult;
}
......
......@@ -18,7 +18,7 @@
package org.apache.shardingsphere.encrypt.merge.dql;
import lombok.RequiredArgsConstructor;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.underlying.executor.QueryResult;
import org.apache.shardingsphere.underlying.merge.engine.decorator.ResultDecorator;
......@@ -36,12 +36,12 @@ public final class EncryptDQLResultDecorator implements ResultDecorator {
private final boolean queryWithCipherColumn;
@Override
public MergedResult decorate(final QueryResult queryResult, final SQLStatementContext sqlStatementContext, final TableMetas tableMetas) {
public MergedResult decorate(final QueryResult queryResult, final SQLStatementContext sqlStatementContext, final SchemaMetaData schemaMetaData) {
return new EncryptMergedResult(encryptorMetaData, new TransparentMergedResult(queryResult), queryWithCipherColumn);
}
@Override
public MergedResult decorate(final MergedResult mergedResult, final SQLStatementContext sqlStatementContext, final TableMetas tableMetas) {
public MergedResult decorate(final MergedResult mergedResult, final SQLStatementContext sqlStatementContext, final SchemaMetaData schemaMetaData) {
return new EncryptMergedResult(encryptorMetaData, mergedResult, queryWithCipherColumn);
}
}
......@@ -21,7 +21,7 @@ import lombok.RequiredArgsConstructor;
import org.apache.shardingsphere.encrypt.rewrite.condition.impl.EncryptEqualCondition;
import org.apache.shardingsphere.encrypt.rewrite.condition.impl.EncryptInCondition;
import org.apache.shardingsphere.encrypt.rule.EncryptRule;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.sql.parser.binder.type.WhereAvailable;
import org.apache.shardingsphere.sql.parser.sql.segment.dml.expr.ExpressionSegment;
......@@ -49,7 +49,7 @@ public final class EncryptConditionEngine {
private final EncryptRule encryptRule;
private final TableMetas tableMetas;
private final SchemaMetaData schemaMetaData;
/**
* Create encrypt conditions.
......@@ -90,7 +90,7 @@ public final class EncryptConditionEngine {
}
private Optional<EncryptCondition> createEncryptCondition(final SQLStatementContext sqlStatementContext, final PredicateSegment predicateSegment) {
Optional<String> tableName = sqlStatementContext.getTablesContext().findTableName(predicateSegment, tableMetas);
Optional<String> tableName = sqlStatementContext.getTablesContext().findTableName(predicateSegment, schemaMetaData);
return tableName.isPresent() && encryptRule.findEncryptor(tableName.get(), predicateSegment.getColumn().getIdentifier().getValue()).isPresent()
? createEncryptCondition(predicateSegment, tableName.get()) : Optional.empty();
}
......
......@@ -35,7 +35,7 @@ public final class EncryptSQLRewriteContextDecorator implements SQLRewriteContex
@Override
public void decorate(final EncryptRule encryptRule, final ShardingSphereProperties properties, final SQLRewriteContext sqlRewriteContext) {
boolean isQueryWithCipherColumn = properties.<Boolean>getValue(PropertiesConstant.QUERY_WITH_CIPHER_COLUMN);
for (ParameterRewriter each : new EncryptParameterRewriterBuilder(encryptRule, isQueryWithCipherColumn).getParameterRewriters(sqlRewriteContext.getTableMetas())) {
for (ParameterRewriter each : new EncryptParameterRewriterBuilder(encryptRule, isQueryWithCipherColumn).getParameterRewriters(sqlRewriteContext.getSchemaMetaData())) {
if (!sqlRewriteContext.getParameters().isEmpty() && each.isNeedRewrite(sqlRewriteContext.getSqlStatementContext())) {
each.rewrite(sqlRewriteContext.getParameterBuilder(), sqlRewriteContext.getSqlStatementContext(), sqlRewriteContext.getParameters());
}
......
......@@ -25,10 +25,10 @@ import org.apache.shardingsphere.encrypt.rewrite.parameter.impl.EncryptInsertVal
import org.apache.shardingsphere.encrypt.rewrite.parameter.impl.EncryptPredicateParameterRewriter;
import org.apache.shardingsphere.encrypt.rule.EncryptRule;
import org.apache.shardingsphere.encrypt.rule.aware.EncryptRuleAware;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.underlying.rewrite.parameter.rewriter.ParameterRewriter;
import org.apache.shardingsphere.underlying.rewrite.parameter.rewriter.ParameterRewriterBuilder;
import org.apache.shardingsphere.underlying.rewrite.sql.token.generator.aware.TableMetasAware;
import org.apache.shardingsphere.underlying.rewrite.sql.token.generator.aware.SchemaMetaDataAware;
import java.util.Collection;
import java.util.LinkedList;
......@@ -44,10 +44,10 @@ public final class EncryptParameterRewriterBuilder implements ParameterRewriterB
private final boolean queryWithCipherColumn;
@Override
public Collection<ParameterRewriter> getParameterRewriters(final TableMetas tableMetas) {
public Collection<ParameterRewriter> getParameterRewriters(final SchemaMetaData schemaMetaData) {
Collection<ParameterRewriter> result = getParameterRewriters();
for (ParameterRewriter each : result) {
setUpParameterRewriters(each, tableMetas);
setUpParameterRewriters(each, schemaMetaData);
}
return result;
}
......@@ -61,9 +61,9 @@ public final class EncryptParameterRewriterBuilder implements ParameterRewriterB
return result;
}
private void setUpParameterRewriters(final ParameterRewriter parameterRewriter, final TableMetas tableMetas) {
if (parameterRewriter instanceof TableMetasAware) {
((TableMetasAware) parameterRewriter).setTableMetas(tableMetas);
private void setUpParameterRewriters(final ParameterRewriter parameterRewriter, final SchemaMetaData schemaMetaData) {
if (parameterRewriter instanceof SchemaMetaDataAware) {
((SchemaMetaDataAware) parameterRewriter).setSchemaMetaData(schemaMetaData);
}
if (parameterRewriter instanceof EncryptRuleAware) {
((EncryptRuleAware) parameterRewriter).setEncryptRule(encryptRule);
......
......@@ -22,11 +22,11 @@ import org.apache.shardingsphere.encrypt.rewrite.aware.QueryWithCipherColumnAwar
import org.apache.shardingsphere.encrypt.rewrite.condition.EncryptCondition;
import org.apache.shardingsphere.encrypt.rewrite.condition.EncryptConditionEngine;
import org.apache.shardingsphere.encrypt.rewrite.parameter.EncryptParameterRewriter;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.underlying.rewrite.parameter.builder.ParameterBuilder;
import org.apache.shardingsphere.underlying.rewrite.parameter.builder.impl.StandardParameterBuilder;
import org.apache.shardingsphere.underlying.rewrite.sql.token.generator.aware.TableMetasAware;
import org.apache.shardingsphere.underlying.rewrite.sql.token.generator.aware.SchemaMetaDataAware;
import java.util.List;
import java.util.Map;
......@@ -36,9 +36,9 @@ import java.util.Map.Entry;
* Predicate parameter rewriter for encrypt.
*/
@Setter
public final class EncryptPredicateParameterRewriter extends EncryptParameterRewriter<SQLStatementContext> implements TableMetasAware, QueryWithCipherColumnAware {
public final class EncryptPredicateParameterRewriter extends EncryptParameterRewriter<SQLStatementContext> implements SchemaMetaDataAware, QueryWithCipherColumnAware {
private TableMetas tableMetas;
private SchemaMetaData schemaMetaData;
private boolean queryWithCipherColumn;
......@@ -49,7 +49,7 @@ public final class EncryptPredicateParameterRewriter extends EncryptParameterRew
@Override
public void rewrite(final ParameterBuilder parameterBuilder, final SQLStatementContext sqlStatementContext, final List<Object> parameters) {
List<EncryptCondition> encryptConditions = new EncryptConditionEngine(getEncryptRule(), tableMetas).createEncryptConditions(sqlStatementContext);
List<EncryptCondition> encryptConditions = new EncryptConditionEngine(getEncryptRule(), schemaMetaData).createEncryptConditions(sqlStatementContext);
if (encryptConditions.isEmpty()) {
return;
}
......
......@@ -22,13 +22,13 @@ import lombok.Setter;
import org.apache.shardingsphere.encrypt.rewrite.aware.QueryWithCipherColumnAware;
import org.apache.shardingsphere.encrypt.rewrite.token.generator.BaseEncryptSQLTokenGenerator;
import org.apache.shardingsphere.encrypt.strategy.EncryptTable;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.sql.parser.binder.type.WhereAvailable;
import org.apache.shardingsphere.sql.parser.sql.segment.dml.predicate.AndPredicate;
import org.apache.shardingsphere.sql.parser.sql.segment.dml.predicate.PredicateSegment;
import org.apache.shardingsphere.underlying.rewrite.sql.token.generator.CollectionSQLTokenGenerator;
import org.apache.shardingsphere.underlying.rewrite.sql.token.generator.aware.TableMetasAware;
import org.apache.shardingsphere.underlying.rewrite.sql.token.generator.aware.SchemaMetaDataAware;
import org.apache.shardingsphere.underlying.rewrite.sql.token.pojo.generic.SubstitutableColumnNameToken;
import java.util.Collection;
......@@ -40,9 +40,9 @@ import java.util.Optional;
* Predicate column token generator for encrypt.
*/
@Setter
public final class EncryptPredicateColumnTokenGenerator extends BaseEncryptSQLTokenGenerator implements CollectionSQLTokenGenerator, TableMetasAware, QueryWithCipherColumnAware {
public final class EncryptPredicateColumnTokenGenerator extends BaseEncryptSQLTokenGenerator implements CollectionSQLTokenGenerator, SchemaMetaDataAware, QueryWithCipherColumnAware {
private TableMetas tableMetas;
private SchemaMetaData schemaMetaData;
private boolean queryWithCipherColumn;
......@@ -86,6 +86,6 @@ public final class EncryptPredicateColumnTokenGenerator extends BaseEncryptSQLTo
}
private Optional<EncryptTable> findEncryptTable(final SQLStatementContext sqlStatementContext, final PredicateSegment segment) {
return sqlStatementContext.getTablesContext().findTableName(segment, tableMetas).flatMap(tableName -> getEncryptRule().findEncryptTable(tableName));
return sqlStatementContext.getTablesContext().findTableName(segment, schemaMetaData).flatMap(tableName -> getEncryptRule().findEncryptTable(tableName));
}
}
......@@ -25,12 +25,12 @@ import org.apache.shardingsphere.encrypt.rewrite.condition.impl.EncryptInConditi
import org.apache.shardingsphere.encrypt.rewrite.token.generator.BaseEncryptSQLTokenGenerator;
import org.apache.shardingsphere.encrypt.rewrite.token.pojo.EncryptPredicateEqualRightValueToken;
import org.apache.shardingsphere.encrypt.rewrite.token.pojo.EncryptPredicateInRightValueToken;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.sql.parser.binder.type.WhereAvailable;
import org.apache.shardingsphere.underlying.rewrite.sql.token.generator.CollectionSQLTokenGenerator;
import org.apache.shardingsphere.underlying.rewrite.sql.token.generator.aware.ParametersAware;
import org.apache.shardingsphere.underlying.rewrite.sql.token.generator.aware.TableMetasAware;
import org.apache.shardingsphere.underlying.rewrite.sql.token.generator.aware.SchemaMetaDataAware;
import org.apache.shardingsphere.underlying.rewrite.sql.token.pojo.SQLToken;
import java.util.Collection;
......@@ -45,9 +45,10 @@ import java.util.Optional;
* Predicate right value token generator for encrypt.
*/
@Setter
public final class EncryptPredicateRightValueTokenGenerator extends BaseEncryptSQLTokenGenerator implements CollectionSQLTokenGenerator, TableMetasAware, ParametersAware, QueryWithCipherColumnAware {
public final class EncryptPredicateRightValueTokenGenerator extends BaseEncryptSQLTokenGenerator
implements CollectionSQLTokenGenerator, SchemaMetaDataAware, ParametersAware, QueryWithCipherColumnAware {
private TableMetas tableMetas;
private SchemaMetaData schemaMetaData;
private List<Object> parameters;
......@@ -60,7 +61,7 @@ public final class EncryptPredicateRightValueTokenGenerator extends BaseEncryptS
@Override
public Collection<SQLToken> generateSQLTokens(final SQLStatementContext sqlStatementContext) {
List<EncryptCondition> encryptConditions = new EncryptConditionEngine(getEncryptRule(), tableMetas).createEncryptConditions(sqlStatementContext);
List<EncryptCondition> encryptConditions = new EncryptConditionEngine(getEncryptRule(), schemaMetaData).createEncryptConditions(sqlStatementContext);
return encryptConditions.isEmpty() ? Collections.emptyList() : generateSQLTokens(encryptConditions);
}
......
......@@ -24,7 +24,7 @@ import org.apache.shardingsphere.encrypt.yaml.config.YamlRootEncryptRuleConfigur
import org.apache.shardingsphere.encrypt.yaml.swapper.EncryptRuleConfigurationYamlSwapper;
import org.apache.shardingsphere.sql.parser.SQLParserEngineFactory;
import org.apache.shardingsphere.sql.parser.binder.SQLStatementContextFactory;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.sql.parser.sql.statement.SQLStatement;
import org.apache.shardingsphere.underlying.common.constant.properties.ShardingSphereProperties;
......@@ -80,12 +80,12 @@ public final class EncryptSQLRewriterParameterizedTest extends AbstractSQLRewrit
SQLStatement sqlStatement = SQLParserEngineFactory.getSQLParserEngine(
null == getTestParameters().getDatabaseType() ? "SQL92" : getTestParameters().getDatabaseType()).parse(getTestParameters().getInputSQL(), false);
SQLStatementContext sqlStatementContext = SQLStatementContextFactory.newInstance(
createTableMetas(), getTestParameters().getInputSQL(), getTestParameters().getInputParameters(), sqlStatement);
return new SQLRewriteContext(mock(TableMetas.class), sqlStatementContext, getTestParameters().getInputSQL(), getTestParameters().getInputParameters());
createSchemaMetaData(), getTestParameters().getInputSQL(), getTestParameters().getInputParameters(), sqlStatement);
return new SQLRewriteContext(mock(SchemaMetaData.class), sqlStatementContext, getTestParameters().getInputSQL(), getTestParameters().getInputParameters());
}
private TableMetas createTableMetas() {
TableMetas result = mock(TableMetas.class);
private SchemaMetaData createSchemaMetaData() {
SchemaMetaData result = mock(SchemaMetaData.class);
when(result.getAllColumnNames("t_account")).thenReturn(Arrays.asList("account_id", "certificate_number", "password", "amount", "status"));
when(result.getAllColumnNames("t_account_bak")).thenReturn(Arrays.asList("account_id", "certificate_number", "password", "amount", "status"));
return result;
......
......@@ -30,9 +30,10 @@ import org.apache.shardingsphere.underlying.rewrite.parameter.rewriter.Parameter
*/
public final class ShadowSQLRewriteContextDecorator implements SQLRewriteContextDecorator<ShadowRule> {
@SuppressWarnings("unchecked")
@Override
public void decorate(final ShadowRule shadowRule, final ShardingSphereProperties properties, final SQLRewriteContext sqlRewriteContext) {
for (ParameterRewriter each : new ShadowParameterRewriterBuilder(shadowRule).getParameterRewriters(sqlRewriteContext.getTableMetas())) {
for (ParameterRewriter each : new ShadowParameterRewriterBuilder(shadowRule).getParameterRewriters(sqlRewriteContext.getSchemaMetaData())) {
if (!sqlRewriteContext.getParameters().isEmpty() && each.isNeedRewrite(sqlRewriteContext.getSqlStatementContext())) {
each.rewrite(sqlRewriteContext.getParameterBuilder(), sqlRewriteContext.getSqlStatementContext(), sqlRewriteContext.getParameters());
}
......
......@@ -22,7 +22,7 @@ import org.apache.shardingsphere.core.rule.ShadowRule;
import org.apache.shardingsphere.shadow.rewrite.aware.ShadowRuleAware;
import org.apache.shardingsphere.shadow.rewrite.parameter.impl.ShadowInsertValueParameterRewriter;
import org.apache.shardingsphere.shadow.rewrite.parameter.impl.ShadowPredicateParameterRewriter;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.underlying.rewrite.parameter.rewriter.ParameterRewriter;
import org.apache.shardingsphere.underlying.rewrite.parameter.rewriter.ParameterRewriterBuilder;
......@@ -38,7 +38,7 @@ public final class ShadowParameterRewriterBuilder implements ParameterRewriterBu
private final ShadowRule shadowRule;
@Override
public Collection<ParameterRewriter> getParameterRewriters(final TableMetas tableMetas) {
public Collection<ParameterRewriter> getParameterRewriters(final SchemaMetaData schemaMetaData) {
Collection<ParameterRewriter> result = getParameterRewriters();
for (ParameterRewriter each : result) {
((ShadowRuleAware) each).setShadowRule(shadowRule);
......
......@@ -19,7 +19,7 @@ package org.apache.shardingsphere.shadow.rewrite.judgement.impl;
import org.apache.shardingsphere.api.config.shadow.ShadowRuleConfiguration;
import org.apache.shardingsphere.core.rule.ShadowRule;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.dml.InsertStatementContext;
import org.apache.shardingsphere.sql.parser.sql.segment.dml.column.ColumnSegment;
import org.apache.shardingsphere.sql.parser.sql.segment.dml.column.InsertColumnsSegment;
......@@ -38,8 +38,8 @@ public final class PreparedJudgementEngineTest {
@Test
public void isShadowSQL() {
TableMetas tableMetas = mock(TableMetas.class);
when(tableMetas.getAllColumnNames("tbl")).thenReturn(Arrays.asList("id", "name", "shadow"));
SchemaMetaData schemaMetaData = mock(SchemaMetaData.class);
when(schemaMetaData.getAllColumnNames("tbl")).thenReturn(Arrays.asList("id", "name", "shadow"));
ShadowRuleConfiguration shadowRuleConfiguration = new ShadowRuleConfiguration();
shadowRuleConfiguration.setColumn("shadow");
ShadowRule shadowRule = new ShadowRule(shadowRuleConfiguration);
......@@ -48,7 +48,7 @@ public final class PreparedJudgementEngineTest {
InsertColumnsSegment insertColumnsSegment = new InsertColumnsSegment(0, 0,
Arrays.asList(new ColumnSegment(0, 0, new IdentifierValue("id")), new ColumnSegment(0, 0, new IdentifierValue("name")), new ColumnSegment(0, 0, new IdentifierValue("shadow"))));
insertStatement.setInsertColumns(insertColumnsSegment);
InsertStatementContext insertStatementContext = new InsertStatementContext(tableMetas, Arrays.asList(1, "Tom", 2, "Jerry", 3, true), insertStatement);
InsertStatementContext insertStatementContext = new InsertStatementContext(schemaMetaData, Arrays.asList(1, "Tom", 2, "Jerry", 3, true), insertStatement);
PreparedJudgementEngine preparedJudgementEngine = new PreparedJudgementEngine(shadowRule, insertStatementContext, Arrays.asList(1, "Tom", true));
Assert.assertTrue("should be shadow", preparedJudgementEngine.isShadowSQL());
}
......
......@@ -19,7 +19,7 @@ package org.apache.shardingsphere.shadow.rewrite.judgement.impl;
import org.apache.shardingsphere.api.config.shadow.ShadowRuleConfiguration;
import org.apache.shardingsphere.core.rule.ShadowRule;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.dml.InsertStatementContext;
import org.apache.shardingsphere.sql.parser.binder.statement.dml.SelectStatementContext;
import org.apache.shardingsphere.sql.parser.sql.segment.dml.assignment.InsertValuesSegment;
......@@ -50,14 +50,14 @@ import static org.mockito.Mockito.when;
public final class SimpleJudgementEngineTest {
private TableMetas tableMetas;
private SchemaMetaData schemaMetaData;
private ShadowRule shadowRule;
@Before
public void setUp() {
tableMetas = mock(TableMetas.class);
when(tableMetas.getAllColumnNames("tbl")).thenReturn(Arrays.asList("id", "name", "shadow"));
schemaMetaData = mock(SchemaMetaData.class);
when(schemaMetaData.getAllColumnNames("tbl")).thenReturn(Arrays.asList("id", "name", "shadow"));
ShadowRuleConfiguration shadowRuleConfiguration = new ShadowRuleConfiguration();
shadowRuleConfiguration.setColumn("shadow");
shadowRule = new ShadowRule(shadowRuleConfiguration);
......@@ -78,7 +78,7 @@ public final class SimpleJudgementEngineTest {
add(new LiteralExpressionSegment(0, 0, "true"));
}
})));
InsertStatementContext insertStatementContext = new InsertStatementContext(tableMetas, Collections.emptyList(), insertStatement);
InsertStatementContext insertStatementContext = new InsertStatementContext(schemaMetaData, Collections.emptyList(), insertStatement);
SimpleJudgementEngine simpleJudgementEngine = new SimpleJudgementEngine(shadowRule, insertStatementContext);
Assert.assertTrue("should be shadow", simpleJudgementEngine.isShadowSQL());
insertStatement.getValues().clear();
......@@ -90,7 +90,7 @@ public final class SimpleJudgementEngineTest {
add(new LiteralExpressionSegment(0, 0, "false"));
}
})));
insertStatementContext = new InsertStatementContext(tableMetas, Collections.emptyList(), insertStatement);
insertStatementContext = new InsertStatementContext(schemaMetaData, Collections.emptyList(), insertStatement);
simpleJudgementEngine = new SimpleJudgementEngine(shadowRule, insertStatementContext);
Assert.assertFalse("should not be shadow", simpleJudgementEngine.isShadowSQL());
}
......@@ -108,7 +108,7 @@ public final class SimpleJudgementEngineTest {
projectionsSegment.setDistinctRow(true);
projectionsSegment.getProjections().addAll(Collections.singletonList(new ExpressionProjectionSegment(0, 0, "true")));
selectStatement.setProjections(projectionsSegment);
SelectStatementContext selectStatementContext = new SelectStatementContext(tableMetas, "", Collections.emptyList(), selectStatement);
SelectStatementContext selectStatementContext = new SelectStatementContext(schemaMetaData, "", Collections.emptyList(), selectStatement);
SimpleJudgementEngine simpleJudgementEngine = new SimpleJudgementEngine(shadowRule, selectStatementContext);
Assert.assertTrue("should be shadow", simpleJudgementEngine.isShadowSQL());
andPredicate.getPredicates().clear();
......
......@@ -22,8 +22,9 @@ import org.apache.shardingsphere.core.rule.DataNode;
import org.apache.shardingsphere.core.rule.ShardingRule;
import org.apache.shardingsphere.core.rule.TableRule;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetasLoader;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaDataLoader;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaDataLoader;
import org.apache.shardingsphere.underlying.common.exception.ShardingSphereException;
import org.apache.shardingsphere.underlying.common.log.MetaDataLogger;
......@@ -37,10 +38,10 @@ import java.util.Map.Entry;
import java.util.Optional;
/**
* Table metas loader for sharding.
* Sharding meta data loader.
*/
@RequiredArgsConstructor
public final class ShardingTableMetasLoader {
public final class ShardingMetaDataLoader {
private final Map<String, DataSource> dataSourceMap;
......@@ -61,14 +62,14 @@ public final class ShardingTableMetasLoader {
TableRule tableRule = shardingRule.getTableRule(logicTableName);
if (!isCheckingMetaData) {
DataNode dataNode = tableRule.getActualDataNodes().iterator().next();
return TableMetasLoader.load(dataSourceMap.get(shardingRule.getShardingDataSourceNames().getRawMasterDataSourceName(dataNode.getDataSourceName())), dataNode.getTableName());
return TableMetaDataLoader.load(dataSourceMap.get(shardingRule.getShardingDataSourceNames().getRawMasterDataSourceName(dataNode.getDataSourceName())), dataNode.getTableName());
}
Map<String, List<DataNode>> dataNodeGroups = tableRule.getDataNodeGroups();
Map<String, TableMetaData> actualTableMetaDataMap = new HashMap<>(dataNodeGroups.size(), 1);
// TODO use multiple thread for diff data source
for (Entry<String, List<DataNode>> entry : dataNodeGroups.entrySet()) {
for (DataNode each : entry.getValue()) {
actualTableMetaDataMap.put(each.getTableName(), TableMetasLoader.load(dataSourceMap.get(each.getDataSourceName()), each.getTableName()));
actualTableMetaDataMap.put(each.getTableName(), TableMetaDataLoader.load(dataSourceMap.get(each.getDataSourceName()), each.getTableName()));
}
}
checkUniformed(logicTableName, actualTableMetaDataMap);
......@@ -76,16 +77,16 @@ public final class ShardingTableMetasLoader {
}
/**
* Load table metas.
* Load schema meta data.
*
* @return table metas
* @return schema meta data
* @throws SQLException SQL exception
*/
public TableMetas load() throws SQLException {
public SchemaMetaData load() throws SQLException {
Map<String, TableMetaData> result = new HashMap<>();
result.putAll(loadShardingTables());
result.putAll(loadDefaultTables());
return new TableMetas(result);
return new SchemaMetaData(result);
}
private Map<String, TableMetaData> loadShardingTables() throws SQLException {
......@@ -105,7 +106,7 @@ public final class ShardingTableMetasLoader {
return Collections.emptyMap();
}
long start = System.currentTimeMillis();
TableMetas result = TableMetasLoader.load(dataSourceMap.get(actualDefaultDataSourceName.get()), maxConnectionsSizePerQuery);
SchemaMetaData result = SchemaMetaDataLoader.load(dataSourceMap.get(actualDefaultDataSourceName.get()), maxConnectionsSizePerQuery);
MetaDataLogger.log("Default table(s) have been loaded in {} milliseconds.", System.currentTimeMillis() - start);
return result.getTables();
}
......
......@@ -24,14 +24,11 @@ import org.apache.shardingsphere.core.rule.TableRule;
import org.apache.shardingsphere.sql.parser.binder.metadata.column.ColumnMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.index.IndexMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.underlying.common.metadata.table.TableMetaDataDecorator;
import org.apache.shardingsphere.underlying.common.metadata.decorator.TableMetaDataDecorator;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
......@@ -40,15 +37,6 @@ import java.util.Optional;
*/
public final class ShardingTableMetaDataDecorator implements TableMetaDataDecorator<ShardingRule> {
@Override
public TableMetas decorate(final TableMetas tableMetas, final ShardingRule shardingRule) {
Map<String, TableMetaData> result = new HashMap<>(tableMetas.getAllTableNames().size(), 1);
for (String each : tableMetas.getAllTableNames()) {
result.put(each, decorate(tableMetas.get(each), each, shardingRule));
}
return new TableMetas(result);
}
@Override
public TableMetaData decorate(final TableMetaData tableMetaData, final String tableName, final ShardingRule shardingRule) {
return new TableMetaData(getColumnMetaDataList(tableMetaData, tableName, shardingRule), getIndexMetaDataList(tableMetaData, tableName, shardingRule));
......
......@@ -104,7 +104,7 @@ public abstract class BaseShardingEngine {
routingHook.start(sql);
try {
ShardingRouteContext result = decorate(route(sql, clonedParameters));
routingHook.finishSuccess(result, metaData.getTables());
routingHook.finishSuccess(result, metaData.getSchema());
return result;
// CHECKSTYLE:OFF
} catch (final Exception ex) {
......
......@@ -23,7 +23,7 @@ import org.apache.shardingsphere.core.rule.ShardingRule;
import org.apache.shardingsphere.encrypt.rule.EncryptRule;
import org.apache.shardingsphere.sql.parser.SQLParserEngine;
import org.apache.shardingsphere.underlying.common.metadata.ShardingSphereMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
......@@ -55,7 +55,7 @@ public final class PreparedQueryShardingEngineTest extends BaseShardingEngineTes
EncryptRule encryptRule = mock(EncryptRule.class);
when(shardingRule.getEncryptRule()).thenReturn(encryptRule);
ShardingSphereMetaData shardingSphereMetaData = mock(ShardingSphereMetaData.class);
when(shardingSphereMetaData.getTables()).thenReturn(mock(TableMetas.class));
when(shardingSphereMetaData.getSchema()).thenReturn(mock(SchemaMetaData.class));
shardingEngine = new PreparedQueryShardingEngine(shardingRule, getProperties(), shardingSphereMetaData, mock(SQLParserEngine.class));
setRoutingEngine();
}
......
......@@ -23,7 +23,7 @@ import org.apache.shardingsphere.core.rule.ShardingRule;
import org.apache.shardingsphere.encrypt.rule.EncryptRule;
import org.apache.shardingsphere.sql.parser.SQLParserEngine;
import org.apache.shardingsphere.underlying.common.metadata.ShardingSphereMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.junit.Before;
import org.junit.runner.RunWith;
import org.mockito.Mock;
......@@ -53,7 +53,7 @@ public final class SimpleQueryShardingEngineTest extends BaseShardingEngineTest
EncryptRule encryptRule = mock(EncryptRule.class);
when(shardingRule.getEncryptRule()).thenReturn(encryptRule);
ShardingSphereMetaData shardingSphereMetaData = mock(ShardingSphereMetaData.class);
when(shardingSphereMetaData.getTables()).thenReturn(mock(TableMetas.class));
when(shardingSphereMetaData.getSchema()).thenReturn(mock(SchemaMetaData.class));
shardingEngine = new SimpleQueryShardingEngine(shardingRule, getProperties(), shardingSphereMetaData, mock(SQLParserEngine.class));
setRoutingEngine();
}
......
......@@ -17,7 +17,7 @@
package org.apache.shardingsphere.core.shard.fixture;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sharding.route.engine.context.ShardingRouteContext;
import org.apache.shardingsphere.sharding.route.hook.RoutingHook;
......@@ -31,7 +31,7 @@ public final class RoutingHookFixture implements RoutingHook {
}
@Override
public void finishSuccess(final ShardingRouteContext shardingRouteContext, final TableMetas tableMetas) {
public void finishSuccess(final ShardingRouteContext shardingRouteContext, final SchemaMetaData schemaMetaData) {
}
@Override
......
......@@ -22,7 +22,7 @@ import org.apache.shardingsphere.core.rule.ShardingRule;
import org.apache.shardingsphere.sharding.merge.dal.common.SingleLocalDataMergedResult;
import org.apache.shardingsphere.sharding.merge.dal.show.LogicTablesMergedResult;
import org.apache.shardingsphere.sharding.merge.dal.show.ShowCreateTableMergedResult;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.sql.parser.sql.statement.SQLStatement;
import org.apache.shardingsphere.sql.parser.sql.statement.dal.dialect.mysql.ShowCreateTableStatement;
......@@ -49,16 +49,16 @@ public final class ShardingDALResultMerger implements ResultMerger {
private final ShardingRule shardingRule;
@Override
public MergedResult merge(final List<QueryResult> queryResults, final SQLStatementContext sqlStatementContext, final TableMetas tableMetas) throws SQLException {
public MergedResult merge(final List<QueryResult> queryResults, final SQLStatementContext sqlStatementContext, final SchemaMetaData schemaMetaData) throws SQLException {
SQLStatement dalStatement = sqlStatementContext.getSqlStatement();
if (dalStatement instanceof ShowDatabasesStatement) {
return new SingleLocalDataMergedResult(Collections.singletonList(ShardingConstant.LOGIC_SCHEMA_NAME));
}
if (dalStatement instanceof ShowTablesStatement || dalStatement instanceof ShowTableStatusStatement || dalStatement instanceof ShowIndexStatement) {
return new LogicTablesMergedResult(shardingRule, sqlStatementContext, tableMetas, queryResults);
return new LogicTablesMergedResult(shardingRule, sqlStatementContext, schemaMetaData, queryResults);
}
if (dalStatement instanceof ShowCreateTableStatement) {
return new ShowCreateTableMergedResult(shardingRule, sqlStatementContext, tableMetas, queryResults);
return new ShowCreateTableMergedResult(shardingRule, sqlStatementContext, schemaMetaData, queryResults);
}
return new TransparentMergedResult(queryResults.get(0));
}
......
......@@ -19,7 +19,7 @@ package org.apache.shardingsphere.sharding.merge.dal.show;
import org.apache.shardingsphere.core.rule.ShardingRule;
import org.apache.shardingsphere.core.rule.TableRule;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.underlying.executor.QueryResult;
import org.apache.shardingsphere.underlying.merge.result.impl.memory.MemoryMergedResult;
......@@ -38,12 +38,12 @@ import java.util.Set;
public class LogicTablesMergedResult extends MemoryMergedResult<ShardingRule> {
public LogicTablesMergedResult(final ShardingRule shardingRule,
final SQLStatementContext sqlStatementContext, final TableMetas tableMetas, final List<QueryResult> queryResults) throws SQLException {
super(shardingRule, tableMetas, sqlStatementContext, queryResults);
final SQLStatementContext sqlStatementContext, final SchemaMetaData schemaMetaData, final List<QueryResult> queryResults) throws SQLException {
super(shardingRule, schemaMetaData, sqlStatementContext, queryResults);
}
@Override
protected final List<MemoryQueryResultRow> init(final ShardingRule shardingRule, final TableMetas tableMetas,
protected final List<MemoryQueryResultRow> init(final ShardingRule shardingRule, final SchemaMetaData schemaMetaData,
final SQLStatementContext sqlStatementContext, final List<QueryResult> queryResults) throws SQLException {
List<MemoryQueryResultRow> result = new LinkedList<>();
Set<String> tableNames = new HashSet<>();
......@@ -53,7 +53,7 @@ public class LogicTablesMergedResult extends MemoryMergedResult<ShardingRule> {
String actualTableName = memoryResultSetRow.getCell(1).toString();
Optional<TableRule> tableRule = shardingRule.findTableRuleByActualTable(actualTableName);
if (!tableRule.isPresent()) {
if (shardingRule.getTableRules().isEmpty() || tableMetas.containsTable(actualTableName) && tableNames.add(actualTableName)) {
if (shardingRule.getTableRules().isEmpty() || schemaMetaData.containsTable(actualTableName) && tableNames.add(actualTableName)) {
result.add(memoryResultSetRow);
}
} else if (tableNames.add(tableRule.get().getLogicTable())) {
......
......@@ -18,7 +18,7 @@
package org.apache.shardingsphere.sharding.merge.dal.show;
import org.apache.shardingsphere.core.rule.ShardingRule;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.underlying.executor.QueryResult;
import org.apache.shardingsphere.underlying.merge.result.impl.memory.MemoryQueryResultRow;
......@@ -32,8 +32,8 @@ import java.util.List;
public final class ShowCreateTableMergedResult extends LogicTablesMergedResult {
public ShowCreateTableMergedResult(final ShardingRule shardingRule,
final SQLStatementContext sqlStatementContext, final TableMetas tableMetas, final List<QueryResult> queryResults) throws SQLException {
super(shardingRule, sqlStatementContext, tableMetas, queryResults);
final SQLStatementContext sqlStatementContext, final SchemaMetaData schemaMetaData, final List<QueryResult> queryResults) throws SQLException {
super(shardingRule, sqlStatementContext, schemaMetaData, queryResults);
}
@Override
......
......@@ -26,7 +26,7 @@ import org.apache.shardingsphere.sharding.merge.dql.pagination.LimitDecoratorMer
import org.apache.shardingsphere.sharding.merge.dql.pagination.RowNumberDecoratorMergedResult;
import org.apache.shardingsphere.sharding.merge.dql.pagination.TopAndRowNumberDecoratorMergedResult;
import org.apache.shardingsphere.spi.database.type.DatabaseType;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.segment.select.orderby.OrderByItem;
import org.apache.shardingsphere.sql.parser.binder.segment.select.pagination.PaginationContext;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
......@@ -53,7 +53,7 @@ public final class ShardingDQLResultMerger implements ResultMerger {
private final DatabaseType databaseType;
@Override
public MergedResult merge(final List<QueryResult> queryResults, final SQLStatementContext sqlStatementContext, final TableMetas tableMetas) throws SQLException {
public MergedResult merge(final List<QueryResult> queryResults, final SQLStatementContext sqlStatementContext, final SchemaMetaData schemaMetaData) throws SQLException {
if (1 == queryResults.size()) {
return new IteratorStreamMergedResult(queryResults);
}
......
......@@ -23,7 +23,7 @@ import com.google.common.collect.Maps;
import org.apache.shardingsphere.core.rule.ShardingRule;
import org.apache.shardingsphere.sharding.merge.dql.groupby.aggregation.AggregationUnit;
import org.apache.shardingsphere.sharding.merge.dql.groupby.aggregation.AggregationUnitFactory;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.segment.select.projection.impl.AggregationDistinctProjection;
import org.apache.shardingsphere.sql.parser.binder.segment.select.projection.impl.AggregationProjection;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
......@@ -50,8 +50,8 @@ public final class GroupByMemoryMergedResult extends MemoryMergedResult<Sharding
}
@Override
protected List<MemoryQueryResultRow> init(final ShardingRule shardingRule,
final TableMetas tableMetas, final SQLStatementContext sqlStatementContext, final List<QueryResult> queryResults) throws SQLException {
protected List<MemoryQueryResultRow> init(final ShardingRule shardingRule,
final SchemaMetaData schemaMetaData, final SQLStatementContext sqlStatementContext, final List<QueryResult> queryResults) throws SQLException {
SelectStatementContext selectStatementContext = (SelectStatementContext) sqlStatementContext;
Map<GroupByValue, MemoryQueryResultRow> dataMap = new HashMap<>(1024);
Map<GroupByValue, Map<AggregationProjection, AggregationUnit>> aggregationMap = new HashMap<>(1024);
......
......@@ -21,7 +21,7 @@ import org.apache.shardingsphere.api.config.sharding.ShardingRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.TableRuleConfiguration;
import org.apache.shardingsphere.core.rule.ShardingRule;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.underlying.executor.QueryResult;
import org.junit.Before;
......@@ -41,12 +41,12 @@ public final class ShowCreateTableMergedResultTest {
private ShardingRule shardingRule;
private TableMetas tableMetas;
private SchemaMetaData schemaMetaData;
@Before
public void setUp() {
shardingRule = createShardingRule();
tableMetas = createTableMetas();
schemaMetaData = createSchemaMetaData();
}
private ShardingRule createShardingRule() {
......@@ -56,21 +56,21 @@ public final class ShowCreateTableMergedResultTest {
return new ShardingRule(shardingRuleConfig, Collections.singletonList("ds"));
}
private TableMetas createTableMetas() {
Map<String, TableMetaData> tableMetas = new HashMap<>(1, 1);
tableMetas.put("table", new TableMetaData(Collections.emptyList(), Collections.emptyList()));
return new TableMetas(tableMetas);
private SchemaMetaData createSchemaMetaData() {
Map<String, TableMetaData> tableMetaDataMap = new HashMap<>(1, 1);
tableMetaDataMap.put("table", new TableMetaData(Collections.emptyList(), Collections.emptyList()));
return new SchemaMetaData(tableMetaDataMap);
}
@Test
public void assertNextForEmptyQueryResult() throws SQLException {
ShowCreateTableMergedResult actual = new ShowCreateTableMergedResult(shardingRule, mock(SQLStatementContext.class), tableMetas, Collections.emptyList());
ShowCreateTableMergedResult actual = new ShowCreateTableMergedResult(shardingRule, mock(SQLStatementContext.class), schemaMetaData, Collections.emptyList());
assertFalse(actual.next());
}
@Test
public void assertNextForTableRuleIsPresent() throws SQLException {
ShowCreateTableMergedResult actual = new ShowCreateTableMergedResult(shardingRule, mock(SQLStatementContext.class), tableMetas, Collections.singletonList(createQueryResult()));
ShowCreateTableMergedResult actual = new ShowCreateTableMergedResult(shardingRule, mock(SQLStatementContext.class), schemaMetaData, Collections.singletonList(createQueryResult()));
assertTrue(actual.next());
}
......
......@@ -22,7 +22,7 @@ import org.apache.shardingsphere.api.config.sharding.ShardingRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.TableRuleConfiguration;
import org.apache.shardingsphere.core.rule.ShardingRule;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.underlying.executor.QueryResult;
import org.junit.Before;
......@@ -42,12 +42,12 @@ public final class ShowTablesMergedResultTest {
private ShardingRule shardingRule;
private TableMetas tableMetas;
private SchemaMetaData schemaMetaData;
@Before
public void setUp() {
shardingRule = createShardingRule();
tableMetas = createTableMetas();
schemaMetaData = createSchemaMetaData();
}
private ShardingRule createShardingRule() {
......@@ -57,10 +57,10 @@ public final class ShowTablesMergedResultTest {
return new ShardingRule(shardingRuleConfig, Lists.newArrayList("ds"));
}
private TableMetas createTableMetas() {
private SchemaMetaData createSchemaMetaData() {
Map<String, TableMetaData> tableMetaDataMap = new HashMap<>(1, 1);
tableMetaDataMap.put("table", new TableMetaData(Collections.emptyList(), Collections.emptyList()));
return new TableMetas(tableMetaDataMap);
return new SchemaMetaData(tableMetaDataMap);
}
private QueryResult createQueryResult(final String value) throws SQLException {
......@@ -73,25 +73,25 @@ public final class ShowTablesMergedResultTest {
@Test
public void assertNextForEmptyQueryResult() throws SQLException {
LogicTablesMergedResult actual = new LogicTablesMergedResult(shardingRule, mock(SQLStatementContext.class), tableMetas, Collections.emptyList());
LogicTablesMergedResult actual = new LogicTablesMergedResult(shardingRule, mock(SQLStatementContext.class), schemaMetaData, Collections.emptyList());
assertFalse(actual.next());
}
@Test
public void assertNextForActualTableNameInTableRule() throws SQLException {
LogicTablesMergedResult actual = new LogicTablesMergedResult(shardingRule, mock(SQLStatementContext.class), tableMetas, Collections.singletonList(createQueryResult("table_0")));
LogicTablesMergedResult actual = new LogicTablesMergedResult(shardingRule, mock(SQLStatementContext.class), schemaMetaData, Collections.singletonList(createQueryResult("table_0")));
assertTrue(actual.next());
}
@Test
public void assertNextForActualTableNameNotInTableRuleWithDefaultDataSource() throws SQLException {
LogicTablesMergedResult actual = new LogicTablesMergedResult(shardingRule, mock(SQLStatementContext.class), tableMetas, Collections.singletonList(createQueryResult("table")));
LogicTablesMergedResult actual = new LogicTablesMergedResult(shardingRule, mock(SQLStatementContext.class), schemaMetaData, Collections.singletonList(createQueryResult("table")));
assertTrue(actual.next());
}
@Test
public void assertNextForActualTableNameNotInTableRuleWithoutDefaultDataSource() throws SQLException {
LogicTablesMergedResult actual = new LogicTablesMergedResult(shardingRule, mock(SQLStatementContext.class), tableMetas, Collections.singletonList(createQueryResult("table_3")));
LogicTablesMergedResult actual = new LogicTablesMergedResult(shardingRule, mock(SQLStatementContext.class), schemaMetaData, Collections.singletonList(createQueryResult("table_3")));
assertFalse(actual.next());
}
}
......@@ -35,9 +35,10 @@ public final class ShardingSQLRewriteContextDecorator implements SQLRewriteConte
private final ShardingRouteContext shardingRouteContext;
@SuppressWarnings("unchecked")
@Override
public void decorate(final ShardingRule shardingRule, final ShardingSphereProperties properties, final SQLRewriteContext sqlRewriteContext) {
for (ParameterRewriter each : new ShardingParameterRewriterBuilder(shardingRule, shardingRouteContext).getParameterRewriters(sqlRewriteContext.getTableMetas())) {
for (ParameterRewriter each : new ShardingParameterRewriterBuilder(shardingRule, shardingRouteContext).getParameterRewriters(sqlRewriteContext.getSchemaMetaData())) {
if (!sqlRewriteContext.getParameters().isEmpty() && each.isNeedRewrite(sqlRewriteContext.getSqlStatementContext())) {
each.rewrite(sqlRewriteContext.getParameterBuilder(), sqlRewriteContext.getSqlStatementContext(), sqlRewriteContext.getParameters());
}
......
......@@ -24,10 +24,10 @@ import org.apache.shardingsphere.sharding.rewrite.aware.ShardingRouteContextAwar
import org.apache.shardingsphere.sharding.rewrite.parameter.impl.ShardingGeneratedKeyInsertValueParameterRewriter;
import org.apache.shardingsphere.sharding.rewrite.parameter.impl.ShardingPaginationParameterRewriter;
import org.apache.shardingsphere.sharding.route.engine.context.ShardingRouteContext;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.underlying.rewrite.parameter.rewriter.ParameterRewriter;
import org.apache.shardingsphere.underlying.rewrite.parameter.rewriter.ParameterRewriterBuilder;
import org.apache.shardingsphere.underlying.rewrite.sql.token.generator.aware.TableMetasAware;
import org.apache.shardingsphere.underlying.rewrite.sql.token.generator.aware.SchemaMetaDataAware;
import java.util.Collection;
import java.util.LinkedList;
......@@ -43,10 +43,10 @@ public final class ShardingParameterRewriterBuilder implements ParameterRewriter
private final ShardingRouteContext shardingRouteContext;
@Override
public Collection<ParameterRewriter> getParameterRewriters(final TableMetas tableMetas) {
public Collection<ParameterRewriter> getParameterRewriters(final SchemaMetaData schemaMetaData) {
Collection<ParameterRewriter> result = getParameterRewriters();
for (ParameterRewriter each : result) {
setUpParameterRewriters(each, tableMetas);
setUpParameterRewriters(each, schemaMetaData);
}
return result;
}
......@@ -58,9 +58,9 @@ public final class ShardingParameterRewriterBuilder implements ParameterRewriter
return result;
}
private void setUpParameterRewriters(final ParameterRewriter parameterRewriter, final TableMetas tableMetas) {
if (parameterRewriter instanceof TableMetasAware) {
((TableMetasAware) parameterRewriter).setTableMetas(tableMetas);
private void setUpParameterRewriters(final ParameterRewriter parameterRewriter, final SchemaMetaData schemaMetaData) {
if (parameterRewriter instanceof SchemaMetaDataAware) {
((SchemaMetaDataAware) parameterRewriter).setSchemaMetaData(schemaMetaData);
}
if (parameterRewriter instanceof ShardingRuleAware) {
((ShardingRuleAware) parameterRewriter).setShardingRule(shardingRule);
......
......@@ -32,7 +32,7 @@ import org.apache.shardingsphere.sql.parser.SQLParserEngineFactory;
import org.apache.shardingsphere.sql.parser.binder.metadata.column.ColumnMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.index.IndexMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.underlying.common.constant.properties.ShardingSphereProperties;
import org.apache.shardingsphere.underlying.common.metadata.ShardingSphereMetaData;
import org.apache.shardingsphere.underlying.common.metadata.datasource.DataSourceMetas;
......@@ -81,7 +81,7 @@ public final class MixSQLRewriterParameterizedTest extends AbstractSQLRewriterPa
ShardingRouteContext shardingRouteContext = shardingRouter.route(getTestParameters().getInputSQL(), getTestParameters().getInputParameters(), false);
ShardingSphereProperties properties = new ShardingSphereProperties(ruleConfiguration.getProps());
SQLRewriteContext sqlRewriteContext = new SQLRewriteContext(
mock(TableMetas.class), shardingRouteContext.getSqlStatementContext(), getTestParameters().getInputSQL(), getTestParameters().getInputParameters());
mock(SchemaMetaData.class), shardingRouteContext.getSqlStatementContext(), getTestParameters().getInputSQL(), getTestParameters().getInputParameters());
new ShardingSQLRewriteContextDecorator(shardingRouteContext).decorate(shardingRule, properties, sqlRewriteContext);
new EncryptSQLRewriteContextDecorator().decorate(shardingRule.getEncryptRule(), properties, sqlRewriteContext);
sqlRewriteContext.generateSQLTokens();
......@@ -99,21 +99,21 @@ public final class MixSQLRewriterParameterizedTest extends AbstractSQLRewriterPa
}
private ShardingSphereMetaData createShardingSphereMetaData() {
TableMetas tableMetas = mock(TableMetas.class);
when(tableMetas.getAllTableNames()).thenReturn(Arrays.asList("t_account", "t_account_bak", "t_account_detail"));
SchemaMetaData schemaMetaData = mock(SchemaMetaData.class);
when(schemaMetaData.getAllTableNames()).thenReturn(Arrays.asList("t_account", "t_account_bak", "t_account_detail"));
TableMetaData accountTableMetaData = mock(TableMetaData.class);
when(accountTableMetaData.getColumns()).thenReturn(createColumnMetaDataMap());
Map<String, IndexMetaData> indexMetaDataMap = new HashMap<>(1, 1);
indexMetaDataMap.put("index_name", new IndexMetaData("index_name"));
when(accountTableMetaData.getIndexes()).thenReturn(indexMetaDataMap);
when(tableMetas.get("t_account")).thenReturn(accountTableMetaData);
when(schemaMetaData.get("t_account")).thenReturn(accountTableMetaData);
TableMetaData accountBakTableMetaData = mock(TableMetaData.class);
when(accountBakTableMetaData.getColumns()).thenReturn(createColumnMetaDataMap());
when(tableMetas.get("t_account_bak")).thenReturn(accountBakTableMetaData);
when(tableMetas.get("t_account_detail")).thenReturn(mock(TableMetaData.class));
when(tableMetas.getAllColumnNames("t_account")).thenReturn(Arrays.asList("account_id", "password", "amount", "status"));
when(tableMetas.getAllColumnNames("t_account_bak")).thenReturn(Arrays.asList("account_id", "password", "amount", "status"));
return new ShardingSphereMetaData(mock(DataSourceMetas.class), tableMetas);
when(schemaMetaData.get("t_account_bak")).thenReturn(accountBakTableMetaData);
when(schemaMetaData.get("t_account_detail")).thenReturn(mock(TableMetaData.class));
when(schemaMetaData.getAllColumnNames("t_account")).thenReturn(Arrays.asList("account_id", "password", "amount", "status"));
when(schemaMetaData.getAllColumnNames("t_account_bak")).thenReturn(Arrays.asList("account_id", "password", "amount", "status"));
return new ShardingSphereMetaData(mock(DataSourceMetas.class), schemaMetaData);
}
private Map<String, ColumnMetaData> createColumnMetaDataMap() {
......
......@@ -31,7 +31,7 @@ import org.apache.shardingsphere.sql.parser.SQLParserEngineFactory;
import org.apache.shardingsphere.sql.parser.binder.metadata.column.ColumnMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.index.IndexMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.underlying.common.constant.properties.ShardingSphereProperties;
import org.apache.shardingsphere.underlying.common.metadata.ShardingSphereMetaData;
import org.apache.shardingsphere.underlying.common.metadata.datasource.DataSourceMetas;
......@@ -80,7 +80,7 @@ public final class ShardingSQLRewriterParameterizedTest extends AbstractSQLRewri
ShardingRouteContext shardingRouteContext = shardingRouter.route(getTestParameters().getInputSQL(), getTestParameters().getInputParameters(), false);
ShardingSphereProperties properties = new ShardingSphereProperties(ruleConfiguration.getProps());
SQLRewriteContext sqlRewriteContext = new SQLRewriteContext(
mock(TableMetas.class), shardingRouteContext.getSqlStatementContext(), getTestParameters().getInputSQL(), getTestParameters().getInputParameters());
mock(SchemaMetaData.class), shardingRouteContext.getSqlStatementContext(), getTestParameters().getInputSQL(), getTestParameters().getInputParameters());
new ShardingSQLRewriteContextDecorator(shardingRouteContext).decorate(shardingRule, properties, sqlRewriteContext);
sqlRewriteContext.generateSQLTokens();
Collection<SQLRewriteResult> result = new LinkedList<>();
......@@ -97,17 +97,17 @@ public final class ShardingSQLRewriterParameterizedTest extends AbstractSQLRewri
}
private ShardingSphereMetaData createShardingSphereMetaData() {
TableMetas tableMetas = mock(TableMetas.class);
when(tableMetas.getAllTableNames()).thenReturn(Arrays.asList("t_account", "t_account_detail"));
SchemaMetaData schemaMetaData = mock(SchemaMetaData.class);
when(schemaMetaData.getAllTableNames()).thenReturn(Arrays.asList("t_account", "t_account_detail"));
TableMetaData accountTableMetaData = mock(TableMetaData.class);
when(accountTableMetaData.getColumns()).thenReturn(createColumnMetaDataMap());
Map<String, IndexMetaData> indexMetaDataMap = new HashMap<>(1, 1);
indexMetaDataMap.put("index_name", new IndexMetaData("index_name"));
when(accountTableMetaData.getIndexes()).thenReturn(indexMetaDataMap);
when(tableMetas.get("t_account")).thenReturn(accountTableMetaData);
when(tableMetas.get("t_account_detail")).thenReturn(mock(TableMetaData.class));
when(tableMetas.getAllColumnNames("t_account")).thenReturn(Arrays.asList("account_id", "amount", "status"));
return new ShardingSphereMetaData(mock(DataSourceMetas.class), tableMetas);
when(schemaMetaData.get("t_account")).thenReturn(accountTableMetaData);
when(schemaMetaData.get("t_account_detail")).thenReturn(mock(TableMetaData.class));
when(schemaMetaData.getAllColumnNames("t_account")).thenReturn(Arrays.asList("account_id", "amount", "status"));
return new ShardingSphereMetaData(mock(DataSourceMetas.class), schemaMetaData);
}
private Map<String, ColumnMetaData> createColumnMetaDataMap() {
......
......@@ -37,7 +37,7 @@ import org.apache.shardingsphere.sharding.route.engine.type.ShardingRouteEngineF
import org.apache.shardingsphere.sharding.route.engine.validator.ShardingStatementValidatorFactory;
import org.apache.shardingsphere.sql.parser.SQLParserEngine;
import org.apache.shardingsphere.sql.parser.binder.SQLStatementContextFactory;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.sql.parser.binder.statement.dml.InsertStatementContext;
import org.apache.shardingsphere.sql.parser.binder.statement.dml.SelectStatementContext;
......@@ -72,10 +72,10 @@ public final class ShardingRouter implements DateNodeRouter {
public ShardingRouteContext route(final String sql, final List<Object> parameters, final boolean useCache) {
SQLStatement sqlStatement = parse(sql, useCache);
ShardingStatementValidatorFactory.newInstance(sqlStatement).ifPresent(shardingStatementValidator -> shardingStatementValidator.validate(shardingRule, sqlStatement, parameters));
SQLStatementContext sqlStatementContext = SQLStatementContextFactory.newInstance(metaData.getTables(), sql, parameters, sqlStatement);
SQLStatementContext sqlStatementContext = SQLStatementContextFactory.newInstance(metaData.getSchema(), sql, parameters, sqlStatement);
Optional<GeneratedKey> generatedKey = sqlStatement instanceof InsertStatement
? GeneratedKey.getGenerateKey(shardingRule, metaData.getTables(), parameters, (InsertStatement) sqlStatement) : Optional.empty();
ShardingConditions shardingConditions = getShardingConditions(parameters, sqlStatementContext, generatedKey.orElse(null), metaData.getTables());
? GeneratedKey.getGenerateKey(shardingRule, metaData.getSchema(), parameters, (InsertStatement) sqlStatement) : Optional.empty();
ShardingConditions shardingConditions = getShardingConditions(parameters, sqlStatementContext, generatedKey.orElse(null), metaData.getSchema());
boolean needMergeShardingValues = isNeedMergeShardingValues(sqlStatementContext);
if (sqlStatementContext.getSqlStatement() instanceof DMLStatement && needMergeShardingValues) {
checkSubqueryShardingValues(sqlStatementContext, shardingConditions);
......@@ -100,13 +100,14 @@ public final class ShardingRouter implements DateNodeRouter {
return sqlParserEngine.parse(sql, useCache);
}
private ShardingConditions getShardingConditions(final List<Object> parameters, final SQLStatementContext sqlStatementContext, final GeneratedKey generatedKey, final TableMetas tableMetas) {
private ShardingConditions getShardingConditions(final List<Object> parameters,
final SQLStatementContext sqlStatementContext, final GeneratedKey generatedKey, final SchemaMetaData schemaMetaData) {
if (sqlStatementContext.getSqlStatement() instanceof DMLStatement) {
if (sqlStatementContext instanceof InsertStatementContext) {
InsertStatementContext shardingInsertStatement = (InsertStatementContext) sqlStatementContext;
return new ShardingConditions(new InsertClauseShardingConditionEngine(shardingRule).createShardingConditions(shardingInsertStatement, generatedKey, parameters));
}
return new ShardingConditions(new WhereClauseShardingConditionEngine(shardingRule, tableMetas).createShardingConditions(sqlStatementContext, parameters));
return new ShardingConditions(new WhereClauseShardingConditionEngine(shardingRule, schemaMetaData).createShardingConditions(sqlStatementContext, parameters));
}
return new ShardingConditions(Collections.emptyList());
}
......
......@@ -28,7 +28,7 @@ import org.apache.shardingsphere.sharding.route.engine.condition.AlwaysFalseShar
import org.apache.shardingsphere.sharding.route.engine.condition.Column;
import org.apache.shardingsphere.sharding.route.engine.condition.ShardingCondition;
import org.apache.shardingsphere.sharding.route.engine.condition.generator.ConditionValueGeneratorFactory;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.sql.parser.binder.type.WhereAvailable;
import org.apache.shardingsphere.sql.parser.sql.segment.dml.predicate.AndPredicate;
......@@ -54,7 +54,7 @@ public final class WhereClauseShardingConditionEngine {
private final ShardingRule shardingRule;
private final TableMetas tableMetas;
private final SchemaMetaData schemaMetaData;
/**
* Create sharding conditions.
......@@ -98,7 +98,7 @@ public final class WhereClauseShardingConditionEngine {
private Map<Column, Collection<RouteValue>> createRouteValueMap(final SQLStatementContext sqlStatementContext, final AndPredicate andPredicate, final List<Object> parameters) {
Map<Column, Collection<RouteValue>> result = new HashMap<>();
for (PredicateSegment each : andPredicate.getPredicates()) {
Optional<String> tableName = sqlStatementContext.getTablesContext().findTableName(each, tableMetas);
Optional<String> tableName = sqlStatementContext.getTablesContext().findTableName(each, schemaMetaData);
if (!tableName.isPresent() || !shardingRule.isShardingColumn(each.getColumn().getIdentifier().getValue(), tableName.get())) {
continue;
}
......
......@@ -25,7 +25,7 @@ import org.apache.shardingsphere.sql.parser.sql.segment.dml.expr.ExpressionSegme
import org.apache.shardingsphere.sql.parser.sql.segment.dml.expr.simple.LiteralExpressionSegment;
import org.apache.shardingsphere.sql.parser.sql.segment.dml.expr.simple.ParameterMarkerExpressionSegment;
import org.apache.shardingsphere.sql.parser.sql.statement.dml.InsertStatement;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import java.util.Collection;
import java.util.LinkedList;
......@@ -50,26 +50,26 @@ public final class GeneratedKey {
* Get generate key.
*
* @param shardingRule sharding rule
* @param tableMetas table metas
* @param schemaMetaData schema meta data
* @param parameters SQL parameters
* @param insertStatement insert statement
* @return generate key
*/
public static Optional<GeneratedKey> getGenerateKey(final ShardingRule shardingRule, final TableMetas tableMetas, final List<Object> parameters, final InsertStatement insertStatement) {
public static Optional<GeneratedKey> getGenerateKey(final ShardingRule shardingRule, final SchemaMetaData schemaMetaData, final List<Object> parameters, final InsertStatement insertStatement) {
Optional<String> generateKeyColumnNameOptional = shardingRule.findGenerateKeyColumnName(insertStatement.getTable().getTableName().getIdentifier().getValue());
return generateKeyColumnNameOptional.map(generateKeyColumnName -> containsGenerateKey(tableMetas, insertStatement, generateKeyColumnName)
? findGeneratedKey(tableMetas, parameters, insertStatement, generateKeyColumnName) : createGeneratedKey(shardingRule, insertStatement, generateKeyColumnName));
return generateKeyColumnNameOptional.map(generateKeyColumnName -> containsGenerateKey(schemaMetaData, insertStatement, generateKeyColumnName)
? findGeneratedKey(schemaMetaData, parameters, insertStatement, generateKeyColumnName) : createGeneratedKey(shardingRule, insertStatement, generateKeyColumnName));
}
private static boolean containsGenerateKey(final TableMetas tableMetas, final InsertStatement insertStatement, final String generateKeyColumnName) {
private static boolean containsGenerateKey(final SchemaMetaData schemaMetaData, final InsertStatement insertStatement, final String generateKeyColumnName) {
return insertStatement.getColumnNames().isEmpty()
? tableMetas.getAllColumnNames(insertStatement.getTable().getTableName().getIdentifier().getValue()).size() == insertStatement.getValueCountForPerGroup()
? schemaMetaData.getAllColumnNames(insertStatement.getTable().getTableName().getIdentifier().getValue()).size() == insertStatement.getValueCountForPerGroup()
: insertStatement.getColumnNames().contains(generateKeyColumnName);
}
private static GeneratedKey findGeneratedKey(final TableMetas tableMetas, final List<Object> parameters, final InsertStatement insertStatement, final String generateKeyColumnName) {
private static GeneratedKey findGeneratedKey(final SchemaMetaData schemaMetaData, final List<Object> parameters, final InsertStatement insertStatement, final String generateKeyColumnName) {
GeneratedKey result = new GeneratedKey(generateKeyColumnName, false);
for (ExpressionSegment each : findGenerateKeyExpressions(tableMetas, insertStatement, generateKeyColumnName)) {
for (ExpressionSegment each : findGenerateKeyExpressions(schemaMetaData, insertStatement, generateKeyColumnName)) {
if (each instanceof ParameterMarkerExpressionSegment) {
result.getGeneratedValues().add((Comparable<?>) parameters.get(((ParameterMarkerExpressionSegment) each).getParameterMarkerIndex()));
} else if (each instanceof LiteralExpressionSegment) {
......@@ -79,16 +79,16 @@ public final class GeneratedKey {
return result;
}
private static Collection<ExpressionSegment> findGenerateKeyExpressions(final TableMetas tableMetas, final InsertStatement insertStatement, final String generateKeyColumnName) {
private static Collection<ExpressionSegment> findGenerateKeyExpressions(final SchemaMetaData schemaMetaData, final InsertStatement insertStatement, final String generateKeyColumnName) {
Collection<ExpressionSegment> result = new LinkedList<>();
for (List<ExpressionSegment> each : insertStatement.getAllValueExpressions()) {
result.add(each.get(findGenerateKeyIndex(tableMetas, insertStatement, generateKeyColumnName.toLowerCase())));
result.add(each.get(findGenerateKeyIndex(schemaMetaData, insertStatement, generateKeyColumnName.toLowerCase())));
}
return result;
}
private static int findGenerateKeyIndex(final TableMetas tableMetas, final InsertStatement insertStatement, final String generateKeyColumnName) {
return insertStatement.getColumnNames().isEmpty() ? tableMetas.getAllColumnNames(insertStatement.getTable().getTableName().getIdentifier().getValue()).indexOf(generateKeyColumnName)
private static int findGenerateKeyIndex(final SchemaMetaData schemaMetaData, final InsertStatement insertStatement, final String generateKeyColumnName) {
return insertStatement.getColumnNames().isEmpty() ? schemaMetaData.getAllColumnNames(insertStatement.getTable().getTableName().getIdentifier().getValue()).indexOf(generateKeyColumnName)
: insertStatement.getColumnNames().indexOf(generateKeyColumnName);
}
......
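For context on the GeneratedKey changes above: after the rename, the generate-key column position is resolved against SchemaMetaData instead of TableMetas. The snippet below is a hypothetical, standalone sketch rather than part of this commit; it only uses constructors and accessors that appear elsewhere in this diff, and the table and column names are illustrative.

import org.apache.shardingsphere.sql.parser.binder.metadata.column.ColumnMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaData;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public final class GeneratedKeyLookupSketch {
    
    public static void main(final String[] args) {
        // Build a one-table schema using the constructors that also appear in AbstractSQLRouteTest later in this diff.
        Map<String, TableMetaData> tableMetaDataMap = new HashMap<>(1, 1);
        tableMetaDataMap.put("t_order", new TableMetaData(
                Arrays.asList(new ColumnMetaData("order_id", "int", true), new ColumnMetaData("user_id", "int", false)), Collections.emptySet()));
        SchemaMetaData schemaMetaData = new SchemaMetaData(tableMetaDataMap);
        // When an INSERT omits its column list, findGenerateKeyIndex in the hunk above falls back to
        // the column order recorded in the schema meta data; this is the same lookup.
        int generateKeyIndex = schemaMetaData.getAllColumnNames("t_order").indexOf("order_id");
        System.out.println("generate key index: " + generateKeyIndex);
    }
}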
......@@ -73,7 +73,7 @@ public final class ShardingRouteEngineFactory {
return new ShardingDatabaseBroadcastRoutingEngine();
}
if (sqlStatement instanceof DDLStatement) {
return new ShardingTableBroadcastRoutingEngine(metaData.getTables(), sqlStatementContext);
return new ShardingTableBroadcastRoutingEngine(metaData.getSchema(), sqlStatementContext);
}
if (sqlStatement instanceof DALStatement) {
return getDALRoutingEngine(shardingRule, sqlStatement, tableNames);
......@@ -114,7 +114,7 @@ public final class ShardingRouteEngineFactory {
private static ShardingRouteEngine getDCLRoutingEngine(final SQLStatementContext sqlStatementContext, final ShardingSphereMetaData metaData) {
return isDCLForSingleTable(sqlStatementContext)
? new ShardingTableBroadcastRoutingEngine(metaData.getTables(), sqlStatementContext) : new ShardingMasterInstanceBroadcastRoutingEngine(metaData.getDataSources());
? new ShardingTableBroadcastRoutingEngine(metaData.getSchema(), sqlStatementContext) : new ShardingMasterInstanceBroadcastRoutingEngine(metaData.getDataSources());
}
private static boolean isDCLForSingleTable(final SQLStatementContext sqlStatementContext) {
......
......@@ -26,7 +26,7 @@ import org.apache.shardingsphere.sharding.route.engine.type.ShardingRouteEngine;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.sql.parser.sql.segment.ddl.index.IndexSegment;
import org.apache.shardingsphere.sql.parser.sql.statement.ddl.DropIndexStatement;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.underlying.route.context.RouteResult;
import org.apache.shardingsphere.underlying.route.context.RouteUnit;
import org.apache.shardingsphere.underlying.route.context.TableUnit;
......@@ -41,7 +41,7 @@ import java.util.Optional;
@RequiredArgsConstructor
public final class ShardingTableBroadcastRoutingEngine implements ShardingRouteEngine {
private final TableMetas tableMetas;
private final SchemaMetaData schemaMetaData;
private final SQLStatementContext sqlStatementContext;
......@@ -70,8 +70,8 @@ public final class ShardingTableBroadcastRoutingEngine implements ShardingRouteE
}
private Optional<String> findLogicTableNameFromMetaData(final String logicIndexName) {
for (String each : tableMetas.getAllTableNames()) {
if (tableMetas.get(each).getIndexes().containsKey(logicIndexName)) {
for (String each : schemaMetaData.getAllTableNames()) {
if (schemaMetaData.get(each).getIndexes().containsKey(logicIndexName)) {
return Optional.of(each);
}
}
......
......@@ -17,7 +17,7 @@
package org.apache.shardingsphere.sharding.route.hook;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sharding.route.engine.context.ShardingRouteContext;
/**
......@@ -36,9 +36,9 @@ public interface RoutingHook {
* Handle when routing finished success.
*
* @param shardingRouteContext sharding route context
* @param tableMetas table metas
* @param schemaMetaData schema meta data
*/
void finishSuccess(ShardingRouteContext shardingRouteContext, TableMetas tableMetas);
void finishSuccess(ShardingRouteContext shardingRouteContext, SchemaMetaData schemaMetaData);
/**
* Handle when routing finished failure.
......
......@@ -17,7 +17,7 @@
package org.apache.shardingsphere.sharding.route.hook;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sharding.route.engine.context.ShardingRouteContext;
import org.apache.shardingsphere.spi.NewInstanceServiceLoader;
......@@ -42,9 +42,9 @@ public final class SPIRoutingHook implements RoutingHook {
}
@Override
public void finishSuccess(final ShardingRouteContext shardingRouteContext, final TableMetas tableMetas) {
public void finishSuccess(final ShardingRouteContext shardingRouteContext, final SchemaMetaData schemaMetaData) {
for (RoutingHook each : routingHooks) {
each.finishSuccess(shardingRouteContext, tableMetas);
each.finishSuccess(shardingRouteContext, schemaMetaData);
}
}
......
......@@ -36,7 +36,7 @@ import org.apache.shardingsphere.underlying.common.metadata.ShardingSphereMetaDa
import org.apache.shardingsphere.sql.parser.binder.metadata.column.ColumnMetaData;
import org.apache.shardingsphere.underlying.common.metadata.datasource.DataSourceMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.junit.Before;
import org.junit.Test;
......@@ -86,10 +86,10 @@ public final class DatabaseTest {
when(dataSourceMetas.getDataSourceMetaData("ds_0")).thenReturn(mock(DataSourceMetaData.class));
ColumnMetaData idColumnMetaData = new ColumnMetaData("id", "int", true);
ColumnMetaData nameColumnMetaData = new ColumnMetaData("user_id", "int", false);
TableMetas tableMetas = mock(TableMetas.class);
when(tableMetas.get("tesT")).thenReturn(new TableMetaData(Arrays.asList(idColumnMetaData, nameColumnMetaData), Arrays.asList(new IndexMetaData("id"), new IndexMetaData("user_id"))));
when(tableMetas.containsTable("tesT")).thenReturn(true);
return new ShardingSphereMetaData(dataSourceMetas, tableMetas);
SchemaMetaData schemaMetaData = mock(SchemaMetaData.class);
when(schemaMetaData.get("tesT")).thenReturn(new TableMetaData(Arrays.asList(idColumnMetaData, nameColumnMetaData), Arrays.asList(new IndexMetaData("id"), new IndexMetaData("user_id"))));
when(schemaMetaData.containsTable("tesT")).thenReturn(true);
return new ShardingSphereMetaData(dataSourceMetas, schemaMetaData);
}
@Test
......@@ -108,12 +108,12 @@ public final class DatabaseTest {
private ShardingSphereMetaData getMetaDataForPagination() {
ColumnMetaData idColumnMetaData = new ColumnMetaData("id", "int", true);
ColumnMetaData nameColumnMetaData = new ColumnMetaData("user_id", "int", false);
TableMetas tableMetas = mock(TableMetas.class);
when(tableMetas.get("tbl_pagination")).thenReturn(
SchemaMetaData schemaMetaData = mock(SchemaMetaData.class);
when(schemaMetaData.get("tbl_pagination")).thenReturn(
new TableMetaData(Arrays.asList(idColumnMetaData, nameColumnMetaData), Arrays.asList(new IndexMetaData("id"), new IndexMetaData("user_id"))));
when(tableMetas.containsTable("tbl_pagination")).thenReturn(true);
when(schemaMetaData.containsTable("tbl_pagination")).thenReturn(true);
ShardingSphereMetaData result = mock(ShardingSphereMetaData.class);
when(result.getTables()).thenReturn(tableMetas);
when(result.getSchema()).thenReturn(schemaMetaData);
DataSourceMetas dataSourceMetas = mock(DataSourceMetas.class);
when(dataSourceMetas.getDataSourceMetaData("ds_0")).thenReturn(mock(DataSourceMetaData.class));
when(result.getDataSources()).thenReturn(dataSourceMetas);
......@@ -134,7 +134,7 @@ public final class DatabaseTest {
ShardingRule rule = new ShardingRule(shardingRuleConfig, dataSourceMap.keySet());
String originSQL = "select city_id from t_user where city_id in (?,?) limit 5,10";
ShardingSphereMetaData metaData = mock(ShardingSphereMetaData.class);
when(metaData.getTables()).thenReturn(mock(TableMetas.class));
when(metaData.getSchema()).thenReturn(mock(SchemaMetaData.class));
SQLParserEngine sqlParserEngine = SQLParserEngineFactory.getSQLParserEngine("MySQL");
ShardingRouteContext actual = new ShardingRouter(rule, properties, metaData, sqlParserEngine).route(originSQL, Lists.newArrayList(13, 173), false);
assertThat(((SelectStatementContext) actual.getSqlStatementContext()).getPaginationContext().getActualOffset(), is(5L));
......
......@@ -27,7 +27,7 @@ import org.apache.shardingsphere.sql.parser.sql.segment.dml.expr.simple.Paramete
import org.apache.shardingsphere.sql.parser.sql.segment.generic.table.SimpleTableSegment;
import org.apache.shardingsphere.sql.parser.sql.statement.dml.InsertStatement;
import org.apache.shardingsphere.sql.parser.sql.value.identifier.IdentifierValue;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
......@@ -52,7 +52,7 @@ public final class GeneratedKeyTest {
private ShardingRule shardingRule;
@Mock
private TableMetas tableMetas;
private SchemaMetaData schemaMetaData;
@Before
public void setUp() {
......@@ -63,14 +63,14 @@ public final class GeneratedKeyTest {
@Test
public void assertGetGenerateKeyWithoutGenerateKeyColumnConfiguration() {
when(shardingRule.findGenerateKeyColumnName("tbl")).thenReturn(Optional.empty());
assertFalse(GeneratedKey.getGenerateKey(shardingRule, tableMetas, Collections.singletonList(1), insertStatement).isPresent());
assertFalse(GeneratedKey.getGenerateKey(shardingRule, schemaMetaData, Collections.singletonList(1), insertStatement).isPresent());
}
@Test
public void assertGetGenerateKeyWhenCreateWithGenerateKeyColumnConfiguration() {
insertStatement.getValues().add(new InsertValuesSegment(0, 0, Collections.singletonList(new LiteralExpressionSegment(0, 0, 1))));
when(shardingRule.findGenerateKeyColumnName("tbl")).thenReturn(Optional.of("id1"));
Optional<GeneratedKey> actual = GeneratedKey.getGenerateKey(shardingRule, tableMetas, Collections.singletonList(1), insertStatement);
Optional<GeneratedKey> actual = GeneratedKey.getGenerateKey(shardingRule, schemaMetaData, Collections.singletonList(1), insertStatement);
assertTrue(actual.isPresent());
assertThat(actual.get().getGeneratedValues().size(), is(1));
}
......@@ -82,12 +82,12 @@ public final class GeneratedKeyTest {
insertStatement.getValues().add(new InsertValuesSegment(0, 0, Collections.singletonList(new LiteralExpressionSegment(1, 2, "value"))));
insertStatement.getValues().add(new InsertValuesSegment(0, 0, Collections.singletonList(new CommonExpressionSegment(1, 2, "ignored value"))));
when(shardingRule.findGenerateKeyColumnName("tbl")).thenReturn(Optional.of("id"));
Optional<GeneratedKey> actual = GeneratedKey.getGenerateKey(shardingRule, tableMetas, Collections.singletonList(1), insertStatement);
Optional<GeneratedKey> actual = GeneratedKey.getGenerateKey(shardingRule, schemaMetaData, Collections.singletonList(1), insertStatement);
assertTrue(actual.isPresent());
assertThat(actual.get().getGeneratedValues().size(), is(3));
assertThat(actual.get().getGeneratedValues().get(0), is((Comparable) 1));
assertThat(actual.get().getGeneratedValues().get(1), is((Comparable) 100));
assertThat(actual.get().getGeneratedValues().get(2), is((Comparable) "value"));
assertTrue(GeneratedKey.getGenerateKey(shardingRule, tableMetas, Collections.singletonList(1), insertStatement).isPresent());
assertTrue(GeneratedKey.getGenerateKey(shardingRule, schemaMetaData, Collections.singletonList(1), insertStatement).isPresent());
}
}
......@@ -23,7 +23,7 @@ import org.apache.shardingsphere.api.config.sharding.TableRuleConfiguration;
import org.apache.shardingsphere.core.rule.ShardingRule;
import org.apache.shardingsphere.sql.parser.binder.metadata.index.IndexMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.segment.table.TablesContext;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.sql.parser.sql.segment.ddl.index.IndexSegment;
......@@ -59,7 +59,7 @@ public final class ShardingTableBroadcastRoutingEngineTest {
private TablesContext tablesContext;
@Mock
private TableMetas tableMetas;
private SchemaMetaData schemaMetaData;
@Mock
private TableMetaData tableMetaData;
......@@ -75,12 +75,12 @@ public final class ShardingTableBroadcastRoutingEngineTest {
shardingRuleConfig.getTableRuleConfigs().add(tableRuleConfig);
when(sqlStatementContext.getTablesContext()).thenReturn(tablesContext);
when(tablesContext.getTableNames()).thenReturn(Lists.newArrayList("t_order"));
when(tableMetas.getAllTableNames()).thenReturn(Lists.newArrayList("t_order"));
when(tableMetas.get("t_order")).thenReturn(tableMetaData);
when(schemaMetaData.getAllTableNames()).thenReturn(Lists.newArrayList("t_order"));
when(schemaMetaData.get("t_order")).thenReturn(tableMetaData);
Map<String, IndexMetaData> indexMetaDataMap = new HashMap<>(1, 1);
indexMetaDataMap.put("index_name", new IndexMetaData("index_name"));
when(tableMetaData.getIndexes()).thenReturn(indexMetaDataMap);
tableBroadcastRoutingEngine = new ShardingTableBroadcastRoutingEngine(tableMetas, sqlStatementContext);
tableBroadcastRoutingEngine = new ShardingTableBroadcastRoutingEngine(schemaMetaData, sqlStatementContext);
shardingRule = new ShardingRule(shardingRuleConfig, Arrays.asList("ds0", "ds1"));
}
......
......@@ -30,7 +30,7 @@ import org.apache.shardingsphere.underlying.common.metadata.ShardingSphereMetaDa
import org.apache.shardingsphere.sql.parser.binder.metadata.column.ColumnMetaData;
import org.apache.shardingsphere.underlying.common.metadata.datasource.DataSourceMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.underlying.route.context.RouteContext;
import java.util.Arrays;
......@@ -47,7 +47,7 @@ public abstract class AbstractSQLRouteTest extends AbstractRoutingEngineTest {
protected final RouteContext assertRoute(final String sql, final List<Object> parameters) {
ShardingRule shardingRule = createAllShardingRule();
ShardingSphereMetaData metaData = new ShardingSphereMetaData(buildDataSourceMetas(), buildTableMetas());
ShardingSphereMetaData metaData = new ShardingSphereMetaData(buildDataSourceMetas(), buildSchemaMetaData());
SQLParserEngine sqlParserEngine = SQLParserEngineFactory.getSQLParserEngine("MySQL");
ShardingRouter shardingRouter = new ShardingRouter(shardingRule, new ShardingSphereProperties(new Properties()), metaData, sqlParserEngine);
ShardingRouteContext result = shardingRouter.route(sql, parameters, false);
......@@ -66,7 +66,7 @@ public abstract class AbstractSQLRouteTest extends AbstractRoutingEngineTest {
return new DataSourceMetas(DatabaseTypes.getActualDatabaseType("MySQL"), dataSourceInfoMap);
}
private TableMetas buildTableMetas() {
private SchemaMetaData buildSchemaMetaData() {
Map<String, TableMetaData> tableMetaDataMap = new HashMap<>(3, 1);
tableMetaDataMap.put("t_order", new TableMetaData(Arrays.asList(new ColumnMetaData("order_id", "int", true),
new ColumnMetaData("user_id", "int", false),
......@@ -78,6 +78,6 @@ public abstract class AbstractSQLRouteTest extends AbstractRoutingEngineTest {
new ColumnMetaData("c_date", "timestamp", false)), Collections.emptySet()));
tableMetaDataMap.put("t_other", new TableMetaData(Collections.singletonList(new ColumnMetaData("order_id", "int", true)), Collections.emptySet()));
tableMetaDataMap.put("t_category", new TableMetaData(Collections.singletonList(new ColumnMetaData("order_id", "int", true)), Collections.emptySet()));
return new TableMetas(tableMetaDataMap);
return new SchemaMetaData(tableMetaDataMap);
}
}
......@@ -17,7 +17,7 @@
package org.apache.shardingsphere.sharding.route.fixture;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sharding.route.engine.context.ShardingRouteContext;
import org.apache.shardingsphere.sharding.route.hook.RoutingHook;
......@@ -30,7 +30,7 @@ public final class RoutingHookFixture implements RoutingHook {
private ShardingRouteContext routeContext;
private TableMetas tableMetas;
private SchemaMetaData schemaMetaData;
private Exception cause;
......@@ -40,9 +40,9 @@ public final class RoutingHookFixture implements RoutingHook {
}
@Override
public void finishSuccess(final ShardingRouteContext shardingRouteContext, final TableMetas tableMetas) {
public void finishSuccess(final ShardingRouteContext shardingRouteContext, final SchemaMetaData schemaMetaData) {
this.routeContext = shardingRouteContext;
this.tableMetas = tableMetas;
this.schemaMetaData = schemaMetaData;
}
@Override
......
......@@ -18,7 +18,7 @@
package org.apache.shardingsphere.sharding.route.hook;
import lombok.SneakyThrows;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sharding.route.engine.context.ShardingRouteContext;
import org.apache.shardingsphere.sharding.route.fixture.RoutingHookFixture;
import org.junit.Test;
......@@ -41,7 +41,7 @@ public final class SPIRoutingHookTest {
private ShardingRouteContext routeContext;
@Mock
private TableMetas tableMetas;
private SchemaMetaData schemaMetaData;
@Mock
private Exception exception;
......@@ -57,11 +57,11 @@ public final class SPIRoutingHookTest {
@Test
public void assertFinishSuccess() {
spiRoutingHook.finishSuccess(routeContext, tableMetas);
spiRoutingHook.finishSuccess(routeContext, schemaMetaData);
RoutingHook routingHook = getFixtureHook();
assertThat(routingHook, instanceOf(RoutingHookFixture.class));
assertThat(((RoutingHookFixture) routingHook).getRouteContext(), is(routeContext));
assertThat(((RoutingHookFixture) routingHook).getTableMetas(), is(tableMetas));
assertThat(((RoutingHookFixture) routingHook).getSchemaMetaData(), is(schemaMetaData));
}
@Test
......
......@@ -22,7 +22,7 @@ import lombok.Setter;
import org.apache.shardingsphere.core.rule.ShardingRule;
import org.apache.shardingsphere.encrypt.metadata.EncryptTableMetaDataDecorator;
import org.apache.shardingsphere.core.metadata.ShardingTableMetaDataDecorator;
import org.apache.shardingsphere.core.metadata.ShardingTableMetasLoader;
import org.apache.shardingsphere.core.metadata.ShardingMetaDataLoader;
import org.apache.shardingsphere.sharding.execute.sql.StatementExecuteUnit;
import org.apache.shardingsphere.sharding.execute.sql.execute.SQLExecuteCallback;
import org.apache.shardingsphere.sharding.execute.sql.execute.SQLExecuteTemplate;
......@@ -32,7 +32,7 @@ import org.apache.shardingsphere.shardingjdbc.jdbc.core.context.ShardingRuntimeC
import org.apache.shardingsphere.spi.database.type.DatabaseType;
import org.apache.shardingsphere.sql.parser.binder.metadata.index.IndexMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.sql.parser.binder.statement.ddl.AlterTableStatementContext;
import org.apache.shardingsphere.sql.parser.binder.statement.ddl.CreateIndexStatementContext;
......@@ -179,17 +179,17 @@ public abstract class AbstractStatementExecutor {
private void refreshTableMetaData(final ShardingRuntimeContext runtimeContext, final CreateTableStatement createTableStatement) throws SQLException {
String tableName = createTableStatement.getTable().getTableName().getIdentifier().getValue();
runtimeContext.getMetaData().getTables().put(tableName, loadTableMeta(tableName));
runtimeContext.getMetaData().getSchema().put(tableName, loadTableMeta(tableName));
}
private void refreshTableMetaData(final ShardingRuntimeContext runtimeContext, final AlterTableStatement alterTableStatement) throws SQLException {
String tableName = alterTableStatement.getTable().getTableName().getIdentifier().getValue();
runtimeContext.getMetaData().getTables().put(tableName, loadTableMeta(tableName));
runtimeContext.getMetaData().getSchema().put(tableName, loadTableMeta(tableName));
}
private void refreshTableMetaData(final ShardingRuntimeContext runtimeContext, final DropTableStatement dropTableStatement) {
for (SimpleTableSegment each : dropTableStatement.getTables()) {
runtimeContext.getMetaData().getTables().remove(each.getTableName().getIdentifier().getValue());
runtimeContext.getMetaData().getSchema().remove(each.getTableName().getIdentifier().getValue());
}
}
......@@ -198,19 +198,19 @@ public abstract class AbstractStatementExecutor {
return;
}
String indexName = createIndexStatement.getIndex().getIdentifier().getValue();
runtimeContext.getMetaData().getTables().get(createIndexStatement.getTable().getTableName().getIdentifier().getValue()).getIndexes().put(indexName, new IndexMetaData(indexName));
runtimeContext.getMetaData().getSchema().get(createIndexStatement.getTable().getTableName().getIdentifier().getValue()).getIndexes().put(indexName, new IndexMetaData(indexName));
}
private void refreshTableMetaData(final ShardingRuntimeContext runtimeContext, final DropIndexStatement dropIndexStatement) {
Collection<String> indexNames = getIndexNames(dropIndexStatement);
TableMetaData tableMetaData = runtimeContext.getMetaData().getTables().get(dropIndexStatement.getTable().getTableName().getIdentifier().getValue());
TableMetaData tableMetaData = runtimeContext.getMetaData().getSchema().get(dropIndexStatement.getTable().getTableName().getIdentifier().getValue());
if (null != dropIndexStatement.getTable()) {
for (String each : indexNames) {
tableMetaData.getIndexes().remove(each);
}
}
for (String each : indexNames) {
if (findLogicTableName(runtimeContext.getMetaData().getTables(), each).isPresent()) {
if (findLogicTableName(runtimeContext.getMetaData().getSchema(), each).isPresent()) {
tableMetaData.getIndexes().remove(each);
}
}
......@@ -224,9 +224,9 @@ public abstract class AbstractStatementExecutor {
return result;
}
private Optional<String> findLogicTableName(final TableMetas tableMetas, final String logicIndexName) {
for (String each : tableMetas.getAllTableNames()) {
if (tableMetas.get(each).getIndexes().containsKey(logicIndexName)) {
private Optional<String> findLogicTableName(final SchemaMetaData schemaMetaData, final String logicIndexName) {
for (String each : schemaMetaData.getAllTableNames()) {
if (schemaMetaData.get(each).getIndexes().containsKey(logicIndexName)) {
return Optional.of(each);
}
}
......@@ -237,7 +237,7 @@ public abstract class AbstractStatementExecutor {
ShardingRule shardingRule = connection.getRuntimeContext().getRule();
int maxConnectionsSizePerQuery = connection.getRuntimeContext().getProperties().<Integer>getValue(PropertiesConstant.MAX_CONNECTIONS_SIZE_PER_QUERY);
boolean isCheckingMetaData = connection.getRuntimeContext().getProperties().<Boolean>getValue(PropertiesConstant.CHECK_TABLE_METADATA_ENABLED);
TableMetaData result = new ShardingTableMetasLoader(connection.getDataSourceMap(), shardingRule, maxConnectionsSizePerQuery, isCheckingMetaData).load(tableName);
TableMetaData result = new ShardingMetaDataLoader(connection.getDataSourceMap(), shardingRule, maxConnectionsSizePerQuery, isCheckingMetaData).load(tableName);
result = new ShardingTableMetaDataDecorator().decorate(result, tableName, shardingRule);
if (!shardingRule.getEncryptRule().getEncryptTableNames().isEmpty()) {
result = new EncryptTableMetaDataDecorator().decorate(result, tableName, shardingRule.getEncryptRule());
......
......@@ -20,9 +20,10 @@ package org.apache.shardingsphere.shardingjdbc.jdbc.core.context;
import org.apache.shardingsphere.encrypt.metadata.EncryptTableMetaDataDecorator;
import org.apache.shardingsphere.encrypt.rule.EncryptRule;
import org.apache.shardingsphere.spi.database.type.DatabaseType;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetasLoader;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaDataLoader;
import org.apache.shardingsphere.underlying.common.constant.properties.PropertiesConstant;
import org.apache.shardingsphere.underlying.common.metadata.decorator.SchemaMetaDataDecorator;
import javax.sql.DataSource;
import java.sql.SQLException;
......@@ -38,7 +39,8 @@ public final class EncryptRuntimeContext extends SingleDataSourceRuntimeContext<
}
@Override
protected TableMetas loadTableMetas(final DataSource dataSource) throws SQLException {
return new EncryptTableMetaDataDecorator().decorate(TableMetasLoader.load(dataSource, getProperties().<Integer>getValue(PropertiesConstant.MAX_CONNECTIONS_SIZE_PER_QUERY)), getRule());
protected SchemaMetaData loadSchemaMetaData(final DataSource dataSource) throws SQLException {
int maxConnectionCount = getProperties().<Integer>getValue(PropertiesConstant.MAX_CONNECTIONS_SIZE_PER_QUERY);
return SchemaMetaDataDecorator.decorate(SchemaMetaDataLoader.load(dataSource, maxConnectionCount), getRule(), new EncryptTableMetaDataDecorator());
}
}
......@@ -21,8 +21,8 @@ import lombok.Getter;
import org.apache.shardingsphere.core.rule.MasterSlaveRule;
import org.apache.shardingsphere.shardingjdbc.jdbc.core.datasource.metadata.CachedDatabaseMetaData;
import org.apache.shardingsphere.spi.database.type.DatabaseType;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetasLoader;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaDataLoader;
import org.apache.shardingsphere.underlying.common.constant.properties.PropertiesConstant;
import org.apache.shardingsphere.underlying.common.metadata.datasource.DataSourceMetas;
......@@ -53,7 +53,7 @@ public final class MasterSlaveRuntimeContext extends MultipleDataSourcesRuntimeC
}
@Override
protected TableMetas loadTableMetas(final Map<String, DataSource> dataSourceMap, final DataSourceMetas dataSourceMetas) throws SQLException {
return TableMetasLoader.load(dataSourceMap.values().iterator().next(), getProperties().<Integer>getValue(PropertiesConstant.MAX_CONNECTIONS_SIZE_PER_QUERY));
protected SchemaMetaData loadSchemaMetaData(final Map<String, DataSource> dataSourceMap, final DataSourceMetas dataSourceMetas) throws SQLException {
return SchemaMetaDataLoader.load(dataSourceMap.values().iterator().next(), getProperties().<Integer>getValue(PropertiesConstant.MAX_CONNECTIONS_SIZE_PER_QUERY));
}
}
......@@ -19,7 +19,7 @@ package org.apache.shardingsphere.shardingjdbc.jdbc.core.context;
import lombok.Getter;
import org.apache.shardingsphere.spi.database.type.DatabaseType;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.underlying.common.config.DatabaseAccessConfiguration;
import org.apache.shardingsphere.underlying.common.log.MetaDataLogger;
import org.apache.shardingsphere.underlying.common.metadata.ShardingSphereMetaData;
......@@ -55,8 +55,8 @@ public abstract class MultipleDataSourcesRuntimeContext<T extends BaseRule> exte
private ShardingSphereMetaData createMetaData(final Map<String, DataSource> dataSourceMap, final DatabaseType databaseType) throws SQLException {
DataSourceMetas dataSourceMetas = new DataSourceMetas(databaseType, getDatabaseAccessConfigurationMap(dataSourceMap));
TableMetas tableMetas = loadTableMetas(dataSourceMap, dataSourceMetas);
return new ShardingSphereMetaData(dataSourceMetas, tableMetas);
SchemaMetaData schemaMetaData = loadSchemaMetaData(dataSourceMap, dataSourceMetas);
return new ShardingSphereMetaData(dataSourceMetas, schemaMetaData);
}
private Map<String, DatabaseAccessConfiguration> getDatabaseAccessConfigurationMap(final Map<String, DataSource> dataSourceMap) throws SQLException {
......@@ -71,5 +71,5 @@ public abstract class MultipleDataSourcesRuntimeContext<T extends BaseRule> exte
return result;
}
protected abstract TableMetas loadTableMetas(Map<String, DataSource> dataSourceMap, DataSourceMetas dataSourceMetas) throws SQLException;
protected abstract SchemaMetaData loadSchemaMetaData(Map<String, DataSource> dataSourceMap, DataSourceMetas dataSourceMetas) throws SQLException;
}
......@@ -23,8 +23,8 @@ import org.apache.shardingsphere.shardingjdbc.jdbc.core.datasource.EncryptDataSo
import org.apache.shardingsphere.shardingjdbc.jdbc.core.datasource.MasterSlaveDataSource;
import org.apache.shardingsphere.shardingjdbc.jdbc.core.datasource.ShardingDataSource;
import org.apache.shardingsphere.spi.database.type.DatabaseType;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetasLoader;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaDataLoader;
import org.apache.shardingsphere.underlying.common.constant.properties.PropertiesConstant;
import javax.sql.DataSource;
......@@ -65,8 +65,8 @@ public final class ShadowRuntimeContext extends SingleDataSourceRuntimeContext<S
}
@Override
protected TableMetas loadTableMetas(final DataSource dataSource) throws SQLException {
return TableMetasLoader.load(dataSource, getProperties().<Integer>getValue(PropertiesConstant.MAX_CONNECTIONS_SIZE_PER_QUERY));
protected SchemaMetaData loadSchemaMetaData(final DataSource dataSource) throws SQLException {
return SchemaMetaDataLoader.load(dataSource, getProperties().<Integer>getValue(PropertiesConstant.MAX_CONNECTIONS_SIZE_PER_QUERY));
}
public enum ShadowType {
......
......@@ -18,16 +18,17 @@
package org.apache.shardingsphere.shardingjdbc.jdbc.core.context;
import lombok.Getter;
import org.apache.shardingsphere.core.metadata.ShardingMetaDataLoader;
import org.apache.shardingsphere.core.metadata.ShardingTableMetaDataDecorator;
import org.apache.shardingsphere.core.rule.ShardingRule;
import org.apache.shardingsphere.encrypt.metadata.EncryptTableMetaDataDecorator;
import org.apache.shardingsphere.core.metadata.ShardingTableMetaDataDecorator;
import org.apache.shardingsphere.core.metadata.ShardingTableMetasLoader;
import org.apache.shardingsphere.shardingjdbc.jdbc.core.datasource.metadata.CachedDatabaseMetaData;
import org.apache.shardingsphere.spi.database.type.DatabaseType;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.transaction.ShardingTransactionManagerEngine;
import org.apache.shardingsphere.underlying.common.constant.properties.PropertiesConstant;
import org.apache.shardingsphere.underlying.common.metadata.datasource.DataSourceMetas;
import org.apache.shardingsphere.underlying.common.metadata.decorator.SchemaMetaDataDecorator;
import javax.sql.DataSource;
import java.sql.Connection;
......@@ -60,13 +61,13 @@ public final class ShardingRuntimeContext extends MultipleDataSourcesRuntimeCont
}
@Override
protected TableMetas loadTableMetas(final Map<String, DataSource> dataSourceMap, final DataSourceMetas dataSourceMetas) throws SQLException {
protected SchemaMetaData loadSchemaMetaData(final Map<String, DataSource> dataSourceMap, final DataSourceMetas dataSourceMetas) throws SQLException {
int maxConnectionsSizePerQuery = getProperties().<Integer>getValue(PropertiesConstant.MAX_CONNECTIONS_SIZE_PER_QUERY);
boolean isCheckingMetaData = getProperties().<Boolean>getValue(PropertiesConstant.CHECK_TABLE_METADATA_ENABLED);
TableMetas result = new ShardingTableMetasLoader(dataSourceMap, getRule(), maxConnectionsSizePerQuery, isCheckingMetaData).load();
result = new ShardingTableMetaDataDecorator().decorate(result, getRule());
SchemaMetaData result = new ShardingMetaDataLoader(dataSourceMap, getRule(), maxConnectionsSizePerQuery, isCheckingMetaData).load();
result = SchemaMetaDataDecorator.decorate(result, getRule(), new ShardingTableMetaDataDecorator());
if (!getRule().getEncryptRule().getEncryptTableNames().isEmpty()) {
result = new EncryptTableMetaDataDecorator().decorate(result, getRule().getEncryptRule());
result = SchemaMetaDataDecorator.decorate(result, getRule().getEncryptRule(), new EncryptTableMetaDataDecorator());
}
return result;
}
......
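The loadSchemaMetaData override above is the main call site of the newly static SchemaMetaDataDecorator. The sketch below restates that decorator chain as a standalone helper; it is hypothetical, assumes only the classes and calls visible in the surrounding hunks, and its parameters are placeholders rather than values taken from this commit.

import org.apache.shardingsphere.core.metadata.ShardingMetaDataLoader;
import org.apache.shardingsphere.core.metadata.ShardingTableMetaDataDecorator;
import org.apache.shardingsphere.core.rule.ShardingRule;
import org.apache.shardingsphere.encrypt.metadata.EncryptTableMetaDataDecorator;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.underlying.common.metadata.decorator.SchemaMetaDataDecorator;
import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.Map;

public final class SchemaMetaDataDecorationSketch {
    
    public static SchemaMetaData load(final Map<String, DataSource> dataSourceMap, final ShardingRule shardingRule,
                                      final int maxConnectionsSizePerQuery, final boolean isCheckingMetaData) throws SQLException {
        // Load schema meta data for all data sources via the renamed ShardingMetaDataLoader.
        SchemaMetaData result = new ShardingMetaDataLoader(dataSourceMap, shardingRule, maxConnectionsSizePerQuery, isCheckingMetaData).load();
        // Apply the sharding decorator through the static SchemaMetaDataDecorator entry point.
        result = SchemaMetaDataDecorator.decorate(result, shardingRule, new ShardingTableMetaDataDecorator());
        // Apply the encrypt decorator only when encrypt tables are configured, mirroring ShardingRuntimeContext above.
        if (!shardingRule.getEncryptRule().getEncryptTableNames().isEmpty()) {
            result = SchemaMetaDataDecorator.decorate(result, shardingRule.getEncryptRule(), new EncryptTableMetaDataDecorator());
        }
        return result;
    }
}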
......@@ -19,7 +19,7 @@ package org.apache.shardingsphere.shardingjdbc.jdbc.core.context;
import lombok.Getter;
import org.apache.shardingsphere.spi.database.type.DatabaseType;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.underlying.common.config.DatabaseAccessConfiguration;
import org.apache.shardingsphere.underlying.common.log.MetaDataLogger;
import org.apache.shardingsphere.underlying.common.metadata.ShardingSphereMetaData;
......@@ -54,8 +54,8 @@ public abstract class SingleDataSourceRuntimeContext<T extends BaseRule> extends
private ShardingSphereMetaData createMetaData(final DataSource dataSource, final DatabaseType databaseType) throws SQLException {
DataSourceMetas dataSourceMetas = new DataSourceMetas(databaseType, getDatabaseAccessConfigurationMap(dataSource));
TableMetas tableMetas = loadTableMetas(dataSource);
return new ShardingSphereMetaData(dataSourceMetas, tableMetas);
SchemaMetaData schemaMetaData = loadSchemaMetaData(dataSource);
return new ShardingSphereMetaData(dataSourceMetas, schemaMetaData);
}
private Map<String, DatabaseAccessConfiguration> getDatabaseAccessConfigurationMap(final DataSource dataSource) throws SQLException {
......@@ -67,5 +67,5 @@ public abstract class SingleDataSourceRuntimeContext<T extends BaseRule> extends
return result;
}
protected abstract TableMetas loadTableMetas(DataSource dataSource) throws SQLException;
protected abstract SchemaMetaData loadSchemaMetaData(DataSource dataSource) throws SQLException;
}
......@@ -168,7 +168,7 @@ public final class EncryptPreparedStatement extends AbstractShardingPreparedStat
@SuppressWarnings("unchecked")
private SQLUnit getSQLUnit(final String sql) {
SQLStatement sqlStatement = runtimeContext.getSqlParserEngine().parse(sql, true);
sqlStatementContext = SQLStatementContextFactory.newInstance(runtimeContext.getMetaData().getTables(), sql, getParameters(), sqlStatement);
sqlStatementContext = SQLStatementContextFactory.newInstance(runtimeContext.getMetaData().getSchema(), sql, getParameters(), sqlStatement);
SQLRewriteContext sqlRewriteContext = new SQLRewriteEntry(runtimeContext.getMetaData(),
runtimeContext.getProperties()).createSQLRewriteContext(sql, getParameters(), sqlStatementContext, createSQLRewriteContextDecorator(runtimeContext.getRule()));
SQLRewriteResult sqlRewriteResult = new DefaultSQLRewriteEngine().rewrite(sqlRewriteContext);
......
......@@ -97,7 +97,7 @@ public final class EncryptStatement extends AbstractUnsupportedOperationStatemen
private String getRewriteSQL(final String sql) {
SQLStatement sqlStatement = runtimeContext.getSqlParserEngine().parse(sql, false);
sqlStatementContext = SQLStatementContextFactory.newInstance(runtimeContext.getMetaData().getTables(), sql, Collections.emptyList(), sqlStatement);
sqlStatementContext = SQLStatementContextFactory.newInstance(runtimeContext.getMetaData().getSchema(), sql, Collections.emptyList(), sqlStatement);
SQLRewriteContext sqlRewriteContext = new SQLRewriteEntry(runtimeContext.getMetaData(),
runtimeContext.getProperties()).createSQLRewriteContext(sql, Collections.emptyList(), sqlStatementContext, createSQLRewriteContextDecorator(runtimeContext.getRule()));
String result = new DefaultSQLRewriteEngine().rewrite(sqlRewriteContext).getSql();
......
......@@ -172,7 +172,7 @@ public final class ShadowPreparedStatement extends AbstractShardingPreparedState
@SuppressWarnings("unchecked")
private SQLUnit getSQLUnit(final String sql) {
SQLStatement sqlStatement = connection.getRuntimeContext().getSqlParserEngine().parse(sql, true);
SQLStatementContext sqlStatementContext = SQLStatementContextFactory.newInstance(connection.getRuntimeContext().getMetaData().getTables(), sql, getParameters(), sqlStatement);
SQLStatementContext sqlStatementContext = SQLStatementContextFactory.newInstance(connection.getRuntimeContext().getMetaData().getSchema(), sql, getParameters(), sqlStatement);
ShadowJudgementEngine shadowJudgementEngine = new PreparedJudgementEngine(connection.getRuntimeContext().getRule(), sqlStatementContext, getParameters());
isShadowSQL = shadowJudgementEngine.isShadowSQL();
SQLRewriteContext sqlRewriteContext = new SQLRewriteEntry(connection.getRuntimeContext().getMetaData(), connection.getRuntimeContext().getProperties())
......
......@@ -175,7 +175,7 @@ public final class ShadowStatement extends AbstractStatementAdapter {
private Statement getStatementAndReplay(final String sql) throws SQLException {
SQLStatement sqlStatement = connection.getRuntimeContext().getSqlParserEngine().parse(sql, false);
sqlStatementContext = SQLStatementContextFactory.newInstance(connection.getRuntimeContext().getMetaData().getTables(), sql, Collections.emptyList(), sqlStatement);
sqlStatementContext = SQLStatementContextFactory.newInstance(connection.getRuntimeContext().getMetaData().getSchema(), sql, Collections.emptyList(), sqlStatement);
ShadowJudgementEngine shadowJudgementEngine = new SimpleJudgementEngine(connection.getRuntimeContext().getRule(), sqlStatementContext);
isShadowSQL = shadowJudgementEngine.isShadowSQL();
Statement result = shadowStatementGenerator.createStatement();
......
......@@ -159,7 +159,7 @@ public final class ShardingPreparedStatement extends AbstractShardingPreparedSta
engines.put(encryptRule, new JDBCEncryptResultDecoratorEngine(resultSets.get(0).getMetaData()));
}
MergeEntry mergeEntry = new MergeEntry(connection.getRuntimeContext().getDatabaseType(),
connection.getRuntimeContext().getMetaData().getTables(), connection.getRuntimeContext().getProperties(), engines);
connection.getRuntimeContext().getMetaData().getSchema(), connection.getRuntimeContext().getProperties(), engines);
return mergeEntry.process(queryResults, shardingExecutionContext.getSqlStatementContext());
}
......
......@@ -136,7 +136,7 @@ public final class ShardingStatement extends AbstractStatementAdapter {
engines.put(encryptRule, new JDBCEncryptResultDecoratorEngine(resultSets.get(0).getMetaData()));
}
MergeEntry mergeEntry = new MergeEntry(connection.getRuntimeContext().getDatabaseType(),
connection.getRuntimeContext().getMetaData().getTables(), connection.getRuntimeContext().getProperties(), engines);
connection.getRuntimeContext().getMetaData().getSchema(), connection.getRuntimeContext().getProperties(), engines);
return mergeEntry.process(queryResults, shardingExecutionContext.getSqlStatementContext());
}
......
......@@ -141,7 +141,7 @@ public final class JDBCDatabaseCommunicationEngine implements DatabaseCommunicat
engines.put(encryptRule, new ProxyResultDecoratorEngine(((QueryResponse) response).getQueryHeaders()));
}
MergeEntry mergeEntry = new MergeEntry(
LogicSchemas.getInstance().getDatabaseType(), logicSchema.getMetaData().getTables(), ShardingProxyContext.getInstance().getProperties(), engines);
LogicSchemas.getInstance().getDatabaseType(), logicSchema.getMetaData().getSchema(), ShardingProxyContext.getInstance().getProperties(), engines);
return mergeEntry.process(queryResults, sqlStatementContext);
}
......
......@@ -33,7 +33,7 @@ import org.apache.shardingsphere.shardingproxy.backend.schema.impl.ShadowSchema;
import org.apache.shardingsphere.shardingproxy.backend.schema.impl.ShardingSchema;
import org.apache.shardingsphere.shardingproxy.context.ShardingProxyContext;
import org.apache.shardingsphere.sql.parser.binder.SQLStatementContextFactory;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.CommonSQLStatementContext;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.sql.parser.sql.statement.SQLStatement;
......@@ -96,7 +96,7 @@ public final class PreparedStatementExecutorWrapper implements JDBCExecutorWrapp
private ExecutionContext doMasterSlaveRoute(final String sql) {
SQLStatement sqlStatement = logicSchema.getSqlParserEngine().parse(sql, true);
CommonSQLStatementContext sqlStatementContext = new CommonSQLStatementContext(sqlStatement);
SQLRewriteContext sqlRewriteContext = new SQLRewriteContext(logicSchema.getMetaData().getTables(), sqlStatementContext, sql, parameters);
SQLRewriteContext sqlRewriteContext = new SQLRewriteContext(logicSchema.getMetaData().getSchema(), sqlStatementContext, sql, parameters);
sqlRewriteContext.generateSQLTokens();
String rewriteSQL = new DefaultSQLRewriteEngine().rewrite(sqlRewriteContext).getSql();
ExecutionContext result = new ExecutionContext(sqlStatementContext);
......@@ -111,8 +111,8 @@ public final class PreparedStatementExecutorWrapper implements JDBCExecutorWrapp
private ExecutionContext doEncryptRoute(final String sql) {
EncryptSchema encryptSchema = (EncryptSchema) logicSchema;
SQLStatement sqlStatement = encryptSchema.getSqlParserEngine().parse(sql, true);
TableMetas tableMetas = logicSchema.getMetaData().getTables();
SQLStatementContext sqlStatementContext = SQLStatementContextFactory.newInstance(tableMetas, sql, parameters, sqlStatement);
SchemaMetaData schemaMetaData = logicSchema.getMetaData().getSchema();
SQLStatementContext sqlStatementContext = SQLStatementContextFactory.newInstance(schemaMetaData, sql, parameters, sqlStatement);
SQLRewriteContext sqlRewriteContext = new SQLRewriteEntry(logicSchema.getMetaData(), ShardingProxyContext.getInstance().getProperties())
.createSQLRewriteContext(sql, parameters, sqlStatementContext, createSQLRewriteContextDecorator(encryptSchema.getEncryptRule()));
SQLRewriteResult sqlRewriteResult = new DefaultSQLRewriteEngine().rewrite(sqlRewriteContext);
......@@ -125,8 +125,8 @@ public final class PreparedStatementExecutorWrapper implements JDBCExecutorWrapp
private ExecutionContext doShadowRoute(final String sql) {
ShadowSchema shadowSchema = (ShadowSchema) logicSchema;
SQLStatement sqlStatement = shadowSchema.getSqlParserEngine().parse(sql, true);
TableMetas tableMetas = logicSchema.getMetaData().getTables();
SQLStatementContext sqlStatementContext = SQLStatementContextFactory.newInstance(tableMetas, sql, parameters, sqlStatement);
SchemaMetaData schemaMetaData = logicSchema.getMetaData().getSchema();
SQLStatementContext sqlStatementContext = SQLStatementContextFactory.newInstance(schemaMetaData, sql, parameters, sqlStatement);
ShadowJudgementEngine shadowJudgementEngine = new PreparedJudgementEngine(shadowSchema.getShadowRule(), sqlStatementContext, parameters);
SQLRewriteContext sqlRewriteContext = new SQLRewriteEntry(logicSchema.getMetaData(), ShardingProxyContext.getInstance().getProperties())
.createSQLRewriteContext(sql, parameters, sqlStatementContext, createSQLRewriteContextDecorator(shadowSchema.getShadowRule()));
......@@ -150,6 +150,7 @@ public final class PreparedStatementExecutorWrapper implements JDBCExecutorWrapp
return result;
}
@SuppressWarnings("unchecked")
private ExecutionContext doTransparentRoute(final String sql) {
SQLStatement sqlStatement = logicSchema.getSqlParserEngine().parse(sql, false);
ExecutionContext result = new ExecutionContext(new CommonSQLStatementContext(sqlStatement));
......
......@@ -33,7 +33,7 @@ import org.apache.shardingsphere.shardingproxy.backend.schema.impl.ShadowSchema;
import org.apache.shardingsphere.shardingproxy.backend.schema.impl.ShardingSchema;
import org.apache.shardingsphere.shardingproxy.context.ShardingProxyContext;
import org.apache.shardingsphere.sql.parser.binder.SQLStatementContextFactory;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.CommonSQLStatementContext;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.sql.parser.sql.statement.SQLStatement;
......@@ -94,7 +94,7 @@ public final class StatementExecutorWrapper implements JDBCExecutorWrapper {
private ExecutionContext doMasterSlaveRoute(final String sql) {
SQLStatement sqlStatement = logicSchema.getSqlParserEngine().parse(sql, false);
CommonSQLStatementContext sqlStatementContext = new CommonSQLStatementContext(sqlStatement);
SQLRewriteContext sqlRewriteContext = new SQLRewriteContext(logicSchema.getMetaData().getTables(), sqlStatementContext, sql, Collections.emptyList());
SQLRewriteContext sqlRewriteContext = new SQLRewriteContext(logicSchema.getMetaData().getSchema(), sqlStatementContext, sql, Collections.emptyList());
sqlRewriteContext.generateSQLTokens();
String rewriteSQL = new DefaultSQLRewriteEngine().rewrite(sqlRewriteContext).getSql();
ExecutionContext result = new ExecutionContext(sqlStatementContext);
......@@ -109,8 +109,8 @@ public final class StatementExecutorWrapper implements JDBCExecutorWrapper {
private ExecutionContext doEncryptRoute(final String sql) {
EncryptSchema encryptSchema = (EncryptSchema) logicSchema;
SQLStatement sqlStatement = encryptSchema.getSqlParserEngine().parse(sql, false);
TableMetas tableMetas = logicSchema.getMetaData().getTables();
SQLStatementContext sqlStatementContext = SQLStatementContextFactory.newInstance(tableMetas, sql, new LinkedList<>(), sqlStatement);
SchemaMetaData schemaMetaData = logicSchema.getMetaData().getSchema();
SQLStatementContext sqlStatementContext = SQLStatementContextFactory.newInstance(schemaMetaData, sql, new LinkedList<>(), sqlStatement);
SQLRewriteContext sqlRewriteContext = new SQLRewriteEntry(logicSchema.getMetaData(), ShardingProxyContext.getInstance().getProperties())
.createSQLRewriteContext(sql, Collections.emptyList(), sqlStatementContext, createSQLRewriteContextDecorator(encryptSchema.getEncryptRule()));
SQLRewriteResult sqlRewriteResult = new DefaultSQLRewriteEngine().rewrite(sqlRewriteContext);
......@@ -123,8 +123,8 @@ public final class StatementExecutorWrapper implements JDBCExecutorWrapper {
private ExecutionContext doShadowRoute(final String sql) {
ShadowSchema shadowSchema = (ShadowSchema) logicSchema;
SQLStatement sqlStatement = shadowSchema.getSqlParserEngine().parse(sql, true);
TableMetas tableMetas = logicSchema.getMetaData().getTables();
SQLStatementContext sqlStatementContext = SQLStatementContextFactory.newInstance(tableMetas, sql, new LinkedList<>(), sqlStatement);
SchemaMetaData schemaMetaData = logicSchema.getMetaData().getSchema();
SQLStatementContext sqlStatementContext = SQLStatementContextFactory.newInstance(schemaMetaData, sql, new LinkedList<>(), sqlStatement);
ShadowJudgementEngine shadowJudgementEngine = new SimpleJudgementEngine(shadowSchema.getShadowRule(), sqlStatementContext);
String dataSourceName = shadowJudgementEngine.isShadowSQL()
? shadowSchema.getShadowRule().getRuleConfiguration().getShadowMappings().get(logicSchema.getDataSources().keySet().iterator().next())
......
......@@ -81,7 +81,7 @@ public final class QueryHeader {
if (null != actualTableName && logicSchema instanceof ShardingSchema) {
Collection<String> logicTableNames = logicSchema.getShardingRule().getLogicTableNames(actualTableName);
table = logicTableNames.isEmpty() ? "" : logicTableNames.iterator().next();
TableMetaData tableMetaData = logicSchema.getMetaData().getTables().get(table);
TableMetaData tableMetaData = logicSchema.getMetaData().getSchema().get(table);
primaryKey = null != tableMetaData && tableMetaData.getColumns().get(resultSetMetaData.getColumnName(columnIndex).toLowerCase()).isPrimaryKey();
} else {
table = actualTableName;
......
......@@ -31,11 +31,12 @@ import org.apache.shardingsphere.shardingproxy.backend.schema.LogicSchema;
import org.apache.shardingsphere.shardingproxy.backend.schema.LogicSchemas;
import org.apache.shardingsphere.shardingproxy.config.yaml.YamlDataSourceParameter;
import org.apache.shardingsphere.shardingproxy.context.ShardingProxyContext;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetasLoader;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaDataLoader;
import org.apache.shardingsphere.underlying.common.constant.properties.PropertiesConstant;
import org.apache.shardingsphere.underlying.common.metadata.ShardingSphereMetaData;
import org.apache.shardingsphere.underlying.common.metadata.datasource.DataSourceMetas;
import org.apache.shardingsphere.underlying.common.metadata.decorator.SchemaMetaDataDecorator;
import javax.sql.DataSource;
import java.sql.SQLException;
......@@ -62,14 +63,14 @@ public final class EncryptSchema extends LogicSchema {
private ShardingSphereMetaData createMetaData() throws SQLException {
DataSourceMetas dataSourceMetas = new DataSourceMetas(LogicSchemas.getInstance().getDatabaseType(), getDatabaseAccessConfigurationMap());
TableMetas tableMetas = createTableMetas();
return new ShardingSphereMetaData(dataSourceMetas, tableMetas);
SchemaMetaData schemaMetaData = createSchemaMetaData();
return new ShardingSphereMetaData(dataSourceMetas, schemaMetaData);
}
private TableMetas createTableMetas() throws SQLException {
private SchemaMetaData createSchemaMetaData() throws SQLException {
DataSource dataSource = getBackendDataSource().getDataSources().values().iterator().next();
int maxConnectionsSizePerQuery = ShardingProxyContext.getInstance().getProperties().<Integer>getValue(PropertiesConstant.MAX_CONNECTIONS_SIZE_PER_QUERY);
return new EncryptTableMetaDataDecorator().decorate(TableMetasLoader.load(dataSource, maxConnectionsSizePerQuery), encryptRule);
return SchemaMetaDataDecorator.decorate(SchemaMetaDataLoader.load(dataSource, maxConnectionsSizePerQuery), encryptRule, new EncryptTableMetaDataDecorator());
}
/**
......
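Note for readers of this hunk: createSchemaMetaData() above now routes through the static SchemaMetaDataDecorator.decorate(...) call, but that class is not part of the shown hunks. The snippet below is only an illustrative sketch of the contract it appears to expose, assuming the per-table decorator interface is still named TableMetaDataDecorator with a decorate(TableMetaData, String, rule) method as used later in this diff; it is not the committed source.

// Illustrative sketch only, not the committed SchemaMetaDataDecorator.
// Assumes java.util.Map/HashMap and the binder metadata classes imported as elsewhere in this diff.
public final class SchemaMetaDataDecoratorSketch {

    public static <T> SchemaMetaData decorate(final SchemaMetaData schemaMetaData, final T rule, final TableMetaDataDecorator<T> decorator) {
        Map<String, TableMetaData> result = new HashMap<>(schemaMetaData.getAllTableNames().size(), 1);
        for (String each : schemaMetaData.getAllTableNames()) {
            // Delegate every table to the rule-specific decorator, e.g. EncryptTableMetaDataDecorator.
            result.put(each, decorator.decorate(schemaMetaData.get(each), each, rule));
        }
        return new SchemaMetaData(result);
    }
}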
......@@ -32,8 +32,8 @@ import org.apache.shardingsphere.shardingproxy.backend.schema.LogicSchema;
import org.apache.shardingsphere.shardingproxy.backend.schema.LogicSchemas;
import org.apache.shardingsphere.shardingproxy.config.yaml.YamlDataSourceParameter;
import org.apache.shardingsphere.shardingproxy.context.ShardingProxyContext;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetasLoader;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaDataLoader;
import org.apache.shardingsphere.underlying.common.constant.properties.PropertiesConstant;
import org.apache.shardingsphere.underlying.common.metadata.ShardingSphereMetaData;
import org.apache.shardingsphere.underlying.common.metadata.datasource.DataSourceMetas;
......@@ -69,14 +69,14 @@ public final class MasterSlaveSchema extends LogicSchema {
private ShardingSphereMetaData createMetaData() throws SQLException {
DataSourceMetas dataSourceMetas = new DataSourceMetas(LogicSchemas.getInstance().getDatabaseType(), getDatabaseAccessConfigurationMap());
TableMetas tableMetas = loadTableMetas();
return new ShardingSphereMetaData(dataSourceMetas, tableMetas);
SchemaMetaData schemaMetaData = loadSchemaMetaData();
return new ShardingSphereMetaData(dataSourceMetas, schemaMetaData);
}
private TableMetas loadTableMetas() throws SQLException {
private SchemaMetaData loadSchemaMetaData() throws SQLException {
DataSource dataSource = getBackendDataSource().getDataSources().values().iterator().next();
int maxConnectionsSizePerQuery = ShardingProxyContext.getInstance().getProperties().<Integer>getValue(PropertiesConstant.MAX_CONNECTIONS_SIZE_PER_QUERY);
return TableMetasLoader.load(dataSource, maxConnectionsSizePerQuery);
return SchemaMetaDataLoader.load(dataSource, maxConnectionsSizePerQuery);
}
/**
......
......@@ -30,8 +30,8 @@ import org.apache.shardingsphere.shardingproxy.backend.schema.LogicSchema;
import org.apache.shardingsphere.shardingproxy.backend.schema.LogicSchemas;
import org.apache.shardingsphere.shardingproxy.config.yaml.YamlDataSourceParameter;
import org.apache.shardingsphere.shardingproxy.context.ShardingProxyContext;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetasLoader;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaDataLoader;
import org.apache.shardingsphere.underlying.common.constant.properties.PropertiesConstant;
import org.apache.shardingsphere.underlying.common.metadata.ShardingSphereMetaData;
import org.apache.shardingsphere.underlying.common.metadata.datasource.DataSourceMetas;
......@@ -61,14 +61,14 @@ public final class ShadowSchema extends LogicSchema {
private ShardingSphereMetaData createMetaData() throws SQLException {
DataSourceMetas dataSourceMetas = new DataSourceMetas(LogicSchemas.getInstance().getDatabaseType(), getDatabaseAccessConfigurationMap());
TableMetas tableMetas = createTableMetas();
return new ShardingSphereMetaData(dataSourceMetas, tableMetas);
SchemaMetaData schemaMetaData = createSchemaMetaData();
return new ShardingSphereMetaData(dataSourceMetas, schemaMetaData);
}
private TableMetas createTableMetas() throws SQLException {
private SchemaMetaData createSchemaMetaData() throws SQLException {
DataSource dataSource = getBackendDataSource().getDataSources().values().iterator().next();
int maxConnectionsSizePerQuery = ShardingProxyContext.getInstance().getProperties().<Integer>getValue(PropertiesConstant.MAX_CONNECTIONS_SIZE_PER_QUERY);
return TableMetasLoader.load(dataSource, maxConnectionsSizePerQuery);
return SchemaMetaDataLoader.load(dataSource, maxConnectionsSizePerQuery);
}
/**
......
......@@ -21,6 +21,8 @@ import com.google.common.eventbus.Subscribe;
import lombok.Getter;
import org.apache.shardingsphere.api.config.sharding.ShardingRuleConfiguration;
import org.apache.shardingsphere.core.log.ConfigurationLogger;
import org.apache.shardingsphere.core.metadata.ShardingMetaDataLoader;
import org.apache.shardingsphere.core.metadata.ShardingTableMetaDataDecorator;
import org.apache.shardingsphere.core.rule.MasterSlaveRule;
import org.apache.shardingsphere.core.rule.ShardingRule;
import org.apache.shardingsphere.encrypt.metadata.EncryptTableMetaDataDecorator;
......@@ -29,15 +31,13 @@ import org.apache.shardingsphere.orchestration.core.common.rule.OrchestrationMas
import org.apache.shardingsphere.orchestration.core.common.rule.OrchestrationShardingRule;
import org.apache.shardingsphere.orchestration.core.registrycenter.event.DisabledStateChangedEvent;
import org.apache.shardingsphere.orchestration.core.registrycenter.schema.OrchestrationShardingSchema;
import org.apache.shardingsphere.core.metadata.ShardingTableMetaDataDecorator;
import org.apache.shardingsphere.core.metadata.ShardingTableMetasLoader;
import org.apache.shardingsphere.shardingproxy.backend.schema.LogicSchema;
import org.apache.shardingsphere.shardingproxy.backend.schema.LogicSchemas;
import org.apache.shardingsphere.shardingproxy.config.yaml.YamlDataSourceParameter;
import org.apache.shardingsphere.shardingproxy.context.ShardingProxyContext;
import org.apache.shardingsphere.sql.parser.binder.metadata.index.IndexMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.sql.parser.binder.statement.ddl.AlterTableStatementContext;
import org.apache.shardingsphere.sql.parser.binder.statement.ddl.CreateIndexStatementContext;
......@@ -54,6 +54,7 @@ import org.apache.shardingsphere.sql.parser.sql.statement.ddl.DropTableStatement
import org.apache.shardingsphere.underlying.common.constant.properties.PropertiesConstant;
import org.apache.shardingsphere.underlying.common.metadata.ShardingSphereMetaData;
import org.apache.shardingsphere.underlying.common.metadata.datasource.DataSourceMetas;
import org.apache.shardingsphere.underlying.common.metadata.decorator.SchemaMetaDataDecorator;
import java.sql.SQLException;
import java.util.Collection;
......@@ -84,8 +85,8 @@ public final class ShardingSchema extends LogicSchema {
private ShardingSphereMetaData createMetaData() throws SQLException {
DataSourceMetas dataSourceMetas = new DataSourceMetas(LogicSchemas.getInstance().getDatabaseType(), getDatabaseAccessConfigurationMap());
TableMetas tableMetas = loadTableMetas();
return new ShardingSphereMetaData(dataSourceMetas, tableMetas);
SchemaMetaData schemaMetaData = loadSchemaMetaData();
return new ShardingSphereMetaData(dataSourceMetas, schemaMetaData);
}
/**
......@@ -136,24 +137,24 @@ public final class ShardingSchema extends LogicSchema {
private void refreshTableMetaData(final CreateTableStatement createTableStatement) throws SQLException {
String tableName = createTableStatement.getTable().getTableName().getIdentifier().getValue();
getMetaData().getTables().put(tableName, loadTableMeta(tableName));
getMetaData().getSchema().put(tableName, loadTableMeta(tableName));
}
private void refreshTableMetaData(final AlterTableStatement alterTableStatement) throws SQLException {
String tableName = alterTableStatement.getTable().getTableName().getIdentifier().getValue();
getMetaData().getTables().put(tableName, loadTableMeta(tableName));
getMetaData().getSchema().put(tableName, loadTableMeta(tableName));
}
private void refreshTableMetaData(final DropTableStatement dropTableStatement) {
for (SimpleTableSegment each : dropTableStatement.getTables()) {
getMetaData().getTables().remove(each.getTableName().getIdentifier().getValue());
getMetaData().getSchema().remove(each.getTableName().getIdentifier().getValue());
}
}
private void refreshTableMetaData(final CreateIndexStatement createIndexStatement) {
if (null != createIndexStatement.getIndex()) {
String indexName = createIndexStatement.getIndex().getIdentifier().getValue();
getMetaData().getTables().get(createIndexStatement.getTable().getTableName().getIdentifier().getValue()).getIndexes().put(indexName, new IndexMetaData(indexName));
getMetaData().getSchema().get(createIndexStatement.getTable().getTableName().getIdentifier().getValue()).getIndexes().put(indexName, new IndexMetaData(indexName));
}
}
......@@ -161,23 +162,23 @@ public final class ShardingSchema extends LogicSchema {
Collection<String> indexNames = getIndexNames(dropIndexStatement);
if (null != dropIndexStatement.getTable()) {
for (String each : indexNames) {
getMetaData().getTables().get(dropIndexStatement.getTable().getTableName().getIdentifier().getValue()).getIndexes().remove(each);
getMetaData().getSchema().get(dropIndexStatement.getTable().getTableName().getIdentifier().getValue()).getIndexes().remove(each);
}
}
for (String each : indexNames) {
if (findLogicTableName(getMetaData().getTables(), each).isPresent()) {
getMetaData().getTables().get(dropIndexStatement.getTable().getTableName().getIdentifier().getValue()).getIndexes().remove(each);
if (findLogicTableName(getMetaData().getSchema(), each).isPresent()) {
getMetaData().getSchema().get(dropIndexStatement.getTable().getTableName().getIdentifier().getValue()).getIndexes().remove(each);
}
}
}
private TableMetas loadTableMetas() throws SQLException {
private SchemaMetaData loadSchemaMetaData() throws SQLException {
int maxConnectionsSizePerQuery = ShardingProxyContext.getInstance().getProperties().<Integer>getValue(PropertiesConstant.MAX_CONNECTIONS_SIZE_PER_QUERY);
boolean isCheckingMetaData = ShardingProxyContext.getInstance().getProperties().<Boolean>getValue(PropertiesConstant.CHECK_TABLE_METADATA_ENABLED);
TableMetas result = new ShardingTableMetasLoader(getBackendDataSource().getDataSources(), shardingRule, maxConnectionsSizePerQuery, isCheckingMetaData).load();
result = new ShardingTableMetaDataDecorator().decorate(result, shardingRule);
SchemaMetaData result = new ShardingMetaDataLoader(getBackendDataSource().getDataSources(), shardingRule, maxConnectionsSizePerQuery, isCheckingMetaData).load();
result = SchemaMetaDataDecorator.decorate(result, shardingRule, new ShardingTableMetaDataDecorator());
if (!shardingRule.getEncryptRule().getEncryptTableNames().isEmpty()) {
result = new EncryptTableMetaDataDecorator().decorate(result, shardingRule.getEncryptRule());
result = SchemaMetaDataDecorator.decorate(result, shardingRule.getEncryptRule(), new EncryptTableMetaDataDecorator());
}
return result;
}
......@@ -185,7 +186,7 @@ public final class ShardingSchema extends LogicSchema {
private TableMetaData loadTableMeta(final String tableName) throws SQLException {
int maxConnectionsSizePerQuery = ShardingProxyContext.getInstance().getProperties().<Integer>getValue(PropertiesConstant.MAX_CONNECTIONS_SIZE_PER_QUERY);
boolean isCheckingMetaData = ShardingProxyContext.getInstance().getProperties().<Boolean>getValue(PropertiesConstant.CHECK_TABLE_METADATA_ENABLED);
TableMetaData result = new ShardingTableMetasLoader(getBackendDataSource().getDataSources(), shardingRule, maxConnectionsSizePerQuery, isCheckingMetaData).load(tableName);
TableMetaData result = new ShardingMetaDataLoader(getBackendDataSource().getDataSources(), shardingRule, maxConnectionsSizePerQuery, isCheckingMetaData).load(tableName);
result = new ShardingTableMetaDataDecorator().decorate(result, tableName, shardingRule);
if (!shardingRule.getEncryptRule().getEncryptTableNames().isEmpty()) {
result = new EncryptTableMetaDataDecorator().decorate(result, tableName, shardingRule.getEncryptRule());
......@@ -201,9 +202,9 @@ public final class ShardingSchema extends LogicSchema {
return result;
}
private Optional<String> findLogicTableName(final TableMetas tableMetas, final String logicIndexName) {
for (String each : tableMetas.getAllTableNames()) {
if (tableMetas.get(each).getIndexes().containsKey(logicIndexName)) {
private Optional<String> findLogicTableName(final SchemaMetaData schemaMetaData, final String logicIndexName) {
for (String each : schemaMetaData.getAllTableNames()) {
if (schemaMetaData.get(each).getIndexes().containsKey(logicIndexName)) {
return Optional.of(each);
}
}
......
......@@ -24,8 +24,8 @@ import org.apache.shardingsphere.shardingproxy.backend.schema.LogicSchema;
import org.apache.shardingsphere.shardingproxy.backend.schema.LogicSchemas;
import org.apache.shardingsphere.shardingproxy.config.yaml.YamlDataSourceParameter;
import org.apache.shardingsphere.shardingproxy.context.ShardingProxyContext;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetasLoader;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaDataLoader;
import org.apache.shardingsphere.underlying.common.constant.properties.PropertiesConstant;
import org.apache.shardingsphere.underlying.common.metadata.ShardingSphereMetaData;
import org.apache.shardingsphere.underlying.common.metadata.datasource.DataSourceMetas;
......@@ -53,13 +53,13 @@ public final class TransparentSchema extends LogicSchema {
private ShardingSphereMetaData createMetaData() throws SQLException {
DataSourceMetas dataSourceMetas = new DataSourceMetas(LogicSchemas.getInstance().getDatabaseType(), getDatabaseAccessConfigurationMap());
TableMetas tableMetas = loadTableMetas();
return new ShardingSphereMetaData(dataSourceMetas, tableMetas);
SchemaMetaData schemaMetaData = loadSchemaMetaData();
return new ShardingSphereMetaData(dataSourceMetas, schemaMetaData);
}
private TableMetas loadTableMetas() throws SQLException {
private SchemaMetaData loadSchemaMetaData() throws SQLException {
DataSource dataSource = getBackendDataSource().getDataSources().values().iterator().next();
int maxConnectionsSizePerQuery = ShardingProxyContext.getInstance().getProperties().<Integer>getValue(PropertiesConstant.MAX_CONNECTIONS_SIZE_PER_QUERY);
return TableMetasLoader.load(dataSource, maxConnectionsSizePerQuery);
return SchemaMetaDataLoader.load(dataSource, maxConnectionsSizePerQuery);
}
}
......@@ -29,7 +29,7 @@ import org.apache.shardingsphere.underlying.common.metadata.ShardingSphereMetaDa
import org.apache.shardingsphere.sql.parser.binder.metadata.column.ColumnMetaData;
import org.apache.shardingsphere.underlying.common.metadata.datasource.DataSourceMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.junit.Test;
import java.sql.ResultSetMetaData;
......@@ -127,10 +127,10 @@ public final class QueryHeaderTest {
private ShardingSchema getShardingSchema() {
ShardingSchema result = mock(ShardingSchema.class);
ColumnMetaData columnMetaData = new ColumnMetaData("order_id", "int", true);
TableMetas tableMetas = mock(TableMetas.class);
when(tableMetas.get("t_logic_order")).thenReturn(new TableMetaData(Collections.singletonList(columnMetaData), Collections.singletonList(new IndexMetaData("order_id"))));
SchemaMetaData schemaMetaData = mock(SchemaMetaData.class);
when(schemaMetaData.get("t_logic_order")).thenReturn(new TableMetaData(Collections.singletonList(columnMetaData), Collections.singletonList(new IndexMetaData("order_id"))));
ShardingSphereMetaData metaData = mock(ShardingSphereMetaData.class);
when(metaData.getTables()).thenReturn(tableMetas);
when(metaData.getSchema()).thenReturn(schemaMetaData);
DataSourceMetas dataSourceMetas = mock(DataSourceMetas.class);
when(dataSourceMetas.getDataSourceMetaData("ds_0")).thenReturn(mock(DataSourceMetaData.class));
when(metaData.getDataSources()).thenReturn(dataSourceMetas);
......
......@@ -19,7 +19,7 @@ package org.apache.shardingsphere.sql.parser.binder;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.CommonSQLStatementContext;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.sql.parser.binder.statement.dal.DescribeStatementContext;
......@@ -75,16 +75,16 @@ public final class SQLStatementContextFactory {
/**
* Create SQL statement context.
*
* @param tableMetas table metas
* @param schemaMetaData schema meta data
* @param sql SQL
* @param parameters SQL parameters
* @param sqlStatement SQL statement
* @return SQL statement context
*/
@SuppressWarnings("unchecked")
public static SQLStatementContext newInstance(final TableMetas tableMetas, final String sql, final List<Object> parameters, final SQLStatement sqlStatement) {
public static SQLStatementContext newInstance(final SchemaMetaData schemaMetaData, final String sql, final List<Object> parameters, final SQLStatement sqlStatement) {
if (sqlStatement instanceof DMLStatement) {
return getDMLStatementContext(tableMetas, sql, parameters, (DMLStatement) sqlStatement);
return getDMLStatementContext(schemaMetaData, sql, parameters, (DMLStatement) sqlStatement);
}
if (sqlStatement instanceof DDLStatement) {
return getDDLStatementContext((DDLStatement) sqlStatement);
......@@ -98,9 +98,9 @@ public final class SQLStatementContextFactory {
return new CommonSQLStatementContext(sqlStatement);
}
private static SQLStatementContext getDMLStatementContext(final TableMetas tableMetas, final String sql, final List<Object> parameters, final DMLStatement sqlStatement) {
private static SQLStatementContext getDMLStatementContext(final SchemaMetaData schemaMetaData, final String sql, final List<Object> parameters, final DMLStatement sqlStatement) {
if (sqlStatement instanceof SelectStatement) {
return new SelectStatementContext(tableMetas, sql, parameters, (SelectStatement) sqlStatement);
return new SelectStatementContext(schemaMetaData, sql, parameters, (SelectStatement) sqlStatement);
}
if (sqlStatement instanceof UpdateStatement) {
return new UpdateStatementContext((UpdateStatement) sqlStatement);
......@@ -109,7 +109,7 @@ public final class SQLStatementContextFactory {
return new DeleteStatementContext((DeleteStatement) sqlStatement);
}
if (sqlStatement instanceof InsertStatement) {
return new InsertStatementContext(tableMetas, parameters, (InsertStatement) sqlStatement);
return new InsertStatementContext(schemaMetaData, parameters, (InsertStatement) sqlStatement);
}
throw new UnsupportedOperationException(String.format("Unsupported SQL statement `%s`", sqlStatement.getClass().getSimpleName()));
}
......
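For orientation, a hedged usage sketch of the refactored factory entry point follows; the SQL text, parameter value, and sqlParserEngine variable are placeholders rather than part of the commit.

// Placeholder SQL, parameter and engine; only the newInstance signature comes from this diff.
String sql = "SELECT order_id FROM t_order WHERE user_id = ?";
SQLStatement sqlStatement = sqlParserEngine.parse(sql, true);
SQLStatementContext sqlStatementContext =
        SQLStatementContextFactory.newInstance(schemaMetaData, sql, Collections.singletonList(10), sqlStatement);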
......@@ -15,9 +15,10 @@
* limitations under the License.
*/
package org.apache.shardingsphere.sql.parser.binder.metadata.table;
package org.apache.shardingsphere.sql.parser.binder.metadata.schema;
import lombok.Getter;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaData;
import java.util.ArrayList;
import java.util.Collection;
......@@ -28,20 +29,29 @@ import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
/**
* Table metas.
* Schema meta data.
*/
@Getter
public final class TableMetas {
public final class SchemaMetaData {
private final Map<String, TableMetaData> tables;
public TableMetas(final Map<String, TableMetaData> tables) {
public SchemaMetaData(final Map<String, TableMetaData> tables) {
this.tables = new ConcurrentHashMap<>(tables.size(), 1);
for (Entry<String, TableMetaData> entry : tables.entrySet()) {
this.tables.put(entry.getKey().toLowerCase(), entry.getValue());
}
}
/**
* Get all table names.
*
* @return all table names
*/
public Collection<String> getAllTableNames() {
return tables.keySet();
}
/**
* Get table meta data via table name.
*
......@@ -101,13 +111,4 @@ public final class TableMetas {
public List<String> getAllColumnNames(final String tableName) {
return containsTable(tableName) ? new ArrayList<>(get(tableName).getColumns().keySet()) : Collections.emptyList();
}
/**
* Get all table names.
*
* @return all table names
*/
public Collection<String> getAllTableNames() {
return tables.keySet();
}
}
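As the constructor above shows, table keys are stored lower-cased. A minimal sketch of that behavior, using a made-up table name:

// Keys are lower-cased by the constructor shown above, so lookups use the lower-cased name.
Map<String, TableMetaData> tables = new HashMap<>();
tables.put("T_ORDER", new TableMetaData(Collections.emptyList(), Collections.emptyList()));
SchemaMetaData schemaMetaData = new SchemaMetaData(tables);
// getAllTableNames() therefore reports "t_order".
boolean containsLowerCase = schemaMetaData.getAllTableNames().contains("t_order");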
......@@ -15,13 +15,14 @@
* limitations under the License.
*/
package org.apache.shardingsphere.sql.parser.binder.metadata.table;
package org.apache.shardingsphere.sql.parser.binder.metadata.schema;
import com.google.common.collect.Lists;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import org.apache.shardingsphere.sql.parser.binder.metadata.column.ColumnMetaDataLoader;
import org.apache.shardingsphere.sql.parser.binder.metadata.index.IndexMetaDataLoader;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaData;
import javax.sql.DataSource;
import java.sql.Connection;
......@@ -39,45 +40,31 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Future;
/**
* Table metas loader.
* Schema meta data loader.
*/
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public final class TableMetasLoader {
public final class SchemaMetaDataLoader {
private static final String TABLE_TYPE = "TABLE";
private static final String TABLE_NAME = "TABLE_NAME";
/**
* Load table meta data.
*
* @param dataSource data source
* @param table table name
* @return table meta data
* @throws SQLException SQL exception
*/
public static TableMetaData load(final DataSource dataSource, final String table) throws SQLException {
try (Connection connection = dataSource.getConnection()) {
return new TableMetaData(ColumnMetaDataLoader.load(connection, table), IndexMetaDataLoader.load(connection, table));
}
}
/**
* Load table metas.
* Load schema meta data.
*
* @param dataSource data source
* @param maxConnectionCount count of max connections permitted to use for this query
* @return table metas
* @return schema meta data
* @throws SQLException SQL exception
*/
public static TableMetas load(final DataSource dataSource, final int maxConnectionCount) throws SQLException {
public static SchemaMetaData load(final DataSource dataSource, final int maxConnectionCount) throws SQLException {
List<String> tableNames;
try (Connection connection = dataSource.getConnection()) {
tableNames = loadAllTableNames(connection);
}
List<List<String>> tableGroups = Lists.partition(tableNames, Math.max(tableNames.size() / maxConnectionCount, 1));
if (1 == tableGroups.size()) {
return new TableMetas(load(dataSource.getConnection(), tableGroups.get(0)));
return new SchemaMetaData(load(dataSource.getConnection(), tableGroups.get(0)));
}
Map<String, TableMetaData> result = new ConcurrentHashMap<>(tableNames.size(), 1);
ExecutorService executorService = Executors.newFixedThreadPool(maxConnectionCount);
......@@ -96,7 +83,7 @@ public final class TableMetasLoader {
}
}
return new TableMetas(result);
return new SchemaMetaData(result);
}
private static Map<String, TableMetaData> load(final Connection connection, final Collection<String> tables) throws SQLException {
......@@ -107,7 +94,6 @@ public final class TableMetasLoader {
}
return result;
}
}
private static List<String> loadAllTableNames(final Connection connection) throws SQLException {
......
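A hedged usage sketch of the renamed schema loader; the dataSource variable and the connection limit are placeholders, and the caller is assumed to handle the declared SQLException.

// Placeholder data source; load(dataSource, maxConnectionCount) is the signature shown above.
SchemaMetaData schemaMetaData = SchemaMetaDataLoader.load(dataSource, 4);
for (String each : schemaMetaData.getAllTableNames()) {
    // Print every loaded table together with its column names.
    System.out.println(each + " -> " + schemaMetaData.getAllColumnNames(each));
}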
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.sql.parser.binder.metadata.table;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import org.apache.shardingsphere.sql.parser.binder.metadata.column.ColumnMetaDataLoader;
import org.apache.shardingsphere.sql.parser.binder.metadata.index.IndexMetaDataLoader;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
/**
* Table meta data loader.
*/
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public final class TableMetaDataLoader {
/**
* Load table meta data.
*
* @param dataSource data source
* @param table table name
* @return table meta data
* @throws SQLException SQL exception
*/
public static TableMetaData load(final DataSource dataSource, final String table) throws SQLException {
try (Connection connection = dataSource.getConnection()) {
return new TableMetaData(ColumnMetaDataLoader.load(connection, table), IndexMetaDataLoader.load(connection, table));
}
}
}
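A hedged usage sketch of the single-table loader above; the table and column names are placeholders, and SQLException handling is left to the caller.

// "t_order" and "order_id" are placeholder names, not taken from the commit.
TableMetaData tableMetaData = TableMetaDataLoader.load(dataSource, "t_order");
boolean hasOrderIdColumn = tableMetaData.getColumns().containsKey("order_id");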
......@@ -18,7 +18,7 @@
package org.apache.shardingsphere.sql.parser.binder.segment.select.projection.engine;
import lombok.RequiredArgsConstructor;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.segment.select.projection.DerivedColumn;
import org.apache.shardingsphere.sql.parser.binder.segment.select.projection.Projection;
import org.apache.shardingsphere.sql.parser.binder.segment.select.projection.impl.AggregationDistinctProjection;
......@@ -47,7 +47,7 @@ import java.util.stream.Collectors;
@RequiredArgsConstructor
public final class ProjectionEngine {
private final TableMetas tableMetas;
private final SchemaMetaData schemaMetaData;
private int aggregationAverageDerivedColumnCount;
......@@ -124,7 +124,7 @@ public final class ProjectionEngine {
private Collection<ColumnProjection> getUnqualifiedShorthandColumns(final Collection<SimpleTableSegment> tables) {
Collection<ColumnProjection> result = new LinkedList<>();
for (SimpleTableSegment each : tables) {
result.addAll(tableMetas.getAllColumnNames(
result.addAll(schemaMetaData.getAllColumnNames(
each.getTableName().getIdentifier().getValue()).stream().map(columnName -> new ColumnProjection(null, columnName, null)).collect(Collectors.toList()));
}
return result;
......@@ -134,7 +134,7 @@ public final class ProjectionEngine {
for (SimpleTableSegment each : tables) {
String tableName = each.getTableName().getIdentifier().getValue();
if (owner.equalsIgnoreCase(each.getAlias().orElse(tableName))) {
return tableMetas.getAllColumnNames(tableName).stream().map(columnName -> new ColumnProjection(owner, columnName, null)).collect(Collectors.toList());
return schemaMetaData.getAllColumnNames(tableName).stream().map(columnName -> new ColumnProjection(owner, columnName, null)).collect(Collectors.toList());
}
}
return Collections.emptyList();
......
......@@ -18,7 +18,7 @@
package org.apache.shardingsphere.sql.parser.binder.segment.select.projection.engine;
import com.google.common.base.Preconditions;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.segment.select.groupby.GroupByContext;
import org.apache.shardingsphere.sql.parser.binder.segment.select.orderby.OrderByContext;
import org.apache.shardingsphere.sql.parser.binder.segment.select.orderby.OrderByItem;
......@@ -45,13 +45,13 @@ import java.util.Optional;
*/
public final class ProjectionsContextEngine {
private final TableMetas tableMetas;
private final SchemaMetaData schemaMetaData;
private final ProjectionEngine projectionEngine;
public ProjectionsContextEngine(final TableMetas tableMetas) {
this.tableMetas = tableMetas;
projectionEngine = new ProjectionEngine(tableMetas);
public ProjectionsContextEngine(final SchemaMetaData schemaMetaData) {
this.schemaMetaData = schemaMetaData;
projectionEngine = new ProjectionEngine(schemaMetaData);
}
/**
......@@ -177,7 +177,7 @@ public final class ProjectionsContextEngine {
private boolean isSameProjection(final ShorthandProjection shorthandProjection, final ColumnOrderByItemSegment orderItem, final SelectStatement selectStatement) {
Preconditions.checkState(shorthandProjection.getOwner().isPresent());
SimpleTableSegment tableSegment = find(shorthandProjection.getOwner().get(), selectStatement);
return tableMetas.containsColumn(tableSegment.getTableName().getIdentifier().getValue(), orderItem.getColumn().getIdentifier().getValue());
return schemaMetaData.containsColumn(tableSegment.getTableName().getIdentifier().getValue(), orderItem.getColumn().getIdentifier().getValue());
}
private boolean isSameAlias(final Projection projection, final TextOrderByItemSegment orderItem) {
......
......@@ -18,7 +18,7 @@
package org.apache.shardingsphere.sql.parser.binder.segment.table;
import lombok.RequiredArgsConstructor;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.sql.segment.dml.predicate.PredicateSegment;
import org.apache.shardingsphere.sql.parser.sql.segment.generic.table.SimpleTableSegment;
......@@ -56,17 +56,17 @@ public final class TablesContext {
* Find table name.
*
* @param predicate predicate
* @param tableMetas table metas
* @param schemaMetaData schema meta data
* @return table name
*/
public Optional<String> findTableName(final PredicateSegment predicate, final TableMetas tableMetas) {
public Optional<String> findTableName(final PredicateSegment predicate, final SchemaMetaData schemaMetaData) {
if (1 == tables.size()) {
return Optional.of(tables.iterator().next().getTableName().getIdentifier().getValue());
}
if (predicate.getColumn().getOwner().isPresent()) {
return Optional.of(findTableNameFromSQL(predicate.getColumn().getOwner().get().getIdentifier().getValue()));
}
return findTableNameFromMetaData(predicate.getColumn().getIdentifier().getValue(), tableMetas);
return findTableNameFromMetaData(predicate.getColumn().getIdentifier().getValue(), schemaMetaData);
}
private String findTableNameFromSQL(final String tableNameOrAlias) {
......@@ -78,9 +78,9 @@ public final class TablesContext {
throw new IllegalStateException("Can not find owner from table.");
}
private Optional<String> findTableNameFromMetaData(final String columnName, final TableMetas tableMetas) {
private Optional<String> findTableNameFromMetaData(final String columnName, final SchemaMetaData schemaMetaData) {
for (SimpleTableSegment each : tables) {
if (tableMetas.containsColumn(each.getTableName().getIdentifier().getValue(), columnName)) {
if (schemaMetaData.containsColumn(each.getTableName().getIdentifier().getValue(), columnName)) {
return Optional.of(each.getTableName().getIdentifier().getValue());
}
}
......
......@@ -19,7 +19,7 @@ package org.apache.shardingsphere.sql.parser.binder.statement.dml;
import lombok.Getter;
import lombok.ToString;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.segment.insert.InsertValueContext;
import org.apache.shardingsphere.sql.parser.binder.segment.table.TablesContext;
import org.apache.shardingsphere.sql.parser.binder.statement.CommonSQLStatementContext;
......@@ -47,10 +47,10 @@ public final class InsertStatementContext extends CommonSQLStatementContext<Inse
private final List<InsertValueContext> insertValueContexts;
public InsertStatementContext(final TableMetas tableMetas, final List<Object> parameters, final InsertStatement sqlStatement) {
public InsertStatementContext(final SchemaMetaData schemaMetaData, final List<Object> parameters, final InsertStatement sqlStatement) {
super(sqlStatement);
tablesContext = new TablesContext(sqlStatement.getTable());
columnNames = sqlStatement.useDefaultColumns() ? tableMetas.getAllColumnNames(sqlStatement.getTable().getTableName().getIdentifier().getValue()) : sqlStatement.getColumnNames();
columnNames = sqlStatement.useDefaultColumns() ? schemaMetaData.getAllColumnNames(sqlStatement.getTable().getTableName().getIdentifier().getValue()) : sqlStatement.getColumnNames();
insertValueContexts = getInsertValueContexts(parameters);
}
......
......@@ -20,7 +20,7 @@ package org.apache.shardingsphere.sql.parser.binder.statement.dml;
import com.google.common.base.Preconditions;
import lombok.Getter;
import lombok.ToString;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.segment.select.groupby.GroupByContext;
import org.apache.shardingsphere.sql.parser.binder.segment.select.groupby.engine.GroupByContextEngine;
import org.apache.shardingsphere.sql.parser.binder.segment.select.orderby.OrderByContext;
......@@ -91,12 +91,12 @@ public final class SelectStatementContext extends CommonSQLStatementContext<Sele
containsSubquery = containsSubquery();
}
public SelectStatementContext(final TableMetas tableMetas, final String sql, final List<Object> parameters, final SelectStatement sqlStatement) {
public SelectStatementContext(final SchemaMetaData schemaMetaData, final String sql, final List<Object> parameters, final SelectStatement sqlStatement) {
super(sqlStatement);
tablesContext = new TablesContext(sqlStatement.getSimpleTableSegments());
groupByContext = new GroupByContextEngine().createGroupByContext(sqlStatement);
orderByContext = new OrderByContextEngine().createOrderBy(sqlStatement, groupByContext);
projectionsContext = new ProjectionsContextEngine(tableMetas).createProjectionsContext(sql, sqlStatement, groupByContext, orderByContext);
projectionsContext = new ProjectionsContextEngine(schemaMetaData).createProjectionsContext(sql, sqlStatement, groupByContext, orderByContext);
paginationContext = new PaginationContextEngine().createPaginationContext(sqlStatement, projectionsContext, parameters);
containsSubquery = containsSubquery();
}
......
......@@ -20,6 +20,7 @@ package org.apache.shardingsphere.sql.parser.binder.metadata.table;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Sets;
import org.apache.shardingsphere.sql.parser.binder.metadata.column.ColumnMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.junit.Test;
import java.util.Collections;
......@@ -30,55 +31,55 @@ import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
public final class TableMetasTest {
public final class SchemaMetaDataTest {
@Test
public void assertGetAllTableNames() {
assertThat(new SchemaMetaData(ImmutableMap.of("tbl", mock(TableMetaData.class))).getAllTableNames(), is(Sets.newHashSet("tbl")));
}
@Test
public void assertGet() {
TableMetaData tableMetaData = mock(TableMetaData.class);
TableMetas tableMetas = new TableMetas(ImmutableMap.of("tbl", tableMetaData));
assertThat(tableMetas.get("tbl"), is(tableMetaData));
SchemaMetaData schemaMetaData = new SchemaMetaData(ImmutableMap.of("tbl", tableMetaData));
assertThat(schemaMetaData.get("tbl"), is(tableMetaData));
}
@Test
public void assertPut() {
TableMetas tableMetas = new TableMetas(Collections.emptyMap());
SchemaMetaData schemaMetaData = new SchemaMetaData(Collections.emptyMap());
TableMetaData tableMetaData = mock(TableMetaData.class);
tableMetas.put("tbl", tableMetaData);
assertThat(tableMetas.get("tbl"), is(tableMetaData));
schemaMetaData.put("tbl", tableMetaData);
assertThat(schemaMetaData.get("tbl"), is(tableMetaData));
}
@Test
public void assertRemove() {
TableMetas tableMetas = new TableMetas(ImmutableMap.of("tbl", mock(TableMetaData.class)));
tableMetas.remove("tbl");
assertNull(tableMetas.get("tbl"));
SchemaMetaData schemaMetaData = new SchemaMetaData(ImmutableMap.of("tbl", mock(TableMetaData.class)));
schemaMetaData.remove("tbl");
assertNull(schemaMetaData.get("tbl"));
}
@Test
public void assertContainsTable() {
assertTrue(new TableMetas(ImmutableMap.of("tbl", mock(TableMetaData.class))).containsTable("tbl"));
assertTrue(new SchemaMetaData(ImmutableMap.of("tbl", mock(TableMetaData.class))).containsTable("tbl"));
}
@Test
public void assertContainsColumn() {
TableMetaData tableMetaData = new TableMetaData(Collections.singletonList(new ColumnMetaData("col", "dataType", false)), Collections.emptyList());
assertTrue(new TableMetas(ImmutableMap.of("tbl", tableMetaData)).containsColumn("tbl", "col"));
assertTrue(new SchemaMetaData(ImmutableMap.of("tbl", tableMetaData)).containsColumn("tbl", "col"));
}
@Test
public void assertGetAllColumnNamesWhenContainsKey() {
TableMetaData tableMetaData = new TableMetaData(Collections.singletonList(new ColumnMetaData("col", "dataType", false)), Collections.emptyList());
assertThat(new TableMetas(ImmutableMap.of("tbl", tableMetaData)).getAllColumnNames("tbl"), is(Collections.singletonList("col")));
assertThat(new SchemaMetaData(ImmutableMap.of("tbl", tableMetaData)).getAllColumnNames("tbl"), is(Collections.singletonList("col")));
}
@Test
public void assertGetAllColumnNamesWhenNotContainsKey() {
TableMetaData tableMetaData = new TableMetaData(Collections.singletonList(new ColumnMetaData("col", "dataType", false)), Collections.emptyList());
assertThat(new TableMetas(ImmutableMap.of("tbl1", tableMetaData)).getAllColumnNames("tbl2"), is(Collections.<String>emptyList()));
}
@Test
public void assertGetAllTableNames() {
assertThat(new TableMetas(ImmutableMap.of("tbl", mock(TableMetaData.class))).getAllTableNames(), is(Sets.newHashSet("tbl")));
assertThat(new SchemaMetaData(ImmutableMap.of("tbl1", tableMetaData)).getAllColumnNames("tbl2"), is(Collections.<String>emptyList()));
}
}
......@@ -17,7 +17,7 @@
package org.apache.shardingsphere.sql.parser.binder.segment.select.projection.engine;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.segment.select.projection.Projection;
import org.apache.shardingsphere.sql.parser.binder.segment.select.projection.impl.AggregationDistinctProjection;
import org.apache.shardingsphere.sql.parser.binder.segment.select.projection.impl.AggregationProjection;
......@@ -49,14 +49,14 @@ public final class ProjectionEngineTest {
@Test
public void assertCreateProjectionWhenProjectionSegmentNotMatched() {
assertFalse(new ProjectionEngine(mock(TableMetas.class)).createProjection(null, Collections.emptyList(), null).isPresent());
assertFalse(new ProjectionEngine(mock(SchemaMetaData.class)).createProjection(null, Collections.emptyList(), null).isPresent());
}
@Test
public void assertCreateProjectionWhenProjectionSegmentInstanceOfShorthandProjectionSegment() {
ShorthandProjectionSegment shorthandProjectionSegment = new ShorthandProjectionSegment(0, 0);
shorthandProjectionSegment.setOwner(new OwnerSegment(0, 0, new IdentifierValue("tbl")));
Optional<Projection> actual = new ProjectionEngine(mock(TableMetas.class)).createProjection(null, Collections.emptyList(), shorthandProjectionSegment);
Optional<Projection> actual = new ProjectionEngine(mock(SchemaMetaData.class)).createProjection(null, Collections.emptyList(), shorthandProjectionSegment);
assertTrue(actual.isPresent());
assertThat(actual.get(), instanceOf(ShorthandProjection.class));
}
......@@ -65,7 +65,7 @@ public final class ProjectionEngineTest {
public void assertCreateProjectionWhenProjectionSegmentInstanceOfColumnProjectionSegment() {
ColumnProjectionSegment columnProjectionSegment = new ColumnProjectionSegment(new ColumnSegment(0, 10, new IdentifierValue("name")));
columnProjectionSegment.setAlias(new AliasSegment(0, 0, new IdentifierValue("alias")));
Optional<Projection> actual = new ProjectionEngine(mock(TableMetas.class)).createProjection(null, Collections.emptyList(), columnProjectionSegment);
Optional<Projection> actual = new ProjectionEngine(mock(SchemaMetaData.class)).createProjection(null, Collections.emptyList(), columnProjectionSegment);
assertTrue(actual.isPresent());
assertThat(actual.get(), instanceOf(ColumnProjection.class));
}
......@@ -73,7 +73,7 @@ public final class ProjectionEngineTest {
@Test
public void assertCreateProjectionWhenProjectionSegmentInstanceOfExpressionProjectionSegment() {
ExpressionProjectionSegment expressionProjectionSegment = new ExpressionProjectionSegment(0, 10, "text");
Optional<Projection> actual = new ProjectionEngine(mock(TableMetas.class)).createProjection(null, Collections.emptyList(), expressionProjectionSegment);
Optional<Projection> actual = new ProjectionEngine(mock(SchemaMetaData.class)).createProjection(null, Collections.emptyList(), expressionProjectionSegment);
assertTrue(actual.isPresent());
assertThat(actual.get(), instanceOf(ExpressionProjection.class));
}
......@@ -81,7 +81,7 @@ public final class ProjectionEngineTest {
@Test
public void assertCreateProjectionWhenProjectionSegmentInstanceOfAggregationDistinctProjectionSegment() {
AggregationDistinctProjectionSegment aggregationDistinctProjectionSegment = new AggregationDistinctProjectionSegment(0, 10, AggregationType.COUNT, 0, "distinctExpression");
Optional<Projection> actual = new ProjectionEngine(mock(TableMetas.class)).createProjection("select count(1) from table_1", Collections.emptyList(), aggregationDistinctProjectionSegment);
Optional<Projection> actual = new ProjectionEngine(mock(SchemaMetaData.class)).createProjection("select count(1) from table_1", Collections.emptyList(), aggregationDistinctProjectionSegment);
assertTrue(actual.isPresent());
assertThat(actual.get(), instanceOf(AggregationDistinctProjection.class));
}
......@@ -89,7 +89,7 @@ public final class ProjectionEngineTest {
@Test
public void assertCreateProjectionWhenProjectionSegmentInstanceOfAggregationProjectionSegment() {
AggregationProjectionSegment aggregationProjectionSegment = new AggregationProjectionSegment(0, 10, AggregationType.COUNT, 0);
Optional<Projection> actual = new ProjectionEngine(mock(TableMetas.class)).createProjection("select count(1) from table_1", Collections.emptyList(), aggregationProjectionSegment);
Optional<Projection> actual = new ProjectionEngine(mock(SchemaMetaData.class)).createProjection("select count(1) from table_1", Collections.emptyList(), aggregationProjectionSegment);
assertTrue(actual.isPresent());
assertThat(actual.get(), instanceOf(AggregationProjection.class));
}
......@@ -97,7 +97,7 @@ public final class ProjectionEngineTest {
@Test
public void assertCreateProjectionWhenProjectionSegmentInstanceOfAggregationDistinctProjectionSegmentAndAggregationTypeIsAvg() {
AggregationDistinctProjectionSegment aggregationDistinctProjectionSegment = new AggregationDistinctProjectionSegment(0, 10, AggregationType.AVG, 0, "distinctExpression");
Optional<Projection> actual = new ProjectionEngine(mock(TableMetas.class)).createProjection("select count(1) from table_1", Collections.emptyList(), aggregationDistinctProjectionSegment);
Optional<Projection> actual = new ProjectionEngine(mock(SchemaMetaData.class)).createProjection("select count(1) from table_1", Collections.emptyList(), aggregationDistinctProjectionSegment);
assertTrue(actual.isPresent());
assertThat(actual.get(), instanceOf(AggregationDistinctProjection.class));
}
......@@ -105,7 +105,7 @@ public final class ProjectionEngineTest {
@Test
public void assertCreateProjectionWhenProjectionSegmentInstanceOfAggregationProjectionSegmentAndAggregationTypeIsAvg() {
AggregationProjectionSegment aggregationProjectionSegment = new AggregationProjectionSegment(0, 10, AggregationType.AVG, 0);
Optional<Projection> actual = new ProjectionEngine(mock(TableMetas.class)).createProjection("select count(1) from table_1", Collections.emptyList(), aggregationProjectionSegment);
Optional<Projection> actual = new ProjectionEngine(mock(SchemaMetaData.class)).createProjection("select count(1) from table_1", Collections.emptyList(), aggregationProjectionSegment);
assertTrue(actual.isPresent());
assertThat(actual.get(), instanceOf(AggregationProjection.class));
}
......
......@@ -18,7 +18,7 @@
package org.apache.shardingsphere.sql.parser.binder.segment.select.projection.engine;
import com.google.common.collect.Lists;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.segment.select.groupby.GroupByContext;
import org.apache.shardingsphere.sql.parser.binder.segment.select.orderby.OrderByContext;
import org.apache.shardingsphere.sql.parser.binder.segment.select.orderby.OrderByItem;
......@@ -44,11 +44,11 @@ import static org.junit.Assert.assertNotNull;
public final class ProjectionsContextEngineTest {
private TableMetas emptyTableMetas;
private SchemaMetaData schemaMetaData;
@Before
public void setUp() {
emptyTableMetas = new TableMetas(Collections.emptyMap());
schemaMetaData = new SchemaMetaData(Collections.emptyMap());
}
@Test
......@@ -70,7 +70,7 @@ public final class ProjectionsContextEngineTest {
OwnerSegment owner = new OwnerSegment(0, 10, new IdentifierValue("name"));
shorthandProjectionSegment.setOwner(owner);
projectionsSegment.getProjections().addAll(Collections.singleton(shorthandProjectionSegment));
ProjectionsContext actual = new ProjectionsContextEngine(emptyTableMetas)
ProjectionsContext actual = new ProjectionsContextEngine(schemaMetaData)
.createProjectionsContext(null, selectStatement, new GroupByContext(Collections.emptyList(), 0), new OrderByContext(Collections.emptyList(), false));
assertNotNull(actual);
}
......@@ -86,7 +86,7 @@ public final class ProjectionsContextEngineTest {
projectionsSegment.getProjections().addAll(Collections.singleton(shorthandProjectionSegment));
OrderByItem orderByItem = new OrderByItem(new IndexOrderByItemSegment(0, 1, 0, OrderDirection.ASC));
OrderByContext orderByContext = new OrderByContext(Collections.singletonList(orderByItem), true);
ProjectionsContext actual = new ProjectionsContextEngine(emptyTableMetas)
ProjectionsContext actual = new ProjectionsContextEngine(schemaMetaData)
.createProjectionsContext(null, selectStatement, new GroupByContext(Collections.emptyList(), 0), orderByContext);
assertNotNull(actual);
}
......@@ -102,7 +102,7 @@ public final class ProjectionsContextEngineTest {
projectionsSegment.getProjections().addAll(Collections.singleton(shorthandProjectionSegment));
OrderByItem orderByItem = new OrderByItem(new ExpressionOrderByItemSegment(0, 1, "", OrderDirection.ASC));
OrderByContext orderByContext = new OrderByContext(Collections.singletonList(orderByItem), true);
ProjectionsContext actual = new ProjectionsContextEngine(emptyTableMetas)
ProjectionsContext actual = new ProjectionsContextEngine(schemaMetaData)
.createProjectionsContext(null, selectStatement, new GroupByContext(Collections.emptyList(), 0), orderByContext);
assertNotNull(actual);
}
......@@ -119,7 +119,7 @@ public final class ProjectionsContextEngineTest {
projectionsSegment.getProjections().addAll(Collections.singleton(shorthandProjectionSegment));
OrderByItem orderByItem = new OrderByItem(new ColumnOrderByItemSegment(new ColumnSegment(0, 0, new IdentifierValue("name")), OrderDirection.ASC));
OrderByContext orderByContext = new OrderByContext(Collections.singletonList(orderByItem), true);
ProjectionsContext actual = new ProjectionsContextEngine(emptyTableMetas)
ProjectionsContext actual = new ProjectionsContextEngine(schemaMetaData)
.createProjectionsContext(null, selectStatement, new GroupByContext(Collections.emptyList(), 0), orderByContext);
assertNotNull(actual);
}
......@@ -136,7 +136,7 @@ public final class ProjectionsContextEngineTest {
projectionsSegment.getProjections().addAll(Collections.singleton(shorthandProjectionSegment));
OrderByItem orderByItem = new OrderByItem(new ColumnOrderByItemSegment(new ColumnSegment(0, 0, new IdentifierValue("name")), OrderDirection.ASC));
OrderByContext orderByContext = new OrderByContext(Collections.singletonList(orderByItem), true);
ProjectionsContext actual = new ProjectionsContextEngine(emptyTableMetas)
ProjectionsContext actual = new ProjectionsContextEngine(schemaMetaData)
.createProjectionsContext(null, selectStatement, new GroupByContext(Collections.emptyList(), 0), orderByContext);
assertNotNull(actual);
}
......@@ -161,7 +161,7 @@ public final class ProjectionsContextEngineTest {
projectionsSegment.getProjections().addAll(Lists.newArrayList(columnProjectionSegment, shorthandProjectionSegment));
OrderByItem orderByItem = new OrderByItem(new ColumnOrderByItemSegment(new ColumnSegment(0, 0, new IdentifierValue("name")), OrderDirection.ASC));
OrderByContext orderByContext = new OrderByContext(Collections.singleton(orderByItem), false);
ProjectionsContext actual = new ProjectionsContextEngine(emptyTableMetas)
ProjectionsContext actual = new ProjectionsContextEngine(schemaMetaData)
.createProjectionsContext(null, selectStatement, new GroupByContext(Collections.emptyList(), 0), orderByContext);
assertNotNull(actual);
}
......
......@@ -18,7 +18,7 @@
package org.apache.shardingsphere.sql.parser.binder.segment.table;
import com.google.common.collect.Sets;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.sql.segment.dml.column.ColumnSegment;
import org.apache.shardingsphere.sql.parser.sql.segment.dml.predicate.PredicateSegment;
import org.apache.shardingsphere.sql.parser.sql.segment.dml.predicate.value.PredicateRightValue;
......@@ -59,7 +59,7 @@ public final class TablesContextTest {
public void assertFindTableNameWhenSingleTable() {
SimpleTableSegment tableSegment = createTableSegment("table_1", "tbl_1");
PredicateSegment predicateSegment = createPredicateSegment(createColumnSegment());
Optional<String> actual = new TablesContext(Collections.singletonList(tableSegment)).findTableName(predicateSegment, mock(TableMetas.class));
Optional<String> actual = new TablesContext(Collections.singletonList(tableSegment)).findTableName(predicateSegment, mock(SchemaMetaData.class));
assertTrue(actual.isPresent());
assertThat(actual.get(), is("table_1"));
}
......@@ -71,7 +71,7 @@ public final class TablesContextTest {
ColumnSegment columnSegment = createColumnSegment();
columnSegment.setOwner(new OwnerSegment(0, 10, new IdentifierValue("table_1")));
PredicateSegment predicateSegment = createPredicateSegment(columnSegment);
Optional<String> actual = new TablesContext(Arrays.asList(tableSegment1, tableSegment2)).findTableName(predicateSegment, mock(TableMetas.class));
Optional<String> actual = new TablesContext(Arrays.asList(tableSegment1, tableSegment2)).findTableName(predicateSegment, mock(SchemaMetaData.class));
assertTrue(actual.isPresent());
assertThat(actual.get(), is("table_1"));
}
......@@ -81,18 +81,18 @@ public final class TablesContextTest {
SimpleTableSegment tableSegment1 = createTableSegment("table_1", "tbl_1");
SimpleTableSegment tableSegment2 = createTableSegment("table_2", "tbl_2");
PredicateSegment predicateSegment = createPredicateSegment(createColumnSegment());
Optional<String> actual = new TablesContext(Arrays.asList(tableSegment1, tableSegment2)).findTableName(predicateSegment, mock(TableMetas.class));
Optional<String> actual = new TablesContext(Arrays.asList(tableSegment1, tableSegment2)).findTableName(predicateSegment, mock(SchemaMetaData.class));
assertFalse(actual.isPresent());
}
@Test
public void assertFindTableNameWhenColumnSegmentOwnerAbsentAndTableMetasContainsColumn() {
public void assertFindTableNameWhenColumnSegmentOwnerAbsentAndSchemaMetaDataContainsColumn() {
SimpleTableSegment tableSegment1 = createTableSegment("table_1", "tbl_1");
SimpleTableSegment tableSegment2 = createTableSegment("table_2", "tbl_2");
PredicateSegment predicateSegment = createPredicateSegment(createColumnSegment());
TableMetas tableMetas = mock(TableMetas.class);
when(tableMetas.containsColumn(anyString(), anyString())).thenReturn(true);
Optional<String> actual = new TablesContext(Arrays.asList(tableSegment1, tableSegment2)).findTableName(predicateSegment, tableMetas);
SchemaMetaData schemaMetaData = mock(SchemaMetaData.class);
when(schemaMetaData.containsColumn(anyString(), anyString())).thenReturn(true);
Optional<String> actual = new TablesContext(Arrays.asList(tableSegment1, tableSegment2)).findTableName(predicateSegment, schemaMetaData);
assertTrue(actual.isPresent());
assertThat(actual.get(), is("table_1"));
}
......
......@@ -17,7 +17,7 @@
package org.apache.shardingsphere.sql.parser.binder.statement.impl;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.dml.InsertStatementContext;
import org.apache.shardingsphere.sql.parser.sql.segment.dml.assignment.InsertValuesSegment;
import org.apache.shardingsphere.sql.parser.sql.segment.dml.column.ColumnSegment;
......@@ -46,29 +46,29 @@ public final class InsertStatementContextTest {
new ColumnSegment(0, 0, new IdentifierValue("id")), new ColumnSegment(0, 0, new IdentifierValue("name")), new ColumnSegment(0, 0, new IdentifierValue("status"))));
insertStatement.setInsertColumns(insertColumnsSegment);
setUpInsertValues(insertStatement);
InsertStatementContext actual = new InsertStatementContext(mock(TableMetas.class), Arrays.asList(1, "Tom", 2, "Jerry"), insertStatement);
InsertStatementContext actual = new InsertStatementContext(mock(SchemaMetaData.class), Arrays.asList(1, "Tom", 2, "Jerry"), insertStatement);
assertInsertStatementContext(actual);
}
@Test
public void assertInsertStatementContextWithoutColumnNames() {
TableMetas tableMetas = mock(TableMetas.class);
when(tableMetas.getAllColumnNames("tbl")).thenReturn(Arrays.asList("id", "name", "status"));
SchemaMetaData schemaMetaData = mock(SchemaMetaData.class);
when(schemaMetaData.getAllColumnNames("tbl")).thenReturn(Arrays.asList("id", "name", "status"));
InsertStatement insertStatement = new InsertStatement();
insertStatement.setTable(new SimpleTableSegment(0, 0, new IdentifierValue("tbl")));
setUpInsertValues(insertStatement);
InsertStatementContext actual = new InsertStatementContext(tableMetas, Arrays.asList(1, "Tom", 2, "Jerry"), insertStatement);
InsertStatementContext actual = new InsertStatementContext(schemaMetaData, Arrays.asList(1, "Tom", 2, "Jerry"), insertStatement);
assertInsertStatementContext(actual);
}
@Test
public void assertGetGroupedParameters() {
TableMetas tableMetas = mock(TableMetas.class);
when(tableMetas.getAllColumnNames("tbl")).thenReturn(Arrays.asList("id", "name", "status"));
SchemaMetaData schemaMetaData = mock(SchemaMetaData.class);
when(schemaMetaData.getAllColumnNames("tbl")).thenReturn(Arrays.asList("id", "name", "status"));
InsertStatement insertStatement = new InsertStatement();
insertStatement.setTable(new SimpleTableSegment(0, 0, new IdentifierValue("tbl")));
setUpInsertValues(insertStatement);
InsertStatementContext actual = new InsertStatementContext(tableMetas, Arrays.asList(1, "Tom", 2, "Jerry"), insertStatement);
InsertStatementContext actual = new InsertStatementContext(schemaMetaData, Arrays.asList(1, "Tom", 2, "Jerry"), insertStatement);
assertThat(actual.getGroupedParameters().size(), is(2));
}
......
......@@ -19,7 +19,7 @@ package org.apache.shardingsphere.underlying.common.metadata;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.underlying.common.metadata.datasource.DataSourceMetas;
/**
......@@ -31,5 +31,5 @@ public final class ShardingSphereMetaData {
private final DataSourceMetas dataSources;
private final TableMetas tables;
private final SchemaMetaData schema;
}
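// Illustrative sketch (not part of this commit): because the field is renamed from
// "tables" to "schema", Lombok's @Getter now exposes getSchema() instead of getTables(),
// which is what callers such as SQLRewriteEntry switch to. Variable and method names
// below are assumptions for illustration only.
private static SchemaMetaData schemaOf(final DataSourceMetas dataSourceMetas, final SchemaMetaData schemaMetaData) {
    ShardingSphereMetaData metaData = new ShardingSphereMetaData(dataSourceMetas, schemaMetaData);
    return metaData.getSchema();
}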
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.underlying.common.metadata.decorator;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaData;
import org.apache.shardingsphere.underlying.common.rule.BaseRule;
import java.util.HashMap;
import java.util.Map;
/**
* Schema meta data decorator.
*/
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public final class SchemaMetaDataDecorator {
/**
* Decorate schema meta data.
*
* @param schemaMetaData schema meta data
* @param rule rule
* @param tableMetaDataDecorator table meta data decorator
* @param <T> type of rule
* @return decorated schema meta data
*/
public static <T extends BaseRule> SchemaMetaData decorate(final SchemaMetaData schemaMetaData, final T rule, final TableMetaDataDecorator<T> tableMetaDataDecorator) {
Map<String, TableMetaData> result = new HashMap<>(schemaMetaData.getAllTableNames().size(), 1);
for (String each : schemaMetaData.getAllTableNames()) {
result.put(each, tableMetaDataDecorator.decorate(schemaMetaData.get(each), each, rule));
}
return new SchemaMetaData(result);
}
}
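// Illustrative sketch (not part of this commit): the new static helper composes with any
// per-table TableMetaDataDecorator. The no-op decorator and the usage line below are
// hypothetical; only the decorate(...) signatures come from this change.
public final class NoOpTableMetaDataDecorator implements TableMetaDataDecorator<BaseRule> {
    
    @Override
    public TableMetaData decorate(final TableMetaData tableMetaData, final String tableName, final BaseRule rule) {
        // return the table meta data unchanged; a real decorator would rewrite columns or indexes here
        return tableMetaData;
    }
}
// Usage sketch: SchemaMetaData decorated = SchemaMetaDataDecorator.decorate(schemaMetaData, rule, new NoOpTableMetaDataDecorator());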
......@@ -15,10 +15,9 @@
* limitations under the License.
*/
package org.apache.shardingsphere.underlying.common.metadata.table;
package org.apache.shardingsphere.underlying.common.metadata.decorator;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.underlying.common.rule.BaseRule;
/**
......@@ -26,15 +25,6 @@ import org.apache.shardingsphere.underlying.common.rule.BaseRule;
*/
public interface TableMetaDataDecorator<T extends BaseRule> {
/**
* Decorate table metas.
*
* @param tableMetas table metas
* @param rule rule
* @return decorated table metas
*/
TableMetas decorate(TableMetas tableMetas, T rule);
/**
* Decorate table meta data.
*
......
......@@ -19,7 +19,7 @@ package org.apache.shardingsphere.underlying.merge;
import lombok.RequiredArgsConstructor;
import org.apache.shardingsphere.spi.database.type.DatabaseType;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.underlying.common.constant.properties.ShardingSphereProperties;
import org.apache.shardingsphere.underlying.common.rule.BaseRule;
......@@ -46,7 +46,7 @@ public final class MergeEntry {
private final DatabaseType databaseType;
private final TableMetas tableMetas;
private final SchemaMetaData schemaMetaData;
private final ShardingSphereProperties properties;
......@@ -71,7 +71,7 @@ public final class MergeEntry {
for (Entry<BaseRule, ResultProcessEngine> entry : engines.entrySet()) {
if (entry.getValue() instanceof ResultMergerEngine) {
ResultMerger resultMerger = ((ResultMergerEngine) entry.getValue()).newInstance(databaseType, entry.getKey(), properties, sqlStatementContext);
return Optional.of(resultMerger.merge(queryResults, sqlStatementContext, tableMetas));
return Optional.of(resultMerger.merge(queryResults, sqlStatementContext, schemaMetaData));
}
}
return Optional.empty();
......@@ -83,7 +83,7 @@ public final class MergeEntry {
for (Entry<BaseRule, ResultProcessEngine> entry : engines.entrySet()) {
if (entry.getValue() instanceof ResultDecoratorEngine) {
ResultDecorator resultDecorator = ((ResultDecoratorEngine) entry.getValue()).newInstance(databaseType, entry.getKey(), properties, sqlStatementContext);
result = null == result ? resultDecorator.decorate(mergedResult, sqlStatementContext, tableMetas) : resultDecorator.decorate(result, sqlStatementContext, tableMetas);
result = null == result ? resultDecorator.decorate(mergedResult, sqlStatementContext, schemaMetaData) : resultDecorator.decorate(result, sqlStatementContext, schemaMetaData);
}
}
return null == result ? mergedResult : result;
......@@ -95,7 +95,7 @@ public final class MergeEntry {
for (Entry<BaseRule, ResultProcessEngine> entry : engines.entrySet()) {
if (entry.getValue() instanceof ResultDecoratorEngine) {
ResultDecorator resultDecorator = ((ResultDecoratorEngine) entry.getValue()).newInstance(databaseType, entry.getKey(), properties, sqlStatementContext);
result = null == result ? resultDecorator.decorate(queryResult, sqlStatementContext, tableMetas) : resultDecorator.decorate(result, sqlStatementContext, tableMetas);
result = null == result ? resultDecorator.decorate(queryResult, sqlStatementContext, schemaMetaData) : resultDecorator.decorate(result, sqlStatementContext, schemaMetaData);
}
}
return Optional.ofNullable(result);
......
......@@ -17,7 +17,7 @@
package org.apache.shardingsphere.underlying.merge.engine.decorator;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.underlying.executor.QueryResult;
import org.apache.shardingsphere.underlying.merge.result.MergedResult;
......@@ -34,20 +34,20 @@ public interface ResultDecorator {
*
* @param queryResult query result
* @param sqlStatementContext SQL statement context
* @param tableMetas table metas
* @param schemaMetaData schema meta data
* @return merged result
* @throws SQLException SQL exception
*/
MergedResult decorate(QueryResult queryResult, SQLStatementContext sqlStatementContext, TableMetas tableMetas) throws SQLException;
MergedResult decorate(QueryResult queryResult, SQLStatementContext sqlStatementContext, SchemaMetaData schemaMetaData) throws SQLException;
/**
* Decorate merged result.
*
* @param mergedResult merged result
* @param sqlStatementContext SQL statement context
* @param tableMetas table metas
* @param schemaMetaData schema meta data
* @return merged result
* @throws SQLException SQL exception
*/
MergedResult decorate(MergedResult mergedResult, SQLStatementContext sqlStatementContext, TableMetas tableMetas) throws SQLException;
MergedResult decorate(MergedResult mergedResult, SQLStatementContext sqlStatementContext, SchemaMetaData schemaMetaData) throws SQLException;
}
......@@ -17,7 +17,7 @@
package org.apache.shardingsphere.underlying.merge.engine.decorator.impl;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.underlying.executor.QueryResult;
import org.apache.shardingsphere.underlying.merge.engine.decorator.ResultDecorator;
......@@ -30,12 +30,12 @@ import org.apache.shardingsphere.underlying.merge.result.impl.transparent.Transp
public final class TransparentResultDecorator implements ResultDecorator {
@Override
public MergedResult decorate(final QueryResult queryResult, final SQLStatementContext sqlStatementContext, final TableMetas tableMetas) {
public MergedResult decorate(final QueryResult queryResult, final SQLStatementContext sqlStatementContext, final SchemaMetaData schemaMetaData) {
return new TransparentMergedResult(queryResult);
}
@Override
public MergedResult decorate(final MergedResult mergedResult, final SQLStatementContext sqlStatementContext, final TableMetas tableMetas) {
public MergedResult decorate(final MergedResult mergedResult, final SQLStatementContext sqlStatementContext, final SchemaMetaData schemaMetaData) {
return mergedResult;
}
}
......@@ -17,7 +17,7 @@
package org.apache.shardingsphere.underlying.merge.engine.merger;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.underlying.executor.QueryResult;
import org.apache.shardingsphere.underlying.merge.result.MergedResult;
......@@ -35,9 +35,9 @@ public interface ResultMerger {
*
* @param queryResults query results
* @param sqlStatementContext SQL statement context
* @param tableMetas table metas
* @param schemaMetaData schema meta data
* @return merged result
* @throws SQLException SQL exception
*/
MergedResult merge(List<QueryResult> queryResults, SQLStatementContext sqlStatementContext, TableMetas tableMetas) throws SQLException;
MergedResult merge(List<QueryResult> queryResults, SQLStatementContext sqlStatementContext, SchemaMetaData schemaMetaData) throws SQLException;
}
......@@ -17,7 +17,7 @@
package org.apache.shardingsphere.underlying.merge.engine.merger.impl;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.underlying.executor.QueryResult;
import org.apache.shardingsphere.underlying.merge.engine.merger.ResultMerger;
......@@ -32,7 +32,7 @@ import java.util.List;
public final class TransparentResultMerger implements ResultMerger {
@Override
public MergedResult merge(final List<QueryResult> queryResults, final SQLStatementContext sqlStatementContext, final TableMetas tableMetas) {
public MergedResult merge(final List<QueryResult> queryResults, final SQLStatementContext sqlStatementContext, final SchemaMetaData schemaMetaData) {
return new TransparentMergedResult(queryResults.get(0));
}
}
......@@ -18,7 +18,7 @@
package org.apache.shardingsphere.underlying.merge.result.impl.memory;
import lombok.RequiredArgsConstructor;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.underlying.common.rule.BaseRule;
import org.apache.shardingsphere.underlying.executor.QueryResult;
......@@ -49,15 +49,15 @@ public abstract class MemoryMergedResult<T extends BaseRule> implements MergedRe
private boolean wasNull;
protected MemoryMergedResult(final T rule, final TableMetas tableMetas, final SQLStatementContext sqlStatementContext, final List<QueryResult> queryResults) throws SQLException {
List<MemoryQueryResultRow> memoryQueryResultRowList = init(rule, tableMetas, sqlStatementContext, queryResults);
protected MemoryMergedResult(final T rule, final SchemaMetaData schemaMetaData, final SQLStatementContext sqlStatementContext, final List<QueryResult> queryResults) throws SQLException {
List<MemoryQueryResultRow> memoryQueryResultRowList = init(rule, schemaMetaData, sqlStatementContext, queryResults);
memoryResultSetRows = memoryQueryResultRowList.iterator();
if (!memoryQueryResultRowList.isEmpty()) {
currentResultSetRow = memoryQueryResultRowList.get(0);
}
}
protected abstract List<MemoryQueryResultRow> init(T rule, TableMetas tableMetas, SQLStatementContext sqlStatementContext, List<QueryResult> queryResults) throws SQLException;
protected abstract List<MemoryQueryResultRow> init(T rule, SchemaMetaData schemaMetaData, SQLStatementContext sqlStatementContext, List<QueryResult> queryResults) throws SQLException;
@Override
public final boolean next() {
......
......@@ -18,7 +18,7 @@
package org.apache.shardingsphere.underlying.merge.result.impl.memory.fixture;
import lombok.Getter;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.underlying.executor.QueryResult;
import org.apache.shardingsphere.underlying.merge.result.impl.fixture.TestRule;
......@@ -41,7 +41,7 @@ public final class TestMemoryMergedResult extends MemoryMergedResult<TestRule> {
}
@Override
protected List<MemoryQueryResultRow> init(final TestRule rule, final TableMetas tableMetas, final SQLStatementContext sqlStatementContext, final List<QueryResult> queryResults) {
protected List<MemoryQueryResultRow> init(final TestRule rule, final SchemaMetaData schemaMetaData, final SQLStatementContext sqlStatementContext, final List<QueryResult> queryResults) {
memoryQueryResultRow = mock(MemoryQueryResultRow.class);
return Collections.singletonList(memoryQueryResultRow);
}
......
......@@ -50,7 +50,7 @@ public final class SQLRewriteEntry {
*/
public SQLRewriteContext createSQLRewriteContext(final String sql, final List<Object> parameters,
final SQLStatementContext sqlStatementContext, final Map<BaseRule, SQLRewriteContextDecorator> decorators) {
SQLRewriteContext result = new SQLRewriteContext(metaData.getTables(), sqlStatementContext, sql, parameters);
SQLRewriteContext result = new SQLRewriteContext(metaData.getSchema(), sqlStatementContext, sql, parameters);
decorate(decorators, result);
result.generateSQLTokens();
return result;
......
......@@ -19,7 +19,7 @@ package org.apache.shardingsphere.underlying.rewrite.context;
import lombok.AccessLevel;
import lombok.Getter;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.sql.parser.binder.statement.dml.InsertStatementContext;
import org.apache.shardingsphere.underlying.rewrite.parameter.builder.ParameterBuilder;
......@@ -40,7 +40,7 @@ import java.util.List;
@Getter
public final class SQLRewriteContext {
private final TableMetas tableMetas;
private final SchemaMetaData schemaMetaData;
private final SQLStatementContext sqlStatementContext;
......@@ -55,8 +55,8 @@ public final class SQLRewriteContext {
@Getter(AccessLevel.NONE)
private final SQLTokenGenerators sqlTokenGenerators = new SQLTokenGenerators();
public SQLRewriteContext(final TableMetas tableMetas, final SQLStatementContext sqlStatementContext, final String sql, final List<Object> parameters) {
this.tableMetas = tableMetas;
public SQLRewriteContext(final SchemaMetaData schemaMetaData, final SQLStatementContext sqlStatementContext, final String sql, final List<Object> parameters) {
this.schemaMetaData = schemaMetaData;
this.sqlStatementContext = sqlStatementContext;
this.sql = sql;
this.parameters = parameters;
......@@ -78,6 +78,6 @@ public final class SQLRewriteContext {
* Generate SQL tokens.
*/
public void generateSQLTokens() {
sqlTokens.addAll(sqlTokenGenerators.generateSQLTokens(sqlStatementContext, parameters, tableMetas));
sqlTokens.addAll(sqlTokenGenerators.generateSQLTokens(sqlStatementContext, parameters, schemaMetaData));
}
}
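// Illustrative usage sketch (not part of this commit), mirroring SQLRewriteEntry above:
// the rewrite context is now built from a SchemaMetaData before tokens are generated.
// The parameter names are assumptions.
private static SQLRewriteContext createContext(final SchemaMetaData schemaMetaData, final SQLStatementContext sqlStatementContext,
                                               final String sql, final List<Object> parameters) {
    SQLRewriteContext result = new SQLRewriteContext(schemaMetaData, sqlStatementContext, sql, parameters);
    result.generateSQLTokens();
    return result;
}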
......@@ -17,7 +17,7 @@
package org.apache.shardingsphere.underlying.rewrite.parameter.rewriter;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import java.util.Collection;
......@@ -29,8 +29,8 @@ public interface ParameterRewriterBuilder {
/**
* Get parameter rewriters.
*
* @param tableMetas table metas
* @param schemaMetaData schema meta data
* @return parameter rewriters
*/
Collection<ParameterRewriter> getParameterRewriters(TableMetas tableMetas);
Collection<ParameterRewriter> getParameterRewriters(SchemaMetaData schemaMetaData);
}
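// Illustrative sketch (not part of this commit): a minimal builder against the renamed
// signature that simply returns no rewriters. The class name is hypothetical.
public final class EmptyParameterRewriterBuilder implements ParameterRewriterBuilder {
    
    @Override
    public Collection<ParameterRewriter> getParameterRewriters(final SchemaMetaData schemaMetaData) {
        // no parameter rewriting is needed in this illustrative case
        return java.util.Collections.emptyList();
    }
}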
......@@ -17,11 +17,11 @@
package org.apache.shardingsphere.underlying.rewrite.sql.token.generator;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.underlying.rewrite.sql.token.generator.aware.ParametersAware;
import org.apache.shardingsphere.underlying.rewrite.sql.token.generator.aware.PreviousSQLTokensAware;
import org.apache.shardingsphere.underlying.rewrite.sql.token.generator.aware.TableMetasAware;
import org.apache.shardingsphere.underlying.rewrite.sql.token.generator.aware.SchemaMetaDataAware;
import org.apache.shardingsphere.underlying.rewrite.sql.token.pojo.SQLToken;
import java.util.Collection;
......@@ -62,14 +62,14 @@ public final class SQLTokenGenerators {
*
* @param sqlStatementContext SQL statement context
* @param parameters SQL parameters
* @param tableMetas table metas
* @param schemaMetaData schema meta data
* @return SQL tokens
*/
@SuppressWarnings("unchecked")
public List<SQLToken> generateSQLTokens(final SQLStatementContext sqlStatementContext, final List<Object> parameters, final TableMetas tableMetas) {
public List<SQLToken> generateSQLTokens(final SQLStatementContext sqlStatementContext, final List<Object> parameters, final SchemaMetaData schemaMetaData) {
List<SQLToken> result = new LinkedList<>();
for (SQLTokenGenerator each : sqlTokenGenerators) {
setUpSQLTokenGenerator(each, parameters, tableMetas, result);
setUpSQLTokenGenerator(each, parameters, schemaMetaData, result);
if (!each.isGenerateSQLToken(sqlStatementContext)) {
continue;
}
......@@ -85,12 +85,12 @@ public final class SQLTokenGenerators {
return result;
}
private void setUpSQLTokenGenerator(final SQLTokenGenerator sqlTokenGenerator, final List<Object> parameters, final TableMetas tableMetas, final List<SQLToken> previousSQLTokens) {
private void setUpSQLTokenGenerator(final SQLTokenGenerator sqlTokenGenerator, final List<Object> parameters, final SchemaMetaData schemaMetaData, final List<SQLToken> previousSQLTokens) {
if (sqlTokenGenerator instanceof ParametersAware) {
((ParametersAware) sqlTokenGenerator).setParameters(parameters);
}
if (sqlTokenGenerator instanceof TableMetasAware) {
((TableMetasAware) sqlTokenGenerator).setTableMetas(tableMetas);
if (sqlTokenGenerator instanceof SchemaMetaDataAware) {
((SchemaMetaDataAware) sqlTokenGenerator).setSchemaMetaData(schemaMetaData);
}
if (sqlTokenGenerator instanceof PreviousSQLTokensAware) {
((PreviousSQLTokensAware) sqlTokenGenerator).setPreviousSQLTokens(previousSQLTokens);
......
......@@ -17,17 +17,17 @@
package org.apache.shardingsphere.underlying.rewrite.sql.token.generator.aware;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
/**
* Table metas aware.
* Schema meta data aware.
*/
public interface TableMetasAware {
public interface SchemaMetaDataAware {
/**
* Set table metas.
* Set schema meta data.
*
* @param tableMetas table metas
* @param schemaMetaData schema meta data
*/
void setTableMetas(TableMetas tableMetas);
void setSchemaMetaData(SchemaMetaData schemaMetaData);
}
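// Illustrative fragment (not part of this commit): a component that receives the schema
// meta data through the renamed aware interface; SQLTokenGenerators injects it via
// setSchemaMetaData(...) before token generation. The class name is hypothetical.
public final class SchemaAwareExampleGenerator implements SchemaMetaDataAware {
    
    private SchemaMetaData schemaMetaData;
    
    @Override
    public void setSchemaMetaData(final SchemaMetaData schemaMetaData) {
        this.schemaMetaData = schemaMetaData;
    }
}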
......@@ -17,7 +17,7 @@
package org.apache.shardingsphere.underlying.rewrite.impl;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetas;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.underlying.rewrite.context.SQLRewriteContext;
import org.apache.shardingsphere.underlying.rewrite.sql.SQLBuilder;
......@@ -34,7 +34,7 @@ public final class DefaultSQLBuilderTest {
@Test
public void assertToSQL() {
SQLRewriteContext context = new SQLRewriteContext(mock(TableMetas.class), mock(SQLStatementContext.class), "SELECT * FROM t_config", Collections.emptyList());
SQLRewriteContext context = new SQLRewriteContext(mock(SchemaMetaData.class), mock(SQLStatementContext.class), "SELECT * FROM t_config", Collections.emptyList());
SQLBuilder sqlBuilderWithoutTokens = new DefaultSQLBuilder(context);
assertThat(sqlBuilderWithoutTokens.toSQL(), is("SELECT * FROM t_config"));
}
......