Unverified · Commit 5fb034e2 authored by Liang Zhang, committed by GitHub

Rename DataSourceConfiguration to ScalingDataSourceConfiguration to avoid duplicate class name (#7698)

Parent: 3fdee3f2
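The conflict being resolved is between org.apache.shardingsphere.infra.config.datasource.DataSourceConfiguration and the scaling module's own DataSourceConfiguration: before this commit, code that needed both types (such as SyncConfigurationUtil in the hunks below) had to spell out the scaling type's fully qualified name. A minimal editorial sketch of such a call site after the rename (not part of the diff; the class name NameCollisionSketch is made up, the method calls are taken from the hunks below):

import java.util.Map;
import org.apache.shardingsphere.infra.config.datasource.DataSourceConfiguration;
import org.apache.shardingsphere.scaling.core.config.ScalingConfiguration;
import org.apache.shardingsphere.scaling.core.config.ScalingDataSourceConfiguration;
import org.apache.shardingsphere.scaling.core.config.ShardingSphereJDBCScalingDataSourceConfiguration;
import org.apache.shardingsphere.scaling.core.utils.ConfigurationYamlConverter;

final class NameCollisionSketch {
    
    static Map<String, DataSourceConfiguration> loadSourceDataSources(final ScalingConfiguration scalingConfig) {
        // Both configuration types can now be imported by their simple names; the scaling-side type
        // previously had to be written fully qualified because it clashed with the infra type above.
        ScalingDataSourceConfiguration source = scalingConfig.getRuleConfiguration().getSource().toTypedDataSourceConfiguration();
        // The real SyncConfigurationUtil guards this cast with Preconditions.checkArgument.
        return ConfigurationYamlConverter.loadDataSourceConfigurations(((ShardingSphereJDBCScalingDataSourceConfiguration) source).getDataSource());
    }
}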
@@ -32,7 +32,7 @@ public class DumperConfiguration {
     private String dataSourceName;
-    private DataSourceConfiguration dataSourceConfiguration;
+    private ScalingDataSourceConfiguration dataSourceConfiguration;
     @SuppressWarnings("rawtypes")
     private PositionManager positionManager;
@@ -30,7 +30,7 @@ import java.util.Set;
 @Getter
 public final class ImporterConfiguration {
-    private DataSourceConfiguration dataSourceConfiguration;
+    private ScalingDataSourceConfiguration dataSourceConfiguration;
     private Map<String, Set<String>> shardingColumnsMap;
@@ -24,12 +24,12 @@ import org.apache.shardingsphere.infra.database.type.DatabaseType;
 import org.apache.shardingsphere.infra.database.type.DatabaseTypes;
 /**
- * JDBC data source configuration.
+ * JDBC scaling data source configuration.
  */
 @Getter
 @Setter
 @EqualsAndHashCode(exclude = "databaseType")
-public final class JDBCDataSourceConfiguration implements DataSourceConfiguration {
+public final class JDBCScalingDataSourceConfiguration implements ScalingDataSourceConfiguration {
     private String jdbcUrl;
@@ -39,7 +39,7 @@ public final class JDBCDataSourceConfiguration
     private DatabaseType databaseType;
-    public JDBCDataSourceConfiguration(final String jdbcUrl, final String username, final String password) {
+    public JDBCScalingDataSourceConfiguration(final String jdbcUrl, final String username, final String password) {
         this.jdbcUrl = jdbcUrl;
         this.username = username;
         this.password = password;
@@ -44,14 +44,14 @@ public final class RuleConfiguration {
     /**
      * Get typed data source configuration.
      *
-     * @return data source configuration
+     * @return scaling data source configuration
      */
-    public DataSourceConfiguration toTypedDataSourceConfiguration() {
+    public ScalingDataSourceConfiguration toTypedDataSourceConfiguration() {
         if ("jdbc".equalsIgnoreCase(type)) {
-            return new Gson().fromJson(parameter, JDBCDataSourceConfiguration.class);
+            return new Gson().fromJson(parameter, JDBCScalingDataSourceConfiguration.class);
         }
         if ("shardingSphereJdbc".equalsIgnoreCase(type)) {
-            return new Gson().fromJson(parameter, ShardingSphereJDBCConfiguration.class);
+            return new Gson().fromJson(parameter, ShardingSphereJDBCScalingDataSourceConfiguration.class);
         }
         throw new UnsupportedOperationException("Unsupported Data Source Type:" + type);
     }
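For context on the hunk above: when the rule configuration's type is "jdbc", the parameter payload is handed to Gson and deserialized into JDBCScalingDataSourceConfiguration (and into ShardingSphereJDBCScalingDataSourceConfiguration for "shardingSphereJdbc"). A hedged editorial sketch of that mapping, using a made-up JSON literal whose keys simply mirror the field names seen in the hunks above:

import com.google.gson.Gson;
import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;

final class RuleParameterSketch {
    
    static JDBCScalingDataSourceConfiguration parse() {
        // Placeholder JSON; in a real scaling job the parameter comes from the job configuration.
        String parameter = "{\"jdbcUrl\":\"jdbc:mysql://127.0.0.1:3306/ds_0?serverTimezone=UTC&useSSL=false\",\"username\":\"root\",\"password\":\"root\"}";
        return new Gson().fromJson(parameter, JDBCScalingDataSourceConfiguration.class);
    }
}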
@@ -20,10 +20,9 @@ package org.apache.shardingsphere.scaling.core.config;
 import org.apache.shardingsphere.infra.database.type.DatabaseType;
 /**
- * Data source configuration.
+ * Scaling data source configuration.
  */
-// TODO rename class name, should not conflict with other class name
-public interface DataSourceConfiguration {
+public interface ScalingDataSourceConfiguration {
     /**
      * Get database type.
@@ -24,10 +24,13 @@ import org.apache.shardingsphere.infra.database.type.DatabaseType;
 import org.apache.shardingsphere.infra.database.type.DatabaseTypes;
 import org.apache.shardingsphere.scaling.core.utils.ConfigurationYamlConverter;
+/**
+ * ShardingSphere-JDBC scaling data source configuration.
+ */
 @Getter
 @Setter
 @EqualsAndHashCode(exclude = "databaseType")
-public final class ShardingSphereJDBCConfiguration implements DataSourceConfiguration {
+public final class ShardingSphereJDBCScalingDataSourceConfiguration implements ScalingDataSourceConfiguration {
     private String dataSource;
@@ -35,7 +38,7 @@ public final class ShardingSphereJDBCConfiguration
     private DatabaseType databaseType;
-    public ShardingSphereJDBCConfiguration(final String dataSource, final String rule) {
+    public ShardingSphereJDBCScalingDataSourceConfiguration(final String dataSource, final String rule) {
         this.dataSource = dataSource;
         this.rule = rule;
         databaseType = getDatabaseType();
@@ -22,9 +22,9 @@ import com.zaxxer.hikari.HikariDataSource;
 import lombok.SneakyThrows;
 import org.apache.shardingsphere.driver.api.ShardingSphereDataSourceFactory;
 import org.apache.shardingsphere.infra.config.datasource.DataSourceConverter;
-import org.apache.shardingsphere.scaling.core.config.DataSourceConfiguration;
-import org.apache.shardingsphere.scaling.core.config.JDBCDataSourceConfiguration;
-import org.apache.shardingsphere.scaling.core.config.ShardingSphereJDBCConfiguration;
+import org.apache.shardingsphere.scaling.core.config.ScalingDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.ShardingSphereJDBCScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.utils.ConfigurationYamlConverter;
 import org.apache.shardingsphere.sharding.api.config.ShardingRuleConfiguration;
@@ -38,21 +38,21 @@ import java.util.Map;
 public final class DataSourceFactory {
     /**
-     * New instance data source.
+     * New instance data source wrapper.
      *
-     * @param dataSourceConfig data source configuration
-     * @return new data source
+     * @param dataSourceConfig scaling data source configuration
+     * @return new data source wrapper
      */
-    public DataSourceWrapper newInstance(final DataSourceConfiguration dataSourceConfig) {
-        if (dataSourceConfig instanceof JDBCDataSourceConfiguration) {
-            return newInstanceDataSourceByJDBC((JDBCDataSourceConfiguration) dataSourceConfig);
-        } else if (dataSourceConfig instanceof ShardingSphereJDBCConfiguration) {
-            return newInstanceDataSourceByShardingSphereJDBC((ShardingSphereJDBCConfiguration) dataSourceConfig);
+    public DataSourceWrapper newInstance(final ScalingDataSourceConfiguration dataSourceConfig) {
+        if (dataSourceConfig instanceof JDBCScalingDataSourceConfiguration) {
+            return newInstanceDataSourceByJDBC((JDBCScalingDataSourceConfiguration) dataSourceConfig);
+        } else if (dataSourceConfig instanceof ShardingSphereJDBCScalingDataSourceConfiguration) {
+            return newInstanceDataSourceByShardingSphereJDBC((ShardingSphereJDBCScalingDataSourceConfiguration) dataSourceConfig);
         }
         throw new UnsupportedOperationException("Unsupported data source configuration");
     }
-    private DataSourceWrapper newInstanceDataSourceByJDBC(final JDBCDataSourceConfiguration dataSourceConfig) {
+    private DataSourceWrapper newInstanceDataSourceByJDBC(final JDBCScalingDataSourceConfiguration dataSourceConfig) {
         HikariDataSource result = new HikariDataSource();
         result.setJdbcUrl(dataSourceConfig.getJdbcUrl());
         result.setUsername(dataSourceConfig.getUsername());
@@ -61,7 +61,7 @@ public final class DataSourceFactory {
     }
     @SneakyThrows(SQLException.class)
-    private DataSourceWrapper newInstanceDataSourceByShardingSphereJDBC(final ShardingSphereJDBCConfiguration dataSourceConfig) {
+    private DataSourceWrapper newInstanceDataSourceByShardingSphereJDBC(final ShardingSphereJDBCScalingDataSourceConfiguration dataSourceConfig) {
         Map<String, DataSource> dataSourceMap = DataSourceConverter.getDataSourceMap(
                 ConfigurationYamlConverter.loadDataSourceConfigurations(dataSourceConfig.getDataSource()));
         ShardingRuleConfiguration ruleConfig = ConfigurationYamlConverter.loadShardingRuleConfiguration(dataSourceConfig.getRule());
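A minimal usage sketch of the renamed factory API (editorial, not part of the diff; class and package names are taken from the hunks and tests in this commit, the H2 URL and credentials are placeholders):

import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
import org.apache.shardingsphere.scaling.core.datasource.DataSourceFactory;
import org.apache.shardingsphere.scaling.core.datasource.DataSourceWrapper;

final class DataSourceFactoryUsageSketch {
    
    static DataSourceWrapper newH2DataSource() {
        // The factory dispatches on the concrete ScalingDataSourceConfiguration type:
        // JDBCScalingDataSourceConfiguration yields a HikariCP-backed wrapper,
        // ShardingSphereJDBCScalingDataSourceConfiguration yields a ShardingSphere-JDBC data source.
        JDBCScalingDataSourceConfiguration config = new JDBCScalingDataSourceConfiguration(
                "jdbc:h2:mem:example;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=MySQL", "root", "password");
        return new DataSourceFactory().newInstance(config);
    }
}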
@@ -20,7 +20,7 @@ package org.apache.shardingsphere.scaling.core.datasource;
 import lombok.Getter;
 import lombok.NoArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
-import org.apache.shardingsphere.scaling.core.config.DataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.ScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.config.SyncConfiguration;
 import javax.sql.DataSource;
@@ -39,10 +39,10 @@ public final class DataSourceManager implements AutoCloseable {
     private final DataSourceFactory dataSourceFactory = new DataSourceFactory();
     @Getter
-    private final Map<DataSourceConfiguration, DataSourceWrapper> cachedDataSources = new ConcurrentHashMap<>();
+    private final Map<ScalingDataSourceConfiguration, DataSourceWrapper> cachedDataSources = new ConcurrentHashMap<>();
     @Getter
-    private final Map<DataSourceConfiguration, DataSourceWrapper> sourceDataSources = new ConcurrentHashMap<>();
+    private final Map<ScalingDataSourceConfiguration, DataSourceWrapper> sourceDataSources = new ConcurrentHashMap<>();
     public DataSourceManager(final List<SyncConfiguration> syncConfigs) {
         createDataSources(syncConfigs);
@@ -55,14 +55,14 @@ public final class DataSourceManager implements AutoCloseable {
     private void createSourceDataSources(final List<SyncConfiguration> syncConfigs) {
         for (SyncConfiguration syncConfiguration : syncConfigs) {
-            DataSourceConfiguration dataSourceConfig = syncConfiguration.getDumperConfiguration().getDataSourceConfiguration();
+            ScalingDataSourceConfiguration dataSourceConfig = syncConfiguration.getDumperConfiguration().getDataSourceConfiguration();
             DataSourceWrapper dataSource = dataSourceFactory.newInstance(dataSourceConfig);
             cachedDataSources.put(dataSourceConfig, dataSource);
             sourceDataSources.put(dataSourceConfig, dataSource);
         }
     }
-    private void createTargetDataSources(final DataSourceConfiguration dataSourceConfig) {
+    private void createTargetDataSources(final ScalingDataSourceConfiguration dataSourceConfig) {
         cachedDataSources.put(dataSourceConfig, dataSourceFactory.newInstance(dataSourceConfig));
     }
@@ -72,7 +72,7 @@ public final class DataSourceManager implements AutoCloseable {
      * @param dataSourceConfig data source configuration
      * @return data source
      */
-    public DataSource getDataSource(final DataSourceConfiguration dataSourceConfig) {
+    public DataSource getDataSource(final ScalingDataSourceConfiguration dataSourceConfig) {
         if (cachedDataSources.containsKey(dataSourceConfig)) {
             return cachedDataSources.get(dataSourceConfig);
         }
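For reference, a hedged usage sketch of the manager shown above (editorial, not part of the diff; names come from the hunks and tests in this commit, the H2 URL is a placeholder):

import javax.sql.DataSource;
import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
import org.apache.shardingsphere.scaling.core.config.ScalingDataSourceConfiguration;
import org.apache.shardingsphere.scaling.core.datasource.DataSourceManager;

final class DataSourceManagerUsageSketch {
    
    static void queryOnce() throws Exception {
        ScalingDataSourceConfiguration config =
                new JDBCScalingDataSourceConfiguration("jdbc:h2:mem:example;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=MySQL", "root", "root");
        // DataSourceManager is AutoCloseable; it caches one DataSourceWrapper per configuration,
        // keyed by the @EqualsAndHashCode definitions of the configuration classes above.
        try (DataSourceManager manager = new DataSourceManager()) {
            DataSource dataSource = manager.getDataSource(config);
            dataSource.getConnection().close();
        }
    }
}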
@@ -22,7 +22,7 @@ import lombok.Getter;
 import lombok.Setter;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.shardingsphere.scaling.core.config.InventoryDumperConfiguration;
-import org.apache.shardingsphere.scaling.core.config.JDBCDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.constant.ScalingConstant;
 import org.apache.shardingsphere.scaling.core.datasource.DataSourceManager;
 import org.apache.shardingsphere.scaling.core.exception.SyncTaskExecuteException;
@@ -64,7 +64,7 @@ public abstract class AbstractJDBCDumper extends AbstractShardingScalingExecutor
     private Channel channel;
     protected AbstractJDBCDumper(final InventoryDumperConfiguration inventoryDumperConfig, final DataSourceManager dataSourceManager) {
-        if (!JDBCDataSourceConfiguration.class.equals(inventoryDumperConfig.getDataSourceConfiguration().getClass())) {
+        if (!JDBCScalingDataSourceConfiguration.class.equals(inventoryDumperConfig.getDataSourceConfiguration().getClass())) {
             throw new UnsupportedOperationException("AbstractJDBCDumper only support JDBCDataSourceConfiguration");
         }
         inventoryDumperConfiguration = inventoryDumperConfig;
@@ -18,7 +18,7 @@
 package org.apache.shardingsphere.scaling.core.job.preparer;
 import lombok.extern.slf4j.Slf4j;
-import org.apache.shardingsphere.scaling.core.config.DataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.ScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.config.SyncConfiguration;
 import org.apache.shardingsphere.scaling.core.datasource.DataSourceManager;
 import org.apache.shardingsphere.scaling.core.exception.PrepareFailedException;
@@ -105,7 +105,7 @@ public final class ShardingScalingJobPreparer {
     private void initIncrementalDataTasks(final String databaseType, final ShardingScalingJob shardingScalingJob, final DataSourceManager dataSourceManager) {
         for (SyncConfiguration each : shardingScalingJob.getSyncConfigurations()) {
-            DataSourceConfiguration dataSourceConfig = each.getDumperConfiguration().getDataSourceConfiguration();
+            ScalingDataSourceConfiguration dataSourceConfig = each.getDumperConfiguration().getDataSourceConfiguration();
             each.getDumperConfiguration().setPositionManager(initPositionManager(databaseType, dataSourceManager.getDataSource(dataSourceConfig)));
             shardingScalingJob.getIncrementalDataTasks().add(syncTaskFactory.createIncrementalDataSyncTask(each.getConcurrency(), each.getDumperConfiguration(), each.getImporterConfiguration()));
         }
@@ -25,10 +25,11 @@ import lombok.NoArgsConstructor;
 import org.apache.shardingsphere.infra.config.datasource.DataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.config.DumperConfiguration;
 import org.apache.shardingsphere.scaling.core.config.ImporterConfiguration;
-import org.apache.shardingsphere.scaling.core.config.JDBCDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.config.JobConfiguration;
 import org.apache.shardingsphere.scaling.core.config.ScalingConfiguration;
-import org.apache.shardingsphere.scaling.core.config.ShardingSphereJDBCConfiguration;
+import org.apache.shardingsphere.scaling.core.config.ScalingDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.ShardingSphereJDBCScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.config.SyncConfiguration;
 import org.apache.shardingsphere.sharding.algorithm.sharding.inline.InlineExpressionParser;
 import org.apache.shardingsphere.sharding.api.config.ShardingRuleConfiguration;
@@ -63,7 +64,7 @@ public final class SyncConfigurationUtil {
      */
     public static Collection<SyncConfiguration> toSyncConfigurations(final ScalingConfiguration scalingConfig) {
         Collection<SyncConfiguration> result = new LinkedList<>();
-        ShardingSphereJDBCConfiguration sourceConfig = getSourceConfiguration(scalingConfig);
+        ShardingSphereJDBCScalingDataSourceConfiguration sourceConfig = getSourceConfiguration(scalingConfig);
         Map<String, DataSourceConfiguration> sourceDataSource = ConfigurationYamlConverter.loadDataSourceConfigurations(sourceConfig.getDataSource());
         ShardingRuleConfiguration sourceRule = ConfigurationYamlConverter.loadShardingRuleConfiguration(sourceConfig.getRule());
         Map<String, Map<String, String>> dataSourceTableNameMap = toDataSourceTableNameMap(sourceRule, sourceDataSource.keySet());
@@ -78,16 +79,16 @@
         return result;
     }
-    private static ShardingSphereJDBCConfiguration getSourceConfiguration(final ScalingConfiguration scalingConfig) {
-        org.apache.shardingsphere.scaling.core.config.DataSourceConfiguration result = scalingConfig.getRuleConfiguration().getSource().toTypedDataSourceConfiguration();
-        Preconditions.checkArgument(result instanceof ShardingSphereJDBCConfiguration, "Only support ShardingSphere source data source.");
-        return (ShardingSphereJDBCConfiguration) result;
+    private static ShardingSphereJDBCScalingDataSourceConfiguration getSourceConfiguration(final ScalingConfiguration scalingConfig) {
+        ScalingDataSourceConfiguration result = scalingConfig.getRuleConfiguration().getSource().toTypedDataSourceConfiguration();
+        Preconditions.checkArgument(result instanceof ShardingSphereJDBCScalingDataSourceConfiguration, "Only support ShardingSphere source data source.");
+        return (ShardingSphereJDBCScalingDataSourceConfiguration) result;
     }
     private static Optional<ShardingRuleConfiguration> getTargetRuleConfiguration(final ScalingConfiguration scalingConfig) {
-        org.apache.shardingsphere.scaling.core.config.DataSourceConfiguration dataSourceConfig = scalingConfig.getRuleConfiguration().getTarget().toTypedDataSourceConfiguration();
-        if (dataSourceConfig instanceof ShardingSphereJDBCConfiguration) {
-            return Optional.of(ConfigurationYamlConverter.loadShardingRuleConfiguration(((ShardingSphereJDBCConfiguration) dataSourceConfig).getRule()));
+        ScalingDataSourceConfiguration dataSourceConfig = scalingConfig.getRuleConfiguration().getTarget().toTypedDataSourceConfiguration();
+        if (dataSourceConfig instanceof ShardingSphereJDBCScalingDataSourceConfiguration) {
+            return Optional.of(ConfigurationYamlConverter.loadShardingRuleConfiguration(((ShardingSphereJDBCScalingDataSourceConfiguration) dataSourceConfig).getRule()));
         }
         return Optional.empty();
     }
@@ -171,7 +172,7 @@ public final class SyncConfigurationUtil {
         DumperConfiguration result = new DumperConfiguration();
         result.setDataSourceName(dataSourceName);
         Map<String, Object> dataSourceProperties = dataSourceConfig.getProps();
-        JDBCDataSourceConfiguration dumperDataSourceConfig = new JDBCDataSourceConfiguration(
+        JDBCScalingDataSourceConfiguration dumperDataSourceConfig = new JDBCScalingDataSourceConfiguration(
                 dataSourceProperties.containsKey("jdbcUrl") ? dataSourceProperties.get("jdbcUrl").toString() : dataSourceProperties.get("url").toString(),
                 dataSourceProperties.get("username").toString(), dataSourceProperties.get("password").toString());
         result.setDataSourceConfiguration(dumperDataSourceConfig);
@@ -17,10 +17,10 @@
 package org.apache.shardingsphere.scaling.core;
-import org.apache.shardingsphere.scaling.core.config.DataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.ScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.config.DumperConfiguration;
 import org.apache.shardingsphere.scaling.core.config.ImporterConfiguration;
-import org.apache.shardingsphere.scaling.core.config.JDBCDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.config.JobConfiguration;
 import org.apache.shardingsphere.scaling.core.config.ScalingConfiguration;
 import org.apache.shardingsphere.scaling.core.config.ScalingContext;
@@ -142,12 +142,12 @@ public final class ScalingJobControllerTest {
     private ImporterConfiguration mockImporterConfiguration() {
         ImporterConfiguration result = new ImporterConfiguration();
-        result.setDataSourceConfiguration(new JDBCDataSourceConfiguration(DATA_SOURCE_URL, USERNAME, PASSWORD));
+        result.setDataSourceConfiguration(new JDBCScalingDataSourceConfiguration(DATA_SOURCE_URL, USERNAME, PASSWORD));
         return result;
     }
     private DumperConfiguration mockDumperConfig() {
-        DataSourceConfiguration dataSourceConfig = new JDBCDataSourceConfiguration(DATA_SOURCE_URL, USERNAME, PASSWORD);
+        ScalingDataSourceConfiguration dataSourceConfig = new JDBCScalingDataSourceConfiguration(DATA_SOURCE_URL, USERNAME, PASSWORD);
         DumperConfiguration result = new DumperConfiguration();
         result.setDataSourceName("ds0");
         result.setDataSourceConfiguration(dataSourceConfig);
@@ -19,7 +19,7 @@ package org.apache.shardingsphere.scaling.core.check;
 import com.google.gson.Gson;
 import lombok.SneakyThrows;
-import org.apache.shardingsphere.scaling.core.config.DataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.ScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.config.ScalingConfiguration;
 import org.apache.shardingsphere.scaling.core.datasource.DataSourceManager;
 import org.apache.shardingsphere.scaling.core.job.ShardingScalingJob;
@@ -63,7 +63,7 @@ public final class AbstractDataConsistencyCheckerTest {
     }
     @SneakyThrows(SQLException.class)
-    private void initTableData(final DataSourceConfiguration dataSourceConfig) {
+    private void initTableData(final ScalingDataSourceConfiguration dataSourceConfig) {
         DataSource dataSource = new DataSourceManager().getDataSource(dataSourceConfig);
         try (Connection connection = dataSource.getConnection();
              Statement statement = connection.createStatement()) {
@@ -28,8 +28,8 @@ public final class JDBCDataSourceConfigurationTest {
     @Test
     public void assertJDBCDataSourceConfigurationEquals() {
-        JDBCDataSourceConfiguration sourceConfig = new JDBCDataSourceConfiguration("jdbc:mysql://127.0.0.1:3306/test2?serverTimezone=UTC&useSSL=false", "root", "root");
-        JDBCDataSourceConfiguration targetConfig = new JDBCDataSourceConfiguration("jdbc:mysql://127.0.0.1:3306/test2?serverTimezone=UTC&useSSL=false", "root", "root");
+        JDBCScalingDataSourceConfiguration sourceConfig = new JDBCScalingDataSourceConfiguration("jdbc:mysql://127.0.0.1:3306/test2?serverTimezone=UTC&useSSL=false", "root", "root");
+        JDBCScalingDataSourceConfiguration targetConfig = new JDBCScalingDataSourceConfiguration("jdbc:mysql://127.0.0.1:3306/test2?serverTimezone=UTC&useSSL=false", "root", "root");
         assertThat(sourceConfig, is(targetConfig));
         sourceConfig.setDatabaseType(new MySQLDatabaseType());
         targetConfig.setDatabaseType(new MySQLDatabaseType());
@@ -38,8 +38,8 @@ public final class JDBCDataSourceConfigurationTest {
     @Test
     public void assertJDBCDataSourceConfigurationNotEquals() {
-        JDBCDataSourceConfiguration sourceConfig = new JDBCDataSourceConfiguration("jdbc:mysql://127.0.0.1:3306/test2?serverTimezone=UTC&useSSL=false", "sa", "root");
-        JDBCDataSourceConfiguration targetConfig = new JDBCDataSourceConfiguration("jdbc:mysql://127.0.0.1:3306/test2?serverTimezone=UTC&useSSL=false", "root", "root");
+        JDBCScalingDataSourceConfiguration sourceConfig = new JDBCScalingDataSourceConfiguration("jdbc:mysql://127.0.0.1:3306/test2?serverTimezone=UTC&useSSL=false", "sa", "root");
+        JDBCScalingDataSourceConfiguration targetConfig = new JDBCScalingDataSourceConfiguration("jdbc:mysql://127.0.0.1:3306/test2?serverTimezone=UTC&useSSL=false", "root", "root");
         assertThat(sourceConfig, not(targetConfig));
     }
 }
@@ -35,9 +35,9 @@ public class RuleConfigurationTest {
         String username = "root";
         String password = "password";
         dataSourceConf.setParameter(mockJDBCConfiguration(jdbcUrl, username, password));
-        DataSourceConfiguration actual = dataSourceConf.toTypedDataSourceConfiguration();
-        assertThat(actual, instanceOf(JDBCDataSourceConfiguration.class));
-        JDBCDataSourceConfiguration jdbcDataSourceConfig = (JDBCDataSourceConfiguration) actual;
+        ScalingDataSourceConfiguration actual = dataSourceConf.toTypedDataSourceConfiguration();
+        assertThat(actual, instanceOf(JDBCScalingDataSourceConfiguration.class));
+        JDBCScalingDataSourceConfiguration jdbcDataSourceConfig = (JDBCScalingDataSourceConfiguration) actual;
         assertThat(jdbcDataSourceConfig.getJdbcUrl(), is(jdbcUrl));
         assertThat(jdbcDataSourceConfig.getUsername(), is(username));
         assertThat(jdbcDataSourceConfig.getPassword(), is(password));
@@ -58,9 +58,9 @@ public class RuleConfigurationTest {
         String dataSource = FixtureShardingSphereJDBCConfiguration.DATA_SOURCE;
         String rule = FixtureShardingSphereJDBCConfiguration.RULE;
         dataSourceConf.setParameter(mockShardingSphereJDBCConfiguration(dataSource, rule));
-        DataSourceConfiguration actual = dataSourceConf.toTypedDataSourceConfiguration();
-        assertThat(actual, instanceOf(ShardingSphereJDBCConfiguration.class));
-        ShardingSphereJDBCConfiguration shardingSphereJDBCConfig = (ShardingSphereJDBCConfiguration) actual;
+        ScalingDataSourceConfiguration actual = dataSourceConf.toTypedDataSourceConfiguration();
+        assertThat(actual, instanceOf(ShardingSphereJDBCScalingDataSourceConfiguration.class));
+        ShardingSphereJDBCScalingDataSourceConfiguration shardingSphereJDBCConfig = (ShardingSphereJDBCScalingDataSourceConfiguration) actual;
         assertThat(shardingSphereJDBCConfig.getDataSource(), is(dataSource));
         assertThat(shardingSphereJDBCConfig.getRule(), is(rule));
     }
@@ -24,13 +24,12 @@ import org.junit.Test;
 import static org.hamcrest.CoreMatchers.instanceOf;
 import static org.junit.Assert.assertThat;
-public class ShardingSphereJDBCConfigurationTest {
+public final class ShardingSphereJDBCScalingDataSourceConfigurationTest {
     @Test
     public void assertShardingSphereJDBCDataSourceConfigurationDatabaseType() {
-        ShardingSphereJDBCConfiguration shardingSphereJDBCConfig = new ShardingSphereJDBCConfiguration(
-                FixtureShardingSphereJDBCConfiguration.DATA_SOURCE,
-                FixtureShardingSphereJDBCConfiguration.RULE);
-        assertThat(shardingSphereJDBCConfig.getDatabaseType(), instanceOf(H2DatabaseType.class));
+        ShardingSphereJDBCScalingDataSourceConfiguration config = new ShardingSphereJDBCScalingDataSourceConfiguration(
+                FixtureShardingSphereJDBCConfiguration.DATA_SOURCE, FixtureShardingSphereJDBCConfiguration.RULE);
+        assertThat(config.getDatabaseType(), instanceOf(H2DatabaseType.class));
     }
 }
@@ -17,32 +17,28 @@
 package org.apache.shardingsphere.scaling.core.datasource;
-import org.apache.shardingsphere.scaling.core.config.JDBCDataSourceConfiguration;
-import org.apache.shardingsphere.scaling.core.config.ShardingSphereJDBCConfiguration;
+import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.ShardingSphereJDBCScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.fixture.FixtureShardingSphereJDBCConfiguration;
 import org.junit.Test;
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.notNullValue;
-import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertNotNull;
 public class DataSourceFactoryTest {
     @Test
     public void assertNewJDBCInstance() {
-        JDBCDataSourceConfiguration jdbcDataSourceConfig = new JDBCDataSourceConfiguration(
-                "jdbc:h2:mem:test_db_2;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=MySQL",
-                "root",
-                "password");
-        DataSourceWrapper actual = new DataSourceFactory().newInstance(jdbcDataSourceConfig);
-        assertThat(actual, is(notNullValue()));
+        JDBCScalingDataSourceConfiguration config = new JDBCScalingDataSourceConfiguration(
+                "jdbc:h2:mem:test_db_2;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=MySQL", "root", "password");
+        DataSourceWrapper actual = new DataSourceFactory().newInstance(config);
+        assertNotNull(actual);
     }
     @Test
     public void assertNewShardingSphereJDBCInstance() {
-        ShardingSphereJDBCConfiguration shardingSphereJDBCConfig = new ShardingSphereJDBCConfiguration(
+        ShardingSphereJDBCScalingDataSourceConfiguration config = new ShardingSphereJDBCScalingDataSourceConfiguration(
                 FixtureShardingSphereJDBCConfiguration.DATA_SOURCE, FixtureShardingSphereJDBCConfiguration.RULE);
-        DataSourceWrapper actual = new DataSourceFactory().newInstance(shardingSphereJDBCConfig);
-        assertThat(actual, is(notNullValue()));
+        DataSourceWrapper actual = new DataSourceFactory().newInstance(config);
+        assertNotNull(actual);
     }
 }
@@ -19,7 +19,7 @@ package org.apache.shardingsphere.scaling.core.execute.executor.importer;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
-import org.apache.shardingsphere.scaling.core.config.DataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.ScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.config.ImporterConfiguration;
 import org.apache.shardingsphere.scaling.core.datasource.DataSourceManager;
 import org.apache.shardingsphere.scaling.core.execute.executor.channel.Channel;
@@ -66,7 +66,7 @@ public final class AbstractJDBCImporterTest {
     private AbstractSQLBuilder sqlBuilder;
     @Mock
-    private DataSourceConfiguration dataSourceConfig;
+    private ScalingDataSourceConfiguration dataSourceConfig;
     @Mock
     private Channel channel;
@@ -17,10 +17,10 @@
 package org.apache.shardingsphere.scaling.core.job.preparer.resumer;
-import org.apache.shardingsphere.scaling.core.config.DataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.ScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.config.DumperConfiguration;
 import org.apache.shardingsphere.scaling.core.config.ImporterConfiguration;
-import org.apache.shardingsphere.scaling.core.config.JDBCDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.config.JobConfiguration;
 import org.apache.shardingsphere.scaling.core.config.ScalingConfiguration;
 import org.apache.shardingsphere.scaling.core.config.ScalingContext;
@@ -98,7 +98,7 @@ public final class SyncPositionResumerTest {
     }
     private DumperConfiguration mockDumperConfig() {
-        DataSourceConfiguration dataSourceConfig = new JDBCDataSourceConfiguration(DATA_SOURCE_URL, USERNAME, PASSWORD);
+        ScalingDataSourceConfiguration dataSourceConfig = new JDBCScalingDataSourceConfiguration(DATA_SOURCE_URL, USERNAME, PASSWORD);
         DumperConfiguration result = new DumperConfiguration();
         result.setDataSourceName("ds0");
         result.setDataSourceConfiguration(dataSourceConfig);
@@ -17,10 +17,10 @@
 package org.apache.shardingsphere.scaling.core.job.preparer.splitter;
-import org.apache.shardingsphere.scaling.core.config.DataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.ScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.config.DumperConfiguration;
 import org.apache.shardingsphere.scaling.core.config.ImporterConfiguration;
-import org.apache.shardingsphere.scaling.core.config.JDBCDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.config.SyncConfiguration;
 import org.apache.shardingsphere.scaling.core.datasource.DataSourceManager;
 import org.apache.shardingsphere.scaling.core.job.position.InventoryPosition;
@@ -142,7 +142,7 @@ public final class InventoryDataTaskSplitterTest {
     }
     private DumperConfiguration mockDumperConfig() {
-        DataSourceConfiguration dataSourceConfig = new JDBCDataSourceConfiguration(DATA_SOURCE_URL, USERNAME, PASSWORD);
+        ScalingDataSourceConfiguration dataSourceConfig = new JDBCScalingDataSourceConfiguration(DATA_SOURCE_URL, USERNAME, PASSWORD);
         DumperConfiguration result = new DumperConfiguration();
         result.setDataSourceConfiguration(dataSourceConfig);
         Map<String, String> tableMap = new HashMap<>();
@@ -17,10 +17,10 @@
 package org.apache.shardingsphere.scaling.core.job.task.incremental;
-import org.apache.shardingsphere.scaling.core.config.DataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.ScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.config.DumperConfiguration;
 import org.apache.shardingsphere.scaling.core.config.ImporterConfiguration;
-import org.apache.shardingsphere.scaling.core.config.JDBCDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.config.ScalingContext;
 import org.apache.shardingsphere.scaling.core.config.ServerConfiguration;
 import org.apache.shardingsphere.scaling.core.fixture.FixtureNopManager;
@@ -71,12 +71,12 @@ public final class IncrementalDataScalingTaskTest {
     private ImporterConfiguration mockImporterConfiguration() {
         ImporterConfiguration result = new ImporterConfiguration();
-        result.setDataSourceConfiguration(new JDBCDataSourceConfiguration(DATA_SOURCE_URL, USERNAME, PASSWORD));
+        result.setDataSourceConfiguration(new JDBCScalingDataSourceConfiguration(DATA_SOURCE_URL, USERNAME, PASSWORD));
         return result;
     }
     private DumperConfiguration mockDumperConfig() {
-        DataSourceConfiguration dataSourceConfig = new JDBCDataSourceConfiguration(DATA_SOURCE_URL, USERNAME, PASSWORD);
+        ScalingDataSourceConfiguration dataSourceConfig = new JDBCScalingDataSourceConfiguration(DATA_SOURCE_URL, USERNAME, PASSWORD);
         DumperConfiguration result = new DumperConfiguration();
         result.setDataSourceName("ds0");
         result.setDataSourceConfiguration(dataSourceConfig);
@@ -17,11 +17,11 @@
 package org.apache.shardingsphere.scaling.core.job.task.inventory;
-import org.apache.shardingsphere.scaling.core.config.DataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.ScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.config.DumperConfiguration;
 import org.apache.shardingsphere.scaling.core.config.ImporterConfiguration;
 import org.apache.shardingsphere.scaling.core.config.InventoryDumperConfiguration;
-import org.apache.shardingsphere.scaling.core.config.JDBCDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.config.ScalingContext;
 import org.apache.shardingsphere.scaling.core.config.ServerConfiguration;
 import org.apache.shardingsphere.scaling.core.config.SyncConfiguration;
@@ -98,7 +98,7 @@ public final class InventoryDataScalingTaskTest {
     }
     private DumperConfiguration mockDumperConfig() {
-        DataSourceConfiguration dataSourceConfig = new JDBCDataSourceConfiguration(DATA_SOURCE_URL, USERNAME, PASSWORD);
+        ScalingDataSourceConfiguration dataSourceConfig = new JDBCScalingDataSourceConfiguration(DATA_SOURCE_URL, USERNAME, PASSWORD);
         DumperConfiguration result = new DumperConfiguration();
         result.setDataSourceConfiguration(dataSourceConfig);
         result.setPositionManager(new InventoryPositionManager<>(new PrimaryKeyPosition(1, 100)));
@@ -107,7 +107,7 @@ public final class InventoryDataScalingTaskTest {
     }
     private ImporterConfiguration mockImporterConfig() {
-        DataSourceConfiguration dataSourceConfig = new JDBCDataSourceConfiguration(DATA_SOURCE_URL, USERNAME, PASSWORD);
+        ScalingDataSourceConfiguration dataSourceConfig = new JDBCScalingDataSourceConfiguration(DATA_SOURCE_URL, USERNAME, PASSWORD);
         ImporterConfiguration result = new ImporterConfiguration();
         result.setDataSourceConfiguration(dataSourceConfig);
         return result;
@@ -20,7 +20,7 @@ package org.apache.shardingsphere.scaling.mysql;
 import lombok.Setter;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.shardingsphere.scaling.core.config.DumperConfiguration;
-import org.apache.shardingsphere.scaling.core.config.JDBCDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.constant.ScalingConstant;
 import org.apache.shardingsphere.scaling.core.datasource.DataSourceFactory;
 import org.apache.shardingsphere.scaling.core.execute.executor.AbstractShardingScalingExecutor;
@@ -70,7 +70,7 @@ public final class MySQLBinlogDumper extends AbstractShardingScalingExecutor<Bin
     public MySQLBinlogDumper(final DumperConfiguration dumperConfig, final Position binlogPosition) {
         this.binlogPosition = (BinlogPosition) binlogPosition;
-        if (!JDBCDataSourceConfiguration.class.equals(dumperConfig.getDataSourceConfiguration().getClass())) {
+        if (!JDBCScalingDataSourceConfiguration.class.equals(dumperConfig.getDataSourceConfiguration().getClass())) {
            throw new UnsupportedOperationException("MySQLBinlogDumper only support JDBCDataSourceConfiguration");
         }
         this.dumperConfig = dumperConfig;
@@ -84,7 +84,7 @@ public final class MySQLBinlogDumper extends AbstractShardingScalingExecutor<Bin
     }
     private void dump() {
-        JDBCDataSourceConfiguration jdbcDataSourceConfig = (JDBCDataSourceConfiguration) dumperConfig.getDataSourceConfiguration();
+        JDBCScalingDataSourceConfiguration jdbcDataSourceConfig = (JDBCScalingDataSourceConfiguration) dumperConfig.getDataSourceConfiguration();
         JdbcUri uri = new JdbcUri(jdbcDataSourceConfig.getJdbcUrl());
         MySQLClient client = new MySQLClient(new ConnectInfo(random.nextInt(), uri.getHostname(), uri.getPort(), jdbcDataSourceConfig.getUsername(), jdbcDataSourceConfig.getPassword()));
         client.connect();
@@ -18,7 +18,7 @@
 package org.apache.shardingsphere.scaling.mysql;
 import org.apache.shardingsphere.scaling.core.config.InventoryDumperConfiguration;
-import org.apache.shardingsphere.scaling.core.config.JDBCDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.datasource.DataSourceManager;
 import org.apache.shardingsphere.scaling.core.execute.executor.dumper.AbstractJDBCDumper;
 import org.apache.shardingsphere.scaling.core.metadata.JdbcUri;
@@ -38,7 +38,7 @@ public final class MySQLJdbcDumper extends AbstractJDBCDumper {
     public MySQLJdbcDumper(final InventoryDumperConfiguration inventoryDumperConfig, final DataSourceManager dataSourceManager) {
         super(inventoryDumperConfig, dataSourceManager);
-        JDBCDataSourceConfiguration jdbcDataSourceConfig = (JDBCDataSourceConfiguration) getInventoryDumperConfiguration().getDataSourceConfiguration();
+        JDBCScalingDataSourceConfiguration jdbcDataSourceConfig = (JDBCScalingDataSourceConfiguration) getInventoryDumperConfiguration().getDataSourceConfiguration();
         jdbcDataSourceConfig.setJdbcUrl(fixMySQLUrl(jdbcDataSourceConfig.getJdbcUrl()));
     }
@@ -20,7 +20,7 @@ package org.apache.shardingsphere.scaling.mysql;
 import lombok.SneakyThrows;
 import org.apache.commons.collections4.map.HashedMap;
 import org.apache.shardingsphere.scaling.core.config.DumperConfiguration;
-import org.apache.shardingsphere.scaling.core.config.JDBCDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.config.ScalingContext;
 import org.apache.shardingsphere.scaling.core.config.ServerConfiguration;
 import org.apache.shardingsphere.scaling.core.constant.ScalingConstant;
@@ -74,7 +74,7 @@ public final class MySQLBinlogDumperTest {
     private DumperConfiguration mockDumperConfiguration() {
         DumperConfiguration result = new DumperConfiguration();
-        result.setDataSourceConfiguration(new JDBCDataSourceConfiguration(URL, "root", "root"));
+        result.setDataSourceConfiguration(new JDBCScalingDataSourceConfiguration(URL, "root", "root"));
         Map<String, String> tableNameMap = new HashedMap<>(1);
         tableNameMap.put("t_order", "t_order");
         result.setTableNameMap(tableNameMap);
@@ -20,7 +20,7 @@ package org.apache.shardingsphere.scaling.mysql;
 import lombok.SneakyThrows;
 import org.apache.shardingsphere.scaling.core.config.DumperConfiguration;
 import org.apache.shardingsphere.scaling.core.config.InventoryDumperConfiguration;
-import org.apache.shardingsphere.scaling.core.config.JDBCDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.datasource.DataSourceManager;
 import org.junit.Before;
 import org.junit.Test;
@@ -62,7 +62,7 @@ public final class MySQLJdbcDumperTest {
     private DumperConfiguration mockDumperConfiguration() {
         DumperConfiguration result = new DumperConfiguration();
-        result.setDataSourceConfiguration(new JDBCDataSourceConfiguration("jdbc:h2:mem:test;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=MySQL", "root", "root"));
+        result.setDataSourceConfiguration(new JDBCScalingDataSourceConfiguration("jdbc:h2:mem:test;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=MySQL", "root", "root"));
         return result;
     }
@@ -19,7 +19,7 @@ package org.apache.shardingsphere.scaling.postgresql;
 import lombok.Setter;
 import org.apache.shardingsphere.scaling.core.config.DumperConfiguration;
-import org.apache.shardingsphere.scaling.core.config.JDBCDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.exception.SyncTaskExecuteException;
 import org.apache.shardingsphere.scaling.core.execute.executor.AbstractShardingScalingExecutor;
 import org.apache.shardingsphere.scaling.core.execute.executor.channel.Channel;
@@ -58,7 +58,7 @@ public final class PostgreSQLWalDumper extends AbstractShardingScalingExecutor<W
     public PostgreSQLWalDumper(final DumperConfiguration dumperConfig, final Position position) {
         walPosition = (WalPosition) position;
-        if (!JDBCDataSourceConfiguration.class.equals(dumperConfig.getDataSourceConfiguration().getClass())) {
+        if (!JDBCScalingDataSourceConfiguration.class.equals(dumperConfig.getDataSourceConfiguration().getClass())) {
             throw new UnsupportedOperationException("PostgreSQLWalDumper only support JDBCDataSourceConfiguration");
         }
         this.dumperConfig = dumperConfig;
@@ -73,7 +73,7 @@ public final class PostgreSQLWalDumper extends AbstractShardingScalingExecutor<W
     private void dump() {
         try {
-            Connection pgConnection = logicalReplication.createPgConnection((JDBCDataSourceConfiguration) dumperConfig.getDataSourceConfiguration());
+            Connection pgConnection = logicalReplication.createPgConnection((JDBCScalingDataSourceConfiguration) dumperConfig.getDataSourceConfiguration());
             DecodingPlugin decodingPlugin = new TestDecodingPlugin(pgConnection.unwrap(PgConnection.class).getTimestampUtils());
             PGReplicationStream stream = logicalReplication.createReplicationStream(pgConnection,
                     PostgreSQLPositionManager.SLOT_NAME, walPosition.getLogSequenceNumber());
@@ -17,7 +17,7 @@
 package org.apache.shardingsphere.scaling.postgresql.wal;
-import org.apache.shardingsphere.scaling.core.config.JDBCDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
 import org.postgresql.PGConnection;
 import org.postgresql.PGProperty;
 import org.postgresql.replication.LogSequenceNumber;
@@ -40,11 +40,11 @@ public final class LogicalReplication {
      * @return PostgreSQL connection
      * @throws SQLException sql exception
      */
-    public Connection createPgConnection(final JDBCDataSourceConfiguration jdbcDataSourceConfig) throws SQLException {
+    public Connection createPgConnection(final JDBCScalingDataSourceConfiguration jdbcDataSourceConfig) throws SQLException {
         return createConnection(jdbcDataSourceConfig);
     }
-    private Connection createConnection(final JDBCDataSourceConfiguration jdbcDataSourceConfig) throws SQLException {
+    private Connection createConnection(final JDBCScalingDataSourceConfiguration jdbcDataSourceConfig) throws SQLException {
         Properties props = new Properties();
         PGProperty.USER.set(props, jdbcDataSourceConfig.getUsername());
         PGProperty.PASSWORD.set(props, jdbcDataSourceConfig.getPassword());
@@ -18,7 +18,7 @@
 package org.apache.shardingsphere.scaling.postgresql.wal;
 import org.apache.shardingsphere.scaling.core.config.DumperConfiguration;
-import org.apache.shardingsphere.scaling.core.config.JDBCDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.constant.ScalingConstant;
 import org.apache.shardingsphere.scaling.core.datasource.DataSourceFactory;
 import org.apache.shardingsphere.scaling.core.execute.executor.record.Column;
@@ -58,7 +58,7 @@ public final class WalEventConverter {
      * @return record
      */
     public Record convert(final AbstractWalEvent event) {
-        JdbcUri uri = new JdbcUri(((JDBCDataSourceConfiguration) dumperConfig.getDataSourceConfiguration()).getJdbcUrl());
+        JdbcUri uri = new JdbcUri(((JDBCScalingDataSourceConfiguration) dumperConfig.getDataSourceConfiguration()).getJdbcUrl());
         if (filter(uri.getDatabase(), event)) {
             return createPlaceholderRecord(event);
         } else if (event instanceof WriteRowEvent) {
@@ -20,7 +20,7 @@ package org.apache.shardingsphere.scaling.postgresql;
 import lombok.SneakyThrows;
 import org.apache.shardingsphere.scaling.core.config.DumperConfiguration;
 import org.apache.shardingsphere.scaling.core.config.InventoryDumperConfiguration;
-import org.apache.shardingsphere.scaling.core.config.JDBCDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.datasource.DataSourceManager;
 import org.junit.Before;
 import org.junit.Test;
@@ -76,7 +76,7 @@ public final class PostgreSQLJdbcDumperTest {
     private DumperConfiguration mockDumperConfiguration() {
         DumperConfiguration result = new DumperConfiguration();
-        result.setDataSourceConfiguration(new JDBCDataSourceConfiguration("jdbc:h2:mem:test;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=PostgreSQL", "root", "root"));
+        result.setDataSourceConfiguration(new JDBCScalingDataSourceConfiguration("jdbc:h2:mem:test;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=PostgreSQL", "root", "root"));
         return result;
     }
 }
@@ -18,7 +18,7 @@
 package org.apache.shardingsphere.scaling.postgresql;
 import org.apache.shardingsphere.scaling.core.config.DumperConfiguration;
-import org.apache.shardingsphere.scaling.core.config.JDBCDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.config.ScalingContext;
 import org.apache.shardingsphere.scaling.core.config.ServerConfiguration;
 import org.apache.shardingsphere.scaling.core.exception.SyncTaskExecuteException;
@@ -58,7 +58,7 @@ public final class PostgreSQLWalDumperTest {
     private PostgreSQLWalDumper postgreSQLWalDumper;
-    private JDBCDataSourceConfiguration jdbcDataSourceConfig;
+    private JDBCScalingDataSourceConfiguration jdbcDataSourceConfig;
     private MemoryChannel channel;
@@ -73,7 +73,7 @@ public final class PostgreSQLWalDumperTest {
     }
     private DumperConfiguration mockDumperConfiguration() {
-        jdbcDataSourceConfig = new JDBCDataSourceConfiguration("jdbc:h2:mem:test;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=PostgreSQL", "root", "root");
+        jdbcDataSourceConfig = new JDBCScalingDataSourceConfiguration("jdbc:h2:mem:test;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=PostgreSQL", "root", "root");
         DumperConfiguration result = new DumperConfiguration();
         result.setDataSourceConfiguration(jdbcDataSourceConfig);
         return result;
@@ -18,7 +18,7 @@
 package org.apache.shardingsphere.scaling.postgresql.wal;
 import lombok.SneakyThrows;
-import org.apache.shardingsphere.scaling.core.config.JDBCDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -65,7 +65,7 @@ public final class LogicalReplicationTest {
     @Test
     public void assertCreatePgConnectionSuccess() throws SQLException {
         Connection pgConnection = logicalReplication.createPgConnection(
-                new JDBCDataSourceConfiguration("jdbc:h2:mem:test;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=PostgreSQL", "root", "root"));
+                new JDBCScalingDataSourceConfiguration("jdbc:h2:mem:test;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=PostgreSQL", "root", "root"));
         assertFalse(pgConnection.isClosed());
     }
@@ -21,7 +21,7 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import lombok.SneakyThrows;
 import org.apache.shardingsphere.scaling.core.config.DumperConfiguration;
-import org.apache.shardingsphere.scaling.core.config.JDBCDataSourceConfiguration;
+import org.apache.shardingsphere.scaling.core.config.JDBCScalingDataSourceConfiguration;
 import org.apache.shardingsphere.scaling.core.constant.ScalingConstant;
 import org.apache.shardingsphere.scaling.core.datasource.DataSourceManager;
 import org.apache.shardingsphere.scaling.core.execute.executor.record.DataRecord;
@@ -58,7 +58,7 @@ public final class WalEventConverterTest {
     private DumperConfiguration mockDumperConfiguration() {
         DumperConfiguration result = new DumperConfiguration();
-        result.setDataSourceConfiguration(new JDBCDataSourceConfiguration("jdbc:h2:mem:test;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=PostgreSQL", "root", "root"));
+        result.setDataSourceConfiguration(new JDBCScalingDataSourceConfiguration("jdbc:h2:mem:test;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=PostgreSQL", "root", "root"));
         Map<String, String> tableNameMap = Maps.newHashMap();
         tableNameMap.put("t_order", "t_order");
         result.setTableNameMap(tableNameMap);