Unverified commit c74dd2f5, authored by 杨翊 SionYang, committed by GitHub

Prepare merge into master (#4516)

* Replace log4j with logback

* For spelling

* Uppercase JDBC in comments and class names

* mysql --> MySQL in comments and class names

* For checkstyle

* Remove unused file
Parent fa1f1b58
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
{
"ruleConfiguration": {
"dataSources": [
{
"name": "ds_0",
"password": "123456",
"url": "jdbc:mysql://127.0.0.1:3306/test?serverTimezone=UTC&useSSL=false",
"username": "root"
}
],
"destinationDataSources": {
"name": "dt_0",
"password": "123456",
"url": "jdbc:mysql://127.0.0.1:3306/test2?serverTimezone=UTC&useSSL=false",
"username": "root"
}
},
"jobConfiguration": {
"concurrency": 3
}
}
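The JSON above is the sample scaling-job configuration bundled with the module: ds_0 is the source MySQL data source, dt_0 the destination, and jobConfiguration.concurrency caps the number of parallel sync tasks. A minimal loading sketch, assuming this file maps onto the ScalingConfiguration class referenced by SyncConfigurationUtil later in this diff and that a JSON binder such as Gson is available (both are assumptions, for illustration only):

import com.google.gson.Gson;
import org.apache.shardingsphere.shardingscaling.core.config.ScalingConfiguration;

import java.io.FileReader;
import java.io.Reader;

public final class ScalingConfigLoadSketch {

    public static void main(final String[] args) throws Exception {
        // Hypothetical local copy of the JSON shown above.
        try (Reader reader = new FileReader("scaling-job.json")) {
            ScalingConfiguration configuration = new Gson().fromJson(reader, ScalingConfiguration.class);
            // Same accessors SyncConfigurationUtil uses further down in this diff.
            System.out.println(configuration.getRuleConfiguration().getDestinationDataSources().getUrl());
        }
    }
}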
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
log4j.rootLogger=INFO, root
log4j.appender.root=org.apache.log4j.ConsoleAppender
log4j.appender.root.layout=org.apache.log4j.PatternLayout
log4j.appender.root.layout.ConversionPattern=%d [%t] %p %l - %m%n
<?xml version="1.0"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<configuration>
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>[%-5level] %d{HH:mm:ss.SSS} [%thread] %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<logger name="org.apache.shardingsphere" level="info" additivity="false">
<appender-ref ref="console"/>
</logger>
<root>
<level value="info" />
<appender-ref ref="console" />
</root>
</configuration>
......@@ -60,14 +60,9 @@
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.29</version>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
......
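With log4j and slf4j-log4j12 swapped for logback-classic at runtime scope, application code keeps logging through the SLF4J API; Lombok's @Slf4j, used on Bootstrap and the executors below, expands to the same pattern. A minimal sketch, assuming only slf4j-api and logback-classic on the classpath:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public final class LoggingSketch {

    // What Lombok's @Slf4j generates: an SLF4J logger, bound to logback at runtime.
    private static final Logger log = LoggerFactory.getLogger(LoggingSketch.class);

    public static void main(final String[] args) {
        // Formatted by the console appender pattern defined in logback.xml above.
        log.info("ShardingScaling logging now goes through logback");
    }
}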
......@@ -28,7 +28,6 @@ import io.netty.handler.logging.LogLevel;
import io.netty.handler.logging.LoggingHandler;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.log4j.PropertyConfigurator;
import org.apache.shardingsphere.shardingscaling.core.config.ScalingContext;
import org.apache.shardingsphere.shardingscaling.core.config.ServerConfiguration;
import org.apache.shardingsphere.shardingscaling.core.web.HttpServerInitializer;
......@@ -43,12 +42,10 @@ import java.io.IOException;
*/
@Slf4j
public class Bootstrap {
private static final String SERVER_CONFIG_FILE = "/conf/server.yaml";
static {
PropertyConfigurator.configure(RuntimeUtil.getBasePath() + "conf" + File.separator + "log4j.properties");
}
private static final String DEFAULT_CONFIG_PATH = "/conf/";
private static final String DEFAULT_CONFIG_FILE_NAME = "server.yaml";
/**
* Main entry.
......@@ -72,7 +69,7 @@ public class Bootstrap {
.childHandler(new HttpServerInitializer());
int port = ScalingContext.getInstance().getServerConfiguration().getPort();
Channel channel = bootstrap.bind(port).sync().channel();
log.info("Shardingscaling is server on http://127.0.0.1:" + port + '/');
log.info("ShardingScaling is server on http://127.0.0.1:" + port + '/');
channel.closeFuture().sync();
} finally {
bossGroup.shutdownGracefully();
......@@ -81,7 +78,7 @@ public class Bootstrap {
}
private static void initServerConfig() throws IOException {
File yamlFile = new File(Bootstrap.class.getResource(SERVER_CONFIG_FILE).getFile());
File yamlFile = new File(RuntimeUtil.getResourcePath(DEFAULT_CONFIG_PATH + DEFAULT_CONFIG_FILE_NAME));
ServerConfiguration serverConfiguration = YamlEngine.unmarshal(yamlFile, ServerConfiguration.class);
Preconditions.checkNotNull(serverConfiguration, "Server configuration file `%s` is invalid.", yamlFile.getName());
ScalingContext.getInstance().init(serverConfiguration);
......
......@@ -26,9 +26,11 @@ public final class RuntimeUtil {
/**
* Get runtime classpath.
*
* @param relativePath relative resource path
* @return classpath
*/
public static String getBasePath() {
return Bootstrap.class.getResource("/").getFile();
public static String getResourcePath(final String relativePath) {
return Bootstrap.class.getResource(relativePath).getFile();
}
}
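Bootstrap now builds the config path from DEFAULT_CONFIG_PATH and DEFAULT_CONFIG_FILE_NAME and resolves it through the reworked RuntimeUtil.getResourcePath instead of the old log4j-oriented getBasePath. A sketch of the resulting loading flow; the YamlEngine and RuntimeUtil package names are assumed, since they are not visible in this hunk:

import org.apache.shardingsphere.core.yaml.engine.YamlEngine; // package assumed
import org.apache.shardingsphere.shardingscaling.core.config.ServerConfiguration;
import org.apache.shardingsphere.shardingscaling.utils.RuntimeUtil; // package assumed

import java.io.File;
import java.io.IOException;

public final class ServerConfigLoadSketch {

    private static final String DEFAULT_CONFIG_PATH = "/conf/";

    private static final String DEFAULT_CONFIG_FILE_NAME = "server.yaml";

    public static ServerConfiguration load() throws IOException {
        // getResourcePath delegates to Bootstrap.class.getResource(relativePath).getFile().
        File yamlFile = new File(RuntimeUtil.getResourcePath(DEFAULT_CONFIG_PATH + DEFAULT_CONFIG_FILE_NAME));
        return YamlEngine.unmarshal(yamlFile, ServerConfiguration.class);
    }
}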
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
{
"ruleConfiguration": {
"sourceDatasource": "ds_0: !!org.apache.shardingsphere.orchestration.yaml.config.YamlDataSourceConfiguration\n dataSourceClassName: com.zaxxer.hikari.HikariDataSource\n properties:\n jdbcUrl: jdbc:mysql://127.0.0.1:3306/test?serverTimezone=UTC&useSSL=false\n username: root\n password: '123456'\n connectionTimeout: 30000\n idleTimeout: 60000\n maxLifetime: 1800000\n maxPoolSize: 50\n minPoolSize: 1\n maintenanceIntervalMilliseconds: 30000\n readOnly: false\n",
"sourceRule": "defaultDatabaseStrategy:\n inline:\n algorithmExpression: ds_${user_id % 2}\n shardingColumn: user_id\ntables:\n t1:\n actualDataNodes: ds_0.t1\n keyGenerator:\n column: order_id\n type: SNOWFLAKE\n logicTable: t1\n tableStrategy:\n inline:\n algorithmExpression: t1\n shardingColumn: order_id\n t2:\n actualDataNodes: ds_0.t2\n keyGenerator:\n column: order_item_id\n type: SNOWFLAKE\n logicTable: t2\n tableStrategy:\n inline:\n algorithmExpression: t2\n shardingColumn: order_id\n",
"destinationDataSources": {
"name": "dt_0",
"password": "123456",
"url": "jdbc:mysql://127.0.0.1:3306/test2?serverTimezone=UTC&useSSL=false",
"username": "root"
}
},
"jobConfiguration": {
"concurrency": 3
}
}
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
log4j.rootLogger=INFO, root
log4j.appender.root=org.apache.log4j.ConsoleAppender
log4j.appender.root.layout=org.apache.log4j.PatternLayout
log4j.appender.root.layout.ConversionPattern=%d [%t] %p %l - %m%n
<?xml version="1.0"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<configuration>
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>[%-5level] %d{HH:mm:ss.SSS} [%thread] %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<logger name="org.apache.shardingsphere" level="info" additivity="false">
<appender-ref ref="console"/>
</logger>
<root>
<level value="info" />
<appender-ref ref="console" />
</root>
</configuration>
......@@ -26,12 +26,12 @@ import lombok.Getter;
import lombok.Setter;
/**
* Jdbc data source configuration.
* JDBC data source configuration.
*/
@Getter
@Setter
@EqualsAndHashCode(exclude = {"databaseType"})
public final class JdbcDataSourceConfiguration implements DataSourceConfiguration {
public final class JDBCDataSourceConfiguration implements DataSourceConfiguration {
private String jdbcUrl;
......@@ -41,7 +41,7 @@ public final class JdbcDataSourceConfiguration implements DataSourceConfiguratio
private DatabaseType databaseType;
public JdbcDataSourceConfiguration(final String jdbcUrl, final String username, final String password) {
public JDBCDataSourceConfiguration(final String jdbcUrl, final String username, final String password) {
this.jdbcUrl = jdbcUrl;
this.username = username;
this.password = password;
......
......@@ -88,9 +88,9 @@ public class ScalingJobController {
}
/**
* List all shardingscaling jobs.
* List all sharding scaling jobs.
*
* @return list of shardingscaling jobs
* @return list of sharding scaling jobs
*/
public List<ShardingScalingJob> listShardingScalingJobs() {
return new LinkedList<>(scalingJobMap.values());
......
......@@ -19,7 +19,7 @@ package org.apache.shardingsphere.shardingscaling.core.datasource;
import com.zaxxer.hikari.HikariDataSource;
import org.apache.shardingsphere.shardingscaling.core.config.DataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.JdbcDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.JDBCDataSourceConfiguration;
import javax.sql.DataSource;
......@@ -35,13 +35,13 @@ public final class DataSourceFactory {
* @return new data source
*/
public DataSource newInstance(final DataSourceConfiguration dataSourceConfiguration) {
if (dataSourceConfiguration instanceof JdbcDataSourceConfiguration) {
return newInstanceDataSourceByJDBC((JdbcDataSourceConfiguration) dataSourceConfiguration);
if (dataSourceConfiguration instanceof JDBCDataSourceConfiguration) {
return newInstanceDataSourceByJDBC((JDBCDataSourceConfiguration) dataSourceConfiguration);
}
throw new UnsupportedOperationException("Unsupported data source configuration");
}
private DataSource newInstanceDataSourceByJDBC(final JdbcDataSourceConfiguration dataSourceConfiguration) {
private DataSource newInstanceDataSourceByJDBC(final JDBCDataSourceConfiguration dataSourceConfiguration) {
HikariDataSource result = new HikariDataSource();
result.setJdbcUrl(dataSourceConfiguration.getJdbcUrl());
result.setUsername(dataSourceConfiguration.getUsername());
......
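DataSourceFactory now dispatches on the renamed JDBCDataSourceConfiguration and backs it with a HikariCP pool built from the configured URL, username, and password. A usage sketch with placeholder connection details:

import org.apache.shardingsphere.shardingscaling.core.config.JDBCDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.datasource.DataSourceFactory;

import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;

public final class DataSourceFactorySketch {

    public static void main(final String[] args) throws SQLException {
        JDBCDataSourceConfiguration configuration = new JDBCDataSourceConfiguration(
                "jdbc:mysql://127.0.0.1:3306/test?serverTimezone=UTC&useSSL=false", "root", "123456");
        // Other DataSourceConfiguration types make newInstance throw UnsupportedOperationException.
        DataSource dataSource = new DataSourceFactory().newInstance(configuration);
        try (Connection connection = dataSource.getConnection()) {
            System.out.println(connection.getMetaData().getURL());
        }
    }
}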
......@@ -19,7 +19,7 @@ package org.apache.shardingsphere.shardingscaling.core.execute.executor.position
/**
* Database itself data synchronize position manager.
* Such as mysql binlog, postgresql wal.
* Such as MySQL binlog, PostgreSQL WAL.
*/
public interface LogPositionManager<T extends LogPosition> {
......
......@@ -21,7 +21,7 @@ import lombok.AccessLevel;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.apache.shardingsphere.shardingscaling.core.config.JdbcDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.JDBCDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.RdbmsConfiguration;
import org.apache.shardingsphere.shardingscaling.core.exception.SyncTaskExecuteException;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.AbstractSyncExecutor;
......@@ -42,10 +42,10 @@ import java.sql.ResultSetMetaData;
import java.sql.SQLException;
/**
* generic jdbc reader implement.
* Abstract JDBC reader implementation.
*/
@Slf4j
public abstract class AbstractJdbcReader extends AbstractSyncExecutor implements JdbcReader {
public abstract class AbstractJDBCReader extends AbstractSyncExecutor implements JDBCReader {
@Getter(AccessLevel.PROTECTED)
private final RdbmsConfiguration rdbmsConfiguration;
......@@ -57,9 +57,9 @@ public abstract class AbstractJdbcReader extends AbstractSyncExecutor implements
@Setter
private Channel channel;
public AbstractJdbcReader(final RdbmsConfiguration rdbmsConfiguration, final DataSourceManager dataSourceManager) {
if (!JdbcDataSourceConfiguration.class.equals(rdbmsConfiguration.getDataSourceConfiguration().getClass())) {
throw new UnsupportedOperationException("AbstractJdbcReader only support JdbcDataSourceConfiguration");
public AbstractJDBCReader(final RdbmsConfiguration rdbmsConfiguration, final DataSourceManager dataSourceManager) {
if (!JDBCDataSourceConfiguration.class.equals(rdbmsConfiguration.getDataSourceConfiguration().getClass())) {
throw new UnsupportedOperationException("AbstractJDBCReader only support JDBCDataSourceConfiguration");
}
this.rdbmsConfiguration = rdbmsConfiguration;
this.dataSourceManager = dataSourceManager;
......
......@@ -20,6 +20,6 @@ package org.apache.shardingsphere.shardingscaling.core.execute.executor.reader;
/**
* JDBC reader.
*/
public interface JdbcReader extends Reader {
public interface JDBCReader extends Reader {
}
......@@ -37,7 +37,7 @@ public final class ReaderFactory {
* @return JDBC reader
*/
@SneakyThrows
public static JdbcReader newInstanceJdbcReader(final RdbmsConfiguration rdbmsConfiguration, final DataSourceManager dataSourceManager) {
public static JDBCReader newInstanceJdbcReader(final RdbmsConfiguration rdbmsConfiguration, final DataSourceManager dataSourceManager) {
return newInstanceJdbcReader(rdbmsConfiguration.getDataSourceConfiguration().getDatabaseType().getName(), rdbmsConfiguration, dataSourceManager);
}
......@@ -50,7 +50,7 @@ public final class ReaderFactory {
* @return JDBC reader
*/
@SneakyThrows
public static JdbcReader newInstanceJdbcReader(final String databaseType, final RdbmsConfiguration rdbmsConfiguration, final DataSourceManager dataSourceManager) {
public static JDBCReader newInstanceJdbcReader(final String databaseType, final RdbmsConfiguration rdbmsConfiguration, final DataSourceManager dataSourceManager) {
ScalingEntry scalingEntry = ScalingEntryLoader.getScalingEntryByDatabaseType(databaseType);
return scalingEntry.getJdbcReaderClass().getConstructor(RdbmsConfiguration.class, DataSourceManager.class).newInstance(rdbmsConfiguration, dataSourceManager);
}
......
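ReaderFactory still selects the concrete reader through the ScalingEntry SPI keyed by database type, but now returns the renamed JDBCReader interface. A hedged wiring sketch; the ReaderFactory package and a no-argument DataSourceManager constructor are assumptions, since neither is visible in this diff:

import org.apache.shardingsphere.shardingscaling.core.config.JDBCDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.RdbmsConfiguration;
import org.apache.shardingsphere.shardingscaling.core.datasource.DataSourceManager;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.reader.JDBCReader;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.reader.ReaderFactory; // package assumed

public final class ReaderFactorySketch {

    public static JDBCReader createReader() {
        RdbmsConfiguration rdbmsConfiguration = new RdbmsConfiguration();
        rdbmsConfiguration.setDataSourceConfiguration(new JDBCDataSourceConfiguration(
                "jdbc:mysql://127.0.0.1:3306/test?serverTimezone=UTC&useSSL=false", "root", "123456"));
        rdbmsConfiguration.setTableName("t_order");
        // Database type comes from the data source configuration; the ScalingEntry SPI picks the reader class.
        // A no-arg DataSourceManager is assumed here; the tests above inject a mock instead.
        return ReaderFactory.newInstanceJdbcReader(rdbmsConfiguration, new DataSourceManager());
    }
}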
......@@ -39,10 +39,10 @@ import java.util.ArrayList;
import java.util.List;
/**
* generic jdbc writer implement.
* Abstract JDBC writer implementation.
*/
@Slf4j
public abstract class AbstractJdbcWriter extends AbstractSyncExecutor implements Writer {
public abstract class AbstractJDBCWriter extends AbstractSyncExecutor implements Writer {
private final RdbmsConfiguration rdbmsConfiguration;
......@@ -53,7 +53,7 @@ public abstract class AbstractJdbcWriter extends AbstractSyncExecutor implements
@Setter
private Channel channel;
public AbstractJdbcWriter(final RdbmsConfiguration rdbmsConfiguration, final DataSourceManager dataSourceManager) {
public AbstractJDBCWriter(final RdbmsConfiguration rdbmsConfiguration, final DataSourceManager dataSourceManager) {
this.rdbmsConfiguration = rdbmsConfiguration;
this.dataSourceManager = dataSourceManager;
sqlBuilder = createSqlBuilder();
......
......@@ -18,7 +18,7 @@
package org.apache.shardingsphere.shardingscaling.core.spi;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.checker.DatasourceChecker;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.reader.JdbcReader;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.reader.JDBCReader;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.position.LogPositionManager;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.reader.LogReader;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.writer.Writer;
......@@ -34,7 +34,7 @@ public interface ScalingEntry extends DatabaseTypeAwareSPI {
*
* @return JDBC reader type
*/
Class<? extends JdbcReader> getJdbcReaderClass();
Class<? extends JDBCReader> getJdbcReaderClass();
/**
* Get log reader type.
......
......@@ -20,7 +20,7 @@ package org.apache.shardingsphere.shardingscaling.core.web.util;
import org.apache.shardingsphere.api.config.sharding.ShardingRuleConfiguration;
import org.apache.shardingsphere.core.rule.ShardingRule;
import org.apache.shardingsphere.core.rule.TableRule;
import org.apache.shardingsphere.shardingscaling.core.config.JdbcDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.JDBCDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.RdbmsConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.ScalingConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.SyncConfiguration;
......@@ -102,7 +102,7 @@ public class SyncConfigurationUtil {
private static RdbmsConfiguration createReaderConfiguration(final DataSourceConfiguration dataSourceConfiguration) {
RdbmsConfiguration result = new RdbmsConfiguration();
Map<String, Object> dataSourceProperties = dataSourceConfiguration.getProperties();
JdbcDataSourceConfiguration readerDataSourceConfiguration = new JdbcDataSourceConfiguration(
JDBCDataSourceConfiguration readerDataSourceConfiguration = new JDBCDataSourceConfiguration(
dataSourceProperties.containsKey("jdbcUrl") ? dataSourceProperties.get("jdbcUrl").toString() : dataSourceProperties.get("url").toString(),
dataSourceProperties.get("username").toString(), dataSourceProperties.get("password").toString());
result.setDataSourceConfiguration(readerDataSourceConfiguration);
......@@ -111,7 +111,7 @@ public class SyncConfigurationUtil {
private static RdbmsConfiguration createWriterConfiguration(final ScalingConfiguration scalingConfiguration) {
RdbmsConfiguration writerConfiguration = new RdbmsConfiguration();
JdbcDataSourceConfiguration writerDataSourceConfiguration = new JdbcDataSourceConfiguration(
JDBCDataSourceConfiguration writerDataSourceConfiguration = new JDBCDataSourceConfiguration(
scalingConfiguration.getRuleConfiguration().getDestinationDataSources().getUrl(),
scalingConfiguration.getRuleConfiguration().getDestinationDataSources().getUsername(),
scalingConfiguration.getRuleConfiguration().getDestinationDataSources().getPassword());
......
......@@ -43,7 +43,7 @@ import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public final class AbstractJdbcWriterTest {
public final class AbstractJDBCWriterTest {
private static final String TABLE_NAME = "test_table";
......@@ -74,11 +74,11 @@ public final class AbstractJdbcWriterTest {
@Mock
private PreparedStatement preparedStatement;
private AbstractJdbcWriter jdbcWriter;
private AbstractJDBCWriter jdbcWriter;
@Before
public void setUp() throws Exception {
jdbcWriter = new AbstractJdbcWriter(getRdbmsConfiguration(), dataSourceManager) {
jdbcWriter = new AbstractJDBCWriter(getRdbmsConfiguration(), dataSourceManager) {
@Override
protected AbstractSqlBuilder createSqlBuilder() {
......
......@@ -18,16 +18,16 @@
package org.apache.shardingsphere.shardingscaling.core.fixture;
import org.apache.shardingsphere.shardingscaling.core.config.RdbmsConfiguration;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.reader.AbstractJdbcReader;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.reader.AbstractJDBCReader;
import org.apache.shardingsphere.shardingscaling.core.datasource.DataSourceManager;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
public final class FixtureH2JdbcReader extends AbstractJdbcReader {
public final class FixtureH2JDBCReader extends AbstractJDBCReader {
public FixtureH2JdbcReader(final RdbmsConfiguration rdbmsConfiguration, final DataSourceManager dataSourceManager) {
public FixtureH2JDBCReader(final RdbmsConfiguration rdbmsConfiguration, final DataSourceManager dataSourceManager) {
super(rdbmsConfiguration, dataSourceManager);
}
......
......@@ -19,7 +19,7 @@ package org.apache.shardingsphere.shardingscaling.core.fixture;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.checker.DatasourceChecker;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.position.LogPositionManager;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.reader.JdbcReader;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.reader.JDBCReader;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.reader.LogReader;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.writer.Writer;
import org.apache.shardingsphere.shardingscaling.core.spi.ScalingEntry;
......@@ -27,8 +27,8 @@ import org.apache.shardingsphere.shardingscaling.core.spi.ScalingEntry;
public final class FixtureH2ScalingEntry implements ScalingEntry {
@Override
public Class<? extends JdbcReader> getJdbcReaderClass() {
return FixtureH2JdbcReader.class;
public Class<? extends JDBCReader> getJdbcReaderClass() {
return FixtureH2JDBCReader.class;
}
@Override
......
......@@ -18,7 +18,7 @@
package org.apache.shardingsphere.shardingscaling.core.synctask.history;
import org.apache.shardingsphere.shardingscaling.core.config.DataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.JdbcDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.JDBCDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.RdbmsConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.SyncConfiguration;
import org.apache.shardingsphere.shardingscaling.core.controller.task.ReportCallback;
......@@ -194,7 +194,7 @@ public class HistoryDataSyncTaskGroupTest {
}
private RdbmsConfiguration mockReaderConfig() {
DataSourceConfiguration dataSourceConfiguration = new JdbcDataSourceConfiguration(dataSourceUrl, userName, password);
DataSourceConfiguration dataSourceConfiguration = new JDBCDataSourceConfiguration(dataSourceUrl, userName, password);
RdbmsConfiguration result = new RdbmsConfiguration();
result.setDataSourceConfiguration(dataSourceConfiguration);
return result;
......
......@@ -19,7 +19,7 @@ package org.apache.shardingsphere.shardingscaling.core.synctask.history;
import lombok.SneakyThrows;
import org.apache.shardingsphere.shardingscaling.core.config.DataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.JdbcDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.JDBCDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.RdbmsConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.ScalingContext;
import org.apache.shardingsphere.shardingscaling.core.config.ServerConfiguration;
......@@ -91,7 +91,7 @@ public class HistoryDataSyncTaskTest {
}
private RdbmsConfiguration mockReaderConfig() {
DataSourceConfiguration dataSourceConfiguration = new JdbcDataSourceConfiguration(dataSourceUrl, userName, password);
DataSourceConfiguration dataSourceConfiguration = new JDBCDataSourceConfiguration(dataSourceUrl, userName, password);
RdbmsConfiguration result = new RdbmsConfiguration();
result.setDataSourceConfiguration(dataSourceConfiguration);
result.setTableName("t_order");
......@@ -99,7 +99,7 @@ public class HistoryDataSyncTaskTest {
}
private RdbmsConfiguration mockWriterConfig() {
DataSourceConfiguration dataSourceConfiguration = new JdbcDataSourceConfiguration(dataSourceUrl, userName, password);
DataSourceConfiguration dataSourceConfiguration = new JDBCDataSourceConfiguration(dataSourceUrl, userName, password);
RdbmsConfiguration result = new RdbmsConfiguration();
result.setDataSourceConfiguration(dataSourceConfiguration);
return result;
......
......@@ -17,7 +17,7 @@
package org.apache.shardingsphere.shardingscaling.mysql;
import org.apache.shardingsphere.shardingscaling.core.config.JdbcDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.JDBCDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.RdbmsConfiguration;
import org.apache.shardingsphere.shardingscaling.core.datasource.DataSourceFactory;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.AbstractSyncExecutor;
......@@ -61,8 +61,8 @@ public final class MySQLBinlogReader extends AbstractSyncExecutor implements Log
public MySQLBinlogReader(final RdbmsConfiguration rdbmsConfiguration, final LogPosition binlogPosition) {
this.binlogPosition = (BinlogPosition) binlogPosition;
if (!JdbcDataSourceConfiguration.class.equals(rdbmsConfiguration.getDataSourceConfiguration().getClass())) {
throw new UnsupportedOperationException("MySQLBinlogReader only support JdbcDataSourceConfiguration");
if (!JDBCDataSourceConfiguration.class.equals(rdbmsConfiguration.getDataSourceConfiguration().getClass())) {
throw new UnsupportedOperationException("MySQLBinlogReader only support JDBCDataSourceConfiguration");
}
this.rdbmsConfiguration = rdbmsConfiguration;
this.metaDataManager = new MetaDataManager(new DataSourceFactory().newInstance(rdbmsConfiguration.getDataSourceConfiguration()));
......@@ -76,7 +76,7 @@ public final class MySQLBinlogReader extends AbstractSyncExecutor implements Log
@Override
public void read(final Channel channel) {
JdbcDataSourceConfiguration jdbcDataSourceConfiguration = (JdbcDataSourceConfiguration) rdbmsConfiguration.getDataSourceConfiguration();
JDBCDataSourceConfiguration jdbcDataSourceConfiguration = (JDBCDataSourceConfiguration) rdbmsConfiguration.getDataSourceConfiguration();
final JdbcUri uri = new JdbcUri(jdbcDataSourceConfiguration.getJdbcUrl());
MySQLConnector client = new MySQLConnector(123456, uri.getHostname(), uri.getPort(), jdbcDataSourceConfiguration.getUsername(), jdbcDataSourceConfiguration.getPassword());
client.connect();
......
......@@ -17,9 +17,9 @@
package org.apache.shardingsphere.shardingscaling.mysql;
import org.apache.shardingsphere.shardingscaling.core.config.JdbcDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.JDBCDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.RdbmsConfiguration;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.reader.AbstractJdbcReader;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.reader.AbstractJDBCReader;
import org.apache.shardingsphere.shardingscaling.core.metadata.JdbcUri;
import org.apache.shardingsphere.shardingscaling.core.datasource.DataSourceManager;
......@@ -33,15 +33,15 @@ import java.util.Map;
/**
* MySQL JDBC Reader.
*/
public final class MySQLJdbcReader extends AbstractJdbcReader {
public final class MySQLJdbcReader extends AbstractJDBCReader {
public MySQLJdbcReader(final RdbmsConfiguration rdbmsConfiguration, final DataSourceManager dataSourceManager) {
super(rdbmsConfiguration, dataSourceManager);
JdbcDataSourceConfiguration jdbcDataSourceConfiguration = (JdbcDataSourceConfiguration) getRdbmsConfiguration().getDataSourceConfiguration();
jdbcDataSourceConfiguration.setJdbcUrl(fixMysqlUrl(jdbcDataSourceConfiguration.getJdbcUrl()));
JDBCDataSourceConfiguration jdbcDataSourceConfiguration = (JDBCDataSourceConfiguration) getRdbmsConfiguration().getDataSourceConfiguration();
jdbcDataSourceConfiguration.setJdbcUrl(fixMySQLUrl(jdbcDataSourceConfiguration.getJdbcUrl()));
}
private String formatMysqlParams(final Map<String, String> params) {
private String formatMySQLParams(final Map<String, String> params) {
StringBuilder result = new StringBuilder();
for (Map.Entry<String, String> entry : params.entrySet()) {
result.append(entry.getKey());
......@@ -54,22 +54,21 @@ public final class MySQLJdbcReader extends AbstractJdbcReader {
return result.toString();
}
private String fixMysqlUrl(final String url) {
private String fixMySQLUrl(final String url) {
JdbcUri uri = new JdbcUri(url);
return String.format("jdbc:%s://%s/%s?%s", uri.getScheme(), uri.getHost(), uri.getDatabase(), fixMysqlParams(uri.getParameters()));
return String.format("jdbc:%s://%s/%s?%s", uri.getScheme(), uri.getHost(), uri.getDatabase(), fixMySQLParams(uri.getParameters()));
}
private String fixMysqlParams(final Map<String, String> parameters) {
private String fixMySQLParams(final Map<String, String> parameters) {
if (!parameters.containsKey("yearIsDateType")) {
parameters.put("yearIsDateType", "false");
}
return formatMysqlParams(parameters);
return formatMySQLParams(parameters);
}
@Override
public Object readValue(final ResultSet resultSet, final int index) throws SQLException {
if (isDateTimeValue(resultSet.getMetaData().getColumnType(index))) {
// fix: jdbc Time objects represent a wall-clock time and not a duration as MySQL treats them
return resultSet.getString(index);
} else {
return resultSet.getObject(index);
......
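fixMySQLUrl ensures the source URL always carries yearIsDateType=false, presumably so the MySQL driver returns YEAR columns as numeric values instead of java.sql.Date during history data sync. A standalone illustration of that parameter handling (not the JdbcUri-based code above):

import java.util.LinkedHashMap;
import java.util.Map;

public final class MySQLUrlFixSketch {

    // Rough stand-in for fixMySQLUrl/fixMySQLParams: guarantee yearIsDateType=false is present.
    static String fixUrl(final String url) {
        int queryStart = url.indexOf('?');
        String base = queryStart < 0 ? url : url.substring(0, queryStart);
        Map<String, String> params = new LinkedHashMap<>();
        if (queryStart >= 0) {
            for (String pair : url.substring(queryStart + 1).split("&")) {
                String[] kv = pair.split("=", 2);
                params.put(kv[0], kv.length > 1 ? kv[1] : "");
            }
        }
        params.putIfAbsent("yearIsDateType", "false");
        StringBuilder result = new StringBuilder(base).append('?');
        params.forEach((key, value) -> result.append(key).append('=').append(value).append('&'));
        result.setLength(result.length() - 1);
        return result.toString();
    }

    public static void main(final String[] args) {
        // -> jdbc:mysql://127.0.0.1:3306/test?serverTimezone=UTC&useSSL=false&yearIsDateType=false
        System.out.println(fixUrl("jdbc:mysql://127.0.0.1:3306/test?serverTimezone=UTC&useSSL=false"));
    }
}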
......@@ -18,7 +18,7 @@
package org.apache.shardingsphere.shardingscaling.mysql;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.checker.DatasourceChecker;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.reader.JdbcReader;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.reader.JDBCReader;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.position.LogPositionManager;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.reader.LogReader;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.writer.Writer;
......@@ -30,7 +30,7 @@ import org.apache.shardingsphere.shardingscaling.core.spi.ScalingEntry;
public final class MySQLScalingEntry implements ScalingEntry {
@Override
public Class<? extends JdbcReader> getJdbcReaderClass() {
public Class<? extends JDBCReader> getJdbcReaderClass() {
return MySQLJdbcReader.class;
}
......
......@@ -18,14 +18,14 @@
package org.apache.shardingsphere.shardingscaling.mysql;
import org.apache.shardingsphere.shardingscaling.core.config.RdbmsConfiguration;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.writer.AbstractJdbcWriter;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.writer.AbstractJDBCWriter;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.writer.AbstractSqlBuilder;
import org.apache.shardingsphere.shardingscaling.core.datasource.DataSourceManager;
/**
* MySQL writer.
*/
public final class MySQLWriter extends AbstractJdbcWriter {
public final class MySQLWriter extends AbstractJDBCWriter {
public MySQLWriter(final RdbmsConfiguration rdbmsConfiguration, final DataSourceManager dataSourceManager) {
super(rdbmsConfiguration, dataSourceManager);
......
......@@ -18,7 +18,7 @@
package org.apache.shardingsphere.shardingscaling.mysql.binlog.packet.binlog;
/**
* Mysql event types.
* MySQL event types.
* https://dev.mysql.com/doc/internals/en/binlog-event-type.html
*/
public final class EventTypes {
......
......@@ -25,7 +25,7 @@ import lombok.Setter;
import java.nio.charset.StandardCharsets;
/**
* Mysql Query command packet.
* MySQL Query command packet.
*/
@Setter
public final class QueryCommandPacket extends AbstractCommandPacket {
......
......@@ -18,7 +18,7 @@
package org.apache.shardingsphere.shardingscaling.postgresql;
import org.apache.shardingsphere.shardingscaling.core.config.RdbmsConfiguration;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.reader.AbstractJdbcReader;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.reader.AbstractJDBCReader;
import org.apache.shardingsphere.shardingscaling.core.datasource.DataSourceManager;
import java.sql.Connection;
......@@ -29,7 +29,7 @@ import java.sql.SQLException;
/**
* PostgreSQL JDBC reader.
*/
public final class PostgreSQLJdbcReader extends AbstractJdbcReader {
public final class PostgreSQLJdbcReader extends AbstractJDBCReader {
public PostgreSQLJdbcReader(final RdbmsConfiguration rdbmsConfiguration, final DataSourceManager dataSourceManager) {
super(rdbmsConfiguration, dataSourceManager);
......
......@@ -19,8 +19,8 @@ package org.apache.shardingsphere.shardingscaling.postgresql;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.checker.DatasourceChecker;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.position.LogPositionManager;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.reader.JDBCReader;
import org.apache.shardingsphere.shardingscaling.core.spi.ScalingEntry;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.reader.JdbcReader;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.reader.LogReader;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.writer.Writer;
......@@ -30,7 +30,7 @@ import org.apache.shardingsphere.shardingscaling.core.execute.executor.writer.Wr
public final class PostgreSQLScalingEntry implements ScalingEntry {
@Override
public Class<? extends JdbcReader> getJdbcReaderClass() {
public Class<? extends JDBCReader> getJdbcReaderClass() {
return PostgreSQLJdbcReader.class;
}
......
......@@ -18,7 +18,8 @@
package org.apache.shardingsphere.shardingscaling.postgresql;
import lombok.Setter;
import org.apache.shardingsphere.shardingscaling.core.config.JdbcDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.JDBCDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.RdbmsConfiguration;
import org.apache.shardingsphere.shardingscaling.core.exception.SyncTaskExecuteException;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.AbstractSyncExecutor;
......@@ -57,8 +58,8 @@ public final class PostgreSQLWalReader extends AbstractSyncExecutor implements L
public PostgreSQLWalReader(final RdbmsConfiguration rdbmsConfiguration, final LogPosition logPosition) {
walPosition = (WalPosition) logPosition;
if (!JdbcDataSourceConfiguration.class.equals(rdbmsConfiguration.getDataSourceConfiguration().getClass())) {
throw new UnsupportedOperationException("PostgreSQLWalReader only support JdbcDataSourceConfiguration");
if (!JDBCDataSourceConfiguration.class.equals(rdbmsConfiguration.getDataSourceConfiguration().getClass())) {
throw new UnsupportedOperationException("PostgreSQLWalReader only support JDBCDataSourceConfiguration");
}
this.rdbmsConfiguration = rdbmsConfiguration;
walEventConverter = new WalEventConverter(rdbmsConfiguration);
......@@ -73,7 +74,7 @@ public final class PostgreSQLWalReader extends AbstractSyncExecutor implements L
@Override
public void read(final Channel channel) {
try {
PGConnection pgConnection = logicalReplication.createPgConnection((JdbcDataSourceConfiguration) rdbmsConfiguration.getDataSourceConfiguration());
PGConnection pgConnection = logicalReplication.createPgConnection((JDBCDataSourceConfiguration) rdbmsConfiguration.getDataSourceConfiguration());
PGReplicationStream stream = logicalReplication.createReplicationStream(pgConnection,
PostgreSQLLogPositionManager.SLOT_NAME, walPosition.getLogSequenceNumber());
while (isRunning()) {
......
......@@ -18,14 +18,14 @@
package org.apache.shardingsphere.shardingscaling.postgresql;
import org.apache.shardingsphere.shardingscaling.core.config.RdbmsConfiguration;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.writer.AbstractJdbcWriter;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.writer.AbstractJDBCWriter;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.writer.AbstractSqlBuilder;
import org.apache.shardingsphere.shardingscaling.core.datasource.DataSourceManager;
/**
* PostgreSQL writer.
*/
public final class PostgreSQLWriter extends AbstractJdbcWriter {
public final class PostgreSQLWriter extends AbstractJDBCWriter {
public PostgreSQLWriter(final RdbmsConfiguration rdbmsConfiguration, final DataSourceManager dataSourceManager) {
super(rdbmsConfiguration, dataSourceManager);
......
......@@ -17,7 +17,7 @@
package org.apache.shardingsphere.shardingscaling.postgresql.wal;
import org.apache.shardingsphere.shardingscaling.core.config.JdbcDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.JDBCDataSourceConfiguration;
import org.postgresql.PGConnection;
import org.postgresql.PGProperty;
import org.postgresql.replication.LogSequenceNumber;
......@@ -35,11 +35,11 @@ public final class LogicalReplication {
/**
* Create PostgreSQL connection.
*
* @param jdbcDataSourceConfiguration jdbc configuration
* @param jdbcDataSourceConfiguration JDBC configuration
* @return PostgreSQL connection
* @throws SQLException SQL exception
*/
public PGConnection createPgConnection(final JdbcDataSourceConfiguration jdbcDataSourceConfiguration) throws SQLException {
public PGConnection createPgConnection(final JDBCDataSourceConfiguration jdbcDataSourceConfiguration) throws SQLException {
return createConnection(jdbcDataSourceConfiguration);
}
......@@ -63,7 +63,7 @@ public final class LogicalReplication {
.start();
}
private PGConnection createConnection(final JdbcDataSourceConfiguration jdbcDataSourceConfiguration) throws SQLException {
private PGConnection createConnection(final JDBCDataSourceConfiguration jdbcDataSourceConfiguration) throws SQLException {
Properties props = new Properties();
PGProperty.USER.set(props, jdbcDataSourceConfiguration.getUsername());
PGProperty.PASSWORD.set(props, jdbcDataSourceConfiguration.getPassword());
......
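LogicalReplication opens a replication-capable PGConnection from the renamed JDBCDataSourceConfiguration before creating the replication stream on the slot owned by PostgreSQLLogPositionManager. A minimal connection sketch using the signature shown above; the no-argument LogicalReplication constructor is an assumption, and the stream call is omitted because its full parameter list is not in this hunk:

import org.apache.shardingsphere.shardingscaling.core.config.JDBCDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.postgresql.wal.LogicalReplication;
import org.postgresql.PGConnection;

import java.sql.SQLException;

public final class LogicalReplicationSketch {

    public static PGConnection connect() throws SQLException {
        JDBCDataSourceConfiguration configuration = new JDBCDataSourceConfiguration(
                "jdbc:postgresql://127.0.0.1:5432/test", "postgres", "postgres");
        // createPgConnection sets PGProperty.USER/PASSWORD and opens the connection (see hunk above).
        return new LogicalReplication().createPgConnection(configuration);
    }
}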
......@@ -17,7 +17,7 @@
package org.apache.shardingsphere.shardingscaling.postgresql.wal;
import org.apache.shardingsphere.shardingscaling.core.config.JdbcDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.JDBCDataSourceConfiguration;
import org.apache.shardingsphere.shardingscaling.core.config.RdbmsConfiguration;
import org.apache.shardingsphere.shardingscaling.core.datasource.DataSourceFactory;
import org.apache.shardingsphere.shardingscaling.core.execute.executor.record.Column;
......@@ -57,7 +57,7 @@ public final class WalEventConverter {
* @return record
*/
public Record convert(final AbstractWalEvent event) {
final JdbcUri uri = new JdbcUri(((JdbcDataSourceConfiguration) rdbmsConfiguration.getDataSourceConfiguration()).getJdbcUrl());
final JdbcUri uri = new JdbcUri(((JDBCDataSourceConfiguration) rdbmsConfiguration.getDataSourceConfiguration()).getJdbcUrl());
if (filter(uri.getDatabase(), event)) {
return createPlaceholderRecord(event);
} else if (event instanceof WriteRowEvent) {
......