Commit 9f60d3ab authored by loushang

Merge remote-tracking branch 'upstream/dev' into dev

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
name: SonarCloud
on: [push, pull_request]
jobs:
  sonarCloudTrigger:
    name: SonarCloud Trigger
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v1
      - uses: actions/setup-java@v1
        with:
          java-version: 8
      - uses: actions/cache@v1
        with:
          path: ~/.m2/repository
          key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}-sonarqube
          restore-keys: |
            ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}-sonarqube
            ${{ runner.os }}-maven-
      - name: Maven clean
        run: mvn clean
      - name: Run SonarCloud analysis
        run: >
          mvn clean --batch-mode
          org.jacoco:jacoco-maven-plugin:prepare-agent
          verify
          org.sonarsource.scanner.maven:sonar-maven-plugin:sonar
          -Dmaven.test.skip=true
          -Dsonar.host.url=https://sonarcloud.io
          -Dsonar.organization=apache
          -Dsonar.projectKey=apache-dolphinscheduler
          -Dsonar.login=e4058004bc6be89decf558ac819aa1ecbee57682
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
......@@ -49,6 +49,19 @@ jobs:
          export MAVEN_OPTS='-Dmaven.repo.local=.m2/repository -XX:+TieredCompilation -XX:TieredStopAtLevel=1 -XX:+CMSClassUnloadingEnabled -XX:+UseConcMarkSweepGC -XX:-UseGCOverheadLimit -Xmx3g'
          mvn test -Dmaven.test.skip=false cobertura:cobertura
          CODECOV_TOKEN="09c2663f-b091-4258-8a47-c981827eb29a" bash <(curl -s https://codecov.io/bash)
      - name: Run SonarCloud analysis
        run: >
          mvn clean --batch-mode
          org.jacoco:jacoco-maven-plugin:prepare-agent
          verify
          org.sonarsource.scanner.maven:sonar-maven-plugin:sonar
          -Dsonar.host.url=https://sonarcloud.io
          -Dsonar.organization=apache
          -Dsonar.projectKey=apache-dolphinscheduler
          -Dsonar.login=e4058004bc6be89decf558ac819aa1ecbee57682
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
      - name: Collect logs
        run: |
          mkdir -p ${LOG_DIR}
......
......@@ -330,11 +330,6 @@
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
......
......@@ -16,7 +16,7 @@
*/
package org.apache.dolphinscheduler.common.job.db;
import org.apache.commons.lang3.StringUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......
......@@ -16,8 +16,8 @@
*/
package org.apache.dolphinscheduler.common.job.db;
import org.apache.commons.lang3.StringUtils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......
......@@ -16,7 +16,7 @@
*/
package org.apache.dolphinscheduler.common.job.db;
import org.apache.commons.lang3.StringUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......@@ -29,9 +29,6 @@ public class HiveDataSource extends BaseDataSource {
private static final Logger logger = LoggerFactory.getLogger(HiveDataSource.class);
/**
* gets the JDBC url for the data source connection
* @return
......@@ -49,8 +46,6 @@ public class HiveDataSource extends BaseDataSource {
jdbcUrl += ";principal=" + getPrincipal();
}
if (StringUtils.isNotEmpty(getOther())) {
jdbcUrl += ";" + getOther();
}
......
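For context on the HiveDataSource hunk above: getJdbcUrl appends an optional Kerberos principal and any extra connection properties as semicolon-separated suffixes. The sketch below shows the resulting URL shape with hypothetical values only; the base address/database concatenation and the principal guard are assumptions, since those parts of the method are elided in this hunk.

    public class HiveJdbcUrlSketch {
        public static void main(String[] args) {
            // hypothetical values standing in for the HiveDataSource fields
            String address = "jdbc:hive2://192.168.1.1:10000"; // assumed base-URL shape
            String database = "default";
            String principal = "hive/_HOST@EXAMPLE.COM";
            String other = "hive.server2.proxy.user=dolphin";

            String jdbcUrl = address + "/" + database;        // assumed from the elided part of getJdbcUrl
            if (principal != null && !principal.isEmpty()) {  // principal guard assumed, not shown in the hunk
                jdbcUrl += ";principal=" + principal;
            }
            if (other != null && !other.isEmpty()) {          // mirrors StringUtils.isNotEmpty(getOther())
                jdbcUrl += ";" + other;
            }

            // prints: jdbc:hive2://192.168.1.1:10000/default;principal=hive/_HOST@EXAMPLE.COM;hive.server2.proxy.user=dolphin
            System.out.println(jdbcUrl);
        }
    }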
......@@ -16,7 +16,7 @@
*/
package org.apache.dolphinscheduler.common.job.db;
import org.apache.commons.lang3.StringUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......
......@@ -16,7 +16,7 @@
*/
package org.apache.dolphinscheduler.common.job.db;
import org.apache.commons.lang3.StringUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......
......@@ -16,7 +16,7 @@
*/
package org.apache.dolphinscheduler.common.job.db;
import org.apache.commons.lang3.StringUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......
......@@ -16,7 +16,7 @@
*/
package org.apache.dolphinscheduler.common.job.db;
import org.apache.commons.lang3.StringUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......
......@@ -16,7 +16,7 @@
*/
package org.apache.dolphinscheduler.common.job.db;
import org.apache.commons.lang3.StringUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......
......@@ -26,7 +26,7 @@ import com.alibaba.fastjson.JSONObject;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import org.apache.commons.lang3.StringUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import java.io.IOException;
import java.util.List;
......
......@@ -18,7 +18,6 @@ package org.apache.dolphinscheduler.common.utils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ResUploadType;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
......
......@@ -17,7 +17,6 @@
package org.apache.dolphinscheduler.common.utils;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.StringUtils;
/**
* encryption utils
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
public class EnumUtils {

    public static <E extends Enum<E>> E getEnum(final Class<E> enumClass, final String enumName) {
        if (enumName == null) {
            return null;
        }
        try {
            return Enum.valueOf(enumClass, enumName);
        } catch (final IllegalArgumentException ex) {
            return null;
        }
    }
}
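A minimal usage sketch for the new EnumUtils. A local stand-in enum is used so the snippet is self-contained; in the repository the callers that previously imported org.apache.commons.lang3.EnumUtils resolve project enums the same way. Like Enum.valueOf the lookup is case-sensitive, but an unknown or null name yields null instead of an IllegalArgumentException.

    // assumes the EnumUtils class above is on the classpath
    import org.apache.dolphinscheduler.common.utils.EnumUtils;

    public class EnumUtilsSketch {

        // stand-in enum for the example; project code would pass its own enums here
        enum Color { RED, GREEN }

        public static void main(String[] args) {
            System.out.println(EnumUtils.getEnum(Color.class, "RED"));     // RED
            System.out.println(EnumUtils.getEnum(Color.class, "red"));     // null (case-sensitive)
            System.out.println(EnumUtils.getEnum(Color.class, "PURPLE"));  // null instead of IllegalArgumentException
            System.out.println(EnumUtils.getEnum(Color.class, null));      // null
        }
    }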
......@@ -23,7 +23,6 @@ import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.FileSystem;
......
......@@ -19,7 +19,6 @@ package org.apache.dolphinscheduler.common.utils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.shell.ShellExecutor;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
......@@ -180,7 +179,7 @@ public class OSUtils {
private static List<String> getUserListFromMac() throws IOException {
String result = exeCmd("dscl . list /users");
if (StringUtils.isNotEmpty(result)) {
return Arrays.asList(StringUtils.split(result, "\n"));
return Arrays.asList(result.split( "\n"));
}
return Collections.emptyList();
......@@ -251,9 +250,8 @@ public class OSUtils {
*/
public static String getGroup() throws IOException {
String result = exeCmd("groups");
if (StringUtils.isNotEmpty(result)) {
String[] groupInfo = StringUtils.split(result);
String[] groupInfo = result.split(" ");
return groupInfo[0];
}
......
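A side note on the OSUtils hunk above: commons-lang3's StringUtils.split splits on runs of whitespace (or of the given separator characters) and drops empty tokens, while java.lang.String.split keeps the empty tokens produced by consecutive separators. For getGroup() this should not matter as long as the `groups` output does not start with a separator, but the difference is easy to miss; the sketch below, with made-up command output, illustrates it.

    import java.util.Arrays;

    public class SplitBehaviorSketch {
        public static void main(String[] args) {
            // made-up output of the `groups` command, with an accidental double space
            String result = "staff  wheel admin";

            // commons-lang3: StringUtils.split(result) -> [staff, wheel, admin]
            // (splits on whitespace runs, empty tokens are skipped)

            // java.lang.String: a single-space regex keeps the empty token
            System.out.println(Arrays.toString(result.split(" ")));
            // prints: [staff, , wheel, admin]
        }
    }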
......@@ -16,7 +16,6 @@
*/
package org.apache.dolphinscheduler.common.utils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import java.nio.charset.StandardCharsets;
import java.util.regex.Pattern;
public class StringUtils {

    public static final int INDEX_NOT_FOUND = -1;

    public static final String EMPTY = "";

    public static boolean isEmpty(final CharSequence cs) {
        return cs == null || cs.length() == 0;
    }

    public static boolean isNotEmpty(final CharSequence cs) {
        return !isEmpty(cs);
    }

    public static boolean isBlank(CharSequence cs) {
        int strLen;
        if (cs == null || (strLen = cs.length()) == 0) {
            return true;
        }
        for (int i = 0; i < strLen; i++) {
            if (!Character.isWhitespace(cs.charAt(i))) {
                return false;
            }
        }
        return true;
    }

    public static boolean isNotBlank(CharSequence str) {
        return !isBlank(str);
    }

    public static String substringBefore(final String str, final String separator) {
        if (isBlank(str) || separator == null) {
            return str;
        }
        if (separator.isEmpty()) {
            return EMPTY;
        }
        final int pos = str.indexOf(separator);
        if (pos == INDEX_NOT_FOUND) {
            return str;
        }
        return str.substring(0, pos);
    }

    public static String substringAfter(final String str, final String separator) {
        if (isBlank(str)) {
            return str;
        }
        if (separator == null) {
            return EMPTY;
        }
        final int pos = str.indexOf(separator);
        if (pos == INDEX_NOT_FOUND) {
            return EMPTY;
        }
        return str.substring(pos + separator.length());
    }

    public static String substringAfterLast(final String str, final String separator) {
        if (isEmpty(str)) {
            return str;
        }
        if (isEmpty(separator)) {
            return EMPTY;
        }
        final int pos = str.lastIndexOf(separator);
        if (pos == INDEX_NOT_FOUND || pos == str.length() - separator.length()) {
            return EMPTY;
        }
        return str.substring(pos + separator.length());
    }

    public static String getUtf8String(byte[] bytes) {
        return new String(bytes, StandardCharsets.UTF_8);
    }

    public static byte[] getUtf8Bytes(String str) {
        return str.getBytes(StandardCharsets.UTF_8);
    }

    public static boolean hasChinese(String str) {
        if (str == null) {
            return false;
        }
        Pattern pattern = Pattern.compile("[\\u4E00-\\u9FBF]+");
        return pattern.matcher(str).find();
    }

    public static boolean hasSpace(String str) {
        if (str == null) {
            return false;
        }
        int len = str.length();
        for (int i = 0; i < len; i++) {
            if (str.charAt(i) == ' ') {
                return true;
            }
        }
        return false;
    }
}
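A short usage sketch for the new in-house StringUtils, which covers the subset of org.apache.commons.lang3.StringUtils that these diffs swap over to (emptiness/blankness checks, substring helpers, UTF-8 helpers). Note that it has no split() method, which is why the OSUtils hunk above falls back to java.lang.String.split. The expected values in the comments follow directly from the implementation above.

    // assumes the StringUtils class above is on the classpath
    import org.apache.dolphinscheduler.common.utils.StringUtils;

    public class StringUtilsSketch {
        public static void main(String[] args) {
            System.out.println(StringUtils.isEmpty(""));                       // true
            System.out.println(StringUtils.isBlank("   "));                    // true
            System.out.println(StringUtils.isNotBlank(" x "));                 // true

            System.out.println(StringUtils.substringBefore("a.b.c", "."));     // a
            System.out.println(StringUtils.substringAfter("a.b.c", "."));      // b.c
            System.out.println(StringUtils.substringAfterLast("a.b.c", "."));  // c

            byte[] utf8 = StringUtils.getUtf8Bytes("调度");                    // UTF-8 bytes
            System.out.println(StringUtils.getUtf8String(utf8));               // 调度

            System.out.println(StringUtils.hasChinese("任务 task"));           // true
            System.out.println(StringUtils.hasSpace("no_space"));              // false
        }
    }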
......@@ -28,7 +28,6 @@ import org.apache.dolphinscheduler.common.task.shell.ShellParameters;
import org.apache.dolphinscheduler.common.task.spark.SparkParameters;
import org.apache.dolphinscheduler.common.task.sql.SqlParameters;
import org.apache.dolphinscheduler.common.task.subprocess.SubProcessParameters;
import org.apache.commons.lang3.EnumUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......
......@@ -28,7 +28,6 @@ import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.imps.CuratorFrameworkState;
import org.apache.curator.framework.recipes.locks.InterProcessMutex;
......@@ -39,6 +38,7 @@ import org.apache.dolphinscheduler.common.model.Server;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.common.utils.ResInfo;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.threadutils;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.thread.ThreadPoolExecutors;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Calendar;
import java.util.concurrent.*;
import static org.junit.Assert.*;
public class ThreadUtilsTest {

    private static final Logger logger = LoggerFactory.getLogger(ThreadUtilsTest.class);

    /**
     * test creating a named daemon fixed thread executor
     */
    @Test
    public void testNewDaemonFixedThreadExecutor() {
        // create a fixed pool whose core size and max size are both 10
        ExecutorService testExec = ThreadUtils.newDaemonFixedThreadExecutor("test-exec-thread", 10);
        for (int i = 0; i < 19; i++) {
            final int index = i;
            testExec.submit(() -> {
                System.out.println("do some work index " + index);
            });
        }
        assertFalse(testExec.isShutdown());
        testExec.shutdownNow();
        assertTrue(testExec.isShutdown());
    }

    /**
     * test the scheduled thread executor by printing the time in the scheduler;
     * the default check thread count is 1
     */
    @Test
    public void testNewDaemonScheduleThreadExecutor() {
        ScheduledExecutorService scheduleService = ThreadUtils.newDaemonThreadScheduledExecutor("scheduler-thread", 1);
        Calendar start = Calendar.getInstance();
        Calendar globalTimer = Calendar.getInstance();
        globalTimer.set(2019, Calendar.DECEMBER, 1, 0, 0, 0);
        // current
        Calendar end = Calendar.getInstance();
        end.set(2019, Calendar.DECEMBER, 1, 0, 0, 3);
        Runnable schedulerTask = new Runnable() {
            @Override
            public void run() {
                start.set(2019, Calendar.DECEMBER, 1, 0, 0, 0);
                int index = 0;
                // send heart beat work
                while (start.getTime().getTime() <= end.getTime().getTime()) {
                    System.out.println("worker here");
                    System.out.println(index++);
                    start.add(Calendar.SECOND, 1);
                    globalTimer.add(Calendar.SECOND, 1);
                }
                System.out.println("time is " + System.currentTimeMillis());
            }
        };
        scheduleService.scheduleAtFixedRate(schedulerTask, 2, 10, TimeUnit.SECONDS);
        assertFalse(scheduleService.isShutdown());
        try {
            Thread.sleep(60000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        scheduleService.shutdownNow();
        assertTrue(scheduleService.isShutdown());
    }

    /**
     * test that the stopper works normally
     */
    @Test
    public void testStopper() {
        assertTrue(Stopper.isRunning());
        Stopper.stop();
        assertTrue(Stopper.isStoped());
    }

    /**
     * test ThreadPoolExecutors with 3 workers, each loop iteration submitting 2 concurrent tasks
     * @throws InterruptedException
     */
    @Test
    public void testThreadInfo() throws InterruptedException {
        ThreadPoolExecutors workers = ThreadPoolExecutors.getInstance("worker", 3);
        for (int i = 0; i < 5; ++i) {
            int index = i;
            workers.execute(() -> {
                for (int j = 0; j < 10; ++j) {
                    try {
                        Thread.sleep(1000);
                        System.out.printf("worker %d is doing the task", index);
                        System.out.println();
                        workers.printStatus();
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
            });
            workers.submit(() -> {
                for (int j = 0; j < 10; ++j) {
                    try {
                        Thread.sleep(1000);
                        System.out.printf("worker_2 %d is doing the task", index);
                        System.out.println();
                        workers.printStatus();
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
            });
        }
        Thread.sleep(50001);
        workers.shutdown();
    }

    /**
     * test a single daemon thread pool
     */
    @Test
    public void testNewDaemonSingleThreadExecutor() {
        ExecutorService threadTest = ThreadUtils.newDaemonSingleThreadExecutor("thread_test");
        threadTest.execute(() -> {
            for (int i = 0; i < 100; ++i) {
                System.out.println("daemon working ");
            }
        });
        assertFalse(threadTest.isShutdown());
        threadTest.shutdownNow();
        assertTrue(threadTest.isShutdown());
    }

    @Test
    public void testNewDaemonCachedThreadPool() {
        ThreadPoolExecutor threadPoolExecutor = ThreadUtils.newDaemonCachedThreadPool("threadTest-");
        Thread thread1 = threadPoolExecutor.getThreadFactory().newThread(() -> {
            for (int i = 0; i < 10; ++i) {
                System.out.println("this task is with index " + i);
            }
        });
        assertTrue(thread1.getName().startsWith("threadTest-"));
        assertFalse(threadPoolExecutor.isShutdown());
        threadPoolExecutor.shutdown();
        assertTrue(threadPoolExecutor.isShutdown());
    }

    @Test
    public void testNewDaemonCachedThreadPoolWithThreadNumber() {
        ThreadPoolExecutor threadPoolExecutor = ThreadUtils.newDaemonCachedThreadPool("threadTest--", 3, 10);
        for (int i = 0; i < 10; ++i) {
            threadPoolExecutor.getThreadFactory().newThread(() -> {
                assertEquals(3, threadPoolExecutor.getActiveCount());
                System.out.println("this task is first work to do");
            });
        }
        assertFalse(threadPoolExecutor.isShutdown());
        threadPoolExecutor.shutdown();
        assertTrue(threadPoolExecutor.isShutdown());
    }
}
......@@ -16,8 +16,6 @@
*/
package org.apache.dolphinscheduler.common.utils;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.StringUtils;
import org.junit.Assert;
import org.junit.Test;
......
......@@ -162,12 +162,6 @@
<artifactId>spring-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.swagger</groupId>
<artifactId>swagger-annotations</artifactId>
<version>1.5.20</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
......
......@@ -22,8 +22,6 @@ import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.Date;
......@@ -33,7 +31,6 @@ import java.util.Date;
*/
@Data
@TableName("t_ds_user")
@ApiModel(description = "UserModelDesc")
public class User {
/**
......@@ -45,13 +42,11 @@ public class User {
/**
* user name
*/
@ApiModelProperty(name = "userName", notes = "USER_NAME",dataType = "String",required = true)
private String userName;
/**
* user password
*/
@ApiModelProperty(name = "userPassword", notes = "USER_PASSWORD",dataType = "String",required = true)
private String userPassword;
/**
......
......@@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.dao.mapper;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.DatasourceUser;
import com.baomidou.mybatisplus.core.metadata.IPage;
......@@ -27,88 +28,116 @@ import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.transaction.annotation.Transactional;
import java.util.Date;
import java.util.List;
import java.util.*;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
/**
* datasource mapper test
*/
@RunWith(SpringRunner.class)
@SpringBootTest
@Transactional
@Rollback(true)
public class DataSourceMapperTest {
/**
* datasource mapper
*/
@Autowired
DataSourceMapper dataSourceMapper;
/**
* datasource user relation mapper
*/
@Autowired
DataSourceUserMapper dataSourceUserMapper;
/**
* insert
* @return DataSource
* test insert
*/
private DataSource insertOne(){
//insertOne
DataSource dataSource = new DataSource();
dataSource.setUserId(4);
dataSource.setName("data source test");
dataSource.setType(DbType.MYSQL);
dataSource.setNote("mysql test");
dataSource.setConnectionParams("hello mysql");
dataSource.setUpdateTime(new Date());
dataSource.setCreateTime(new Date());
dataSourceMapper.insert(dataSource);
return dataSource;
@Test
public void testInsert(){
DataSource dataSource = createDataSource();
assertNotNull(dataSource.getId());
assertThat(dataSource.getId(), greaterThan(0));
}
/**
* test update
* test query
*/
@Test
public void testUpdate(){
//insertOne
DataSource dataSource = insertOne();
//update
dataSource.setUpdateTime(new Date());
int update = dataSourceMapper.updateById(dataSource);
Assert.assertEquals(update, 1);
dataSourceMapper.deleteById(dataSource.getId());
public void testSelectById() {
DataSource expectedDataSource = createDataSource();
DataSource actualDataSource = dataSourceMapper.selectById(expectedDataSource.getId());
assertEquals(expectedDataSource, actualDataSource);
}
/**
* test delete
* test query
*/
@Test
public void testDelete(){
public void testUpdate() {
DataSource expectedDataSource = createDataSource();
expectedDataSource.setName("modify " + expectedDataSource.getName());
expectedDataSource.setNote("modify " + expectedDataSource.getNote());
expectedDataSource.setUserId(2);
expectedDataSource.setType(DbType.HIVE);
expectedDataSource.setConnectionParams("modify " + expectedDataSource.getConnectionParams());
expectedDataSource.setUpdateTime(DateUtils.getCurrentDate());
DataSource dataSource = insertOne();
int delete = dataSourceMapper.deleteById(dataSource.getId());
Assert.assertEquals(delete, 1);
dataSourceMapper.updateById(expectedDataSource);
DataSource actualDataSource = dataSourceMapper.selectById(expectedDataSource.getId());
assertEquals(expectedDataSource, actualDataSource);
}
/**
* test query
* test delete
*/
@Test
public void testQuery() {
DataSource dataSource = insertOne();
//query
List<DataSource> dataSources = dataSourceMapper.selectList(null);
Assert.assertNotEquals(dataSources.size(), 0);
dataSourceMapper.deleteById(dataSource.getId());
public void testDelete(){
DataSource expectedDataSource = createDataSource();
dataSourceMapper.deleteById(expectedDataSource.getId());
DataSource actualDataSource = dataSourceMapper.selectById(expectedDataSource.getId());
assertNull(actualDataSource);
}
/**
* test query datasource by type
*/
@Test
public void testQueryDataSourceByType() {
DataSource dataSource = insertOne();
//query
List<DataSource> dataSources = dataSourceMapper.queryDataSourceByType(
0, DbType.MYSQL.ordinal()
);
Assert.assertNotEquals(dataSources.size(), 0);
dataSourceMapper.deleteById(dataSource.getId());
Integer userId = 1;
Map<Integer, DataSource> datasourceMap = createDataSourceMap(userId, "test");
List<DataSource> actualDataSources = dataSourceMapper.queryDataSourceByType(
0, DbType.MYSQL.ordinal());
assertThat(actualDataSources.size(), greaterThanOrEqualTo(2));
for (DataSource actualDataSource : actualDataSources){
DataSource expectedDataSource = datasourceMap.get(actualDataSource.getId());
if (expectedDataSource != null){
assertEquals(expectedDataSource,actualDataSource);
}
}
}
/**
......@@ -116,12 +145,23 @@ public class DataSourceMapperTest {
*/
@Test
public void testSelectPaging() {
DataSource dataSource = insertOne();
Page page = new Page(1, 3);
IPage<DataSource> dataSourceIPage = dataSourceMapper.selectPaging(page,
4, null);
Assert.assertNotEquals(dataSourceIPage.getTotal(), 0);
dataSourceMapper.deleteById(dataSource.getId());
String name = "test";
Integer userId = 1;
Map<Integer, DataSource> expectedDataSourceMap = createDataSourceMap(userId, name);
Page page = new Page(0, 4);
IPage<DataSource> dataSourceIPage = dataSourceMapper.selectPaging(page, userId, name);
List<DataSource> actualDataSources = dataSourceIPage.getRecords();
for (DataSource actualDataSource : actualDataSources){
DataSource expectedDataSource = expectedDataSourceMap.get(actualDataSource.getId());
if (expectedDataSource != null){
assertEquals(expectedDataSource,actualDataSource);
}
}
}
/**
......@@ -129,10 +169,17 @@ public class DataSourceMapperTest {
*/
@Test
public void testQueryDataSourceByName() {
DataSource dataSource = insertOne();
List<DataSource> dataSources = dataSourceMapper.queryDataSourceByName("data source test");
Assert.assertNotEquals(dataSources.size(), 0);
dataSourceMapper.deleteById(dataSource.getId());
String name = "test";
DataSource expectedDataSource = createDataSource(name);
List<DataSource> actualDataSources = dataSourceMapper.queryDataSourceByName(name);
for (DataSource actualDataSource : actualDataSources){
if (expectedDataSource.getId() == actualDataSource.getId()){
assertEquals(expectedDataSource,actualDataSource);
}
}
}
/**
......@@ -140,17 +187,20 @@ public class DataSourceMapperTest {
*/
@Test
public void testQueryAuthedDatasource() {
String name = "test";
Integer userId = 1;
DataSource dataSource = insertOne();
DatasourceUser datasourceUser = new DatasourceUser();
datasourceUser.setUserId(3);
datasourceUser.setDatasourceId(dataSource.getId());
dataSourceUserMapper.insert(datasourceUser);
Map<Integer, DataSource> expectedDataSourceMap = createDataSourceMap(userId, name);
List<DataSource> actualDataSources = dataSourceMapper.queryAuthedDatasource(userId);
for (DataSource actualDataSource : actualDataSources){
DataSource expectedDataSource = expectedDataSourceMap.get(actualDataSource.getId());
if (expectedDataSource != null){
assertEquals(expectedDataSource,actualDataSource);
}
}
List<DataSource> dataSources = dataSourceMapper.queryAuthedDatasource(3);
Assert.assertNotEquals(dataSources.size(), 0);
dataSourceMapper.deleteById(dataSource.getId());
dataSourceUserMapper.deleteById(datasourceUser.getId());
}
/**
......@@ -158,10 +208,19 @@ public class DataSourceMapperTest {
*/
@Test
public void testQueryDatasourceExceptUserId() {
DataSource dataSource = insertOne();
List<DataSource> dataSources = dataSourceMapper.queryDatasourceExceptUserId(3);
Assert.assertNotEquals(dataSources.size(), 0);
dataSourceMapper.deleteById(dataSource.getId());
String name = "test";
Integer userId = 1;
Map<Integer, DataSource> expectedDataSourceMap = createDataSourceMap(userId, name);
List<DataSource> actualDataSources = dataSourceMapper.queryDatasourceExceptUserId(userId);
for (DataSource actualDataSource : actualDataSources){
DataSource expectedDataSource = expectedDataSourceMap.get(actualDataSource.getId());
if (expectedDataSource != null){
assertEquals(expectedDataSource,actualDataSource);
}
}
}
/**
......@@ -169,13 +228,107 @@ public class DataSourceMapperTest {
*/
@Test
public void testListAllDataSourceByType() {
Integer count = 10;
Map<Integer, DataSource> expectedDataSourceMap = createDataSourceMap(count);
List<DataSource> actualDataSources = dataSourceMapper.listAllDataSourceByType(DbType.MYSQL.ordinal());
assertThat(actualDataSources.size(), greaterThanOrEqualTo(count));
for (DataSource actualDataSource : actualDataSources){
DataSource expectedDataSource = expectedDataSourceMap.get(actualDataSource.getId());
if (expectedDataSource != null){
assertEquals(expectedDataSource,actualDataSource);
}
}
}
/**
* create datasource relation
* @param userId
*/
private Map<Integer,DataSource> createDataSourceMap(Integer userId,String name){
Map<Integer,DataSource> dataSourceMap = new HashMap<>();
DataSource dataSource = createDataSource(userId, name);
dataSourceMap.put(dataSource.getId(),dataSource);
DataSource otherDataSource = createDataSource(userId + 1,name);
DatasourceUser datasourceUser = new DatasourceUser();
datasourceUser.setDatasourceId(otherDataSource.getId());
datasourceUser.setUserId(userId);
datasourceUser.setPerm(7);
datasourceUser.setCreateTime(DateUtils.getCurrentDate());
datasourceUser.setUpdateTime(DateUtils.getCurrentDate());
dataSourceUserMapper.insert(datasourceUser);
dataSourceMap.put(otherDataSource.getId(), otherDataSource);
return dataSourceMap;
}
/**
* create datasource map
* @param count datasource count
* @return datasource map
*/
private Map<Integer,DataSource> createDataSourceMap(Integer count){
Map<Integer,DataSource> dataSourceMap = new HashMap<>();
DataSource dataSource = insertOne();
for (int i = 0 ; i < count ;i++){
DataSource dataSource = createDataSource("test");
dataSourceMap.put(dataSource.getId(),dataSource);
}
List<DataSource> dataSources = dataSourceMapper.queryDataSourceByType(4, DbType.MYSQL.ordinal());
Assert.assertNotEquals(dataSources.size(), 0);
List<DataSource> dataSources2 = dataSourceMapper.queryDataSourceByType(10091, DbType.MYSQL.ordinal());
Assert.assertEquals(dataSources2.size(), 0);
dataSourceMapper.deleteById(dataSource.getId());
return dataSourceMap;
}
/**
* create datasource
* @return datasource
*/
private DataSource createDataSource(){
return createDataSource(1,"test");
}
/**
* create datasource
* @param name name
* @return datasource
*/
private DataSource createDataSource(String name){
return createDataSource(1,name);
}
/**
* create datasource
* @param userId userId
* @param name name
* @return datasource
*/
private DataSource createDataSource(Integer userId,String name){
Random random = new Random();
DataSource dataSource = new DataSource();
dataSource.setUserId(userId);
dataSource.setName(name);
dataSource.setType(DbType.MYSQL);
dataSource.setNote("mysql test");
dataSource.setConnectionParams("hello mysql");
dataSource.setUpdateTime(DateUtils.getCurrentDate());
dataSource.setCreateTime(DateUtils.getCurrentDate());
dataSourceMapper.insert(dataSource);
return dataSource;
}
}
\ No newline at end of file