Commit f26e2467 authored by simon

Using Jackson instead of Fastjson

Parent 48735a8b
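The hunks below replace Fastjson calls (`JSON.parseObject`, `new JSONObject()`, `getString`) with the project's Jackson-backed `JSONUtils` helpers (`parseObject`, `createObjectNode`, `path(...).asText()`, `toJsonString`). As a reference, here is a minimal sketch of that mapping using plain Jackson `ObjectMapper` so it stays self-contained; the actual code routes these calls through `org.apache.dolphinscheduler.common.utils.JSONUtils`, which is assumed to delegate to an `ObjectMapper` in roughly this way.

```java
// Sketch of the Fastjson -> Jackson replacement pattern applied in this commit.
// Plain Jackson is used here; the diff itself goes through the JSONUtils wrapper.
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class FastjsonToJacksonSketch {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    public static void main(String[] args) throws Exception {
        String json = "{\"errmsg\":\"ok\",\"clusterInfo\":{\"haState\":\"ACTIVE\"}}";

        // Fastjson: JSON.parseObject(json).getString("errmsg")
        // Jackson:  parse to a node tree, then navigate with path(...).asText()
        ObjectNode node = (ObjectNode) MAPPER.readTree(json);
        String errmsg = node.path("errmsg").asText();                        // "ok"
        String haState = node.path("clusterInfo").path("haState").asText();  // "ACTIVE"

        // Fastjson: new JSONObject(); put(key, value) for everything
        // Jackson:  createObjectNode(); put(...) for scalars, set(...) for nested
        //           nodes, putArray(...) for JSON arrays
        ObjectNode writerConn = MAPPER.createObjectNode();
        ArrayNode tables = writerConn.putArray("table");
        tables.add("person_3");
        writerConn.put("jdbcUrl", "jdbc:mysql://127.0.0.1:3306/test");

        ObjectNode root = MAPPER.createObjectNode();
        root.set("connection", writerConn);  // nested node: set, not put
        root.put("username", "kylo");        // scalar: put

        // Fastjson: JSON.toJSONString(obj) / JSON.parseObject(str, Clazz.class)
        // Jackson:  writeValueAsString(obj) / readValue(str, Clazz.class)
        String out = MAPPER.writeValueAsString(root);
        System.out.println(errmsg + " " + haState + " " + out);
    }
}
```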
......@@ -16,13 +16,10 @@
*/
package org.apache.dolphinscheduler.alert.utils;
import com.alibaba.fastjson.JSON;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
......@@ -67,7 +64,7 @@ public class DingTalkUtilsTest {
// logger.info(PropertyUtils.getString(Constants.DINGTALK_WEBHOOK));
// String rsp = DingTalkUtils.sendDingTalkMsg(msgTosend, Constants.UTF_8);
// logger.info("send msg result:{}",rsp);
// String errmsg = JSON.parseObject(rsp).getString("errmsg");
// String errmsg = JSONUtils.parseObject(rsp).getString("errmsg");
// Assert.assertEquals("ok", errmsg);
// } catch (Exception e) {
// e.printStackTrace();
......
......@@ -16,14 +16,12 @@
*/
package org.apache.dolphinscheduler.alert.utils;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.enums.AlertType;
import org.apache.dolphinscheduler.common.enums.ShowType;
import org.apache.dolphinscheduler.dao.entity.Alert;
import org.apache.dolphinscheduler.plugin.model.AlertData;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
......@@ -207,7 +205,7 @@ public class EnterpriseWeChatUtilsTest {
// String msg = EnterpriseWeChatUtils.makeTeamSendMsg(partyId, agentId, "hello world");
// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
//
// String errmsg = JSON.parseObject(resp).getString("errmsg");
// String errmsg = JSONUtils.parseObject(resp).getString("errmsg");
// Assert.assertEquals("ok",errmsg);
// } catch (IOException e) {
// e.printStackTrace();
......@@ -222,7 +220,7 @@ public class EnterpriseWeChatUtilsTest {
// String msg = EnterpriseWeChatUtils.makeTeamSendMsg(listPartyId, agentId, "hello world");
// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
//
// String errmsg = JSON.parseObject(resp).getString("errmsg");
// String errmsg = JSONUtils.parseObject(resp).getString("errmsg");
// Assert.assertEquals("ok",errmsg);
// } catch (IOException e) {
// e.printStackTrace();
......@@ -249,7 +247,7 @@ public class EnterpriseWeChatUtilsTest {
//
// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
//
// String errmsg = JSON.parseObject(resp).getString("errmsg");
// String errmsg = JSONUtils.parseObject(resp).getString("errmsg");
// Assert.assertEquals("ok",errmsg);
// } catch (IOException e) {
// e.printStackTrace();
......@@ -264,7 +262,7 @@ public class EnterpriseWeChatUtilsTest {
// String msg = EnterpriseWeChatUtils.makeUserSendMsg(listUserId, agentId, "hello world");
// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
//
// String errmsg = JSON.parseObject(resp).getString("errmsg");
// String errmsg = JSONUtils.parseObject(resp).getString("errmsg");
// Assert.assertEquals("ok",errmsg);
// } catch (IOException e) {
// e.printStackTrace();
......
package org.apache.dolphinscheduler.api.dto.resources;
import com.alibaba.fastjson.annotation.JSONField;
import com.alibaba.fastjson.annotation.JSONType;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import java.util.ArrayList;
......@@ -26,7 +25,7 @@ import java.util.List;
/**
* resource component
*/
@JSONType(orders={"id","pid","name","fullName","description","isDirctory","children","type"})
@JsonPropertyOrder({"id","pid","name","fullName","description","isDirctory","children","type"})
public abstract class ResourceComponent {
public ResourceComponent() {
}
......@@ -46,17 +45,14 @@ public abstract class ResourceComponent {
/**
* id
*/
@JSONField(ordinal = 1)
protected int id;
/**
* parent id
*/
@JSONField(ordinal = 2)
protected int pid;
/**
* name
*/
@JSONField(ordinal = 3)
protected String name;
/**
* current directory
......@@ -65,32 +61,26 @@ public abstract class ResourceComponent {
/**
* full name
*/
@JSONField(ordinal = 4)
protected String fullName;
/**
* description
*/
@JSONField(ordinal = 5)
protected String description;
/**
* is directory
*/
@JSONField(ordinal = 6)
protected boolean isDirctory;
/**
* id value
*/
@JSONField(ordinal = 7)
protected String idValue;
/**
* resoruce type
*/
@JSONField(ordinal = 8)
protected ResourceType type;
/**
* children
*/
@JSONField(ordinal = 8)
protected List<ResourceComponent> children = new ArrayList<>();
/**
......
......@@ -16,11 +16,9 @@
*/
package org.apache.dolphinscheduler.api.service;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.TypeReference;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.lang.StringUtils;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
......@@ -161,15 +159,15 @@ public class DataSourceService extends BaseService{
return result;
}
//check password,if the password is not updated, set to the old password.
JSONObject paramObject = JSON.parseObject(parameter);
String password = paramObject.getString(Constants.PASSWORD);
ObjectNode paramObject = JSONUtils.parseObject(parameter);
String password = paramObject.path(Constants.PASSWORD).asText();
if (StringUtils.isBlank(password)) {
String oldConnectionParams = dataSource.getConnectionParams();
JSONObject oldParams = JSON.parseObject(oldConnectionParams);
paramObject.put(Constants.PASSWORD, oldParams.getString(Constants.PASSWORD));
ObjectNode oldParams = JSONUtils.parseObject(oldConnectionParams);
paramObject.put(Constants.PASSWORD, oldParams.path(Constants.PASSWORD).asText());
}
// connectionParams json
String connectionParams = paramObject.toJSONString();
String connectionParams = JSONUtils.toJsonString(paramObject);
Boolean isConnection = checkConnection(type, connectionParams);
if (!isConnection) {
......@@ -315,7 +313,7 @@ public class DataSourceService extends BaseService{
for (DataSource dataSource : dataSourceList) {
String connectionParams = dataSource.getConnectionParams();
JSONObject object = JSON.parseObject(connectionParams);
ObjectNode object = JSONUtils.parseObject(connectionParams);
object.put(Constants.PASSWORD, Constants.XXXXXX);
dataSource.setConnectionParams(JSONUtils.toJsonString(object));
......@@ -379,11 +377,11 @@ public class DataSourceService extends BaseService{
try {
switch (dbType) {
case POSTGRESQL:
datasource = JSON.parseObject(parameter, PostgreDataSource.class);
datasource = JSONUtils.parseObject(parameter, PostgreDataSource.class);
Class.forName(Constants.ORG_POSTGRESQL_DRIVER);
break;
case MYSQL:
datasource = JSON.parseObject(parameter, MySQLDataSource.class);
datasource = JSONUtils.parseObject(parameter, MySQLDataSource.class);
Class.forName(Constants.COM_MYSQL_JDBC_DRIVER);
break;
case HIVE:
......@@ -398,26 +396,26 @@ public class DataSourceService extends BaseService{
getString(org.apache.dolphinscheduler.common.Constants.LOGIN_USER_KEY_TAB_PATH));
}
if (dbType == DbType.HIVE){
datasource = JSON.parseObject(parameter, HiveDataSource.class);
datasource = JSONUtils.parseObject(parameter, HiveDataSource.class);
}else if (dbType == DbType.SPARK){
datasource = JSON.parseObject(parameter, SparkDataSource.class);
datasource = JSONUtils.parseObject(parameter, SparkDataSource.class);
}
Class.forName(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER);
break;
case CLICKHOUSE:
datasource = JSON.parseObject(parameter, ClickHouseDataSource.class);
datasource = JSONUtils.parseObject(parameter, ClickHouseDataSource.class);
Class.forName(Constants.COM_CLICKHOUSE_JDBC_DRIVER);
break;
case ORACLE:
datasource = JSON.parseObject(parameter, OracleDataSource.class);
datasource = JSONUtils.parseObject(parameter, OracleDataSource.class);
Class.forName(Constants.COM_ORACLE_JDBC_DRIVER);
break;
case SQLSERVER:
datasource = JSON.parseObject(parameter, SQLServerDataSource.class);
datasource = JSONUtils.parseObject(parameter, SQLServerDataSource.class);
Class.forName(Constants.COM_SQLSERVER_JDBC_DRIVER);
break;
case DB2:
datasource = JSON.parseObject(parameter, DB2ServerDataSource.class);
datasource = JSONUtils.parseObject(parameter, DB2ServerDataSource.class);
Class.forName(Constants.COM_DB2_JDBC_DRIVER);
break;
default:
......@@ -526,8 +524,7 @@ public class DataSourceService extends BaseService{
parameterMap.put(Constants.PRINCIPAL,principal);
}
if (other != null && !"".equals(other)) {
LinkedHashMap<String, String> map = JSON.parseObject(other, new TypeReference<LinkedHashMap<String, String>>() {
});
LinkedHashMap<String, String> map = JSONUtils.parseObject(other, LinkedHashMap.class);
if (map.size() > 0) {
StringBuilder otherSb = new StringBuilder();
for (Map.Entry<String, String> entry: map.entrySet()) {
......
......@@ -16,7 +16,6 @@
*/
package org.apache.dolphinscheduler.api.service;
import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.fasterxml.jackson.core.JsonProcessingException;
......@@ -1340,9 +1339,9 @@ public class ProcessDefinitionService extends BaseDAGService {
*/
if (taskInstance.getTaskType().equals(TaskType.SUB_PROCESS.name())) {
String taskJson = taskInstance.getTaskJson();
taskNode = JSON.parseObject(taskJson, TaskNode.class);
subProcessId = Integer.parseInt(JSON.parseObject(
taskNode.getParams()).getString(CMDPARAM_SUB_PROCESS_DEFINE_ID));
taskNode = JSONUtils.parseObject(taskJson, TaskNode.class);
subProcessId = Integer.parseInt(JSONUtils.parseObject(
taskNode.getParams()).path(CMDPARAM_SUB_PROCESS_DEFINE_ID).asText());
}
treeViewDto.getInstances().add(new Instance(taskInstance.getId(), taskInstance.getName(), taskInstance.getTaskType(), taskInstance.getState().toString()
, taskInstance.getStartTime(), taskInstance.getEndTime(), taskInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - startTime.getTime()), subProcessId));
......
......@@ -16,7 +16,8 @@
*/
package org.apache.dolphinscheduler.api.service;
import java.nio.charset.StandardCharsets;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.api.dto.gantt.GanttDto;
import org.apache.dolphinscheduler.api.dto.gantt.Task;
import org.apache.dolphinscheduler.api.enums.Status;
......@@ -33,11 +34,11 @@ import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils;
import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......@@ -49,6 +50,7 @@ import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.util.*;
import java.util.stream.Collectors;
......
......@@ -16,13 +16,12 @@
*/
package org.apache.dolphinscheduler.api.controller;
import com.alibaba.fastjson.JSON;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.enums.UdfType;
import org.apache.dolphinscheduler.common.utils.*;
import com.alibaba.fastjson.JSONObject;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
......@@ -55,7 +54,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
......@@ -79,7 +78,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
......@@ -282,7 +281,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
......@@ -304,7 +303,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
......@@ -325,7 +324,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
......@@ -345,7 +344,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
......@@ -366,7 +365,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
......@@ -387,7 +386,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
......@@ -407,7 +406,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
......@@ -428,7 +427,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
......@@ -447,7 +446,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
result.getCode().equals(Status.SUCCESS.getCode());
JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString());
ObjectNode object = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString());
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
......
......@@ -16,7 +16,6 @@
*/
package org.apache.dolphinscheduler.api.utils.exportprocess;
import com.alibaba.fastjson.JSONObject;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.dolphinscheduler.api.ApiApplicationServer;
......
......@@ -16,7 +16,6 @@
*/
package org.apache.dolphinscheduler.common.model;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy;
......@@ -295,7 +294,7 @@ public class TaskNode {
if(StringUtils.isNotEmpty(this.getTimeout())){
String formatStr = String.format("%s,%s", TaskTimeoutStrategy.WARN.name(), TaskTimeoutStrategy.FAILED.name());
String taskTimeout = this.getTimeout().replace(formatStr,TaskTimeoutStrategy.WARNFAILED.name());
return JSON.parseObject(taskTimeout,TaskTimeoutParameter.class);
return JSONUtils.parseObject(taskTimeout,TaskTimeoutParameter.class);
}
return new TaskTimeoutParameter(false);
}
......
......@@ -16,9 +16,6 @@
*/
package org.apache.dolphinscheduler.common.utils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
......@@ -29,8 +26,8 @@ import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.ResUploadType;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.client.cli.RMAdminCLI;
import org.slf4j.Logger;
......@@ -411,7 +408,7 @@ public class HadoopUtils implements Closeable {
* @return the return may be null or there may be other parse exceptions
* @throws JSONException json exception
*/
public ExecutionStatus getApplicationStatus(String applicationId) throws JSONException {
public ExecutionStatus getApplicationStatus(String applicationId) {
if (StringUtils.isEmpty(applicationId)) {
return null;
}
......@@ -670,10 +667,10 @@ public class HadoopUtils implements Closeable {
return null;
}
//to json
JSONObject jsonObject = JSON.parseObject(retStr);
ObjectNode jsonObject = JSONUtils.parseObject(retStr);
//get ResourceManager state
return jsonObject.getJSONObject("clusterInfo").getString("haState");
return jsonObject.get("clusterInfo").path("haState").asText();
}
}
......
......@@ -16,7 +16,8 @@
*/
package org.apache.dolphinscheduler.common.utils;
import com.alibaba.fastjson.JSON;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.DateUtils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.DataType;
......@@ -24,8 +25,6 @@ import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils;
import org.apache.dolphinscheduler.common.utils.placeholder.PlaceholderUtils;
import org.apache.dolphinscheduler.common.utils.placeholder.TimePlaceholderUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......
......@@ -16,8 +16,7 @@
*/
package org.apache.dolphinscheduler.common.utils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
......@@ -37,8 +36,8 @@ public class HttpUtilsTest {
//success
String result = HttpUtils.get("https://github.com/manifest.json");
Assert.assertNotNull(result);
JSONObject jsonObject = JSON.parseObject(result);
Assert.assertEquals("GitHub", jsonObject.getString("name"));
ObjectNode jsonObject = JSONUtils.parseObject(result);
Assert.assertEquals("GitHub", jsonObject.path("name").asText());
result = HttpUtils.get("https://123.333.111.33/ccc");
Assert.assertNull(result);
......
......@@ -16,22 +16,16 @@
*/
package org.apache.dolphinscheduler.common.utils;
import com.alibaba.fastjson.JSON;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.dolphinscheduler.common.enums.DataType;
import org.apache.dolphinscheduler.common.enums.Direct;
import org.apache.dolphinscheduler.common.process.Property;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.*;
public class JSONUtilsTest {
......@@ -55,7 +49,7 @@ public class JSONUtilsTest {
property.setType(DataType.VARCHAR);
property.setValue("sssssss");
String str = "{\"direct\":\"IN\",\"prop\":\"ds\",\"type\":\"VARCHAR\",\"value\":\"sssssss\"}";
Property property1 = JSON.parseObject(str, Property.class);
Property property1 = JSONUtils.parseObject(str, Property.class);
Direct direct = property1.getDirect();
Assert.assertEquals(Direct.IN, direct);
}
......
......@@ -16,7 +16,6 @@
*/
package org.apache.dolphinscheduler.common.utils;
import com.alibaba.fastjson.JSON;
import org.apache.commons.lang.time.DateUtils;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.DataType;
......@@ -27,7 +26,9 @@ import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import static org.apache.dolphinscheduler.common.Constants.PARAMETER_FORMAT_TIME;
import static org.apache.dolphinscheduler.common.utils.placeholder.TimePlaceholderUtils.replacePlaceholders;
......
......@@ -16,15 +16,14 @@
*/
package org.apache.dolphinscheduler.dao.entity;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.process.Property;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.utils.*;
import java.util.Date;
......
......@@ -17,7 +17,6 @@
package org.apache.dolphinscheduler.server.master.consumer;
import com.alibaba.fastjson.JSONObject;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.enums.UdfType;
......
......@@ -16,7 +16,6 @@
*/
package org.apache.dolphinscheduler.server.master.runner;
import com.alibaba.fastjson.JSON;
import com.google.common.collect.Lists;
import org.apache.commons.io.FileUtils;
import org.apache.dolphinscheduler.common.Constants;
......
......@@ -17,9 +17,6 @@
package org.apache.dolphinscheduler.server.master.runner;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy;
......@@ -38,10 +35,10 @@ import org.apache.dolphinscheduler.server.master.dispatch.enums.ExecutorType;
import org.apache.dolphinscheduler.server.master.dispatch.executor.NettyExecutorManager;
import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.Date;
import java.util.Set;
import org.apache.dolphinscheduler.common.utils.*;
/**
......@@ -239,7 +236,7 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread {
*/
private TaskTimeoutParameter getTaskTimeoutParameter(){
String taskJson = taskInstance.getTaskJson();
TaskNode taskNode = JSON.parseObject(taskJson, TaskNode.class);
TaskNode taskNode = JSONUtils.parseObject(taskJson, TaskNode.class);
return taskNode.getTaskTimeoutParameter();
}
......
......@@ -19,15 +19,13 @@ package org.apache.dolphinscheduler.server.worker.processor;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.sift.SiftingAppender;
import com.alibaba.fastjson.JSONObject;
import com.github.rholder.retry.RetryException;
import io.netty.channel.Channel;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.server.log.TaskLogDiscriminator;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand;
......@@ -35,6 +33,7 @@ import org.apache.dolphinscheduler.remote.command.TaskExecuteRequestCommand;
import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;
import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.log.TaskLogDiscriminator;
import org.apache.dolphinscheduler.server.worker.config.WorkerConfig;
import org.apache.dolphinscheduler.server.worker.runner.TaskExecuteThread;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
......
......@@ -16,8 +16,8 @@
*/
package org.apache.dolphinscheduler.server.worker.runner;
import org.apache.dolphinscheduler.common.utils.*;
import com.alibaba.fastjson.JSONObject;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.model.TaskNode;
......@@ -152,7 +152,7 @@ public class TaskExecuteThread implements Runnable {
// global params string
String globalParamsStr = taskExecutionContext.getGlobalParams();
if (globalParamsStr != null) {
List<Property> globalParamsList = JSONObject.parseArray(globalParamsStr, Property.class);
List<Property> globalParamsList = JSONUtils.toList(globalParamsStr, Property.class);
globalParamsMap.putAll(globalParamsList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue)));
}
return globalParamsMap;
......
......@@ -17,29 +17,16 @@
package org.apache.dolphinscheduler.server.worker.task.datax;
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr;
import com.alibaba.druid.sql.ast.expr.SQLPropertyExpr;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.parser.SQLStatementParser;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.io.FileUtils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.DataType;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.task.AbstractParameters;
......@@ -50,8 +37,6 @@ import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.dao.datasource.BaseDataSource;
import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.server.entity.DataxTaskExecutionContext;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.utils.DataxUtils;
......@@ -59,20 +44,21 @@ import org.apache.dolphinscheduler.server.utils.ParamUtils;
import org.apache.dolphinscheduler.server.worker.task.AbstractTask;
import org.apache.dolphinscheduler.server.worker.task.CommandExecuteResult;
import org.apache.dolphinscheduler.server.worker.task.ShellCommandExecutor;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.slf4j.Logger;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr;
import com.alibaba.druid.sql.ast.expr.SQLPropertyExpr;
import com.alibaba.druid.sql.ast.statement.SQLSelect;
import com.alibaba.druid.sql.ast.statement.SQLSelectItem;
import com.alibaba.druid.sql.ast.statement.SQLSelectQueryBlock;
import com.alibaba.druid.sql.ast.statement.SQLSelectStatement;
import com.alibaba.druid.sql.ast.statement.SQLUnionQuery;
import com.alibaba.druid.sql.parser.SQLStatementParser;
import com.alibaba.fastjson.JSONObject;
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
......@@ -173,19 +159,19 @@ public class DataxTask extends AbstractTask {
*/
@Override
public void cancelApplication(boolean cancelApplication)
throws Exception {
throws Exception {
// cancel process
shellCommandExecutor.cancelApplication();
}
/**
* build datax configuration file
*
*
* @return datax json file name
* @throws Exception if error throws Exception
*/
private String buildDataxJsonFile()
throws Exception {
throws Exception {
// generate json
String fileName = String.format("%s/%s_job.json",
taskExecutionContext.getExecutePath(),
......@@ -216,14 +202,14 @@ public class DataxTask extends AbstractTask {
}
}else {
ObjectNode job = JSONUtils.createObjectNode();
job.putArray("content").addAll(buildDataxJobContentJson());
job.set("setting", buildDataxJobSettingJson());
JSONObject job = new JSONObject();
job.put("content", buildDataxJobContentJson());
job.put("setting", buildDataxJobSettingJson());
ObjectNode root = JSONUtils.createObjectNode();
JSONObject root = new JSONObject();
root.put("job", job);
root.put("core", buildDataxCoreJson());
root.set("job", job);
root.set("core", buildDataxCoreJson());
json = root.toString();
}
......@@ -236,13 +222,13 @@ public class DataxTask extends AbstractTask {
/**
* build datax job config
*
*
* @return collection of datax job config JSONObject
* @throws SQLException if error throws SQLException
*/
private List<JSONObject> buildDataxJobContentJson() throws SQLException {
DataxTaskExecutionContext dataxTaskExecutionContext = taskExecutionContext.getDataxTaskExecutionContext();
private List<ObjectNode> buildDataxJobContentJson() throws SQLException {
DataxTaskExecutionContext dataxTaskExecutionContext = taskExecutionContext.getDataxTaskExecutionContext();
BaseDataSource dataSourceCfg = DataSourceFactory.getDatasource(DbType.of(dataxTaskExecutionContext.getSourcetype()),
dataxTaskExecutionContext.getSourceConnectionParams());
......@@ -250,50 +236,76 @@ public class DataxTask extends AbstractTask {
BaseDataSource dataTargetCfg = DataSourceFactory.getDatasource(DbType.of(dataxTaskExecutionContext.getTargetType()),
dataxTaskExecutionContext.getTargetConnectionParams());
List<JSONObject> readerConnArr = new ArrayList<>();
JSONObject readerConn = new JSONObject();
readerConn.put("querySql", new String[] {dataXParameters.getSql()});
readerConn.put("jdbcUrl", new String[] {dataSourceCfg.getJdbcUrl()});
List<ObjectNode> readerConnArr = new ArrayList<>();
ObjectNode readerConn = JSONUtils.createObjectNode();
ArrayNode sqlArr = readerConn.putArray("querySql");
for (String sql : new String[]{dataXParameters.getSql()}) {
sqlArr.add(sql);
}
ArrayNode urlArr = readerConn.putArray("jdbcUrl");
for (String url : new String[]{dataSourceCfg.getJdbcUrl()}) {
urlArr.add(url);
}
readerConnArr.add(readerConn);
JSONObject readerParam = new JSONObject();
ObjectNode readerParam = JSONUtils.createObjectNode();
readerParam.put("username", dataSourceCfg.getUser());
readerParam.put("password", dataSourceCfg.getPassword());
readerParam.put("connection", readerConnArr);
readerParam.putArray("connection").addAll(readerConnArr);
JSONObject reader = new JSONObject();
ObjectNode reader = JSONUtils.createObjectNode();
reader.put("name", DataxUtils.getReaderPluginName(DbType.of(dataxTaskExecutionContext.getSourcetype())));
reader.put("parameter", readerParam);
reader.set("parameter", readerParam);
List<ObjectNode> writerConnArr = new ArrayList<>();
ObjectNode writerConn = JSONUtils.createObjectNode();
ArrayNode tableArr = writerConn.putArray("table");
for (String table : new String[]{dataXParameters.getTargetTable()}) {
tableArr.add(table);
}
List<JSONObject> writerConnArr = new ArrayList<>();
JSONObject writerConn = new JSONObject();
writerConn.put("table", new String[] {dataXParameters.getTargetTable()});
writerConn.put("jdbcUrl", dataTargetCfg.getJdbcUrl());
writerConnArr.add(writerConn);
JSONObject writerParam = new JSONObject();
ObjectNode writerParam = JSONUtils.createObjectNode();
writerParam.put("username", dataTargetCfg.getUser());
writerParam.put("password", dataTargetCfg.getPassword());
writerParam.put("column",
parsingSqlColumnNames(DbType.of(dataxTaskExecutionContext.getSourcetype()),
DbType.of(dataxTaskExecutionContext.getTargetType()),
dataSourceCfg, dataXParameters.getSql()));
writerParam.put("connection", writerConnArr);
String[] columns = parsingSqlColumnNames(DbType.of(dataxTaskExecutionContext.getSourcetype()),
DbType.of(dataxTaskExecutionContext.getTargetType()),
dataSourceCfg, dataXParameters.getSql());
ArrayNode columnArr = writerParam.putArray("column");
for (String column : columns) {
columnArr.add(column);
}
writerParam.putArray("connection").addAll(writerConnArr);
if (CollectionUtils.isNotEmpty(dataXParameters.getPreStatements())) {
writerParam.put("preSql", dataXParameters.getPreStatements());
ArrayNode preSqlArr = writerParam.putArray("preSql");
for (String preSql : dataXParameters.getPreStatements()) {
preSqlArr.add(preSql);
}
}
if (CollectionUtils.isNotEmpty(dataXParameters.getPostStatements())) {
writerParam.put("postSql", dataXParameters.getPostStatements());
ArrayNode postSqlArr = writerParam.putArray("postSql");
for (String postSql : dataXParameters.getPostStatements()) {
postSqlArr.add(postSql);
}
}
JSONObject writer = new JSONObject();
ObjectNode writer = JSONUtils.createObjectNode();
writer.put("name", DataxUtils.getWriterPluginName(DbType.of(dataxTaskExecutionContext.getTargetType())));
writer.put("parameter", writerParam);
writer.set("parameter", writerParam);
List<JSONObject> contentList = new ArrayList<>();
JSONObject content = new JSONObject();
List<ObjectNode> contentList = new ArrayList<>();
ObjectNode content = JSONUtils.createObjectNode();
content.put("reader", reader);
content.put("writer", writer);
contentList.add(content);
......@@ -303,11 +315,13 @@ public class DataxTask extends AbstractTask {
/**
* build datax setting config
*
*
* @return datax setting config JSONObject
*/
private JSONObject buildDataxJobSettingJson() {
JSONObject speed = new JSONObject();
private ObjectNode buildDataxJobSettingJson() {
ObjectNode speed = JSONUtils.createObjectNode();
speed.put("channel", DATAX_CHANNEL_COUNT);
if (dataXParameters.getJobSpeedByte() > 0) {
......@@ -318,19 +332,20 @@ public class DataxTask extends AbstractTask {
speed.put("record", dataXParameters.getJobSpeedRecord());
}
JSONObject errorLimit = new JSONObject();
ObjectNode errorLimit = JSONUtils.createObjectNode();
errorLimit.put("record", 0);
errorLimit.put("percentage", 0);
JSONObject setting = new JSONObject();
ObjectNode setting = JSONUtils.createObjectNode();
setting.put("speed", speed);
setting.put("errorLimit", errorLimit);
return setting;
}
private JSONObject buildDataxCoreJson() {
JSONObject speed = new JSONObject();
private ObjectNode buildDataxCoreJson() {
ObjectNode speed = JSONUtils.createObjectNode();
speed.put("channel", DATAX_CHANNEL_COUNT);
if (dataXParameters.getJobSpeedByte() > 0) {
......@@ -341,26 +356,26 @@ public class DataxTask extends AbstractTask {
speed.put("record", dataXParameters.getJobSpeedRecord());
}
JSONObject channel = new JSONObject();
channel.put("speed", speed);
ObjectNode channel = JSONUtils.createObjectNode();
channel.set("speed", speed);
JSONObject transport = new JSONObject();
transport.put("channel", channel);
ObjectNode transport = JSONUtils.createObjectNode();
transport.set("channel", channel);
JSONObject core = new JSONObject();
core.put("transport", transport);
ObjectNode core = JSONUtils.createObjectNode();
core.set("transport", transport);
return core;
}
/**
* create command
*
*
* @return shell command file name
* @throws Exception if error throws Exception
*/
private String buildShellCommandFile(String jobConfigFilePath)
throws Exception {
throws Exception {
// generate scripts
String fileName = String.format("%s/%s_node.sh",
taskExecutionContext.getExecutePath(),
......@@ -411,7 +426,7 @@ public class DataxTask extends AbstractTask {
/**
* parsing synchronized column names in SQL statements
*
*
* @param dsType
* the database type of the data source
* @param dtType
......@@ -437,7 +452,7 @@ public class DataxTask extends AbstractTask {
/**
* try grammatical parsing column
*
*
* @param dbType
* database type
* @param sql
......@@ -467,7 +482,7 @@ public class DataxTask extends AbstractTask {
}
notNull(selectItemList,
String.format("select query type [%s] is not support", sqlSelect.getQuery().toString()));
String.format("select query type [%s] is not support", sqlSelect.getQuery().toString()));
columnNames = new String[selectItemList.size()];
for (int i = 0; i < selectItemList.size(); i++ ) {
......@@ -487,12 +502,12 @@ public class DataxTask extends AbstractTask {
}
} else {
throw new RuntimeException(
String.format("grammatical analysis sql column [ %s ] failed", item.toString()));
String.format("grammatical analysis sql column [ %s ] failed", item.toString()));
}
if (columnName == null) {
throw new RuntimeException(
String.format("grammatical analysis sql column [ %s ] failed", item.toString()));
String.format("grammatical analysis sql column [ %s ] failed", item.toString()));
}
columnNames[i] = columnName;
......@@ -508,7 +523,7 @@ public class DataxTask extends AbstractTask {
/**
* try to execute sql to resolve column names
*
*
* @param baseDataSource
* the database connection parameters
* @param sql
......@@ -521,10 +536,10 @@ public class DataxTask extends AbstractTask {
sql = sql.replace(";", "");
try (
Connection connection = DriverManager.getConnection(baseDataSource.getJdbcUrl(), baseDataSource.getUser(),
baseDataSource.getPassword());
PreparedStatement stmt = connection.prepareStatement(sql);
ResultSet resultSet = stmt.executeQuery()) {
Connection connection = DriverManager.getConnection(baseDataSource.getJdbcUrl(), baseDataSource.getUser(),
baseDataSource.getPassword());
PreparedStatement stmt = connection.prepareStatement(sql);
ResultSet resultSet = stmt.executeQuery()) {
ResultSetMetaData md = resultSet.getMetaData();
int num = md.getColumnCount();
......@@ -552,4 +567,4 @@ public class DataxTask extends AbstractTask {
}
}
}
}
\ No newline at end of file
......@@ -17,8 +17,7 @@
package org.apache.dolphinscheduler.server.worker.task.http;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.io.Charsets;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
......@@ -48,7 +47,6 @@ import org.slf4j.Logger;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
......@@ -152,7 +150,7 @@ public class HttpTask extends AbstractTask {
String jsonObject = JSONUtils.toJsonString(httpProperty);
String params = ParameterUtils.convertParameterPlaceholders(jsonObject,ParamUtils.convert(paramsMap));
logger.info("http request params:{}",params);
httpPropertyList.add(JSON.parseObject(params,HttpProperty.class));
httpPropertyList.add(JSONUtils.parseObject(params,HttpProperty.class));
}
}
addRequestParams(builder,httpPropertyList);
......@@ -252,7 +250,7 @@ public class HttpTask extends AbstractTask {
*/
protected void addRequestParams(RequestBuilder builder,List<HttpProperty> httpPropertyList) {
if(CollectionUtils.isNotEmpty(httpPropertyList)){
JSONObject jsonParam = new JSONObject();
ObjectNode jsonParam = JSONUtils.createObjectNode();
for (HttpProperty property: httpPropertyList){
if(property.getHttpParametersType() != null){
if (property.getHttpParametersType().equals(HttpParametersType.PARAMETER)){
......
......@@ -16,8 +16,6 @@
*/
package org.apache.dolphinscheduler.server.worker.task.processdure;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.cronutils.utils.StringUtils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.*;
......
......@@ -16,9 +16,9 @@
*/
package org.apache.dolphinscheduler.server.worker.task.sql;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.lang.StringUtils;
import org.apache.dolphinscheduler.alert.utils.MailUtils;
import org.apache.dolphinscheduler.common.Constants;
......@@ -43,6 +43,7 @@ import org.apache.dolphinscheduler.server.utils.UDFUtils;
import org.apache.dolphinscheduler.server.worker.task.AbstractTask;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.slf4j.Logger;
import org.apache.dolphinscheduler.common.utils.*;
import java.sql.*;
import java.util.*;
......@@ -254,30 +255,30 @@ public class SqlTask extends AbstractTask {
* @throws Exception
*/
private void resultProcess(ResultSet resultSet) throws Exception{
JSONArray resultJSONArray = new JSONArray();
ArrayNode resultJSONArray = JSONUtils.createArrayNode();
ResultSetMetaData md = resultSet.getMetaData();
int num = md.getColumnCount();
int rowCount = 0;
while (rowCount < LIMIT && resultSet.next()) {
JSONObject mapOfColValues = new JSONObject(true);
ObjectNode mapOfColValues = JSONUtils.createObjectNode();
for (int i = 1; i <= num; i++) {
mapOfColValues.put(md.getColumnName(i), resultSet.getObject(i));
mapOfColValues.set(md.getColumnName(i), JSONUtils.toJsonNode(resultSet.getObject(i)));
}
resultJSONArray.add(mapOfColValues);
rowCount++;
}
logger.debug("execute sql : {}", JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue));
logger.debug("execute sql : {}", JSONUtils.toJsonString(resultJSONArray, SerializationFeature.WRITE_NULL_MAP_VALUES));
// if there is a result set
if (!resultJSONArray.isEmpty() ) {
if (!resultJSONArray.isEmpty(null) ) {
if (StringUtils.isNotEmpty(sqlParameters.getTitle())) {
sendAttachment(sqlParameters.getTitle(),
JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue));
JSONUtils.toJsonString(resultJSONArray, SerializationFeature.WRITE_NULL_MAP_VALUES));
}else{
sendAttachment(taskExecutionContext.getTaskName() + " query resultsets ",
JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue));
JSONUtils.toJsonString(resultJSONArray, SerializationFeature.WRITE_NULL_MAP_VALUES));
}
}
}
......
......@@ -16,7 +16,6 @@
*/
package org.apache.dolphinscheduler.server.worker.task.sqoop;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.task.AbstractParameters;
......@@ -27,7 +26,9 @@ import org.apache.dolphinscheduler.server.utils.ParamUtils;
import org.apache.dolphinscheduler.server.worker.task.AbstractYarnTask;
import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.SqoopJobGenerator;
import org.slf4j.Logger;
import java.util.Map;
import org.apache.dolphinscheduler.common.utils.*;
/**
* sqoop task extends the shell task
......@@ -50,7 +51,7 @@ public class SqoopTask extends AbstractYarnTask {
public void init() throws Exception {
logger.info("sqoop task params {}", taskExecutionContext.getTaskParams());
sqoopParameters =
JSON.parseObject(taskExecutionContext.getTaskParams(),SqoopParameters.class);
JSONUtils.parseObject(taskExecutionContext.getTaskParams(),SqoopParameters.class);
if (!sqoopParameters.checkParameters()) {
throw new RuntimeException("sqoop task params is not valid");
}
......
......@@ -17,7 +17,6 @@
package org.apache.dolphinscheduler.server.utils;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.DataType;
import org.apache.dolphinscheduler.common.enums.Direct;
......
......@@ -16,14 +16,12 @@
*/
package org.apache.dolphinscheduler.server.worker.shell;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.common.utils.LoggerUtils;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.server.worker.task.AbstractTask;
import org.apache.dolphinscheduler.server.worker.task.TaskManager;
import org.apache.dolphinscheduler.server.worker.task.TaskProps;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
......@@ -32,6 +30,7 @@ import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.dolphinscheduler.common.utils.*;
import java.util.Date;
......@@ -68,7 +67,7 @@ public class ShellCommandExecutorTest {
TaskInstance taskInstance = processService.findTaskInstanceById(7657);
String taskJson = taskInstance.getTaskJson();
TaskNode taskNode = JSON.parseObject(taskJson, TaskNode.class);
TaskNode taskNode = JSONUtils.parseObject(taskJson, TaskNode.class);
taskProps.setTaskParams(taskNode.getParams());
......
......@@ -16,18 +16,17 @@
*/
package org.apache.dolphinscheduler.server.worker.sql;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.common.utils.LoggerUtils;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.server.worker.task.AbstractTask;
import org.apache.dolphinscheduler.server.worker.task.TaskManager;
import org.apache.dolphinscheduler.server.worker.task.TaskProps;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.common.utils.*;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
......@@ -112,7 +111,7 @@ public class SqlExecutorTest {
TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId);
String taskJson = taskInstance.getTaskJson();
TaskNode taskNode = JSON.parseObject(taskJson, TaskNode.class);
TaskNode taskNode = JSONUtils.parseObject(taskJson, TaskNode.class);
taskProps.setTaskParams(taskNode.getParams());
......
......@@ -22,7 +22,8 @@ import java.util.Arrays;
import java.util.Date;
import java.util.List;
import com.alibaba.fastjson.JSONObject;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.dao.datasource.BaseDataSource;
......@@ -289,24 +290,24 @@ public class DataxTaskTest {
*/
@Test
public void testBuildDataxJobContentJson()
throws Exception {
throws Exception {
try {
Method method = DataxTask.class.getDeclaredMethod("buildDataxJobContentJson");
method.setAccessible(true);
List<JSONObject> contentList = (List<JSONObject>) method.invoke(dataxTask, null);
List<ObjectNode> contentList = (List<ObjectNode>) method.invoke(dataxTask, null);
Assert.assertNotNull(contentList);
JSONObject content = contentList.get(0);
JSONObject reader = (JSONObject) content.get("reader");
ObjectNode content = contentList.get(0);
JsonNode reader = content.path("reader");
Assert.assertNotNull(reader);
String readerPluginName = (String) reader.get("name");
String readerPluginName = reader.path("name").asText();
Assert.assertEquals(DataxUtils.DATAX_READER_PLUGIN_MYSQL, readerPluginName);
JSONObject writer = (JSONObject) content.get("writer");
JsonNode writer = content.path("writer");
Assert.assertNotNull(writer);
String writerPluginName = (String) writer.get("name");
String writerPluginName = writer.path("name").asText();
Assert.assertEquals(DataxUtils.DATAX_WRITER_PLUGIN_MYSQL, writerPluginName);
}
catch (Exception e) {
......@@ -323,12 +324,11 @@ public class DataxTaskTest {
try {
Method method = DataxTask.class.getDeclaredMethod("buildDataxJobSettingJson");
method.setAccessible(true);
JSONObject setting = (JSONObject) method.invoke(dataxTask, null);
JsonNode setting = (JsonNode) method.invoke(dataxTask, null);
Assert.assertNotNull(setting);
Assert.assertNotNull(setting.get("speed"));
Assert.assertNotNull(setting.get("errorLimit"));
}
catch (Exception e) {
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
......@@ -338,11 +338,11 @@ public class DataxTaskTest {
*/
@Test
public void testBuildDataxCoreJson()
throws Exception {
throws Exception {
try {
Method method = DataxTask.class.getDeclaredMethod("buildDataxCoreJson");
method.setAccessible(true);
JSONObject coreConfig = (JSONObject) method.invoke(dataxTask, null);
ObjectNode coreConfig = (ObjectNode) method.invoke(dataxTask, null);
Assert.assertNotNull(coreConfig);
Assert.assertNotNull(coreConfig.get("transport"));
}
......
......@@ -16,7 +16,6 @@
*/
package org.apache.dolphinscheduler.server.worker.task.sqoop;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
import org.apache.dolphinscheduler.dao.entity.DataSource;
......@@ -34,8 +33,9 @@ import org.mockito.junit.MockitoJUnitRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import org.apache.dolphinscheduler.common.utils.*;
import java.util.*;
import java.util.Date;
/**
* sqoop task test
......@@ -73,7 +73,7 @@ public class SqoopTaskTest {
@Test
public void testGenerator(){
String data1 = "{\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HDFS\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"0\\\",\\\"srcQuerySql\\\":\\\"\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[]}\",\"targetParams\":\"{\\\"targetPath\\\":\\\"/ods/tmp/test/person7\\\",\\\"deleteTargetDir\\\":true,\\\"fileType\\\":\\\"--as-textfile\\\",\\\"compressionCodec\\\":\\\"\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
SqoopParameters sqoopParameters1 = JSON.parseObject(data1,SqoopParameters.class);
SqoopParameters sqoopParameters1 = JSONUtils.parseObject(data1,SqoopParameters.class);
SqoopJobGenerator generator = new SqoopJobGenerator();
String script = generator.generateSqoopJob(sqoopParameters1,new TaskExecutionContext());
......@@ -81,21 +81,21 @@ public class SqoopTaskTest {
Assert.assertEquals(expected, script);
String data2 = "{\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HDFS\",\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"exportDir\\\":\\\"/ods/tmp/test/person7\\\"}\",\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"id,name,age,sex,create_time\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":true,\\\"targetUpdateKey\\\":\\\"id\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
SqoopParameters sqoopParameters2 = JSON.parseObject(data2,SqoopParameters.class);
SqoopParameters sqoopParameters2 = JSONUtils.parseObject(data2,SqoopParameters.class);
String script2 = generator.generateSqoopJob(sqoopParameters2,new TaskExecutionContext());
String expected2 = "sqoop export -m 1 --export-dir /ods/tmp/test/person7 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --columns id,name,age,sex,create_time --fields-terminated-by '@' --lines-terminated-by '\\n' --update-key id --update-mode allowinsert";
Assert.assertEquals(expected2, script2);
String data3 = "{\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HIVE\",\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-17\\\"}\",\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":false,\\\"targetUpdateKey\\\":\\\"\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}";
SqoopParameters sqoopParameters3 = JSON.parseObject(data3,SqoopParameters.class);
SqoopParameters sqoopParameters3 = JSONUtils.parseObject(data3,SqoopParameters.class);
String script3 = generator.generateSqoopJob(sqoopParameters3,new TaskExecutionContext());
String expected3 = "sqoop export -m 1 --hcatalog-database stg --hcatalog-table person_internal --hcatalog-partition-keys date --hcatalog-partition-values 2020-02-17 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --fields-terminated-by '@' --lines-terminated-by '\\n'";
Assert.assertEquals(expected3, script3);
String data4 = "{\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}";
SqoopParameters sqoopParameters4 = JSON.parseObject(data4,SqoopParameters.class);
SqoopParameters sqoopParameters4 = JSONUtils.parseObject(data4,SqoopParameters.class);
String script4 = generator.generateSqoopJob(sqoopParameters4,new TaskExecutionContext());
String expected4 = "sqoop import -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --query 'SELECT * FROM person_2 WHERE $CONDITIONS' --map-column-java id=Integer --hive-import --hive-table stg.person_internal_2 --create-hive-table --hive-overwrite -delete-target-dir --hive-partition-key date --hive-partition-value 2020-02-16";
......
......@@ -16,9 +16,8 @@
*/
package org.apache.dolphinscheduler.service.process;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.cronutils.model.Cron;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.lang.ArrayUtils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.*;
......@@ -205,16 +204,16 @@ public class ProcessService {
CommandType commandType = command.getCommandType();
if(cmdTypeMap.containsKey(commandType)){
JSONObject cmdParamObj = (JSONObject) JSON.parse(command.getCommandParam());
JSONObject tempObj;
int processInstanceId = cmdParamObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING);
ObjectNode cmdParamObj = JSONUtils.parseObject(command.getCommandParam());
ObjectNode tempObj;
int processInstanceId = cmdParamObj.path(CMDPARAM_RECOVER_PROCESS_ID_STRING).asInt();
List<Command> commands = commandMapper.selectList(null);
// for all commands
for (Command tmpCommand:commands){
if(cmdTypeMap.containsKey(tmpCommand.getCommandType())){
tempObj = (JSONObject) JSON.parse(tmpCommand.getCommandParam());
if(tempObj != null && processInstanceId == tempObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING)){
tempObj = JSONUtils.parseObject(tmpCommand.getCommandParam());
if(tempObj != null && processInstanceId == tempObj.path(CMDPARAM_RECOVER_PROCESS_ID_STRING).asInt()){
isNeedCreate = false;
break;
}
......