Unverified commit c1ee1333, authored by: Q qiaozhanwei, committed by: GitHub

Refactor Architecture Basic modification #1658 (#1946)

* 1. remove dolphinscheduler-rpc module 2. add dolphinscheduler-remote module 3. add dolphinscheduler-service module 4. refactor LoggerServer module (#1925)

* 1. remove dolphinscheduler-rpc module
2. add dolphinscheduler-remote module
3. add dolphinscheduler-service module
4. refactor LoggerServer module

* ProcessUtils modify

* Refactor architecture (#1926)

* move version to parent pom

* move version properties to parent pom for easy management

* remove freemarker dependency

* delete CombinedApplicationServer

* #1871 correct spelling

* #1873 some updates for TaskQueueZkImpl

* #1875 remove unused properties in pom

* #1878
1. remove tomcat dependency
2. remove combined_logback.xml in api module
3. format pom.xml that was not aligned

* #1885 fix api server startup failure
1. add jsp-2.1 dependency
2. remove jasper-runtime dependency

* add stringutils ut (#1921)

* add stringutils ut

* New feature for #1675. (#1908)

Continue to finish the remaining work: add the cache feature for dependence, mr, python, sub_process, procedure and shell.

* Add modify user name for process definition (#1919)

* class overrides equals() and should therefore also override hashCode()

* #1862 add modify user in process definition list

* #1862 add pg-1.2.2 ddl.sql

* modify ScriptRunnerTest

* add updateProcessDefinition UT

* modify updateProcessDefinition UT

* modify updateProcessDefinition UT

* modify mysql 1.2.2 ddl.sql&dml.sql

* add scope test to mysql in pom

* modify pg-1.2.2 ddl.sql

* refactor module

* updates
Co-authored-by: khadgarmage <khadgar.mage@outlook.com>
Co-authored-by: zhukai <boness@qq.com>
Co-authored-by: Yelli <amarantine@my.com>

* dolphinscheduler-common remove spring (#1931)

* dolphinscheduler-common remove spring

* dolphinscheduler-common remove spring

* dolphinscheduler-common remove spring

* dolphinscheduler-common remove spring

* dolphinscheduler-common remove spring

* SpringApplicationContext class title add license (#1932)

* dolphinscheduler-common remove spring

* dolphinscheduler-common remove spring

* dolphinscheduler-common remove spring

* dolphinscheduler-common remove spring

* dolphinscheduler-common remove spring

* dolphinscheduler-common remove spring

* add license (#1934)

* dolphinscheduler-common remove spring

* dolphinscheduler-common remove spring

* dolphinscheduler-common remove spring

* dolphinscheduler-common remove spring

* dolphinscheduler-common remove spring

* dolphinscheduler-common remove spring

* Refactor architecture (#1936)

* move datasource classes to dao module

* fix send4LetterWord bug

* LoggerServiceTest remove ProcessDao (#1944)

* dolphinscheduler-common remove spring

* dolphinscheduler-common remove spring

* dolphinscheduler-common remove spring

* dolphinscheduler-common remove spring

* dolphinscheduler-common remove spring

* dolphinscheduler-common remove spring

* LoggerServiceTest remove ProcessDao

* exclude jasper-compiler in case of runtime conflict (#1938)

* move datasource classes to dao module

* fix send4LetterWord bug

* exclude jasper-compiler in case of runtime conflict

* DataAnalysisServiceTest and ProcessDefinitionService modify

* remote module add comment

* OSUtilsTest modify

* add finally block to close channel (#1951); a sketch of this pattern follows the changelog

* move datasource classes to dao module

* fix send4LetterWord bug

* exclude jasper-compiler in case of runtime conflict

* add finally block to close channel
Co-authored-by: Tboy <technoboy@yeah.net>
Co-authored-by: khadgarmage <khadgar.mage@outlook.com>
Co-authored-by: zhukai <boness@qq.com>
Co-authored-by: Yelli <amarantine@my.com>
Parent: cdd53a76
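For context on the "add finally block to close channel" item above: the API server's LoggerService now goes through LogClientService from the new dolphinscheduler-service module instead of the gRPC-based LogClient, and always closes the channel in a finally block. A minimal sketch of that pattern, using only the constructor and methods visible in the LoggerService diff below (host, port and log path here are placeholder values):

import org.apache.dolphinscheduler.service.log.LogClientService;

public class LogClientCloseExample {
    public static void main(String[] args) {
        LogClientService logClient = null;
        try {
            // placeholder host/port; LoggerService uses taskInstance.getHost() and Constants.RPC_PORT
            logClient = new LogClientService("127.0.0.1", 50051);
            // same call the refactored LoggerService.queryLog(...) makes
            String log = logClient.rollViewLog("/tmp/task.log", 0, 100);
            System.out.println(log);
        } finally {
            // release the underlying channel even if the log request fails
            if (logClient != null) {
                logClient.close();
            }
        }
    }
}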
......@@ -31,12 +31,6 @@
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-alert</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-dao</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
......@@ -129,13 +123,13 @@
</dependency>
<dependency>
<groupId>com.github.xiaoymin</groupId>
<artifactId>swagger-bootstrap-ui</artifactId>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-service</artifactId>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-rpc</artifactId>
<groupId>com.github.xiaoymin</groupId>
<artifactId>swagger-bootstrap-ui</artifactId>
</dependency>
<dependency>
......
......@@ -22,12 +22,12 @@ import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.queue.ITaskQueue;
import org.apache.dolphinscheduler.common.queue.TaskQueueFactory;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import io.swagger.annotations.*;
import org.apache.dolphinscheduler.service.queue.ITaskQueue;
import org.apache.dolphinscheduler.service.queue.TaskQueueFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.log;
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import io.grpc.StatusRuntimeException;
import org.apache.dolphinscheduler.rpc.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.TimeUnit;
/**
* log client
*/
public class LogClient {
private static final Logger logger = LoggerFactory.getLogger(LogClient.class);
private final ManagedChannel channel;
private final LogViewServiceGrpc.LogViewServiceBlockingStub blockingStub;
/**
* construct client connecting to HelloWorld server at {@code host:port}
*
* @param host host
* @param port port
*/
public LogClient(String host, int port) {
this(ManagedChannelBuilder.forAddress(host, port)
// Channels are secure by default (via SSL/TLS). For the example we disable TLS to avoid
// needing certificates.
.usePlaintext(true));
}
/**
* construct client for accessing RouteGuide server using the existing channel
*
*/
LogClient(ManagedChannelBuilder<?> channelBuilder) {
/**
* set max read size
*/
channelBuilder.maxInboundMessageSize(Integer.MAX_VALUE);
channel = channelBuilder.build();
blockingStub = LogViewServiceGrpc.newBlockingStub(channel);
}
/**
* shutdown
*
* @throws InterruptedException InterruptedException
*/
public void shutdown() throws InterruptedException {
channel.shutdown().awaitTermination(5, TimeUnit.SECONDS);
}
/**
* roll view log
*
* @param path path
* @param skipLineNum skip line number
* @param limit limit
* @return log content
*/
public String rollViewLog(String path,int skipLineNum,int limit) {
logger.info("roll view log : path {},skipLineNum {} ,limit {}", path, skipLineNum, limit);
LogParameter pathParameter = LogParameter
.newBuilder()
.setPath(path)
.setSkipLineNum(skipLineNum)
.setLimit(limit)
.build();
RetStrInfo retStrInfo;
try {
retStrInfo = blockingStub.rollViewLog(pathParameter);
return retStrInfo.getMsg();
} catch (StatusRuntimeException e) {
logger.error("roll view log error", e);
return null;
}
}
/**
* view log
*
* @param path path
* @return log content
*/
public String viewLog(String path) {
logger.info("view log path {}",path);
PathParameter pathParameter = PathParameter.newBuilder().setPath(path).build();
RetStrInfo retStrInfo;
try {
retStrInfo = blockingStub.viewLog(pathParameter);
return retStrInfo.getMsg();
} catch (StatusRuntimeException e) {
logger.error("view log error", e);
return null;
}
}
/**
* get log size
*
* @param path log path
* @return log content bytes
*/
public byte[] getLogBytes(String path) {
logger.info("log path {}",path);
PathParameter pathParameter = PathParameter.newBuilder().setPath(path).build();
RetByteInfo retByteInfo;
try {
retByteInfo = blockingStub.getLogBytes(pathParameter);
return retByteInfo.getData().toByteArray();
} catch (StatusRuntimeException e) {
logger.error("log size error", e);
return null;
}
}
}
\ No newline at end of file
......@@ -24,13 +24,13 @@ import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.queue.ITaskQueue;
import org.apache.dolphinscheduler.common.queue.TaskQueueFactory;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.ProcessDao;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.queue.ITaskQueue;
import org.apache.dolphinscheduler.service.queue.TaskQueueFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
......@@ -69,7 +69,7 @@ public class DataAnalysisService extends BaseService{
TaskInstanceMapper taskInstanceMapper;
@Autowired
ProcessDao processDao;
ProcessService processService;
/**
* statistical task instance status data
......@@ -296,7 +296,7 @@ public class DataAnalysisService extends BaseService{
if(projectId !=0){
projectIds.add(projectId);
}else if(loginUser.getUserType() == UserType.GENERAL_USER){
projectIds = processDao.getProjectIdListHavePerm(loginUser.getId());
projectIds = processService.getProjectIdListHavePerm(loginUser.getId());
if(projectIds.size() ==0 ){
projectIds.add(0);
}
......
......@@ -21,10 +21,9 @@ import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.job.db.*;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.datasource.*;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.User;
......
......@@ -25,12 +25,12 @@ import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.ProcessDao;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.utils.cron.CronUtils;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
......@@ -67,7 +67,7 @@ public class ExecutorService extends BaseService{
@Autowired
private ProcessDao processDao;
private ProcessService processService;
/**
* execute process instance
......@@ -186,13 +186,13 @@ public class ExecutorService extends BaseService{
return checkResult;
}
ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processInstanceId);
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId);
if (processInstance == null) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
}
ProcessDefinition processDefinition = processDao.findProcessDefineById(processInstance.getProcessDefinitionId());
ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId());
if(executeType != ExecuteType.STOP && executeType != ExecuteType.PAUSE){
result = checkProcessDefinitionValid(processDefinition, processInstance.getProcessDefinitionId());
if (result.get(Constants.STATUS) != Status.SUCCESS) {
......@@ -227,7 +227,7 @@ public class ExecutorService extends BaseService{
} else {
processInstance.setCommandType(CommandType.STOP);
processInstance.addHistoryCmd(CommandType.STOP);
processDao.updateProcessInstance(processInstance);
processService.updateProcessInstance(processInstance);
result = updateProcessInstanceState(processInstanceId, ExecutionStatus.READY_STOP);
}
break;
......@@ -237,7 +237,7 @@ public class ExecutorService extends BaseService{
} else {
processInstance.setCommandType(CommandType.PAUSE);
processInstance.addHistoryCmd(CommandType.PAUSE);
processDao.updateProcessInstance(processInstance);
processService.updateProcessInstance(processInstance);
result = updateProcessInstanceState(processInstanceId, ExecutionStatus.READY_PAUSE);
}
break;
......@@ -257,7 +257,7 @@ public class ExecutorService extends BaseService{
*/
private boolean checkTenantSuitable(ProcessDefinition processDefinition) {
// checkTenantExists();
Tenant tenant = processDao.getTenantForProcess(processDefinition.getTenantId(),
Tenant tenant = processService.getTenantForProcess(processDefinition.getTenantId(),
processDefinition.getUserId());
if(tenant == null){
return false;
......@@ -319,7 +319,7 @@ public class ExecutorService extends BaseService{
private Map<String, Object> updateProcessInstanceState(Integer processInstanceId, ExecutionStatus executionStatus) {
Map<String, Object> result = new HashMap<>(5);
int update = processDao.updateProcessInstanceState(processInstanceId, executionStatus);
int update = processService.updateProcessInstanceState(processInstanceId, executionStatus);
if (update > 0) {
putMsg(result, Status.SUCCESS);
} else {
......@@ -347,12 +347,12 @@ public class ExecutorService extends BaseService{
CMDPARAM_RECOVER_PROCESS_ID_STRING, instanceId));
command.setExecutorId(loginUser.getId());
if(!processDao.verifyIsNeedCreateCommand(command)){
if(!processService.verifyIsNeedCreateCommand(command)){
putMsg(result, Status.PROCESS_INSTANCE_EXECUTING_COMMAND,processDefinitionId);
return result;
}
int create = processDao.createCommand(command);
int create = processService.createCommand(command);
if (create > 0) {
putMsg(result, Status.SUCCESS);
......@@ -376,7 +376,7 @@ public class ExecutorService extends BaseService{
putMsg(result,Status.REQUEST_PARAMS_NOT_VALID_ERROR,"process definition id");
}
List<Integer> ids = new ArrayList<>();
processDao.recurseFindSubProcessId(processDefineId, ids);
processService.recurseFindSubProcessId(processDefineId, ids);
Integer[] idArray = ids.toArray(new Integer[ids.size()]);
if (ids.size() > 0){
List<ProcessDefinition> processDefinitionList;
......@@ -506,9 +506,9 @@ public class ExecutorService extends BaseService{
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start));
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(end));
command.setCommandParam(JSONUtils.toJson(cmdParam));
return processDao.createCommand(command);
return processService.createCommand(command);
}else if (runMode == RunMode.RUN_MODE_PARALLEL){
List<Schedule> schedules = processDao.queryReleaseSchedulerListByProcessDefinitionId(processDefineId);
List<Schedule> schedules = processService.queryReleaseSchedulerListByProcessDefinitionId(processDefineId);
List<Date> listDate = new LinkedList<>();
if(!CollectionUtils.isEmpty(schedules)){
for (Schedule item : schedules) {
......@@ -521,7 +521,7 @@ public class ExecutorService extends BaseService{
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(date));
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(date));
command.setCommandParam(JSONUtils.toJson(cmdParam));
processDao.createCommand(command);
processService.createCommand(command);
}
return listDate.size();
}else{
......@@ -532,7 +532,7 @@ public class ExecutorService extends BaseService{
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start));
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(start));
command.setCommandParam(JSONUtils.toJson(cmdParam));
processDao.createCommand(command);
processService.createCommand(command);
start = DateUtils.getSomeDay(start, 1);
}
return runCunt;
......@@ -544,7 +544,7 @@ public class ExecutorService extends BaseService{
}
}else{
command.setCommandParam(JSONUtils.toJson(cmdParam));
return processDao.createCommand(command);
return processService.createCommand(command);
}
return 0;
......
......@@ -17,12 +17,12 @@
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.log.LogClient;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.ProcessDao;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.service.log.LogClientService;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
......@@ -37,7 +37,7 @@ public class LoggerService {
private static final Logger logger = LoggerFactory.getLogger(LoggerService.class);
@Autowired
private ProcessDao processDao;
private ProcessService processService;
/**
* view log
......@@ -49,7 +49,7 @@ public class LoggerService {
*/
public Result queryLog(int taskInstId, int skipLineNum, int limit) {
TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId);
TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId);
if (taskInstance == null){
return new Result(Status.TASK_INSTANCE_NOT_FOUND.getCode(), Status.TASK_INSTANCE_NOT_FOUND.getMsg());
......@@ -64,11 +64,17 @@ public class LoggerService {
Result result = new Result(Status.SUCCESS.getCode(), Status.SUCCESS.getMsg());
logger.info("log host : {} , logPath : {} , logServer port : {}",host,taskInstance.getLogPath(),Constants.RPC_PORT);
LogClient logClient = new LogClient(host, Constants.RPC_PORT);
String log = logClient.rollViewLog(taskInstance.getLogPath(),skipLineNum,limit);
result.setData(log);
logger.info(log);
LogClientService logClient = null;
try {
logClient = new LogClientService(host, Constants.RPC_PORT);
String log = logClient.rollViewLog(taskInstance.getLogPath(),skipLineNum,limit);
result.setData(log);
logger.info(log);
} finally {
if(logClient != null){
logClient.close();
}
}
return result;
}
......@@ -80,17 +86,20 @@ public class LoggerService {
* @return log byte array
*/
public byte[] getLogBytes(int taskInstId) {
TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId);
TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId);
if (taskInstance == null){
throw new RuntimeException("task instance is null");
}
String host = taskInstance.getHost();
if(StringUtils.isEmpty(host)){
throw new RuntimeException("task instance host is null");
LogClientService logClient = null;
try {
logClient = new LogClientService(host, Constants.RPC_PORT);
return logClient.getLogBytes(taskInstance.getLogPath());
} finally {
if(logClient != null){
logClient.close();
}
}
LogClient logClient = new LogClient(host, Constants.RPC_PORT);
return logClient.getLogBytes(taskInstance.getLogPath());
}
}
......@@ -43,9 +43,9 @@ import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.ProcessDao;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
......@@ -94,7 +94,7 @@ public class ProcessDefinitionService extends BaseDAGService {
private ScheduleMapper scheduleMapper;
@Autowired
private ProcessDao processDao;
private ProcessService processService;
@Autowired
private WorkerGroupMapper workerGroupMapper;
......@@ -283,7 +283,7 @@ public class ProcessDefinitionService extends BaseDAGService {
if ((checkProcessJson.get(Constants.STATUS) != Status.SUCCESS)) {
return checkProcessJson;
}
ProcessDefinition processDefinition = processDao.findProcessDefineById(id);
ProcessDefinition processDefinition = processService.findProcessDefineById(id);
if (processDefinition == null) {
// check process definition exists
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, id);
......@@ -296,7 +296,7 @@ public class ProcessDefinitionService extends BaseDAGService {
putMsg(result, Status.SUCCESS);
}
ProcessDefinition processDefine = processDao.findProcessDefineById(id);
ProcessDefinition processDefine = processService.findProcessDefineById(id);
Date now = new Date();
processDefine.setId(id);
......
......@@ -30,15 +30,15 @@ import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.queue.ITaskQueue;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils;
import org.apache.dolphinscheduler.dao.ProcessDao;
import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.queue.ITaskQueue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
......@@ -72,7 +72,7 @@ public class ProcessInstanceService extends BaseDAGService {
ProjectService projectService;
@Autowired
ProcessDao processDao;
ProcessService processService;
@Autowired
ProcessInstanceMapper processInstanceMapper;
......@@ -112,7 +112,7 @@ public class ProcessInstanceService extends BaseDAGService {
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processId);
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId);
String workerGroupName = "";
if(processInstance.getWorkerGroupId() == -1){
workerGroupName = DEFAULT;
......@@ -125,7 +125,7 @@ public class ProcessInstanceService extends BaseDAGService {
}
}
processInstance.setWorkerGroupName(workerGroupName);
ProcessDefinition processDefinition = processDao.findProcessDefineById(processInstance.getProcessDefinitionId());
ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId());
processInstance.setReceivers(processDefinition.getReceivers());
processInstance.setReceiversCc(processDefinition.getReceiversCc());
result.put(Constants.DATA_LIST, processInstance);
......@@ -228,8 +228,8 @@ public class ProcessInstanceService extends BaseDAGService {
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processId);
List<TaskInstance> taskInstanceList = processDao.findValidTaskListByProcessId(processId);
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId);
List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(processId);
AddDependResultForTaskList(taskInstanceList);
Map<String, Object> resultMap = new HashMap<>();
resultMap.put(PROCESS_INSTANCE_STATE, processInstance.getState().toString());
......@@ -304,7 +304,7 @@ public class ProcessInstanceService extends BaseDAGService {
return checkResult;
}
TaskInstance taskInstance = processDao.findTaskInstanceById(taskId);
TaskInstance taskInstance = processService.findTaskInstanceById(taskId);
if (taskInstance == null) {
putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId);
return result;
......@@ -314,7 +314,7 @@ public class ProcessInstanceService extends BaseDAGService {
return result;
}
ProcessInstance subWorkflowInstance = processDao.findSubProcessInstance(
ProcessInstance subWorkflowInstance = processService.findSubProcessInstance(
taskInstance.getProcessInstanceId(), taskInstance.getId());
if (subWorkflowInstance == null) {
putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST, taskId);
......@@ -356,7 +356,7 @@ public class ProcessInstanceService extends BaseDAGService {
}
//check process instance exists
ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processInstanceId);
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId);
if (processInstance == null) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
......@@ -380,7 +380,7 @@ public class ProcessInstanceService extends BaseDAGService {
String globalParams = null;
String originDefParams = null;
int timeout = processInstance.getTimeout();
ProcessDefinition processDefinition = processDao.findProcessDefineById(processInstance.getProcessDefinitionId());
ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId());
if (StringUtils.isNotEmpty(processInstanceJson)) {
ProcessData processData = JSONUtils.parseObject(processInstanceJson, ProcessData.class);
//check workflow json is valid
......@@ -396,7 +396,7 @@ public class ProcessInstanceService extends BaseDAGService {
processInstance.getCmdTypeIfComplement(), schedule);
timeout = processData.getTimeout();
processInstance.setTimeout(timeout);
Tenant tenant = processDao.getTenantForProcess(processData.getTenantId(),
Tenant tenant = processService.getTenantForProcess(processData.getTenantId(),
processDefinition.getUserId());
if(tenant != null){
processInstance.setTenantCode(tenant.getTenantCode());
......@@ -406,7 +406,7 @@ public class ProcessInstanceService extends BaseDAGService {
}
// int update = processDao.updateProcessInstance(processInstanceId, processInstanceJson,
// globalParams, schedule, flag, locations, connects);
int update = processDao.updateProcessInstance(processInstance);
int update = processService.updateProcessInstance(processInstance);
int updateDefine = 1;
if (syncDefine && StringUtils.isNotEmpty(processInstanceJson)) {
processDefinition.setProcessDefinitionJson(processInstanceJson);
......@@ -445,7 +445,7 @@ public class ProcessInstanceService extends BaseDAGService {
return checkResult;
}
ProcessInstance subInstance = processDao.findProcessInstanceDetailById(subId);
ProcessInstance subInstance = processService.findProcessInstanceDetailById(subId);
if (subInstance == null) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, subId);
return result;
......@@ -455,7 +455,7 @@ public class ProcessInstanceService extends BaseDAGService {
return result;
}
ProcessInstance parentWorkflowInstance = processDao.findParentProcessInstance(subId);
ProcessInstance parentWorkflowInstance = processService.findParentProcessInstance(subId);
if (parentWorkflowInstance == null) {
putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST);
return result;
......@@ -476,7 +476,7 @@ public class ProcessInstanceService extends BaseDAGService {
* @return delete result code
*/
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId,ITaskQueue tasksQueue) {
public Map<String, Object> deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId, ITaskQueue tasksQueue) {
Map<String, Object> result = new HashMap<>(5);
Project project = projectMapper.queryByName(projectName);
......@@ -486,8 +486,8 @@ public class ProcessInstanceService extends BaseDAGService {
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processInstanceId);
List<TaskInstance> taskInstanceList = processDao.findValidTaskListByProcessId(processInstanceId);
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId);
List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(processInstanceId);
if (null == processInstance) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
......@@ -512,7 +512,7 @@ public class ProcessInstanceService extends BaseDAGService {
.append(taskInstance.getId())
.append(UNDERLINE);
int taskWorkerGroupId = processDao.getTaskWorkerGroupId(taskInstance);
int taskWorkerGroupId = processService.getTaskWorkerGroupId(taskInstance);
WorkerGroup workerGroup = workerGroupMapper.selectById(taskWorkerGroupId);
if(workerGroup == null){
......@@ -541,9 +541,9 @@ public class ProcessInstanceService extends BaseDAGService {
}
// delete database cascade
int delete = processDao.deleteWorkProcessInstanceById(processInstanceId);
processDao.deleteAllSubWorkProcessByParentId(processInstanceId);
processDao.deleteWorkProcessMapByParentId(processInstanceId);
int delete = processService.deleteWorkProcessInstanceById(processInstanceId);
processService.deleteAllSubWorkProcessByParentId(processInstanceId);
processService.deleteWorkProcessMapByParentId(processInstanceId);
if (delete > 0) {
putMsg(result, Status.SUCCESS);
......
......@@ -26,7 +26,6 @@ import org.apache.dolphinscheduler.common.model.Server;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.ProcessDao;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
......@@ -34,11 +33,12 @@ import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.dao.utils.cron.CronUtils;
import org.apache.dolphinscheduler.dao.quartz.ProcessScheduleJob;
import org.apache.dolphinscheduler.dao.quartz.QuartzExecutors;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.quartz.ProcessScheduleJob;
import org.apache.dolphinscheduler.service.quartz.QuartzExecutors;
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......@@ -68,7 +68,7 @@ public class SchedulerService extends BaseService {
private MonitorService monitorService;
@Autowired
private ProcessDao processDao;
private ProcessService processService;
@Autowired
private ScheduleMapper scheduleMapper;
......@@ -119,7 +119,7 @@ public class SchedulerService extends BaseService {
}
// check work flow define release state
ProcessDefinition processDefinition = processDao.findProcessDefineById(processDefineId);
ProcessDefinition processDefinition = processService.findProcessDefineById(processDefineId);
result = executorService.checkProcessDefinitionValid(processDefinition, processDefineId);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
......@@ -221,7 +221,7 @@ public class SchedulerService extends BaseService {
return result;
}
ProcessDefinition processDefinition = processDao.findProcessDefineById(schedule.getProcessDefinitionId());
ProcessDefinition processDefinition = processService.findProcessDefineById(schedule.getProcessDefinitionId());
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, schedule.getProcessDefinitionId());
return result;
......@@ -321,7 +321,7 @@ public class SchedulerService extends BaseService {
putMsg(result, Status.SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE, scheduleStatus);
return result;
}
ProcessDefinition processDefinition = processDao.findProcessDefineById(scheduleObj.getProcessDefinitionId());
ProcessDefinition processDefinition = processService.findProcessDefineById(scheduleObj.getProcessDefinitionId());
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, scheduleObj.getProcessDefinitionId());
return result;
......@@ -338,7 +338,7 @@ public class SchedulerService extends BaseService {
}
// check sub process definition release state
List<Integer> subProcessDefineIds = new ArrayList<>();
processDao.recurseFindSubProcessId(scheduleObj.getProcessDefinitionId(), subProcessDefineIds);
processService.recurseFindSubProcessId(scheduleObj.getProcessDefinitionId(), subProcessDefineIds);
Integer[] idArray = subProcessDefineIds.toArray(new Integer[subProcessDefineIds.size()]);
if (subProcessDefineIds.size() > 0){
List<ProcessDefinition> subProcessDefinitionList =
......@@ -423,7 +423,7 @@ public class SchedulerService extends BaseService {
return result;
}
ProcessDefinition processDefinition = processDao.findProcessDefineById(processDefineId);
ProcessDefinition processDefinition = processService.findProcessDefineById(processDefineId);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineId);
return result;
......@@ -472,7 +472,7 @@ public class SchedulerService extends BaseService {
logger.info("set schedule, project id: {}, scheduleId: {}", projectId, scheduleId);
Schedule schedule = processDao.querySchedule(scheduleId);
Schedule schedule = processService.querySchedule(scheduleId);
if (schedule == null) {
logger.warn("process schedule info not exists");
return;
......
......@@ -24,7 +24,6 @@ import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.ProcessDao;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
......@@ -33,6 +32,7 @@ import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
......@@ -56,7 +56,7 @@ public class TaskInstanceService extends BaseService {
ProjectService projectService;
@Autowired
ProcessDao processDao;
ProcessService processService;
@Autowired
TaskInstanceMapper taskInstanceMapper;
......
......@@ -17,6 +17,7 @@
package org.apache.dolphinscheduler.api.utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......@@ -91,5 +92,4 @@ public class FourLetterWordMain {
throw new IOException("Exception while executing four letter word: " + cmd, e);
}
}
}
......@@ -18,9 +18,9 @@ package org.apache.dolphinscheduler.api.utils;
import org.apache.dolphinscheduler.common.enums.ZKNodeType;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.common.zk.AbstractZKClient;
import org.apache.dolphinscheduler.common.model.Server;
import org.apache.dolphinscheduler.dao.entity.ZookeeperRecord;
import org.apache.dolphinscheduler.service.zk.AbstractZKClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
......@@ -34,7 +34,7 @@ import java.util.List;
* monitor zookeeper info
*/
@Component
public class ZookeeperMonitor extends AbstractZKClient{
public class ZookeeperMonitor extends AbstractZKClient {
private static final Logger LOG = LoggerFactory.getLogger(ZookeeperMonitor.class);
......
......@@ -21,15 +21,15 @@ import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.queue.ITaskQueue;
import org.apache.dolphinscheduler.common.queue.TaskQueueFactory;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.dao.ProcessDao;
import org.apache.dolphinscheduler.dao.entity.CommandCount;
import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.queue.ITaskQueue;
import org.apache.dolphinscheduler.service.queue.TaskQueueFactory;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
......@@ -78,7 +78,7 @@ public class DataAnalysisServiceTest {
ITaskQueue taskQueue;
@Mock
ProcessDao processDao;
ProcessService processService;
private Project project;
......
......@@ -22,10 +22,10 @@ import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.RunMode;
import org.apache.dolphinscheduler.dao.ProcessDao;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
......@@ -52,7 +52,7 @@ public class ExecutorService2Test {
private ExecutorService executorService;
@Mock
private ProcessDao processDao;
private ProcessService processService;
@Mock
private ProcessDefinitionMapper processDefinitionMapper;
......@@ -100,8 +100,8 @@ public class ExecutorService2Test {
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(project);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(checkProjectAndAuth());
Mockito.when(processDefinitionMapper.selectById(processDefinitionId)).thenReturn(processDefinition);
Mockito.when(processDao.getTenantForProcess(tenantId, userId)).thenReturn(new Tenant());
Mockito.when(processDao.createCommand(any(Command.class))).thenReturn(1);
Mockito.when(processService.getTenantForProcess(tenantId, userId)).thenReturn(new Tenant());
Mockito.when(processService.createCommand(any(Command.class))).thenReturn(1);
}
/**
......@@ -111,7 +111,7 @@ public class ExecutorService2Test {
@Test
public void testNoComplement() throws ParseException {
try {
Mockito.when(processDao.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList());
Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList());
Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName,
processDefinitionId, cronTime, CommandType.START_PROCESS,
null, null,
......@@ -119,7 +119,7 @@ public class ExecutorService2Test {
"", "", RunMode.RUN_MODE_SERIAL,
Priority.LOW, 0, 110);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processDao, times(1)).createCommand(any(Command.class));
verify(processService, times(1)).createCommand(any(Command.class));
}catch (Exception e){
Assert.assertTrue(false);
}
......@@ -132,7 +132,7 @@ public class ExecutorService2Test {
@Test
public void testDateError() throws ParseException {
try {
Mockito.when(processDao.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList());
Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList());
Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName,
processDefinitionId, "2020-01-31 23:00:00,2020-01-01 00:00:00", CommandType.COMPLEMENT_DATA,
null, null,
......@@ -140,7 +140,7 @@ public class ExecutorService2Test {
"", "", RunMode.RUN_MODE_SERIAL,
Priority.LOW, 0, 110);
Assert.assertEquals(Status.START_PROCESS_INSTANCE_ERROR, result.get(Constants.STATUS));
verify(processDao, times(0)).createCommand(any(Command.class));
verify(processService, times(0)).createCommand(any(Command.class));
}catch (Exception e){
Assert.assertTrue(false);
}
......@@ -153,7 +153,7 @@ public class ExecutorService2Test {
@Test
public void testSerial() throws ParseException {
try {
Mockito.when(processDao.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList());
Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList());
Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName,
processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA,
null, null,
......@@ -161,7 +161,7 @@ public class ExecutorService2Test {
"", "", RunMode.RUN_MODE_SERIAL,
Priority.LOW, 0, 110);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processDao, times(1)).createCommand(any(Command.class));
verify(processService, times(1)).createCommand(any(Command.class));
}catch (Exception e){
Assert.assertTrue(false);
}
......@@ -174,7 +174,7 @@ public class ExecutorService2Test {
@Test
public void testParallelWithOutSchedule() throws ParseException {
try{
Mockito.when(processDao.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList());
Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList());
Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName,
processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA,
null, null,
......@@ -182,7 +182,7 @@ public class ExecutorService2Test {
"", "", RunMode.RUN_MODE_PARALLEL,
Priority.LOW, 0, 110);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processDao, times(31)).createCommand(any(Command.class));
verify(processService, times(31)).createCommand(any(Command.class));
}catch (Exception e){
Assert.assertTrue(false);
}
......@@ -195,7 +195,7 @@ public class ExecutorService2Test {
@Test
public void testParallelWithSchedule() throws ParseException {
try{
Mockito.when(processDao.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(oneSchedulerList());
Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(oneSchedulerList());
Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName,
processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA,
null, null,
......@@ -203,7 +203,7 @@ public class ExecutorService2Test {
"", "", RunMode.RUN_MODE_PARALLEL,
Priority.LOW, 0, 110);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processDao, times(15)).createCommand(any(Command.class));
verify(processService, times(15)).createCommand(any(Command.class));
}catch (Exception e){
Assert.assertTrue(false);
}
......
......@@ -18,8 +18,8 @@ package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.dao.ProcessDao;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
......@@ -40,14 +40,14 @@ public class LoggerServiceTest {
@InjectMocks
private LoggerService loggerService;
@Mock
private ProcessDao processDao;
private ProcessService processService;
@Test
public void testQueryDataSourceList(){
TaskInstance taskInstance = new TaskInstance();
Mockito.when(processDao.findTaskInstanceById(1)).thenReturn(taskInstance);
Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance);
Result result = loggerService.queryLog(2,1,1);
//TASK_INSTANCE_NOT_FOUND
Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(),result.getCode().intValue());
......@@ -59,7 +59,7 @@ public class LoggerServiceTest {
//SUCCESS
taskInstance.setHost("127.0.0.1");
taskInstance.setLogPath("/temp/log");
Mockito.when(processDao.findTaskInstanceById(1)).thenReturn(taskInstance);
Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance);
result = loggerService.queryLog(1,1,1);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
}
......@@ -68,7 +68,7 @@ public class LoggerServiceTest {
public void testGetLogBytes(){
TaskInstance taskInstance = new TaskInstance();
Mockito.when(processDao.findTaskInstanceById(1)).thenReturn(taskInstance);
Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance);
//task instance is null
try{
......
......@@ -28,9 +28,9 @@ import org.apache.dolphinscheduler.common.enums.*;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.FileUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.ProcessDao;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.http.entity.ContentType;
import org.json.JSONException;
import org.junit.Assert;
......@@ -80,7 +80,7 @@ public class ProcessDefinitionServiceTest {
private WorkerGroupMapper workerGroupMapper;
@Mock
private ProcessDao processDao;
private ProcessService processService;
private String sqlDependentJson = "{\"globalParams\":[]," +
"\"tasks\":[{\"type\":\"SQL\",\"id\":\"tasks-27297\",\"name\":\"sql\"," +
......@@ -584,7 +584,7 @@ public class ProcessDefinitionServiceTest {
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Mockito.when(processDao.findProcessDefineById(1)).thenReturn(getProcessDefinition());
Mockito.when(processService.findProcessDefineById(1)).thenReturn(getProcessDefinition());
Map<String, Object> updateResult = processDefinitionService.updateProcessDefinition(loginUser, projectName, 1, "test",
sqlDependentJson, "", "", "");
......
......@@ -85,21 +85,7 @@
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-client</artifactId>
<version>${curator.version}</version>
<exclusions>
<exclusion>
<groupId>log4j-1.2-api</groupId>
<artifactId>org.apache.logging.log4j</artifactId>
</exclusion>
<exclusion>
<groupId>io.netty</groupId>
<artifactId>netty</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
......@@ -548,6 +534,10 @@
<artifactId>log4j-web</artifactId>
<groupId>org.apache.logging.log4j</groupId>
</exclusion>
<exclusion>
<artifactId>jasper-compiler</artifactId>
<groupId>tomcat</groupId>
</exclusion>
</exclusions>
</dependency>
......@@ -601,11 +591,6 @@
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.janino</groupId>
<artifactId>janino</artifactId>
......
......@@ -16,7 +16,6 @@
*/
package org.apache.dolphinscheduler.common.utils;
import org.springframework.lang.Nullable;
/**
* A collection of static utility methods to validate input.
......@@ -57,7 +56,7 @@ public final class Preconditions {
*
* @throws NullPointerException Thrown, if the passed reference was null.
*/
public static <T> T checkNotNull(T reference, @Nullable String errorMessage) {
public static <T> T checkNotNull(T reference, String errorMessage) {
if (reference == null) {
throw new NullPointerException(String.valueOf(errorMessage));
}
......@@ -84,8 +83,8 @@ public final class Preconditions {
* @throws NullPointerException Thrown, if the passed reference was null.
*/
public static <T> T checkNotNull(T reference,
@Nullable String errorMessageTemplate,
@Nullable Object... errorMessageArgs) {
String errorMessageTemplate,
Object... errorMessageArgs) {
if (reference == null) {
throw new NullPointerException(format(errorMessageTemplate, errorMessageArgs));
......@@ -121,7 +120,7 @@ public final class Preconditions {
*
* @throws IllegalArgumentException Thrown, if the condition is violated.
*/
public static void checkArgument(boolean condition, @Nullable Object errorMessage) {
public static void checkArgument(boolean condition, Object errorMessage) {
if (!condition) {
throw new IllegalArgumentException(String.valueOf(errorMessage));
}
......@@ -141,8 +140,8 @@ public final class Preconditions {
* @throws IllegalArgumentException Thrown, if the condition is violated.
*/
public static void checkArgument(boolean condition,
@Nullable String errorMessageTemplate,
@Nullable Object... errorMessageArgs) {
String errorMessageTemplate,
Object... errorMessageArgs) {
if (!condition) {
throw new IllegalArgumentException(format(errorMessageTemplate, errorMessageArgs));
......@@ -177,7 +176,7 @@ public final class Preconditions {
*
* @throws IllegalStateException Thrown, if the condition is violated.
*/
public static void checkState(boolean condition, @Nullable Object errorMessage) {
public static void checkState(boolean condition, Object errorMessage) {
if (!condition) {
throw new IllegalStateException(String.valueOf(errorMessage));
}
......@@ -197,8 +196,8 @@ public final class Preconditions {
* @throws IllegalStateException Thrown, if the condition is violated.
*/
public static void checkState(boolean condition,
@Nullable String errorMessageTemplate,
@Nullable Object... errorMessageArgs) {
String errorMessageTemplate,
Object... errorMessageArgs) {
if (!condition) {
throw new IllegalStateException(format(errorMessageTemplate, errorMessageArgs));
......@@ -231,7 +230,7 @@ public final class Preconditions {
* @throws IllegalArgumentException Thrown, if size is negative.
* @throws IndexOutOfBoundsException Thrown, if the index negative or greater than or equal to size
*/
public static void checkElementIndex(int index, int size, @Nullable String errorMessage) {
public static void checkElementIndex(int index, int size, String errorMessage) {
checkArgument(size >= 0, "Size was negative.");
if (index < 0 || index >= size) {
throw new IndexOutOfBoundsException(String.valueOf(errorMessage) + " Index: " + index + ", Size: " + size);
......@@ -248,7 +247,7 @@ public final class Preconditions {
*
* <p>This method is taken quasi verbatim from the Guava Preconditions class.
*/
private static String format(@Nullable String template, @Nullable Object... args) {
private static String format( String template, Object... args) {
final int numArgs = args == null ? 0 : args.length;
template = String.valueOf(template); // null -> "null"
......
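The hunk above only drops the Spring @Nullable annotations from Preconditions; the helper signatures themselves are unchanged. A short usage sketch of the overloads visible in this diff (the values are made up for illustration):

import org.apache.dolphinscheduler.common.utils.Preconditions;

public class PreconditionsExample {
    public static void main(String[] args) {
        String host = "127.0.0.1";   // hypothetical input
        int port = 50051;            // hypothetical input

        // throws NullPointerException with this message if host is null
        Preconditions.checkNotNull(host, "host must not be null");

        // throws IllegalArgumentException if the condition is violated
        Preconditions.checkArgument(port > 0 && port < 65536, "port out of range: " + port);

        // throws IndexOutOfBoundsException if the index is outside [0, size)
        Preconditions.checkElementIndex(0, 10, "index check failed.");
    }
}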
......@@ -19,7 +19,6 @@ package org.apache.dolphinscheduler.common.utils;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.yetus.audience.InterfaceAudience;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
......
......@@ -116,21 +116,6 @@
<artifactId>cron-utils</artifactId>
</dependency>
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz</artifactId>
<exclusions>
<exclusion>
<artifactId>c3p0</artifactId>
<groupId>c3p0</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz-jobs</artifactId>
</dependency>
<dependency>
<groupId>commons-configuration</groupId>
<artifactId>commons-configuration</artifactId>
......
......@@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.job.db;
package org.apache.dolphinscheduler.dao.datasource;
/**
* data source base class
......
......@@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.job.db;
package org.apache.dolphinscheduler.dao.datasource;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.StringUtils;
......
......@@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.job.db;
package org.apache.dolphinscheduler.dao.datasource;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.StringUtils;
......
......@@ -14,11 +14,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.job.db;
package org.apache.dolphinscheduler.dao.datasource;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.Constants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......
......@@ -14,14 +14,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.job.db;
package org.apache.dolphinscheduler.dao.datasource;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.*;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
/**
* data source of hive
......
......@@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.job.db;
package org.apache.dolphinscheduler.dao.datasource;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.StringUtils;
......
......@@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.job.db;
package org.apache.dolphinscheduler.dao.datasource;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.StringUtils;
......
......@@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.job.db;
package org.apache.dolphinscheduler.dao.datasource;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.StringUtils;
......
......@@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.job.db;
package org.apache.dolphinscheduler.dao.datasource;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.StringUtils;
......
@@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.job.db;
package org.apache.dolphinscheduler.dao.datasource;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.StringUtils;
......
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>dolphinscheduler</artifactId>
<groupId>org.apache.dolphinscheduler</groupId>
<version>1.2.1-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dolphinscheduler-remote</artifactId>
<name>dolphinscheduler-remote</name>
<!-- FIXME change it to the project's website -->
<url>http://www.example.com</url>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.7</maven.compiler.source>
<maven.compiler.target>1.7</maven.compiler.target>
</properties>
<dependencies>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-all</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.*;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import org.apache.dolphinscheduler.remote.codec.NettyDecoder;
import org.apache.dolphinscheduler.remote.codec.NettyEncoder;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.config.NettyClientConfig;
import org.apache.dolphinscheduler.remote.exceptions.RemotingException;
import org.apache.dolphinscheduler.remote.handler.NettyClientHandler;
import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;
import org.apache.dolphinscheduler.remote.utils.Address;
import org.apache.dolphinscheduler.remote.utils.Constants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.InetSocketAddress;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
/**
* remoting netty client
*/
public class NettyRemotingClient {
private final Logger logger = LoggerFactory.getLogger(NettyRemotingClient.class);
/**
* bootstrap
*/
private final Bootstrap bootstrap = new Bootstrap();
/**
* encoder
*/
private final NettyEncoder encoder = new NettyEncoder();
/**
* channels
*/
private final ConcurrentHashMap<Address, Channel> channels = new ConcurrentHashMap<>();
/**
* default executor
*/
private final ExecutorService defaultExecutor = Executors.newFixedThreadPool(Constants.CPUS);
/**
* started flag
*/
private final AtomicBoolean isStarted = new AtomicBoolean(false);
/**
* worker group
*/
private final NioEventLoopGroup workerGroup;
/**
* client handler
*/
private final NettyClientHandler clientHandler = new NettyClientHandler(this);
/**
* netty client config
*/
private final NettyClientConfig clientConfig;
/**
* netty client init
*
* @param clientConfig client config
*/
public NettyRemotingClient(final NettyClientConfig clientConfig){
this.clientConfig = clientConfig;
this.workerGroup = new NioEventLoopGroup(clientConfig.getWorkerThreads(), new ThreadFactory() {
private AtomicInteger threadIndex = new AtomicInteger(0);
@Override
public Thread newThread(Runnable r) {
return new Thread(r, String.format("NettyClient_%d", this.threadIndex.incrementAndGet()));
}
});
this.start();
}
/**
* netty client start
*/
private void start(){
this.bootstrap
.group(this.workerGroup)
.channel(NioSocketChannel.class)
.option(ChannelOption.SO_KEEPALIVE, clientConfig.isSoKeepalive())
.option(ChannelOption.TCP_NODELAY, clientConfig.isTcpNoDelay())
.option(ChannelOption.SO_SNDBUF, clientConfig.getSendBufferSize())
.option(ChannelOption.SO_RCVBUF, clientConfig.getReceiveBufferSize())
.handler(new ChannelInitializer<SocketChannel>() {
@Override
public void initChannel(SocketChannel ch) throws Exception {
ch.pipeline().addLast(
new NettyDecoder(),
clientHandler,
encoder);
}
});
isStarted.compareAndSet(false, true);
}
/**
* register processor
*
* @param commandType command type
* @param processor processor
*/
public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor) {
registerProcessor(commandType, processor, null);
}
/**
* register processor
*
* @param commandType command type
* @param processor processor
* @param executor thread executor
*/
public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor, final ExecutorService executor) {
this.clientHandler.registerProcessor(commandType, processor, executor);
}
/**
* send command to the remote address
* @param address address
* @param command command
* @throws RemotingException
*/
public void send(final Address address, final Command command) throws RemotingException {
final Channel channel = getChannel(address);
if (channel == null) {
throw new RemotingException("network error");
}
try {
channel.writeAndFlush(command).addListener(new ChannelFutureListener(){
@Override
public void operationComplete(ChannelFuture future) throws Exception {
if(future.isSuccess()){
logger.info("sent command {} to {}", command, address);
} else{
logger.error("send command {} to {} failed, error {}", command, address, future.cause());
}
}
});
} catch (Exception ex) {
String msg = String.format("send command %s to address %s encounter error", command, address);
throw new RemotingException(msg, ex);
}
}
/**
* get channel
* @param address address
* @return channel
*/
public Channel getChannel(Address address) {
Channel channel = channels.get(address);
if(channel != null && channel.isActive()){
return channel;
}
return createChannel(address, true);
}
/**
* create channel
* @param address address
* @param isSync is sync
* @return channel
*/
public Channel createChannel(Address address, boolean isSync) {
ChannelFuture future;
try {
synchronized (bootstrap){
future = bootstrap.connect(new InetSocketAddress(address.getHost(), address.getPort()));
}
if(isSync){
future.sync();
}
if (future.isSuccess()) {
Channel channel = future.channel();
channels.put(address, channel);
return channel;
}
} catch (Exception ex) {
logger.info("connect to {} error {}", address, ex);
}
return null;
}
/**
* get default thread executor
* @return thread executor
*/
public ExecutorService getDefaultExecutor() {
return defaultExecutor;
}
/**
* close client
*/
public void close() {
if(isStarted.compareAndSet(true, false)){
try {
closeChannels();
if(workerGroup != null){
this.workerGroup.shutdownGracefully();
}
if(defaultExecutor != null){
defaultExecutor.shutdown();
}
} catch (Exception ex) {
logger.error("netty client close exception", ex);
}
logger.info("netty client closed");
}
}
/**
* close channel
*/
private void closeChannels(){
for (Channel channel : this.channels.values()) {
channel.close();
}
this.channels.clear();
}
/**
* remove channel
* @param address address
*/
public void removeChannel(Address address){
Channel channel = this.channels.remove(address);
if(channel != null){
channel.close();
}
}
}
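A minimal usage sketch of the client above (illustrative, not part of the commit): the NettyRequestProcessor signature is inferred from the handler code in this module, and the Address(host, port) constructor plus the endpoint values are assumptions, since only the Address getters appear in this diff.
// sketch: create a client, register a processor for responses, send one request
NettyClientConfig config = new NettyClientConfig();
NettyRemotingClient client = new NettyRemotingClient(config);    // start() runs inside the constructor
client.registerProcessor(CommandType.GET_LOG_BYTES_RESPONSE, new NettyRequestProcessor() {
    @Override
    public void process(Channel channel, Command command) {
        // command.getBody() holds the FastJson-serialized GetLogBytesResponseCommand
    }
});
Address address = new Address("127.0.0.1", 12346);               // assumed (host, port) constructor
Command request = new GetLogBytesRequestCommand("/tmp/task.log").convert2Command();
client.send(address, request);                                    // throws RemotingException if no channel can be created
client.close();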
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import org.apache.dolphinscheduler.remote.codec.NettyDecoder;
import org.apache.dolphinscheduler.remote.codec.NettyEncoder;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.config.NettyServerConfig;
import org.apache.dolphinscheduler.remote.handler.NettyServerHandler;
import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;
import org.apache.dolphinscheduler.remote.utils.Constants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
/**
* remoting netty server
*/
public class NettyRemotingServer {
private final Logger logger = LoggerFactory.getLogger(NettyRemotingServer.class);
/**
* server bootstrap
*/
private final ServerBootstrap serverBootstrap = new ServerBootstrap();
/**
* encoder
*/
private final NettyEncoder encoder = new NettyEncoder();
/**
* default executor
*/
private final ExecutorService defaultExecutor = Executors.newFixedThreadPool(Constants.CPUS);
/**
* boss group
*/
private final NioEventLoopGroup bossGroup;
/**
* worker group
*/
private final NioEventLoopGroup workGroup;
/**
* server config
*/
private final NettyServerConfig serverConfig;
/**
* server handler
*/
private final NettyServerHandler serverHandler = new NettyServerHandler(this);
/**
* started flag
*/
private final AtomicBoolean isStarted = new AtomicBoolean(false);
/**
* server init
*
* @param serverConfig server config
*/
public NettyRemotingServer(final NettyServerConfig serverConfig){
this.serverConfig = serverConfig;
this.bossGroup = new NioEventLoopGroup(1, new ThreadFactory() {
private AtomicInteger threadIndex = new AtomicInteger(0);
@Override
public Thread newThread(Runnable r) {
return new Thread(r, String.format("NettyServerBossThread_%d", this.threadIndex.incrementAndGet()));
}
});
this.workGroup = new NioEventLoopGroup(serverConfig.getWorkerThread(), new ThreadFactory() {
private AtomicInteger threadIndex = new AtomicInteger(0);
@Override
public Thread newThread(Runnable r) {
return new Thread(r, String.format("NettyServerWorkerThread_%d", this.threadIndex.incrementAndGet()));
}
});
}
/**
* server start
*/
public void start(){
if(this.isStarted.get()){
return;
}
this.serverBootstrap
.group(this.bossGroup, this.workGroup)
.channel(NioServerSocketChannel.class)
.option(ChannelOption.SO_REUSEADDR, true)
.option(ChannelOption.SO_BACKLOG, serverConfig.getSoBacklog())
.childOption(ChannelOption.SO_KEEPALIVE, serverConfig.isSoKeepalive())
.childOption(ChannelOption.TCP_NODELAY, serverConfig.isTcpNoDelay())
.childOption(ChannelOption.SO_SNDBUF, serverConfig.getSendBufferSize())
.childOption(ChannelOption.SO_RCVBUF, serverConfig.getReceiveBufferSize())
.childHandler(new ChannelInitializer<NioSocketChannel>() {
@Override
protected void initChannel(NioSocketChannel ch) throws Exception {
initNettyChannel(ch);
}
});
ChannelFuture future;
try {
future = serverBootstrap.bind(serverConfig.getListenPort()).sync();
} catch (Exception e) {
logger.error("NettyRemotingServer bind fail {}, exit", e);
throw new RuntimeException(String.format("NettyRemotingServer bind %s fail", serverConfig.getListenPort()));
}
if (future.isSuccess()) {
logger.info("NettyRemotingServer bind success at port : {}", serverConfig.getListenPort());
} else if (future.cause() != null) {
throw new RuntimeException(String.format("NettyRemotingServer bind %s fail", serverConfig.getListenPort()), future.cause());
} else {
throw new RuntimeException(String.format("NettyRemotingServer bind %s fail", serverConfig.getListenPort()));
}
//
isStarted.compareAndSet(false, true);
}
/**
* init netty channel
* @param ch socket channel
* @throws Exception
*/
private void initNettyChannel(NioSocketChannel ch) throws Exception{
ChannelPipeline pipeline = ch.pipeline();
pipeline.addLast("encoder", encoder);
pipeline.addLast("decoder", new NettyDecoder());
pipeline.addLast("handler", serverHandler);
}
/**
* register processor
* @param commandType command type
* @param processor processor
*/
public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor) {
this.registerProcessor(commandType, processor, null);
}
/**
* register processor
*
* @param commandType command type
* @param processor processor
* @param executor thread executor
*/
public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor, final ExecutorService executor) {
this.serverHandler.registerProcessor(commandType, processor, executor);
}
/**
* get default thread executor
* @return thread executor
*/
public ExecutorService getDefaultExecutor() {
return defaultExecutor;
}
public void close() {
if(isStarted.compareAndSet(true, false)){
try {
if(bossGroup != null){
this.bossGroup.shutdownGracefully();
}
if(workGroup != null){
this.workGroup.shutdownGracefully();
}
if(defaultExecutor != null){
defaultExecutor.shutdown();
}
} catch (Exception ex) {
logger.error("netty server close exception", ex);
}
logger.info("netty server closed");
}
}
}
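The matching server side, sketched under the same assumptions (the processor signature is inferred from NettyServerHandler later in this commit; the log-reading step is a placeholder):
// sketch: start a server and answer GET_LOG_BYTES_REQUEST with the request's opaque id
NettyServerConfig serverConfig = new NettyServerConfig();         // listens on 12346 by default
NettyRemotingServer server = new NettyRemotingServer(serverConfig);
server.registerProcessor(CommandType.GET_LOG_BYTES_REQUEST, new NettyRequestProcessor() {
    @Override
    public void process(Channel channel, Command command) {
        byte[] logBytes = new byte[0];                            // placeholder: read the requested log file here
        // echo the opaque id so the client can correlate this response with its request
        channel.writeAndFlush(new GetLogBytesResponseCommand(logBytes).convert2Command(command.getOpaque()));
    }
});
server.start();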
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.codec;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ReplayingDecoder;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandHeader;
import org.apache.dolphinscheduler.remote.command.CommandType;
import java.util.List;
/**
* netty decoder
*/
public class NettyDecoder extends ReplayingDecoder<NettyDecoder.State> {
public NettyDecoder(){
super(State.MAGIC);
}
private final CommandHeader commandHeader = new CommandHeader();
/**
* decode
*
* @param ctx channel handler context
* @param in byte buffer
* @param out out content
* @throws Exception
*/
@Override
protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
switch (state()){
// intentional fall-through: each case checkpoints, then keeps decoding the next field of the frame
case MAGIC:
checkMagic(in.readByte());
checkpoint(State.COMMAND);
case COMMAND:
commandHeader.setType(in.readByte());
checkpoint(State.OPAQUE);
case OPAQUE:
commandHeader.setOpaque(in.readLong());
checkpoint(State.BODY_LENGTH);
case BODY_LENGTH:
commandHeader.setBodyLength(in.readInt());
checkpoint(State.BODY);
case BODY:
byte[] body = new byte[commandHeader.getBodyLength()];
in.readBytes(body);
//
Command packet = new Command();
packet.setType(commandType(commandHeader.getType()));
packet.setOpaque(commandHeader.getOpaque());
packet.setBody(body);
out.add(packet);
//
checkpoint(State.MAGIC);
}
}
/**
* get command type
* @param type type
* @return
*/
private CommandType commandType(byte type){
for(CommandType ct : CommandType.values()){
if(ct.ordinal() == type){
return ct;
}
}
return null;
}
/**
* check magic
* @param magic magic
*/
private void checkMagic(byte magic) {
if (magic != Command.MAGIC) {
throw new IllegalArgumentException("illegal packet [magic]" + magic);
}
}
enum State{
MAGIC,
COMMAND,
OPAQUE,
BODY_LENGTH,
BODY;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.codec;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandler.Sharable;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToByteEncoder;
import org.apache.dolphinscheduler.remote.command.Command;
/**
* netty encoder
*/
@Sharable
public class NettyEncoder extends MessageToByteEncoder<Command> {
/**
* encode
*
* @param ctx channel handler context
* @param msg command
* @param out byte buffer
* @throws Exception
*/
@Override
protected void encode(ChannelHandlerContext ctx, Command msg, ByteBuf out) throws Exception {
if(msg == null){
throw new Exception("encode msg is null");
}
out.writeByte(Command.MAGIC);
out.writeByte(msg.getType().ordinal());
out.writeLong(msg.getOpaque());
out.writeInt(msg.getBody().length);
out.writeBytes(msg.getBody());
}
}
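Taken together, NettyDecoder and NettyEncoder fix the wire format: 1 byte magic, 1 byte command type (the enum ordinal), 8 bytes opaque request id, 4 bytes body length, then the body. A short illustrative fragment that builds such a frame by hand (not part of the commit; the log path is a placeholder):
// frame layout implied by the codec: magic(1) + type(1) + opaque(8) + bodyLength(4) + body(n)
byte[] body = FastJsonSerializer.serialize(new GetLogBytesRequestCommand("/tmp/task.log"));
ByteBuf frame = Unpooled.buffer(1 + 1 + 8 + 4 + body.length);
frame.writeByte(Command.MAGIC);                                   // first byte, checked by NettyDecoder.checkMagic()
frame.writeByte(CommandType.GET_LOG_BYTES_REQUEST.ordinal());     // mapped back via CommandType.values()
frame.writeLong(1L);                                              // opaque request id
frame.writeInt(body.length);
frame.writeBytes(body);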
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.command;
import java.io.Serializable;
/**
* command transferred over netty
* carries the command type, an opaque request id and the serialized body
*/
public class Command implements Serializable {
private static final long serialVersionUID = 1L;
public static final byte MAGIC = (byte) 0xbabe;
public Command(){
}
public Command(long opaque){
this.opaque = opaque;
}
/**
* command type
*/
private CommandType type;
/**
* request unique identification
*/
private long opaque;
/**
* data body
*/
private byte[] body;
public CommandType getType() {
return type;
}
public void setType(CommandType type) {
this.type = type;
}
public long getOpaque() {
return opaque;
}
public void setOpaque(long opaque) {
this.opaque = opaque;
}
public byte[] getBody() {
return body;
}
public void setBody(byte[] body) {
this.body = body;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (int) (opaque ^ (opaque >>> 32));
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
Command other = (Command) obj;
return opaque == other.opaque;
}
@Override
public String toString() {
return "Command [type=" + type + ", opaque=" + opaque + ", bodyLen=" + (body == null ? 0 : body.length) + "]";
}
}
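equals() and hashCode() above are keyed on opaque alone, so the request id is what pairs a response with its request, regardless of type or body. For example:
Command request = new Command(42L);
request.setType(CommandType.PING);
Command response = new Command(42L);
response.setType(CommandType.PONG);
boolean correlated = request.equals(response);   // true: equality is defined purely by the opaque id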
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.command;
import java.io.Serializable;
/**
* command header
*/
public class CommandHeader implements Serializable {
/**
* type
*/
private byte type;
/**
* request unique identification
*/
private long opaque;
/**
* body length
*/
private int bodyLength;
public int getBodyLength() {
return bodyLength;
}
public void setBodyLength(int bodyLength) {
this.bodyLength = bodyLength;
}
public byte getType() {
return type;
}
public void setType(byte type) {
this.type = type;
}
public long getOpaque() {
return opaque;
}
public void setOpaque(long opaque) {
this.opaque = opaque;
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.remote.command;
public enum CommandType {
    /**
     * roll view log request
     */
    ROLL_VIEW_LOG_REQUEST,
    /**
     * roll view log response
     */
    ROLL_VIEW_LOG_RESPONSE,
    /**
     * view whole log request
     */
    VIEW_WHOLE_LOG_REQUEST,
    /**
     * view whole log response
     */
    VIEW_WHOLE_LOG_RESPONSE,
    /**
     * get log bytes request
     */
    GET_LOG_BYTES_REQUEST,
    /**
     * get log bytes response
     */
    GET_LOG_BYTES_RESPONSE,
    WORKER_REQUEST,
    MASTER_RESPONSE,
    /**
     * execute task request
     */
    EXECUTE_TASK_REQUEST,
    /**
     * execute task response
     */
    EXECUTE_TASK_RESPONSE,
    /**
     * ping
     */
    PING,
    /**
     * pong
     */
    PONG;
}
\ No newline at end of file
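Because NettyEncoder writes CommandType.ordinal() as a single byte and NettyDecoder resolves it by scanning values(), the wire value of each constant is its position in this enum, so new constants should be appended rather than inserted. The round trip, spelled out:
byte wireType = (byte) CommandType.ROLL_VIEW_LOG_REQUEST.ordinal();   // what NettyEncoder writes
CommandType decoded = CommandType.values()[wireType];                 // what NettyDecoder.commandType() recovers
// decoded == CommandType.ROLL_VIEW_LOG_REQUEST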
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.remote.command;
import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer;
import java.io.Serializable;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
/**
 * execute task request command
 */
public class ExecuteTaskRequestCommand implements Serializable {
    private static final AtomicLong REQUEST = new AtomicLong(1);
    /**
     * task id
     */
    private String taskId;
    /**
     * attempt id
     */
    private String attemptId;
    /**
     * application name
     */
    private String applicationName;
    /**
     * group name
     */
    private String groupName;
    /**
     * task name
     */
    private String taskName;
    /**
     * connect port
     */
    private int connectorPort;
    /**
     * description info
     */
    private String description;
    /**
     * class name
     */
    private String className;
    /**
     * method name
     */
    private String methodName;
    /**
     * params
     */
    private String params;
    /**
     * shard items
     */
    private List<Integer> shardItems;
    public List<Integer> getShardItems() {
        return shardItems;
    }
    public void setShardItems(List<Integer> shardItems) {
        this.shardItems = shardItems;
    }
    public String getParams() {
        return params;
    }
    public void setParams(String params) {
        this.params = params;
    }
    public String getTaskId() {
        return taskId;
    }
    public void setTaskId(String taskId) {
        this.taskId = taskId;
    }
    public String getApplicationName() {
        return applicationName;
    }
    public void setApplicationName(String applicationName) {
        this.applicationName = applicationName;
    }
    public String getGroupName() {
        return groupName;
    }
    public void setGroupName(String groupName) {
        this.groupName = groupName;
    }
    public String getTaskName() {
        return taskName;
    }
    public void setTaskName(String taskName) {
        this.taskName = taskName;
    }
    public int getConnectorPort() {
        return connectorPort;
    }
    public void setConnectorPort(int connectorPort) {
        this.connectorPort = connectorPort;
    }
    public String getDescription() {
        return description;
    }
    public void setDescription(String description) {
        this.description = description;
    }
    public String getClassName() {
        return className;
    }
    public void setClassName(String className) {
        this.className = className;
    }
    public String getMethodName() {
        return methodName;
    }
    public void setMethodName(String methodName) {
        this.methodName = methodName;
    }
    /**
     * package request command
     *
     * @return command
     */
    public Command convert2Command(){
        Command command = new Command(REQUEST.getAndIncrement());
        command.setType(CommandType.EXECUTE_TASK_REQUEST);
        byte[] body = FastJsonSerializer.serialize(this);
        command.setBody(body);
        return command;
    }
}
\ No newline at end of file
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.remote.command;
import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer;
import java.io.Serializable;
import java.util.concurrent.atomic.AtomicLong;
/**
 * execute task response command
 */
public class ExecuteTaskResponseCommand implements Serializable {
    private static final AtomicLong REQUEST = new AtomicLong(1);
    /**
     * task id
     */
    private String taskId;
    /**
     * attempt id
     */
    private String attemptId;
    /**
     * result info
     */
    private Object result;
    /**
     * receive time
     */
    private long receivedTime;
    /**
     * execute count
     */
    private int executeCount;
    /**
     * execute time
     */
    private long executeTime;
    public String getAttemptId() {
        return attemptId;
    }
    public void setAttemptId(String attemptId) {
        this.attemptId = attemptId;
    }
    public String getTaskId() {
        return taskId;
    }
    public void setTaskId(String taskId) {
        this.taskId = taskId;
    }
    public Object getResult() {
        return result;
    }
    public void setResult(Object result) {
        this.result = result;
    }
    public long getReceivedTime() {
        return receivedTime;
    }
    public void setReceivedTime(long receivedTime) {
        this.receivedTime = receivedTime;
    }
    public int getExecuteCount() {
        return executeCount;
    }
    public void setExecuteCount(int executeCount) {
        this.executeCount = executeCount;
    }
    public long getExecuteTime() {
        return executeTime;
    }
    public void setExecuteTime(long executeTime) {
        this.executeTime = executeTime;
    }
    /**
     * package response command
     *
     * @return command
     */
    public Command convert2Command(){
        Command command = new Command(REQUEST.getAndIncrement());
        command.setType(CommandType.EXECUTE_TASK_RESPONSE);
        byte[] body = FastJsonSerializer.serialize(this);
        command.setBody(body);
        return command;
    }
}
\ No newline at end of file
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.command;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import java.io.Serializable;
import java.util.concurrent.atomic.AtomicLong;
/**
* ping machine
*/
public class Ping implements Serializable {
private static final AtomicLong ID = new AtomicLong(1);
/**
* ping body
*/
protected static ByteBuf EMPTY_BODY = Unpooled.EMPTY_BUFFER;
/**
* request command body
*/
private static byte[] EMPTY_BODY_ARRAY = new byte[0];
private static final ByteBuf PING_BUF;
static {
ByteBuf ping = Unpooled.buffer();
ping.writeByte(Command.MAGIC);
ping.writeByte(CommandType.PING.ordinal());
ping.writeLong(0);
ping.writeInt(0);
ping.writeBytes(EMPTY_BODY);
PING_BUF = Unpooled.unreleasableBuffer(ping).asReadOnly();
}
/**
* ping connect
* @return result
*/
public static ByteBuf pingContent(){
return PING_BUF.duplicate();
}
/**
* package ping command
*
* @return command
*/
public static Command create(){
Command command = new Command(ID.getAndIncrement());
command.setType(CommandType.PING);
command.setBody(EMPTY_BODY_ARRAY);
return command;
}
}
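Ping keeps a pre-encoded, read-only, unreleasable frame so a heartbeat can be written without re-encoding; pingContent() hands out a duplicate that shares the underlying bytes. A minimal sketch of both ways to send one, assuming channel is an active connection obtained from the client above:
channel.writeAndFlush(Ping.pingContent());   // raw ByteBuf: not a Command, so NettyEncoder passes it through as-is
channel.writeAndFlush(Ping.create());        // regular Command: goes through NettyEncoder like any other message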
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.command;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import java.io.Serializable;
/**
* Pong, returned in reply to a Ping
*/
public class Pong implements Serializable {
/**
* pong body
*/
protected static ByteBuf EMPTY_BODY = Unpooled.EMPTY_BUFFER;
/**
* pong command body
*/
private static byte[] EMPTY_BODY_ARRAY = new byte[0];
/**
* ping byte buffer
*/
private static final ByteBuf PONG_BUF;
static {
ByteBuf ping = Unpooled.buffer();
ping.writeByte(Command.MAGIC);
ping.writeByte(CommandType.PONG.ordinal());
ping.writeLong(0);
ping.writeInt(0);
ping.writeBytes(EMPTY_BODY);
PONG_BUF = Unpooled.unreleasableBuffer(ping).asReadOnly();
}
/**
* pong content
* @return result
*/
public static ByteBuf pingContent(){
return PONG_BUF.duplicate();
}
/**
* package pong command
*
* @param opaque request unique identification
* @return command
*/
public static Command create(long opaque){
Command command = new Command(opaque);
command.setType(CommandType.PONG);
command.setBody(EMPTY_BODY_ARRAY);
return command;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.command.log;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer;
import java.io.Serializable;
import java.util.concurrent.atomic.AtomicLong;
/**
* get log bytes request command
*/
public class GetLogBytesRequestCommand implements Serializable {
/**
* request id
*/
private static final AtomicLong REQUEST = new AtomicLong(1);
/**
* log path
*/
private String path;
public GetLogBytesRequestCommand() {
}
public GetLogBytesRequestCommand(String path) {
this.path = path;
}
public String getPath() {
return path;
}
public void setPath(String path) {
this.path = path;
}
/**
* package request command
*
* @return command
*/
public Command convert2Command(){
Command command = new Command(REQUEST.getAndIncrement());
command.setType(CommandType.GET_LOG_BYTES_REQUEST);
byte[] body = FastJsonSerializer.serialize(this);
command.setBody(body);
return command;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.command.log;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer;
import java.io.Serializable;
/**
* get log bytes response command
*/
public class GetLogBytesResponseCommand implements Serializable {
/**
* log byte data
*/
private byte[] data;
public GetLogBytesResponseCommand() {
}
public GetLogBytesResponseCommand(byte[] data) {
this.data = data;
}
public byte[] getData() {
return data;
}
public void setData(byte[] data) {
this.data = data;
}
/**
* package response command
*
* @param opaque request unique identification
* @return command
*/
public Command convert2Command(long opaque){
Command command = new Command(opaque);
command.setType(CommandType.GET_LOG_BYTES_RESPONSE);
byte[] body = FastJsonSerializer.serialize(this);
command.setBody(body);
return command;
}
}
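A processor on the receiving side has to turn the command body back into this typed object. Only FastJsonSerializer.serialize() appears in this diff; the deserialize(byte[], Class) counterpart used below is an assumption and labeled as such:
// inside a processor registered for GET_LOG_BYTES_RESPONSE
// assumption: FastJsonSerializer offers a deserialize(byte[], Class) mirror of serialize(), not shown in this diff
GetLogBytesResponseCommand response =
        FastJsonSerializer.deserialize(command.getBody(), GetLogBytesResponseCommand.class);
byte[] logBytes = response.getData();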
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.command.log;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer;
import java.io.Serializable;
import java.util.concurrent.atomic.AtomicLong;
/**
* roll view log request command
*/
public class RollViewLogRequestCommand implements Serializable {
/**
* request id
*/
private static final AtomicLong REQUEST = new AtomicLong(1);
/**
* log path
*/
private String path;
/**
* skip line number
*/
private int skipLineNum;
/**
* query log line number limit
*/
private int limit;
public RollViewLogRequestCommand() {
}
public RollViewLogRequestCommand(String path, int skipLineNum, int limit) {
this.path = path;
this.skipLineNum = skipLineNum;
this.limit = limit;
}
public String getPath() {
return path;
}
public void setPath(String path) {
this.path = path;
}
public int getSkipLineNum() {
return skipLineNum;
}
public void setSkipLineNum(int skipLineNum) {
this.skipLineNum = skipLineNum;
}
public int getLimit() {
return limit;
}
public void setLimit(int limit) {
this.limit = limit;
}
/**
* package request command
*
* @return command
*/
public Command convert2Command(){
Command command = new Command(REQUEST.getAndIncrement());
command.setType(CommandType.ROLL_VIEW_LOG_REQUEST);
byte[] body = FastJsonSerializer.serialize(this);
command.setBody(body);
return command;
}
}
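RollViewLogRequestCommand pages through a log with skipLineNum and limit; the next page simply advances skipLineNum by the previous limit. For instance (path and page size are placeholders):
int pageSize = 100;
Command firstPage  = new RollViewLogRequestCommand("/tmp/task.log", 0, pageSize).convert2Command();         // lines 0..99
Command secondPage = new RollViewLogRequestCommand("/tmp/task.log", pageSize, pageSize).convert2Command();  // lines 100..199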
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.command.log;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer;
import java.io.Serializable;
/**
* roll view log response command
*/
public class RollViewLogResponseCommand implements Serializable {
/**
* response data
*/
private String msg;
public RollViewLogResponseCommand() {
}
public RollViewLogResponseCommand(String msg) {
this.msg = msg;
}
public String getMsg() {
return msg;
}
public void setMsg(String msg) {
this.msg = msg;
}
/**
* package response command
*
* @param opaque request unique identification
* @return command
*/
public Command convert2Command(long opaque){
Command command = new Command(opaque);
command.setType(CommandType.ROLL_VIEW_LOG_RESPONSE);
byte[] body = FastJsonSerializer.serialize(this);
command.setBody(body);
return command;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.command.log;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer;
import java.io.Serializable;
import java.util.concurrent.atomic.AtomicLong;
/**
* view log request command
*/
public class ViewLogRequestCommand implements Serializable {
/**
* request id
*/
private static final AtomicLong REQUEST = new AtomicLong(1);
private String path;
public ViewLogRequestCommand() {
}
public ViewLogRequestCommand(String path) {
this.path = path;
}
public String getPath() {
return path;
}
public void setPath(String path) {
this.path = path;
}
/**
* package request command
*
* @return command
*/
public Command convert2Command(){
Command command = new Command(REQUEST.getAndIncrement());
command.setType(CommandType.VIEW_WHOLE_LOG_REQUEST);
byte[] body = FastJsonSerializer.serialize(this);
command.setBody(body);
return command;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.command.log;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer;
import java.io.Serializable;
/**
* view log response command
*/
public class ViewLogResponseCommand implements Serializable {
/**
* response data
*/
private String msg;
public ViewLogResponseCommand() {
}
public ViewLogResponseCommand(String msg) {
this.msg = msg;
}
public String getMsg() {
return msg;
}
public void setMsg(String msg) {
this.msg = msg;
}
/**
* package response command
*
* @param opaque request unique identification
* @return command
*/
public Command convert2Command(long opaque){
Command command = new Command(opaque);
command.setType(CommandType.VIEW_WHOLE_LOG_RESPONSE);
byte[] body = FastJsonSerializer.serialize(this);
command.setBody(body);
return command;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.config;
import org.apache.dolphinscheduler.remote.utils.Constants;
/**
* netty client config
*/
public class NettyClientConfig {
/**
* worker threads, defaults to the number of machine CPUs
*/
private int workerThreads = Constants.CPUS;
/**
* whether to enable TCP_NODELAY
*/
private boolean tcpNoDelay = true;
/**
* whether keep alive
*/
private boolean soKeepalive = true;
/**
* send buffer size
*/
private int sendBufferSize = 65535;
/**
* receive buffer size
*/
private int receiveBufferSize = 65535;
public int getWorkerThreads() {
return workerThreads;
}
public void setWorkerThreads(int workerThreads) {
this.workerThreads = workerThreads;
}
public boolean isTcpNoDelay() {
return tcpNoDelay;
}
public void setTcpNoDelay(boolean tcpNoDelay) {
this.tcpNoDelay = tcpNoDelay;
}
public boolean isSoKeepalive() {
return soKeepalive;
}
public void setSoKeepalive(boolean soKeepalive) {
this.soKeepalive = soKeepalive;
}
public int getSendBufferSize() {
return sendBufferSize;
}
public void setSendBufferSize(int sendBufferSize) {
this.sendBufferSize = sendBufferSize;
}
public int getReceiveBufferSize() {
return receiveBufferSize;
}
public void setReceiveBufferSize(int receiveBufferSize) {
this.receiveBufferSize = receiveBufferSize;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.config;
import org.apache.dolphinscheduler.remote.utils.Constants;
/**
* netty server config
*/
public class NettyServerConfig {
/**
* init the server connectable queue
*/
private int soBacklog = 1024;
/**
* whether to enable TCP_NODELAY
*/
private boolean tcpNoDelay = true;
/**
* whether keep alive
*/
private boolean soKeepalive = true;
/**
* send buffer size
*/
private int sendBufferSize = 65535;
/**
* receive buffer size
*/
private int receiveBufferSize = 65535;
/**
* worker threads, defaults to the number of machine CPUs
*/
private int workerThread = Constants.CPUS;
/**
* listen port
*/
private int listenPort = 12346;
public int getListenPort() {
return listenPort;
}
public void setListenPort(int listenPort) {
this.listenPort = listenPort;
}
public int getSoBacklog() {
return soBacklog;
}
public void setSoBacklog(int soBacklog) {
this.soBacklog = soBacklog;
}
public boolean isTcpNoDelay() {
return tcpNoDelay;
}
public void setTcpNoDelay(boolean tcpNoDelay) {
this.tcpNoDelay = tcpNoDelay;
}
public boolean isSoKeepalive() {
return soKeepalive;
}
public void setSoKeepalive(boolean soKeepalive) {
this.soKeepalive = soKeepalive;
}
public int getSendBufferSize() {
return sendBufferSize;
}
public void setSendBufferSize(int sendBufferSize) {
this.sendBufferSize = sendBufferSize;
}
public int getReceiveBufferSize() {
return receiveBufferSize;
}
public void setReceiveBufferSize(int receiveBufferSize) {
this.receiveBufferSize = receiveBufferSize;
}
public int getWorkerThread() {
return workerThread;
}
public void setWorkerThread(int workerThread) {
this.workerThread = workerThread;
}
}
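Both config classes are plain beans with conservative defaults (port 12346, roughly 64 KB buffers, backlog 1024, CPU-count worker threads); anything else is adjusted through the setters before the client or server is constructed. The values below are illustrative only:
NettyServerConfig serverConfig = new NettyServerConfig();
serverConfig.setListenPort(50051);                  // illustrative port, replaces the 12346 default
serverConfig.setWorkerThread(Constants.CPUS * 2);
serverConfig.setSendBufferSize(128 * 1024);
serverConfig.setReceiveBufferSize(128 * 1024);
NettyClientConfig clientConfig = new NettyClientConfig();
clientConfig.setWorkerThreads(4);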
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.exceptions;
/**
* remote exception
*/
public class RemotingException extends Exception {
public RemotingException() {
super();
}
/** Constructs a new remoting exception with the specified detail message.
* The cause is not initialized, and may subsequently be initialized by a
* call to {@link #initCause}.
*
* @param message the detail message. The detail message is saved for
* later retrieval by the {@link #getMessage()} method.
*/
public RemotingException(String message) {
super(message);
}
/**
* Constructs a new remoting exception with the specified detail message and
* cause. <p>Note that the detail message associated with
* {@code cause} is <i>not</i> automatically incorporated in
* this exception's detail message.
*
* @param message the detail message (which is saved for later retrieval
* by the {@link #getMessage()} method).
* @param cause the cause (which is saved for later retrieval by the
* {@link #getCause()} method). (A <tt>null</tt> value is
* permitted, and indicates that the cause is nonexistent or
* unknown.)
* @since 1.4
*/
public RemotingException(String message, Throwable cause) {
super(message, cause);
}
/** Constructs a new remoting exception with the specified cause and a
* detail message of <tt>(cause==null ? null : cause.toString())</tt>
* (which typically contains the class and detail message of
* <tt>cause</tt>). This constructor is useful for exceptions
* that are little more than wrappers for other throwables.
*
* @param cause the cause (which is saved for later retrieval by the
* {@link #getCause()} method). (A <tt>null</tt> value is
* permitted, and indicates that the cause is nonexistent or
* unknown.)
* @since 1.4
*/
public RemotingException(Throwable cause) {
super(cause);
}
/**
* Constructs a new remoting exception with the specified detail
* message, cause, suppression enabled or disabled, and writable
* stack trace enabled or disabled.
*
* @param message the detail message.
* @param cause the cause. (A {@code null} value is permitted,
* and indicates that the cause is nonexistent or unknown.)
* @param enableSuppression whether or not suppression is enabled
* or disabled
* @param writableStackTrace whether or not the stack trace should
* be writable
*
* @since 1.7
*/
protected RemotingException(String message, Throwable cause,
boolean enableSuppression,
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.handler;
import io.netty.channel.*;
import org.apache.dolphinscheduler.remote.NettyRemotingClient;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;
import org.apache.dolphinscheduler.remote.utils.ChannelUtils;
import org.apache.dolphinscheduler.remote.utils.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.RejectedExecutionException;
/**
* netty client request handler
*/
@ChannelHandler.Sharable
public class NettyClientHandler extends ChannelInboundHandlerAdapter {
private final Logger logger = LoggerFactory.getLogger(NettyClientHandler.class);
/**
* netty remote client
*/
private final NettyRemotingClient nettyRemotingClient;
/**
* client processors map
*/
private final ConcurrentHashMap<CommandType, Pair<NettyRequestProcessor, ExecutorService>> processors = new ConcurrentHashMap<>();
public NettyClientHandler(NettyRemotingClient nettyRemotingClient){
this.nettyRemotingClient = nettyRemotingClient;
}
/**
* When the current channel is not active,
* the current channel has reached the end of its life cycle
*
* @param ctx channel handler context
* @throws Exception
*/
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
nettyRemotingClient.removeChannel(ChannelUtils.toAddress(ctx.channel()));
ctx.channel().close();
}
/**
* The current channel reads data from the remote
*
* @param ctx channel handler context
* @param msg message
* @throws Exception
*/
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
processReceived(ctx.channel(), (Command)msg);
}
/**
* register processor
*
* @param commandType command type
* @param processor processor
*/
public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor) {
this.registerProcessor(commandType, processor, nettyRemotingClient.getDefaultExecutor());
}
/**
* register processor
*
* @param commandType command type
* @param processor processor
* @param executor thread executor
*/
public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor, final ExecutorService executor) {
ExecutorService executorRef = executor;
if(executorRef == null){
executorRef = nettyRemotingClient.getDefaultExecutor();
}
this.processors.putIfAbsent(commandType, new Pair<NettyRequestProcessor, ExecutorService>(processor, executorRef));
}
/**
* process received logic
*
* @param channel channel
* @param msg message
*/
private void processReceived(final Channel channel, final Command msg) {
final CommandType commandType = msg.getType();
final Pair<NettyRequestProcessor, ExecutorService> pair = processors.get(commandType);
if (pair != null) {
Runnable r = new Runnable() {
@Override
public void run() {
try {
pair.getLeft().process(channel, msg);
} catch (Throwable ex) {
logger.error("process msg {} error : {}", msg, ex);
}
}
};
try {
pair.getRight().submit(r);
} catch (RejectedExecutionException e) {
logger.warn("thread pool is full, discard msg {} from {}", msg, ChannelUtils.getRemoteAddress(channel));
}
} else {
logger.warn("commandType {} not support", commandType);
}
}
/**
* caught exception
*
* @param ctx channel handler context
* @param cause cause
* @throws Exception
*/
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
logger.error("exceptionCaught : {}", cause);
nettyRemotingClient.removeChannel(ChannelUtils.toAddress(ctx.channel()));
ctx.channel().close();
}
/**
* channel write changed
* @param ctx channel handler context
* @throws Exception
*/
@Override
public void channelWritabilityChanged(ChannelHandlerContext ctx) throws Exception {
Channel ch = ctx.channel();
ChannelConfig config = ch.config();
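// backpressure: stop reading from the socket while the write buffer is above the high water mark, resume once it drains below the low water mark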
if (!ch.isWritable()) {
if (logger.isWarnEnabled()) {
logger.warn("{} is not writable, over high water level : {}",
new Object[]{ch, config.getWriteBufferHighWaterMark()});
}
config.setAutoRead(false);
} else {
if (logger.isWarnEnabled()) {
logger.warn("{} is writable, to low water : {}",
new Object[]{ch, config.getWriteBufferLowWaterMark()});
}
config.setAutoRead(true);
}
}
}
\ No newline at end of file
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.handler;
import io.netty.channel.*;
import org.apache.dolphinscheduler.remote.NettyRemotingServer;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;
import org.apache.dolphinscheduler.remote.utils.ChannelUtils;
import org.apache.dolphinscheduler.remote.utils.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.RejectedExecutionException;
/**
* netty server request handler
*/
@ChannelHandler.Sharable
public class NettyServerHandler extends ChannelInboundHandlerAdapter {
private final Logger logger = LoggerFactory.getLogger(NettyServerHandler.class);
/**
* netty remote server
*/
private final NettyRemotingServer nettyRemotingServer;
/**
* server processors, keyed by command type
*/
private final ConcurrentHashMap<CommandType, Pair<NettyRequestProcessor, ExecutorService>> processors = new ConcurrentHashMap<>();
public NettyServerHandler(NettyRemotingServer nettyRemotingServer){
this.nettyRemotingServer = nettyRemotingServer;
}
/**
* When the current channel becomes inactive, it has reached the end of its
* life cycle: close it
* @param ctx channel handler context
* @throws Exception
*/
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
ctx.channel().close();
}
/**
* The current channel reads data from the remote end
*
* @param ctx channel handler context
* @param msg message
* @throws Exception
*/
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
processReceived(ctx.channel(), (Command)msg);
}
/**
* register processor
*
* @param commandType command type
* @param processor processor
*/
public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor) {
this.registerProcessor(commandType, processor, null);
}
/**
* register processor
*
* @param commandType command type
* @param processor processor
* @param executor thread executor
*/
public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor, final ExecutorService executor) {
ExecutorService executorRef = executor;
if(executorRef == null){
executorRef = nettyRemotingServer.getDefaultExecutor();
}
this.processors.putIfAbsent(commandType, new Pair<NettyRequestProcessor, ExecutorService>(processor, executorRef));
}
/**
* process received logic
* @param channel channel
* @param msg message
*/
private void processReceived(final Channel channel, final Command msg) {
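// dispatch to the processor registered for this command type on its own executor, keeping the IO thread free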
final CommandType commandType = msg.getType();
final Pair<NettyRequestProcessor, ExecutorService> pair = processors.get(commandType);
if (pair != null) {
Runnable r = new Runnable() {
@Override
public void run() {
try {
pair.getLeft().process(channel, msg);
} catch (Throwable ex) {
logger.error("process msg {} error : {}", msg, ex);
}
}
};
try {
pair.getRight().submit(r);
} catch (RejectedExecutionException e) {
logger.warn("thread pool is full, discard msg {} from {}", msg, ChannelUtils.getRemoteAddress(channel));
}
} else {
logger.warn("commandType {} not support", commandType);
}
}
/**
* caught exception
*
* @param ctx channel handler context
* @param cause cause
* @throws Exception
*/
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
logger.error("exceptionCaught : {}", cause);
ctx.channel().close();
}
/**
* channel write changed
*
* @param ctx channel handler context
* @throws Exception
*/
@Override
public void channelWritabilityChanged(ChannelHandlerContext ctx) throws Exception {
Channel ch = ctx.channel();
ChannelConfig config = ch.config();
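// backpressure: toggle autoRead based on the channel's write buffer water marks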
if (!ch.isWritable()) {
if (logger.isWarnEnabled()) {
logger.warn("{} is not writable, over high water level : {}",
new Object[]{ch, config.getWriteBufferHighWaterMark()});
}
config.setAutoRead(false);
} else {
if (logger.isWarnEnabled()) {
logger.warn("{} is writable, to low water : {}",
new Object[]{ch, config.getWriteBufferLowWaterMark()});
}
config.setAutoRead(true);
}
}
}
\ No newline at end of file
......@@ -14,30 +14,20 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.zk;
package org.apache.dolphinscheduler.remote.processor;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import io.netty.channel.Channel;
import org.apache.dolphinscheduler.remote.command.Command;
/**
* demo for using zkServer
* netty request processor
*/
public class TestZkServer {
public interface NettyRequestProcessor {
@Before
public void before(){
ZKServer.start();
}
@Test
public void test(){
Assert.assertTrue(ZKServer.isStarted());
}
@After
public void after(){
ZKServer.stop();
}
/**
* process logic
* @param channel channel
* @param command command
*/
void process(final Channel channel, final Command command);
}
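For reference, a minimal processor sketch (not part of this change; the PingProcessor name is made up here, while Pong.create, command.getOpaque() and CommandType.PING are used the same way in NettyRemotingClientTest later in this diff):

import io.netty.channel.Channel;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.Pong;
import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;

public class PingProcessor implements NettyRequestProcessor {
    @Override
    public void process(final Channel channel, final Command command) {
        // answer a PING with a PONG carrying the same opaque id so the sender can correlate the reply
        channel.writeAndFlush(Pong.create(command.getOpaque()));
    }
}

A processor like this is bound to a command type via registerProcessor, e.g. server.registerProcessor(CommandType.PING, new PingProcessor()).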
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.utils;
import java.io.Serializable;
/**
* server address
*/
public class Address implements Serializable {
/**
* host
*/
private String host;
/**
* port
*/
private int port;
public Address(){
//NOP
}
public Address(String host, int port){
this.host = host;
this.port = port;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public int getPort() {
return port;
}
public void setPort(int port) {
this.port = port;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((host == null) ? 0 : host.hashCode());
result = prime * result + port;
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
Address other = (Address) obj;
if (host == null) {
if (other.host != null) {
return false;
}
} else if (!host.equals(other.host)) {
return false;
}
return port == other.port;
}
@Override
public String toString() {
return "Address [host=" + host + ", port=" + port + "]";
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.utils;
import io.netty.channel.Channel;
import java.net.InetSocketAddress;
/**
* channel utils
*/
public class ChannelUtils {
/**
* get local address
*
* @param channel channel
* @return local address
*/
public static String getLocalAddress(Channel channel){
return ((InetSocketAddress)channel.localAddress()).getAddress().getHostAddress();
}
/**
* get remote address
* @param channel channel
* @return remote address
*/
public static String getRemoteAddress(Channel channel){
return ((InetSocketAddress)channel.remoteAddress()).getAddress().getHostAddress();
}
/**
* channel to address
* @param channel channel
* @return address
*/
public static Address toAddress(Channel channel){
InetSocketAddress socketAddress = ((InetSocketAddress)channel.remoteAddress());
return new Address(socketAddress.getAddress().getHostAddress(), socketAddress.getPort());
}
}
......@@ -14,30 +14,28 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.zk;
package org.apache.dolphinscheduler.remote.utils;
import java.nio.charset.Charset;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
* demo for using zkServer
* constant
*/
public class TestZk {
@Before
public void before(){
ZKServer.start();
}
@Test
public void test(){
Assert.assertTrue(ZKServer.isStarted());
}
@After
public void after(){
ZKServer.stop();
}
public class Constants {
public static final String COMMA = ",";
public static final String SLASH = "/";
/**
* charset
*/
public static final Charset UTF8 = Charset.forName("UTF-8");
/**
* cpus
*/
public static final int CPUS = Runtime.getRuntime().availableProcessors();
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.utils;
import com.alibaba.fastjson.JSON;
/**
* json serialize or deserialize
*/
public class FastJsonSerializer {
/**
* serialize to byte
*
* @param obj object
* @param <T> object type
* @return byte array
*/
public static <T> byte[] serialize(T obj) {
String json = JSON.toJSONString(obj);
return json.getBytes(Constants.UTF8);
}
/**
* serialize to string
* @param obj object
* @param <T> object type
* @return string
*/
public static <T> String serializeToString(T obj) {
return JSON.toJSONString(obj);
}
/**
* deserialize
*
* @param src byte array
* @param clazz class
* @param <T> deserialize type
* @return deserialize type
*/
public static <T> T deserialize(byte[] src, Class<T> clazz) {
return JSON.parseObject(new String(src, Constants.UTF8), clazz);
}
}
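A minimal usage sketch (assumed, not part of this change) that round-trips the Address type defined above; the host and port values are placeholders:

Address original = new Address("127.0.0.1", 12346);
byte[] bytes = FastJsonSerializer.serialize(original);                    // JSON text encoded as UTF-8 bytes
Address restored = FastJsonSerializer.deserialize(bytes, Address.class);  // relies on the no-arg constructor and setters Address provides
boolean same = original.equals(restored);                                 // equals() compares host and port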
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.utils;
/**
* key value pair
*
* @param <L> L generic type
* @param <R> R generic type
*/
public class Pair<L, R> {
private L left;
private R right;
public Pair(L left, R right) {
this.left = left;
this.right = right;
}
public L getLeft() {
return left;
}
public void setLeft(L left) {
this.left = left;
}
public R getRight() {
return right;
}
public void setRight(R right) {
this.right = right;
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.remote;
import io.netty.channel.Channel;
import org.apache.dolphinscheduler.remote.NettyRemotingClient;
import org.apache.dolphinscheduler.remote.NettyRemotingServer;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.command.Ping;
import org.apache.dolphinscheduler.remote.command.Pong;
import org.apache.dolphinscheduler.remote.config.NettyClientConfig;
import org.apache.dolphinscheduler.remote.config.NettyServerConfig;
import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;
import org.apache.dolphinscheduler.remote.utils.Address;
import org.junit.Assert;
import org.junit.Test;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicLong;
/**
* netty remote client test
*/
public class NettyRemotingClientTest {
/**
* test ping
*/
@Test
public void testSend(){
NettyServerConfig serverConfig = new NettyServerConfig();
NettyRemotingServer server = new NettyRemotingServer(serverConfig);
server.registerProcessor(CommandType.PING, new NettyRequestProcessor() {
@Override
public void process(Channel channel, Command command) {
channel.writeAndFlush(Pong.create(command.getOpaque()));
}
});
server.start();
//
CountDownLatch latch = new CountDownLatch(1);
AtomicLong opaque = new AtomicLong(1);
final NettyClientConfig clientConfig = new NettyClientConfig();
NettyRemotingClient client = new NettyRemotingClient(clientConfig);
client.registerProcessor(CommandType.PONG, new NettyRequestProcessor() {
@Override
public void process(Channel channel, Command command) {
opaque.set(command.getOpaque());
latch.countDown();
}
});
Command commandPing = Ping.create();
try {
client.send(new Address("127.0.0.1", serverConfig.getListenPort()), commandPing);
latch.await();
} catch (Exception e) {
e.printStackTrace();
}
Assert.assertEquals(opaque.get(), commandPing.getOpaque());
}
}
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler</artifactId>
<version>1.2.1-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dolphinscheduler-rpc</artifactId>
<name>dolphinscheduler-rpc</name>
<url>https://github.com/apache/incubator-dolphinscheduler</url>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<protobuf.version>3.5.1</protobuf.version>
<grpc.version>1.9.0</grpc.version>
</properties>
<dependencies>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<version>${protobuf.version}</version>
</dependency>
<dependency>
<groupId>io.grpc</groupId>
<artifactId>grpc-netty</artifactId>
<version>${grpc.version}</version>
</dependency>
<dependency>
<groupId>io.grpc</groupId>
<artifactId>grpc-protobuf</artifactId>
<version>${grpc.version}</version>
</dependency>
<dependency>
<groupId>io.grpc</groupId>
<artifactId>grpc-stub</artifactId>
<version>${grpc.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</dependency>
</dependencies>
<build>
<extensions>
<extension>
<groupId>kr.motd.maven</groupId>
<artifactId>os-maven-plugin</artifactId>
<version>1.5.0.Final</version>
</extension>
</extensions>
<plugins>
<plugin>
<groupId>org.xolstice.maven.plugins</groupId>
<artifactId>protobuf-maven-plugin</artifactId>
<version>0.5.0</version>
<configuration>
<protocArtifact>com.google.protobuf:protoc:3.5.1-1:exe:${os.detected.classifier}</protocArtifact>
<pluginId>grpc-java</pluginId>
<pluginArtifact>io.grpc:protoc-gen-grpc-java:${grpc.version}:exe:${os.detected.classifier}</pluginArtifact>
</configuration>
<executions>
<execution>
<id>compile</id>
<goals>
<goal>compile</goal>
</goals>
</execution>
<execution>
<id>compile-custom</id>
<goals>
<goal>compile-custom</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
<encoding>${project.build.sourceEncoding}</encoding>
</configuration>
</plugin>
</plugins>
</build>
</project>
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
syntax = "proto3";
package schduler;
option java_multiple_files = true;
option java_package = "org.apache.dolphinscheduler.rpc";
option java_outer_classname = "SchdulerProto";
/**
* return str info
*/
message RetStrInfo {
/**
* str msg info
*/
string msg = 1 ;
}
/**
* return byte info
*/
message RetByteInfo {
/**
* byte data info
*/
bytes data = 1;
}
/**
* log parameter
*/
message LogParameter {
/**
* path
*/
string path = 1 ;
/**
* skip line num
*/
int32 skipLineNum = 2 ;
/**
* display limit num
*/
int32 limit = 3 ;
}
/**
* path parameter
*/
message PathParameter {
/**
* path
*/
string path = 1 ;
}
/**
* log view service
*/
service LogViewService {
/**
* roll view log
*/
rpc rollViewLog(LogParameter) returns (RetStrInfo) {};
/**
* view all log
*/
rpc viewLog(PathParameter) returns (RetStrInfo) {};
/**
* get log bytes
*/
rpc getLogBytes(PathParameter) returns (RetByteInfo) {};
}
......@@ -71,7 +71,7 @@
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-rpc</artifactId>
<artifactId>dolphinscheduler-service</artifactId>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
......
......@@ -14,152 +14,101 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.rpc;
import io.grpc.stub.StreamObserver;
import org.apache.dolphinscheduler.common.Constants;
import com.google.protobuf.ByteString;
import io.grpc.Server;
import io.grpc.ServerBuilder;
import org.apache.dolphinscheduler.rpc.*;
package org.apache.dolphinscheduler.server.log;
import io.netty.channel.Channel;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.command.log.*;
import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;
import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* logger server
* logger request process logic
*/
public class LoggerServer {
public class LoggerRequestProcessor implements NettyRequestProcessor {
private static final Logger logger = LoggerFactory.getLogger(LoggerServer.class);
private final Logger logger = LoggerFactory.getLogger(LoggerRequestProcessor.class);
/**
* server
*/
private Server server;
/**
* server start
* @throws IOException io exception
*/
public void start() throws IOException {
/* The port on which the server should run */
int port = Constants.RPC_PORT;
server = ServerBuilder.forPort(port)
.addService(new LogViewServiceGrpcImpl())
.build()
.start();
logger.info("server started, listening on port : {}" , port);
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
// Use stderr here since the logger may have been reset by its JVM shutdown hook.
logger.info("shutting down gRPC server since JVM is shutting down");
LoggerServer.this.stop();
logger.info("server shut down");
}
});
}
private final ThreadPoolExecutor executor;
/**
* stop
*/
private void stop() {
if (server != null) {
server.shutdown();
}
public LoggerRequestProcessor(){
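// dedicated executor for log file IO: a fixed pool of 4 threads with a bounded queue of 100 pending requests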
this.executor = new ThreadPoolExecutor(4, 4, 10, TimeUnit.SECONDS, new LinkedBlockingQueue<>(100));
}
/**
* await termination on the main thread since the grpc library uses daemon threads.
*/
private void blockUntilShutdown() throws InterruptedException {
if (server != null) {
server.awaitTermination();
@Override
public void process(Channel channel, Command command) {
logger.info("received command : {}", command);
/**
* request task log command type
*/
final CommandType commandType = command.getType();
switch (commandType){
case GET_LOG_BYTES_REQUEST:
GetLogBytesRequestCommand getLogRequest = FastJsonSerializer.deserialize(
command.getBody(), GetLogBytesRequestCommand.class);
byte[] bytes = getFileContentBytes(getLogRequest.getPath());
GetLogBytesResponseCommand getLogResponse = new GetLogBytesResponseCommand(bytes);
channel.writeAndFlush(getLogResponse.convert2Command(command.getOpaque()));
break;
case VIEW_WHOLE_LOG_REQUEST:
ViewLogRequestCommand viewLogRequest = FastJsonSerializer.deserialize(
command.getBody(), ViewLogRequestCommand.class);
String msg = readWholeFileContent(viewLogRequest.getPath());
ViewLogResponseCommand viewLogResponse = new ViewLogResponseCommand(msg);
channel.writeAndFlush(viewLogResponse.convert2Command(command.getOpaque()));
break;
case ROLL_VIEW_LOG_REQUEST:
RollViewLogRequestCommand rollViewLogRequest = FastJsonSerializer.deserialize(
command.getBody(), RollViewLogRequestCommand.class);
List<String> lines = readPartFileContent(rollViewLogRequest.getPath(),
rollViewLogRequest.getSkipLineNum(), rollViewLogRequest.getLimit());
StringBuilder builder = new StringBuilder();
for (String line : lines){
builder.append(line + "\r\n");
}
RollViewLogResponseCommand rollViewLogRequestResponse = new RollViewLogResponseCommand(builder.toString());
channel.writeAndFlush(rollViewLogRequestResponse.convert2Command(command.getOpaque()));
break;
default:
throw new IllegalArgumentException("unknown commandType");
}
}
/**
* main launches the server from the command line.
*/
/**
* main launches the server from the command line.
* @param args arguments
* @throws IOException io exception
* @throws InterruptedException interrupted exception
*/
public static void main(String[] args) throws IOException, InterruptedException {
final LoggerServer server = new LoggerServer();
server.start();
server.blockUntilShutdown();
public ExecutorService getExecutor(){
return this.executor;
}
/**
* Log View Service Grpc Implementation
*/
static class LogViewServiceGrpcImpl extends LogViewServiceGrpc.LogViewServiceImplBase {
@Override
public void rollViewLog(LogParameter request, StreamObserver<RetStrInfo> responseObserver) {
logger.info("log parameter path : {} ,skip line : {}, limit : {}",
request.getPath(),
request.getSkipLineNum(),
request.getLimit());
List<String> list = readFile(request.getPath(), request.getSkipLineNum(), request.getLimit());
StringBuilder sb = new StringBuilder();
boolean errorLineFlag = false;
for (String line : list){
sb.append(line + "\r\n");
}
RetStrInfo retInfoBuild = RetStrInfo.newBuilder().setMsg(sb.toString()).build();
responseObserver.onNext(retInfoBuild);
responseObserver.onCompleted();
}
@Override
public void viewLog(PathParameter request, StreamObserver<RetStrInfo> responseObserver) {
logger.info("task path is : {} " , request.getPath());
RetStrInfo retInfoBuild = RetStrInfo.newBuilder().setMsg(readFile(request.getPath())).build();
responseObserver.onNext(retInfoBuild);
responseObserver.onCompleted();
}
@Override
public void getLogBytes(PathParameter request, StreamObserver<RetByteInfo> responseObserver) {
try {
ByteString bytes = ByteString.copyFrom(getFileBytes(request.getPath()));
RetByteInfo.Builder builder = RetByteInfo.newBuilder();
builder.setData(bytes);
responseObserver.onNext(builder.build());
responseObserver.onCompleted();
}catch (Exception e){
logger.error("get log bytes failed",e);
}
}
}
/**
* get files bytes
* get file content bytes, for downloading the log file
*
* @param path path
* @param filePath file path
* @return byte array of file
* @throws Exception exception
*/
private static byte[] getFileBytes(String path){
private byte[] getFileContentBytes(String filePath){
InputStream in = null;
ByteArrayOutputStream bos = null;
try {
in = new FileInputStream(path);
in = new FileInputStream(filePath);
bos = new ByteArrayOutputStream();
byte[] buf = new byte[1024];
int len = 0;
int len;
while ((len = in.read(buf)) != -1) {
bos.write(buf, 0, len);
}
......@@ -170,69 +119,61 @@ public class LoggerServer {
if (bos != null){
try {
bos.close();
} catch (IOException e) {
e.printStackTrace();
}
} catch (IOException ignore) {}
}
if (in != null){
try {
in.close();
} catch (IOException e) {
e.printStackTrace();
}
} catch (IOException ignore) {}
}
}
return null;
return new byte[0];
}
/**
* read file content
* read part of the file content; lines can be skipped and the number of lines read is limited
*
* @param path
* @param skipLine
* @param limit
* @return
* @param filePath file path
* @param skipLine skip line
* @param limit read lines limit
* @return part file content
*/
private static List<String> readFile(String path,int skipLine,int limit){
try (Stream<String> stream = Files.lines(Paths.get(path))) {
private List<String> readPartFileContent(String filePath,
int skipLine,
int limit){
try (Stream<String> stream = Files.lines(Paths.get(filePath))) {
return stream.skip(skipLine).limit(limit).collect(Collectors.toList());
} catch (IOException e) {
logger.error("read file failed",e);
logger.error("read file error",e);
}
return null;
return Collections.emptyList();
}
/**
* read file content
* read whole file content
*
* @param path path
* @return string of file content
* @throws Exception exception
* @param filePath file path
* @return whole file content
*/
private static String readFile(String path){
private String readWholeFileContent(String filePath){
BufferedReader br = null;
String line = null;
String line;
StringBuilder sb = new StringBuilder();
try {
br = new BufferedReader(new InputStreamReader(new FileInputStream(path)));
boolean errorLineFlag = false;
br = new BufferedReader(new InputStreamReader(new FileInputStream(filePath)));
while ((line = br.readLine()) != null){
sb.append(line + "\r\n");
}
return sb.toString();
}catch (IOException e){
logger.error("read file failed",e);
logger.error("read file error",e);
}finally {
try {
if (br != null){
br.close();
}
} catch (IOException e) {
logger.error(e.getMessage(),e);
}
} catch (IOException ignore) {}
}
return null;
return "";
}
}
\ No newline at end of file
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.log;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.remote.NettyRemotingServer;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.config.NettyServerConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* logger server
*/
public class LoggerServer {
private static final Logger logger = LoggerFactory.getLogger(LoggerServer.class);
/**
* netty server
*/
private final NettyRemotingServer server;
/**
* netty server config
*/
private final NettyServerConfig serverConfig;
/**
* logger request processor
*/
private final LoggerRequestProcessor requestProcessor;
public LoggerServer(){
this.serverConfig = new NettyServerConfig();
this.serverConfig.setListenPort(Constants.RPC_PORT);
this.server = new NettyRemotingServer(serverConfig);
this.requestProcessor = new LoggerRequestProcessor();
this.server.registerProcessor(CommandType.GET_LOG_BYTES_REQUEST, requestProcessor, requestProcessor.getExecutor());
this.server.registerProcessor(CommandType.ROLL_VIEW_LOG_REQUEST, requestProcessor, requestProcessor.getExecutor());
this.server.registerProcessor(CommandType.VIEW_WHOLE_LOG_REQUEST, requestProcessor, requestProcessor.getExecutor());
}
/**
* main launches the server from the command line.
* @param args arguments
*/
public static void main(String[] args) {
final LoggerServer server = new LoggerServer();
server.start();
}
/**
* server start
*/
public void start() {
this.server.start();
logger.info("logger server started, listening on port : {}" , Constants.RPC_PORT);
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
LoggerServer.this.stop();
}
});
}
/**
* stop
*/
public void stop() {
this.server.close();
logger.info("logger server shut down");
}
}
......@@ -22,14 +22,14 @@ import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.thread.ThreadPoolExecutors;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.common.utils.SpringApplicationContext;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.ProcessDao;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.master.runner.MasterSchedulerThread;
import org.apache.dolphinscheduler.dao.quartz.ProcessScheduleJob;
import org.apache.dolphinscheduler.dao.quartz.QuartzExecutors;
import org.apache.dolphinscheduler.server.zk.ZKMasterClient;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.quartz.ProcessScheduleJob;
import org.apache.dolphinscheduler.service.quartz.QuartzExecutors;
import org.quartz.SchedulerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......@@ -66,10 +66,10 @@ public class MasterServer implements IStoppable {
private ScheduledExecutorService heartbeatMasterService;
/**
* dolphinscheduler database interface
* process service
*/
@Autowired
protected ProcessDao processDao;
protected ProcessService processService;
/**
* master exec thread pool
......@@ -77,17 +77,18 @@ public class MasterServer implements IStoppable {
private ExecutorService masterSchedulerService;
/**
* spring application context
* only use it for initialization
* master config
*/
@Autowired
private SpringApplicationContext springApplicationContext;
private MasterConfig masterConfig;
/**
* master config
* spring application context
* only use it for initialization
*/
@Autowired
private MasterConfig masterConfig;
private SpringApplicationContext springApplicationContext;
/**
......@@ -126,7 +127,7 @@ public class MasterServer implements IStoppable {
// master scheduler thread
MasterSchedulerThread masterSchedulerThread = new MasterSchedulerThread(
zkMasterClient,
processDao,
processService,
masterConfig.getMasterExecThreads());
// submit master scheduler thread
......@@ -136,7 +137,7 @@ public class MasterServer implements IStoppable {
// what system should do if exception
try {
logger.info("start Quartz server...");
ProcessScheduleJob.init(processDao);
ProcessScheduleJob.init(processService);
QuartzExecutors.getInstance().start();
} catch (Exception e) {
try {
......
......@@ -16,15 +16,15 @@
*/
package org.apache.dolphinscheduler.server.master.runner;
import org.apache.dolphinscheduler.common.queue.ITaskQueue;
import org.apache.dolphinscheduler.common.queue.TaskQueueFactory;
import org.apache.dolphinscheduler.common.utils.SpringApplicationContext;
import org.apache.dolphinscheduler.dao.AlertDao;
import org.apache.dolphinscheduler.dao.ProcessDao;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.utils.BeanContext;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.queue.ITaskQueue;
import org.apache.dolphinscheduler.service.queue.TaskQueueFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......@@ -41,9 +41,9 @@ public class MasterBaseTaskExecThread implements Callable<Boolean> {
private static final Logger logger = LoggerFactory.getLogger(MasterBaseTaskExecThread.class);
/**
* process dao
* process service
*/
protected ProcessDao processDao;
protected ProcessService processService;
/**
* alert database access
......@@ -81,7 +81,7 @@ public class MasterBaseTaskExecThread implements Callable<Boolean> {
* @param processInstance process instance
*/
public MasterBaseTaskExecThread(TaskInstance taskInstance, ProcessInstance processInstance){
this.processDao = BeanContext.getBean(ProcessDao.class);
this.processService = BeanContext.getBean(ProcessService.class);
this.alertDao = BeanContext.getBean(AlertDao.class);
this.processInstance = processInstance;
this.taskQueue = TaskQueueFactory.getTaskQueueInstance();
......@@ -121,14 +121,14 @@ public class MasterBaseTaskExecThread implements Callable<Boolean> {
try {
if(!submitDB){
// submit task to db
task = processDao.submitTask(taskInstance, processInstance);
task = processService.submitTask(taskInstance, processInstance);
if(task != null && task.getId() != 0){
submitDB = true;
}
}
if(submitDB && !submitQueue){
// submit task to queue
submitQueue = processDao.submitTaskToQueue(task);
submitQueue = processService.submitTaskToQueue(task);
}
if(submitDB && submitQueue){
return task;
......
......@@ -28,14 +28,15 @@ import org.apache.dolphinscheduler.common.process.ProcessDag;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.ProcessDao;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.utils.DagHelper;
import org.apache.dolphinscheduler.dao.utils.cron.CronUtils;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.utils.AlertManager;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......@@ -124,9 +125,9 @@ public class MasterExecThread implements Runnable {
private DAG<String,TaskNode,TaskNodeRelation> dag;
/**
* process dao
* process service
*/
private ProcessDao processDao;
private ProcessService processService;
/**
* master config
......@@ -136,10 +137,10 @@ public class MasterExecThread implements Runnable {
/**
* constructor of MasterExecThread
* @param processInstance process instance
* @param processDao process dao
* @param processService process service
*/
public MasterExecThread(ProcessInstance processInstance,ProcessDao processDao){
this.processDao = processDao;
public MasterExecThread(ProcessInstance processInstance, ProcessService processService){
this.processService = processService;
this.processInstance = processInstance;
this.masterConfig = SpringApplicationContext.getBean(MasterConfig.class);
......@@ -177,7 +178,7 @@ public class MasterExecThread implements Runnable {
logger.error("process execute failed, process id:{}", processInstance.getId());
processInstance.setState(ExecutionStatus.FAILURE);
processInstance.setEndTime(new Date());
processDao.updateProcessInstance(processInstance);
processService.updateProcessInstance(processInstance);
}finally {
taskExecService.shutdown();
// post handle
......@@ -205,11 +206,11 @@ public class MasterExecThread implements Runnable {
Date startDate = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE));
Date endDate = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE));
processDao.saveProcessInstance(processInstance);
processService.saveProcessInstance(processInstance);
// get schedules
int processDefinitionId = processInstance.getProcessDefinitionId();
List<Schedule> schedules = processDao.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId);
List<Schedule> schedules = processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId);
List<Date> listDate = Lists.newLinkedList();
if(!CollectionUtils.isEmpty(schedules)){
for (Schedule schedule : schedules) {
......@@ -223,7 +224,7 @@ public class MasterExecThread implements Runnable {
iterator = listDate.iterator();
scheduleDate = iterator.next();
processInstance.setScheduleTime(scheduleDate);
processDao.updateProcessInstance(processInstance);
processService.updateProcessInstance(processInstance);
}else{
scheduleDate = processInstance.getScheduleTime();
if(scheduleDate == null){
......@@ -239,7 +240,7 @@ public class MasterExecThread implements Runnable {
logger.error("process {} dag is null, please check out parameters",
processInstance.getId());
processInstance.setState(ExecutionStatus.SUCCESS);
processDao.updateProcessInstance(processInstance);
processService.updateProcessInstance(processInstance);
return;
}
......@@ -281,10 +282,10 @@ public class MasterExecThread implements Runnable {
processInstance.setCommandParam(JSONUtils.toJson(cmdParam));
}
List<TaskInstance> taskInstanceList = processDao.findValidTaskListByProcessId(processInstance.getId());
List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(processInstance.getId());
for(TaskInstance taskInstance : taskInstanceList){
taskInstance.setFlag(Flag.NO);
processDao.updateTaskInstance(taskInstance);
processService.updateTaskInstance(taskInstance);
}
processInstance.setState(ExecutionStatus.RUNNING_EXEUTION);
processInstance.setGlobalParams(ParameterUtils.curingGlobalParams(
......@@ -292,7 +293,7 @@ public class MasterExecThread implements Runnable {
processInstance.getProcessDefinition().getGlobalParamList(),
CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime()));
processDao.saveProcessInstance(processInstance);
processService.saveProcessInstance(processInstance);
}
// flow end
......@@ -320,11 +321,11 @@ public class MasterExecThread implements Runnable {
*/
private void endProcess() {
processInstance.setEndTime(new Date());
processDao.updateProcessInstance(processInstance);
processService.updateProcessInstance(processInstance);
if(processInstance.getState().typeIsWaittingThread()){
processDao.createRecoveryWaitingThreadCommand(null, processInstance);
processService.createRecoveryWaitingThreadCommand(null, processInstance);
}
List<TaskInstance> taskInstances = processDao.findValidTaskListByProcessId(processInstance.getId());
List<TaskInstance> taskInstances = processService.findValidTaskListByProcessId(processInstance.getId());
alertManager.sendAlertProcessInstance(processInstance, taskInstances);
}
......@@ -361,7 +362,7 @@ public class MasterExecThread implements Runnable {
dependFailedTask.clear();
completeTaskList.clear();
errorTaskList.clear();
List<TaskInstance> taskInstanceList = processDao.findValidTaskListByProcessId(processInstance.getId());
List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(processInstance.getId());
for(TaskInstance task : taskInstanceList){
if(task.isTaskComplete()){
completeTaskList.put(task.getName(), task);
......@@ -417,7 +418,7 @@ public class MasterExecThread implements Runnable {
* @return TaskInstance
*/
private TaskInstance findTaskIfExists(String taskName){
List<TaskInstance> taskInstanceList = processDao.findValidTaskListByProcessId(this.processInstance.getId());
List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(this.processInstance.getId());
for(TaskInstance taskInstance : taskInstanceList){
if(taskInstance.getName().equals(taskName)){
return taskInstance;
......@@ -706,7 +707,7 @@ public class MasterExecThread implements Runnable {
* @return process instance execution status
*/
private ExecutionStatus getProcessInstanceState(){
ProcessInstance instance = processDao.findProcessInstanceById(processInstance.getId());
ProcessInstance instance = processService.findProcessInstanceById(processInstance.getId());
ExecutionStatus state = instance.getState();
if(activeTaskNode.size() > 0){
......@@ -784,10 +785,10 @@ public class MasterExecThread implements Runnable {
processInstance.getState().toString(), state.toString(),
processInstance.getCommandType().toString());
processInstance.setState(state);
ProcessInstance instance = processDao.findProcessInstanceById(processInstance.getId());
ProcessInstance instance = processService.findProcessInstanceById(processInstance.getId());
instance.setState(state);
instance.setProcessDefinition(processInstance.getProcessDefinition());
processDao.updateProcessInstance(instance);
processService.updateProcessInstance(instance);
processInstance = instance;
}
}
......@@ -845,7 +846,7 @@ public class MasterExecThread implements Runnable {
// send warning email if process time out.
if( !sendTimeWarning && checkProcessTimeOut(processInstance) ){
alertManager.sendProcessTimeoutAlert(processInstance,
processDao.findProcessDefineById(processInstance.getProcessDefinitionId()));
processService.findProcessDefineById(processInstance.getProcessDefinitionId()));
sendTimeWarning = true;
}
for(Map.Entry<MasterBaseTaskExecThread,Future<Boolean>> entry: activeTaskNode.entrySet()) {
......@@ -903,7 +904,7 @@ public class MasterExecThread implements Runnable {
if(completeTask.getState()== ExecutionStatus.PAUSE){
completeTask.setState(ExecutionStatus.KILL);
completeTaskList.put(entry.getKey(), completeTask);
processDao.updateTaskInstance(completeTask);
processService.updateTaskInstance(completeTask);
}
}
}
......@@ -961,7 +962,7 @@ public class MasterExecThread implements Runnable {
Future<Boolean> future = entry.getValue();
TaskInstance taskInstance = taskExecThread.getTaskInstance();
taskInstance = processDao.findTaskInstanceById(taskInstance.getId());
taskInstance = processService.findTaskInstanceById(taskInstance.getId());
if(taskInstance.getState().typeIsFinished()){
continue;
}
......@@ -1031,7 +1032,7 @@ public class MasterExecThread implements Runnable {
}
try {
Integer intId = Integer.valueOf(taskId);
TaskInstance task = processDao.findTaskInstanceById(intId);
TaskInstance task = processService.findTaskInstanceById(intId);
if(task == null){
logger.error("start node id cannot be found: {}", taskId);
}else {
......
......@@ -22,13 +22,13 @@ import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.common.utils.SpringApplicationContext;
import org.apache.dolphinscheduler.common.zk.AbstractZKClient;
import org.apache.dolphinscheduler.dao.ProcessDao;
import org.apache.dolphinscheduler.dao.entity.Command;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.zk.ZKMasterClient;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.zk.AbstractZKClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......@@ -53,7 +53,7 @@ public class MasterSchedulerThread implements Runnable {
/**
* dolphinscheduler database interface
*/
private final ProcessDao processDao;
private final ProcessService processService;
/**
* zookeeper master client
......@@ -74,11 +74,11 @@ public class MasterSchedulerThread implements Runnable {
/**
* constructor of MasterSchedulerThread
* @param zkClient zookeeper master client
* @param processDao process dao
* @param processService process service
* @param masterExecThreadNum master exec thread num
*/
public MasterSchedulerThread(ZKMasterClient zkClient, ProcessDao processDao, int masterExecThreadNum){
this.processDao = processDao;
public MasterSchedulerThread(ZKMasterClient zkClient, ProcessService processService, int masterExecThreadNum){
this.processService = processService;
this.zkMasterClient = zkClient;
this.masterExecThreadNum = masterExecThreadNum;
this.masterExecService = ThreadUtils.newDaemonFixedThreadExecutor("Master-Exec-Thread",masterExecThreadNum);
......@@ -115,19 +115,19 @@ public class MasterSchedulerThread implements Runnable {
ThreadPoolExecutor poolExecutor = (ThreadPoolExecutor) masterExecService;
int activeCount = poolExecutor.getActiveCount();
// make sure to scan and delete command table in one transaction
Command command = processDao.findOneCommand();
Command command = processService.findOneCommand();
if (command != null) {
logger.info(String.format("find one command: id: %d, type: %s", command.getId(),command.getCommandType().toString()));
try{
processInstance = processDao.handleCommand(logger, OSUtils.getHost(), this.masterExecThreadNum - activeCount, command);
processInstance = processService.handleCommand(logger, OSUtils.getHost(), this.masterExecThreadNum - activeCount, command);
if (processInstance != null) {
logger.info("start master exec thread , split DAG ...");
masterExecService.execute(new MasterExecThread(processInstance,processDao));
masterExecService.execute(new MasterExecThread(processInstance, processService));
}
}catch (Exception e){
logger.error("scan command error ", e);
processDao.moveToErrorCommand(command, e.toString());
processService.moveToErrorCommand(command, e.toString());
}
} else{
//indicate that no command ,sleep for 1s
......
......@@ -82,7 +82,7 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread {
result = waitTaskQuit();
}
taskInstance.setEndTime(new Date());
processDao.updateTaskInstance(taskInstance);
processService.updateTaskInstance(taskInstance);
logger.info("task :{} id:{}, process id:{}, exec thread completed ",
this.taskInstance.getName(),taskInstance.getId(), processInstance.getId() );
return result;
......@@ -94,7 +94,7 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread {
*/
public Boolean waitTaskQuit(){
// query new state
taskInstance = processDao.findTaskInstanceById(taskInstance.getId());
taskInstance = processService.findTaskInstanceById(taskInstance.getId());
logger.info("wait task: process id: {}, task id:{}, task name:{} complete",
this.taskInstance.getProcessInstanceId(), this.taskInstance.getId(), this.taskInstance.getName());
// task time out
......@@ -126,15 +126,15 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread {
if (remainTime < 0) {
logger.warn("task id: {} execution time out",taskInstance.getId());
// process define
ProcessDefinition processDefine = processDao.findProcessDefineById(processInstance.getProcessDefinitionId());
ProcessDefinition processDefine = processService.findProcessDefineById(processInstance.getProcessDefinitionId());
// send warn mail
alertDao.sendTaskTimeoutAlert(processInstance.getWarningGroupId(),processDefine.getReceivers(),processDefine.getReceiversCc(),taskInstance.getId(),taskInstance.getName());
checkTimeout = false;
}
}
// updateProcessInstance task instance
taskInstance = processDao.findTaskInstanceById(taskInstance.getId());
processInstance = processDao.findProcessInstanceById(processInstance.getId());
taskInstance = processService.findTaskInstanceById(taskInstance.getId());
processInstance = processService.findProcessInstanceById(processInstance.getId());
Thread.sleep(Constants.SLEEP_TIME_MILLIS);
} catch (Exception e) {
logger.error("exception",e);
......
......@@ -64,7 +64,7 @@ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread {
}
setTaskInstanceState();
waitTaskQuit();
subProcessInstance = processDao.findSubProcessInstance(processInstance.getId(), taskInstance.getId());
subProcessInstance = processService.findSubProcessInstance(processInstance.getId(), taskInstance.getId());
// at the end of the subflow , the task state is changed to the subflow state
if(subProcessInstance != null){
......@@ -75,7 +75,7 @@ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread {
}
}
taskInstance.setEndTime(new Date());
processDao.updateTaskInstance(taskInstance);
processService.updateTaskInstance(taskInstance);
logger.info("subflow task :{} id:{}, process id:{}, exec thread completed ",
this.taskInstance.getName(),taskInstance.getId(), processInstance.getId() );
result = true;
......@@ -96,14 +96,14 @@ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread {
* @return
*/
private Boolean setTaskInstanceState(){
subProcessInstance = processDao.findSubProcessInstance(processInstance.getId(), taskInstance.getId());
subProcessInstance = processService.findSubProcessInstance(processInstance.getId(), taskInstance.getId());
if(subProcessInstance == null || taskInstance.getState().typeIsFinished()){
return false;
}
taskInstance.setState(ExecutionStatus.RUNNING_EXEUTION);
taskInstance.setStartTime(new Date());
processDao.updateTaskInstance(taskInstance);
processService.updateTaskInstance(taskInstance);
return true;
}
......@@ -111,7 +111,7 @@ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread {
* updateProcessInstance parent state
*/
private void updateParentProcessState(){
ProcessInstance parentProcessInstance = processDao.findProcessInstanceById(this.processInstance.getId());
ProcessInstance parentProcessInstance = processService.findProcessInstanceById(this.processInstance.getId());
if(parentProcessInstance == null){
logger.error("parent work flow instance is null , please check it! work flow id {}", processInstance.getId());
......@@ -145,7 +145,7 @@ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread {
continue;
}
}
subProcessInstance = processDao.findProcessInstanceById(subProcessInstance.getId());
subProcessInstance = processService.findProcessInstanceById(subProcessInstance.getId());
updateParentProcessState();
if (subProcessInstance.getState().typeIsFinished()){
break;
......@@ -171,7 +171,7 @@ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread {
return;
}
subProcessInstance.setState(ExecutionStatus.READY_STOP);
processDao.updateProcessInstance(subProcessInstance);
processService.updateProcessInstance(subProcessInstance);
}
/**
......@@ -183,6 +183,6 @@ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread {
return;
}
subProcessInstance.setState(ExecutionStatus.READY_PAUSE);
processDao.updateProcessInstance(subProcessInstance);
processService.updateProcessInstance(subProcessInstance);
}
}
......@@ -16,7 +16,7 @@
*/
package org.apache.dolphinscheduler.server.monitor;
import org.apache.dolphinscheduler.common.zk.ZookeeperOperator;
import org.apache.dolphinscheduler.service.zk.ZookeeperOperator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.rpc;
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import io.grpc.StatusRuntimeException;
import org.apache.dolphinscheduler.rpc.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.TimeUnit;
/**
* log client
*/
public class LogClient {
/**
* logger of LogClient
*/
private static final Logger logger = LoggerFactory.getLogger(LogClient.class);
/**
* managed channel
*/
private final ManagedChannel channel;
/**
* blocking stub
*/
private final LogViewServiceGrpc.LogViewServiceBlockingStub blockingStub;
/**
* Construct client connecting to the log server at host:port.
*
* @param host host
* @param port port
*/
public LogClient(String host, int port) {
this(ManagedChannelBuilder.forAddress(host, port)
// Channels are secure by default (via SSL/TLS). For the example we disable TLS to avoid
// needing certificates.
.usePlaintext(true));
}
/**
* Construct client for accessing the log server using the existing channel.
*
* @param channelBuilder channel builder
*/
LogClient(ManagedChannelBuilder<?> channelBuilder) {
/**
* set max message read size
*/
channelBuilder.maxInboundMessageSize(Integer.MAX_VALUE);
channel = channelBuilder.build();
blockingStub = LogViewServiceGrpc.newBlockingStub(channel);
}
/**
* shut down channel
*
* @throws InterruptedException interrupted exception
*/
public void shutdown() throws InterruptedException {
channel.shutdown().awaitTermination(5, TimeUnit.SECONDS);
}
/**
* roll view log
*
* @param path log path
* @param skipLineNum skip line num
* @param limit limit
* @return log content
*/
public String rollViewLog(String path,int skipLineNum,int limit) {
logger.info("roll view log , path : {},skipLineNum : {} ,limit :{}", path, skipLineNum, limit);
LogParameter pathParameter = LogParameter
.newBuilder()
.setPath(path)
.setSkipLineNum(skipLineNum)
.setLimit(limit)
.build();
RetStrInfo retStrInfo;
try {
retStrInfo = blockingStub.rollViewLog(pathParameter);
return retStrInfo.getMsg();
} catch (StatusRuntimeException e) {
logger.error("roll view log failed", e);
return null;
}
}
/**
* view all log
*
* @param path log path
* @return log content
*/
public String viewLog(String path) {
logger.info("view log path : {}",path);
PathParameter pathParameter = PathParameter.newBuilder().setPath(path).build();
RetStrInfo retStrInfo;
try {
retStrInfo = blockingStub.viewLog(pathParameter);
return retStrInfo.getMsg();
} catch (StatusRuntimeException e) {
logger.error("view log failed", e);
return null;
}
}
/**
* get log bytes
*
* @param path log path
* @return log content
*/
public byte[] getLogBytes(String path) {
logger.info("get log bytes {}",path);
PathParameter pathParameter = PathParameter.newBuilder().setPath(path).build();
RetByteInfo retByteInfo;
try {
retByteInfo = blockingStub.getLogBytes(pathParameter);
return retByteInfo.getData().toByteArray();
} catch (StatusRuntimeException e) {
logger.error("get log bytes failed ", e);
return null;
}
}
}
\ No newline at end of file
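A short, hypothetical usage example of the gRPC LogClient defined above. The host, port, and log path are made-up example values, and the channel is always released in a finally block:

package org.apache.dolphinscheduler.server.rpc;

// Hypothetical usage of LogClient: fetch the first 100 lines of a task log, then release the channel.
public class LogClientUsageSketch {

    public static void main(String[] args) throws InterruptedException {
        LogClient client = new LogClient("127.0.0.1", 50051);   // example host/port, not taken from the diff
        try {
            String head = client.rollViewLog("/tmp/dolphinscheduler/task.log", 0, 100);
            System.out.println(head);
        } finally {
            client.shutdown();  // waits up to 5 seconds for the channel to terminate
        }
    }
}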
@@ -22,8 +22,8 @@ import org.apache.dolphinscheduler.common.utils.LoggerUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.server.rpc.LogClient;
import org.apache.commons.io.FileUtils;
import org.apache.dolphinscheduler.service.log.LogClientService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -375,9 +375,16 @@ public class ProcessUtils {
public static void killYarnJob(TaskInstance taskInstance) {
try {
Thread.sleep(Constants.SLEEP_TIME_MILLIS);
LogClient logClient = new LogClient(taskInstance.getHost(), Constants.RPC_PORT);
String log = logClient.viewLog(taskInstance.getLogPath());
LogClientService logClient = null;
String log = null;
try {
logClient = new LogClientService(taskInstance.getHost(), Constants.RPC_PORT);
log = logClient.viewLog(taskInstance.getLogPath());
} finally {
if(logClient != null){
logClient.close();
}
}
if (StringUtils.isNotEmpty(log)) {
List<String> appIds = LoggerUtils.getAppIds(log, logger);
String workerDir = taskInstance.getExecutePath();
......
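The close-in-finally pattern introduced in killYarnJob generalizes to any caller of LogClientService. A minimal, hedged sketch (the helper class and method names are hypothetical; the (host, port) constructor, viewLog, and close calls are the ones used in the hunk above):

import org.apache.dolphinscheduler.service.log.LogClientService;

// Hypothetical helper: always close the log client, even when viewLog throws.
public class LogFetchSketch {

    public static String fetchTaskLog(String host, int port, String logPath) {
        LogClientService logClient = null;
        try {
            logClient = new LogClientService(host, port);
            return logClient.viewLog(logPath);
        } finally {
            if (logClient != null) {
                logClient.close();   // releases the underlying remoting channel
            }
        }
    }
}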
@@ -16,7 +16,7 @@
*/
package org.apache.dolphinscheduler.server.utils;
import org.apache.dolphinscheduler.common.zk.ZookeeperOperator;
import org.apache.dolphinscheduler.service.zk.ZookeeperOperator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
......
Two additional file diffs have been collapsed and are not shown here.