Commit cb8afbe7 authored by Q qiaozhanwei, committed by bao liang

common,dao,server useless code and chinese modify (#1199)

* add ConnectionFactoryTest and ConnectionFactory read datasource from appliction.yml

* .escheduler_env.sh to dolphinscheduler_env.sh

* dao yml assembly to conf directory

* table name modify

* entity title table  name modify

* logback log name modify

* running through the big process

* running through the big process error modify

* logback log name modify

* data_source.properties rename

* logback log name modify

* install.sh optimization

* install.sh optimization

* command count modify

* command state update

* countCommandState sql update

* countCommandState sql update

* remove application.yml file

* master.properties modify

* install.sh modify

* install.sh modify

* api server startup modify

* the current user quits and the session is completely emptied. bug fix

* remove pom package resources

* checkQueueNameExist method update

* checkQueueExist

* install.sh error output update

* signOut error update

* ProcessDao is null bug fix

* install.sh add mail.user

* request url variables replace

* process define import bug fix

* process define import export bug fix

* processdefine import export bug fix

* down log suffix format modify

* import export process define contains crontab error bug fix

* add Flink local mode

* ProcessDao is null bug fix

* loadAverage display problem bug fix

* MasterServer rename Server

* rollback .env

* rollback .env

* MasterServer rename Server

* the task is abnormal and task is running bug fix

* owners and administrators can delete

* dockerfile optimization

* dockerfile optimization

* dockerfile optimization

* remove application-alert.properties

* task log print worker log bug fix

* remove .escheduler_env.sh

* change dockerfile email address

* dockerfile dao application.properties and install.sh modify

* application.properties modify

* application.properties modify

* dockerfile startup.sh modify

* remove docs

* nginx conf modify

* dockerfile application.properties modify

* dockerfile email address change

* the alert module is modified in English.

* alert server comment and chinese modify

* api server useless code and chinese modify

* common,dao,server useless code and chinese modify
Parent 1fad357f
......@@ -24,6 +24,6 @@ public interface IStoppable {
* Stop this service.
* @param cause why stopping
*/
public void stop(String cause);
void stop(String cause);
}
\ No newline at end of file
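The only change here is dropping the redundant `public` modifier: interface methods are implicitly public, so the two declarations are equivalent. A minimal implementor sketch (the class below is illustrative only and assumes `IStoppable` is importable from the common module):

```java
// Illustrative only -- not part of the commit.
public class DemoStoppableService implements IStoppable {

    private volatile boolean running = true;

    @Override
    public void stop(String cause) {
        // the override must still be declared public; implementors cannot
        // reduce the visibility of an interface method
        running = false;
        System.out.println("stopping service, cause: " + cause);
    }
}
```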
......@@ -29,7 +29,7 @@ public enum CommandType {
* command types
* 0 start a new process
* 1 start a new process from current nodes
* 2 recover tolerance fault work flow
* 2 recover tolerance fault work flow
* 3 start process from paused task nodes
* 4 start process from failure task nodes
* 5 complement data
......
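For orientation, the numbers in this comment appear to follow the declaration order of the constants. A throwaway sketch that prints the mapping (the constant names are not visible in this hunk, so it iterates `values()` instead of naming them, and the ordinal correspondence is an assumption):

```java
// Sketch: print ordinal -> constant for CommandType; purely illustrative.
for (CommandType type : CommandType.values()) {
    System.out.println(type.ordinal() + " -> " + type);
}
```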
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.enums;
/**
* depend strategy
*/
public enum DependStrategy {
/**
* 0 none,1 all success 2 all failed 3 one success 4 one failed
*/
NONE, ALL_SUCCESS, ALL_FAILED, ONE_SUCCESS, ONE_FAILED
}
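The javadoc numbering maps each strategy to a dependence-check rule. As a rough illustration of how a caller might evaluate a strategy against the states of the depended-on tasks (this helper is a sketch, not code from the commit; the count parameters are assumed inputs):

```java
// Sketch only: decide whether a dependence is satisfied for a given strategy.
public static boolean dependConditionMet(DependStrategy strategy,
                                         int successCount, int failedCount, int total) {
    switch (strategy) {
        case NONE:        return true;                   // 0: no condition
        case ALL_SUCCESS: return successCount == total;  // 1
        case ALL_FAILED:  return failedCount == total;   // 2
        case ONE_SUCCESS: return successCount >= 1;      // 3
        case ONE_FAILED:  return failedCount >= 1;       // 4
        default:          return false;
    }
}
```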
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.enums;
/**
* self dependency strategy
*/
public enum SelfDependStrategy {
/**
* 0 do not depend on the last cycle;
* 1 depend on the last cycle
**/
NO_DEP_PRE, DEP_PRE
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.enums;
/**
* server type enum
*/
public enum ServerEnum {
/**
* master server , worker server
*/
MASTER_SERVER,WORKER_SERVER
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
/**
* enum field util
*/
public class EnumFieldUtil {
/**
* Generate a string for the enums field
*
* @param field
* @param enumClass
* @return
*/
public static String genFieldStr(String field, Class<?> enumClass) {
//TODO...
// delete this class when mybatisplus is ok
return "";
// return "#{" + field + ",javaType=" + enumClass.getName() + ",typeHandler=" + EnumOrdinalTypeHandler.class.getName() + "}";
}
}
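The commented-out return shows what this helper originally produced: a MyBatis `#{...}` placeholder that carries the enum's Java type and an ordinal type handler, built from the field name and the enum class. A hedged usage sketch follows; the handler's package is inferred from MyBatis' standard `org.apache.ibatis.type.EnumOrdinalTypeHandler`, and `CommandType` is assumed to sit in the common enums package shown earlier in this diff, so treat both as assumptions:

```java
// With the current stub this prints an empty string; once the commented-out
// return is restored, the expected shape is roughly:
//   #{state,javaType=org.apache.dolphinscheduler.common.enums.CommandType,typeHandler=org.apache.ibatis.type.EnumOrdinalTypeHandler}
String placeholder = EnumFieldUtil.genFieldStr("state", CommandType.class);
System.out.println(placeholder);
```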
......@@ -64,7 +64,7 @@ public class DAGTest {
graph.addNode(i, "v(" + i + ")");
}
// construct edges
// construction side
assertTrue(graph.addEdge(1, 2));
assertTrue(graph.addEdge(2, 5));
......@@ -84,7 +84,7 @@ public class DAGTest {
/**
* test adding a node
* add node
*/
@Test
public void testAddNode() {
......@@ -104,7 +104,7 @@ public class DAGTest {
/**
* add edge
* add edge
*/
@Test
public void testAddEdge() {
......@@ -129,7 +129,7 @@ public class DAGTest {
/**
* test subsequent nodes
* add subsequent node
*/
@Test
public void testSubsequentNodes() {
......@@ -141,7 +141,7 @@ public class DAGTest {
/**
* test indegree
* test indegree
*/
@Test
public void testIndegree() {
......@@ -155,7 +155,7 @@ public class DAGTest {
/**
* test begin node
* test begin node
*/
@Test
public void testBeginNode() {
......@@ -170,7 +170,7 @@ public class DAGTest {
/**
* test end node
* test end node
*/
@Test
public void testEndNode() {
......@@ -183,18 +183,18 @@ public class DAGTest {
/**
* test cycle
* test cycle
*/
@Test
public void testCycle() {
clear();
// construct nodes
for (int i = 1; i <= 5; ++i) {
graph.addNode(i, "v(" + i + ")");
}
// construct edges, 1->2, 2->3, 3->4
// construction side
try {
graph.addEdge(1, 2);
graph.addEdge(2, 3);
......@@ -208,9 +208,9 @@ public class DAGTest {
try {
boolean addResult = graph.addEdge(4, 1);//there is a cycle, so the add fails
boolean addResult = graph.addEdge(4, 1);
if(!addResult){//there is a cycle, so the add fails
if(!addResult){
assertTrue(true);
}
......@@ -222,15 +222,14 @@ public class DAGTest {
fail();
}
// clear again
clear();
// construct nodes
// construction node
for (int i = 1; i <= 5; ++i) {
graph.addNode(i, "v(" + i +")");
}
// construct edges, 1->2, 2->3, 3->4
// construction side, 1->2, 2->3, 3->4
try {
graph.addEdge(1, 2);
graph.addEdge(2, 3);
......@@ -251,7 +250,8 @@ public class DAGTest {
makeGraph();
try {
List<Integer> topoList = new ArrayList<>();//one possible topological order is 1 3 4 2 5 6 7
// topological result is : 1 3 4 2 5 6 7
List<Integer> topoList = new ArrayList<>();
topoList.add(1);
topoList.add(3);
topoList.add(4);
......@@ -276,10 +276,10 @@ public class DAGTest {
graph.addEdge(2, 3, null, true);
graph.addEdge(3, 4, null, true);
graph.addEdge(4, 5, null, true);
graph.addEdge(5, 1, null, false); //fails to add because of the cycle; an ERROR-level log is printed
graph.addEdge(5, 1, null, false); //The loop will fail to add
try {
List<Integer> topoList = new ArrayList<>();//the topological order is 1 2 3 4 5
List<Integer> topoList = new ArrayList<>();// topological result is : 1 2 3 4 5
topoList.add(1);
topoList.add(2);
topoList.add(3);
......@@ -296,9 +296,6 @@ public class DAGTest {
}
/**
*
*/
@Test
public void testTopologicalSort3() throws Exception {
clear();
......@@ -316,7 +313,7 @@ public class DAGTest {
graph.addNode(i, "v(" + i + ")");
}
// construct edges
// construction node
assertTrue(graph.addEdge(1, 2));
assertTrue(graph.addEdge(1, 3));
......@@ -345,9 +342,6 @@ public class DAGTest {
logger.info(i + " subsequentNodes : " + graph.getSubsequentNodes(i));
}
// assertArrayEquals(expectedList.toArray(),graph.topologicalSort().toArray());
logger.info(6 + " previousNodesb: " + graph.getPreviousNodes(6));
assertEquals(5, graph.getSubsequentNodes(2).toArray()[0]);
......
......@@ -33,8 +33,6 @@ import java.text.DecimalFormat;
public class OSUtilsTest {
private static Logger logger = LoggerFactory.getLogger(OSUtilsTest.class);
// static SystemInfo si = new SystemInfo();
// static HardwareAbstractionLayer hal = si.getHardware();
@Test
......@@ -46,7 +44,6 @@ public class OSUtilsTest {
@Test
public void memoryUsage() {
logger.info("memoryUsage : {}", OSUtils.memoryUsage());// 0.3361799418926239
// printMemory(hal.getMemory());// 35 %
}
@Test
......@@ -81,66 +78,4 @@ public class OSUtilsTest {
logger.info("cpuUsage1 : {}", df.format(cpuUsage));
}
//
// @Test
// public void getUserList() {
// logger.info("getUserList : {}", OSUtils.getUserList());
// }
//
//
// @Test
// public void getGroup() throws Exception {
// logger.info("getGroup : {}", OSUtils.getGroup());
// logger.info("getGroup : {}", OSUtils.exeShell("groups"));
//
//
// }
//
//
// @Test
// public void getProcessID() {
// logger.info("getProcessID : {}", OSUtils.getProcessID());
// }
//
//
// @Test
// public void getHost() {
// logger.info("getHost : {}", OSUtils.getHost());
// }
//
//
//
// @Test
// public void anotherGetOsInfoTest() throws InterruptedException {
// OperatingSystemMXBean os = ManagementFactory.getPlatformMXBean(OperatingSystemMXBean.class);
// final MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
//
// MemoryUsage memoryUsage = memoryMXBean.getHeapMemoryUsage();
// double usage = (double)memoryUsage.getUsed() / (double)memoryUsage.getCommitted();
// logger.info("memory usage : {}",usage);
//
// if (os instanceof UnixOperatingSystemMXBean) {
// UnixOperatingSystemMXBean unixOs = (UnixOperatingSystemMXBean) os;
// logger.info("getMaxFileDescriptorCount : {}" ,unixOs.getMaxFileDescriptorCount()); //10240
// logger.info("getOpenFileDescriptorCount : {}",unixOs.getOpenFileDescriptorCount()); //241
// logger.info("getAvailableProcessors : {}",unixOs.getAvailableProcessors()); //8
//
// logger.info("getSystemLoadAverage : {}",unixOs.getSystemLoadAverage()); //1.36083984375
//
// logger.info("getFreePhysicalMemorySize : {}",unixOs.getFreePhysicalMemorySize()); //209768448
//
// logger.info("getTotalPhysicalMemorySize : {}",unixOs.getTotalPhysicalMemorySize()); //17179869184 16G
//
// for(int i = 0; i < 3; i++) {
// logger.info("getSystemCpuLoad : {}", unixOs.getSystemCpuLoad()); //0.0
//
// logger.info("getProcessCpuLoad : {}", unixOs.getProcessCpuLoad() * 10); //0.0
// Thread.sleep(1000l);
// }
// }
// }
//
}
......@@ -24,6 +24,9 @@ import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.concurrent.CountDownLatch;
/**
* SHELL Task Test
*/
public class ShellExecutorTest {
private static final Logger logger = LoggerFactory.getLogger(ShellExecutorTest.class);
......
......@@ -21,6 +21,9 @@ import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Thread Pool Executor Test
*/
public class ThreadPoolExecutorsTest {
private static final Logger logger = LoggerFactory.getLogger(ThreadPoolExecutors.class);
......
......@@ -48,9 +48,7 @@ public class HadoopUtilsTest {
public void readFileTest(){
try {
byte[] bytes = HadoopUtils.getInstance().catFile("/dolphinscheduler/hdfs/resources/35435.sh");
logger.info("------------------start");
logger.info(new String(bytes));
logger.info("---------------------end");
} catch (Exception e) {
}
......
......@@ -48,9 +48,7 @@ public class StandaloneZKServerForTest {
//delete zk data dir ?
File zkFile = new File(System.getProperty("java.io.tmpdir"), "zookeeper");
// if(zkFile.exists()){
// zkFile.delete();
// }
startStandaloneServer("2000", zkFile.getAbsolutePath(), "2181", "10", "5");
}
});
......
......@@ -36,10 +36,10 @@ public class DaoFactory {
}
/**
* get dao instance
* get dao instance
*
* @param clazz
* @return dao instance
* @return dao instance
*/
@SuppressWarnings("unchecked")
public static <T extends AbstractBaseDao> T getDaoInstance(Class<T> clazz) {
......@@ -47,9 +47,8 @@ public class DaoFactory {
synchronized (daoMap) {
if (!daoMap.containsKey(className)) {
try {
// T t = BeanContext.getBean(clazz);
T t = clazz.getConstructor().newInstance();
// instance initialization
// init
t.init();
daoMap.put(className, t);
} catch (Exception e) {
......
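getDaoInstance lazily constructs one DAO per class, calls init() on it, and caches it in the synchronized daoMap, so every caller shares the same initialized instance. A usage sketch (ProcessDao extends AbstractBaseDao later in this diff, so it satisfies the type bound):

```java
// First call constructs and init()s the DAO; later calls return the cached one.
ProcessDao processDao = DaoFactory.getDaoInstance(ProcessDao.class);
ProcessDao again = DaoFactory.getDaoInstance(ProcessDao.class);
assert processDao == again;   // cached in daoMap, keyed by the class name
```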
......@@ -43,7 +43,7 @@ public class MonitorDBDao {
public static final String VARIABLE_NAME = "variable_name";
/**
* load the configuration file
* load conf
*/
private static Configuration conf;
......
......@@ -59,7 +59,6 @@ public class ProcessDao extends AbstractBaseDao {
private final int[] stateArray = new int[]{ExecutionStatus.SUBMITTED_SUCCESS.ordinal(),
ExecutionStatus.RUNNING_EXEUTION.ordinal(),
ExecutionStatus.READY_PAUSE.ordinal(),
// ExecutionStatus.NEED_FAULT_TOLERANCE.ordinal(),
ExecutionStatus.READY_STOP.ordinal()};
@Autowired
......@@ -1017,9 +1016,6 @@ public class ProcessDao extends AbstractBaseDao {
* ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId}_${task executed by ip1},${ip2}...
*
* The tasks with the highest priority are selected by comparing the priorities of the above four levels from high to low.
*
* process instance priority_process instance id_task priority_task id_ips of the machines the task may run on (ip1,ip2...)   high <- low
*
* @param taskInstance
* @return
*/
......@@ -1167,7 +1163,6 @@ public class ProcessDao extends AbstractBaseDao {
logger.error("save error, process instance is null!");
return ;
}
//create the process instance
if(workProcessInstance.getId() != 0){
processInstanceMapper.updateById(workProcessInstance);
}else{
......@@ -1602,7 +1597,7 @@ public class ProcessDao extends AbstractBaseDao {
Cron depCron;
List<Date> list;
List<Schedule> schedules = this.selectAllByProcessDefineId(ids);
// traverse all scheduling information
// for all scheduling info
for(Schedule depSchedule:schedules){
strCrontab = depSchedule.getCrontab();
depCronExpression = CronUtils.parse2CronExpression(strCrontab);
......
......@@ -43,7 +43,7 @@ public class TaskRecordDao {
private static Logger logger = LoggerFactory.getLogger(TaskRecordDao.class.getName());
/**
* load the configuration file
* load conf
*/
private static Configuration conf;
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.entity;
import org.apache.dolphinscheduler.common.enums.SelfDependStrategy;
/**
* dependency
*/
public class Dependency {
/**
* self depend strategy
*/
private SelfDependStrategy self;
/**
* outer dependency string
*/
private String outer;
public Dependency(){}
public Dependency(String outer, SelfDependStrategy self){
this.outer = outer;
this.self = self;
}
public SelfDependStrategy getSelf() {
return self;
}
public void setSelf(SelfDependStrategy self) {
this.self = self;
}
public String getOuter() {
return outer;
}
public void setOuter(String outer) {
this.outer = outer;
}
}
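A brief usage sketch tying the entity to the new SelfDependStrategy enum; the format of the outer-dependency string is not defined in this diff, so the value below is only a placeholder:

```java
// DEP_PRE: the item also depends on its own previous schedule cycle.
Dependency dependency = new Dependency("project:1,definition:10", SelfDependStrategy.DEP_PRE);
System.out.println(dependency.getOuter() + " / " + dependency.getSelf());
```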
......@@ -44,7 +44,6 @@ public class DagHelper {
/**
* generate flow node relation list by task node list;
* Edges that are not in the task Node List will not be added to the result
* generate the node relation list from the task node list; edges not in the task node list will not be added to the result
*
* @param taskNodeList
* @return
......@@ -67,7 +66,6 @@ public class DagHelper {
/**
* generate task nodes needed by dag
* generate the task nodes needed by the dag
*
* @param taskNodeList
* @param taskDependType
......@@ -119,7 +117,6 @@ public class DagHelper {
/**
* find all the nodes that depended on the start node
* find all nodes that depend on the start node
*
* @param startNode
* @param taskNodeList
......@@ -142,7 +139,6 @@ public class DagHelper {
/**
* find all nodes that start nodes depend on.
* find all nodes that the start node depends on
*
* @param startNode
* @param taskNodeList
......@@ -170,7 +166,6 @@ public class DagHelper {
/**
* generate dag by start nodes and recovery nodes
* generate the dag from the start nodes and recovery nodes
* @param processDefinitionJson
* @param startNodeNameList
* @param recoveryNodeNameList
......@@ -217,7 +212,6 @@ public class DagHelper {
/**
* find node by node name
* get a node by its name
* @param nodeDetails
* @param nodeName
* @return
......
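Taken together, these helpers build the runnable DAG from a task-node list plus optional start and recovery nodes: depending on the task depend type they walk forward to collect the nodes that depend on a start node, or backward to collect the nodes it depends on, and only the reachable edges end up in the DAG. The real signatures are not visible in these hunks, so the forward walk below is a conceptual sketch over a plain adjacency map rather than DolphinScheduler's own types:

```java
import java.util.*;

// Conceptual sketch of "find all the nodes that depend on the start node".
public final class DownstreamWalk {
    public static Set<String> nodesDependingOn(String startNode,
                                               Map<String, List<String>> downstream) {
        Set<String> reached = new LinkedHashSet<>();
        Deque<String> queue = new ArrayDeque<>();
        queue.add(startNode);
        while (!queue.isEmpty()) {
            String current = queue.poll();
            for (String next : downstream.getOrDefault(current, Collections.emptyList())) {
                if (reached.add(next)) {   // skip nodes already visited
                    queue.add(next);
                }
            }
        }
        return reached;
    }
}
```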
......@@ -23,7 +23,7 @@ import java.util.ArrayList;
import java.util.List;
/**
* link check utility
* DAG Cycle judge
*/
public class CycleLinks extends AbstractCycle {
private final List<AbstractCycle> cycleList = new ArrayList<>();
......
......@@ -96,7 +96,6 @@ public class DagHelperTest {
TaskNode node4 = new TaskNode();
node4.setId("4");
node4.setName("4");
// node4.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN);
taskNodeList.add(node4);
TaskNode node3 = new TaskNode();
......
......@@ -50,9 +50,7 @@ public class StandaloneZKServerForTest {
//delete zk data dir ?
File zkFile = new File(System.getProperty("java.io.tmpdir"), "zookeeper");
// if(zkFile.exists()){
// zkFile.delete();
// }
startStandaloneServer("2000", zkFile.getAbsolutePath(), "2181", "10", "5");
}
});
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.zk;
import org.junit.Assert;
import org.junit.Test;
import java.util.Arrays;
import java.util.List;
/**
* ZKWorkerClient test
*/
public class ZKWorkerClientTest {
@Test
public void getZKWorkerClient() throws Exception {
// ZKWorkerClient zkWorkerClient = ZKWorkerClient.getZKWorkerClient();
// zkWorkerClient.removeDeadServerByHost("127.0.0.1", Constants.WORKER_PREFIX);
}
@Test
public void test(){
String ips = "";
List<String> ipList = Arrays.asList(ips.split(","));
Assert.assertEquals(1, ipList.size());
}
}
\ No newline at end of file
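The second test pins down a Java quirk rather than scheduler behaviour: splitting an empty string on "," yields a single empty element, not an empty array, which is why the assertion expects size 1. Any code that turns a possibly-empty ips string into a host list therefore has to filter blanks itself, for example (assumes java.util.Arrays and java.util.stream.Collectors are imported):

```java
// "".split(",") returns { "" }, hence assertEquals(1, ipList.size()) above.
List<String> hosts = Arrays.stream(ips.split(","))
        .filter(s -> !s.trim().isEmpty())
        .collect(Collectors.toList());   // empty list when ips is ""
```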