...
 
Commits (6)
    https://gitcode.net/apache/dolphinscheduler/-/commit/db615ba284de8ab2782ef41ab412286357d77cb6 [hotfix] Remove dead link check path filter (#12985) 2022-11-24T13:23:09+08:00 Jay Chung zhongjiajie955@gmail.com Some patch not run docs ci due to not docs related change https://gitcode.net/apache/dolphinscheduler/-/commit/38b876733c0ac30ebeeeb9c679687834bb2d6009 [Feature-10498] Mask the password in the log of sqoop task (#11589) 2022-11-24T14:54:54+08:00 rickchengx 38122586+rickchengx@users.noreply.github.com https://gitcode.net/apache/dolphinscheduler/-/commit/e180e16981f2a7250f847972e2a4f4679d234988 [Fix] Fix Java path in Kubernetes Helm Chart (#12987) 2022-11-24T15:35:03+08:00 旺阳 qingwli@cisco.com https://gitcode.net/apache/dolphinscheduler/-/commit/04ef87d1d9bfc8097ad7ffd6b30e49554c685b01 [Bug-12956] fix incorrect java path (#12957) 2022-11-24T18:52:15+08:00 henry zhang 33995591+zhangfane@users.noreply.github.com https://gitcode.net/apache/dolphinscheduler/-/commit/31021730ec78d8de1b55a3471ca713995052105b The task instance list is sorted by submission time (#12974) 2022-11-24T18:59:53+08:00 Kerwin 37063904+zhuangchong@users.noreply.github.com https://gitcode.net/apache/dolphinscheduler/-/commit/50779ea1e6acdbca721dcc3d6331e13687ac9544 [Bug-12963] [Master] Fix dependent task node null pointer exception (#12965) 2022-11-24T19:00:46+08:00 Kerwin 37063904+zhuangchong@users.noreply.github.com * Fix that there are both manual and scheduled workflow instances in dependent nodes, and one of them will report a null pointer exception during execution.
...@@ -18,11 +18,6 @@ name: Docs ...@@ -18,11 +18,6 @@ name: Docs
on: on:
pull_request: pull_request:
paths:
- '.github/workflows/docs.yml'
- '**/*.md'
- 'docs/**'
- '.dlc.json'
schedule: schedule:
- cron: '0 18 * * *' # TimeZone: UTC 0 - cron: '0 18 * * *' # TimeZone: UTC 0
......
...@@ -216,7 +216,7 @@ common: ...@@ -216,7 +216,7 @@ common:
HADOOP_CONF_DIR: "/opt/soft/hadoop/etc/hadoop" HADOOP_CONF_DIR: "/opt/soft/hadoop/etc/hadoop"
SPARK_HOME: "/opt/soft/spark" SPARK_HOME: "/opt/soft/spark"
PYTHON_HOME: "/usr/bin/python" PYTHON_HOME: "/usr/bin/python"
JAVA_HOME: "/usr/local/openjdk-8" JAVA_HOME: "/opt/java/openjdk"
HIVE_HOME: "/opt/soft/hive" HIVE_HOME: "/opt/soft/hive"
FLINK_HOME: "/opt/soft/flink" FLINK_HOME: "/opt/soft/flink"
DATAX_HOME: "/opt/soft/datax" DATAX_HOME: "/opt/soft/datax"
......
...@@ -197,7 +197,7 @@ In the early schedule design, if there is no priority design and use the fair sc ...@@ -197,7 +197,7 @@ In the early schedule design, if there is no priority design and use the fair sc
- For details, please refer to the logback configuration of Master and Worker, as shown in the following example: - For details, please refer to the logback configuration of Master and Worker, as shown in the following example:
```xml ```xml
<conversionRule conversionWord="message" converterClass="org.apache.dolphinscheduler.service.log.SensitiveDataConverter"/> <conversionRule conversionWord="message" converterClass="org.apache.dolphinscheduler.common.log.SensitiveDataConverter"/>
<appender name="TASKLOGFILE" class="ch.qos.logback.classic.sift.SiftingAppender"> <appender name="TASKLOGFILE" class="ch.qos.logback.classic.sift.SiftingAppender">
<filter class="org.apache.dolphinscheduler.service.log.TaskLogFilter"/> <filter class="org.apache.dolphinscheduler.service.log.TaskLogFilter"/>
<Discriminator class="org.apache.dolphinscheduler.service.log.TaskLogDiscriminator"> <Discriminator class="org.apache.dolphinscheduler.service.log.TaskLogDiscriminator">
......
...@@ -540,7 +540,7 @@ common: ...@@ -540,7 +540,7 @@ common:
| `common.configmap.HADOOP_CONF_DIR` | Set `HADOOP_CONF_DIR` for DolphinScheduler's task environment | `/opt/soft/hadoop/etc/hadoop` | | `common.configmap.HADOOP_CONF_DIR` | Set `HADOOP_CONF_DIR` for DolphinScheduler's task environment | `/opt/soft/hadoop/etc/hadoop` |
| `common.configmap.SPARK_HOME` | Set `SPARK_HOME` for DolphinScheduler's task environment | `/opt/soft/spark` | | `common.configmap.SPARK_HOME` | Set `SPARK_HOME` for DolphinScheduler's task environment | `/opt/soft/spark` |
| `common.configmap.PYTHON_HOME` | Set `PYTHON_HOME` for DolphinScheduler's task environment | `/usr/bin/python` | | `common.configmap.PYTHON_HOME` | Set `PYTHON_HOME` for DolphinScheduler's task environment | `/usr/bin/python` |
| `common.configmap.JAVA_HOME` | Set `JAVA_HOME` for DolphinScheduler's task environment | `/usr/local/openjdk-8` | | `common.configmap.JAVA_HOME` | Set `JAVA_HOME` for DolphinScheduler's task environment | `/opt/java/openjdk` |
| `common.configmap.HIVE_HOME` | Set `HIVE_HOME` for DolphinScheduler's task environment | `/opt/soft/hive` | | `common.configmap.HIVE_HOME` | Set `HIVE_HOME` for DolphinScheduler's task environment | `/opt/soft/hive` |
| `common.configmap.FLINK_HOME` | Set `FLINK_HOME` for DolphinScheduler's task environment | `/opt/soft/flink` | | `common.configmap.FLINK_HOME` | Set `FLINK_HOME` for DolphinScheduler's task environment | `/opt/soft/flink` |
| `common.configmap.DATAX_HOME` | Set `DATAX_HOME` for DolphinScheduler's task environment | `/opt/soft/datax` | | `common.configmap.DATAX_HOME` | Set `DATAX_HOME` for DolphinScheduler's task environment | `/opt/soft/datax` |
......
...@@ -195,7 +195,7 @@ ...@@ -195,7 +195,7 @@
- 详情可参考Master和Worker的logback配置,如下示例: - 详情可参考Master和Worker的logback配置,如下示例:
```xml ```xml
<conversionRule conversionWord="message" converterClass="org.apache.dolphinscheduler.service.log.SensitiveDataConverter"/> <conversionRule conversionWord="message" converterClass="org.apache.dolphinscheduler.common.log.SensitiveDataConverter"/>
<appender name="TASKLOGFILE" class="ch.qos.logback.classic.sift.SiftingAppender"> <appender name="TASKLOGFILE" class="ch.qos.logback.classic.sift.SiftingAppender">
<filter class="org.apache.dolphinscheduler.service.log.TaskLogFilter"/> <filter class="org.apache.dolphinscheduler.service.log.TaskLogFilter"/>
<Discriminator class="org.apache.dolphinscheduler.service.log.TaskLogDiscriminator"> <Discriminator class="org.apache.dolphinscheduler.service.log.TaskLogDiscriminator">
......
...@@ -539,7 +539,7 @@ common: ...@@ -539,7 +539,7 @@ common:
| `common.configmap.HADOOP_CONF_DIR` | Set `HADOOP_CONF_DIR` for DolphinScheduler's task environment | `/opt/soft/hadoop/etc/hadoop` | | `common.configmap.HADOOP_CONF_DIR` | Set `HADOOP_CONF_DIR` for DolphinScheduler's task environment | `/opt/soft/hadoop/etc/hadoop` |
| `common.configmap.SPARK_HOME` | Set `SPARK_HOME` for DolphinScheduler's task environment | `/opt/soft/spark` | | `common.configmap.SPARK_HOME` | Set `SPARK_HOME` for DolphinScheduler's task environment | `/opt/soft/spark` |
| `common.configmap.PYTHON_HOME` | Set `PYTHON_HOME` for DolphinScheduler's task environment | `/usr/bin/python` | | `common.configmap.PYTHON_HOME` | Set `PYTHON_HOME` for DolphinScheduler's task environment | `/usr/bin/python` |
| `common.configmap.JAVA_HOME` | Set `JAVA_HOME` for DolphinScheduler's task environment | `/usr/local/openjdk-8` | | `common.configmap.JAVA_HOME` | Set `JAVA_HOME` for DolphinScheduler's task environment | `/opt/java/openjdk` |
| `common.configmap.HIVE_HOME` | Set `HIVE_HOME` for DolphinScheduler's task environment | `/opt/soft/hive` | | `common.configmap.HIVE_HOME` | Set `HIVE_HOME` for DolphinScheduler's task environment | `/opt/soft/hive` |
| `common.configmap.FLINK_HOME` | Set `FLINK_HOME` for DolphinScheduler's task environment | `/opt/soft/flink` | | `common.configmap.FLINK_HOME` | Set `FLINK_HOME` for DolphinScheduler's task environment | `/opt/soft/flink` |
| `common.configmap.DATAX_HOME` | Set `DATAX_HOME` for DolphinScheduler's task environment | `/opt/soft/datax` | | `common.configmap.DATAX_HOME` | Set `DATAX_HOME` for DolphinScheduler's task environment | `/opt/soft/datax` |
......
...@@ -15,11 +15,15 @@ ...@@ -15,11 +15,15 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.dolphinscheduler.service.log; package org.apache.dolphinscheduler.common.log;
import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.constants.Constants;
import org.apache.dolphinscheduler.common.constants.DataSourceConstants; import org.apache.dolphinscheduler.common.constants.DataSourceConstants;
import org.apache.commons.lang3.StringUtils;
import java.util.Arrays;
import java.util.HashSet;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
...@@ -33,10 +37,9 @@ import com.google.common.base.Strings; ...@@ -33,10 +37,9 @@ import com.google.common.base.Strings;
*/ */
public class SensitiveDataConverter extends MessageConverter { public class SensitiveDataConverter extends MessageConverter {
/** private static Pattern multilinePattern;
* password pattern private static HashSet<String> maskPatterns =
*/ new HashSet<>(Arrays.asList(DataSourceConstants.DATASOURCE_PASSWORD_REGEX));
private final Pattern pwdPattern = Pattern.compile(DataSourceConstants.DATASOURCE_PASSWORD_REGEX);
@Override @Override
public String convert(ILoggingEvent event) { public String convert(ILoggingEvent event) {
...@@ -45,41 +48,25 @@ public class SensitiveDataConverter extends MessageConverter { ...@@ -45,41 +48,25 @@ public class SensitiveDataConverter extends MessageConverter {
String requestLogMsg = event.getFormattedMessage(); String requestLogMsg = event.getFormattedMessage();
// desensitization log // desensitization log
return convertMsg(requestLogMsg); return maskSensitiveData(requestLogMsg);
} }
/** public static void addMaskPattern(String maskPattern) {
* deal with sensitive log maskPatterns.add(maskPattern);
*
* @param oriLogMsg original log
*/
private String convertMsg(final String oriLogMsg) {
String tempLogMsg = oriLogMsg;
if (!Strings.isNullOrEmpty(tempLogMsg)) {
tempLogMsg = passwordHandler(pwdPattern, tempLogMsg);
}
return tempLogMsg;
} }
/** public static String maskSensitiveData(final String logMsg) {
* password regex if (StringUtils.isEmpty(logMsg)) {
* return logMsg;
* @param logMsg original log }
*/ multilinePattern = Pattern.compile(String.join("|", maskPatterns), Pattern.MULTILINE);
static String passwordHandler(Pattern pwdPattern, String logMsg) {
Matcher matcher = pwdPattern.matcher(logMsg);
StringBuffer sb = new StringBuffer(logMsg.length()); StringBuffer sb = new StringBuffer(logMsg.length());
Matcher matcher = multilinePattern.matcher(logMsg);
while (matcher.find()) { while (matcher.find()) {
String password = matcher.group(); String password = matcher.group();
String maskPassword = Strings.repeat(Constants.STAR, password.length()); String maskPassword = Strings.repeat(Constants.STAR, password.length());
matcher.appendReplacement(sb, maskPassword); matcher.appendReplacement(sb, maskPassword);
} }
matcher.appendTail(sb); matcher.appendTail(sb);
......
...@@ -15,13 +15,7 @@ ...@@ -15,13 +15,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.dolphinscheduler.service.log; package org.apache.dolphinscheduler.common.log;
import static org.apache.dolphinscheduler.service.log.SensitiveDataConverter.passwordHandler;
import org.apache.dolphinscheduler.common.constants.DataSourceConstants;
import java.util.regex.Pattern;
import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
...@@ -32,11 +26,6 @@ public class SensitiveDataConverterTest { ...@@ -32,11 +26,6 @@ public class SensitiveDataConverterTest {
private final Logger logger = LoggerFactory.getLogger(SensitiveDataConverterTest.class); private final Logger logger = LoggerFactory.getLogger(SensitiveDataConverterTest.class);
/**
* password pattern
*/
private final Pattern pwdPattern = Pattern.compile(DataSourceConstants.DATASOURCE_PASSWORD_REGEX);
private final String logMsg = "{\"address\":\"jdbc:mysql://192.168.xx.xx:3306\"," private final String logMsg = "{\"address\":\"jdbc:mysql://192.168.xx.xx:3306\","
+ "\"database\":\"carbond\"," + "\"database\":\"carbond\","
+ "\"jdbcUrl\":\"jdbc:mysql://192.168.xx.xx:3306/ods\"," + "\"jdbcUrl\":\"jdbc:mysql://192.168.xx.xx:3306/ods\","
...@@ -49,21 +38,17 @@ public class SensitiveDataConverterTest { ...@@ -49,21 +38,17 @@ public class SensitiveDataConverterTest {
+ "\"user\":\"view\"," + "\"user\":\"view\","
+ "\"password\":\"*****\"}"; + "\"password\":\"*****\"}";
@Test
public void convert() {
Assertions.assertEquals(maskLogMsg, passwordHandler(pwdPattern, logMsg));
}
/** /**
* mask sensitive logMsg - sql task datasource password * mask sensitive logMsg - sql task datasource password
*/ */
@Test @Test
public void testPwdLogMsgConverter() { public void testPwdLogMsgConverter() {
logger.info("parameter : {}", logMsg); final String maskedLog = SensitiveDataConverter.maskSensitiveData(logMsg);
logger.info("parameter : {}", passwordHandler(pwdPattern, logMsg));
logger.info("original parameter : {}", logMsg);
logger.info("masked parameter : {}", maskedLog);
Assertions.assertNotEquals(logMsg, passwordHandler(pwdPattern, logMsg)); Assertions.assertEquals(maskLogMsg, maskedLog);
Assertions.assertEquals(maskLogMsg, passwordHandler(pwdPattern, logMsg));
} }
......
...@@ -217,7 +217,7 @@ ...@@ -217,7 +217,7 @@
<if test="processInstanceName != null and processInstanceName != ''"> <if test="processInstanceName != null and processInstanceName != ''">
and process.name like concat('%', #{processInstanceName}, '%') and process.name like concat('%', #{processInstanceName}, '%')
</if> </if>
order by instance.start_time desc order by instance.submit_time desc
</select> </select>
<select id="queryStreamTaskInstanceListPaging" resultType="org.apache.dolphinscheduler.dao.entity.TaskInstance"> <select id="queryStreamTaskInstanceListPaging" resultType="org.apache.dolphinscheduler.dao.entity.TaskInstance">
select select
......
...@@ -205,8 +205,8 @@ public class DependentExecute { ...@@ -205,8 +205,8 @@ public class DependentExecute {
return lastManualProcess; return lastManualProcess;
} }
return (lastManualProcess.getEndTime().after(lastSchedulerProcess.getEndTime())) ? lastManualProcess // In the time range, there are both manual and scheduled workflow instances, return the last workflow instance
: lastSchedulerProcess; return lastManualProcess.getId() > lastSchedulerProcess.getId() ? lastManualProcess : lastSchedulerProcess;
} }
/** /**
......
...@@ -28,7 +28,7 @@ ...@@ -28,7 +28,7 @@
</appender> </appender>
<conversionRule conversionWord="message" <conversionRule conversionWord="message"
converterClass="org.apache.dolphinscheduler.service.log.SensitiveDataConverter"/> converterClass="org.apache.dolphinscheduler.common.log.SensitiveDataConverter"/>
<appender name="TASKLOGFILE" class="ch.qos.logback.classic.sift.SiftingAppender"> <appender name="TASKLOGFILE" class="ch.qos.logback.classic.sift.SiftingAppender">
<filter class="org.apache.dolphinscheduler.service.log.TaskLogFilter"/> <filter class="org.apache.dolphinscheduler.service.log.TaskLogFilter"/>
<Discriminator class="org.apache.dolphinscheduler.service.log.TaskLogDiscriminator"> <Discriminator class="org.apache.dolphinscheduler.service.log.TaskLogDiscriminator">
......
...@@ -48,7 +48,7 @@ ...@@ -48,7 +48,7 @@
<logger name="org.apache.hadoop" level="WARN"/> <logger name="org.apache.hadoop" level="WARN"/>
<conversionRule conversionWord="message" <conversionRule conversionWord="message"
converterClass="org.apache.dolphinscheduler.service.log.SensitiveDataConverter"/> converterClass="org.apache.dolphinscheduler.common.log.SensitiveDataConverter"/>
<appender name="TASKLOGFILE" class="ch.qos.logback.classic.sift.SiftingAppender"> <appender name="TASKLOGFILE" class="ch.qos.logback.classic.sift.SiftingAppender">
<filter class="org.apache.dolphinscheduler.service.log.TaskLogFilter"/> <filter class="org.apache.dolphinscheduler.service.log.TaskLogFilter"/>
<Discriminator class="org.apache.dolphinscheduler.service.log.TaskLogDiscriminator"> <Discriminator class="org.apache.dolphinscheduler.service.log.TaskLogDiscriminator">
......
...@@ -72,4 +72,5 @@ public final class SqoopConstants { ...@@ -72,4 +72,5 @@ public final class SqoopConstants {
public static final String UPDATE_KEY = "--update-key"; public static final String UPDATE_KEY = "--update-key";
public static final String UPDATE_MODE = "--update-mode"; public static final String UPDATE_MODE = "--update-mode";
public static final String SQOOP_PASSWORD_REGEX = "(?<=(--password \")).+?(?=\")";
} }
...@@ -17,6 +17,7 @@ ...@@ -17,6 +17,7 @@
package org.apache.dolphinscheduler.plugin.task.sqoop; package org.apache.dolphinscheduler.plugin.task.sqoop;
import org.apache.dolphinscheduler.common.log.SensitiveDataConverter;
import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.plugin.task.api.AbstractYarnTask; import org.apache.dolphinscheduler.plugin.task.api.AbstractYarnTask;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
...@@ -67,6 +68,8 @@ public class SqoopTask extends AbstractYarnTask { ...@@ -67,6 +68,8 @@ public class SqoopTask extends AbstractYarnTask {
sqoopTaskExecutionContext = sqoopTaskExecutionContext =
sqoopParameters.generateExtendedContext(taskExecutionContext.getResourceParametersHelper()); sqoopParameters.generateExtendedContext(taskExecutionContext.getResourceParametersHelper());
SensitiveDataConverter.addMaskPattern(SqoopConstants.SQOOP_PASSWORD_REGEX);
} }
@Override @Override
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop;
import org.apache.dolphinscheduler.common.log.SensitiveDataConverter;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
public class SqoopTaskTest {

    /**
     * Verifies that the sqoop {@code --password} argument is masked in task logs:
     * after registering {@code SqoopConstants.SQOOP_PASSWORD_REGEX} with
     * {@code SensitiveDataConverter}, the quoted password value is replaced by an
     * equal-length run of '*' characters while the rest of the command is untouched.
     */
    @Test
    public void testSqoopPasswordMask() {
        final String rawCommand =
                "sqoop import -D mapred.job.name=sqoop_task -m 1 --connect \"jdbc:mysql://localhost:3306/defuault\" --username root --password \"mypassword\" --table student --target-dir /sqoop_test --as-textfile";
        final String expectedMasked =
                "sqoop import -D mapred.job.name=sqoop_task -m 1 --connect \"jdbc:mysql://localhost:3306/defuault\" --username root --password \"**********\" --table student --target-dir /sqoop_test --as-textfile";

        // Register the sqoop-specific password pattern, then run the converter over the command line.
        SensitiveDataConverter.addMaskPattern(SqoopConstants.SQOOP_PASSWORD_REGEX);

        final String actualMasked = SensitiveDataConverter.maskSensitiveData(rawCommand);
        Assertions.assertEquals(expectedMasked, actualMasked);
    }
}
...@@ -29,7 +29,7 @@ ...@@ -29,7 +29,7 @@
</appender> </appender>
<conversionRule conversionWord="message" <conversionRule conversionWord="message"
converterClass="org.apache.dolphinscheduler.service.log.SensitiveDataConverter"/> converterClass="org.apache.dolphinscheduler.common.log.SensitiveDataConverter"/>
<appender name="TASKLOGFILE" class="ch.qos.logback.classic.sift.SiftingAppender"> <appender name="TASKLOGFILE" class="ch.qos.logback.classic.sift.SiftingAppender">
<filter class="org.apache.dolphinscheduler.service.log.TaskLogFilter"/> <filter class="org.apache.dolphinscheduler.service.log.TaskLogFilter"/>
<Discriminator class="org.apache.dolphinscheduler.service.log.TaskLogDiscriminator"> <Discriminator class="org.apache.dolphinscheduler.service.log.TaskLogDiscriminator">
......