 
Commits (6)
- [hotfix] Remove dead link check path filter (#12985)
  Jay Chung <zhongjiajie955@gmail.com>, 2022-11-24T13:23:09+08:00
  https://gitcode.net/apache/dolphinscheduler/-/commit/db615ba284de8ab2782ef41ab412286357d77cb6
  Some patches did not run the docs CI because they contained no docs-related changes.
- [Feature-10498] Mask the password in the log of sqoop task (#11589)
  rickchengx <38122586+rickchengx@users.noreply.github.com>, 2022-11-24T14:54:54+08:00
  https://gitcode.net/apache/dolphinscheduler/-/commit/38b876733c0ac30ebeeeb9c679687834bb2d6009
- [Fix] Fix Java path in Kubernetes Helm Chart (#12987)
  旺阳 <qingwli@cisco.com>, 2022-11-24T15:35:03+08:00
  https://gitcode.net/apache/dolphinscheduler/-/commit/e180e16981f2a7250f847972e2a4f4679d234988
- [Bug-12956] fix incorrect java path (#12957)
  henry zhang <33995591+zhangfane@users.noreply.github.com>, 2022-11-24T18:52:15+08:00
  https://gitcode.net/apache/dolphinscheduler/-/commit/04ef87d1d9bfc8097ad7ffd6b30e49554c685b01
- The task instance list is sorted by submission time (#12974)
  Kerwin <37063904+zhuangchong@users.noreply.github.com>, 2022-11-24T18:59:53+08:00
  https://gitcode.net/apache/dolphinscheduler/-/commit/31021730ec78d8de1b55a3471ca713995052105b
- [Bug-12963] [Master] Fix dependent task node null pointer exception (#12965)
  Kerwin <37063904+zhuangchong@users.noreply.github.com>, 2022-11-24T19:00:46+08:00
  https://gitcode.net/apache/dolphinscheduler/-/commit/50779ea1e6acdbca721dcc3d6331e13687ac9544
  Fixes a null pointer exception thrown during execution when a dependent node's time range contains both a manual and a scheduled workflow instance.
......@@ -18,11 +18,6 @@ name: Docs
on:
pull_request:
paths:
- '.github/workflows/docs.yml'
- '**/*.md'
- 'docs/**'
- '.dlc.json'
schedule:
- cron: '0 18 * * *' # TimeZone: UTC 0
......
......@@ -216,7 +216,7 @@ common:
HADOOP_CONF_DIR: "/opt/soft/hadoop/etc/hadoop"
SPARK_HOME: "/opt/soft/spark"
PYTHON_HOME: "/usr/bin/python"
JAVA_HOME: "/usr/local/openjdk-8"
JAVA_HOME: "/opt/java/openjdk"
HIVE_HOME: "/opt/soft/hive"
FLINK_HOME: "/opt/soft/flink"
DATAX_HOME: "/opt/soft/datax"
......
......@@ -197,7 +197,7 @@ In the early schedule design, if there is no priority design and use the fair sc
- For details, please refer to the logback configuration of Master and Worker, as shown in the following example:
```xml
<conversionRule conversionWord="message" converterClass="org.apache.dolphinscheduler.service.log.SensitiveDataConverter"/>
<conversionRule conversionWord="message" converterClass="org.apache.dolphinscheduler.common.log.SensitiveDataConverter"/>
<appender name="TASKLOGFILE" class="ch.qos.logback.classic.sift.SiftingAppender">
<filter class="org.apache.dolphinscheduler.service.log.TaskLogFilter"/>
<Discriminator class="org.apache.dolphinscheduler.service.log.TaskLogDiscriminator">
......
......@@ -540,7 +540,7 @@ common:
| `common.configmap.HADOOP_CONF_DIR` | Set `HADOOP_CONF_DIR` for DolphinScheduler's task environment | `/opt/soft/hadoop/etc/hadoop` |
| `common.configmap.SPARK_HOME` | Set `SPARK_HOME` for DolphinScheduler's task environment | `/opt/soft/spark` |
| `common.configmap.PYTHON_HOME` | Set `PYTHON_HOME` for DolphinScheduler's task environment | `/usr/bin/python` |
| `common.configmap.JAVA_HOME` | Set `JAVA_HOME` for DolphinScheduler's task environment | `/usr/local/openjdk-8` |
| `common.configmap.JAVA_HOME` | Set `JAVA_HOME` for DolphinScheduler's task environment | `/opt/java/openjdk` |
| `common.configmap.HIVE_HOME` | Set `HIVE_HOME` for DolphinScheduler's task environment | `/opt/soft/hive` |
| `common.configmap.FLINK_HOME` | Set `FLINK_HOME` for DolphinScheduler's task environment | `/opt/soft/flink` |
| `common.configmap.DATAX_HOME` | Set `DATAX_HOME` for DolphinScheduler's task environment | `/opt/soft/datax` |
......
......@@ -195,7 +195,7 @@
- For details, refer to the logback configuration of Master and Worker, as shown in the following example:
```xml
<conversionRule conversionWord="message" converterClass="org.apache.dolphinscheduler.service.log.SensitiveDataConverter"/>
<conversionRule conversionWord="message" converterClass="org.apache.dolphinscheduler.common.log.SensitiveDataConverter"/>
<appender name="TASKLOGFILE" class="ch.qos.logback.classic.sift.SiftingAppender">
<filter class="org.apache.dolphinscheduler.service.log.TaskLogFilter"/>
<Discriminator class="org.apache.dolphinscheduler.service.log.TaskLogDiscriminator">
......
......@@ -539,7 +539,7 @@ common:
| `common.configmap.HADOOP_CONF_DIR` | Set `HADOOP_CONF_DIR` for DolphinScheduler's task environment | `/opt/soft/hadoop/etc/hadoop` |
| `common.configmap.SPARK_HOME` | Set `SPARK_HOME` for DolphinScheduler's task environment | `/opt/soft/spark` |
| `common.configmap.PYTHON_HOME` | Set `PYTHON_HOME` for DolphinScheduler's task environment | `/usr/bin/python` |
| `common.configmap.JAVA_HOME` | Set `JAVA_HOME` for DolphinScheduler's task environment | `/usr/local/openjdk-8` |
| `common.configmap.JAVA_HOME` | Set `JAVA_HOME` for DolphinScheduler's task environment | `/opt/java/openjdk` |
| `common.configmap.HIVE_HOME` | Set `HIVE_HOME` for DolphinScheduler's task environment | `/opt/soft/hive` |
| `common.configmap.FLINK_HOME` | Set `FLINK_HOME` for DolphinScheduler's task environment | `/opt/soft/flink` |
| `common.configmap.DATAX_HOME` | Set `DATAX_HOME` for DolphinScheduler's task environment | `/opt/soft/datax` |
......
......@@ -15,11 +15,15 @@
* limitations under the License.
*/
package org.apache.dolphinscheduler.service.log;
package org.apache.dolphinscheduler.common.log;
import org.apache.dolphinscheduler.common.constants.Constants;
import org.apache.dolphinscheduler.common.constants.DataSourceConstants;
import org.apache.commons.lang3.StringUtils;
import java.util.Arrays;
import java.util.HashSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
......@@ -33,10 +37,9 @@ import com.google.common.base.Strings;
*/
public class SensitiveDataConverter extends MessageConverter {
/**
* password pattern
*/
private final Pattern pwdPattern = Pattern.compile(DataSourceConstants.DATASOURCE_PASSWORD_REGEX);
private static Pattern multilinePattern;
private static HashSet<String> maskPatterns =
new HashSet<>(Arrays.asList(DataSourceConstants.DATASOURCE_PASSWORD_REGEX));
@Override
public String convert(ILoggingEvent event) {
......@@ -45,41 +48,25 @@ public class SensitiveDataConverter extends MessageConverter {
String requestLogMsg = event.getFormattedMessage();
// desensitization log
return convertMsg(requestLogMsg);
return maskSensitiveData(requestLogMsg);
}
/**
* deal with sensitive log
*
* @param oriLogMsg original log
*/
private String convertMsg(final String oriLogMsg) {
String tempLogMsg = oriLogMsg;
if (!Strings.isNullOrEmpty(tempLogMsg)) {
tempLogMsg = passwordHandler(pwdPattern, tempLogMsg);
}
return tempLogMsg;
public static void addMaskPattern(String maskPattern) {
maskPatterns.add(maskPattern);
}
/**
* password regex
*
* @param logMsg original log
*/
static String passwordHandler(Pattern pwdPattern, String logMsg) {
Matcher matcher = pwdPattern.matcher(logMsg);
public static String maskSensitiveData(final String logMsg) {
if (StringUtils.isEmpty(logMsg)) {
return logMsg;
}
multilinePattern = Pattern.compile(String.join("|", maskPatterns), Pattern.MULTILINE);
StringBuffer sb = new StringBuffer(logMsg.length());
Matcher matcher = multilinePattern.matcher(logMsg);
while (matcher.find()) {
String password = matcher.group();
String maskPassword = Strings.repeat(Constants.STAR, password.length());
matcher.appendReplacement(sb, maskPassword);
}
matcher.appendTail(sb);
......
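The refactored converter above replaces the per-instance password pattern with a static, extensible set of mask regexes. A minimal standalone sketch of the assumed usage (the regex literal is the sqoop one added later in this change set; the class name `MaskDemo` is hypothetical, not part of the project):
```java
import org.apache.dolphinscheduler.common.log.SensitiveDataConverter;

public class MaskDemo {
    public static void main(String[] args) {
        // Register an extra regex on top of the built-in datasource password pattern.
        SensitiveDataConverter.addMaskPattern("(?<=(--password \")).+?(?=\")");

        String line = "sqoop import --username root --password \"secret\"";
        // Each matched region is replaced by '*' characters of the same length.
        System.out.println(SensitiveDataConverter.maskSensitiveData(line));
        // Expected output: sqoop import --username root --password "******"
    }
}
```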
......@@ -15,13 +15,7 @@
* limitations under the License.
*/
package org.apache.dolphinscheduler.service.log;
import static org.apache.dolphinscheduler.service.log.SensitiveDataConverter.passwordHandler;
import org.apache.dolphinscheduler.common.constants.DataSourceConstants;
import java.util.regex.Pattern;
package org.apache.dolphinscheduler.common.log;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
......@@ -32,11 +26,6 @@ public class SensitiveDataConverterTest {
private final Logger logger = LoggerFactory.getLogger(SensitiveDataConverterTest.class);
/**
* password pattern
*/
private final Pattern pwdPattern = Pattern.compile(DataSourceConstants.DATASOURCE_PASSWORD_REGEX);
private final String logMsg = "{\"address\":\"jdbc:mysql://192.168.xx.xx:3306\","
+ "\"database\":\"carbond\","
+ "\"jdbcUrl\":\"jdbc:mysql://192.168.xx.xx:3306/ods\","
......@@ -49,21 +38,17 @@ public class SensitiveDataConverterTest {
+ "\"user\":\"view\","
+ "\"password\":\"*****\"}";
@Test
public void convert() {
Assertions.assertEquals(maskLogMsg, passwordHandler(pwdPattern, logMsg));
}
/**
* mask sensitive logMsg - sql task datasource password
*/
@Test
public void testPwdLogMsgConverter() {
logger.info("parameter : {}", logMsg);
logger.info("parameter : {}", passwordHandler(pwdPattern, logMsg));
final String maskedLog = SensitiveDataConverter.maskSensitiveData(logMsg);
logger.info("original parameter : {}", logMsg);
logger.info("masked parameter : {}", maskedLog);
Assertions.assertNotEquals(logMsg, passwordHandler(pwdPattern, logMsg));
Assertions.assertEquals(maskLogMsg, passwordHandler(pwdPattern, logMsg));
Assertions.assertEquals(maskLogMsg, maskedLog);
}
......
......@@ -217,7 +217,7 @@
<if test="processInstanceName != null and processInstanceName != ''">
and process.name like concat('%', #{processInstanceName}, '%')
</if>
order by instance.start_time desc
order by instance.submit_time desc
</select>
<select id="queryStreamTaskInstanceListPaging" resultType="org.apache.dolphinscheduler.dao.entity.TaskInstance">
select
......
......@@ -205,8 +205,8 @@ public class DependentExecute {
return lastManualProcess;
}
return (lastManualProcess.getEndTime().after(lastSchedulerProcess.getEndTime())) ? lastManualProcess
: lastSchedulerProcess;
// In the time range, there are both manual and scheduled workflow instances, return the last workflow instance
return lastManualProcess.getId() > lastSchedulerProcess.getId() ? lastManualProcess : lastSchedulerProcess;
}
/**
......
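The fix above stops comparing end times (a still-running instance presumably has no end time yet, which would explain the reported NPE) and instead returns the instance with the larger id. A minimal standalone sketch of that comparison, using a hypothetical `Instance` class rather than the project's `ProcessInstance`:
```java
import java.util.Date;

public class LastInstanceDemo {

    // Hypothetical stand-in for ProcessInstance, holding only what the comparison needs.
    static class Instance {
        final int id;
        final Date endTime; // null while the instance is still running

        Instance(int id, Date endTime) {
            this.id = id;
            this.endTime = endTime;
        }
    }

    public static void main(String[] args) {
        Instance scheduled = new Instance(101, new Date()); // finished scheduled instance
        Instance manual = new Instance(102, null);          // manual instance still running

        // The old end-time comparison would NPE on the running instance:
        //   manual.endTime.after(scheduled.endTime) -> NullPointerException
        // Comparing ids avoids the null and still picks the later-created instance.
        Instance last = manual.id > scheduled.id ? manual : scheduled;
        System.out.println("last instance id = " + last.id); // 102
    }
}
```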
......@@ -28,7 +28,7 @@
</appender>
<conversionRule conversionWord="message"
converterClass="org.apache.dolphinscheduler.service.log.SensitiveDataConverter"/>
converterClass="org.apache.dolphinscheduler.common.log.SensitiveDataConverter"/>
<appender name="TASKLOGFILE" class="ch.qos.logback.classic.sift.SiftingAppender">
<filter class="org.apache.dolphinscheduler.service.log.TaskLogFilter"/>
<Discriminator class="org.apache.dolphinscheduler.service.log.TaskLogDiscriminator">
......
......@@ -48,7 +48,7 @@
<logger name="org.apache.hadoop" level="WARN"/>
<conversionRule conversionWord="message"
converterClass="org.apache.dolphinscheduler.service.log.SensitiveDataConverter"/>
converterClass="org.apache.dolphinscheduler.common.log.SensitiveDataConverter"/>
<appender name="TASKLOGFILE" class="ch.qos.logback.classic.sift.SiftingAppender">
<filter class="org.apache.dolphinscheduler.service.log.TaskLogFilter"/>
<Discriminator class="org.apache.dolphinscheduler.service.log.TaskLogDiscriminator">
......
......@@ -72,4 +72,5 @@ public final class SqoopConstants {
public static final String UPDATE_KEY = "--update-key";
public static final String UPDATE_MODE = "--update-mode";
public static final String SQOOP_PASSWORD_REGEX = "(?<=(--password \")).+?(?=\")";
}
......@@ -17,6 +17,7 @@
package org.apache.dolphinscheduler.plugin.task.sqoop;
import org.apache.dolphinscheduler.common.log.SensitiveDataConverter;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.plugin.task.api.AbstractYarnTask;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
......@@ -67,6 +68,8 @@ public class SqoopTask extends AbstractYarnTask {
sqoopTaskExecutionContext =
sqoopParameters.generateExtendedContext(taskExecutionContext.getResourceParametersHelper());
SensitiveDataConverter.addMaskPattern(SqoopConstants.SQOOP_PASSWORD_REGEX);
}
@Override
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.task.sqoop;
import org.apache.dolphinscheduler.common.log.SensitiveDataConverter;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
public class SqoopTaskTest {
@Test
public void testSqoopPasswordMask() {
final String originalScript =
"sqoop import -D mapred.job.name=sqoop_task -m 1 --connect \"jdbc:mysql://localhost:3306/defuault\" --username root --password \"mypassword\" --table student --target-dir /sqoop_test --as-textfile";
final String maskScript =
"sqoop import -D mapred.job.name=sqoop_task -m 1 --connect \"jdbc:mysql://localhost:3306/defuault\" --username root --password \"**********\" --table student --target-dir /sqoop_test --as-textfile";
SensitiveDataConverter.addMaskPattern(SqoopConstants.SQOOP_PASSWORD_REGEX);
Assertions.assertEquals(maskScript, SensitiveDataConverter.maskSensitiveData(originalScript));
}
}
......@@ -29,7 +29,7 @@
</appender>
<conversionRule conversionWord="message"
converterClass="org.apache.dolphinscheduler.service.log.SensitiveDataConverter"/>
converterClass="org.apache.dolphinscheduler.common.log.SensitiveDataConverter"/>
<appender name="TASKLOGFILE" class="ch.qos.logback.classic.sift.SiftingAppender">
<filter class="org.apache.dolphinscheduler.service.log.TaskLogFilter"/>
<Discriminator class="org.apache.dolphinscheduler.service.log.TaskLogDiscriminator">
......