Commit 074fd41a, authored by Tboy, committed by dailidong

remove freemarker dependency (#1757)


* remove freemarker dependency
Parent: 2087f464
ResourcesService.java:

@@ -19,7 +19,6 @@ package org.apache.dolphinscheduler.api.service;
 import com.baomidou.mybatisplus.core.metadata.IPage;
 import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
 import org.apache.commons.collections.BeanMap;
-import org.apache.commons.lang.StringUtils;
 import org.apache.dolphinscheduler.api.enums.Status;
 import org.apache.dolphinscheduler.api.utils.PageInfo;
 import org.apache.dolphinscheduler.api.utils.Result;
@@ -28,6 +27,7 @@ import org.apache.dolphinscheduler.common.enums.ResourceType;
 import org.apache.dolphinscheduler.common.utils.FileUtils;
 import org.apache.dolphinscheduler.common.utils.HadoopUtils;
 import org.apache.dolphinscheduler.common.utils.PropertyUtils;
+import org.apache.dolphinscheduler.common.utils.StringUtils;
 import org.apache.dolphinscheduler.dao.entity.Resource;
 import org.apache.dolphinscheduler.dao.entity.Tenant;
 import org.apache.dolphinscheduler.dao.entity.UdfFunc;
@@ -104,7 +104,7 @@ public class ResourcesService extends BaseService {
         String nameSuffix = FileUtils.suffix(name);
         // determine file suffix
-        if (!StringUtils.equals(fileSuffix, nameSuffix)) {
+        if (!(StringUtils.isNotEmpty(fileSuffix) && fileSuffix.equalsIgnoreCase(nameSuffix))) {
             /**
              * rename file suffix and original suffix must be consistent
              */
@@ -341,7 +341,7 @@ public class ResourcesService extends BaseService {
         String nameSuffix = FileUtils.suffix(name);
         // determine file suffix
-        if (!StringUtils.equals(fileSuffix, nameSuffix)) {
+        if (!(StringUtils.isNotEmpty(fileSuffix) && fileSuffix.equalsIgnoreCase(nameSuffix))) {
             return false;
         }
         // query tenant
@@ -539,7 +539,7 @@ public class ResourcesService extends BaseService {
             putMsg(result, Status.SUCCESS);
             Map<String, Object> map = new HashMap<>();
             map.put(ALIAS, resource.getAlias());
-            map.put(CONTENT, StringUtils.join(content.toArray(), "\n"));
+            map.put(CONTENT, StringUtils.join(content, "\n"));
             result.setData(map);
         }else{
             logger.error("read file {} not exist in hdfs", hdfsFileName);
@@ -602,7 +602,7 @@ public class ResourcesService extends BaseService {
         putMsg(result, Status.SUCCESS);
         Map<Object, Object> dataMap = new BeanMap(resource);
-        Map<String, Object> resultMap = new HashMap<>(5);
+        Map<String, Object> resultMap = new HashMap<>();
         for (Map.Entry<Object, Object> entry: dataMap.entrySet()) {
             if (!Constants.CLASS.equalsIgnoreCase(entry.getKey().toString())) {
                 resultMap.put(entry.getKey().toString(), entry.getValue());
...
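The suffix check rewritten in the -104 and -341 hunks above changes behaviour, not just the StringUtils implementation: commons-lang's StringUtils.equals passes only on an exact, case-sensitive match and treats two nulls as equal, whereas the new condition requires a non-empty file suffix and compares it to the resource name's suffix ignoring case. A minimal standalone sketch of the difference; the class and method names here are illustrative only, not part of the project:

public class SuffixCheckSketch {

    // Old condition: commons-lang StringUtils.equals(fileSuffix, nameSuffix)
    // - case-sensitive; null and null are considered equal.
    static boolean oldMatches(String fileSuffix, String nameSuffix) {
        return fileSuffix == null ? nameSuffix == null : fileSuffix.equals(nameSuffix);
    }

    // New condition: StringUtils.isNotEmpty(fileSuffix) && fileSuffix.equalsIgnoreCase(nameSuffix)
    // - the suffix must be present, and the comparison ignores case.
    static boolean newMatches(String fileSuffix, String nameSuffix) {
        return fileSuffix != null && !fileSuffix.isEmpty()
                && fileSuffix.equalsIgnoreCase(nameSuffix);
    }

    public static void main(String[] args) {
        System.out.println(oldMatches("JAR", "jar")); // false: case-sensitive
        System.out.println(newMatches("JAR", "jar")); // true:  case-insensitive
        System.out.println(oldMatches(null, null));   // true:  two missing suffixes "match"
        System.out.println(newMatches(null, null));   // false: an empty suffix is rejected
    }
}

So a file whose declared suffix differs from the name only in case is now accepted, while it was rejected before.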
A second service class (a UDF-function service, judging by its imports) gets the same StringUtils import swap:

@@ -22,6 +22,7 @@ import org.apache.dolphinscheduler.api.utils.Result;
 import org.apache.dolphinscheduler.common.Constants;
 import org.apache.dolphinscheduler.common.enums.UdfType;
 import org.apache.dolphinscheduler.common.utils.PropertyUtils;
+import org.apache.dolphinscheduler.common.utils.StringUtils;
 import org.apache.dolphinscheduler.dao.entity.Resource;
 import org.apache.dolphinscheduler.dao.entity.UdfFunc;
 import org.apache.dolphinscheduler.dao.entity.User;
@@ -30,7 +31,6 @@ import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper;
 import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper;
 import com.baomidou.mybatisplus.core.metadata.IPage;
 import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
-import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
...
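Both Java files swap org.apache.commons.lang.StringUtils for the project's own org.apache.dolphinscheduler.common.utils.StringUtils. In ResourcesService the join call on the CONTENT line also drops the content.toArray() conversion, so the in-house join evidently accepts the collection directly. A standalone sketch of that call shape using only the JDK; String.join is shown purely for illustration and is not what the project calls:

import java.util.Arrays;
import java.util.List;

public class JoinSketch {
    public static void main(String[] args) {
        List<String> content = Arrays.asList("#!/bin/sh", "echo hello");

        // Old call shape: StringUtils.join(content.toArray(), "\n")  -- array-based join
        // New call shape: StringUtils.join(content, "\n")            -- collection passed directly
        // JDK equivalent of the collection-accepting shape:
        String joined = String.join("\n", content);

        System.out.println(joined);
    }
}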
pom.xml:

@@ -86,7 +86,6 @@
     <commons.configuration.version>1.10</commons.configuration.version>
     <commons.email.version>1.5</commons.email.version>
     <poi.version>3.17</poi.version>
-    <freemarker.version>2.3.21</freemarker.version>
     <javax.servlet.api.version>3.1.0</javax.servlet.api.version>
     <commons.collections4.version>4.1</commons.collections4.version>
     <guava.version>20.0</guava.version>
@@ -358,11 +357,11 @@
             <version>${slf4j.log4j12.version}</version>
         </dependency>
         <dependency>
             <groupId>commons-collections</groupId>
             <artifactId>commons-collections</artifactId>
             <version>${commons.collections.version}</version>
         </dependency>
         <dependency>
             <groupId>commons-httpclient</groupId>
@@ -406,13 +405,6 @@
             <version>${poi.version}</version>
         </dependency>
-        <dependency>
-            <groupId>org.freemarker</groupId>
-            <artifactId>freemarker</artifactId>
-            <version>${freemarker.version}</version>
-        </dependency>
         <!-- hadoop -->
         <dependency>
             <groupId>org.apache.hadoop</groupId>
...