Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
淡淡忧伤的程序员
DolphinScheduler
提交
d7b768d1
DolphinScheduler
项目概览
淡淡忧伤的程序员
/
DolphinScheduler
与 Fork 源项目一致
Fork自
apache / DolphinScheduler
通知
48
Star
0
Fork
0
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
0
列表
看板
标记
里程碑
合并请求
0
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
DolphinScheduler
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
0
Issue
0
列表
看板
标记
里程碑
合并请求
0
合并请求
0
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
体验新版 GitCode,发现更多精彩内容 >>
提交
d7b768d1
编写于
7月 02, 2020
作者:
L
lenboo
浏览文件
操作
浏览文件
下载
差异文件
Merge remote-tracking branch 'upstream/1.3.1-release' into 131
上级
5bfcdd2a
02e22e8a
变更
4
隐藏空白更改
内联
并排
Showing
4 changed files
with
37 additions
and
57 deletions
+37
-57
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/SQLTaskExecutionContext.java
...lphinscheduler/server/entity/SQLTaskExecutionContext.java
+8
-8
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java
...ler/server/master/consumer/TaskPriorityQueueConsumer.java
+8
-2
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/UDFUtils.java
...va/org/apache/dolphinscheduler/server/utils/UDFUtils.java
+20
-45
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java
...ache/dolphinscheduler/server/worker/task/sql/SqlTask.java
+1
-2
未找到文件。
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/SQLTaskExecutionContext.java
浏览文件 @
d7b768d1
...
@@ -20,7 +20,7 @@ package org.apache.dolphinscheduler.server.entity;
...
@@ -20,7 +20,7 @@ package org.apache.dolphinscheduler.server.entity;
import
org.apache.dolphinscheduler.dao.entity.UdfFunc
;
import
org.apache.dolphinscheduler.dao.entity.UdfFunc
;
import
java.io.Serializable
;
import
java.io.Serializable
;
import
java.util.
List
;
import
java.util.
Map
;
/**
/**
* SQL Task ExecutionContext
* SQL Task ExecutionContext
...
@@ -38,9 +38,9 @@ public class SQLTaskExecutionContext implements Serializable {
...
@@ -38,9 +38,9 @@ public class SQLTaskExecutionContext implements Serializable {
*/
*/
private
String
connectionParams
;
private
String
connectionParams
;
/**
/**
* udf function
list
* udf function
tenant code map
*/
*/
private
List
<
UdfFunc
>
udfFuncList
;
private
Map
<
UdfFunc
,
String
>
udfFuncTenantCodeMap
;
public
int
getWarningGroupId
()
{
public
int
getWarningGroupId
()
{
...
@@ -51,12 +51,12 @@ public class SQLTaskExecutionContext implements Serializable {
...
@@ -51,12 +51,12 @@ public class SQLTaskExecutionContext implements Serializable {
this
.
warningGroupId
=
warningGroupId
;
this
.
warningGroupId
=
warningGroupId
;
}
}
public
List
<
UdfFunc
>
getUdfFuncList
()
{
public
Map
<
UdfFunc
,
String
>
getUdfFuncTenantCodeMap
()
{
return
udfFunc
List
;
return
udfFunc
TenantCodeMap
;
}
}
public
void
setUdfFunc
List
(
List
<
UdfFunc
>
udfFuncList
)
{
public
void
setUdfFunc
TenantCodeMap
(
Map
<
UdfFunc
,
String
>
udfFuncTenantCodeMap
)
{
this
.
udfFunc
List
=
udfFuncList
;
this
.
udfFunc
TenantCodeMap
=
udfFuncTenantCodeMap
;
}
}
public
String
getConnectionParams
()
{
public
String
getConnectionParams
()
{
...
@@ -72,7 +72,7 @@ public class SQLTaskExecutionContext implements Serializable {
...
@@ -72,7 +72,7 @@ public class SQLTaskExecutionContext implements Serializable {
return
"SQLTaskExecutionContext{"
+
return
"SQLTaskExecutionContext{"
+
"warningGroupId="
+
warningGroupId
+
"warningGroupId="
+
warningGroupId
+
", connectionParams='"
+
connectionParams
+
'\''
+
", connectionParams='"
+
connectionParams
+
'\''
+
", udfFunc
List="
+
udfFuncList
+
", udfFunc
TenantCodeMap="
+
udfFuncTenantCodeMap
+
'}'
;
'}'
;
}
}
}
}
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java
浏览文件 @
d7b768d1
...
@@ -324,7 +324,13 @@ public class TaskPriorityQueueConsumer extends Thread{
...
@@ -324,7 +324,13 @@ public class TaskPriorityQueueConsumer extends Thread{
}
}
List
<
UdfFunc
>
udfFuncList
=
processService
.
queryUdfFunListByids
(
udfFunIdsArray
);
List
<
UdfFunc
>
udfFuncList
=
processService
.
queryUdfFunListByids
(
udfFunIdsArray
);
sqlTaskExecutionContext
.
setUdfFuncList
(
udfFuncList
);
Map
<
UdfFunc
,
String
>
udfFuncMap
=
new
HashMap
<>();
for
(
UdfFunc
udfFunc
:
udfFuncList
)
{
String
tenantCode
=
processService
.
queryTenantCodeByResName
(
udfFunc
.
getResourceName
(),
ResourceType
.
UDF
);
udfFuncMap
.
put
(
udfFunc
,
tenantCode
);
}
sqlTaskExecutionContext
.
setUdfFuncTenantCodeMap
(
udfFuncMap
);
}
}
}
}
...
@@ -366,7 +372,7 @@ public class TaskPriorityQueueConsumer extends Thread{
...
@@ -366,7 +372,7 @@ public class TaskPriorityQueueConsumer extends Thread{
if
(
baseParam
!=
null
)
{
if
(
baseParam
!=
null
)
{
List
<
ResourceInfo
>
projectResourceFiles
=
baseParam
.
getResourceFilesList
();
List
<
ResourceInfo
>
projectResourceFiles
=
baseParam
.
getResourceFilesList
();
if
(
projectResourceFiles
!=
null
)
{
if
(
CollectionUtils
.
isNotEmpty
(
projectResourceFiles
)
)
{
// filter the resources that the resource id equals 0
// filter the resources that the resource id equals 0
Set
<
ResourceInfo
>
oldVersionResources
=
projectResourceFiles
.
stream
().
filter
(
t
->
t
.
getId
()
==
0
).
collect
(
Collectors
.
toSet
());
Set
<
ResourceInfo
>
oldVersionResources
=
projectResourceFiles
.
stream
().
filter
(
t
->
t
.
getId
()
==
0
).
collect
(
Collectors
.
toSet
());
...
...
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/UDFUtils.java
浏览文件 @
d7b768d1
...
@@ -16,6 +16,7 @@
...
@@ -16,6 +16,7 @@
*/
*/
package
org.apache.dolphinscheduler.server.utils
;
package
org.apache.dolphinscheduler.server.utils
;
import
org.apache.commons.collections.MapUtils
;
import
org.apache.dolphinscheduler.common.Constants
;
import
org.apache.dolphinscheduler.common.Constants
;
import
org.apache.dolphinscheduler.common.utils.CollectionUtils
;
import
org.apache.dolphinscheduler.common.utils.CollectionUtils
;
import
org.apache.dolphinscheduler.common.utils.HadoopUtils
;
import
org.apache.dolphinscheduler.common.utils.HadoopUtils
;
...
@@ -24,10 +25,8 @@ import org.apache.dolphinscheduler.dao.entity.UdfFunc;
...
@@ -24,10 +25,8 @@ import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import
org.slf4j.Logger
;
import
org.slf4j.Logger
;
import
java.text.MessageFormat
;
import
java.text.MessageFormat
;
import
java.util.ArrayList
;
import
java.util.*
;
import
java.util.HashSet
;
import
java.util.stream.Collectors
;
import
java.util.List
;
import
java.util.Set
;
import
static
org
.
apache
.
dolphinscheduler
.
common
.
utils
.
CollectionUtils
.
isNotEmpty
;
import
static
org
.
apache
.
dolphinscheduler
.
common
.
utils
.
CollectionUtils
.
isNotEmpty
;
...
@@ -43,53 +42,44 @@ public class UDFUtils {
...
@@ -43,53 +42,44 @@ public class UDFUtils {
/**
/**
* create function list
* create function list
* @param udfFuncs udf functions
* @param udfFuncTenantCodeMap key is udf function, value is tenant code
* @param tenantCode tenant code
* @param logger logger
* @param logger logger
* @return create function list
* @return create function list
*/
*/
public
static
List
<
String
>
createFuncs
(
List
<
UdfFunc
>
udfFuncs
,
String
tenantCode
,
Logger
logger
){
public
static
List
<
String
>
createFuncs
(
Map
<
UdfFunc
,
String
>
udfFuncTenantCodeMap
,
Logger
logger
){
if
(
CollectionUtils
.
isEmpty
(
udfFuncs
)){
if
(
MapUtils
.
isEmpty
(
udfFuncTenantCodeMap
)){
logger
.
info
(
"can't find udf function resource"
);
logger
.
info
(
"can't find udf function resource"
);
return
null
;
return
null
;
}
}
// get hive udf jar path
String
hiveUdfJarPath
=
HadoopUtils
.
getHdfsUdfDir
(
tenantCode
);
logger
.
info
(
"hive udf jar path : {}"
,
hiveUdfJarPath
);
// is the root directory of udf defined
if
(
StringUtils
.
isEmpty
(
hiveUdfJarPath
))
{
logger
.
error
(
"not define hive udf jar path"
);
throw
new
RuntimeException
(
"hive udf jar base path not defined "
);
}
Set
<
String
>
resources
=
getFuncResouces
(
udfFuncs
);
List
<
String
>
funcList
=
new
ArrayList
<>();
List
<
String
>
funcList
=
new
ArrayList
<>();
// build jar sql
// build jar sql
buildJarSql
(
funcList
,
resources
,
hiveUdfJarPath
);
buildJarSql
(
funcList
,
udfFuncTenantCodeMap
);
// build temp function sql
// build temp function sql
buildTempFuncSql
(
funcList
,
udfFunc
s
);
buildTempFuncSql
(
funcList
,
udfFunc
TenantCodeMap
.
keySet
().
stream
().
collect
(
Collectors
.
toList
())
);
return
funcList
;
return
funcList
;
}
}
/**
/**
* build jar sql
* build jar sql
* @param sqls sql list
* @param sqls sql list
* @param resources resource set
* @param udfFuncTenantCodeMap key is udf function, value is tenant code
* @param uploadPath upload path
*/
*/
private
static
void
buildJarSql
(
List
<
String
>
sqls
,
Set
<
String
>
resources
,
String
uploadPath
)
{
private
static
void
buildJarSql
(
List
<
String
>
sqls
,
Map
<
UdfFunc
,
String
>
udfFuncTenantCodeMap
)
{
String
defaultFS
=
HadoopUtils
.
getInstance
().
getConfiguration
().
get
(
Constants
.
FS_DEFAULTFS
);
String
defaultFS
=
HadoopUtils
.
getInstance
().
getConfiguration
().
get
(
Constants
.
FS_DEFAULTFS
);
if
(!
uploadPath
.
startsWith
(
"hdfs:"
))
{
uploadPath
=
defaultFS
+
uploadPath
;
}
for
(
String
resource
:
resources
)
{
Set
<
Map
.
Entry
<
UdfFunc
,
String
>>
entries
=
udfFuncTenantCodeMap
.
entrySet
();
sqls
.
add
(
String
.
format
(
"add jar %s/%s"
,
uploadPath
,
resource
));
for
(
Map
.
Entry
<
UdfFunc
,
String
>
entry:
entries
){
String
uploadPath
=
HadoopUtils
.
getHdfsUdfDir
(
entry
.
getValue
());
if
(!
uploadPath
.
startsWith
(
"hdfs:"
))
{
uploadPath
=
defaultFS
+
uploadPath
;
}
sqls
.
add
(
String
.
format
(
"add jar %s%s"
,
uploadPath
,
entry
.
getKey
().
getResourceName
()));
}
}
}
}
/**
/**
...
@@ -106,20 +96,5 @@ public class UDFUtils {
...
@@ -106,20 +96,5 @@ public class UDFUtils {
}
}
}
}
/**
* get the resource names of all functions
* @param udfFuncs udf function list
* @return
*/
private
static
Set
<
String
>
getFuncResouces
(
List
<
UdfFunc
>
udfFuncs
)
{
Set
<
String
>
resources
=
new
HashSet
<>();
for
(
UdfFunc
udfFunc
:
udfFuncs
)
{
resources
.
add
(
udfFunc
.
getResourceName
());
}
return
resources
;
}
}
}
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java
浏览文件 @
d7b768d1
...
@@ -132,8 +132,7 @@ public class SqlTask extends AbstractTask {
...
@@ -132,8 +132,7 @@ public class SqlTask extends AbstractTask {
.
map
(
this
::
getSqlAndSqlParamsMap
)
.
map
(
this
::
getSqlAndSqlParamsMap
)
.
collect
(
Collectors
.
toList
());
.
collect
(
Collectors
.
toList
());
List
<
String
>
createFuncs
=
UDFUtils
.
createFuncs
(
sqlTaskExecutionContext
.
getUdfFuncList
(),
List
<
String
>
createFuncs
=
UDFUtils
.
createFuncs
(
sqlTaskExecutionContext
.
getUdfFuncTenantCodeMap
(),
taskExecutionContext
.
getTenantCode
(),
logger
);
logger
);
// execute sql task
// execute sql task
...
...
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录