diff --git a/escheduler-api/src/main/java/cn/escheduler/api/controller/DataSourceController.java b/escheduler-api/src/main/java/cn/escheduler/api/controller/DataSourceController.java
index 51e0c93bd643100561ce08d59b162e89c4d3dfc0..7301f419cc30479207e2d83fb9789bd1a0cf1925 100644
--- a/escheduler-api/src/main/java/cn/escheduler/api/controller/DataSourceController.java
+++ b/escheduler-api/src/main/java/cn/escheduler/api/controller/DataSourceController.java
@@ -23,6 +23,7 @@ import cn.escheduler.api.utils.Constants;
 import cn.escheduler.api.utils.Result;
 import cn.escheduler.common.enums.DbType;
 import cn.escheduler.common.enums.ResUploadType;
+import cn.escheduler.common.utils.CommonUtils;
 import cn.escheduler.common.utils.ParameterUtils;
 import cn.escheduler.common.utils.PropertyUtils;
 import cn.escheduler.dao.model.User;
@@ -455,7 +456,7 @@ public class DataSourceController extends BaseController {
         logger.info("login user {},get kerberos startup state : {}", loginUser.getUserName());
         try{
             // if upload resource is HDFS and kerberos startup is true , else false
-            return success(Status.SUCCESS.getMsg(), CheckUtils.getKerberosStartupState());
+            return success(Status.SUCCESS.getMsg(), CommonUtils.getKerberosStartupState());
         }catch (Exception e){
             logger.error(KERBEROS_STARTUP_STATE.getMsg(),e);
             return error(Status.KERBEROS_STARTUP_STATE.getCode(), Status.KERBEROS_STARTUP_STATE.getMsg());
diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/DataSourceService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/DataSourceService.java
index 02164f971b6187f3e17fbf96c2631f1e8b4b1906..b11e34913f56af68ce58849e2f8aaedd80ce115e 100644
--- a/escheduler-api/src/main/java/cn/escheduler/api/service/DataSourceService.java
+++ b/escheduler-api/src/main/java/cn/escheduler/api/service/DataSourceService.java
@@ -25,6 +25,7 @@ import cn.escheduler.common.enums.DbType;
 import cn.escheduler.common.enums.ResUploadType;
 import cn.escheduler.common.enums.UserType;
 import cn.escheduler.common.job.db.*;
+import cn.escheduler.common.utils.CommonUtils;
 import cn.escheduler.common.utils.PropertyUtils;
 import cn.escheduler.dao.mapper.DataSourceMapper;
 import cn.escheduler.dao.mapper.DatasourceUserMapper;
@@ -374,7 +375,7 @@ public class DataSourceService extends BaseService{
                 break;
             case HIVE:
             case SPARK:
-                if (CheckUtils.getKerberosStartupState()) {
+                if (CommonUtils.getKerberosStartupState()) {
                     System.setProperty(cn.escheduler.common.Constants.JAVA_SECURITY_KRB5_CONF,
                             getString(cn.escheduler.common.Constants.JAVA_SECURITY_KRB5_CONF_PATH));
                     Configuration configuration = new Configuration();
@@ -470,7 +471,7 @@ public class DataSourceService extends BaseService{
         String address = buildAddress(type, host, port);
 
         String jdbcUrl = address + "/" + database;
-        if (CheckUtils.getKerberosStartupState() &&
+        if (CommonUtils.getKerberosStartupState() &&
                 (type == DbType.HIVE || type == DbType.SPARK)){
             jdbcUrl += ";principal=" + principal;
         }
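// --------------------------------------------------------------------------
// Illustrative sketch -- not part of the patch above. Both API call sites now
// delegate the Kerberos check to CommonUtils.getKerberosStartupState(), and
// DataSourceService only appends ";principal=<principal>" to Hive/Spark JDBC
// URLs when that check passes. The names below (JdbcUrlSketch, buildJdbcUrl,
// the nested DbType stand-in) are made up for illustration; only the
// concatenation rule mirrors the diff.
// --------------------------------------------------------------------------
public final class JdbcUrlSketch {

    // assumed stand-in for cn.escheduler.common.enums.DbType
    public enum DbType { MYSQL, POSTGRESQL, HIVE, SPARK }

    // e.g. address = "jdbc:hive2://node1:10000", database = "default"
    public static String buildJdbcUrl(String address, String database,
                                      DbType type, String principal,
                                      boolean kerberosEnabled) {
        String jdbcUrl = address + "/" + database;
        // append the Kerberos principal only for Hive/Spark with Kerberos on
        if (kerberosEnabled && (type == DbType.HIVE || type == DbType.SPARK)) {
            jdbcUrl += ";principal=" + principal;
        }
        return jdbcUrl;
    }

    public static void main(String[] args) {
        System.out.println(buildJdbcUrl("jdbc:hive2://node1:10000", "default",
                DbType.HIVE, "hive/node1@EXAMPLE.COM", true));
        // -> jdbc:hive2://node1:10000/default;principal=hive/node1@EXAMPLE.COM
    }
}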
diff --git a/escheduler-api/src/main/java/cn/escheduler/api/utils/CheckUtils.java b/escheduler-api/src/main/java/cn/escheduler/api/utils/CheckUtils.java
index f6330b79deda36c855c982ce90882cfff5b22ba6..00c50f8263832dbdcae32c6188c326ca93287b0e 100644
--- a/escheduler-api/src/main/java/cn/escheduler/api/utils/CheckUtils.java
+++ b/escheduler-api/src/main/java/cn/escheduler/api/utils/CheckUtils.java
@@ -160,16 +160,4 @@ public class CheckUtils {
 
         return pattern.matcher(str).matches();
     }
-
-    /**
-     * if upload resource is HDFS and kerberos startup is true , else false
-     * @return
-     */
-    public static boolean getKerberosStartupState(){
-        String resUploadStartupType = PropertyUtils.getString(cn.escheduler.common.Constants.RES_UPLOAD_STARTUP_TYPE);
-        ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType);
-        Boolean kerberosStartupState = getBoolean(cn.escheduler.common.Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE);
-        return resUploadType == ResUploadType.HDFS && kerberosStartupState;
-    }
-
 }
diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/CommonUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/CommonUtils.java
index d0164791d222c38df2fed74e97798829dedde73c..43087fbd9c8c0d76cc47a9f2c29a0c1267f702f6 100644
--- a/escheduler-common/src/main/java/cn/escheduler/common/utils/CommonUtils.java
+++ b/escheduler-common/src/main/java/cn/escheduler/common/utils/CommonUtils.java
@@ -17,6 +17,7 @@
 package cn.escheduler.common.utils;
 
 import cn.escheduler.common.Constants;
+import cn.escheduler.common.enums.ResUploadType;
 import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -63,4 +64,14 @@ public class CommonUtils {
 
 
 
+    /**
+     * if upload resource is HDFS and kerberos startup is true , else false
+     * @return
+     */
+    public static boolean getKerberosStartupState(){
+        String resUploadStartupType = PropertyUtils.getString(cn.escheduler.common.Constants.RES_UPLOAD_STARTUP_TYPE);
+        ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType);
+        Boolean kerberosStartupState = getBoolean(cn.escheduler.common.Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE);
+        return resUploadType == ResUploadType.HDFS && kerberosStartupState;
+    }
 }
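// --------------------------------------------------------------------------
// Illustrative sketch -- not part of the patch above. The method moved from
// CheckUtils to CommonUtils answers one question: are resources uploaded to
// HDFS *and* is Kerberos authentication enabled? Below is a self-contained
// approximation against a plain java.util.Properties object; the key names
// "res.upload.startup.type" and "hadoop.security.authentication.startup.state"
// are assumptions standing in for the Constants referenced in the patch.
// --------------------------------------------------------------------------
import java.util.Properties;

public final class KerberosStateSketch {

    // assumed stand-in for cn.escheduler.common.enums.ResUploadType
    enum ResUploadType { NONE, HDFS, S3 }

    static boolean getKerberosStartupState(Properties conf) {
        ResUploadType resUploadType =
                ResUploadType.valueOf(conf.getProperty("res.upload.startup.type", "NONE"));
        boolean kerberosEnabled =
                Boolean.parseBoolean(conf.getProperty("hadoop.security.authentication.startup.state", "false"));
        // true only when resources live on HDFS and Kerberos is switched on
        return resUploadType == ResUploadType.HDFS && kerberosEnabled;
    }

    public static void main(String[] args) {
        Properties conf = new Properties();
        conf.setProperty("res.upload.startup.type", "HDFS");
        conf.setProperty("hadoop.security.authentication.startup.state", "true");
        System.out.println(getKerberosStartupState(conf)); // true
    }
}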
"kerberos"); + UserGroupInformation.setConfiguration(configuration); + UserGroupInformation.loginUserFromKeytab(getString(cn.escheduler.common.Constants.LOGIN_USER_KEY_TAB_USERNAME), + getString(cn.escheduler.common.Constants.LOGIN_USER_KEY_TAB_PATH)); + } if (DbType.HIVE.name().equals(sqlParameters.getType())) { Properties paramProp = new Properties(); paramProp.setProperty("user", baseDataSource.getUser()); @@ -278,7 +291,7 @@ public class SqlTask extends AbstractTask { array.add(mapOfColValues); } - logger.info("execute sql : {}", JSONObject.toJSONString(array, SerializerFeature.WriteMapNullValue)); + logger.debug("execute sql : {}", JSONObject.toJSONString(array, SerializerFeature.WriteMapNullValue)); // send as an attachment if (StringUtils.isEmpty(sqlParameters.getShowType())) {