/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.common.utils;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.ResUploadType;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.client.cli.RMAdminCLI;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.nio.file.Files;
import java.security.PrivilegedExceptionAction;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * hadoop utils
 * single instance
 */
public class HadoopUtils implements Closeable {

    private static final Logger logger = LoggerFactory.getLogger(HadoopUtils.class);

    private static final String HADOOP_UTILS_KEY = "HADOOP_UTILS_KEY";

    private static final LoadingCache<String, HadoopUtils> cache = CacheBuilder
            .newBuilder()
            .expireAfterWrite(PropertyUtils.getInt(Constants.KERBEROS_EXPIRE_TIME, 7), TimeUnit.DAYS)
            .build(new CacheLoader<String, HadoopUtils>() {
                @Override
                public HadoopUtils load(String key) throws Exception {
                    return new HadoopUtils();
                }
            });

    private Configuration configuration;
    private FileSystem fs;

    private static String hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER);

    private HadoopUtils() {
        init();
        initHdfsPath();
    }

    public static HadoopUtils getInstance() {

        return cache.getUnchecked(HADOOP_UTILS_KEY);
    }
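
    // Usage sketch (illustrative, not part of the original class): HadoopUtils is a cached
    // singleton; getInstance() returns the same instance until the cache entry expires
    // (Constants.KERBEROS_EXPIRE_TIME, default 7 days) and a fresh instance is built.
    // The path below is a hypothetical placeholder.
    //
    //     HadoopUtils hadoopUtils = HadoopUtils.getInstance();
    //     boolean exists = hadoopUtils.exists("/dolphinscheduler");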

    /**
     * init dolphinscheduler root path in hdfs
     */
    private void initHdfsPath() {
        String hdfsPath = PropertyUtils.getString(Constants.DATA_STORE_2_HDFS_BASEPATH);
        Path path = new Path(hdfsPath);

        try {
            if (!fs.exists(path)) {
                fs.mkdirs(path);
            }
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }


    /**
     * init hadoop configuration
     */
    private void init() {
        try {
            configuration = new Configuration();

            String resUploadStartupType = PropertyUtils.getString(Constants.RES_UPLOAD_STARTUP_TYPE);
            ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType);

            if (resUploadType == ResUploadType.HDFS) {
                if (PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE)) {
                    System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF,
                            PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH));
                    configuration.set(Constants.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
                    UserGroupInformation.setConfiguration(configuration);
                    UserGroupInformation.loginUserFromKeytab(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME),
                            PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH));
                }

                String defaultFS = configuration.get(Constants.FS_DEFAULTFS);
                // first get the value from core-site.xml / hdfs-site.xml; if it points to the
                // local file system (the default), fall back to the properties file
                if (defaultFS.startsWith("file")) {
                    String defaultFSProp = PropertyUtils.getString(Constants.FS_DEFAULTFS);
                    if (StringUtils.isNotBlank(defaultFSProp)) {
                        Map<String, String> fsRelatedProps = PropertyUtils.getPrefixedProperties("fs.");
                        configuration.set(Constants.FS_DEFAULTFS, defaultFSProp);
                        fsRelatedProps.forEach((key, value) -> configuration.set(key, value));
                    } else {
                        logger.error("property:{} can not to be empty, please set!", Constants.FS_DEFAULTFS);
                        throw new RuntimeException(
                                String.format("property: %s can not to be empty, please set!", Constants.FS_DEFAULTFS)
                        );
                    }
                } else {
                    logger.info("get property:{} -> {}, from core-site.xml hdfs-site.xml ", Constants.FS_DEFAULTFS, defaultFS);
                }

                if (fs == null) {
                    if (StringUtils.isNotEmpty(hdfsUser)) {
                        UserGroupInformation ugi = UserGroupInformation.createRemoteUser(hdfsUser);
                        ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
                            @Override
                            public Boolean run() throws Exception {
                                fs = FileSystem.get(configuration);
                                return true;
                            }
                        });
                    } else {
                        logger.warn("hdfs.root.user is not set value!");
                        fs = FileSystem.get(configuration);
                    }
                }
            } else if (resUploadType == ResUploadType.S3) {
                configuration.set(Constants.FS_DEFAULTFS, PropertyUtils.getString(Constants.FS_DEFAULTFS));
                configuration.set(Constants.FS_S3A_ENDPOINT, PropertyUtils.getString(Constants.FS_S3A_ENDPOINT));
                configuration.set(Constants.FS_S3A_ACCESS_KEY, PropertyUtils.getString(Constants.FS_S3A_ACCESS_KEY));
                configuration.set(Constants.FS_S3A_SECRET_KEY, PropertyUtils.getString(Constants.FS_S3A_SECRET_KEY));
                fs = FileSystem.get(configuration);
            }


            String rmHaIds = PropertyUtils.getString(Constants.YARN_RESOURCEMANAGER_HA_RM_IDS);
            String appAddress = PropertyUtils.getString(Constants.YARN_APPLICATION_STATUS_ADDRESS);
            if (!StringUtils.isEmpty(rmHaIds)) {
                appAddress = getAppAddress(appAddress, rmHaIds);
                logger.info("appAddress : {}", appAddress);
            }
            configuration.set(Constants.YARN_APPLICATION_STATUS_ADDRESS, appAddress);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * @return Configuration
     */
    public Configuration getConfiguration() {
        return configuration;
    }

    /**
     * get application url
     *
     * @param applicationId application id
     * @return url of application
     */
    public String getApplicationUrl(String applicationId) {
        return String.format(configuration.get(Constants.YARN_APPLICATION_STATUS_ADDRESS), applicationId);
    }

    /**
     * cat file on hdfs
     *
     * @param hdfsFilePath hdfs file path
     * @return byte[] byte array
     * @throws IOException errors
     */
    public byte[] catFile(String hdfsFilePath) throws IOException {

        if (StringUtils.isBlank(hdfsFilePath)) {
            logger.error("hdfs file path:{} is blank", hdfsFilePath);
            return new byte[0];
        }

        FSDataInputStream fsDataInputStream = fs.open(new Path(hdfsFilePath));
        return IOUtils.toByteArray(fsDataInputStream);
    }


    /**
     * cat file on hdfs
     *
     * @param hdfsFilePath hdfs file path
     * @param skipLineNums skip line numbers
     * @param limit        read how many lines
     * @return content of file
     * @throws IOException errors
     */
    public List<String> catFile(String hdfsFilePath, int skipLineNums, int limit) throws IOException {

        if (StringUtils.isBlank(hdfsFilePath)) {
            logger.error("hdfs file path:{} is blank", hdfsFilePath);
            return Collections.emptyList();
        }

        try (FSDataInputStream in = fs.open(new Path(hdfsFilePath))) {
            BufferedReader br = new BufferedReader(new InputStreamReader(in));
            Stream<String> stream = br.lines().skip(skipLineNums).limit(limit);
            return stream.collect(Collectors.toList());
        }

    }
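
    // Usage sketch (illustrative): tailing a task log stored on HDFS by skipping lines that
    // were already read and fetching at most the next 100. The path is a hypothetical placeholder.
    //
    //     List<String> lines = HadoopUtils.getInstance()
    //             .catFile("/dolphinscheduler/demo/logs/task.log", 0, 100);
    //     lines.forEach(System.out::println);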

    /**
     * make the given file and all non-existent parents into
     * directories. Has the semantics of Unix 'mkdir -p'.
     * Existence of the directory hierarchy is not an error.
     *
     * @param hdfsPath path to create
     * @return mkdir result
     * @throws IOException errors
     */
    public boolean mkdir(String hdfsPath) throws IOException {
        return fs.mkdirs(new Path(hdfsPath));
    }

    /**
     * copy files between FileSystems
     *
     * @param srcPath      source hdfs path
     * @param dstPath      destination hdfs path
     * @param deleteSource whether to delete the src
     * @param overwrite    whether to overwrite an existing file
     * @return if success or not
     * @throws IOException errors
     */
    public boolean copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) throws IOException {
        return FileUtil.copy(fs, new Path(srcPath), fs, new Path(dstPath), deleteSource, overwrite, fs.getConf());
    }

    /**
     * the src file is on the local disk.  Add it to FS at
     * the given dst name.
     *
     * @param srcFile      local file
     * @param dstHdfsPath  destination hdfs path
     * @param deleteSource whether to delete the src
     * @param overwrite    whether to overwrite an existing file
     * @return if success or not
     * @throws IOException errors
     */
    public boolean copyLocalToHdfs(String srcFile, String dstHdfsPath, boolean deleteSource, boolean overwrite) throws IOException {
        Path srcPath = new Path(srcFile);
        Path dstPath = new Path(dstHdfsPath);

        fs.copyFromLocalFile(deleteSource, overwrite, srcPath, dstPath);

        return true;
    }

    /**
     * copy hdfs file to local
     *
     * @param srcHdfsFilePath source hdfs file path
     * @param dstFile         destination file
     * @param deleteSource    delete source
     * @param overwrite       overwrite
     * @return result of copy hdfs file to local
     * @throws IOException errors
     */
    public boolean copyHdfsToLocal(String srcHdfsFilePath, String dstFile, boolean deleteSource, boolean overwrite) throws IOException {
        Path srcPath = new Path(srcHdfsFilePath);
        File dstPath = new File(dstFile);

        if (dstPath.exists()) {
            if (dstPath.isFile()) {
                if (overwrite) {
                    Files.delete(dstPath.toPath());
                }
            } else {
                logger.error("destination file must be a file");
            }
        }

        if (!dstPath.getParentFile().exists()) {
            dstPath.getParentFile().mkdirs();
        }

        return FileUtil.copy(fs, srcPath, dstPath, deleteSource, fs.getConf());
    }
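
    // Usage sketch (illustrative): round trip between the local disk and HDFS with the two
    // copy helpers above. Both paths are hypothetical placeholders.
    //
    //     HadoopUtils utils = HadoopUtils.getInstance();
    //     // upload, keeping the local source and overwriting any existing target
    //     utils.copyLocalToHdfs("/tmp/job.sh", "/dolphinscheduler/demo/resources/job.sh", false, true);
    //     // download it back, overwriting the local copy if present
    //     utils.copyHdfsToLocal("/dolphinscheduler/demo/resources/job.sh", "/tmp/job-copy.sh", false, true);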

    /**
     * delete a file
     *
     * @param hdfsFilePath the path to delete.
     * @param recursive    if path is a directory and set to
     *                     true, the directory is deleted else throws an exception. In
     *                     case of a file the recursive can be set to either true or false.
     * @return true if delete is successful else false.
     * @throws IOException errors
     */
    public boolean delete(String hdfsFilePath, boolean recursive) throws IOException {
        return fs.delete(new Path(hdfsFilePath), recursive);
    }

    /**
     * check if exists
     *
     * @param hdfsFilePath source file path
     * @return result of exists or not
     * @throws IOException errors
     */
    public boolean exists(String hdfsFilePath) throws IOException {
        return fs.exists(new Path(hdfsFilePath));
    }

    /**
     * Gets a list of files in the directory
     *
     * @param filePath file path
     * @return {@link FileStatus} file status
     * @throws Exception errors
     */
    public FileStatus[] listFileStatus(String filePath) throws Exception {
        try {
            return fs.listStatus(new Path(filePath));
        } catch (IOException e) {
            logger.error("Get file list exception", e);
            throw new Exception("Get file list exception", e);
        }
    }

    /**
     * Renames Path src to Path dst.  Can take place on local fs
     * or remote DFS.
     *
     * @param src path to be renamed
     * @param dst new path after rename
     * @return true if rename is successful
     * @throws IOException on failure
     */
    public boolean rename(String src, String dst) throws IOException {
        return fs.rename(new Path(src), new Path(dst));
    }


    /**
     * get the state of an application
     *
     * @param applicationId application id
     * @return the application status, or null if it cannot be determined
     * @throws JSONException json exception
     */
    public ExecutionStatus getApplicationStatus(String applicationId) throws JSONException {
        if (StringUtils.isEmpty(applicationId)) {
            return null;
        }

        String applicationUrl = getApplicationUrl(applicationId);

        String responseContent = HttpUtils.get(applicationUrl);

        if (StringUtils.isEmpty(responseContent)) {
            return null;
        }

        JSONObject jsonObject = JSON.parseObject(responseContent);
        String result = jsonObject.getJSONObject("app").getString("finalStatus");

        switch (result) {
            case Constants.ACCEPTED:
                return ExecutionStatus.SUBMITTED_SUCCESS;
            case Constants.SUCCEEDED:
                return ExecutionStatus.SUCCESS;
            case Constants.NEW:
            case Constants.NEW_SAVING:
            case Constants.SUBMITTED:
            case Constants.FAILED:
                return ExecutionStatus.FAILURE;
            case Constants.KILLED:
                return ExecutionStatus.KILL;

            case Constants.RUNNING:
            default:
                return ExecutionStatus.RUNNING_EXEUTION;
        }
    }
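
    // Usage sketch (illustrative): mapping a YARN application's finalStatus to an ExecutionStatus.
    // The application id below is a hypothetical placeholder; the request goes to the address
    // configured via Constants.YARN_APPLICATION_STATUS_ADDRESS.
    //
    //     ExecutionStatus status = HadoopUtils.getInstance()
    //             .getApplicationStatus("application_1570000000000_0001");
    //     if (status == ExecutionStatus.FAILURE || status == ExecutionStatus.KILL) {
    //         // handle a failed or killed yarn job
    //     }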

    /**
     * @return data hdfs path
     */
    public static String getHdfsDataBasePath() {
        String basePath = PropertyUtils.getString(Constants.DATA_STORE_2_HDFS_BASEPATH);
        if ("/".equals(basePath)) {
            // if basePath is configured as "/", the generated url may contain an extra leading slash, e.g. //default/resources
            return "";
        } else {
            return basePath;
        }
    }

    /**
     * hdfs resource dir
     *
     * @param tenantCode tenant code
     * @return hdfs resource dir
     */
    public static String getHdfsResDir(String tenantCode) {
        return String.format("%s/resources", getHdfsTenantDir(tenantCode));
    }

    /**
     * hdfs user dir
     *
     * @param tenantCode tenant code
     * @param userId     user id
     * @return hdfs user dir
     */
    public static String getHdfsUserDir(String tenantCode, int userId) {
        return String.format("%s/home/%d", getHdfsTenantDir(tenantCode), userId);
    }

    /**
     * hdfs udf dir
     *
     * @param tenantCode tenant code
     * @return get udf dir on hdfs
     */
    public static String getHdfsUdfDir(String tenantCode) {
        return String.format("%s/udfs", getHdfsTenantDir(tenantCode));
    }

    /**
     * get absolute path and name for file on hdfs
     *
     * @param tenantCode tenant code
     * @param filename   file name
     * @return get absolute path and name for file on hdfs
     */
    public static String getHdfsFilename(String tenantCode, String filename) {
        return String.format("%s/%s", getHdfsResDir(tenantCode), filename);
    }

    /**
     * get absolute path and name for udf file on hdfs
     *
     * @param tenantCode tenant code
     * @param filename   file name
     * @return get absolute path and name for udf file on hdfs
     */
    public static String getHdfsUdfFilename(String tenantCode, String filename) {
        return String.format("%s/%s", getHdfsUdfDir(tenantCode), filename);
    }

    /**
     * @param tenantCode tenant code
     * @return file directory of tenants on hdfs
     */
    public static String getHdfsTenantDir(String tenantCode) {
        return String.format("%s/%s", getHdfsDataBasePath(), tenantCode);
    }
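
    // Path layout sketch (illustrative): how the static helpers above compose paths under the
    // configured base path. Tenant code "dolphin" and user id 10 are hypothetical values.
    //
    //     getHdfsTenantDir("dolphin")        -> "<basePath>/dolphin"
    //     getHdfsResDir("dolphin")           -> "<basePath>/dolphin/resources"
    //     getHdfsUserDir("dolphin", 10)      -> "<basePath>/dolphin/home/10"
    //     getHdfsUdfDir("dolphin")           -> "<basePath>/dolphin/udfs"
    //     getHdfsFilename("dolphin", "a.sh") -> "<basePath>/dolphin/resources/a.sh"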


    /**
     * getAppAddress
     *
     * @param appAddress app address
     * @param rmHa       resource manager ha
     * @return app address
     */
    public static String getAppAddress(String appAddress, String rmHa) {

        // get the active ResourceManager
        String activeRM = YarnHAAdminUtils.getActiveRMName(rmHa);

        String[] split1 = appAddress.split(Constants.DOUBLE_SLASH);

        if (split1.length != 2) {
            return null;
        }

        String start = split1[0] + Constants.DOUBLE_SLASH;
        String[] split2 = split1[1].split(Constants.COLON);

        if (split2.length != 2) {
            return null;
        }

        String end = Constants.COLON + split2[1];

        return start + activeRM + end;
    }
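
    // Behaviour sketch (illustrative): with ResourceManager HA enabled, getAppAddress swaps the
    // host in the configured status address for the id of the active ResourceManager. Host names
    // below are hypothetical.
    //
    //     getAppAddress("http://ds1:8088/ws/v1/cluster/apps/%s", "192.168.1.1,192.168.1.2")
    //         // -> "http://192.168.1.2:8088/ws/v1/cluster/apps/%s" when 192.168.1.2 is active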


    @Override
    public void close() throws IOException {
        if (fs != null) {
            try {
                fs.close();
            } catch (IOException e) {
                logger.error("Close HadoopUtils instance failed", e);
                throw new IOException("Close HadoopUtils instance failed", e);
            }
        }
    }


    /**
     * yarn ha admin utils
     */
    private static final class YarnHAAdminUtils extends RMAdminCLI {

        /**
         * get the id of the active resource manager
         *
         * @param rmIds comma separated resource manager ids
         * @return the id of the active resource manager, or null if none is active
         */
        public static String getActiveRMName(String rmIds) {

            String[] rmIdArr = rmIds.split(Constants.COMMA);

            int activeResourceManagerPort = PropertyUtils.getInt(Constants.HADOOP_RESOURCE_MANAGER_HTTPADDRESS_PORT, 8088);

            String yarnUrl = "http://%s:" + activeResourceManagerPort + "/ws/v1/cluster/info";

            String state = null;
            try {
                // send http get request to rm1
                state = getRMState(String.format(yarnUrl, rmIdArr[0]));

                if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) {
                    return rmIdArr[0];
                } else if (Constants.HADOOP_RM_STATE_STANDBY.equals(state)) {
                    state = getRMState(String.format(yarnUrl, rmIdArr[1]));
                    if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) {
                        return rmIdArr[1];
                    }
                } else {
                    return null;
                }
            } catch (Exception e) {
                state = getRMState(String.format(yarnUrl, rmIdArr[1]));
                if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) {
                    // rm1 was unreachable and rm2 reports ACTIVE, so rm2 is the active one
                    return rmIdArr[1];
                }
            }
            return null;
        }


        /**
         * get ResourceManager state
         *
         * @param url the cluster info url of a resource manager
         * @return the resource manager ha state, or null if the request failed
         */
        public static String getRMState(String url) {

            String retStr = HttpUtils.get(url);

            if (StringUtils.isEmpty(retStr)) {
                return null;
            }
            //to json
            JSONObject jsonObject = JSON.parseObject(retStr);

            //get ResourceManager state
            return jsonObject.getJSONObject("clusterInfo").getString("haState");
        }

    }
}