Commit b16a7b9b authored by: Z zengqiao

v2.8.1_e initialization

1. Test code; open-source users should avoid using it.
2. Includes the Kafka-HA features; on top of v2.8.0_e, adds support for switching by clientId.
3. Branch cut from v2.8.0_e.
Parent e81c0f30
......@@ -591,4 +591,7 @@ CREATE TABLE `work_order` (
`gmt_create` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
`gmt_modify` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='工单表';
\ No newline at end of file
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='工单表';
ALTER TABLE `topic_connections` ADD COLUMN `client_id` VARCHAR(1024) NOT NULL DEFAULT '' COMMENT '客户端ID' AFTER `client_version`;
......@@ -9,9 +9,13 @@ import lombok.Getter;
@Getter
public enum HaResTypeEnum {
CLUSTER(0, "Cluster"),
TOPIC(1, "Topic"),
KAFKA_USER(2, "KafkaUser"),
KAFKA_USER_AND_CLIENT(3, "KafkaUserAndClient"),
;
private final int code;
......@@ -22,4 +26,4 @@ public enum HaResTypeEnum {
this.code = code;
this.msg = msg;
}
}
\ No newline at end of file
}
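For reference, HA relation rows of the new KAFKA_USER_AND_CLIENT type are keyed by a kafkaUser#clientId string elsewhere in this commit (see the HAUtils helper further down); a minimal sketch with made-up names:
// resType = 3 identifies a kafkaUser+clientId relation (per the enum above)
int resType = HaResTypeEnum.KAFKA_USER_AND_CLIENT.getCode();
// activeResName stores both IDs joined by '#' (hypothetical values)
String activeResName = HAUtils.mergeKafkaUserAndClient("ha_user", "client-1"); // "ha_user#client-1"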
......@@ -33,6 +33,8 @@ public class ConfigConstant {
public static final String HA_SWITCH_JOB_TIMEOUT_UNIT_SEC_CONFIG_PREFIX = "HA_SWITCH_JOB_TIMEOUT_UNIT_SEC_CONFIG_CLUSTER";
public static final String HA_CONNECTION_ACTIVE_TIME_UNIT_MIN = "HA_CONNECTION_ACTIVE_TIME_UNIT_MIN";
private ConfigConstant() {
}
}
package com.xiaojukeji.kafka.manager.common.entity.ao.topic;
import lombok.Data;
/**
* @author zengqiao
* @date 20/4/20
*/
@Data
public class TopicConnection {
private Long clusterId;
......@@ -19,72 +22,9 @@ public class TopicConnection {
private String clientVersion;
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public String getAppId() {
return appId;
}
public void setAppId(String appId) {
this.appId = appId;
}
public String getIp() {
return ip;
}
public void setIp(String ip) {
this.ip = ip;
}
public String getHostname() {
return hostname;
}
public void setHostname(String hostname) {
this.hostname = hostname;
}
public String getClientType() {
return clientType;
}
public void setClientType(String clientType) {
this.clientType = clientType;
}
public String getClientVersion() {
return clientVersion;
}
private String clientId;
public void setClientVersion(String clientVersion) {
this.clientVersion = clientVersion;
}
private Long realConnectTime;
@Override
public String toString() {
return "TopicConnectionDTO{" +
"clusterId=" + clusterId +
", topicName='" + topicName + '\'' +
", appId='" + appId + '\'' +
", ip='" + ip + '\'' +
", hostname='" + hostname + '\'' +
", clientType='" + clientType + '\'' +
", clientVersion='" + clientVersion + '\'' +
'}';
}
private Long createTime;
}
\ No newline at end of file
......@@ -15,12 +15,4 @@ public class ASSwitchJobActionDTO {
@NotBlank(message = "action不允许为空")
@ApiModelProperty(value = "动作, force")
private String action;
// @NotNull(message = "all不允许为NULL")
// @ApiModelProperty(value = "所有的Topic")
// private Boolean allJumpWaitInSync;
//
// @NotNull(message = "jumpWaitInSyncActiveTopicList不允许为NULL")
// @ApiModelProperty(value = "操作的Topic")
// private List<String> jumpWaitInSyncActiveTopicList;
}
......@@ -4,6 +4,7 @@ import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import java.util.List;
......@@ -27,5 +28,13 @@ public class ASSwitchJobDTO {
private Long standbyClusterPhyId;
@NotNull(message = "topicNameList不允许为NULL")
@ApiModelProperty(value="切换的Topic名称列表")
private List<String> topicNameList;
/**
 * kafkaUser + client list
*/
@Valid
@ApiModelProperty(value="切换的KafkaUser&ClientId列表,Client可以为空串")
private List<KafkaUserAndClientDTO> kafkaUserAndClientIdList;
}
package com.xiaojukeji.kafka.manager.common.entity.dto.ha;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import javax.validation.constraints.NotBlank;
@Data
@ApiModel(description="KafkaUser和ClientId信息")
public class KafkaUserAndClientDTO {
@NotBlank(message = "kafkaUser不允许为空串")
@ApiModelProperty(value = "kafkaUser")
private String kafkaUser;
@ApiModelProperty(value = "clientId")
private String clientId;
}
......@@ -32,6 +32,9 @@ public class HaTopicRelationDTO {
@ApiModelProperty(value = "需要关联|解绑的topic名称列表")
private List<String> topicNames;
@ApiModelProperty(value = "解绑是否保留备集群资源(topic,kafkaUser,group)")
private Boolean retainStandbyResource;
@Override
public String toString() {
return "HaTopicRelationDTO{" +
......@@ -39,6 +42,7 @@ public class HaTopicRelationDTO {
", standbyClusterId=" + standbyClusterId +
", all=" + all +
", topicNames=" + topicNames +
", retainStandbyResource=" + retainStandbyResource +
'}';
}
......
......@@ -21,4 +21,11 @@ public class AppRelateTopicsDTO {
@NotNull(message = "filterTopicNameList不允许为NULL")
@ApiModelProperty(value="过滤的Topic列表")
private List<String> filterTopicNameList;
@ApiModelProperty(value="使用KafkaUser+Client维度的数据,默认是kafkaUser维度")
private Boolean useKafkaUserAndClientId;
@NotNull(message = "ha不允许为NULL")
@ApiModelProperty(value="查询是否高可用topic")
private Boolean ha;
}
\ No newline at end of file
package com.xiaojukeji.kafka.manager.common.entity.pojo.gateway;
import lombok.Data;
import java.util.Date;
/**
......@@ -7,6 +9,7 @@ import java.util.Date;
* @author zengqiao
* @date 20/7/6
*/
@Data
public class TopicConnectionDO {
private Long id;
......@@ -22,87 +25,13 @@ public class TopicConnectionDO {
private String clientVersion;
private Date createTime;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Long getClusterId() {
return clusterId;
}
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
private String clientId;
public String getTopicName() {
return topicName;
}
private Long realConnectTime;
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getAppId() {
return appId;
}
public void setAppId(String appId) {
this.appId = appId;
}
public String getIp() {
return ip;
}
public void setIp(String ip) {
this.ip = ip;
}
public String getClientVersion() {
return clientVersion;
}
public void setClientVersion(String clientVersion) {
this.clientVersion = clientVersion;
}
public Date getCreateTime() {
return createTime;
}
public void setCreateTime(Date createTime) {
this.createTime = createTime;
}
@Override
public String toString() {
return "TopicConnectionDO{" +
"id=" + id +
", clusterId=" + clusterId +
", topicName='" + topicName + '\'' +
", type='" + type + '\'' +
", appId='" + appId + '\'' +
", ip='" + ip + '\'' +
", clientVersion='" + clientVersion + '\'' +
", createTime=" + createTime +
'}';
}
private Date createTime;
public String uniqueKey() {
return appId + clusterId + topicName + type + ip;
return appId + clusterId + topicName + type + ip + clientId;
}
}
\ No newline at end of file
package com.xiaojukeji.kafka.manager.common.entity.pojo.ha;
import com.baomidou.mybatisplus.annotation.TableName;
import com.xiaojukeji.kafka.manager.common.bizenum.ha.HaResTypeEnum;
import com.xiaojukeji.kafka.manager.common.entity.pojo.BaseDO;
import lombok.AllArgsConstructor;
import lombok.Data;
......@@ -37,6 +38,7 @@ public class HaASRelationDO extends BaseDO {
/**
 * Resource type
* @see HaResTypeEnum
*/
private Integer resType;
......
package com.xiaojukeji.kafka.manager.common.entity.pojo.ha;
import com.baomidou.mybatisplus.annotation.TableName;
import com.xiaojukeji.kafka.manager.common.entity.dto.ha.KafkaUserAndClientDTO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.BaseDO;
import com.xiaojukeji.kafka.manager.common.utils.ConvertUtil;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.ArrayList;
import java.util.List;
/**
 * HA active/standby switch job table
......@@ -28,15 +34,35 @@ public class HaASSwitchJobDO extends BaseDO {
*/
private Integer jobStatus;
/**
 * Type: 0 = kafkaUser, 1 = kafkaUser+Client
*/
private Integer type;
/**
 * Extended data (JSON)
*/
private String extendData;
/**
 * Operator
*/
private String operator;
public HaASSwitchJobDO(Long activeClusterPhyId, Long standbyClusterPhyId, Integer jobStatus, String operator) {
public HaASSwitchJobDO(Long activeClusterPhyId, Long standbyClusterPhyId, Integer type, List<KafkaUserAndClientDTO> extendDataObj, Integer jobStatus, String operator) {
this.activeClusterPhyId = activeClusterPhyId;
this.standbyClusterPhyId = standbyClusterPhyId;
this.type = type;
this.extendData = ValidateUtils.isEmptyList(extendDataObj)? "": ConvertUtil.obj2Json(extendDataObj);
this.jobStatus = jobStatus;
this.operator = operator;
}
public List<KafkaUserAndClientDTO> getExtendRawData() {
if (ValidateUtils.isBlank(extendData)) {
return new ArrayList<>();
}
return ConvertUtil.str2ObjArrayByJson(extendData, KafkaUserAndClientDTO.class);
}
}
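For reference, a minimal sketch of the new constructor and the extendData round-trip; the cluster IDs, job status, and operator below are illustrative placeholders, and ConvertUtil is assumed to (de)serialize JSON as its method names suggest:
KafkaUserAndClientDTO pair = new KafkaUserAndClientDTO(); // setters generated by @Data
pair.setKafkaUser("ha_user");
pair.setClientId("client-1");

// type = 1 means kafkaUser+Client; the list is stored as JSON in extendData
HaASSwitchJobDO jobDO = new HaASSwitchJobDO(1L, 2L, 1, Collections.singletonList(pair), 0, "admin");
List<KafkaUserAndClientDTO> parsed = jobDO.getExtendRawData(); // parsed back from the JSON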
......@@ -2,11 +2,13 @@ package com.xiaojukeji.kafka.manager.common.entity.vo.normal.topic;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
* @author zhongyuankai,zengqiao
* @date 20/4/8
*/
@Data
@ApiModel(value = "Topic连接信息")
public class TopicConnectionVO {
@ApiModelProperty(value = "集群ID")
......@@ -30,72 +32,12 @@ public class TopicConnectionVO {
@ApiModelProperty(value = "客户端版本")
private String clientVersion;
public Long getClusterId() {
return clusterId;
}
@ApiModelProperty(value = "客户端ID")
private String clientId;
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
}
@ApiModelProperty(value = "连接Broker时间")
private Long realConnectTime;
public String getTopicName() {
return topicName;
}
public void setTopicName(String topicName) {
this.topicName = topicName;
}
public String getAppId() {
return appId;
}
public void setAppId(String appId) {
this.appId = appId;
}
public String getIp() {
return ip;
}
public void setIp(String ip) {
this.ip = ip;
}
public String getHostname() {
return hostname;
}
public void setHostname(String hostname) {
this.hostname = hostname;
}
public String getClientType() {
return clientType;
}
public void setClientType(String clientType) {
this.clientType = clientType;
}
public String getClientVersion() {
return clientVersion;
}
public void setClientVersion(String clientVersion) {
this.clientVersion = clientVersion;
}
@Override
public String toString() {
return "TopicConnectionVO{" +
"clusterId=" + clusterId +
", topicName='" + topicName + '\'' +
", appId='" + appId + '\'' +
", ip='" + ip + '\'' +
", hostname='" + hostname + '\'' +
", clientType='" + clientType + '\'' +
", clientVersion='" + clientVersion + '\'' +
'}';
}
@ApiModelProperty(value = "创建时间")
private Long createTime;
}
......@@ -3,7 +3,9 @@ package com.xiaojukeji.kafka.manager.common.entity.vo.rd.app;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.ArrayList;
import java.util.List;
/**
......@@ -11,6 +13,7 @@ import java.util.List;
* @date 20/5/4
*/
@Data
@NoArgsConstructor
@ApiModel(description="App关联Topic信息")
public class AppRelateTopicsVO {
@ApiModelProperty(value="物理集群ID")
......@@ -19,6 +22,12 @@ public class AppRelateTopicsVO {
@ApiModelProperty(value="kafkaUser")
private String kafkaUser;
@ApiModelProperty(value="clientId")
private String clientId;
@ApiModelProperty(value="已建立HA的Client")
private List<String> haClientIdList;
@ApiModelProperty(value="选中的Topic列表")
private List<String> selectedTopicNameList;
......@@ -27,4 +36,37 @@ public class AppRelateTopicsVO {
@ApiModelProperty(value="未建立HA的Topic列表")
private List<String> notHaTopicNameList;
public AppRelateTopicsVO(Long clusterPhyId, String kafkaUser, String clientId) {
this.clusterPhyId = clusterPhyId;
this.kafkaUser = kafkaUser;
this.clientId = clientId;
this.selectedTopicNameList = new ArrayList<>();
this.notSelectTopicNameList = new ArrayList<>();
this.notHaTopicNameList = new ArrayList<>();
}
public void addSelectedIfNotExist(String topicName) {
if (selectedTopicNameList.contains(topicName)) {
return;
}
selectedTopicNameList.add(topicName);
}
public void addNotSelectedIfNotExist(String topicName) {
if (notSelectTopicNameList.contains(topicName)) {
return;
}
notSelectTopicNameList.add(topicName);
}
public void addNotHaIfNotExist(String topicName) {
if (notHaTopicNameList.contains(topicName)) {
return;
}
notHaTopicNameList.add(topicName);
}
}
\ No newline at end of file
package com.xiaojukeji.kafka.manager.common.utils;
public class HAUtils {
public static String mergeKafkaUserAndClient(String kafkaUser, String clientId) {
if (ValidateUtils.isBlank(clientId)) {
return kafkaUser;
}
return String.format("%s#%s", kafkaUser, clientId);
}
public static Tuple<String, String> splitKafkaUserAndClient(String kafkaUserAndClientId) {
if (ValidateUtils.isBlank(kafkaUserAndClientId)) {
return null;
}
int idx = kafkaUserAndClientId.indexOf('#');
if (idx == -1) {
return null;
} else if (idx == kafkaUserAndClientId.length() - 1) {
return new Tuple<>(kafkaUserAndClientId.substring(0, idx), "");
}
return new Tuple<>(kafkaUserAndClientId.substring(0, idx), kafkaUserAndClientId.substring(idx + 1));
}
private HAUtils() {
}
}
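Expected behavior of the new helpers, per the implementation above (values are illustrative):
HAUtils.mergeKafkaUserAndClient("ha_user", "client-1"); // "ha_user#client-1"
HAUtils.mergeKafkaUserAndClient("ha_user", "");         // "ha_user" (blank clientId)
Tuple<String, String> t = HAUtils.splitKafkaUserAndClient("ha_user#client-1"); // ("ha_user", "client-1")
HAUtils.splitKafkaUserAndClient("ha_user");  // null: no '#' separator
HAUtils.splitKafkaUserAndClient("ha_user#"); // ("ha_user", "")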
......@@ -79,10 +79,27 @@ public class JsonUtils {
TopicConnectionDO connectionDO = new TopicConnectionDO();
String[] appIdDetailArray = appIdDetail.toString().split("#");
if (appIdDetailArray.length >= 3) {
connectionDO.setAppId(appIdDetailArray[0]);
connectionDO.setIp(appIdDetailArray[1]);
connectionDO.setClientVersion(appIdDetailArray[2]);
if (appIdDetailArray == null) {
appIdDetailArray = new String[0];
}
connectionDO.setAppId(parseTopicConnections(appIdDetailArray, 0));
connectionDO.setIp(parseTopicConnections(appIdDetailArray, 1));
connectionDO.setClientVersion(parseTopicConnections(appIdDetailArray, 2));
// parse the clientId
StringBuilder sb = new StringBuilder();
for (int i = 3; i < appIdDetailArray.length - 1; ++i) {
sb.append(parseTopicConnections(appIdDetailArray, i)).append("#");
}
connectionDO.setClientId(sb.length() > 0 ? sb.substring(0, sb.length() - 1) : ""); // drop trailing '#'; guard against an empty builder
// parse the connect timestamp
Long receiveTime = ConvertUtil.string2Long(parseTopicConnections(appIdDetailArray, appIdDetailArray.length - 1));
if (receiveTime == null) {
connectionDO.setRealConnectTime(-1L);
} else {
connectionDO.setRealConnectTime(receiveTime);
}
connectionDO.setClusterId(clusterId);
......@@ -95,4 +112,8 @@ public class JsonUtils {
}
return connectionDOList;
}
private static String parseTopicConnections(String[] appIdDetailArray, int idx) {
return (appIdDetailArray != null && appIdDetailArray.length >= idx + 1)? appIdDetailArray[idx]: "";
}
}
\ No newline at end of file
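The connection-detail wire format is not documented in this commit; inferred from the parsing above, each entry appears to be appId#ip#clientVersion#clientId#timestamp, where the clientId itself may contain '#'. A hypothetical example:
// "app_01#10.0.0.1#2.4.0#my#client#1668758400000" would parse to:
//   appId           = "app_01"
//   ip              = "10.0.0.1"
//   clientVersion   = "2.4.0"
//   clientId        = "my#client"      (segments 3..n-2 re-joined with '#')
//   realConnectTime = 1668758400000L   (last segment; -1L if not a number)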
package com.xiaojukeji.kafka.manager.common.utils;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.Data;
/**
* @Author: D10865
* @Description:
 * @Date: Create on 2018/5/29 4:08 PM
* @Modified By
*/
@JsonIgnoreProperties(value = { "hibernateLazyInitializer", "handler" })
@Data
public class Tuple<T, V> {
private T v1;
private V v2;
public Tuple(){}
public Tuple(T v1, V v2) {
this.v1 = v1;
this.v2 = v2;
}
public T v1() {
return v1;
}
public Tuple<T, V> setV1(T v1) {
this.v1 = v1;
return this;
}
public V v2() {
return v2;
}
public Tuple<T, V> setV2(V v2) {
this.v2 = v2;
return this;
}
@Override
public boolean equals(Object o) {
if (this == o) {return true;}
if (o == null || getClass() != o.getClass()) {return false;}
Tuple<?, ?> tuple = (Tuple<?, ?>) o;
if (v1 != null ? !v1.equals(tuple.v1) : tuple.v1 != null) {return false;}
return v2 != null ? v2.equals(tuple.v2) : tuple.v2 == null;
}
@Override
public int hashCode() {
int result = v1 != null ? v1.hashCode() : 0;
result = 31 * result + (v2 != null ? v2.hashCode() : 0);
return result;
}
}
......@@ -4,10 +4,10 @@
"description": "",
"scripts": {
"prestart": "npm install --save-dev webpack-dev-server",
"start": "webpack serve",
"start": "webpack-dev-server",
"daily-build": "cross-env NODE_ENV=production webpack",
"pre-build": "cross-env NODE_ENV=production webpack",
"prod-build": "cross-env NODE_ENV=production webpack",
"prod-build": "cross-env NODE_OPTIONS=--max-old-space-size=8000 NODE_ENV=production webpack",
"fix-memory": "cross-env LIMIT=4096 increase-memory-limit"
},
"author": "",
......@@ -52,10 +52,11 @@
"typescript": "^3.3.3333",
"url-loader": "^4.1.1",
"webpack": "^4.29.6",
"webpack-cli": "^4.9.1",
"webpack-cli": "^3.2.3",
"webpack-dev-server": "^3.11.3",
"xlsx": "^0.16.1"
},
"dependencies": {
"format-to-json": "^1.0.4"
}
}
\ No newline at end of file
}
.ant-table-wrapper.no-lr-padding {
padding-left: 0!important;
padding-right: 0!important;
}
.no-table-header .ant-table-header {
display: none;
}
\ No newline at end of file
......@@ -6,6 +6,8 @@ import { Table, Tooltip } from 'component/antd';
import { SearchAndFilterContainer } from 'container/search-filter';
import Url from 'lib/url-parser';
import { pagination, cellStyle } from 'constants/table';
import moment = require('moment');
import { timeFormat } from 'constants/strategy';
@observer
export class ConnectInformation extends SearchAndFilterContainer {
......@@ -27,44 +29,70 @@ export class ConnectInformation extends SearchAndFilterContainer {
title: '客户端类型',
dataIndex: 'clientType',
key: 'clientType',
width: '20%',
width: 130,
filters: [{ text: '消费', value: 'consumer' }, { text: '生产', value: 'produce' }],
onFilter: (value: string, record: IConnectionInfo) => record.clientType.indexOf(value) === 0,
render: (t: string) =>
<span>{t === 'consumer' ? '消费' : '生产'}</span>,
}, this.renderColumnsFilter('filterVisible'));
const columns = [{
title: 'AppID',
dataIndex: 'appId',
key: 'appId',
width: '20%',
sorter: (a: IConnectionInfo, b: IConnectionInfo) => a.appId.charCodeAt(0) - b.appId.charCodeAt(0),
},
{
title: '主机名',
dataIndex: 'hostname',
key: 'hostname',
width: '40%',
onCell: () => ({
style: {
maxWidth: 250,
...cellStyle,
const columns = [
{
title: 'AppID',
dataIndex: 'appId',
key: 'appId',
width: '30%',
sorter: (a: IConnectionInfo, b: IConnectionInfo) => a.appId.charCodeAt(0) - b.appId.charCodeAt(0),
},
{
title: 'clientID',
dataIndex: 'clientId',
key: 'clientId',
width: '30%',
onCell: () => ({
style: {
maxWidth: 250,
...cellStyle,
},
}),
render: (t: string) => {
return (
<Tooltip placement="bottomLeft" title={t} >{t}</Tooltip>
);
},
},
{
title: '主机名',
dataIndex: 'hostname',
key: 'hostname',
width: '30%',
onCell: () => ({
style: {
maxWidth: 250,
...cellStyle,
},
}),
render: (t: string) => {
return (
<Tooltip placement="bottomLeft" title={t} >{t}</Tooltip>
);
},
}),
render: (t: string) => {
return (
<Tooltip placement="bottomLeft" title={t} >{t}</Tooltip>
);
},
},
{
title: '客户端版本',
dataIndex: 'clientVersion',
key: 'clientVersion',
width: '20%',
},
{
title: '客户端版本',
dataIndex: 'clientVersion',
key: 'clientVersion',
width: 130,
},
clientType,
{
title: '最后访问时间',
dataIndex: 'realConnectTime',
key: 'realConnectTime',
width: 170,
render: (t: number) => moment(t).format(timeFormat),
sorter: (a: IConnectionInfo, b: IConnectionInfo) => a.realConnectTime - b.realConnectTime,
},
];
if (connectInfo) {
return (
......
......@@ -75,6 +75,8 @@ export interface IConnectionInfo {
hostname: string;
ip: string;
topicName: string;
clientId?: string;
realConnectTime?: number;
key?: number;
}
......
......@@ -130,7 +130,11 @@ module.exports = {
historyApiFallback: true,
proxy: {
'/api/v1/': {
target: 'http://127.0.0.1:8080/',
// target: 'http://117.51.150.133:8080/',
target: 'http://10.190.55.249:8080/',
// target: 'http://10.179.37.199:8008',
// target: 'http://10.179.148.210:8080',
// target: 'http://99.11.45.164:8888',
changeOrigin: true,
}
},
......
......@@ -4,13 +4,15 @@ import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.app.AppRelateTopicsVO;
import java.util.List;
import java.util.Set;
/**
 * HA App management
*/
public interface HaAppManager {
Result<List<AppRelateTopicsVO>> appRelateTopics(Long clusterPhyId, List<String> filterTopicNameList);
Result<List<AppRelateTopicsVO>> appRelateTopics(Boolean ha, Long clusterPhyId, List<String> filterTopicNameList);
Result<List<AppRelateTopicsVO>> appAndClientRelateTopics(Long clusterPhyId, Set<String> filterTopicNameSet);
boolean isContainAllRelateAppTopics(Long clusterPhyId, List<String> filterTopicNameList);
}
......@@ -3,6 +3,7 @@ package com.xiaojukeji.kafka.manager.service.biz.ha;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.TopicOperationResult;
import com.xiaojukeji.kafka.manager.common.entity.ao.ha.HaSwitchTopic;
import com.xiaojukeji.kafka.manager.common.entity.dto.ha.KafkaUserAndClientDTO;
import com.xiaojukeji.kafka.manager.common.entity.dto.op.topic.HaTopicRelationDTO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ha.JobLogDO;
......@@ -37,6 +38,7 @@ public interface HaTopicManager {
Result<HaSwitchTopic> switchHaWithCanRetry(Long newActiveClusterPhyId,
Long newStandbyClusterPhyId,
List<String> switchTopicNameList,
List<KafkaUserAndClientDTO> kafkaUserAndClientIdList,
boolean focus,
boolean firstTriggerExecute,
JobLogDO switchLogTemplate,
......
package com.xiaojukeji.kafka.manager.service.biz.ha.impl;
import com.xiaojukeji.kafka.manager.common.bizenum.ha.HaResTypeEnum;
import com.xiaojukeji.kafka.manager.common.constant.ConfigConstant;
import com.xiaojukeji.kafka.manager.common.constant.Constant;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.pojo.TopicDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.gateway.TopicConnectionDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ha.HaASRelationDO;
import com.xiaojukeji.kafka.manager.common.entity.vo.rd.app.AppRelateTopicsVO;
import com.xiaojukeji.kafka.manager.common.utils.FutureUtil;
import com.xiaojukeji.kafka.manager.service.biz.ha.HaAppManager;
import com.xiaojukeji.kafka.manager.service.service.ConfigService;
import com.xiaojukeji.kafka.manager.service.service.TopicManagerService;
import com.xiaojukeji.kafka.manager.service.service.gateway.AuthorityService;
import com.xiaojukeji.kafka.manager.service.service.gateway.TopicConnectionService;
import com.xiaojukeji.kafka.manager.service.service.ha.HaASRelationService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
......@@ -22,17 +32,45 @@ public class HaAppManagerImpl implements HaAppManager {
@Autowired
private HaASRelationService haASRelationService;
@Autowired
private TopicConnectionService topicConnectionService;
@Autowired
private ConfigService configService;
@Autowired
private TopicManagerService topicManagerService;
private static final FutureUtil<Result<List<AppRelateTopicsVO>>> ConnectionsSearchTP = FutureUtil.init(
"ConnectionsSearchTP",
5,
5,
500
);
@Override
public Result<List<AppRelateTopicsVO>> appRelateTopics(Long clusterPhyId, List<String> filterTopicNameList) {
public Result<List<AppRelateTopicsVO>> appRelateTopics(Boolean ha, Long clusterPhyId, List<String> filterTopicNameList) {
// fetch the related topic list
Map<String, Set<String>> userTopicMap = this.appRelateTopicsMap(clusterPhyId, filterTopicNameList);
Map<String, Set<String>> appClientSetMap = haASRelationService.listAllHAClient(clusterPhyId, userTopicMap.keySet());
// fetch the cluster's topics that already have HA established
Set<String> haTopicNameSet = haASRelationService.listAllHAFromDB(clusterPhyId, HaResTypeEnum.TOPIC)
.stream()
.map(elem -> elem.getActiveResName())
.collect(Collectors.toSet());
Set<String> topicNameSet = null;
if (ha) {
topicNameSet = haTopicNameSet;
} else {
List<TopicDO> topicDOS = topicManagerService.getByClusterId(clusterPhyId);
topicNameSet = topicDOS.stream()
.filter(topicBizPO -> !haTopicNameSet.contains(topicBizPO.getTopicName()))
.map(TopicDO::getTopicName).collect(Collectors.toSet());
}
Set<String> filterTopicNameSet = new HashSet<>(filterTopicNameList);
List<AppRelateTopicsVO> voList = new ArrayList<>();
......@@ -40,16 +78,18 @@ public class HaAppManagerImpl implements HaAppManager {
AppRelateTopicsVO vo = new AppRelateTopicsVO();
vo.setClusterPhyId(clusterPhyId);
vo.setKafkaUser(entry.getKey());
vo.setHaClientIdList(new ArrayList<>(appClientSetMap.getOrDefault(entry.getKey(), new HashSet<>())));
vo.setSelectedTopicNameList(new ArrayList<>());
vo.setNotSelectTopicNameList(new ArrayList<>());
vo.setNotHaTopicNameList(new ArrayList<>());
Set<String> finalTopicNameSet = topicNameSet;
entry.getValue().forEach(elem -> {
if (elem.startsWith("__")) {
// ignore
return;
}
if (!haTopicNameSet.contains(elem)) {
if (!finalTopicNameSet.contains(elem)) {
vo.getNotHaTopicNameList().add(elem);
} else if (filterTopicNameSet.contains(elem)) {
vo.getSelectedTopicNameList().add(elem);
......@@ -64,6 +104,104 @@ public class HaAppManagerImpl implements HaAppManager {
return Result.buildSuc(voList);
}
@Override
public Result<List<AppRelateTopicsVO>> appAndClientRelateTopics(Long clusterPhyId, Set<String> filterTopicNameSet) {
List<HaASRelationDO> haASRelationDOList = haASRelationService.listAllHAFromDB(clusterPhyId, HaResTypeEnum.CLUSTER);
Long secondClusterId = null;
for (HaASRelationDO asRelationDO: haASRelationDOList) {
if (clusterPhyId.equals(asRelationDO.getActiveClusterPhyId())) {
secondClusterId = asRelationDO.getStandbyClusterPhyId();
} else {
secondClusterId = asRelationDO.getActiveClusterPhyId();
}
break;
}
Map<String/*TopicName*/, Result<Map<String/*KafkaUser*/, Set<String>/*ClientID*/>>> connectionsResultMap = new ConcurrentHashMap<>();
// active-window length (minutes)
Long activeMin = configService.getLongValue(ConfigConstant.HA_CONNECTION_ACTIVE_TIME_UNIT_MIN, Constant.TOPIC_CONNECTION_LATEST_TIME_MS / 1000 / 60);
// fetch the connections associated with each topic
for (String topicName: filterTopicNameSet) {
Long tempSecondClusterId = secondClusterId;
ConnectionsSearchTP.runnableTask(
String.format("clusterPhyId=%d||topicName=%s", clusterPhyId, topicName),
10000,
() -> {
Result<Map<String, Set<String>>> userAndClientMapResult = topicConnectionService.getHaKafkaUserAndClientIdByTopicName(
clusterPhyId,
tempSecondClusterId,
topicName,
new Date(System.currentTimeMillis() - activeMin * 60L * 1000L),
new Date()
);
connectionsResultMap.put(topicName, userAndClientMapResult);
}
);
ConnectionsSearchTP.waitExecute(10000);
}
// this interface is critical: fail fast and return an error on any exception
for (Result<Map<String, Set<String>>> valueResult: connectionsResultMap.values()) {
if (valueResult.failed()) {
return Result.buildFromIgnoreData(valueResult);
}
}
// convert the query results into a map
Map<String/*KafkaUser*/, Set<String>/*ClientID*/> kafkaUserAndClientMap = new HashMap<>();
for (Result<Map<String, Set<String>>> valueResult: connectionsResultMap.values()) {
for (Map.Entry<String, Set<String>> entry: valueResult.getData().entrySet()) {
kafkaUserAndClientMap.putIfAbsent(entry.getKey(), new HashSet<>());
kafkaUserAndClientMap.get(entry.getKey()).addAll(entry.getValue());
}
}
// fetch the cluster's topics that already have HA established
Set<String> haTopicNameSet = haASRelationService.listAllHAFromDB(clusterPhyId, HaResTypeEnum.TOPIC)
.stream()
.map(elem -> elem.getActiveResName())
.collect(Collectors.toSet());
// fetch the topic list per KafkaUser+Client
List<AppRelateTopicsVO> voList = new ArrayList<>();
for (Map.Entry<String, Set<String>> entry: kafkaUserAndClientMap.entrySet()) {
Long tempSecondClusterId = secondClusterId;
ConnectionsSearchTP.runnableTask(
"",
10000,
() -> {
Result<List<TopicConnectionDO>> doListResult = topicConnectionService.getByClusterAndAppId(
clusterPhyId,
tempSecondClusterId,
entry.getKey(),
new Date(System.currentTimeMillis() - activeMin * 60L * 1000L),
new Date()
);
if (doListResult.failed()) {
return Result.buildFromIgnoreData(doListResult);
}
return Result.buildSuc(convert2VOList(clusterPhyId, entry.getValue(), doListResult.getData(), haTopicNameSet, filterTopicNameSet));
}
);
for (Result<List<AppRelateTopicsVO>> elem: ConnectionsSearchTP.waitResult(10000)) {
if (elem.failed()) {
return Result.buildFromIgnoreData(elem); // fail fast: without the return, the failure result was silently discarded
}
voList.addAll(elem.getData());
}
}
return Result.buildSuc(voList);
}
@Override
public boolean isContainAllRelateAppTopics(Long clusterPhyId, List<String> filterTopicNameList) {
Map<String, Set<String>> userTopicMap = this.appRelateTopicsMap(clusterPhyId, filterTopicNameList);
......@@ -91,4 +229,41 @@ public class HaAppManagerImpl implements HaAppManager {
return userTopicMap;
}
private List<AppRelateTopicsVO> convert2VOList(Long clusterPhyId,
Set<String> clientIdSet,
List<TopicConnectionDO> connectionList,
Set<String> haTopicNameSet,
Set<String> filterTopicNameSet) {
Map<String/*clientID*/, AppRelateTopicsVO> voMap = new HashMap<>();
for (TopicConnectionDO connection: connectionList) {
if (connection.getTopicName().startsWith("__")) {
// ignore internal system topics
continue;
}
if (!clientIdSet.contains("") && !clientIdSet.contains(connection.getClientId())) {
continue;
}
AppRelateTopicsVO vo = voMap.get(connection.getClientId());
if (vo == null) {
vo = new AppRelateTopicsVO(clusterPhyId, connection.getAppId(), connection.getClientId());
}
if (!haTopicNameSet.contains(connection.getTopicName())) {
vo.addNotHaIfNotExist(connection.getTopicName());
}
if (!filterTopicNameSet.contains(connection.getTopicName())) {
vo.addNotSelectedIfNotExist(connection.getTopicName());
} else {
vo.addSelectedIfNotExist(connection.getTopicName());
}
voMap.put(connection.getClientId(), vo);
}
return new ArrayList<>(voMap.values());
}
}
......@@ -22,10 +22,7 @@ import com.xiaojukeji.kafka.manager.common.entity.pojo.ha.HaASSwitchJobDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ha.HaASSwitchSubJobDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ha.JobLogDO;
import com.xiaojukeji.kafka.manager.common.entity.vo.ha.job.HaJobDetailVO;
import com.xiaojukeji.kafka.manager.common.utils.BackoffUtils;
import com.xiaojukeji.kafka.manager.common.utils.ConvertUtil;
import com.xiaojukeji.kafka.manager.common.utils.FutureUtil;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.common.utils.*;
import com.xiaojukeji.kafka.manager.service.biz.ha.HaAppManager;
import com.xiaojukeji.kafka.manager.service.biz.ha.HaTopicManager;
import com.xiaojukeji.kafka.manager.service.biz.job.HaASSwitchJobManager;
......@@ -95,19 +92,20 @@ public class HaASSwitchJobManagerImpl implements HaASSwitchJobManager {
LOGGER.info("method=createJob||activeClusterPhyId={}||switchTopics={}||operator={}", dto.getActiveClusterPhyId(), ConvertUtil.obj2Json(haTopicSetResult.getData()), operator);
// 2. check whether all topics related to the KafkaUser are covered
if (dto.getMustContainAllKafkaUserTopics() != null
&& dto.getMustContainAllKafkaUserTopics()
&& (dto.getAll() == null || !dto.getAll())
&& !haAppManager.isContainAllRelateAppTopics(dto.getActiveClusterPhyId(), dto.getTopicNameList())) {
return Result.buildFromRSAndMsg(ResultStatus.OPERATION_FORBIDDEN, "存在KafkaUser关联的Topic未选中");
}
// // 2. check whether all topics related to the KafkaUser are covered
// if (dto.getMustContainAllKafkaUserTopics() != null
// && dto.getMustContainAllKafkaUserTopics()
// && (dto.getAll() == null || !dto.getAll())
// && !haAppManager.isContainAllRelateAppTopics(dto.getActiveClusterPhyId(), dto.getTopicNameList())) {
// return Result.buildFromRSAndMsg(ResultStatus.OPERATION_FORBIDDEN, "存在KafkaUser关联的Topic未选中");
// }
// 3. create the job
Result<Long> longResult = haASSwitchJobService.createJob(
dto.getActiveClusterPhyId(),
dto.getStandbyClusterPhyId(),
new ArrayList<>(haTopicSetResult.getData()),
dto.getKafkaUserAndClientIdList(),
operator
);
if (longResult.failed()) {
......@@ -176,6 +174,7 @@ public class HaASSwitchJobManagerImpl implements HaASSwitchJobManager {
jobDO.getActiveClusterPhyId(),
jobDO.getStandbyClusterPhyId(),
subJobDOList.stream().map(elem -> elem.getActiveResName()).collect(Collectors.toList()),
jobDO.getExtendRawData(),
focus,
firstTriggerExecute,
new JobLogDO(JobLogBizTypEnum.HA_SWITCH_JOB_LOG.getCode(), String.valueOf(jobId)),
......
package com.xiaojukeji.kafka.manager.service.service.gateway;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ao.topic.TopicConnection;
import com.xiaojukeji.kafka.manager.common.entity.pojo.gateway.TopicConnectionDO;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* @author zhongyuankai
......@@ -21,6 +24,14 @@ public interface TopicConnectionService {
Date startTime,
Date endTime);
Result<Map<String/*KafkaUser*/, Set<String>/*ClientID*/>> getHaKafkaUserAndClientIdByTopicName(Long firstClusterId,
Long secondClusterId,
String topicName,
Date startTime,
Date endTime);
Set<String> getKafkaUserAndClientIdTopicNames(Set<Long> clusterIdSet, String kafkaUser, String clientId, Date startTime, Date endTime);
/**
 * Query connection info
*/
......@@ -37,6 +48,8 @@ public interface TopicConnectionService {
Date startTime,
Date endTime);
Result<List<TopicConnectionDO>> getByClusterAndAppId(Long firstClusterId, Long secondClusterId, String appId, Date startTime, Date endTime);
/**
 * Check whether the topic has any connections
*/
......
package com.xiaojukeji.kafka.manager.service.service.gateway.impl;
import com.xiaojukeji.kafka.manager.common.bizenum.KafkaClientEnum;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.pojo.gateway.TopicConnectionDO;
import com.xiaojukeji.kafka.manager.common.entity.ao.topic.TopicConnection;
import com.xiaojukeji.kafka.manager.common.constant.KafkaConstant;
......@@ -67,6 +69,71 @@ public class TopicConnectionServiceImpl implements TopicConnectionService {
return getByTopicName(clusterId, doList);
}
@Override
public Result<Map<String, Set<String>>> getHaKafkaUserAndClientIdByTopicName(Long firstClusterId,
Long secondClusterId,
String topicName,
Date startTime,
Date endTime) {
List<TopicConnectionDO> doList = new ArrayList<>();
try {
if (firstClusterId != null) {
doList.addAll(topicConnectionDao.getByTopicName(firstClusterId, topicName, startTime, endTime));
}
} catch (Exception e) {
LOGGER.error("get topic connections failed, firstClusterId:{} topicName:{}.", firstClusterId, topicName, e);
return Result.buildFromRSAndMsg(ResultStatus.MYSQL_ERROR, e.getMessage());
}
try {
if (secondClusterId != null) {
doList.addAll(topicConnectionDao.getByTopicName(secondClusterId, topicName, startTime, endTime));
}
} catch (Exception e) {
LOGGER.error("get topic connections failed, secondClusterId:{} topicName:{}.", secondClusterId, topicName, e);
return Result.buildFromRSAndMsg(ResultStatus.MYSQL_ERROR, e.getMessage());
}
if (ValidateUtils.isEmptyList(doList)) {
return Result.buildSuc(new HashMap<>());
}
Map<String, Set<String>> userAndClientMap = new HashMap<>();
for (TopicConnectionDO po: doList) {
if (!po.getClientId().startsWith("P#") && !po.getClientId().startsWith("C#")) {
// ignore non-HA clientIds (HA ones are prefixed with "P#" or "C#")
continue;
}
userAndClientMap.putIfAbsent(po.getAppId(), new HashSet<>());
userAndClientMap.get(po.getAppId()).add(po.getClientId());
}
return Result.buildSuc(userAndClientMap);
}
@Override
public Set<String> getKafkaUserAndClientIdTopicNames(Set<Long> clusterIdSet, String kafkaUser, String clientId, Date startTime, Date endTime) {
List<TopicConnectionDO> doList = null;
try {
doList = topicConnectionDao.getByAppId(kafkaUser, startTime, endTime);
} catch (Exception e) {
LOGGER.error("get topic connections failed, kafkaUser:{}.", kafkaUser, e);
}
if (ValidateUtils.isEmptyList(doList)) {
return new HashSet<>();
}
return doList
.stream()
.filter(elem -> elem.getClientId().equals(clientId) && clusterIdSet.contains(elem.getClusterId()))
.map(item -> item.getTopicName())
.collect(Collectors.toSet());
}
@Override
public List<TopicConnection> getByTopicName(Long clusterId,
String topicName,
......@@ -102,6 +169,36 @@ public class TopicConnectionServiceImpl implements TopicConnectionService {
return getByTopicName(null, doList);
}
@Override
public Result<List<TopicConnectionDO>> getByClusterAndAppId(Long firstClusterId, Long secondClusterId, String appId, Date startTime, Date endTime) {
List<TopicConnectionDO> doList = new ArrayList<>();
try {
if (firstClusterId != null) {
doList.addAll(topicConnectionDao.getByClusterAndAppId(firstClusterId, appId, startTime, endTime));
}
} catch (Exception e) {
LOGGER.error("get topic connections failed, firstClusterId:{} appId:{}.", firstClusterId, appId, e);
return Result.buildFromRSAndMsg(ResultStatus.MYSQL_ERROR, e.getMessage());
}
try {
if (secondClusterId != null) {
doList.addAll(topicConnectionDao.getByClusterAndAppId(secondClusterId, appId, startTime, endTime));
}
} catch (Exception e) {
LOGGER.error("get topic connections failed, secondClusterId:{} appId:{}.", secondClusterId, appId, e);
return Result.buildFromRSAndMsg(ResultStatus.MYSQL_ERROR, e.getMessage());
}
if (ValidateUtils.isEmptyList(doList)) {
return Result.buildSuc(new ArrayList<>());
}
return Result.buildSuc(doList);
}
@Override
public boolean isExistConnection(Long clusterId,
String topicName,
......@@ -210,6 +307,10 @@ public class TopicConnectionServiceImpl implements TopicConnectionService {
LOGGER.error("get hostname failed. ip:{}.", connectionDO.getIp(), e);
}
dto.setHostname(hostName.replace(KafkaConstant.BROKER_HOST_NAME_SUFFIX, ""));
dto.setClientId(connectionDO.getClientId());
dto.setRealConnectTime(connectionDO.getRealConnectTime());
dto.setCreateTime(connectionDO.getCreateTime().getTime());
return dto;
}
......
......@@ -5,6 +5,8 @@ import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ha.HaASRelationDO;
import java.util.List;
import java.util.Map;
import java.util.Set;
public interface HaASRelationService {
Result<Void> replaceTopicRelationsToDB(Long standbyClusterPhyId, List<HaASRelationDO> topicRelationDOList);
......@@ -53,6 +55,8 @@ public interface HaASRelationService {
*/
List<HaASRelationDO> listAllHAFromDB(Long firstClusterPhyId, HaResTypeEnum resTypeEnum);
Map<String, Set<String>> listAllHAClient(Long firstClusterPhyId, Set<String> kafkaUserSet);
/**
 * Get active/standby relations
*/
......
......@@ -4,6 +4,7 @@ package com.xiaojukeji.kafka.manager.service.service.ha;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ao.ha.job.HaJobDetail;
import com.xiaojukeji.kafka.manager.common.entity.ao.ha.job.HaSubJobExtendData;
import com.xiaojukeji.kafka.manager.common.entity.dto.ha.KafkaUserAndClientDTO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ha.HaASSwitchJobDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ha.HaASSwitchSubJobDO;
......@@ -14,7 +15,11 @@ public interface HaASSwitchJobService {
/**
 * Create a job
*/
Result<Long> createJob(Long activeClusterPhyId, Long standbyClusterPhyId, List<String> topicNameList, String operator);
Result<Long> createJob(Long activeClusterPhyId,
Long standbyClusterPhyId,
List<String> topicNameList,
List<KafkaUserAndClientDTO> kafkaUserAndClientList,
String operator);
/**
 * Update job status
......
......@@ -6,6 +6,8 @@ import com.xiaojukeji.kafka.manager.common.bizenum.ha.HaStatusEnum;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ha.HaASRelationDO;
import com.xiaojukeji.kafka.manager.common.utils.HAUtils;
import com.xiaojukeji.kafka.manager.common.utils.Tuple;
import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
import com.xiaojukeji.kafka.manager.dao.ha.HaASRelationDao;
import com.xiaojukeji.kafka.manager.service.service.ha.HaASRelationService;
......@@ -14,9 +16,7 @@ import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
......@@ -177,6 +177,34 @@ public class HaASRelationServiceImpl implements HaASRelationService {
return doList;
}
@Override
public Map<String, Set<String>> listAllHAClient(Long firstClusterPhyId, Set<String> kafkaUserSet) {
LambdaQueryWrapper<HaASRelationDO> lambdaQueryWrapper = new LambdaQueryWrapper<>();
lambdaQueryWrapper.eq(HaASRelationDO::getResType, HaResTypeEnum.KAFKA_USER_AND_CLIENT.getCode());
lambdaQueryWrapper.and(lambda ->
lambda.eq(HaASRelationDO::getActiveClusterPhyId, firstClusterPhyId).or().eq(HaASRelationDO::getStandbyClusterPhyId, firstClusterPhyId)
);
// query the HA relation list
List<HaASRelationDO> doList = haASRelationDao.selectList(lambdaQueryWrapper);
if (ValidateUtils.isNull(doList)) {
return new HashMap<>();
}
Map<String, Set<String>> haClientMap = new HashMap<>();
doList.forEach(elem -> {
Tuple<String, String> data = HAUtils.splitKafkaUserAndClient(elem.getActiveResName());
if (data == null || !kafkaUserSet.contains(data.getV1())) {
return;
}
haClientMap.putIfAbsent(data.getV1(), new HashSet<>());
haClientMap.get(data.getV1()).add(data.getV2());
});
return haClientMap;
}
@Override
public List<HaASRelationDO> listAllHAFromDB(Long firstClusterPhyId, Long secondClusterPhyId, HaResTypeEnum resTypeEnum) {
// query the HA relation list
......
......@@ -6,6 +6,7 @@ import com.xiaojukeji.kafka.manager.common.bizenum.ha.job.HaJobStatusEnum;
import com.xiaojukeji.kafka.manager.common.entity.Result;
import com.xiaojukeji.kafka.manager.common.entity.ResultStatus;
import com.xiaojukeji.kafka.manager.common.entity.ao.ha.job.*;
import com.xiaojukeji.kafka.manager.common.entity.dto.ha.KafkaUserAndClientDTO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ha.HaASSwitchJobDO;
import com.xiaojukeji.kafka.manager.common.entity.pojo.ha.HaASSwitchSubJobDO;
import com.xiaojukeji.kafka.manager.common.utils.ConvertUtil;
......@@ -35,10 +36,22 @@ public class HaASSwitchJobServiceImpl implements HaASSwitchJobService {
@Override
@Transactional
public Result<Long> createJob(Long activeClusterPhyId, Long standbyClusterPhyId, List<String> topicNameList, String operator) {
public Result<Long> createJob(Long activeClusterPhyId,
Long standbyClusterPhyId,
List<String> topicNameList,
List<KafkaUserAndClientDTO> kafkaUserAndClientList,
String operator) {
try {
// parent job
HaASSwitchJobDO jobDO = new HaASSwitchJobDO(activeClusterPhyId, standbyClusterPhyId, HaJobStatusEnum.RUNNING.getStatus(), operator);
HaASSwitchJobDO jobDO = new HaASSwitchJobDO(
activeClusterPhyId,
standbyClusterPhyId,
ValidateUtils.isEmptyList(kafkaUserAndClientList)? 0: 1,
kafkaUserAndClientList,
HaJobStatusEnum.RUNNING.getStatus(),
operator
);
haASSwitchJobDao.insert(jobDO);
// sub jobs
......
......@@ -6,10 +6,15 @@ import kafka.admin.AdminUtils;
import kafka.admin.AdminUtils$;
import kafka.server.ConfigType;
import kafka.utils.ZkUtils;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.security.JaasUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Properties;
......@@ -40,7 +45,7 @@ public class HaKafkaUserCommands {
props.putAll(modifiedProps);
// modify the config; changeUserOrUserClientIdConfig is not used here because it performs argument validation
AdminUtils$.MODULE$.kafka$admin$AdminUtils$$changeEntityConfig(zkUtils, ConfigType.User(), kafkaUser, props);
AdminUtils$.MODULE$.kafka$admin$AdminUtils$$changeEntityConfig(zkUtils, ConfigType.User(), sanitize(kafkaUser), props);
} catch (Exception e) {
LOGGER.error("method=changeHaUserConfig||zookeeper={}||kafkaUser={}||modifiedProps={}||errMsg=exception", zookeeper, kafkaUser, modifiedProps, e);
return false;
......@@ -73,7 +78,7 @@ public class HaKafkaUserCommands {
}
// modify the config; changeUserOrUserClientIdConfig is not used here because it performs argument validation
AdminUtils$.MODULE$.kafka$admin$AdminUtils$$changeEntityConfig(zkUtils, ConfigType.User(), kafkaUser, presentProps);
AdminUtils$.MODULE$.kafka$admin$AdminUtils$$changeEntityConfig(zkUtils, ConfigType.User(), sanitize(kafkaUser), presentProps);
return true;
}catch (Exception e){
......@@ -90,4 +95,37 @@ public class HaKafkaUserCommands {
private HaKafkaUserCommands() {
}
private static String sanitize(String name) {
String encoded = "";
try {
encoded = URLEncoder.encode(name, StandardCharsets.UTF_8.name());
StringBuilder builder = new StringBuilder();
for (int i = 0; i < encoded.length(); i++) {
char c = encoded.charAt(i);
if (c == '*') { // Metric ObjectName treats * as pattern
builder.append("%2A");
} else if (c == '+') { // Space URL-encoded as +, replace with percent encoding
builder.append("%20");
} else {
builder.append(c);
}
}
return builder.toString();
} catch (UnsupportedEncodingException e) {
throw new KafkaException(e);
}
}
/**
* Desanitize name that was URL-encoded using {@link #sanitize(String)}. This
* is used to obtain the desanitized version of node names in ZooKeeper.
*/
private static String desanitize(String name) {
try {
return URLDecoder.decode(name, StandardCharsets.UTF_8.name());
} catch (UnsupportedEncodingException e) {
throw new KafkaException(e);
}
}
}
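sanitize() mirrors the ZooKeeper-safe name encoding Kafka applies to config entity names; an illustrative transformation (the input value is made up):
// URLEncoder.encode("ha user*", "UTF-8")  -> "ha+user*"
// then '+' -> "%20" and '*' -> "%2A"      -> sanitize("ha user*") == "ha%20user%2A"
// desanitize() reverses this via URLDecoder.decode.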
......@@ -16,4 +16,6 @@ public interface TopicConnectionDao {
List<TopicConnectionDO> getByTopicName(Long clusterId, String topicName, Date startTime, Date endTime);
List<TopicConnectionDO> getByAppId(String appId, Date startTime, Date endTime);
List<TopicConnectionDO> getByClusterAndAppId(Long clusterId, String appId, Date startTime, Date endTime);
}
\ No newline at end of file
......@@ -58,4 +58,14 @@ public class TopicConnectionDaoImpl implements TopicConnectionDao {
params.put("endTime", endTime);
return sqlSession.selectList("TopicConnectionDao.getByAppId", params);
}
@Override
public List<TopicConnectionDO> getByClusterAndAppId(Long clusterId, String appId, Date startTime, Date endTime) {
Map<String, Object> params = new HashMap<>(4);
params.put("appId", appId);
params.put("clusterId", clusterId);
params.put("startTime", startTime);
params.put("endTime", endTime);
return sqlSession.selectList("TopicConnectionDao.getByClusterAndAppId", params);
}
}
\ No newline at end of file
......@@ -10,6 +10,8 @@
<result column="active_cluster_phy_id" property="activeClusterPhyId" />
<result column="standby_cluster_phy_id" property="standbyClusterPhyId" />
<result column="job_status" property="jobStatus" />
<result column="type" property="type" />
<result column="extend_data" property="extendData" />
<result column="operator" property="operator" />
</resultMap>
......@@ -18,9 +20,9 @@
useGeneratedKeys="true"
keyProperty="id">
INSERT INTO ks_km_physical_cluster
(active_cluster_phy_id, standby_cluster_phy_id, job_status, operator)
(active_cluster_phy_id, standby_cluster_phy_id, job_status, `type`, extend_data, operator)
VALUES
(#{activeClusterPhyId}, #{standbyClusterPhyId}, #{jobStatus}, #{operator})
(#{activeClusterPhyId}, #{standbyClusterPhyId}, #{jobStatus}, #{type}, #{extendData}, #{operator})
</insert>
<select id="listAllLatest" resultMap="HaASSwitchJobMap">
......
......@@ -10,31 +10,27 @@
<result property="appId" column="app_id"/>
<result property="ip" column="ip"/>
<result property="clientVersion" column="client_version"/>
<result property="clientId" column="client_id"/>
<result property="realConnectTime" column="real_connect_time"/>
<result property="createTime" column="create_time"/>
</resultMap>
<insert id="batchReplace" parameterType="java.util.List">
REPLACE INTO topic_connections (
cluster_id,
topic_name,
`type`,
app_id,
ip,
client_version,
create_time
)
VALUES
<foreach collection="list" item="item" index="index" separator=",">
(
#{item.clusterId},
#{item.topicName},
#{item.type},
#{item.appId},
#{item.ip},
#{item.clientVersion},
#{item.createTime}
)
insert into topic_connections (cluster_id, topic_name, `type`, app_id, ip, client_version, client_id, real_connect_time, create_time)
values
<foreach collection="list" item="item" separator=",">
(#{item.clusterId}, #{item.topicName}, #{item.type}, #{item.appId}, #{item.ip}, #{item.clientVersion}, #{item.clientId}, #{item.realConnectTime}, #{item.createTime})
</foreach>
on duplicate key update
real_connect_time = IF(real_connect_time > VALUES(real_connect_time), real_connect_time, VALUES(real_connect_time)),
cluster_id = VALUES(cluster_id),
topic_name = VALUES(topic_name),
`type` = VALUES(`type`),
app_id = VALUES(app_id),
ip = VALUES(ip),
client_version = VALUES(client_version),
client_id = VALUES(client_id),
create_time = VALUES(create_time)
</insert>
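    <!-- Note: on duplicate key, real_connect_time keeps the larger of the stored and incoming values; all other columns take the incoming row. -->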
<select id="getByTopicName" parameterType="java.util.Map" resultMap="TopicConnectionMap">
......@@ -53,4 +49,14 @@
AND create_time >= #{startTime} AND #{endTime} >= create_time
]]>
</select>
<select id="getByClusterAndAppId" parameterType="java.util.Map" resultMap="TopicConnectionMap">
<![CDATA[
SELECT * FROM topic_connections
WHERE app_id = #{appId}
AND cluster_id = #{clusterId}
AND create_time >= #{startTime}
AND #{endTime} >= create_time
]]>
</select>
</mapper>
\ No newline at end of file
package com.xiaojukeji.kafka.manager.task.dispatch.op;
package com.xiaojukeji.kafka.manager.task.dispatch.ha;
import com.xiaojukeji.kafka.manager.service.biz.job.HaASSwitchJobManager;
import com.xiaojukeji.kafka.manager.service.service.ha.HaASSwitchJobService;
......
//package com.xiaojukeji.kafka.manager.task.dispatch.ha;
//
//import com.xiaojukeji.kafka.manager.common.bizenum.ha.HaResTypeEnum;
//import com.xiaojukeji.kafka.manager.common.bizenum.ha.job.HaJobStatusEnum;
//import com.xiaojukeji.kafka.manager.common.constant.ConfigConstant;
//import com.xiaojukeji.kafka.manager.common.constant.Constant;
//import com.xiaojukeji.kafka.manager.common.entity.dto.ha.ASSwitchJobDTO;
//import com.xiaojukeji.kafka.manager.common.entity.pojo.ClusterDO;
//import com.xiaojukeji.kafka.manager.common.entity.pojo.ha.HaASRelationDO;
//import com.xiaojukeji.kafka.manager.common.entity.pojo.ha.HaASSwitchJobDO;
//import com.xiaojukeji.kafka.manager.common.utils.HAUtils;
//import com.xiaojukeji.kafka.manager.common.utils.Tuple;
//import com.xiaojukeji.kafka.manager.common.utils.ValidateUtils;
//import com.xiaojukeji.kafka.manager.service.biz.job.HaASSwitchJobManager;
//import com.xiaojukeji.kafka.manager.service.service.ClusterService;
//import com.xiaojukeji.kafka.manager.service.service.ConfigService;
//import com.xiaojukeji.kafka.manager.service.service.gateway.TopicConnectionService;
//import com.xiaojukeji.kafka.manager.service.service.ha.HaASRelationService;
//import com.xiaojukeji.kafka.manager.service.service.ha.HaASSwitchJobService;
//import com.xiaojukeji.kafka.manager.task.component.AbstractScheduledTask;
//import com.xiaojukeji.kafka.manager.task.component.CustomScheduled;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.stereotype.Component;
//
//import java.util.*;
//import java.util.function.Function;
//import java.util.stream.Collectors;
//
///**
// * Active/standby switch task
// */
//@Component
//@CustomScheduled(name = "HandleHaClientNewTopic",
// cron = "0 0/2 * * * ?",
// threadNum = 1,
// description = "处理HAClient的新增Topic")
//public class HandleHaClientNewTopic extends AbstractScheduledTask<ClusterDO> {
// @Autowired
// private ClusterService clusterService;
//
// @Autowired
// private HaASRelationService haASRelationService;
//
// @Autowired
// private TopicConnectionService topicConnectionService;
//
// @Autowired
// private HaASSwitchJobManager haASSwitchJobManager;
//
// @Autowired
// private HaASSwitchJobService haASSwitchJobService;
//
// @Autowired
// private ConfigService configService;
//
// @Override
// public List<ClusterDO> listAllTasks() {
// return clusterService.list();
// }
//
// @Override
// public void processTask(ClusterDO clusterDO) {
// if (this.existRunningTask(clusterDO.getId())) {
// // a running job already exists
// return;
// }
//
// // fetch clients that already have HA established
// List<HaASRelationDO> doList = haASRelationService.listAllHAFromDB(clusterDO.getId(), HaResTypeEnum.KAFKA_USER_AND_CLIENT);
//
// // fetch topics that already have HA established
// Map<String, HaASRelationDO> nameMap = haASRelationService.listAllHAFromDB(clusterDO.getId(), HaResTypeEnum.TOPIC)
// .stream()
// .collect(Collectors.toMap(HaASRelationDO::getActiveResName, Function.identity()));
//
// // new active/standby clusters & topics that need switching
// Long newActiveClusterId = null;
// Long newStandbyClusterId = null;
// Map<String, HaASRelationDO> needSwitchTopicMap = new HashMap<>();
//
// // find the topic list associated with each clientId
// for (HaASRelationDO asRelationDO: doList) {
// if (newActiveClusterId != null && !newActiveClusterId.equals(asRelationDO.getActiveClusterPhyId())) {
// // a single switch can only have one active cluster ID; ignore entries that disagree
// continue;
// }
//
// Tuple<String, String> userAndClient = HAUtils.splitKafkaUserAndClient(asRelationDO.getActiveResName());
// if (userAndClient == null || ValidateUtils.isBlank(userAndClient.getV2())) {
// continue;
// }
//
// // fetch the topics for this client
// Set<String> topicNameSet = topicConnectionService.getKafkaUserAndClientIdTopicNames(
// new HashSet<>(Arrays.asList(asRelationDO.getActiveClusterPhyId(), asRelationDO.getStandbyClusterPhyId())),
// userAndClient.getV1(),
// userAndClient.getV2(),
// new Date(System.currentTimeMillis() - configService.getLongValue(ConfigConstant.HA_CONNECTION_ACTIVE_TIME_UNIT_MIN, 20L) * 60L * 1000L),
// new Date()
// );
//
// // iterate the topics and check whether the active/standby relation matches expectations
// for (String topicName: topicNameSet) {
// HaASRelationDO topicRelation = nameMap.get(topicName);
// if (topicRelation == null
// || asRelationDO.getActiveClusterPhyId().equals(topicRelation.getActiveClusterPhyId())) {
// // relation is null: the topic has no HA established, skip it
// // HA exists and the topic's active/standby info matches this clientId, so no switch is needed
// continue;
// }
//
// // active/standby info differs: perform a switch
// if (needSwitchTopicMap.isEmpty()) {
// newActiveClusterId = asRelationDO.getActiveClusterPhyId();
// newStandbyClusterId = asRelationDO.getStandbyClusterPhyId();
// }
//
// needSwitchTopicMap.put(topicName, topicRelation);
// }
// }
//
// if (this.existRunningTask(clusterDO.getId())) {
// // re-check whether a running job exists
// return;
// }
//
// // create the job
// haASSwitchJobManager.createJob(
// this.convert2ASSwitchJobDTO(newActiveClusterId, newStandbyClusterId, new ArrayList<>(needSwitchTopicMap.values())),
// Constant.DEFAULT_USER_NAME
// );
// }
//
// private ASSwitchJobDTO convert2ASSwitchJobDTO(Long newActiveClusterId, Long newStandbyClusterId, List<HaASRelationDO> doList) {
// ASSwitchJobDTO dto = new ASSwitchJobDTO();
// dto.setAll(false);
// dto.setMustContainAllKafkaUserTopics(false);
// dto.setActiveClusterPhyId(newActiveClusterId);
// dto.setStandbyClusterPhyId(newStandbyClusterId);
// dto.setTopicNameList(doList.stream().map(elem -> elem.getActiveResName()).collect(Collectors.toList()));
// dto.setKafkaUserAndClientIdList(new ArrayList<>()); // clientId or kafkaUser has already been switched, so the background task skips this step
//
// return dto;
// }
//
// private boolean existRunningTask(Long clusterPhyId) {
// Map<Long/*集群ID*/, HaASSwitchJobDO> jobMap = haASSwitchJobService.listClusterLatestJobs();
//
// HaASSwitchJobDO jobDO = jobMap.remove(clusterPhyId);
// if (jobDO == null || !HaJobStatusEnum.isRunning(jobDO.getJobStatus())) {
// return false;
// }
//
// return true;
// }
//}
......@@ -16,6 +16,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import java.util.HashSet;
import java.util.List;
/**
......@@ -52,6 +53,10 @@ public class RdAppController {
@PostMapping(value = "apps/relate-topics")
@ResponseBody
public Result<List<AppRelateTopicsVO>> appRelateTopics(@Validated @RequestBody AppRelateTopicsDTO dto) {
return haAppManager.appRelateTopics(dto.getClusterPhyId(), dto.getFilterTopicNameList());
if (dto.getUseKafkaUserAndClientId() != null && dto.getUseKafkaUserAndClientId()) {
return haAppManager.appAndClientRelateTopics(dto.getClusterPhyId(), new HashSet<>(dto.getFilterTopicNameList()));
}
return haAppManager.appRelateTopics(dto.getHa(), dto.getClusterPhyId(), dto.getFilterTopicNameList());
}
}
\ No newline at end of file
......@@ -16,7 +16,7 @@
</parent>
<properties>
<kafka-manager.revision>2.8.0_e</kafka-manager.revision>
<kafka-manager.revision>2.8.1_e</kafka-manager.revision>
<spring.boot.version>2.1.18.RELEASE</spring.boot.version>
<swagger2.version>2.9.2</swagger2.version>
<swagger.version>1.5.21</swagger.version>
......