Commit dfba63de authored by clevertension

add h2 support phase 3

Parent d4333ff7
@@ -13,7 +13,7 @@ import java.util.HashMap;
import java.util.Map;
/**
- * @author pengys5
+ * @author pengys5, clevertension
*/
public class CpuMetricH2DAO extends H2DAO implements ICpuMetricDAO, IPersistenceDAO<H2SqlEntity, H2SqlEntity> {
private final Logger logger = LoggerFactory.getLogger(CpuMetricH2DAO.class);
......
@@ -11,7 +11,7 @@ import java.util.HashMap;
import java.util.Map;
/**
- * @author pengys5
+ * @author pengys5, clevertension
*/
public class GCMetricH2DAO extends H2DAO implements IGCMetricDAO, IPersistenceDAO<H2SqlEntity, H2SqlEntity> {
@Override public Data get(String id, DataDefine dataDefine) {
......
@@ -18,7 +18,7 @@ import java.text.MessageFormat;
import java.util.*;
/**
- * @author pengys5
+ * @author pengys5, clevertension
*/
public class InstanceHeartBeatH2DAO extends H2DAO implements IInstanceHeartBeatDAO, IPersistenceDAO<H2SqlEntity, H2SqlEntity> {
private final Logger logger = LoggerFactory.getLogger(InstanceHeartBeatH2DAO.class);
......
@@ -11,7 +11,7 @@ import java.util.HashMap;
import java.util.Map;
/**
- * @author pengys5
+ * @author pengys5, clevertension
*/
public class MemoryMetricH2DAO extends H2DAO implements IMemoryMetricDAO, IPersistenceDAO<H2SqlEntity, H2SqlEntity> {
@Override public Data get(String id, DataDefine dataDefine) {
......
@@ -11,7 +11,7 @@ import java.util.HashMap;
import java.util.Map;
/**
- * @author pengys5
+ * @author pengys5, clevertension
*/
public class MemoryPoolMetricH2DAO extends H2DAO implements IMemoryPoolMetricDAO, IPersistenceDAO<H2SqlEntity, H2SqlEntity> {
@Override public Data get(String id, DataDefine dataDefine) {
......
@@ -11,7 +11,7 @@ import org.slf4j.LoggerFactory;
import java.text.MessageFormat;
/**
- * @author pengys5
+ * @author pengys5, clevertension
*/
public class ApplicationH2DAO extends H2DAO implements IApplicationDAO {
private final Logger logger = LoggerFactory.getLogger(ApplicationH2DAO.class);
......
@@ -15,7 +15,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
- * @author pengys5
+ * @author pengys5, clevertension
*/
public class InstanceH2DAO extends H2DAO implements IInstanceDAO {
private final Logger logger = LoggerFactory.getLogger(InstanceH2DAO.class);
......
package org.skywalking.apm.collector.agentregister.worker.servicename.dao;
import org.skywalking.apm.collector.client.h2.H2Client;
import org.skywalking.apm.collector.client.h2.H2ClientException;
import org.skywalking.apm.collector.core.util.Const;
import org.skywalking.apm.collector.storage.define.register.ServiceNameDataDefine;
import org.skywalking.apm.collector.storage.define.register.ServiceNameTable;
import org.skywalking.apm.collector.storage.h2.dao.H2DAO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;
/**
- * @author pengys5
+ * @author pengys5, clevertension
*/
public class ServiceNameH2DAO extends H2DAO implements IServiceNameDAO {
private final Logger logger = LoggerFactory.getLogger(ServiceNameH2DAO.class);
private static final String GET_SERVICE_ID_SQL = "select {0} from {1} where {2} = ? and {3} = ? limit 1";
private static final String GET_SERVICE_NAME_SQL = "select {0} from {1} where {2} = ?";
- @Override public int getServiceId(int applicationId, String serviceName) {
+ @Override
+ public int getServiceId(int applicationId, String serviceName) {
H2Client client = getClient();
String sql = MessageFormat.format(GET_SERVICE_ID_SQL, ServiceNameTable.COLUMN_SERVICE_ID, ServiceNameTable.TABLE,
ServiceNameTable.COLUMN_APPLICATION_ID, ServiceNameTable.COLUMN_SERVICE_NAME);
Object[] params = new Object[]{applicationId, serviceName};
try (ResultSet rs = client.executeQuery(sql, params)) {
if (rs.next()) {
return rs.getInt(ServiceNameTable.COLUMN_SERVICE_ID);
}
} catch (SQLException | H2ClientException e) {
logger.error(e.getMessage(), e);
}
return 0;
}
- @Override public int getMaxServiceId() {
- return 0;
+ @Override
+ public int getMaxServiceId() {
+ return getMaxId(ServiceNameTable.TABLE, ServiceNameTable.COLUMN_SERVICE_ID);
}
- @Override public int getMinServiceId() {
- return 0;
+ @Override
+ public int getMinServiceId() {
+ return getMinId(ServiceNameTable.TABLE, ServiceNameTable.COLUMN_SERVICE_ID);
}
- @Override public String getServiceName(int serviceId) {
- return null;
+ @Override
+ public String getServiceName(int serviceId) {
H2Client client = getClient();
String sql = MessageFormat.format(GET_SERVICE_NAME_SQL, ServiceNameTable.COLUMN_SERVICE_NAME,
ServiceNameTable.TABLE, ServiceNameTable.COLUMN_SERVICE_ID);
Object[] params = new Object[]{serviceId};
try (ResultSet rs = client.executeQuery(sql, params)) {
if (rs.next()) {
return rs.getString(ServiceNameTable.COLUMN_SERVICE_NAME);
}
} catch (SQLException | H2ClientException e) {
logger.error(e.getMessage(), e);
}
return Const.EMPTY_STRING;
}
- @Override public void save(ServiceNameDataDefine.ServiceName serviceName) {
+ @Override
+ public void save(ServiceNameDataDefine.ServiceName serviceName) {
logger.debug("save service name register info, application id: {}, service name: {}", serviceName.getApplicationId(), serviceName.getServiceName());
H2Client client = getClient();
Map<String, Object> source = new HashMap<>();
source.put(ServiceNameTable.COLUMN_SERVICE_ID, serviceName.getServiceId());
source.put(ServiceNameTable.COLUMN_APPLICATION_ID, serviceName.getApplicationId());
source.put(ServiceNameTable.COLUMN_SERVICE_NAME, serviceName.getServiceName());
String sql = getBatchInsertSql(ServiceNameTable.TABLE, source.keySet());
Object[] params = source.values().toArray(new Object[0]);
try {
client.execute(sql, params);
} catch (H2ClientException e) {
logger.error(e.getMessage(), e);
}
}
}
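The save path above delegates SQL assembly to H2DAO.getBatchInsertSql, whose body is outside this diff. A minimal sketch of what such a helper presumably generates, assuming it takes the table name and an ordered set of column names (the literal strings below are illustrative stand-ins for the ServiceNameTable constants):

    import java.util.LinkedHashSet;
    import java.util.Set;

    // Hypothetical reconstruction of the helper used above; the real one
    // lives in org.skywalking.apm.collector.storage.h2.dao.H2DAO.
    public final class BatchInsertSqlSketch {
        static String getBatchInsertSql(String tableName, Set<String> columnNames) {
            StringBuilder sb = new StringBuilder("insert into ").append(tableName).append("(");
            columnNames.forEach(columnName -> sb.append(columnName).append(","));
            sb.delete(sb.length() - 1, sb.length()); // drop the trailing comma
            sb.append(") values(");
            columnNames.forEach(columnName -> sb.append("?,"));
            sb.delete(sb.length() - 1, sb.length());
            sb.append(")");
            return sb.toString();
        }

        public static void main(String[] args) {
            Set<String> columns = new LinkedHashSet<>();
            columns.add("service_id");
            columns.add("application_id");
            columns.add("service_name");
            // prints: insert into service_name(service_id,application_id,service_name) values(?,?,?)
            System.out.println(getBatchInsertSql("service_name", columns));
        }
    }

A LinkedHashSet keeps iteration order stable, which matters because source.values() must line up with the generated placeholders.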
@@ -4,7 +4,6 @@ import org.skywalking.apm.collector.core.framework.UnexpectedException;
import org.skywalking.apm.collector.core.stream.Data;
import org.skywalking.apm.collector.storage.define.DataDefine;
import org.skywalking.apm.collector.storage.define.global.GlobalTraceTable;
-import org.skywalking.apm.collector.storage.define.node.NodeComponentTable;
import org.skywalking.apm.collector.storage.h2.dao.H2DAO;
import org.skywalking.apm.collector.storage.h2.define.H2SqlEntity;
import org.skywalking.apm.collector.stream.worker.impl.dao.IPersistenceDAO;
@@ -15,7 +14,7 @@ import java.util.HashMap;
import java.util.Map;
/**
- * @author pengys5
+ * @author pengys5, clevertension
*/
public class GlobalTraceH2DAO extends H2DAO implements IGlobalTraceDAO, IPersistenceDAO<H2SqlEntity, H2SqlEntity> {
private final Logger logger = LoggerFactory.getLogger(GlobalTraceH2DAO.class);
@@ -36,7 +35,7 @@ public class GlobalTraceH2DAO extends H2DAO implements IGlobalTraceDAO, IPersist
source.put(GlobalTraceTable.COLUMN_TIME_BUCKET, data.getDataLong(0));
logger.debug("global trace source: {}", source.toString());
- String sql = getBatchInsertSql(NodeComponentTable.TABLE, source.keySet());
+ String sql = getBatchInsertSql(GlobalTraceTable.TABLE, source.keySet());
entity.setSql(sql);
entity.setParams(source.values().toArray(new Object[0]));
return entity;
......
@@ -17,7 +17,7 @@ import java.text.MessageFormat;
import java.util.*;
/**
- * @author pengys5
+ * @author pengys5, clevertension
*/
public class InstPerformanceH2DAO extends H2DAO implements IInstPerformanceDAO, IPersistenceDAO<H2SqlEntity, H2SqlEntity> {
private final Logger logger = LoggerFactory.getLogger(InstPerformanceH2DAO.class);
......
@@ -18,7 +18,7 @@ import java.text.MessageFormat;
import java.util.*;
/**
- * @author pengys5
+ * @author pengys5, clevertension
*/
public class NodeComponentH2DAO extends H2DAO implements INodeComponentDAO, IPersistenceDAO<H2SqlEntity, H2SqlEntity> {
private final Logger logger = LoggerFactory.getLogger(NodeComponentH2DAO.class);
......
@@ -18,7 +18,7 @@ import java.text.MessageFormat;
import java.util.*;
/**
- * @author pengys5
+ * @author pengys5, clevertension
*/
public class NodeMappingH2DAO extends H2DAO implements INodeMappingDAO, IPersistenceDAO<H2SqlEntity, H2SqlEntity> {
private final Logger logger = LoggerFactory.getLogger(NodeMappingH2DAO.class);
......
@@ -4,7 +4,6 @@ import org.skywalking.apm.collector.client.h2.H2Client;
import org.skywalking.apm.collector.client.h2.H2ClientException;
import org.skywalking.apm.collector.core.stream.Data;
import org.skywalking.apm.collector.storage.define.DataDefine;
-import org.skywalking.apm.collector.storage.define.node.NodeMappingTable;
import org.skywalking.apm.collector.storage.define.noderef.NodeReferenceTable;
import org.skywalking.apm.collector.storage.h2.dao.H2DAO;
import org.skywalking.apm.collector.storage.h2.define.H2SqlEntity;
@@ -21,7 +20,7 @@ import java.util.List;
import java.util.Map;
/**
- * @author pengys5
+ * @author pengys5, clevertension
*/
public class NodeReferenceH2DAO extends H2DAO implements INodeReferenceDAO, IPersistenceDAO<H2SqlEntity, H2SqlEntity> {
private final Logger logger = LoggerFactory.getLogger(NodeReferenceH2DAO.class);
@@ -64,7 +63,7 @@ public class NodeReferenceH2DAO extends H2DAO implements INodeReferenceDAO, IPer
source.put(NodeReferenceTable.COLUMN_SUMMARY, data.getDataInteger(6));
source.put(NodeReferenceTable.COLUMN_ERROR, data.getDataInteger(7));
source.put(NodeReferenceTable.COLUMN_TIME_BUCKET, data.getDataLong(0));
- String sql = getBatchInsertSql(NodeMappingTable.TABLE, source.keySet());
+ String sql = getBatchInsertSql(NodeReferenceTable.TABLE, source.keySet());
entity.setSql(sql);
entity.setParams(source.values().toArray(new Object[0]));
@@ -84,7 +83,7 @@ public class NodeReferenceH2DAO extends H2DAO implements INodeReferenceDAO, IPer
source.put(NodeReferenceTable.COLUMN_ERROR, data.getDataInteger(7));
source.put(NodeReferenceTable.COLUMN_TIME_BUCKET, data.getDataLong(0));
String id = data.getDataString(0);
- String sql = getBatchUpdateSql(NodeMappingTable.TABLE, source.keySet(), "id");
+ String sql = getBatchUpdateSql(NodeReferenceTable.TABLE, source.keySet(), "id");
entity.setSql(sql);
List<Object> values = new ArrayList<>(source.values());
values.add(id);
......
@@ -13,7 +13,7 @@ import java.util.HashMap;
import java.util.Map;
/**
- * @author pengys5
+ * @author pengys5, clevertension
*/
public class SegmentCostH2DAO extends H2DAO implements ISegmentCostDAO, IPersistenceDAO<H2SqlEntity, H2SqlEntity> {
private final Logger logger = LoggerFactory.getLogger(SegmentCostH2DAO.class);
......
@@ -15,7 +15,7 @@ import java.util.HashMap;
import java.util.Map;
/**
- * @author pengys5
+ * @author pengys5, clevertension
*/
public class SegmentH2DAO extends H2DAO implements ISegmentDAO, IPersistenceDAO<H2SqlEntity, H2SqlEntity> {
private final Logger logger = LoggerFactory.getLogger(SegmentH2DAO.class);
@@ -26,7 +26,7 @@ public class SegmentH2DAO extends H2DAO implements ISegmentDAO, IPersistenceDAO<
Map<String, Object> source = new HashMap<>();
H2SqlEntity entity = new H2SqlEntity();
source.put("id", data.getDataString(0));
- source.put(SegmentTable.COLUMN_DATA_BINARY, new String(Base64.getEncoder().encode(data.getDataBytes(0))));
+ source.put(SegmentTable.COLUMN_DATA_BINARY, Base64.getEncoder().encode(data.getDataBytes(0)));
logger.debug("segment source: {}", source.toString());
String sql = getBatchInsertSql(SegmentTable.TABLE, source.keySet());
......
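The segment hunk above stops wrapping the Base64 output in a String and hands the encoded byte[] straight to the driver. Both representations round-trip; the difference is only whether a platform-default charset conversion happens along the way. A small self-contained illustration (the payload is made up):

    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    public final class SegmentBinaryExample {
        public static void main(String[] args) {
            byte[] segmentBytes = "serialized-segment".getBytes(StandardCharsets.UTF_8);

            // Old style: Base64 bytes converted to a String before storage.
            String asString = new String(Base64.getEncoder().encode(segmentBytes));
            // New style: the Base64 byte[] is stored as-is.
            byte[] asBytes = Base64.getEncoder().encode(segmentBytes);

            // Both decode back to the original payload.
            System.out.println(new String(Base64.getDecoder().decode(asString), StandardCharsets.UTF_8));
            System.out.println(new String(Base64.getDecoder().decode(asBytes), StandardCharsets.UTF_8));
        }
    }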
@@ -5,7 +5,6 @@ import org.skywalking.apm.collector.client.h2.H2ClientException;
import org.skywalking.apm.collector.core.stream.Data;
import org.skywalking.apm.collector.storage.define.DataDefine;
import org.skywalking.apm.collector.storage.define.service.ServiceEntryTable;
-import org.skywalking.apm.collector.storage.define.serviceref.ServiceReferenceTable;
import org.skywalking.apm.collector.storage.h2.dao.H2DAO;
import org.skywalking.apm.collector.storage.h2.define.H2SqlEntity;
import org.skywalking.apm.collector.stream.worker.impl.dao.IPersistenceDAO;
@@ -14,16 +13,19 @@ import org.slf4j.LoggerFactory;
import java.sql.ResultSet;
import java.sql.SQLException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
/**
- * @author pengys5
+ * @author pengys5, clevertension
*/
public class ServiceEntryH2DAO extends H2DAO implements IServiceEntryDAO, IPersistenceDAO<H2SqlEntity, H2SqlEntity> {
private final Logger logger = LoggerFactory.getLogger(ServiceEntryH2DAO.class);
@Override public Data get(String id, DataDefine dataDefine) {
H2Client client = getClient();
String sql = "select * from " + ServiceReferenceTable.TABLE + " where id = ?";
String sql = "select * from " + ServiceEntryTable.TABLE + " where id = ?";
Object[] params = new Object[] {id};
try (ResultSet rs = client.executeQuery(sql, params)) {
Data data = dataDefine.build(id);
......
@@ -17,7 +17,7 @@ import java.text.MessageFormat;
import java.util.*;
/**
- * @author pengys5
+ * @author pengys5, clevertension
*/
public class ServiceReferenceH2DAO extends H2DAO implements IServiceReferenceDAO, IPersistenceDAO<H2SqlEntity, H2SqlEntity> {
private final Logger logger = LoggerFactory.getLogger(ServiceReferenceH2DAO.class);
......
@@ -4,35 +4,36 @@ cluster:
    sessionTimeout: 100000
agent_server:
  jetty:
-    host: 127.0.0.1
+    host: localhost
    port: 10800
    context_path: /
agent_stream:
  grpc:
-    host: 127.0.0.1
+    host: localhost
    port: 11800
  jetty:
-    host: 127.0.0.1
+    host: localhost
    port: 12800
    context_path: /
ui:
  jetty:
-    host: 127.0.0.1
+    host: localhost
    port: 12800
    context_path: /
collector_inside:
  grpc:
-    host: 127.0.0.1
+    host: localhost
    port: 11800
-#storage:
-#  elasticsearch:
-#    cluster_name: CollectorDBCluster
-#    cluster_transport_sniffer: true
-#    cluster_nodes: localhost:9300
-#    index_shards_number: 2
-#    index_replicas_number: 0
storage:
-  h2:
-    url: jdbc:h2:~/collector
-    user_name: sa
-    password: sa
\ No newline at end of file
+  elasticsearch:
+    cluster_name: CollectorDBCluster
+    cluster_transport_sniffer: true
+    cluster_nodes: localhost:9300
+    index_shards_number: 2
+    index_replicas_number: 0
+# uncomment to enable h2 storage
+#storage:
+#  h2:
+#    url: jdbc:h2:~/collector
+#    user_name: sa
+#    password: sa
\ No newline at end of file
@@ -10,7 +10,7 @@ import org.skywalking.apm.collector.storage.define.AttributeType;
import org.skywalking.apm.collector.storage.define.DataDefine;
/**
- * @author pengys5
+ * @author pengys5, clevertension
*/
public class SegmentCostDataDefine extends DataDefine {
......
@@ -18,13 +18,13 @@ import java.util.Map;
 */
public class BatchH2DAO extends H2DAO implements IBatchDAO {
private final Logger logger = LoggerFactory.getLogger(BatchH2DAO.class);
- private final Map<String, PreparedStatement> batchSqls = new HashMap<>();
@Override
public void batchPersistence(List<?> batchCollection) {
if (batchCollection != null && batchCollection.size() > 0) {
- logger.info("the batch collection size is {}", batchCollection.size());
+ logger.info("the batch collection size is {}, current thread id {}", batchCollection.size(), Thread.currentThread().getId());
Connection conn = null;
+ final Map<String, PreparedStatement> batchSqls = new HashMap<>();
try {
conn = getClient().getConnection();
conn.setAutoCommit(false);
@@ -61,6 +61,7 @@ public class BatchH2DAO extends H2DAO implements IBatchDAO {
logger.error(e.getMessage(), e1);
}
}
+ batchSqls.clear();
}
}
......
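Moving batchSqls from a field into batchPersistence makes the map per-invocation state, so two threads flushing batches can no longer share, and corrupt, each other's PreparedStatement cache. A hedged sketch of the overall pattern, with made-up entity types since the collector's real H2SqlEntity handling is only partially visible in this diff:

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    final class BatchSketch {
        // Illustrative stand-in for the collector's SQL-carrying entity.
        static final class SqlEntity {
            final String sql;
            final Object[] params;
            SqlEntity(String sql, Object[] params) { this.sql = sql; this.params = params; }
        }

        static void batchPersistence(Connection conn, List<SqlEntity> batch) throws SQLException {
            // Local map: one PreparedStatement per distinct SQL string,
            // created per call so nothing is shared between threads.
            Map<String, PreparedStatement> batchSqls = new HashMap<>();
            conn.setAutoCommit(false);
            try {
                for (SqlEntity entity : batch) {
                    PreparedStatement ps = batchSqls.get(entity.sql);
                    if (ps == null) {
                        ps = conn.prepareStatement(entity.sql);
                        batchSqls.put(entity.sql, ps);
                    }
                    for (int i = 0; i < entity.params.length; i++) {
                        ps.setObject(i + 1, entity.params[i]);
                    }
                    ps.addBatch();
                }
                for (PreparedStatement ps : batchSqls.values()) {
                    ps.executeBatch();
                }
                conn.commit();
            } finally {
                for (PreparedStatement ps : batchSqls.values()) {
                    ps.close();
                }
                batchSqls.clear();
            }
        }
    }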
@@ -60,9 +60,9 @@ public abstract class H2DAO extends DAO<H2Client> {
public final String getBatchUpdateSql(String tableName, Set<String> columnNames, String whereClauseName) {
StringBuilder sb = new StringBuilder("update ");
- sb.append(tableName).append(" ");
+ sb.append(tableName).append(" set ");
columnNames.forEach((columnName) -> {
- sb.append("set ").append(columnName).append("=?,");
+ sb.append(columnName).append("=?,");
});
sb.delete(sb.length() - 1, sb.length());
sb.append(" where ").append(whereClauseName).append("=?");
......
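Before this hunk the builder emitted the set keyword once per column, producing update t set a=?,set b=?, which H2 rejects. A standalone copy of the corrected builder, runnable as a quick check (the table and column names are illustrative):

    import java.util.LinkedHashSet;
    import java.util.Set;

    public final class BatchUpdateSqlCheck {
        static String getBatchUpdateSql(String tableName, Set<String> columnNames, String whereClauseName) {
            StringBuilder sb = new StringBuilder("update ");
            sb.append(tableName).append(" set ");
            columnNames.forEach(columnName -> sb.append(columnName).append("=?,"));
            sb.delete(sb.length() - 1, sb.length()); // drop the trailing comma
            sb.append(" where ").append(whereClauseName).append("=?");
            return sb.toString();
        }

        public static void main(String[] args) {
            Set<String> columns = new LinkedHashSet<>();
            columns.add("summary");
            columns.add("error");
            // prints: update node_reference set summary=?,error=? where id=?
            System.out.println(getBatchUpdateSql("node_reference", columns, "id"));
        }
    }

The parameter array bound against this SQL must hold the column values in set order followed by the where-clause value, which is exactly how the NodeReferenceH2DAO update path appends id after source.values().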
@@ -8,6 +8,7 @@ import org.skywalking.apm.collector.core.util.StringUtils;
import org.skywalking.apm.collector.storage.define.node.NodeComponentTable;
import org.skywalking.apm.collector.storage.h2.dao.H2DAO;
import org.skywalking.apm.collector.ui.cache.ApplicationCache;
+import org.skywalking.apm.network.trace.component.ComponentsDefine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -20,7 +21,7 @@ import java.text.MessageFormat;
 */
public class NodeComponentH2DAO extends H2DAO implements INodeComponentDAO {
private final Logger logger = LoggerFactory.getLogger(NodeComponentH2DAO.class);
- private static final String AGGREGATE_COMPONENT_SQL = "select * from {3} where {4} >= ? and {4} <= ? group by {0}, {1}, {2} limit 100";
+ private static final String AGGREGATE_COMPONENT_SQL = "select {0}, {1}, {2} from {3} where {4} >= ? and {4} <= ? group by {0}, {1}, {2} limit 100";
@Override public JsonArray load(long startTime, long endTime) {
JsonArray nodeComponentArray = new JsonArray();
nodeComponentArray.addAll(aggregationComponent(startTime, endTime));
@@ -37,7 +38,7 @@ public class NodeComponentH2DAO extends H2DAO implements INodeComponentDAO {
try (ResultSet rs = client.executeQuery(sql, params)) {
while (rs.next()) {
int peerId = rs.getInt(NodeComponentTable.COLUMN_PEER_ID);
- String componentName = rs.getString(NodeComponentTable.COLUMN_COMPONENT_NAME);
+ int componentId = rs.getInt(NodeComponentTable.COLUMN_COMPONENT_ID);
+ String componentName = ComponentsDefine.getInstance().getComponentName(componentId);
if (peerId != 0) {
String peer = ApplicationCache.getForUI(peerId);
nodeComponentArray.add(buildNodeComponent(peer, componentName));
......
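The SQL template change matters because H2 enforces the standard rule that every selected column must appear in the group by clause or be aggregated, so the old select * form fails. What the new template expands to, using MessageFormat with illustrative names in place of the NodeComponentTable constants:

    import java.text.MessageFormat;

    public final class AggregateSqlCheck {
        private static final String AGGREGATE_COMPONENT_SQL =
            "select {0}, {1}, {2} from {3} where {4} >= ? and {4} <= ? group by {0}, {1}, {2} limit 100";

        public static void main(String[] args) {
            String sql = MessageFormat.format(AGGREGATE_COMPONENT_SQL,
                "component_id", "peer_id", "peer", "node_component", "time_bucket");
            // prints: select component_id, peer_id, peer from node_component
            //         where time_bucket >= ? and time_bucket <= ? group by component_id, peer_id, peer limit 100
            System.out.println(sql);
        }
    }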
@@ -20,7 +20,7 @@ import java.text.MessageFormat;
 */
public class NodeMappingH2DAO extends H2DAO implements INodeMappingDAO {
private final Logger logger = LoggerFactory.getLogger(NodeMappingH2DAO.class);
- private static final String NODE_MAPPING_SQL = "select * from {3} where {4} >= ? and {4} <= ? group by {0}, {1}, {2} limit 100";
+ private static final String NODE_MAPPING_SQL = "select {0}, {1}, {2} from {3} where {4} >= ? and {4} <= ? group by {0}, {1}, {2} limit 100";
@Override public JsonArray load(long startTime, long endTime) {
H2Client client = getClient();
JsonArray nodeMappingArray = new JsonArray();
......
@@ -106,27 +106,29 @@ public class SegmentCostH2DAO extends H2DAO implements ISegmentCostDAO {
int cnt = 0;
int num = from;
try (ResultSet rs = client.executeQuery(sql, p)) {
- JsonObject topSegmentJson = new JsonObject();
- topSegmentJson.addProperty("num", num);
- String segmentId = rs.getString(SegmentCostTable.COLUMN_SEGMENT_ID);
- topSegmentJson.addProperty(SegmentCostTable.COLUMN_SEGMENT_ID, segmentId);
- topSegmentJson.addProperty(SegmentCostTable.COLUMN_START_TIME, rs.getLong(SegmentCostTable.COLUMN_START_TIME));
- topSegmentJson.addProperty(SegmentCostTable.COLUMN_END_TIME, rs.getLong(SegmentCostTable.COLUMN_END_TIME));
+ while (rs.next()) {
+ JsonObject topSegmentJson = new JsonObject();
+ topSegmentJson.addProperty("num", num);
+ String segmentId = rs.getString(SegmentCostTable.COLUMN_SEGMENT_ID);
+ topSegmentJson.addProperty(SegmentCostTable.COLUMN_SEGMENT_ID, segmentId);
+ topSegmentJson.addProperty(SegmentCostTable.COLUMN_START_TIME, rs.getLong(SegmentCostTable.COLUMN_START_TIME));
+ topSegmentJson.addProperty(SegmentCostTable.COLUMN_END_TIME, rs.getLong(SegmentCostTable.COLUMN_END_TIME));
- IGlobalTraceDAO globalTraceDAO = (IGlobalTraceDAO) DAOContainer.INSTANCE.get(IGlobalTraceDAO.class.getName());
- List<String> globalTraces = globalTraceDAO.getGlobalTraceId(segmentId);
- if (CollectionUtils.isNotEmpty(globalTraces)) {
- topSegmentJson.addProperty(GlobalTraceTable.COLUMN_GLOBAL_TRACE_ID, globalTraces.get(0));
- }
+ IGlobalTraceDAO globalTraceDAO = (IGlobalTraceDAO) DAOContainer.INSTANCE.get(IGlobalTraceDAO.class.getName());
+ List<String> globalTraces = globalTraceDAO.getGlobalTraceId(segmentId);
+ if (CollectionUtils.isNotEmpty(globalTraces)) {
+ topSegmentJson.addProperty(GlobalTraceTable.COLUMN_GLOBAL_TRACE_ID, globalTraces.get(0));
+ }
- topSegmentJson.addProperty(SegmentCostTable.COLUMN_APPLICATION_ID, rs.getInt(SegmentCostTable.COLUMN_APPLICATION_ID));
- topSegmentJson.addProperty(SegmentCostTable.COLUMN_SERVICE_NAME, rs.getString(SegmentCostTable.COLUMN_SERVICE_NAME));
- topSegmentJson.addProperty(SegmentCostTable.COLUMN_COST, rs.getLong(SegmentCostTable.COLUMN_COST));
- topSegmentJson.addProperty(SegmentCostTable.COLUMN_IS_ERROR, rs.getBoolean(SegmentCostTable.COLUMN_IS_ERROR));
+ topSegmentJson.addProperty(SegmentCostTable.COLUMN_APPLICATION_ID, rs.getInt(SegmentCostTable.COLUMN_APPLICATION_ID));
+ topSegmentJson.addProperty(SegmentCostTable.COLUMN_SERVICE_NAME, rs.getString(SegmentCostTable.COLUMN_SERVICE_NAME));
+ topSegmentJson.addProperty(SegmentCostTable.COLUMN_COST, rs.getLong(SegmentCostTable.COLUMN_COST));
+ topSegmentJson.addProperty(SegmentCostTable.COLUMN_IS_ERROR, rs.getBoolean(SegmentCostTable.COLUMN_IS_ERROR));
- num++;
- topSegArray.add(topSegmentJson);
- cnt++;
+ num++;
+ topSegArray.add(topSegmentJson);
+ cnt++;
+ }
} catch (SQLException | H2ClientException e) {
logger.error(e.getMessage(), e);
}
......
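The hunk above moves the row reads inside the rs.next() loop. A JDBC cursor starts positioned before the first row, so reading columns before the first next() call fails, and reusing one JsonObject across iterations would make every array element reflect the last row. A minimal runnable illustration of the corrected pattern (assumes the H2 driver and Gson on the classpath; the table and columns are made up):

    import com.google.gson.JsonArray;
    import com.google.gson.JsonObject;
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;

    public final class ResultSetLoopExample {
        public static void main(String[] args) throws SQLException {
            // In-memory H2 database just for the demonstration.
            try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                 Statement st = conn.createStatement()) {
                st.execute("create table segment_cost(segment_id varchar, cost bigint)");
                st.execute("insert into segment_cost values ('seg-1', 120), ('seg-2', 340)");

                JsonArray topSegArray = new JsonArray();
                try (ResultSet rs = st.executeQuery("select segment_id, cost from segment_cost")) {
                    // Every column read happens only after rs.next() returns true.
                    while (rs.next()) {
                        JsonObject row = new JsonObject();
                        row.addProperty("segment_id", rs.getString("segment_id"));
                        row.addProperty("cost", rs.getLong("cost"));
                        topSegArray.add(row);
                    }
                }
                System.out.println(topSegArray); // [{"segment_id":"seg-1","cost":120},{"segment_id":"seg-2","cost":340}]
            }
        }
    }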