Commit a798bb64 authored by pengys5

Add CPU metric record persistence, but no tests yet.
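In short: JVMMetricsServiceHandler receives JVMMetrics over gRPC, buckets each sample's timestamp to second precision via TimeBucketUtils, wraps the CPU reading in a CpuMetricDataDefine.CpuMetric, and hands it to CpuMetricPersistenceWorker, which writes batches through CpuMetricEsDAO (Elasticsearch) or CpuMetricH2DAO (H2, still an empty stub).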

Parent 00c708fa
......@@ -4,6 +4,7 @@ import java.util.LinkedList;
import java.util.List;
import org.skywalking.apm.collector.agentstream.AgentStreamModuleDefine;
import org.skywalking.apm.collector.agentstream.AgentStreamModuleGroupDefine;
import org.skywalking.apm.collector.agentstream.grpc.handler.JVMMetricsServiceHandler;
import org.skywalking.apm.collector.agentstream.grpc.handler.TraceSegmentServiceHandler;
import org.skywalking.apm.collector.core.cluster.ClusterDataListener;
import org.skywalking.apm.collector.core.framework.Handler;
......@@ -46,6 +47,7 @@ public class AgentStreamGRPCModuleDefine extends AgentStreamModuleDefine {
    @Override public List<Handler> handlerList() {
        List<Handler> handlers = new LinkedList<>();
        handlers.add(new TraceSegmentServiceHandler());
        handlers.add(new JVMMetricsServiceHandler());
        return handlers;
    }
}
package org.skywalking.apm.collector.agentstream.grpc.handler;

import io.grpc.stub.StreamObserver;
import org.skywalking.apm.collector.agentstream.worker.Const;
import org.skywalking.apm.collector.agentstream.worker.jvmmetric.cpu.CpuMetricPersistenceWorker;
import org.skywalking.apm.collector.agentstream.worker.jvmmetric.cpu.define.CpuMetricDataDefine;
import org.skywalking.apm.collector.agentstream.worker.util.TimeBucketUtils;
import org.skywalking.apm.collector.core.framework.CollectorContextHelper;
import org.skywalking.apm.collector.server.grpc.GRPCHandler;
import org.skywalking.apm.collector.stream.StreamModuleContext;
import org.skywalking.apm.collector.stream.StreamModuleGroupDefine;
import org.skywalking.apm.collector.stream.worker.WorkerInvokeException;
import org.skywalking.apm.collector.stream.worker.WorkerNotFoundException;
import org.skywalking.apm.network.proto.CPU;
import org.skywalking.apm.network.proto.Downstream;
import org.skywalking.apm.network.proto.JVMMetrics;
import org.skywalking.apm.network.proto.JVMMetricsServiceGrpc;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * @author pengys5
 */
public class JVMMetricsServiceHandler extends JVMMetricsServiceGrpc.JVMMetricsServiceImplBase implements GRPCHandler {

    private final Logger logger = LoggerFactory.getLogger(JVMMetricsServiceHandler.class);

    @Override public void collect(JVMMetrics request, StreamObserver<Downstream> responseObserver) {
        int applicationInstanceId = request.getApplicationInstanceId();
        logger.debug("receive the jvm metric from application instance, id: {}", applicationInstanceId);

        StreamModuleContext context = (StreamModuleContext)CollectorContextHelper.INSTANCE.getContext(StreamModuleGroupDefine.GROUP_NAME);

        request.getMetricsList().forEach(metric -> {
            long time = TimeBucketUtils.INSTANCE.getSecondTimeBucket(metric.getTime());
            sendToCpuMetricPersistenceWorker(context, applicationInstanceId, time, metric.getCpu());
        });

        responseObserver.onNext(Downstream.newBuilder().build());
        responseObserver.onCompleted();
    }

    private void sendToCpuMetricPersistenceWorker(StreamModuleContext context, int applicationInstanceId,
        long timeBucket, CPU cpu) {
        CpuMetricDataDefine.CpuMetric cpuMetric = new CpuMetricDataDefine.CpuMetric();
        cpuMetric.setId(timeBucket + Const.ID_SPLIT + applicationInstanceId);
        cpuMetric.setApplicationInstanceId(applicationInstanceId);
        cpuMetric.setUsagePercent(cpu.getUsagePercent());
        cpuMetric.setTimeBucket(timeBucket);

        try {
            logger.debug("send to cpu metric persistence worker, id: {}", cpuMetric.getId());
            context.getClusterWorkerContext().lookup(CpuMetricPersistenceWorker.WorkerRole.INSTANCE).tell(cpuMetric.toData());
        } catch (WorkerInvokeException | WorkerNotFoundException e) {
            logger.error(e.getMessage(), e);
        }
    }
}
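A short sketch (not part of this commit) of the record id the handler assembles above; it assumes Const.ID_SPLIT is the underscore separator used by the collector's other persistence workers:

public class CpuMetricIdSketch {
    public static void main(String[] args) {
        // second-precision bucket, as returned by TimeBucketUtils.getSecondTimeBucket(...)
        long timeBucket = 20170331091529L;
        int applicationInstanceId = 12; // hypothetical instance id
        String id = timeBucket + "_" + applicationInstanceId; // assumes Const.ID_SPLIT == "_"
        System.out.println(id); // 20170331091529_12 -> one CPU document per instance per second
    }
}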
package org.skywalking.apm.collector.agentstream.worker.jvmmetric.cpu;

import org.skywalking.apm.collector.agentstream.worker.jvmmetric.cpu.dao.ICpuMetricDAO;
import org.skywalking.apm.collector.agentstream.worker.jvmmetric.cpu.define.CpuMetricDataDefine;
import org.skywalking.apm.collector.storage.dao.DAOContainer;
import org.skywalking.apm.collector.stream.worker.AbstractLocalAsyncWorkerProvider;
import org.skywalking.apm.collector.stream.worker.ClusterWorkerContext;
import org.skywalking.apm.collector.stream.worker.ProviderNotFoundException;
import org.skywalking.apm.collector.stream.worker.Role;
import org.skywalking.apm.collector.stream.worker.impl.PersistenceWorker;
import org.skywalking.apm.collector.stream.worker.impl.dao.IPersistenceDAO;
import org.skywalking.apm.collector.stream.worker.impl.data.DataDefine;
import org.skywalking.apm.collector.stream.worker.selector.HashCodeSelector;
import org.skywalking.apm.collector.stream.worker.selector.WorkerSelector;

/**
 * @author pengys5
 */
public class CpuMetricPersistenceWorker extends PersistenceWorker {

    public CpuMetricPersistenceWorker(Role role, ClusterWorkerContext clusterContext) {
        super(role, clusterContext);
    }

    @Override public void preStart() throws ProviderNotFoundException {
        super.preStart();
    }

    @Override protected boolean needMergeDBData() {
        return false;
    }

    @Override protected IPersistenceDAO persistenceDAO() {
        return (IPersistenceDAO)DAOContainer.INSTANCE.get(ICpuMetricDAO.class.getName());
    }

    public static class Factory extends AbstractLocalAsyncWorkerProvider<CpuMetricPersistenceWorker> {
        @Override
        public Role role() {
            return WorkerRole.INSTANCE;
        }

        @Override
        public CpuMetricPersistenceWorker workerInstance(ClusterWorkerContext clusterContext) {
            return new CpuMetricPersistenceWorker(role(), clusterContext);
        }

        @Override
        public int queueSize() {
            return 1024;
        }
    }

    public enum WorkerRole implements Role {
        INSTANCE;

        @Override
        public String roleName() {
            return CpuMetricPersistenceWorker.class.getSimpleName();
        }

        @Override
        public WorkerSelector workerSelector() {
            return new HashCodeSelector();
        }

        @Override public DataDefine dataDefine() {
            return new CpuMetricDataDefine();
        }
    }
}
package org.skywalking.apm.collector.agentstream.worker.jvmmetric.cpu.dao;

import java.util.HashMap;
import java.util.Map;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.update.UpdateRequestBuilder;
import org.skywalking.apm.collector.agentstream.worker.jvmmetric.cpu.define.CpuMetricTable;
import org.skywalking.apm.collector.storage.elasticsearch.dao.EsDAO;
import org.skywalking.apm.collector.stream.worker.impl.dao.IPersistenceDAO;
import org.skywalking.apm.collector.stream.worker.impl.data.Data;
import org.skywalking.apm.collector.stream.worker.impl.data.DataDefine;

/**
 * @author pengys5
 */
public class CpuMetricEsDAO extends EsDAO implements ICpuMetricDAO, IPersistenceDAO<IndexRequestBuilder, UpdateRequestBuilder> {

    @Override public Data get(String id, DataDefine dataDefine) {
        return null;
    }

    @Override public IndexRequestBuilder prepareBatchInsert(Data data) {
        Map<String, Object> source = new HashMap<>();
        source.put(CpuMetricTable.COLUMN_APPLICATION_INSTANCE_ID, data.getDataInteger(0));
        source.put(CpuMetricTable.COLUMN_USAGE_PERCENT, data.getDataDouble(0));
        source.put(CpuMetricTable.COLUMN_TIME_BUCKET, data.getDataLong(0));
        return getClient().prepareIndex(CpuMetricTable.TABLE, data.getDataString(0)).setSource(source);
    }

    @Override public UpdateRequestBuilder prepareBatchUpdate(Data data) {
        return null;
    }
}
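Note that the positional getters in prepareBatchInsert are indexed per type: each AttributeType keeps its own counter, so data.getDataInteger(0) means "the first INTEGER attribute declared in CpuMetricDataDefine", not attribute 0 overall. A minimal sketch of that correspondence, mirroring what CpuMetric.toData() does below (assumes the collector modules are on the classpath):

import org.skywalking.apm.collector.agentstream.worker.jvmmetric.cpu.define.CpuMetricDataDefine;
import org.skywalking.apm.collector.stream.worker.impl.data.Data;

public class CpuMetricDataSketch {
    public static void main(String[] args) {
        // build() sizes the typed arrays from attributeDefine():
        // one STRING, one INTEGER, one DOUBLE, one LONG
        Data data = new CpuMetricDataDefine().build("20170331091529_12");
        data.setDataString(0, "20170331091529_12"); // COLUMN_ID
        data.setDataInteger(0, 12);                 // COLUMN_APPLICATION_INSTANCE_ID
        data.setDataDouble(0, 0.75);                // COLUMN_USAGE_PERCENT
        data.setDataLong(0, 20170331091529L);       // COLUMN_TIME_BUCKET
    }
}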
package org.skywalking.apm.collector.agentstream.worker.jvmmetric.cpu.dao;

import org.skywalking.apm.collector.storage.h2.dao.H2DAO;

/**
 * @author pengys5
 */
public class CpuMetricH2DAO extends H2DAO implements ICpuMetricDAO {
}
package org.skywalking.apm.collector.agentstream.worker.jvmmetric.cpu.dao;

/**
 * @author pengys5
 */
public interface ICpuMetricDAO {
}
package org.skywalking.apm.collector.agentstream.worker.jvmmetric.cpu.define;

import org.skywalking.apm.collector.remote.grpc.proto.RemoteData;
import org.skywalking.apm.collector.stream.worker.impl.data.Attribute;
import org.skywalking.apm.collector.stream.worker.impl.data.AttributeType;
import org.skywalking.apm.collector.stream.worker.impl.data.Data;
import org.skywalking.apm.collector.stream.worker.impl.data.DataDefine;
import org.skywalking.apm.collector.stream.worker.impl.data.Transform;
import org.skywalking.apm.collector.stream.worker.impl.data.operate.CoverOperation;
import org.skywalking.apm.collector.stream.worker.impl.data.operate.NonOperation;

/**
 * @author pengys5
 */
public class CpuMetricDataDefine extends DataDefine {

    @Override protected int initialCapacity() {
        return 4;
    }

    @Override protected void attributeDefine() {
        addAttribute(0, new Attribute(CpuMetricTable.COLUMN_ID, AttributeType.STRING, new NonOperation()));
        addAttribute(1, new Attribute(CpuMetricTable.COLUMN_APPLICATION_INSTANCE_ID, AttributeType.INTEGER, new CoverOperation()));
        addAttribute(2, new Attribute(CpuMetricTable.COLUMN_USAGE_PERCENT, AttributeType.DOUBLE, new CoverOperation()));
        addAttribute(3, new Attribute(CpuMetricTable.COLUMN_TIME_BUCKET, AttributeType.LONG, new CoverOperation()));
    }

    @Override public Object deserialize(RemoteData remoteData) {
        return null;
    }

    @Override public RemoteData serialize(Object object) {
        return null;
    }

    public static class CpuMetric implements Transform<CpuMetric> {
        private String id;
        private int applicationInstanceId;
        private double usagePercent;
        private long timeBucket;

        public CpuMetric(String id, int applicationInstanceId, double usagePercent, long timeBucket) {
            this.id = id;
            this.applicationInstanceId = applicationInstanceId;
            this.usagePercent = usagePercent;
            this.timeBucket = timeBucket;
        }

        public CpuMetric() {
        }

        @Override public Data toData() {
            CpuMetricDataDefine define = new CpuMetricDataDefine();
            Data data = define.build(id);
            data.setDataString(0, this.id);
            data.setDataInteger(0, this.applicationInstanceId);
            data.setDataDouble(0, this.usagePercent);
            data.setDataLong(0, this.timeBucket);
            return data;
        }

        @Override public CpuMetric toSelf(Data data) {
            this.id = data.getDataString(0);
            this.applicationInstanceId = data.getDataInteger(0);
            this.usagePercent = data.getDataDouble(0);
            this.timeBucket = data.getDataLong(0);
            return this;
        }

        public void setId(String id) {
            this.id = id;
        }

        public void setApplicationInstanceId(int applicationInstanceId) {
            this.applicationInstanceId = applicationInstanceId;
        }

        public void setUsagePercent(double usagePercent) {
            this.usagePercent = usagePercent;
        }

        public void setTimeBucket(long timeBucket) {
            this.timeBucket = timeBucket;
        }

        public String getId() {
            return id;
        }

        public int getApplicationInstanceId() {
            return applicationInstanceId;
        }

        public double getUsagePercent() {
            return usagePercent;
        }

        public long getTimeBucket() {
            return timeBucket;
        }
    }
}
package org.skywalking.apm.collector.agentstream.worker.jvmmetric.cpu.define;

import org.skywalking.apm.collector.storage.elasticsearch.define.ElasticSearchColumnDefine;
import org.skywalking.apm.collector.storage.elasticsearch.define.ElasticSearchTableDefine;

/**
 * @author pengys5
 */
public class CpuMetricEsTableDefine extends ElasticSearchTableDefine {

    public CpuMetricEsTableDefine() {
        super(CpuMetricTable.TABLE);
    }

    @Override public int refreshInterval() {
        return 1;
    }

    @Override public int numberOfShards() {
        return 2;
    }

    @Override public int numberOfReplicas() {
        return 0;
    }

    @Override public void initialize() {
        addColumn(new ElasticSearchColumnDefine(CpuMetricTable.COLUMN_APPLICATION_INSTANCE_ID, ElasticSearchColumnDefine.Type.Integer.name()));
        addColumn(new ElasticSearchColumnDefine(CpuMetricTable.COLUMN_USAGE_PERCENT, ElasticSearchColumnDefine.Type.Double.name()));
        addColumn(new ElasticSearchColumnDefine(CpuMetricTable.COLUMN_TIME_BUCKET, ElasticSearchColumnDefine.Type.Long.name()));
    }
}
package org.skywalking.apm.collector.agentstream.worker.jvmmetric.cpu.define;

import org.skywalking.apm.collector.storage.h2.define.H2ColumnDefine;
import org.skywalking.apm.collector.storage.h2.define.H2TableDefine;

/**
 * @author pengys5
 */
public class CpuMetricH2TableDefine extends H2TableDefine {

    public CpuMetricH2TableDefine() {
        super(CpuMetricTable.TABLE);
    }

    @Override public void initialize() {
        addColumn(new H2ColumnDefine(CpuMetricTable.COLUMN_ID, H2ColumnDefine.Type.Varchar.name()));
        addColumn(new H2ColumnDefine(CpuMetricTable.COLUMN_APPLICATION_INSTANCE_ID, H2ColumnDefine.Type.Int.name()));
        addColumn(new H2ColumnDefine(CpuMetricTable.COLUMN_USAGE_PERCENT, H2ColumnDefine.Type.Double.name()));
        addColumn(new H2ColumnDefine(CpuMetricTable.COLUMN_TIME_BUCKET, H2ColumnDefine.Type.Bigint.name()));
    }
}
package org.skywalking.apm.collector.agentstream.worker.jvmmetric.cpu.define;

import org.skywalking.apm.collector.agentstream.worker.CommonTable;

/**
 * @author pengys5
 */
public class CpuMetricTable extends CommonTable {
    public static final String TABLE = "cpu_metric";
    public static final String COLUMN_APPLICATION_INSTANCE_ID = "application_instance_id";
    public static final String COLUMN_USAGE_PERCENT = "usage_percent";
}
......@@ -13,6 +13,7 @@ public enum TimeBucketUtils {
    private final SimpleDateFormat dayDateFormat = new SimpleDateFormat("yyyyMMdd");
    private final SimpleDateFormat hourDateFormat = new SimpleDateFormat("yyyyMMddHH");
    private final SimpleDateFormat minuteDateFormat = new SimpleDateFormat("yyyyMMddHHmm");
    private final SimpleDateFormat secondDateFormat = new SimpleDateFormat("yyyyMMddHHmmss");

    public long getMinuteTimeBucket(long time) {
        Calendar calendar = Calendar.getInstance();
......@@ -21,6 +22,13 @@ public enum TimeBucketUtils {
        return Long.valueOf(timeStr);
    }

    public long getSecondTimeBucket(long time) {
        Calendar calendar = Calendar.getInstance();
        calendar.setTimeInMillis(time);
        String timeStr = secondDateFormat.format(calendar.getTime());
        return Long.valueOf(timeStr);
    }

    public long getHourTimeBucket(long time) {
        Calendar calendar = Calendar.getInstance();
        calendar.setTimeInMillis(time);
......
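getSecondTimeBucket, like the existing bucket methods, formats the timestamp in the JVM's default time zone, which is why the test case below pins the zone before asserting. A standalone sketch of the same conversion using only the JDK:

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class SecondBucketSketch {
    public static void main(String[] args) {
        TimeZone.setDefault(TimeZone.getTimeZone("GMT+08:00"));
        SimpleDateFormat secondFormat = new SimpleDateFormat("yyyyMMddHHmmss");
        // 1490922929258L is 2017-03-31 01:15:29 UTC, i.e. 09:15:29 in GMT+8
        System.out.println(Long.valueOf(secondFormat.format(new Date(1490922929258L))));
        // prints 20170331091529, the value asserted in testGetSecondTimeBucket below
    }
}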
......@@ -9,4 +9,5 @@ org.skywalking.apm.collector.agentstream.worker.noderef.summary.dao.NodeRefSumEs
org.skywalking.apm.collector.agentstream.worker.segment.cost.dao.SegmentCostEsDAO
org.skywalking.apm.collector.agentstream.worker.global.dao.GlobalTraceEsDAO
org.skywalking.apm.collector.agentstream.worker.service.entry.dao.ServiceEntryEsDAO
org.skywalking.apm.collector.agentstream.worker.serviceref.reference.dao.ServiceRefEsDAO
\ No newline at end of file
org.skywalking.apm.collector.agentstream.worker.serviceref.reference.dao.ServiceRefEsDAO
org.skywalking.apm.collector.agentstream.worker.jvmmetric.cpu.dao.CpuMetricEsDAO
\ No newline at end of file
......@@ -9,4 +9,5 @@ org.skywalking.apm.collector.agentstream.worker.noderef.summary.dao.NodeRefSumH2
org.skywalking.apm.collector.agentstream.worker.segment.cost.dao.SegmentCostH2DAO
org.skywalking.apm.collector.agentstream.worker.global.dao.GlobalTraceH2DAO
org.skywalking.apm.collector.agentstream.worker.service.entry.dao.ServiceEntryH2DAO
org.skywalking.apm.collector.agentstream.worker.serviceref.reference.dao.ServiceRefH2DAO
\ No newline at end of file
org.skywalking.apm.collector.agentstream.worker.serviceref.reference.dao.ServiceRefH2DAO
org.skywalking.apm.collector.agentstream.worker.jvmmetric.cpu.dao.CpuMetricH2DAO
\ No newline at end of file
......@@ -20,6 +20,8 @@ org.skywalking.apm.collector.agentstream.worker.segment.origin.SegmentPersistenc
org.skywalking.apm.collector.agentstream.worker.segment.cost.SegmentCostPersistenceWorker$Factory
org.skywalking.apm.collector.agentstream.worker.global.GlobalTracePersistenceWorker$Factory
org.skywalking.apm.collector.agentstream.worker.jvmmetric.cpu.CpuMetricPersistenceWorker$Factory
org.skywalking.apm.collector.agentstream.worker.register.application.ApplicationRegisterSerialWorker$Factory
org.skywalking.apm.collector.agentstream.worker.register.instance.InstanceRegisterSerialWorker$Factory
org.skywalking.apm.collector.agentstream.worker.register.servicename.ServiceNameRegisterSerialWorker$Factory
\ No newline at end of file
......@@ -32,4 +32,7 @@ org.skywalking.apm.collector.agentstream.worker.service.entry.define.ServiceEntr
org.skywalking.apm.collector.agentstream.worker.service.entry.define.ServiceEntryH2TableDefine
org.skywalking.apm.collector.agentstream.worker.serviceref.reference.define.ServiceRefEsTableDefine
org.skywalking.apm.collector.agentstream.worker.serviceref.reference.define.ServiceRefH2TableDefine
\ No newline at end of file
org.skywalking.apm.collector.agentstream.worker.serviceref.reference.define.ServiceRefH2TableDefine
org.skywalking.apm.collector.agentstream.worker.jvmmetric.cpu.define.CpuMetricEsTableDefine
org.skywalking.apm.collector.agentstream.worker.jvmmetric.cpu.define.CpuMetricH2TableDefine
\ No newline at end of file
package org.skywalking.apm.collector.agentstream.util;

import java.util.Calendar;
import java.util.TimeZone;
import org.junit.Assert;
import org.junit.Test;
import org.skywalking.apm.collector.agentstream.worker.util.TimeBucketUtils;

/**
 * @author pengys5
 */
public class TimeBucketUtilsTestCase {

    @Test
    public void testUTCLocation() {
        TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
        long timeBucket = 201703310915L;
        long changedTimeBucket = TimeBucketUtils.INSTANCE.changeToUTCTimeBucket(timeBucket);
        Assert.assertEquals(201703310115L, changedTimeBucket);
    }

    @Test
    public void testUTC8Location() {
        TimeZone.setDefault(TimeZone.getTimeZone("GMT+08:00"));
        long timeBucket = 201703310915L;
        long changedTimeBucket = TimeBucketUtils.INSTANCE.changeToUTCTimeBucket(timeBucket);
        Assert.assertEquals(201703310915L, changedTimeBucket);
    }

    @Test
    public void testGetSecondTimeBucket() {
        long timeBucket = TimeBucketUtils.INSTANCE.getSecondTimeBucket(1490922929258L);
        Assert.assertEquals(20170331091529L, timeBucket);
    }

    @Test
    public void test() {
        Calendar calendar = Calendar.getInstance();
        calendar.setTimeInMillis(1490922929258L);
        calendar.set(Calendar.SECOND, calendar.get(Calendar.SECOND) - 3);
        // System.out.println(calendar.getTimeInMillis());
        calendar.set(Calendar.SECOND, calendar.get(Calendar.SECOND) - 2);
        // System.out.println(calendar.getTimeInMillis());
        calendar.set(Calendar.SECOND, calendar.get(Calendar.SECOND) - 2);
        // System.out.println(calendar.getTimeInMillis());
    }
}
......@@ -11,6 +11,6 @@ public class ElasticSearchColumnDefine extends ColumnDefine {
    }

    public enum Type {
        Binary, Boolean, Date, Keyword, Long, Integer
        Binary, Boolean, Keyword, Long, Integer, Double
    }
}
......@@ -12,6 +12,6 @@ public class H2ColumnDefine extends ColumnDefine {
    }

    public enum Type {
        Boolean, Varchar, Int, Bigint, BINARY
        Boolean, Varchar, Int, Bigint, BINARY, Double
    }
}
......@@ -4,5 +4,5 @@ package org.skywalking.apm.collector.stream.worker.impl.data;
 * @author pengys5
 */
public enum AttributeType {
    STRING, LONG, FLOAT, INTEGER, BYTE, BOOLEAN
    STRING, LONG, DOUBLE, INTEGER, BYTE, BOOLEAN
}
......@@ -10,29 +10,29 @@ import org.skywalking.apm.collector.stream.worker.selector.AbstractHashMessage;
public class Data extends AbstractHashMessage {
    private final int stringCapacity;
    private final int longCapacity;
    private final int floatCapacity;
    private final int doubleCapacity;
    private final int integerCapacity;
    private final int booleanCapacity;
    private final int byteCapacity;
    private String[] dataStrings;
    private Long[] dataLongs;
    private Float[] dataFloats;
    private Double[] dataDoubles;
    private Integer[] dataIntegers;
    private Boolean[] dataBooleans;
    private byte[][] dataBytes;

    public Data(String id, int stringCapacity, int longCapacity, int floatCapacity, int integerCapacity,
    public Data(String id, int stringCapacity, int longCapacity, int doubleCapacity, int integerCapacity,
        int booleanCapacity, int byteCapacity) {
        super(id);
        this.dataStrings = new String[stringCapacity];
        this.dataLongs = new Long[longCapacity];
        this.dataFloats = new Float[floatCapacity];
        this.dataDoubles = new Double[doubleCapacity];
        this.dataIntegers = new Integer[integerCapacity];
        this.dataBooleans = new Boolean[booleanCapacity];
        this.dataBytes = new byte[byteCapacity][];
        this.stringCapacity = stringCapacity;
        this.longCapacity = longCapacity;
        this.floatCapacity = floatCapacity;
        this.doubleCapacity = doubleCapacity;
        this.integerCapacity = integerCapacity;
        this.booleanCapacity = booleanCapacity;
        this.byteCapacity = byteCapacity;
......@@ -46,8 +46,8 @@ public class Data extends AbstractHashMessage {
        dataLongs[position] = value;
    }

    public void setDataFloat(int position, Float value) {
        dataFloats[position] = value;
    public void setDataDouble(int position, Double value) {
        dataDoubles[position] = value;
    }

    public void setDataInteger(int position, Integer value) {
......@@ -70,8 +70,8 @@ public class Data extends AbstractHashMessage {
        return dataLongs[position];
    }

    public Float getDataFloat(int position) {
        return dataFloats[position];
    public Double getDataDouble(int position) {
        return dataDoubles[position];
    }

    public Integer getDataInteger(int position) {
......@@ -93,7 +93,7 @@ public class Data extends AbstractHashMessage {
    public RemoteData serialize() {
        RemoteData.Builder builder = RemoteData.newBuilder();
        builder.setIntegerCapacity(integerCapacity);
        builder.setFloatCapacity(floatCapacity);
        builder.setDoubleCapacity(doubleCapacity);
        builder.setStringCapacity(stringCapacity);
        builder.setLongCapacity(longCapacity);
        builder.setByteCapacity(byteCapacity);
......@@ -105,8 +105,8 @@ public class Data extends AbstractHashMessage {
        for (int i = 0; i < dataIntegers.length; i++) {
            builder.setDataIntegers(i, dataIntegers[i]);
        }
        for (int i = 0; i < dataFloats.length; i++) {
            builder.setDataFloats(i, dataFloats[i]);
        for (int i = 0; i < dataDoubles.length; i++) {
            builder.setDataDoubles(i, dataDoubles[i]);
        }
        for (int i = 0; i < dataLongs.length; i++) {
            builder.setDataLongs(i, dataLongs[i]);
......
......@@ -9,7 +9,7 @@ public abstract class DataDefine {
    private Attribute[] attributes;
    private int stringCapacity;
    private int longCapacity;
    private int floatCapacity;
    private int doubleCapacity;
    private int integerCapacity;
    private int booleanCapacity;
    private int byteCapacity;
......@@ -26,8 +26,8 @@ public abstract class DataDefine {
            stringCapacity++;
        } else if (AttributeType.LONG.equals(attribute.getType())) {
            longCapacity++;
        } else if (AttributeType.FLOAT.equals(attribute.getType())) {
            floatCapacity++;
        } else if (AttributeType.DOUBLE.equals(attribute.getType())) {
            doubleCapacity++;
        } else if (AttributeType.INTEGER.equals(attribute.getType())) {
            integerCapacity++;
        } else if (AttributeType.BOOLEAN.equals(attribute.getType())) {
......@@ -47,13 +47,13 @@ public abstract class DataDefine {
    protected abstract void attributeDefine();

    public final Data build(String id) {
        return new Data(id, stringCapacity, longCapacity, floatCapacity, integerCapacity, booleanCapacity, byteCapacity);
        return new Data(id, stringCapacity, longCapacity, doubleCapacity, integerCapacity, booleanCapacity, byteCapacity);
    }

    public void mergeData(Data newData, Data oldData) {
        int stringPosition = 0;
        int longPosition = 0;
        int floatPosition = 0;
        int doublePosition = 0;
        int integerPosition = 0;
        int booleanPosition = 0;
        int bytePosition = 0;
......@@ -65,9 +65,9 @@ public abstract class DataDefine {
            } else if (AttributeType.LONG.equals(attribute.getType())) {
                attribute.getOperation().operate(newData.getDataLong(longPosition), oldData.getDataLong(longPosition));
                longPosition++;
            } else if (AttributeType.FLOAT.equals(attribute.getType())) {
                attribute.getOperation().operate(newData.getDataFloat(floatPosition), oldData.getDataFloat(floatPosition));
                floatPosition++;
            } else if (AttributeType.DOUBLE.equals(attribute.getType())) {
                attribute.getOperation().operate(newData.getDataDouble(doublePosition), oldData.getDataDouble(doublePosition));
                doublePosition++;
            } else if (AttributeType.INTEGER.equals(attribute.getType())) {
                attribute.getOperation().operate(newData.getDataInteger(integerPosition), oldData.getDataInteger(integerPosition));
                integerPosition++;
......
......@@ -8,7 +8,7 @@ public interface Operation {
    Long operate(Long newValue, Long oldValue);

    Float operate(Float newValue, Float oldValue);
    Double operate(Double newValue, Double oldValue);

    Integer operate(Integer newValue, Integer oldValue);
......
......@@ -15,7 +15,7 @@ public class AddOperation implements Operation {
        return newValue + oldValue;
    }

    @Override public Float operate(Float newValue, Float oldValue) {
    @Override public Double operate(Double newValue, Double oldValue) {
        return newValue + oldValue;
    }
......
......@@ -14,7 +14,7 @@ public class CoverOperation implements Operation {
        return newValue;
    }

    @Override public Float operate(Float newValue, Float oldValue) {
    @Override public Double operate(Double newValue, Double oldValue) {
        return newValue;
    }
......
......@@ -14,7 +14,7 @@ public class NonOperation implements Operation {
        return oldValue;
    }

    @Override public Float operate(Float newValue, Float oldValue) {
    @Override public Double operate(Double newValue, Double oldValue) {
        return oldValue;
    }
......
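Taken together, the three Operation implementations switched from Float to Double above define how a DOUBLE column merges when a worker's needMergeDBData() returns true. A small sketch of their behavior, assuming the classes are stateless as shown:

import org.skywalking.apm.collector.stream.worker.impl.data.operate.AddOperation;
import org.skywalking.apm.collector.stream.worker.impl.data.operate.CoverOperation;
import org.skywalking.apm.collector.stream.worker.impl.data.operate.NonOperation;

public class OperationSketch {
    public static void main(String[] args) {
        Double incoming = 5.0, stored = 3.0;
        System.out.println(new CoverOperation().operate(incoming, stored)); // 5.0, newest wins (usage_percent)
        System.out.println(new NonOperation().operate(incoming, stored));   // 3.0, first write wins (id)
        System.out.println(new AddOperation().operate(incoming, stored));   // 8.0, accumulate
    }
}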
......@@ -16,13 +16,13 @@ message RemoteMessage {
message RemoteData {
    int32 stringCapacity = 1;
    int32 longCapacity = 2;
    int32 floatCapacity = 3;
    int32 doubleCapacity = 3;
    int32 integerCapacity = 4;
    int32 byteCapacity = 5;
    int32 booleanCapacity = 6;
    repeated string dataStrings = 7;
    repeated int64 dataLongs = 8;
    repeated float dataFloats = 9;
    repeated double dataDoubles = 9;
    repeated int32 dataIntegers = 10;
    repeated bytes dataBytes = 11;
    repeated bool dataBooleans = 12;
......
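One caveat worth noting: field numbers 3 and 9 are reused for the new double fields, and float (32-bit) and double (64-bit) have different wire encodings, so an old collector node cannot decode RemoteData produced by a new one; this change appears to assume all collector nodes upgrade together.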