Unverified commit 5a40a19e authored by BUAAserein, committed by GitHub

[IOTDB-5284] Fix some iotdb-server code smells (#8754)

* fix some code smells
Parent 2f8abb24
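The hunks below repeatedly apply the same few clean-ups: `protected` constructors on abstract classes, `isEmpty()` instead of `size() == 0` checks, and parameterized SLF4J logging instead of string concatenation or `String.format`. A minimal, self-contained sketch of these patterns is shown here; the `Counter` class and its fields are hypothetical and are not part of the IoTDB codebase.

```java
import java.util.ArrayList;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical example class illustrating the code-smell fixes in this commit.
abstract class Counter {
  private static final Logger LOGGER = LoggerFactory.getLogger(Counter.class);

  private final List<Long> values = new ArrayList<>();

  // An abstract class can never be instantiated directly, so its constructor
  // only needs to be visible to subclasses: protected instead of public.
  protected Counter(List<Long> initialValues) {
    values.addAll(initialValues);
  }

  long sum() {
    // isEmpty() states the intent directly, rather than comparing size() to 0.
    if (values.isEmpty()) {
      // The {} placeholder defers string building until the log level is enabled,
      // avoiding the cost of concatenation when the message is filtered out.
      LOGGER.warn("No values recorded for {}", getClass().getSimpleName());
      return 0L;
    }
    long total = 0L;
    for (long v : values) {
      total += v;
    }
    LOGGER.info("Summed {} values, total = {}", values.size(), total);
    return total;
  }
}
```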
......@@ -28,11 +28,11 @@ public abstract class InfluxFunction {
// contain possible parameters
private List<Expression> expressionList;
public InfluxFunction(List<Expression> expressionList) {
protected InfluxFunction(List<Expression> expressionList) {
this.expressionList = expressionList;
}
public InfluxFunction() {}
protected InfluxFunction() {}
public List<Expression> getExpressions() {
return this.expressionList;
......
......@@ -27,7 +27,7 @@ import java.util.List;
public abstract class InfluxAggregator extends InfluxFunction {
public InfluxAggregator(List<Expression> expressionList) {
protected InfluxAggregator(List<Expression> expressionList) {
super(expressionList);
}
......
......@@ -38,7 +38,7 @@ public class InfluxMeanFunction extends InfluxAggregator {
@Override
public InfluxFunctionValue calculateBruteForce() {
return new InfluxFunctionValue(numbers.size() == 0 ? numbers : MathUtils.mean(numbers), 0L);
return new InfluxFunctionValue(numbers.isEmpty() ? numbers : MathUtils.mean(numbers), 0L);
}
@Override
......
......@@ -36,8 +36,7 @@ public class InfluxStddevFunction extends InfluxAggregator {
@Override
public InfluxFunctionValue calculateBruteForce() {
return new InfluxFunctionValue(
numbers.size() == 0 ? numbers : MathUtils.popStdDev(numbers), 0L);
return new InfluxFunctionValue(numbers.isEmpty() ? numbers : MathUtils.popStdDev(numbers), 0L);
}
@Override
......
......@@ -37,7 +37,7 @@ public class InfluxSumFunction extends InfluxAggregator {
@Override
public InfluxFunctionValue calculateBruteForce() {
return new InfluxFunctionValue(numbers.size() == 0 ? numbers : MathUtils.sum(numbers), 0L);
return new InfluxFunctionValue(numbers.isEmpty() ? numbers : MathUtils.sum(numbers), 0L);
}
@Override
......
......@@ -32,7 +32,7 @@ public abstract class InfluxSelector extends InfluxFunction {
private List<Object> relatedValues;
public InfluxSelector(List<Expression> expressionList) {
protected InfluxSelector(List<Expression> expressionList) {
super(expressionList);
}
......
......@@ -223,7 +223,7 @@ public abstract class AbstractQueryHandler {
} else {
// If there are no common queries, they are all function queries
for (InfluxFunction function : functions) {
if (value.size() == 0) {
if (value.isEmpty()) {
value.add(function.calculateBruteForce().getTimestamp());
} else {
value.set(0, function.calculateBruteForce().getTimestamp());
......@@ -298,7 +298,7 @@ public abstract class AbstractQueryHandler {
InfluxFunctionValue functionValue =
updateByIoTDBFunc(database, measurement, function, sessionid);
// InfluxFunctionValue functionValue = function.calculateByIoTDBFunc();
if (value.size() == 0) {
if (value.isEmpty()) {
value.add(functionValue.getTimestamp());
} else {
value.set(0, functionValue.getTimestamp());
......@@ -445,7 +445,7 @@ public abstract class AbstractQueryHandler {
String realQuerySql;
realQuerySql = "select * from " + curQueryPath;
if (!(realIotDBCondition.length() == 0)) {
if (realIotDBCondition.length() != 0) {
realQuerySql += " where " + realIotDBCondition;
}
realQuerySql += " align by device";
......
......@@ -111,8 +111,9 @@ public class NewQueryHandler extends AbstractQueryHandler {
TSExecuteStatementResp tsExecuteStatementResp =
NewInfluxDBServiceImpl.executeStatement(functionSql, sessionid);
Map<String, Object> map = QueryResultUtils.getColumnNameAndValue(tsExecuteStatementResp);
for (String colume : map.keySet()) {
Object o = map.get(colume);
for (Map.Entry<String, Object> entry : map.entrySet()) {
String colume = entry.getKey();
Object o = entry.getValue();
String fullPath = colume.substring(functionName.length() + 1, colume.length() - 1);
String devicePath = StringUtils.getDeviceByPath(fullPath);
String specificSql =
......
......@@ -130,7 +130,7 @@ public class TagQueryHandler extends NewQueryHandler {
String realQuerySql;
realQuerySql = "select * from " + curQueryPath;
if (!(realIotDBCondition.length() == 0)) {
if (realIotDBCondition.length() != 0) {
realQuerySql += " where " + realIotDBCondition;
}
realQuerySql += " align by device";
......
......@@ -41,6 +41,7 @@ public final class InfluxSelectComponent extends SelectComponent {
super(null);
}
@Override
public void addResultColumn(ResultColumn resultColumn) {
Expression expression = resultColumn.getExpression();
if (expression instanceof FunctionExpression) {
......
......@@ -216,7 +216,7 @@ public class GrafanaApiServiceImpl extends GrafanaApiService {
public Response node(List<String> requestBody, SecurityContext securityContext)
throws NotFoundException {
try {
if (requestBody != null && requestBody.size() > 0) {
if (requestBody != null && !requestBody.isEmpty()) {
PartialPath path = new PartialPath(Joiner.on(".").join(requestBody));
String sql = "show child paths " + path;
Statement statement = StatementGenerator.createStatement(sql, ZoneId.systemDefault());
......
......@@ -246,7 +246,7 @@ public class FileReaderManager {
public synchronized void writeFileReferenceInfo() {
DEBUG_LOGGER.info("[closedReferenceMap]\n");
for (Map.Entry<String, AtomicInteger> entry : closedReferenceMap.entrySet()) {
DEBUG_LOGGER.info(String.format("\t%s: %d\n", entry.getKey(), entry.getValue().get()));
DEBUG_LOGGER.info(String.format("\t%s: %d%n", entry.getKey(), entry.getValue().get()));
}
DEBUG_LOGGER.info("[unclosedReferenceMap]\n");
for (Map.Entry<String, AtomicInteger> entry : unclosedReferenceMap.entrySet()) {
......
......@@ -139,9 +139,9 @@ public class QueryFileManager {
sealedFilePathsMap.entrySet()) {
long queryId = entry.getKey();
Set<TsFileResource> tsFileResources = entry.getValue().keySet();
DEBUG_LOGGER.info(String.format("\t[queryId: %d]\n", queryId));
DEBUG_LOGGER.info("\t[queryId: {}]\n", queryId);
for (TsFileResource tsFileResource : tsFileResources) {
DEBUG_LOGGER.info(String.format("\t\t%s\n", tsFileResource.getTsFile().getAbsolutePath()));
DEBUG_LOGGER.info("\t\t{}\n", tsFileResource.getTsFile().getAbsolutePath());
}
}
DEBUG_LOGGER.info("[Query Unsealed File Info]\n");
......@@ -149,9 +149,9 @@ public class QueryFileManager {
unsealedFilePathsMap.entrySet()) {
long queryId = entry.getKey();
Set<TsFileResource> tsFileResources = entry.getValue().keySet();
DEBUG_LOGGER.info(String.format("\t[queryId: %d]\n", queryId));
DEBUG_LOGGER.info("\t[queryId: {}]\n", queryId);
for (TsFileResource tsFileResource : tsFileResources) {
DEBUG_LOGGER.info(String.format("\t\t%s\n", tsFileResource.getTsFile().getAbsolutePath()));
DEBUG_LOGGER.info("\t\t{}\n", tsFileResource.getTsFile().getAbsolutePath());
}
}
}
......
......@@ -111,7 +111,7 @@ public class DataNodeServerCommandLine extends ServerCommandLine {
TDataNodeRemoveReq removeReq = new TDataNodeRemoveReq(dataNodeLocations);
try (ConfigNodeClient configNodeClient = new ConfigNodeClient()) {
TDataNodeRemoveResp removeResp = configNodeClient.removeDataNode(removeReq);
LOGGER.info("Remove result {} ", removeResp.toString());
LOGGER.info("Remove result {} ", removeResp);
if (removeResp.getStatus().getCode() != TSStatusCode.SUCCESS_STATUS.getStatusCode()) {
throw new IoTDBException(
removeResp.getStatus().toString(), removeResp.getStatus().getCode());
......
......@@ -123,9 +123,8 @@ public class SettleService implements IService {
return;
}
logger.info(
"Totally find "
+ (seqResourcesToBeSettled.size() + unseqResourcesToBeSettled.size())
+ " tsFiles to be settled.");
"Totally find {} tsFiles to be settled.",
seqResourcesToBeSettled.size() + unseqResourcesToBeSettled.size());
// settle seqTsFile
for (TsFileResource resource : seqResourcesToBeSettled) {
resource.readLock();
......
......@@ -433,7 +433,7 @@ public class ClientRPCServiceImpl implements IClientRPCServiceWithHandler {
QueryDataSetUtils.convertQueryResultByFetchSize(queryExecution, req.fetchSize);
List<ByteBuffer> result = pair.left;
finished = pair.right;
boolean hasResultSet = !(result.size() == 0);
boolean hasResultSet = !result.isEmpty();
resp.setHasResultSet(hasResultSet);
resp.setIsAlign(true);
resp.setQueryResult(result);
......
......@@ -341,7 +341,7 @@ public class DataNodeInternalRPCServiceImpl implements IDataNodeRPCService.Iface
@Override
public TLoadResp sendTsFilePieceNode(TTsFilePieceReq req) throws TException {
LOGGER.info(String.format("Receive load node from uuid %s.", req.uuid));
LOGGER.info("Receive load node from uuid {}.", req.uuid);
ConsensusGroupId groupId =
ConsensusGroupId.Factory.createFromTConsensusGroupId(req.consensusGroupId);
......
......@@ -349,7 +349,7 @@ public class SyncService implements IService {
public void recordMessage(String pipeName, PipeMessage message) {
if (!pipes.containsKey(pipeName)) {
logger.warn(String.format("No running PIPE for message %s.", message));
logger.warn("No running PIPE for message {}.", message);
return;
}
TSStatus status = null;
......@@ -365,10 +365,10 @@ public class SyncService implements IService {
status = syncInfoFetcher.recordMsg(pipeName, message);
break;
default:
logger.error(String.format("Unknown message type: %s", message));
logger.error("Unknown message type: {}", message);
}
if (status != null && status.getCode() != TSStatusCode.SUCCESS_STATUS.getStatusCode()) {
logger.error(String.format("Failed to record message: %s", message));
logger.error("Failed to record message: {}", message);
}
}
......@@ -420,13 +420,13 @@ public class SyncService implements IService {
private void startExternalPipeManager(String pipeName, boolean startExtPipe)
throws PipeException {
if (!(pipes.get(pipeName) instanceof TsFilePipe)) {
logger.error("startExternalPipeManager(), runningPipe is not TsFilePipe. " + pipeName);
logger.error("startExternalPipeManager(), runningPipe is not TsFilePipe. {}", pipeName);
return;
}
PipeSink pipeSink = pipes.get(pipeName).getPipeSink();
if (!(pipeSink instanceof ExternalPipeSink)) {
logger.error("startExternalPipeManager(), pipeSink is not ExternalPipeSink." + pipeSink);
logger.error("startExternalPipeManager(), pipeSink is not ExternalPipeSink. {}", pipeSink);
return;
}
......@@ -435,9 +435,8 @@ public class SyncService implements IService {
ExtPipePluginRegister.getInstance().getWriteFactory(extPipeSinkTypeName);
if (externalPipeSinkWriterFactory == null) {
logger.error(
String.format(
"startExternalPipeManager(), can not found ExternalPipe plugin for %s.",
extPipeSinkTypeName));
"startExternalPipeManager(), can not found ExternalPipe plugin for {}.",
extPipeSinkTypeName);
throw new PipeException("Can not found ExternalPipe plugin for " + extPipeSinkTypeName + ".");
}
......
......@@ -42,7 +42,7 @@ public abstract class AbstractOpBlock implements Comparable<AbstractOpBlock> {
// data number of this OpBlock
protected long dataCount = -1;
public AbstractOpBlock(String storageGroupName, long pipeDataSerialNumber, long beginIndex) {
protected AbstractOpBlock(String storageGroupName, long pipeDataSerialNumber, long beginIndex) {
this.storageGroup = storageGroupName;
this.pipeDataSerialNumber = pipeDataSerialNumber;
this.beginIndex = beginIndex;
......
......@@ -228,11 +228,11 @@ public class PipeOpManager {
* in list can be discrete
*/
private void commitFilePipe(List<Long> filePipeSerialNumberList) {
if (filePipeSerialNumberList.size() <= 0) {
if (filePipeSerialNumberList.isEmpty()) {
return;
}
if (filePipeSerialNumberSet.size() <= 0) {
if (filePipeSerialNumberSet.isEmpty()) {
logger.error("commitFilePipe(), filePipeSerialNumberSet should not be empty.");
return;
}
......@@ -250,7 +250,7 @@ public class PipeOpManager {
return;
}
if (filePipeSerialNumberSet.size() > 0) {
if (!filePipeSerialNumberSet.isEmpty()) {
if (filePipeSerialNumberSet.first() > minNum) {
filePipe.commit(filePipeSerialNumberSet.first() - 1);
}
......@@ -276,7 +276,7 @@ public class PipeOpManager {
* @return True - PipeOpManager has no Pipe data
*/
public boolean isEmpty() {
return (filePipeSerialNumberSet.size() <= 0);
return filePipeSerialNumberSet.isEmpty();
}
/**
......
......@@ -122,11 +122,7 @@ public class PipeOpSgManager {
return true;
}
if (opBlockBeginIndex > currentOpBlockBeginIndex) {
return true;
}
return false;
return opBlockBeginIndex > currentOpBlockBeginIndex;
}
/**
......
......@@ -281,7 +281,6 @@ public class TsFileOpBlock extends AbstractOpBlock {
timeChunkOffset = entry.getKey();
timeChunkPointCount = chunkInfo.pointCount;
iter.remove();
continue;
} else {
chunkInfo.isTimeAligned = true;
chunkInfo.timeChunkOffsetInFile = timeChunkOffset;
......@@ -577,9 +576,9 @@ public class TsFileOpBlock extends AbstractOpBlock {
new PageReader(pageData, chunkHeader.getDataType(), valueDecoder, timeDecoder, null);
BatchData batchData = pageReader.getAllSatisfiedPageData();
if (chunkHeader.getChunkType() == MetaMarker.CHUNK_HEADER) {
logger.debug("points in the page(by pageHeader): " + pageHeader.getNumOfValues());
logger.debug("points in the page(by pageHeader): {}", pageHeader.getNumOfValues());
} else {
logger.debug("points in the page(by batchData): " + batchData.length());
logger.debug("points in the page(by batchData): {}", batchData.length());
}
if (batchData.isEmpty()) {
......@@ -1008,7 +1007,7 @@ public class TsFileOpBlock extends AbstractOpBlock {
long lengthInChunk = min(chunkPointCount - indexInChunk, remain);
if (!sensorFullPath.equals(lastSensorFullPath)) {
if ((tvPairList != null) && (tvPairList.size() > 0)) {
if ((tvPairList != null) && (!tvPairList.isEmpty())) {
insertToDataList(dataList, lastSensorFullPath, tvPairList);
tvPairList = null;
}
......@@ -1037,7 +1036,7 @@ public class TsFileOpBlock extends AbstractOpBlock {
}
}
if ((tvPairList != null) && (tvPairList.size() > 0)) {
if ((tvPairList != null) && (!tvPairList.isEmpty())) {
insertToDataList(dataList, lastSensorFullPath, tvPairList);
}
......@@ -1174,6 +1173,7 @@ public class TsFileOpBlock extends AbstractOpBlock {
* @return
* @throws IOException
*/
@Override
public TsFileMetadata readFileMetadata() throws IOException {
if (tsFileMetaData != null) {
return tsFileMetaData;
......
......@@ -182,7 +182,7 @@ public class ExtPipePlugin {
}
alive = true;
logger.info("External pipe " + extPipeTypeName + " begin to START");
logger.info("External pipe {} begin to START", extPipeTypeName);
// == Launch pipe worker threads
executorService =
......@@ -201,7 +201,7 @@ public class ExtPipePlugin {
writerInvocationFailures = new ConcurrentHashMap<>();
logger.info("External pipe " + extPipeTypeName + " finish START.");
logger.info("External pipe {} finish START.", extPipeTypeName);
}
/** Stop all working threads */
......
......@@ -123,7 +123,7 @@ public class ExtPipePluginManager {
// == Start monitor Pipe data thread
alive = true;
ThreadPoolExecutor tpe = ((ThreadPoolExecutor) monitorService);
if ((tpe.getActiveCount() <= 0) && (tpe.getQueue().size() <= 0)) {
if ((tpe.getActiveCount() <= 0) && (tpe.getQueue().isEmpty())) {
monitorService.submit(this::monitorPipeData);
}
......@@ -221,7 +221,7 @@ public class ExtPipePluginManager {
sgName, tsFileFullName, modsFileFullName, pipeDataSerialNumber);
lastPipeDataSerialNumber = pipeDataSerialNumber;
} catch (IOException e) {
logger.error("monitorPipeData(), Can not append TsFile: {}" + tsFileFullName);
logger.error("monitorPipeData(), Can not append TsFile: {}", tsFileFullName);
}
continue;
} else if (pipeData instanceof DeletionPipeData) {
......
......@@ -34,7 +34,7 @@ public abstract class Operation {
private long startIndex;
private long endIndex;
public Operation(
protected Operation(
OperationType operationType, String storageGroup, long startIndex, long endIndex) {
this.operationType = operationType;
this.storageGroup = storageGroup;
......
......@@ -65,6 +65,7 @@ public class DeletionPipeData extends PipeData {
+ deletion.serializeWithoutFileOffset(stream);
}
@Override
public void deserialize(DataInputStream stream) throws IOException, IllegalPathException {
super.deserialize(stream);
database = ReadWriteIOUtils.readString(stream);
......
......@@ -36,9 +36,9 @@ public abstract class PipeData {
protected long serialNumber;
public PipeData() {}
protected PipeData() {}
public PipeData(long serialNumber) {
protected PipeData(long serialNumber) {
this.serialNumber = serialNumber;
}
......
......@@ -65,8 +65,8 @@ public class DeletionLoader implements ILoader {
SCHEMA_FETCHER,
IoTDBDescriptor.getInstance().getConfig().getQueryTimeoutThreshold());
if (result.status.code != TSStatusCode.SUCCESS_STATUS.getStatusCode()) {
logger.error(String.format("Delete %s error, statement: %s.", deletion, statement));
logger.error(String.format("Delete result status : %s.", result.status));
logger.error("Delete {} error, statement: {}.", deletion, statement);
logger.error("Delete result status : {}.", result.status);
throw new LoadFileException(
String.format("Can not execute delete statement: %s", statement));
}
......
......@@ -68,9 +68,8 @@ public class TsFileLoader implements ILoader {
SCHEMA_FETCHER,
IoTDBDescriptor.getInstance().getConfig().getQueryTimeoutThreshold());
if (result.status.code != TSStatusCode.SUCCESS_STATUS.getStatusCode()) {
logger.error(
String.format("Load TsFile %s error, statement: %s.", tsFile.getPath(), statement));
logger.error(String.format("Load TsFile result status : %s.", result.status));
logger.error("Load TsFile {} error, statement: {}.", tsFile.getPath(), statement);
logger.error("Load TsFile result status : {}.", result.status);
throw new LoadFileException(
String.format("Can not execute load TsFile statement: %s", statement));
}
......
......@@ -101,7 +101,7 @@ public class BufferedPipeDataQueue implements PipeDataQueue {
if (file.getName().endsWith(SyncConstant.PIPE_LOG_NAME_SUFFIX) && file.length() > 0) {
startNumbers.add(SyncPathUtil.getSerialNumberFromPipeLogName(file.getName()));
}
if (startNumbers.size() != 0) {
if (!startNumbers.isEmpty()) {
Collections.sort(startNumbers);
for (Long startTime : startNumbers) {
pipeLogStartNumber.offer(startTime);
......
......@@ -39,7 +39,7 @@ import java.util.stream.Collectors;
public class ExternalPipeSink implements PipeSink {
private static final Logger logger = LoggerFactory.getLogger(ExternalPipeSink.class);
private final PipeSinkType pipeSinkType = PipeSinkType.ExternalPipe;
private static final PipeSinkType pipeSinkType = PipeSinkType.ExternalPipe;
private String pipeSinkName;
private String extPipeSinkTypeName;
......
......@@ -192,7 +192,7 @@ public class TsFilePipe implements Pipe {
// put history data into PipeDataQueue
int historyTsFilesSize = historyTsFiles.size();
for (int i = 0; i < historyTsFilesSize; i++) {
long serialNumber = 1 - historyTsFilesSize + i;
long serialNumber = 1L - historyTsFilesSize + i;
File tsFile = historyTsFiles.get(i);
historyQueueMap
.get(entry.getKey())
......
......@@ -135,7 +135,7 @@ public class TsFilePipeLogger {
public void finishCollect() {
try {
if (SyncPathUtil.createFile(new File(pipeDir, SyncConstant.FINISH_COLLECT_LOCK_NAME))) {
logger.info(String.format("Create finish collecting Lock file in %s.", pipeDir));
logger.info("Create finish collecting Lock file in {}.", pipeDir);
}
} catch (IOException e) {
logger.warn(String.format("Can not make lock file in %s, because %s", pipeDir, e));
......
......@@ -225,7 +225,7 @@ public class SenderManager {
while (!Thread.currentThread().isInterrupted()) {
PipeData pipeData = pipe.take(dataRegionId);
if (!syncClient.send(pipeData)) {
logger.error(String.format("Can not transfer PipeData %s, skip it.", pipeData));
logger.error("Can not transfer PipeData {}, skip it.", pipeData);
// can do something.
SyncService.getInstance()
.recordMessage(
......
......@@ -437,8 +437,8 @@ public class ReceiverManager {
IoTDBDescriptor.getInstance().getConfig().getQueryTimeoutThreshold());
if (result.status.code != TSStatusCode.SUCCESS_STATUS.getStatusCode()
&& result.status.code != TSStatusCode.DATABASE_ALREADY_EXISTS.getStatusCode()) {
logger.error(String.format("Create Database error, statement: %s.", statement));
logger.error(String.format("Create database result status : %s.", result.status));
logger.error("Create Database error, statement: {}.", statement);
logger.error("Create database result status : {}.", result.status);
return false;
}
} catch (IllegalPathException e) {
......
......@@ -78,7 +78,7 @@ public class TsFileSelfCheckTool {
*/
public long check(String filename, boolean fastFinish)
throws IOException, TsFileStatisticsMistakesException, TsFileTimeseriesMetadataException {
logger.info("file path: " + filename);
logger.info("file path: {}", filename);
TsFileSelfCheckToolReader reader = new TsFileSelfCheckToolReader(filename);
Map<Long, Pair<Path, TimeseriesMetadata>> timeseriesMetadataMap = null;
long res = -1;
......
......@@ -99,7 +99,7 @@ public class TsFileAndModSettleTool {
if (arg.endsWith(TSFILE_SUFFIX)) { // it's a file
File f = new File(arg);
if (!f.exists()) {
logger.warn("Cannot find TsFile : " + arg);
logger.warn("Cannot find TsFile : {}", arg);
continue;
}
files.add(f);
......@@ -115,11 +115,11 @@ public class TsFileAndModSettleTool {
private static List<File> getAllFilesInOneDirBySuffix(String dirPath, String suffix) {
File dir = new File(dirPath);
if (!dir.isDirectory()) {
logger.warn("It's not a directory path : " + dirPath);
logger.warn("It's not a directory path : {}", dirPath);
return Collections.emptyList();
}
if (!dir.exists()) {
logger.warn("Cannot find Directory : " + dirPath);
logger.warn("Cannot find Directory : {}", dirPath);
return Collections.emptyList();
}
List<File> tsFiles =
......
......@@ -556,7 +556,7 @@ public class TsFileValidationTool {
}
}
}
if (seqDataDirList.size() == 0 && fileList.size() == 0) {
if (seqDataDirList.isEmpty() && fileList.isEmpty()) {
System.out.println(
"Please input correct param, which is [path of data dir] [-pd = print details or not] [-f = path of outFile]. Eg: xxx/iotdb/data/data -pd=true -f=xxx/TsFile_validation_view.txt");
return false;
......
......@@ -87,7 +87,7 @@ public class TriggerExecutor {
LOGGER.warn(
"Trigger {} was fired with wrong event {}",
triggerInformation.getTriggerName(),
triggerInformation.getEvent().toString());
triggerInformation.getEvent());
}
return true;
}
......
......@@ -295,7 +295,7 @@ public class TriggerFireVisitor extends PlanVisitor<TriggerFireResult, TriggerEv
TriggerManagementService.getInstance().getMatchedTriggerListForPath(device, measurements);
boolean isAllEmpty = true;
for (List<String> triggerNameList : triggerNameLists) {
if (triggerNameList.size() != 0) {
if (!triggerNameList.isEmpty()) {
isAllEmpty = false;
break;
}
......@@ -350,7 +350,7 @@ public class TriggerFireVisitor extends PlanVisitor<TriggerFireResult, TriggerEv
LOGGER.warn(
"Error occurred when trying to fire trigger({}) on TEndPoint: {}, the cause is: {}",
triggerName,
tDataNodeLocation.getInternalEndPoint().toString(),
tDataNodeLocation.getInternalEndPoint(),
e);
// update TDataNodeLocation of stateful trigger through config node
updateLocationOfStatefulTrigger(triggerName, tDataNodeLocation.getDataNodeId());
......@@ -361,7 +361,7 @@ public class TriggerFireVisitor extends PlanVisitor<TriggerFireResult, TriggerEv
LOGGER.warn(
"Error occurred when trying to fire trigger({}) on TEndPoint: {}, the cause is: {}",
triggerName,
tDataNodeLocation.getInternalEndPoint().toString(),
tDataNodeLocation.getInternalEndPoint(),
e);
// do not retry if it is not due to bad network or no executor found
return TriggerManagementService.getInstance()
......@@ -407,14 +407,13 @@ public class TriggerFireVisitor extends PlanVisitor<TriggerFireResult, TriggerEv
CONFIG_NODE_CLIENT_MANAGER.borrowClient(ConfigNodeInfo.configNodeRegionId)) {
TDataNodeLocation newTDataNodeLocation =
configNodeClient.getLocationOfStatefulTrigger(triggerName).getDataNodeLocation();
if (newTDataNodeLocation != null) {
if (currentDataNodeId != newTDataNodeLocation.getDataNodeId()) {
if (newTDataNodeLocation != null
&& currentDataNodeId != newTDataNodeLocation.getDataNodeId()) {
// indicates that the location of this stateful trigger has changed
TriggerManagementService.getInstance()
.updateLocationOfStatefulTrigger(triggerName, newTDataNodeLocation);
return true;
}
}
return false;
} catch (ClientManagerException | TException | IOException e) {
LOGGER.error(
......
......@@ -55,7 +55,7 @@ public class TriggerInformationUpdater {
private Future<?> updateFuture;
private static final long UPDATE_INTERVAL = 1000 * 60;
private static final long UPDATE_INTERVAL = 1000L * 60;
public void startTriggerInformationUpdater() {
if (updateFuture == null) {
......
......@@ -854,9 +854,9 @@ public abstract class AlignedTVList extends TVList {
return size;
}
// time array mem size
size += (long) PrimitiveArrayManager.ARRAY_SIZE * 8L;
size += PrimitiveArrayManager.ARRAY_SIZE * 8L;
// index array mem size
size += (long) PrimitiveArrayManager.ARRAY_SIZE * 4L;
size += PrimitiveArrayManager.ARRAY_SIZE * 4L;
// array headers mem size
size += (long) NUM_BYTES_ARRAY_HEADER * (2 + types.length);
// Object references size in ArrayList
......
......@@ -72,7 +72,7 @@ public class FixedPriorityBlockingQueue<T> {
final ReentrantLock lock = this.lock;
lock.lockInterruptibly();
try {
while (queue.size() == 0) {
while (queue.isEmpty()) {
notEmpty.await();
}
return queue.pollFirst();
......@@ -92,7 +92,7 @@ public class FixedPriorityBlockingQueue<T> {
final ReentrantLock lock = this.lock;
lock.lockInterruptibly();
try {
while (queue.size() == 0) {
while (queue.isEmpty()) {
notEmpty.await();
}
return queue.pollLast();
......
......@@ -63,7 +63,7 @@ public abstract class TVList implements WALEntryValue {
protected AtomicInteger referenceCount;
private long version;
public TVList() {
protected TVList() {
timestamps = new ArrayList<>();
rowCount = 0;
maxTime = Long.MIN_VALUE;
......
......@@ -42,7 +42,7 @@ public abstract class AbstractNodeAllocationStrategy implements NodeAllocationSt
/** manage wal folders */
protected FolderManager folderManager;
public AbstractNodeAllocationStrategy() {
protected AbstractNodeAllocationStrategy() {
try {
folderManager =
new FolderManager(
......
......@@ -46,7 +46,7 @@ public abstract class AbstractWALBuffer implements IWALBuffer {
/** current wal file log writer */
protected volatile WALWriter currentWALFileWriter;
public AbstractWALBuffer(
protected AbstractWALBuffer(
String identifier, String logDirectory, long startFileVersion, long startSearchIndex)
throws FileNotFoundException {
this.identifier = identifier;
......
......@@ -56,7 +56,7 @@ public abstract class WALEntry implements SerializedSize {
*/
protected final WALFlushListener walFlushListener;
public WALEntry(long memTableId, WALEntryValue value, boolean wait) {
protected WALEntry(long memTableId, WALEntryValue value, boolean wait) {
this.memTableId = memTableId;
this.value = value;
if (value instanceof IMemTable) {
......@@ -93,6 +93,8 @@ public abstract class WALEntry implements SerializedSize {
case ROLL_WAL_LOG_WRITER_SIGNAL:
case WAL_FILE_INFO_END_MARKER:
return new WALSignalEntry(type);
default:
break;
}
// handle info
......
......@@ -44,7 +44,7 @@ public abstract class LogWriter implements ILogWriter {
protected final FileChannel logChannel;
protected long size;
public LogWriter(File logFile) throws FileNotFoundException {
protected LogWriter(File logFile) throws FileNotFoundException {
this.logFile = logFile;
this.logStream = new FileOutputStream(logFile, true);
this.logChannel = this.logStream.getChannel();
......
......@@ -64,6 +64,8 @@ public class WALFakeNode implements IWALNode {
case FAILURE:
walFlushListener.fail(cause);
break;
default:
break;
}
return walFlushListener;
}
......
......@@ -633,7 +633,7 @@ public class WALNode implements IWALNode {
}
// update iterator
if (insertNodes.size() != 0) {
if (!insertNodes.isEmpty()) {
itr = insertNodes.iterator();
return true;
}
......
......@@ -46,7 +46,7 @@ public abstract class AbstractTsFileRecoverPerformer implements Closeable {
/** this writer will be open when .resource file doesn't exist */
protected RestorableTsFileIOWriter writer;
public AbstractTsFileRecoverPerformer(TsFileResource tsFileResource) {
protected AbstractTsFileRecoverPerformer(TsFileResource tsFileResource) {
this.tsFileResource = tsFileResource;
}
......
......@@ -31,7 +31,7 @@ public abstract class AbstractResultListener {
protected volatile Status status;
protected volatile Exception cause;
public AbstractResultListener(boolean wait) {
protected AbstractResultListener(boolean wait) {
this.wait = wait;
this.status = Status.RUNNING;
this.cause = null;
......