Commit 03e17405 authored by Alexey Milovidov

Checkpoint

Parent: 603d132d
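The pattern repeated through this diff matches what clang-tidy's readability-qualified-auto check asks for: auto becomes auto * (or const auto *) wherever the initializer is a pointer, and bindings gain const where they are only read. A minimal sketch of the spelling and why it helps, assuming nothing beyond the standard library:

#include <cstring>
#include <string>
#include <vector>

int main()
{
    std::string s = "hello";
    std::vector<char> arena(s.size() + 1);

    auto * ptr = arena.data();                 // auto *: the pointer-ness is visible at a glance
    std::memcpy(ptr, s.data(), s.size() + 1);  // size() + 1 also copies the trailing '\0'
    return ptr[0] == 'h' ? 0 : 1;
}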
......@@ -195,7 +195,7 @@ struct Dictionary
const auto size = string.size();
if (size != 0)
{
auto string_ptr = string_arena->alloc(size + 1);
auto * string_ptr = string_arena->alloc(size + 1);
std::copy(string.data(), string.data() + size + 1, string_ptr);
string_ref = StringRef{string_ptr, size};
}
......@@ -260,7 +260,7 @@ int main(int argc, char ** argv)
for (const auto & s : data)
{
auto ptr = arena.alloc(s.size() + 1);
auto * ptr = arena.alloc(s.size() + 1);
memcpy(ptr, s.data(), s.size() + 1);
refs.emplace_back(ptr, s.size() + 1);
}
......@@ -286,7 +286,7 @@ int main(int argc, char ** argv)
arena.free(const_cast<char *>(refs[index_to].data), refs[index_to].size);
const auto & s = data[index_from];
auto ptr = arena.alloc(s.size() + 1);
auto * ptr = arena.alloc(s.size() + 1);
memcpy(ptr, s.data(), s.size() + 1);
bytes += s.size() + 1;
......
......@@ -293,7 +293,7 @@ void NO_INLINE test(const Key * data, size_t size, std::function<void(Map<Key, V
if (init)
init(map);
for (auto end = data + size; data < end; ++data)
for (const auto * end = data + size; data < end; ++data)
++map[*data];
watch.stop();
......
......@@ -139,7 +139,7 @@ static void aggregate3(Map & local_map, Map & global_map, Mutex & mutex, Source:
for (auto it = begin; it != end; ++it)
{
auto found = local_map.find(*it);
auto * found = local_map.find(*it);
if (found)
++found->getMapped();
......@@ -199,7 +199,7 @@ static void aggregate4(Map & local_map, MapTwoLevel & global_map, Mutex * mutexe
{
for (; it != block_end; ++it)
{
auto found = local_map.find(*it);
auto * found = local_map.find(*it);
if (found)
++found->getMapped();
......
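A note on auto * found = local_map.find(*it) in the two hunks above: find here evidently returns a pointer to the found cell (null when absent) rather than an iterator, which is why if (found) works and why the qualified auto * spelling fits. A rough standard-library analog:

#include <unordered_map>

int main()
{
    std::unordered_map<int, int> map{{1, 10}};

    // Rough analog of a find that returns a cell pointer, null when absent:
    auto it = map.find(1);
    int * found = (it != map.end()) ? &it->second : nullptr;
    if (found)
        ++*found;  // mirrors ++found->getMapped() above

    return map[1] == 11 ? 0 : 1;
}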
......@@ -619,7 +619,7 @@ void CacheDictionary::setAttributeValue(Attribute & attribute, const Key idx, co
const auto str_size = string.size();
if (str_size != 0)
{
auto string_ptr = string_arena->alloc(str_size + 1);
auto * string_ptr = string_arena->alloc(str_size + 1);
std::copy(string.data(), string.data() + str_size + 1, string_ptr);
string_ref = StringRef{string_ptr, str_size};
}
......@@ -896,7 +896,7 @@ void CacheDictionary::update(BunchUpdateUnit & bunch_update_unit) const
break;
}
const auto id_column = typeid_cast<const ColumnUInt64 *>(block.safeGetByPosition(0).column.get());
const auto * id_column = typeid_cast<const ColumnUInt64 *>(block.safeGetByPosition(0).column.get());
if (!id_column)
throw Exception{name + ": id column has type different from UInt64.", ErrorCodes::TYPE_MISMATCH};
......
......@@ -345,8 +345,8 @@ template StringRef ComplexKeyCacheDictionary::placeKeysInPool<ArenaWithFreeLists
StringRef ComplexKeyCacheDictionary::placeKeysInFixedSizePool(const size_t row, const Columns & key_columns) const
{
const auto res = fixed_size_keys_pool->alloc();
auto place = res;
auto * res = fixed_size_keys_pool->alloc();
auto * place = res;
for (const auto & key_column : key_columns)
{
......@@ -367,7 +367,7 @@ StringRef ComplexKeyCacheDictionary::copyIntoArena(StringRef src, Arena & arena)
StringRef ComplexKeyCacheDictionary::copyKey(const StringRef key) const
{
const auto res = key_size_is_fixed ? fixed_size_keys_pool->alloc() : keys_pool->alloc(key.size);
auto * res = key_size_is_fixed ? fixed_size_keys_pool->alloc() : keys_pool->alloc(key.size);
memcpy(res, key.data, key.size);
return {res, key.size};
......
......@@ -63,7 +63,7 @@ void ComplexKeyCacheDictionary::setAttributeValue(Attribute & attribute, const s
const auto str_size = string.size();
if (str_size != 0)
{
auto str_ptr = string_arena->alloc(str_size);
auto * str_ptr = string_arena->alloc(str_size);
std::copy(string.data(), string.data() + str_size, str_ptr);
string_ref = StringRef{str_ptr, str_size};
}
......
......@@ -358,7 +358,7 @@ void ComplexKeyHashedDictionary::updateData()
for (size_t i = 0; i < saved_block->rows(); ++i)
{
const auto s_key = placeKeysInPool(i, saved_key_column_ptrs, keys, temp_key_pool);
auto it = update_key_hash.find(s_key);
auto * it = update_key_hash.find(s_key);
if (it)
filter[i] = 0;
else
......@@ -619,7 +619,7 @@ bool ComplexKeyHashedDictionary::setAttributeValue(Attribute & attribute, const
{
auto & map = std::get<ContainerType<StringRef>>(attribute.maps);
const auto & string = value.get<String>();
const auto string_in_arena = attribute.string_arena->insert(string.data(), string.size());
const auto * string_in_arena = attribute.string_arena->insert(string.data(), string.size());
const auto pair = map.insert({key, StringRef{string_in_arena, string.size()}});
return pair.second;
}
......@@ -649,7 +649,7 @@ StringRef ComplexKeyHashedDictionary::placeKeysInPool(const size_t row, const Co
sum_keys_size += keys[j].size;
}
auto key_start = block_start;
const auto * key_start = block_start;
for (size_t j = 0; j < keys_size; ++j)
{
keys[j].data = key_start;
......
......@@ -605,7 +605,7 @@ template <>
void FlatDictionary::setAttributeValueImpl<String>(Attribute & attribute, const Key id, const String & value)
{
resize<StringRef>(attribute, id);
const auto string_in_arena = attribute.string_arena->insert(value.data(), value.size());
const auto * string_in_arena = attribute.string_arena->insert(value.data(), value.size());
auto & array = std::get<ContainerType<StringRef>>(attribute.arrays);
array[id] = StringRef{string_in_arena, value.size()};
loaded_ids[id] = true;
......
......@@ -667,7 +667,7 @@ bool HashedDictionary::setAttributeValue(Attribute & attribute, const Key id, co
case AttributeUnderlyingType::utString:
{
const auto & string = value.get<String>();
const auto string_in_arena = attribute.string_arena->insert(string.data(), string.size());
const auto * string_in_arena = attribute.string_arena->insert(string.data(), string.size());
if (!sparse)
{
auto & map = *std::get<CollectionPtrType<StringRef>>(attribute.maps);
......@@ -795,8 +795,10 @@ void registerDictionaryHashed(DictionaryFactory & factory)
return std::make_unique<HashedDictionary>(database, name, dict_struct, std::move(source_ptr), dict_lifetime, require_nonempty, sparse);
};
using namespace std::placeholders;
factory.registerLayout("hashed", std::bind(create_layout, _1, _2, _3, _4, _5, /* sparse = */ false), false);
factory.registerLayout("sparse_hashed", std::bind(create_layout, _1, _2, _3, _4, _5, /* sparse = */ true), false);
factory.registerLayout("hashed",
[&](auto && a, auto && b, auto && c, auto && d, DictionarySourcePtr e){ return create_layout(a, b, c, d, std::move(e), /* sparse = */ false); }, false);
factory.registerLayout("sparse_hashed",
[&](auto && a, auto && b, auto && c, auto && d, DictionarySourcePtr e){ return create_layout(a, b, c, d, std::move(e), /* sparse = */ true); }, false);
}
}
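The registerLayout change above replaces std::bind with explicit lambdas, which read better and need no placeholder machinery. The same before/after shape, with a hypothetical three-argument function standing in for create_layout:

#include <functional>

static int create(int a, int b, bool sparse) { return sparse ? a : b; }

int main()
{
    using namespace std::placeholders;

    std::function<int(int, int)> before = std::bind(create, _1, _2, /* sparse = */ true);                 // old style
    std::function<int(int, int)> after = [](int a, int b) { return create(a, b, /* sparse = */ true); };  // new style

    return before(1, 2) == after(1, 2) ? 0 : 1;
}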
......@@ -77,7 +77,7 @@ namespace
if (!data)
throw Exception("LibraryDictionarySource: No data returned", ErrorCodes::EXTERNAL_LIBRARY_ERROR);
auto columns_received = static_cast<const ClickHouseLibrary::Table *>(data);
const auto * columns_received = static_cast<const ClickHouseLibrary::Table *>(data);
if (columns_received->error_code)
throw Exception(
"LibraryDictionarySource: Returned error: " + std::to_string(columns_received->error_code) + " "
......@@ -188,7 +188,7 @@ BlockInputStreamPtr LibraryDictionarySource::loadAll()
ClickHouseLibrary::CStrings columns{static_cast<decltype(ClickHouseLibrary::CStrings::data)>(columns_holder.get()),
dict_struct.attributes.size()};
size_t i = 0;
for (auto & a : dict_struct.attributes)
for (const auto & a : dict_struct.attributes)
{
columns.data[i] = a.name.c_str();
++i;
......@@ -199,7 +199,7 @@ BlockInputStreamPtr LibraryDictionarySource::loadAll()
auto func_load_all
= library->get<void * (*)(decltype(data_ptr), decltype(&settings->strings), decltype(&columns))>("ClickHouseDictionary_v3_loadAll");
data_ptr = library->get<decltype(data_ptr) (*)(decltype(lib_data))>("ClickHouseDictionary_v3_dataNew")(lib_data);
auto data = func_load_all(data_ptr, &settings->strings, &columns);
auto * data = func_load_all(data_ptr, &settings->strings, &columns);
auto block = dataToBlock(description.sample_block, data);
SCOPE_EXIT(library->get<void (*)(decltype(lib_data), decltype(data_ptr))>("ClickHouseDictionary_v3_dataDelete")(lib_data, data_ptr));
return std::make_shared<OneBlockInputStream>(block);
......@@ -214,7 +214,7 @@ BlockInputStreamPtr LibraryDictionarySource::loadIds(const std::vector<UInt64> &
ClickHouseLibrary::CStrings columns_pass{static_cast<decltype(ClickHouseLibrary::CStrings::data)>(columns_holder.get()),
dict_struct.attributes.size()};
size_t i = 0;
for (auto & a : dict_struct.attributes)
for (const auto & a : dict_struct.attributes)
{
columns_pass.data[i] = a.name.c_str();
++i;
......@@ -226,7 +226,7 @@ BlockInputStreamPtr LibraryDictionarySource::loadIds(const std::vector<UInt64> &
= library->get<void * (*)(decltype(data_ptr), decltype(&settings->strings), decltype(&columns_pass), decltype(&ids_data))>(
"ClickHouseDictionary_v3_loadIds");
data_ptr = library->get<decltype(data_ptr) (*)(decltype(lib_data))>("ClickHouseDictionary_v3_dataNew")(lib_data);
auto data = func_load_ids(data_ptr, &settings->strings, &columns_pass, &ids_data);
auto * data = func_load_ids(data_ptr, &settings->strings, &columns_pass, &ids_data);
auto block = dataToBlock(description.sample_block, data);
SCOPE_EXIT(library->get<void (*)(decltype(lib_data), decltype(data_ptr))>("ClickHouseDictionary_v3_dataDelete")(lib_data, data_ptr));
return std::make_shared<OneBlockInputStream>(block);
......@@ -259,7 +259,7 @@ BlockInputStreamPtr LibraryDictionarySource::loadKeys(const Columns & key_column
auto func_load_keys = library->get<void * (*)(decltype(data_ptr), decltype(&settings->strings), decltype(&request_cols))>(
"ClickHouseDictionary_v3_loadKeys");
data_ptr = library->get<decltype(data_ptr) (*)(decltype(lib_data))>("ClickHouseDictionary_v3_dataNew")(lib_data);
auto data = func_load_keys(data_ptr, &settings->strings, &request_cols);
auto * data = func_load_keys(data_ptr, &settings->strings, &request_cols);
auto block = dataToBlock(description.sample_block, data);
SCOPE_EXIT(library->get<void (*)(decltype(lib_data), decltype(data_ptr))>("ClickHouseDictionary_v3_dataDelete")(lib_data, data_ptr));
return std::make_shared<OneBlockInputStream>(block);
......
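For context on the library->get<...>("ClickHouseDictionary_v3_...") calls in these hunks: they fetch C symbols from a shared library and invoke them through typed function pointers. A bare-bones sketch of that mechanism with POSIX dlopen/dlsym, using a hypothetical library and symbol name (the real symbols take the settings and column descriptors seen above):

#include <dlfcn.h>

int main()
{
    void * lib = dlopen("libmy_plugin.so", RTLD_LAZY);  // hypothetical plugin
    if (!lib)
        return 1;

    using LoadFn = void * (*)(void *);
    auto * load = reinterpret_cast<LoadFn>(dlsym(lib, "my_plugin_load"));  // hypothetical symbol
    if (load)
        load(nullptr);

    dlclose(lib);
    return 0;
}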
......@@ -237,8 +237,8 @@ std::vector<IPolygonDictionary::Point> IPolygonDictionary::extractPoints(const C
{
if (key_columns.size() != 2)
throw Exception{"Expected two columns of coordinates", ErrorCodes::BAD_ARGUMENTS};
const auto column_x = typeid_cast<const ColumnVector<Float64>*>(key_columns[0].get());
const auto column_y = typeid_cast<const ColumnVector<Float64>*>(key_columns[1].get());
const auto * column_x = typeid_cast<const ColumnVector<Float64>*>(key_columns[0].get());
const auto * column_y = typeid_cast<const ColumnVector<Float64>*>(key_columns[1].get());
if (!column_x || !column_y)
throw Exception{"Expected columns of Float64", ErrorCodes::TYPE_MISMATCH};
const auto rows = key_columns.front()->size();
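typeid_cast (and checkAndGetColumnConst) return nullptr when the runtime type does not match, hence the null check and typed exception after every cast in these hunks. A self-contained sketch of the checked-downcast pattern, with a hand-rolled cast standing in for typeid_cast:

#include <typeinfo>

struct IColumn { virtual ~IColumn() = default; };
struct ColumnFloat64 : IColumn {};

template <typename To>
const To * cast(const IColumn * col)
{
    // Exact dynamic-type match, like typeid_cast; nullptr otherwise.
    return typeid(*col) == typeid(To) ? static_cast<const To *>(col) : nullptr;
}

int main()
{
    ColumnFloat64 col;
    const IColumn * base = &col;

    const auto * typed = cast<ColumnFloat64>(base);  // const auto *: pointer-ness spelled out
    return typed ? 0 : 1;
}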
......@@ -534,17 +534,17 @@ void addNewPoint(Float64 x, Float64 y, Data & data, Offset & offset)
const IColumn * unrollMultiPolygons(const ColumnPtr & column, Offset & offset)
{
const auto ptr_multi_polygons = typeid_cast<const ColumnArray*>(column.get());
const auto * ptr_multi_polygons = typeid_cast<const ColumnArray*>(column.get());
if (!ptr_multi_polygons)
throw Exception{"Expected a column containing arrays of polygons", ErrorCodes::TYPE_MISMATCH};
offset.multi_polygon_offsets.assign(ptr_multi_polygons->getOffsets());
const auto ptr_polygons = typeid_cast<const ColumnArray*>(&ptr_multi_polygons->getData());
const auto * ptr_polygons = typeid_cast<const ColumnArray*>(&ptr_multi_polygons->getData());
if (!ptr_polygons)
throw Exception{"Expected a column containing arrays of rings when reading polygons", ErrorCodes::TYPE_MISMATCH};
offset.polygon_offsets.assign(ptr_polygons->getOffsets());
const auto ptr_rings = typeid_cast<const ColumnArray*>(&ptr_polygons->getData());
const auto * ptr_rings = typeid_cast<const ColumnArray*>(&ptr_polygons->getData());
if (!ptr_rings)
throw Exception{"Expected a column containing arrays of points when reading rings", ErrorCodes::TYPE_MISMATCH};
offset.ring_offsets.assign(ptr_rings->getOffsets());
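The three assign(...->getOffsets()) calls capture one nesting level each: a multipolygon is an array of polygons, a polygon an array of rings, a ring an array of points, and each ColumnArray level stores flat data plus cumulative end offsets. Decoding one such level on toy data:

#include <cstddef>
#include <iostream>
#include <vector>

int main()
{
    // One ColumnArray level in miniature: [[1, 2], [], [3]] is stored as
    // data = {1, 2, 3} with cumulative end offsets = {2, 2, 3}.
    std::vector<int> data = {1, 2, 3};
    std::vector<size_t> offsets = {2, 2, 3};

    size_t begin = 0;
    for (size_t end : offsets)
    {
        std::cout << "[ ";
        for (size_t i = begin; i < end; ++i)
            std::cout << data[i] << ' ';
        std::cout << "]\n";
        begin = end;
    }
    return 0;
}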
......@@ -554,7 +554,7 @@ const IColumn * unrollMultiPolygons(const ColumnPtr & column, Offset & offset)
const IColumn * unrollSimplePolygons(const ColumnPtr & column, Offset & offset)
{
const auto ptr_polygons = typeid_cast<const ColumnArray*>(column.get());
const auto * ptr_polygons = typeid_cast<const ColumnArray*>(column.get());
if (!ptr_polygons)
throw Exception{"Expected a column containing arrays of points", ErrorCodes::TYPE_MISMATCH};
offset.ring_offsets.assign(ptr_polygons->getOffsets());
......@@ -566,8 +566,8 @@ const IColumn * unrollSimplePolygons(const ColumnPtr & column, Offset & offset)
void handlePointsReprByArrays(const IColumn * column, Data & data, Offset & offset)
{
const auto ptr_points = typeid_cast<const ColumnArray*>(column);
const auto ptr_coord = typeid_cast<const ColumnVector<Float64>*>(&ptr_points->getData());
const auto * ptr_points = typeid_cast<const ColumnArray*>(column);
const auto * ptr_coord = typeid_cast<const ColumnVector<Float64>*>(&ptr_points->getData());
if (!ptr_coord)
throw Exception{"Expected coordinates to be of type Float64", ErrorCodes::TYPE_MISMATCH};
const auto & offsets = ptr_points->getOffsets();
......@@ -583,13 +583,13 @@ void handlePointsReprByArrays(const IColumn * column, Data & data, Offset & offs
void handlePointsReprByTuples(const IColumn * column, Data & data, Offset & offset)
{
const auto ptr_points = typeid_cast<const ColumnTuple*>(column);
const auto * ptr_points = typeid_cast<const ColumnTuple*>(column);
if (!ptr_points)
throw Exception{"Expected a column of tuples representing points", ErrorCodes::TYPE_MISMATCH};
if (ptr_points->tupleSize() != 2)
throw Exception{"Points should be two-dimensional", ErrorCodes::BAD_ARGUMENTS};
const auto column_x = typeid_cast<const ColumnVector<Float64>*>(&ptr_points->getColumn(0));
const auto column_y = typeid_cast<const ColumnVector<Float64>*>(&ptr_points->getColumn(1));
const auto * column_x = typeid_cast<const ColumnVector<Float64>*>(&ptr_points->getColumn(0));
const auto * column_y = typeid_cast<const ColumnVector<Float64>*>(&ptr_points->getColumn(1));
if (!column_x || !column_y)
throw Exception{"Expected coordinates to be of type Float64", ErrorCodes::TYPE_MISMATCH};
for (size_t i = 0; i < column_x->size(); ++i)
......
......@@ -127,7 +127,7 @@ void RangeHashedDictionary::getString(
for (const auto i : ext::range(0, ids.size()))
{
const auto it = attr.find(ids[i]);
const auto * it = attr.find(ids[i]);
if (it)
{
const auto date = dates[i];
......@@ -407,11 +407,11 @@ void RangeHashedDictionary::getItemsImpl(
return v.range.contains(date);
});
out[i] = static_cast<OutputType>(val_it != std::end(ranges_and_values) ? val_it->value : null_value);
out[i] = static_cast<OutputType>(val_it != std::end(ranges_and_values) ? val_it->value : null_value); // NOLINT
}
else
{
out[i] = static_cast<OutputType>(null_value);
out[i] = static_cast<OutputType>(null_value); // NOLINT
}
}
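The // NOLINT markers added here (and in the ArrayCumSum hunks below) suppress clang-tidy diagnostics for that single line, presumably for checks that misfire on template-dependent arithmetic. Both suppression forms, shown on a toy narrowing conversion:

int main()
{
    long value = 42;

    int a = static_cast<int>(value);  // NOLINT -- silences every clang-tidy check on this line
    int b = value;                    // NOLINT(bugprone-narrowing-conversions) -- silences one named check

    return a == b ? 0 : 1;
}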
......@@ -493,10 +493,10 @@ void RangeHashedDictionary::setAttributeValue(Attribute & attribute, const Key i
{
auto & map = *std::get<Ptr<StringRef>>(attribute.maps);
const auto & string = value.get<String>();
const auto string_in_arena = attribute.string_arena->insert(string.data(), string.size());
const auto * string_in_arena = attribute.string_arena->insert(string.data(), string.size());
const StringRef string_ref{string_in_arena, string.size()};
const auto it = map.find(id);
auto * it = map.find(id);
if (it)
{
......@@ -649,7 +649,7 @@ struct RangeHashedDIctionaryCallGetBlockInputStreamImpl
template <typename RangeType, size_t>
void operator()()
{
auto & type = dict->dict_struct.range_min->type;
const auto & type = dict->dict_struct.range_min->type;
if (!stream && dynamic_cast<const DataTypeNumberBase<RangeType> *>(type.get()))
stream = dict->getBlockInputStreamImpl<RangeType>(*column_names, max_block_size);
}
......
......@@ -164,7 +164,7 @@ namespace DB
RedisArray keys;
auto key_type = storageTypeToKeyType(storage_type);
for (auto & key : all_keys)
for (const auto & key : all_keys)
if (key_type == client->execute<String>(RedisCommand("TYPE").addRedisType(key)))
keys.addRedisType(std::move(key));
......
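One side effect worth flagging in this Redis hunk: with key now bound as const auto &, the std::move(key) that follows yields a const rvalue, which cannot bind to a move constructor, so addRedisType receives a copy rather than a move (clang-tidy's performance-move-const-arg flags exactly this). The pitfall in miniature:

#include <string>
#include <utility>
#include <vector>

int main()
{
    std::vector<std::string> out;
    const std::string key = "some key";

    // std::move on a const lvalue produces `const std::string &&`, which the
    // move constructor cannot take; push_back silently falls back to copying.
    out.push_back(std::move(key));  // NOLINT(performance-move-const-arg)

    return key.empty() ? 1 : 0;  // key is intact: it was copied, not moved
}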
......@@ -618,7 +618,7 @@ bool TrieDictionary::setAttributeValue(Attribute & attribute, const StringRef ke
case AttributeUnderlyingType::utString:
{
const auto & string = value.get<String>();
const auto string_in_arena = attribute.string_arena->insert(string.data(), string.size());
const auto * string_in_arena = attribute.string_arena->insert(string.data(), string.size());
setAttributeValueImpl<StringRef>(attribute, key, StringRef{string_in_arena, string.size()});
return true;
}
......
......@@ -42,7 +42,7 @@ DataTypePtr FunctionModelEvaluate::getReturnTypeImpl(const ColumnsWithTypeAndNam
throw Exception("Illegal type " + arguments[0].type->getName() + " of first argument of function " + getName()
+ ", expected a string.", ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT);
const auto name_col = checkAndGetColumnConst<ColumnString>(arguments[0].column.get());
const auto * name_col = checkAndGetColumnConst<ColumnString>(arguments[0].column.get());
if (!name_col)
throw Exception("First argument of function " + getName() + " must be a constant string",
ErrorCodes::ILLEGAL_COLUMN);
......@@ -56,7 +56,7 @@ DataTypePtr FunctionModelEvaluate::getReturnTypeImpl(const ColumnsWithTypeAndNam
if (has_nullable)
{
if (auto * tuple = typeid_cast<const DataTypeTuple *>(type.get()))
if (const auto * tuple = typeid_cast<const DataTypeTuple *>(type.get()))
{
auto elements = tuple->getElements();
for (auto & element : elements)
......@@ -73,7 +73,7 @@ DataTypePtr FunctionModelEvaluate::getReturnTypeImpl(const ColumnsWithTypeAndNam
void FunctionModelEvaluate::executeImpl(Block & block, const ColumnNumbers & arguments, size_t result, size_t /*input_rows_count*/)
{
const auto name_col = checkAndGetColumnConst<ColumnString>(block.getByPosition(arguments[0]).column.get());
const auto * name_col = checkAndGetColumnConst<ColumnString>(block.getByPosition(arguments[0]).column.get());
if (!name_col)
throw Exception("First argument of function " + getName() + " must be a constant string",
ErrorCodes::ILLEGAL_COLUMN);
......@@ -94,7 +94,7 @@ void FunctionModelEvaluate::executeImpl(Block & block, const ColumnNumbers & arg
materialized_columns.push_back(full_column);
columns.back() = full_column.get();
}
if (auto * col_nullable = checkAndGetColumn<ColumnNullable>(*columns.back()))
if (const auto * col_nullable = checkAndGetColumn<ColumnNullable>(*columns.back()))
{
if (!null_map)
null_map = col_nullable->getNullMapColumnPtr();
......@@ -120,7 +120,7 @@ void FunctionModelEvaluate::executeImpl(Block & block, const ColumnNumbers & arg
if (null_map)
{
if (auto * tuple = typeid_cast<const ColumnTuple *>(res.get()))
if (const auto * tuple = typeid_cast<const ColumnTuple *>(res.get()))
{
auto nested = tuple->getColumns();
for (auto & col : nested)
......
......@@ -42,7 +42,7 @@ struct ArraySinkCreator<>
std::unique_ptr<IArraySink> createArraySink(ColumnArray & col, size_t column_size)
{
using Creator = ApplyTypeListForClass<ArraySinkCreator, TypeListNumbers>::Type;
if (auto column_nullable = typeid_cast<ColumnNullable *>(&col.getData()))
if (auto * column_nullable = typeid_cast<ColumnNullable *>(&col.getData()))
{
auto column = ColumnArray::create(column_nullable->getNestedColumnPtr()->assumeMutable(), col.getOffsetsPtr()->assumeMutable());
return Creator::create(*column, &column_nullable->getNullMapData(), column_size);
......
......@@ -54,7 +54,7 @@ struct ArraySourceCreator<>
std::unique_ptr<IArraySource> createArraySource(const ColumnArray & col, bool is_const, size_t total_rows)
{
using Creator = typename ApplyTypeListForClass<ArraySourceCreator, TypeListNumbers>::Type;
if (auto column_nullable = typeid_cast<const ColumnNullable *>(&col.getData()))
if (const auto * column_nullable = typeid_cast<const ColumnNullable *>(&col.getData()))
{
auto column = ColumnArray::create(column_nullable->getNestedColumnPtr(), col.getOffsetsPtr());
return Creator::create(*column, &column_nullable->getNullMapData(), is_const, total_rows);
......
......@@ -54,7 +54,7 @@ struct ValueSourceCreator<>
std::unique_ptr<IValueSource> createValueSource(const IColumn & col, bool is_const, size_t total_rows)
{
using Creator = typename ApplyTypeListForClass<ValueSourceCreator, TypeListNumbers>::Type;
if (auto column_nullable = typeid_cast<const ColumnNullable *>(&col))
if (const auto * column_nullable = typeid_cast<const ColumnNullable *>(&col))
{
return Creator::create(column_nullable->getNestedColumn(), &column_nullable->getNullMapData(), is_const, total_rows);
}
......
......@@ -31,7 +31,7 @@ struct ArrayAllImpl
if (!column_filter)
{
auto column_filter_const = checkAndGetColumnConst<ColumnUInt8>(&*mapped);
const auto * column_filter_const = checkAndGetColumnConst<ColumnUInt8>(&*mapped);
if (!column_filter_const)
throw Exception("Unexpected type of filter column", ErrorCodes::ILLEGAL_COLUMN);
......
......@@ -40,7 +40,7 @@ public:
for (auto i : ext::range(0, arguments.size()))
{
auto array_type = typeid_cast<const DataTypeArray *>(arguments[i].get());
const auto * array_type = typeid_cast<const DataTypeArray *>(arguments[i].get());
if (!array_type)
throw Exception("Argument " + std::to_string(i) + " for function " + getName() + " must be an array but it has type "
+ arguments[i]->getName() + ".", ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT);
......@@ -83,13 +83,13 @@ public:
{
bool is_const = false;
if (auto argument_column_const = typeid_cast<const ColumnConst *>(argument_column.get()))
if (const auto * argument_column_const = typeid_cast<const ColumnConst *>(argument_column.get()))
{
is_const = true;
argument_column = argument_column_const->getDataColumnPtr();
}
if (auto argument_column_array = typeid_cast<const ColumnArray *>(argument_column.get()))
if (const auto * argument_column_array = typeid_cast<const ColumnArray *>(argument_column.get()))
sources.emplace_back(GatherUtils::createArraySource(*argument_column_array, is_const, rows));
else
throw Exception{"Arguments for function " + getName() + " must be arrays.", ErrorCodes::LOGICAL_ERROR};
......
......@@ -31,7 +31,7 @@ struct ArrayCountImpl
if (!column_filter)
{
auto column_filter_const = checkAndGetColumnConst<ColumnUInt8>(&*mapped);
const auto * column_filter_const = checkAndGetColumnConst<ColumnUInt8>(&*mapped);
if (!column_filter_const)
throw Exception("Unexpected type of filter column", ErrorCodes::ILLEGAL_COLUMN);
......
......@@ -82,7 +82,7 @@ struct ArrayCumSumImpl
// skip empty arrays
if (pos < offset)
{
res_values[pos++] = x;
res_values[pos++] = x; // NOLINT
for (; pos < offset; ++pos)
res_values[pos] = res_values[pos - 1] + x;
}
......@@ -110,7 +110,7 @@ struct ArrayCumSumImpl
// skip empty arrays
if (pos < offset)
{
res_values[pos] = data[pos];
res_values[pos] = data[pos]; // NOLINT
for (++pos; pos < offset; ++pos)
res_values[pos] = res_values[pos - 1] + data[pos];
}
......
......@@ -78,7 +78,7 @@ struct ArrayCumSumNonNegativeImpl
// skip empty arrays
if (pos < offset)
{
accum_sum = data[pos] > 0 ? data[pos] : Element(0);
accum_sum = data[pos] > 0 ? data[pos] : Element(0); // NOLINT
res_values[pos] = accum_sum;
for (++pos; pos < offset; ++pos)
{
......
......@@ -71,7 +71,7 @@ private:
{
ParserSelectQuery parser;
std::string message;
auto text = query.data();
const auto * text = query.data();
if (ASTPtr ast = tryParseQuery(parser, text, text + query.size(), message, false, "", false, 0, 0))
return ast;
throw Exception(message, ErrorCodes::SYNTAX_ERROR);
......
......@@ -75,7 +75,7 @@ void TabSeparatedRowOutputFormat::writeBeforeExtremes()
void registerOutputFormatProcessorTabSeparated(FormatFactory & factory)
{
for (auto * name : {"TabSeparated", "TSV"})
for (const auto * name : {"TabSeparated", "TSV"})
{
factory.registerOutputFormatProcessor(name, [](
WriteBuffer & buf,
......@@ -87,7 +87,7 @@ void registerOutputFormatProcessorTabSeparated(FormatFactory & factory)
});
}
for (auto * name : {"TabSeparatedRaw", "TSVRaw"})
for (const auto * name : {"TabSeparatedRaw", "TSVRaw"})
{
factory.registerOutputFormatProcessor(name, [](
WriteBuffer & buf,
......@@ -99,7 +99,7 @@ void registerOutputFormatProcessorTabSeparated(FormatFactory & factory)
});
}
for (auto * name : {"TabSeparatedWithNames", "TSVWithNames"})
for (const auto * name : {"TabSeparatedWithNames", "TSVWithNames"})
{
factory.registerOutputFormatProcessor(name, [](
WriteBuffer & buf,
......@@ -111,7 +111,7 @@ void registerOutputFormatProcessorTabSeparated(FormatFactory & factory)
});
}
for (auto * name : {"TabSeparatedWithNamesAndTypes", "TSVWithNamesAndTypes"})
for (const auto * name : {"TabSeparatedWithNamesAndTypes", "TSVWithNamesAndTypes"})
{
factory.registerOutputFormatProcessor(name, [](
WriteBuffer & buf,
......
......@@ -113,7 +113,7 @@ static AggregatingSortedAlgorithm::ColumnsDefinition defineColumns(
continue;
}
if (auto simple = dynamic_cast<const DataTypeCustomSimpleAggregateFunction *>(column.type->getCustomName()))
if (const auto * simple = dynamic_cast<const DataTypeCustomSimpleAggregateFunction *>(column.type->getCustomName()))
{
auto type = recursiveRemoveLowCardinality(column.type);
if (type.get() == column.type.get())
......@@ -141,9 +141,9 @@ static MutableColumns getMergedColumns(const Block & header, const AggregatingSo
MutableColumns columns;
columns.resize(header.columns());
for (auto & desc : def.columns_to_simple_aggregate)
for (const auto & desc : def.columns_to_simple_aggregate)
{
auto & type = desc.nested_type ? desc.nested_type
const auto & type = desc.nested_type ? desc.nested_type
: desc.real_type;
columns[desc.column_number] = type->createColumn();
}
......@@ -164,7 +164,7 @@ static void preprocessChunk(Chunk & chunk, const AggregatingSortedAlgorithm::Col
for (auto & column : columns)
column = column->convertToFullColumnIfConst();
for (auto & desc : def.columns_to_simple_aggregate)
for (const auto & desc : def.columns_to_simple_aggregate)
if (desc.nested_type)
columns[desc.column_number] = recursiveRemoveLowCardinality(columns[desc.column_number]);
......@@ -177,12 +177,12 @@ static void postprocessChunk(Chunk & chunk, const AggregatingSortedAlgorithm::Co
size_t num_rows = chunk.getNumRows();
auto columns = chunk.detachColumns();
for (auto & desc : def.columns_to_simple_aggregate)
for (const auto & desc : def.columns_to_simple_aggregate)
{
if (desc.nested_type)
{
auto & from_type = desc.nested_type;
auto & to_type = desc.real_type;
const auto & from_type = desc.nested_type;
const auto & to_type = desc.real_type;
columns[desc.column_number] = recursiveTypeConversion(columns[desc.column_number], from_type, to_type);
}
}
......
......@@ -86,7 +86,7 @@ struct SummingSortedAlgorithm::AggregateDescription
static bool isInPrimaryKey(const SortDescription & description, const std::string & name, const size_t number)
{
for (auto & desc : description)
for (const auto & desc : description)
if (desc.column_name == name || (desc.column_name.empty() && desc.column_number == number))
return true;
......@@ -343,7 +343,7 @@ static MutableColumns getMergedDataColumns(
size_t num_columns = def.column_numbers_not_to_aggregate.size() + def.columns_to_aggregate.size();
columns.reserve(num_columns);
for (auto & desc : def.columns_to_aggregate)
for (const auto & desc : def.columns_to_aggregate)
{
// Wrap aggregated columns in a tuple to match function signature
if (!desc.is_agg_func_type && isTuple(desc.function->getReturnType()))
......@@ -359,7 +359,7 @@ static MutableColumns getMergedDataColumns(
columns.emplace_back(header.safeGetByPosition(desc.column_numbers[0]).column->cloneEmpty());
}
for (auto & column_number : def.column_numbers_not_to_aggregate)
for (const auto & column_number : def.column_numbers_not_to_aggregate)
columns.emplace_back(header.safeGetByPosition(column_number).type->createColumn());
return columns;
......@@ -386,7 +386,7 @@ static void postprocessChunk(
Columns res_columns(num_result_columns);
size_t next_column = 0;
for (auto & desc : def.columns_to_aggregate)
for (const auto & desc : def.columns_to_aggregate)
{
auto column = std::move(columns[next_column]);
++next_column;
......
......@@ -41,7 +41,7 @@ void MergingSortedTransform::onFinish()
if (quiet)
return;
auto & merged_data = algorithm.getMergedData();
const auto & merged_data = algorithm.getMergedData();
auto * log = &Logger::get("MergingSortedTransform");
......
......@@ -20,7 +20,7 @@ SourceFromInputStream::SourceFromInputStream(BlockInputStreamPtr stream_, bool f
void SourceFromInputStream::init()
{
auto & sample = getPort().getHeader();
const auto & sample = getPort().getHeader();
for (auto & type : sample.getDataTypes())
if (typeid_cast<const DataTypeAggregateFunction *>(type.get()))
has_aggregate_functions = true;
......@@ -115,7 +115,7 @@ void SourceFromInputStream::work()
if (rows_before_limit)
{
auto & info = stream->getProfileInfo();
const auto & info = stream->getProfileInfo();
if (info.hasAppliedLimit())
rows_before_limit->add(info.getRowsBeforeLimit());
}
......@@ -147,7 +147,7 @@ Chunk SourceFromInputStream::generate()
{
if (rows_before_limit)
{
auto & info = stream->getProfileInfo();
const auto & info = stream->getProfileInfo();
if (info.hasAppliedLimit())
rows_before_limit->add(info.getRowsBeforeLimit());
}
......
......@@ -39,8 +39,8 @@ ConvertingTransform::ConvertingTransform(
: ISimpleTransform(std::move(source_header_), std::move(result_header_), false)
, conversion(getOutputPort().getHeader().columns())
{
auto & source = getInputPort().getHeader();
auto & result = getOutputPort().getHeader();
const auto & source = getInputPort().getHeader();
const auto & result = getOutputPort().getHeader();
size_t num_input_columns = source.columns();
size_t num_result_columns = result.columns();
......@@ -71,9 +71,9 @@ ConvertingTransform::ConvertingTransform(
/// Check constants.
if (auto * res_const = typeid_cast<const ColumnConst *>(res_elem.column.get()))
if (const auto * res_const = typeid_cast<const ColumnConst *>(res_elem.column.get()))
{
if (auto * src_const = typeid_cast<const ColumnConst *>(src_elem.column.get()))
if (const auto * src_const = typeid_cast<const ColumnConst *>(src_elem.column.get()))
{
if (res_const->getField() != src_const->getField())
throw Exception("Cannot convert column " + backQuoteIfNeed(res_elem.name) + " because "
......@@ -94,8 +94,8 @@ ConvertingTransform::ConvertingTransform(
void ConvertingTransform::transform(Chunk & chunk)
{
auto & source = getInputPort().getHeader();
auto & result = getOutputPort().getHeader();
const auto & source = getInputPort().getHeader();
const auto & result = getOutputPort().getHeader();
auto num_rows = chunk.getNumRows();
auto src_columns = chunk.detachColumns();
......
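The recurring auto & to const auto & changes in these transform hunks document read-only intent, and the const form would even keep compiling if an accessor ever returned by value, since a const reference extends a temporary's lifetime. In miniature, with a hypothetical by-value variant:

#include <string>

struct Port
{
    std::string header = "sample";
    const std::string & getHeader() const { return header; }
    std::string getHeaderCopy() const { return header; }  // hypothetical by-value variant
};

int main()
{
    Port port;

    const auto & a = port.getHeader();      // read-only view, no copy
    const auto & b = port.getHeaderCopy();  // binds the temporary, lifetime extended
    // auto & c = port.getHeaderCopy();     // would not compile: non-const ref to a temporary

    return a == b ? 0 : 1;
}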
......@@ -25,7 +25,7 @@ DistinctTransform::DistinctTransform(
auto pos = columns_.empty() ? i
: header_.getPositionByName(columns_[i]);
auto & col = header_.getByPosition(pos).column;
const auto & col = header_.getByPosition(pos).column;
if (!(col && isColumnConst(*col)))
key_columns_pos.emplace_back(pos);
......
......@@ -61,7 +61,7 @@ void ExtremesTransform::transform(DB::Chunk & chunk)
return;
size_t num_columns = chunk.getNumColumns();
auto & columns = chunk.getColumns();
const auto & columns = chunk.getColumns();
if (extremes_columns.empty())
{
......
......@@ -85,7 +85,7 @@ IProcessor::Status FilterTransform::prepare()
}
void FilterTransform::removeFilterIfNeed(Chunk & chunk)
void FilterTransform::removeFilterIfNeed(Chunk & chunk) const
{
if (chunk && remove_filter_column)
chunk.erase(filter_column_position);
......
......@@ -38,7 +38,7 @@ private:
bool are_prepared_sets_initialized = false;
void removeFilterIfNeed(Chunk & chunk);
void removeFilterIfNeed(Chunk & chunk) const;
};
}
......@@ -15,7 +15,7 @@ LimitByTransform::LimitByTransform(const Block & header, size_t group_length_, s
for (const auto & name : columns)
{
auto position = header.getPositionByName(name);
auto & column = header.getByPosition(position).column;
const auto & column = header.getByPosition(position).column;
/// Ignore all constant columns.
if (!(column && isColumnConst(*column)))
......
......@@ -248,11 +248,11 @@ IProcessor::Status GroupingAggregatedTransform::prepare()
void GroupingAggregatedTransform::addChunk(Chunk chunk, size_t input)
{
auto & info = chunk.getChunkInfo();
const auto & info = chunk.getChunkInfo();
if (!info)
throw Exception("Chunk info was not set for chunk in GroupingAggregatedTransform.", ErrorCodes::LOGICAL_ERROR);
auto * agg_info = typeid_cast<const AggregatedChunkInfo *>(info.get());
const auto * agg_info = typeid_cast<const AggregatedChunkInfo *>(info.get());
if (!agg_info)
throw Exception("Chunk should have AggregatedChunkInfo in GroupingAggregatedTransform.", ErrorCodes::LOGICAL_ERROR);
......@@ -275,7 +275,7 @@ void GroupingAggregatedTransform::work()
{
if (!single_level_chunks.empty())
{
auto & header = getInputs().front().getHeader(); /// Take header from input port. Output header is empty.
const auto & header = getInputs().front().getHeader(); /// Take header from input port. Output header is empty.
auto block = header.cloneWithColumns(single_level_chunks.back().detachColumns());
single_level_chunks.pop_back();
auto blocks = params->aggregator.convertBlockToTwoLevel(block);
......@@ -302,8 +302,8 @@ MergingAggregatedBucketTransform::MergingAggregatedBucketTransform(AggregatingTr
void MergingAggregatedBucketTransform::transform(Chunk & chunk)
{
auto & info = chunk.getChunkInfo();
auto * chunks_to_merge = typeid_cast<const ChunksToMerge *>(info.get());
const auto & info = chunk.getChunkInfo();
const auto * chunks_to_merge = typeid_cast<const ChunksToMerge *>(info.get());
if (!chunks_to_merge)
throw Exception("MergingAggregatedSimpleTransform chunk must have ChunkInfo with type ChunksToMerge.",
......@@ -314,12 +314,12 @@ void MergingAggregatedBucketTransform::transform(Chunk & chunk)
BlocksList blocks_list;
for (auto & cur_chunk : *chunks_to_merge->chunks)
{
auto & cur_info = cur_chunk.getChunkInfo();
const auto & cur_info = cur_chunk.getChunkInfo();
if (!cur_info)
throw Exception("Chunk info was not set for chunk in MergingAggregatedBucketTransform.",
ErrorCodes::LOGICAL_ERROR);
auto * agg_info = typeid_cast<const AggregatedChunkInfo *>(cur_info.get());
const auto * agg_info = typeid_cast<const AggregatedChunkInfo *>(cur_info.get());
if (!agg_info)
throw Exception("Chunk should have AggregatedChunkInfo in MergingAggregatedBucketTransform.",
ErrorCodes::LOGICAL_ERROR);
......@@ -374,11 +374,11 @@ bool SortingAggregatedTransform::tryPushChunk()
void SortingAggregatedTransform::addChunk(Chunk chunk, size_t from_input)
{
auto & info = chunk.getChunkInfo();
const auto & info = chunk.getChunkInfo();
if (!info)
throw Exception("Chunk info was not set for chunk in SortingAggregatedTransform.", ErrorCodes::LOGICAL_ERROR);
auto * agg_info = typeid_cast<const AggregatedChunkInfo *>(info.get());
const auto * agg_info = typeid_cast<const AggregatedChunkInfo *>(info.get());
if (!agg_info)
throw Exception("Chunk should have AggregatedChunkInfo in SortingAggregatedTransform.", ErrorCodes::LOGICAL_ERROR);
......
......@@ -26,11 +26,11 @@ void MergingAggregatedTransform::consume(Chunk chunk)
total_input_rows += chunk.getNumRows();
++total_input_blocks;
auto & info = chunk.getChunkInfo();
const auto & info = chunk.getChunkInfo();
if (!info)
throw Exception("Chunk info was not set for chunk in MergingAggregatedTransform.", ErrorCodes::LOGICAL_ERROR);
auto * agg_info = typeid_cast<const AggregatedChunkInfo *>(info.get());
const auto * agg_info = typeid_cast<const AggregatedChunkInfo *>(info.get());
if (!agg_info)
throw Exception("Chunk should have AggregatedChunkInfo in MergingAggregatedTransform.", ErrorCodes::LOGICAL_ERROR);
......
......@@ -138,7 +138,7 @@ SortingTransform::SortingTransform(
, max_merged_block_size(max_merged_block_size_)
, limit(limit_)
{
auto & sample = inputs.front().getHeader();
const auto & sample = inputs.front().getHeader();
/// Replace column names to column position in sort_description.
for (auto & column_description : description)
......@@ -368,7 +368,7 @@ void SortingTransform::enrichChunkWithConstants(Chunk & chunk)
Columns column_with_constants;
column_with_constants.reserve(num_result_columns);
auto & header = inputs.front().getHeader();
const auto & header = inputs.front().getHeader();
size_t next_non_const_column = 0;
for (size_t i = 0; i < num_result_columns; ++i)
......
......@@ -21,7 +21,7 @@ void finalizeChunk(Chunk & chunk)
auto columns = chunk.detachColumns();
for (auto & column : columns)
if (auto * agg_function = typeid_cast<const ColumnAggregateFunction *>(column.get()))
if (const auto * agg_function = typeid_cast<const ColumnAggregateFunction *>(column.get()))
column = agg_function->convertToValues();
chunk.setColumns(std::move(columns), num_rows);
......@@ -122,11 +122,11 @@ void TotalsHavingTransform::transform(Chunk & chunk)
/// Block with values not included in `max_rows_to_group_by`. We'll postpone it.
if (overflow_row)
{
auto & info = chunk.getChunkInfo();
const auto & info = chunk.getChunkInfo();
if (!info)
throw Exception("Chunk info was not set for chunk in TotalsHavingTransform.", ErrorCodes::LOGICAL_ERROR);
auto * agg_info = typeid_cast<const AggregatedChunkInfo *>(info.get());
const auto * agg_info = typeid_cast<const AggregatedChunkInfo *>(info.get());
if (!agg_info)
throw Exception("Chunk should have AggregatedChunkInfo in TotalsHavingTransform.", ErrorCodes::LOGICAL_ERROR);
......@@ -154,7 +154,7 @@ void TotalsHavingTransform::transform(Chunk & chunk)
else
{
/// Compute the expression in HAVING.
auto & cur_header = final ? finalized_header : getInputPort().getHeader();
const auto & cur_header = final ? finalized_header : getInputPort().getHeader();
auto finalized_block = cur_header.cloneWithColumns(finalized.detachColumns());
expression->execute(finalized_block);
auto columns = finalized_block.getColumns();
......
......@@ -19,7 +19,7 @@ NamesAndTypesList StorageSystemBuildOptions::getNamesAndTypes()
void StorageSystemBuildOptions::fillData(MutableColumns & res_columns, const Context &, const SelectQueryInfo &) const
{
#if !defined(ARCADIA_BUILD)
for (auto it = auto_config_build; *it; it += 2)
for (auto * it = auto_config_build; *it; it += 2)
{
res_columns[0]->insert(it[0]);
res_columns[1]->insert(it[1]);
......
......@@ -19,7 +19,7 @@ NamesAndTypesList StorageSystemContributors::getNamesAndTypes()
void StorageSystemContributors::fillData(MutableColumns & res_columns, const Context &, const SelectQueryInfo &) const
{
std::vector<const char *> contributors;
for (auto it = auto_contributors; *it; ++it)
for (auto * it = auto_contributors; *it; ++it)
contributors.emplace_back(*it);
std::shuffle(contributors.begin(), contributors.end(), thread_local_rng);
......
......@@ -60,7 +60,7 @@ protected:
while (StoragesInfo info = stream.next())
{
const auto parts = info.data->getDetachedParts();
for (auto & p : parts)
for (const auto & p : parts)
{
size_t i = 0;
new_columns[i++]->insert(info.database);
......
......@@ -38,7 +38,7 @@ static StorageSystemGraphite::Configs getConfigs(const Context & context)
for (auto iterator = db.second->getTablesIterator(context); iterator->isValid(); iterator->next())
{
auto & table = iterator->table();
const auto & table = iterator->table();
const MergeTreeData * table_data = dynamic_cast<const MergeTreeData *>(table.get());
if (!table_data)
......