Commit ece87075 authored by bigsheeper, committed by yefu.chen

Fix query node schema

Signed-off-by: bigsheeper <yihao.dai@zilliz.com>
Parent 8c260e81
......@@ -31,7 +31,7 @@ Collection::parse() {
}
auto schema = std::make_shared<Schema>();
for (const milvus::grpc::FieldMeta & child: collection.schema().field_metas()){
std::cout<<"add Field, name :" << child.field_name() << std::endl;
std::cout<<"add Field, name :" << child.field_name() << ", datatype :" << child.type() << ", dim :" << int(child.dim()) << std::endl;
schema->AddField(std::string_view(child.field_name()), DataType {child.type()}, int(child.dim()));
}
/*
......
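Editor's note on this hunk: the loop consumes a plain-text proto in which each field_metas block carries field_name, type, and now dim, which AddField forwards into the Schema. A minimal Go sketch of what that string looks like unescaped, assuming the same format as the schemaString in TestSegment_RealSchemaTest further down (the literal values are taken from that test):

    package main

    import "fmt"

    func main() {
        // Readable form of the escaped schema string the test passes to
        // NewCollection: an INT32 "age" field (dim 1) and a 16-dim
        // VECTOR_FLOAT field named "field_1".
        schemaString := `id: 6873737669791618215
    name: "collection0"
    schema: <
      field_metas: <
        field_name: "age"
        type: INT32
        dim: 1
      >
      field_metas: <
        field_name: "field_1"
        type: VECTOR_FLOAT
        dim: 16
      >
    >
    create_time: 1600416765
    segment_ids: 6873737669791618215
    partition_tags: "default"
    `
        fmt.Println(schemaString)
    }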
......@@ -161,6 +161,10 @@ class Schema {
}
}
const std::vector<FieldMeta>& get_fields() {
return fields_;
}
const FieldMeta&
operator[](const std::string& field_name) const {
auto offset_iter = offsets_.find(field_name);
......
......@@ -19,8 +19,20 @@ TestABI() {
std::unique_ptr<SegmentBase>
CreateSegment(SchemaPtr schema, IndexMetaPtr remote_index_meta) {
if (remote_index_meta == nullptr) {
int dim = 0;
std::string index_field_name;
for (auto& field: schema->get_fields()) {
if (field.get_data_type() == DataType::VECTOR_FLOAT) {
dim = field.get_dim();
index_field_name = field.get_name();
}
}
assert(dim != 0);
assert(!index_field_name.empty());
auto index_meta = std::make_shared<IndexMeta>(schema);
auto dim = schema->operator[]("fakevec").get_dim();
// TODO: this is merge of query conf and insert conf
// TODO: should be split into multiple configs
auto conf = milvus::knowhere::Config{
......@@ -32,7 +44,7 @@ CreateSegment(SchemaPtr schema, IndexMetaPtr remote_index_meta) {
{milvus::knowhere::Metric::TYPE, milvus::knowhere::Metric::L2},
{milvus::knowhere::meta::DEVICEID, 0},
};
index_meta->AddEntry("fakeindex", "fakevec", knowhere::IndexEnum::INDEX_FAISS_IVFPQ,
index_meta->AddEntry("fakeindex", index_field_name, knowhere::IndexEnum::INDEX_FAISS_IVFPQ,
knowhere::IndexMode::MODE_CPU, conf);
remote_index_meta = index_meta;
}
......@@ -141,7 +153,11 @@ Status
SegmentNaive::Insert(int64_t reserved_begin, int64_t size, const int64_t *uids_raw, const Timestamp *timestamps_raw,
const DogDataChunk &entities_raw) {
assert(entities_raw.count == size);
assert(entities_raw.sizeof_per_row == schema_->get_total_sizeof());
if (entities_raw.sizeof_per_row != schema_->get_total_sizeof()) {
std::string msg = "entity length = " + std::to_string(entities_raw.sizeof_per_row) +
", schema length = " + std::to_string(schema_->get_total_sizeof());
throw std::runtime_error(msg);
}
auto raw_data = reinterpret_cast<const char *>(entities_raw.raw_data);
// std::vector<char> entities(raw_data, raw_data + size * len_per_row);
......
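Editor's note on this hunk: SegmentNaive::Insert now raises a runtime_error that names both lengths when a row's byte size disagrees with schema_->get_total_sizeof(), instead of failing a hard assert. A minimal Go sketch, assuming rows follow the layout used by TestSegment_RealSchemaTest below (16 little-endian float32 values for the vector field followed by one int32, i.e. 16*4 + 4 = 68 bytes per row); the rowSize helper is illustrative only, not part of the query node API:

    package main

    import (
        "encoding/binary"
        "fmt"
        "math"
    )

    // rowSize returns the expected bytes per row for this schema: one
    // float32 per vector dimension plus 4 bytes for the int32 field.
    func rowSize(vectorDim int) int {
        return vectorDim*4 + 4
    }

    func main() {
        const dim = 16
        var row []byte
        // Pack the 16-dim float vector, little-endian, as the test does.
        for i := 0; i < dim; i++ {
            buf := make([]byte, 4)
            binary.LittleEndian.PutUint32(buf, math.Float32bits(float32(i+1)))
            row = append(row, buf...)
        }
        // Append the trailing int32 field ("age").
        age := make([]byte, 4)
        binary.LittleEndian.PutUint32(age, 1)
        row = append(row, age...)

        // If this does not match the schema's total size, Insert now reports
        // both lengths in the error message rather than aborting.
        fmt.Println(len(row) == rowSize(dim)) // true: 68 bytes
    }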
......@@ -13,7 +13,7 @@ import (
func TestIndex_BuildIndex(t *testing.T) {
// 1. Construct node, collection, partition and segment
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", "fake schema")
var collection = node.NewCollection(0, "collection0", "")
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
......
......@@ -10,7 +10,7 @@ import (
func TestResult_PublishSearchResult(t *testing.T) {
// Construct node, collection, partition and segment
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", "fake schema")
var collection = node.NewCollection(0, "collection0", "")
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
node.SegmentsMap[0] = segment
......@@ -34,7 +34,7 @@ func TestResult_PublishSearchResult(t *testing.T) {
func TestResult_PublishFailedSearchResult(t *testing.T) {
// Construct node, collection, partition and segment
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", "fake schema")
var collection = node.NewCollection(0, "collection0", "")
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
node.SegmentsMap[0] = segment
......@@ -46,7 +46,7 @@ func TestResult_PublishFailedSearchResult(t *testing.T) {
func TestResult_PublicStatistic(t *testing.T) {
// Construct node, collection, partition and segment
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", "fake schema")
var collection = node.NewCollection(0, "collection0", "")
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
node.SegmentsMap[0] = segment
......
......@@ -77,7 +77,7 @@ func (s *Segment) Close() error {
}
// Build index after closing segment
go s.buildIndex()
// go s.buildIndex()
return nil
}
......
......@@ -7,7 +7,7 @@ import (
func TestSegmentManagement_SegmentsManagement(t *testing.T) {
// Construct node, collection, partition and segment
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", "fake schema")
var collection = node.NewCollection(0, "collection0", "")
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
node.SegmentsMap[0] = segment
......@@ -19,7 +19,7 @@ func TestSegmentManagement_SegmentsManagement(t *testing.T) {
func TestSegmentManagement_SegmentService(t *testing.T) {
// Construct node, collection, partition and segment
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", "fake schema")
var collection = node.NewCollection(0, "collection0", "")
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
node.SegmentsMap[0] = segment
......@@ -31,7 +31,7 @@ func TestSegmentManagement_SegmentService(t *testing.T) {
func TestSegmentManagement_SegmentStatistic(t *testing.T) {
// Construct node, collection, partition and segment
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", "fake schema")
var collection = node.NewCollection(0, "collection0", "")
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
node.SegmentsMap[0] = segment
......@@ -43,7 +43,7 @@ func TestSegmentManagement_SegmentStatistic(t *testing.T) {
func TestSegmentManagement_SegmentStatisticService(t *testing.T) {
// Construct node, collection, partition and segment
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", "fake schema")
var collection = node.NewCollection(0, "collection0", "")
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
node.SegmentsMap[0] = segment
......
......@@ -13,7 +13,7 @@ import (
func TestSegment_ConstructorAndDestructor(t *testing.T) {
// 1. Construct node, collection, partition and segment
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", "fake schema")
var collection = node.NewCollection(0, "collection0", "")
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
......@@ -26,7 +26,7 @@ func TestSegment_ConstructorAndDestructor(t *testing.T) {
func TestSegment_SegmentInsert(t *testing.T) {
// 1. Construct node, collection, partition and segment
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", "fake schema")
var collection = node.NewCollection(0, "collection0", "")
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
......@@ -71,7 +71,7 @@ func TestSegment_SegmentInsert(t *testing.T) {
func TestSegment_SegmentDelete(t *testing.T) {
// 1. Construct node, collection, partition and segment
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", "fake schema")
var collection = node.NewCollection(0, "collection0", "")
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
......@@ -96,7 +96,7 @@ func TestSegment_SegmentDelete(t *testing.T) {
func TestSegment_SegmentSearch(t *testing.T) {
// 1. Construct node, collection, partition and segment
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", "fake schema")
var collection = node.NewCollection(0, "collection0", "")
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
......@@ -156,7 +156,7 @@ func TestSegment_SegmentSearch(t *testing.T) {
func TestSegment_SegmentPreInsert(t *testing.T) {
// 1. Construct node, collection, partition and segment
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", "fake schema")
var collection = node.NewCollection(0, "collection0", "")
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
......@@ -173,7 +173,7 @@ func TestSegment_SegmentPreInsert(t *testing.T) {
func TestSegment_SegmentPreDelete(t *testing.T) {
// 1. Construct node, collection, partition and segment
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", "fake schema")
var collection = node.NewCollection(0, "collection0", "")
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
......@@ -192,7 +192,7 @@ func TestSegment_SegmentPreDelete(t *testing.T) {
func TestSegment_GetStatus(t *testing.T) {
// 1. Construct node, collection, partition and segment
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", "fake schema")
var collection = node.NewCollection(0, "collection0", "")
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
......@@ -209,7 +209,7 @@ func TestSegment_GetStatus(t *testing.T) {
func TestSegment_Close(t *testing.T) {
// 1. Construct node, collection, partition and segment
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", "fake schema")
var collection = node.NewCollection(0, "collection0", "")
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
......@@ -226,7 +226,7 @@ func TestSegment_Close(t *testing.T) {
func TestSegment_GetRowCount(t *testing.T) {
// 1. Construct node, collection, partition and segment
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", "fake schema")
var collection = node.NewCollection(0, "collection0", "")
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
......@@ -275,7 +275,7 @@ func TestSegment_GetRowCount(t *testing.T) {
func TestSegment_GetDeletedCount(t *testing.T) {
// 1. Construct node, collection, partition and segment
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", "fake schema")
var collection = node.NewCollection(0, "collection0", "")
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
......@@ -305,7 +305,7 @@ func TestSegment_GetDeletedCount(t *testing.T) {
func TestSegment_GetMemSize(t *testing.T) {
// 1. Construct node, collection, partition and segment
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", "fake schema")
var collection = node.NewCollection(0, "collection0", "")
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
......@@ -350,3 +350,51 @@ func TestSegment_GetMemSize(t *testing.T) {
collection.DeletePartition(partition)
node.DeleteCollection(collection)
}
func TestSegment_RealSchemaTest(t *testing.T) {
// 1. Construct node, collection, partition and segment
// var schemaString = "id: 6873737669791618215\nname: \"collection0\"\nschema: \u003c\n field_metas: \u003c\n field_name: \"field_1\"\n type: INT64\n \u003e\n field_metas: \u003c\n field_name: \"field_2\"\n type: FLOAT\n \u003e\n field_metas: \u003c\n field_name: \"field_3\"\n type: INT32\n \u003e\n field_metas: \u003c\n field_name: \"field_vec\"\n type: VECTOR_FLOAT\n \u003e\n\u003e\ncreate_time: 1600416765\nsegment_ids: 6873737669791618215\npartition_tags: \"default\"\n"
// var schemaString = "id: 6873737669791618215\nname: \"collection0\"\nschema: \u003c\n field_metas: \u003c\n field_name: \"age\"\n type: INT32\n \u003e\n field_metas: \u003c\n field_name: \"fakevec\"\n type: VECTOR_FLOAT\n \u003e\n\u003e\ncreate_time: 1600416765\nsegment_ids: 6873737669791618215\npartition_tags: \"default\"\n"
var schemaString = "id: 6873737669791618215\nname: \"collection0\"\nschema: \u003c\n field_metas: \u003c\n field_name: \"age\"\n type: INT32\n dim: 1\n \u003e\n field_metas: \u003c\n field_name: \"field_1\"\n type: VECTOR_FLOAT\n dim: 16\n \u003e\n\u003e\ncreate_time: 1600416765\nsegment_ids: 6873737669791618215\npartition_tags: \"default\"\n"
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", schemaString)
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
// 2. Create ids and timestamps
ids := []int64{1, 2, 3}
timestamps := []uint64{0, 0, 0}
// 3. Create records, use schema below:
// schema_tmp->AddField("fakeVec", DataType::VECTOR_FLOAT, 16);
// schema_tmp->AddField("age", DataType::INT32);
const DIM = 16
const N = 3
var vec = [DIM]float32{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}
var rawData []byte
for _, ele := range vec {
buf := make([]byte, 4)
binary.LittleEndian.PutUint32(buf, math.Float32bits(ele))
rawData = append(rawData, buf...)
}
bs := make([]byte, 4)
binary.LittleEndian.PutUint32(bs, 1)
rawData = append(rawData, bs...)
var records [][]byte
for i := 0; i < N; i++ {
records = append(records, rawData)
}
// 4. Do PreInsert
var offset = segment.SegmentPreInsert(N)
assert.GreaterOrEqual(t, offset, int64(0))
// 5. Do Insert
var err = segment.SegmentInsert(offset, &ids, &timestamps, &records)
assert.NoError(t, err)
// 6. Destruct node, collection, and segment
partition.DeleteSegment(segment)
collection.DeletePartition(partition)
node.DeleteCollection(collection)
}
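Editor's note: hand-escaping schema strings like the one above is error-prone; a hypothetical helper (not part of this repository) could assemble the same text format from a field list. A sketch, assuming only the id, name and schema fields matter for parsing; create_time, segment_ids and partition_tags can be appended the same way if a test needs them:

    package main

    import (
        "fmt"
        "strings"
    )

    // fieldMeta mirrors the values the C++ parser reads from each
    // field_metas block (hypothetical helper type, for tests only).
    type fieldMeta struct {
        Name string
        Type string // e.g. "INT32", "VECTOR_FLOAT"
        Dim  int
    }

    // buildSchemaString renders fields in the text format used by the test above.
    func buildSchemaString(id int64, name string, fields []fieldMeta) string {
        var b strings.Builder
        fmt.Fprintf(&b, "id: %d\nname: %q\nschema: <\n", id, name)
        for _, f := range fields {
            fmt.Fprintf(&b, "  field_metas: <\n    field_name: %q\n    type: %s\n    dim: %d\n  >\n",
                f.Name, f.Type, f.Dim)
        }
        b.WriteString(">\n")
        return b.String()
    }

    func main() {
        s := buildSchemaString(0, "collection0", []fieldMeta{
            {"age", "INT32", 1},
            {"field_1", "VECTOR_FLOAT", 16},
        })
        fmt.Print(s)
    }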
......@@ -13,7 +13,7 @@ func TestUtilFunctions_GetKey2Segments(t *testing.T) {
func TestUtilFunctions_GetCollectionByCollectionName(t *testing.T) {
// 1. Construct node, and collections
node := NewQueryNode(0, 0)
var _ = node.NewCollection(0, "collection0", "fake schema")
var _ = node.NewCollection(0, "collection0", "")
// 2. Get collection by collectionName
var c0, err = node.GetCollectionByCollectionName("collection0")
......@@ -27,7 +27,7 @@ func TestUtilFunctions_GetCollectionByCollectionName(t *testing.T) {
func TestUtilFunctions_GetSegmentBySegmentID(t *testing.T) {
// 1. Construct node, collection, partition and segment
node := NewQueryNode(0, 0)
var collection = node.NewCollection(0, "collection0", "fake schema")
var collection = node.NewCollection(0, "collection0", "")
var partition = collection.NewPartition("partition0")
var segment = partition.NewSegment(0)
node.SegmentsMap[0] = segment
......