Commit e33893c1 authored by xiaojun.lin

MS-565 remove jsoncons v1


Former-commit-id: 3f8b9719ec3e3b3ecc4d56aa813c70c133a95da9
Parent 72885057
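This commit replaces the jsoncons-backed `Config` (an alias for `jsoncons::json`) with a plain `Cfg` struct hierarchy held in a `std::shared_ptr`, so indexes read typed fields instead of JSON keys. Below is a minimal caller-side sketch of the change, assuming the `IVFCfg` fields introduced in this diff (`d`, `k`, `nlist`, `nprobe`, `metric_type`); the helper function name and the concrete values are illustrative, not part of the commit.

```c++
#include <memory>
#include "knowhere/common/Config.h"                              // Config = std::shared_ptr<Cfg>
#include "knowhere/index/vector_index/helpers/IndexParameter.h"  // IVFCfg, METRICTYPE

using namespace zilliz::knowhere;

// Hypothetical helper, not part of this commit.
Config MakeIVFBuildConfig() {
    // Before this commit: Config was jsoncons::json, and parameters were
    // fetched by key, e.g. config["nlist"].as<size_t>() or
    // config.get_with_default("nprobe", size_t(1)).
    //
    // After this commit: parameters are plain fields on a shared Cfg object.
    auto conf = std::make_shared<IVFCfg>();
    conf->d = 64;                        // vector dimension
    conf->k = 10;                        // top-k results
    conf->nlist = 100;
    conf->nprobe = 16;
    conf->metric_type = METRICTYPE::L2;
    return conf;                         // implicit conversion to std::shared_ptr<Cfg>
}
```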
......@@ -93,7 +93,7 @@ endif()
set(CORE_INCLUDE_DIRS ${CORE_INCLUDE_DIRS} PARENT_SCOPE)
if(BUILD_UNIT_TEST STREQUAL "ON")
# add_subdirectory(test)
add_subdirectory(test)
endif()
config_summary()
......@@ -8,7 +8,6 @@ link_directories(${CUDA_TOOLKIT_ROOT_DIR}/lib64)
include_directories(${CORE_SOURCE_DIR}/knowhere)
include_directories(${CORE_SOURCE_DIR}/thirdparty)
include_directories(${CORE_SOURCE_DIR}/thirdparty/SPTAG/AnnService)
include_directories(${CORE_SOURCE_DIR}/thirdparty/jsoncons-0.126.0/include)
set(SPTAG_SOURCE_DIR ${CORE_SOURCE_DIR}/thirdparty/SPTAG)
file(GLOB HDR_FILES
......@@ -55,6 +54,7 @@ set(index_srcs
knowhere/index/vector_index/IndexGPUIVFPQ.cpp
knowhere/index/vector_index/FaissBaseIndex.cpp
knowhere/index/vector_index/helpers/FaissIO.cpp
knowhere/index/vector_index/helpers/IndexParameter.cpp
)
set(depend_libs
......
......@@ -69,8 +69,7 @@ ConvertToQueryResult(const DatasetPtr &dataset, const Config &config) {
auto dimension = tensor->shape()[1];
auto rows = tensor->shape()[0];
auto k = config[META_K].as<int64_t>();
std::vector<SPTAG::QueryResult> query_results(rows, SPTAG::QueryResult(nullptr, k, true));
std::vector<SPTAG::QueryResult> query_results(rows, SPTAG::QueryResult(nullptr, config->k, true));
for (auto i = 0; i < rows; ++i) {
query_results[i].SetTarget(&p_data[i * dimension]);
}
......
......@@ -18,15 +18,42 @@
#pragma once
#include <jsoncons/json.hpp>
#include <memory>
namespace zilliz {
namespace knowhere {
using Config = jsoncons::json;
enum class METRICTYPE {
INVALID = 0,
L2 = 1,
IP = 2,
};
// General Config
constexpr int64_t INVALID_VALUE = -1;
constexpr int64_t DEFAULT_K = INVALID_VALUE;
constexpr int64_t DEFAULT_DIM = INVALID_VALUE;
constexpr int64_t DEFAULT_GPUID = INVALID_VALUE;
constexpr METRICTYPE DEFAULT_TYPE = METRICTYPE::INVALID;
struct Cfg {
METRICTYPE metric_type = DEFAULT_TYPE;
int64_t k = DEFAULT_K;
int64_t gpu_id = DEFAULT_GPUID;
int64_t d = DEFAULT_DIM;
Cfg(const int64_t &dim,
const int64_t &k,
const int64_t &gpu_id,
METRICTYPE type)
: d(dim), k(k), gpu_id(gpu_id), metric_type(type) {}
Cfg() = default;
virtual bool
CheckValid() { return true; }
};
using Config = std::shared_ptr<Cfg>;
} // namespace knowhere
} // namespace zilliz
......@@ -36,10 +36,11 @@ namespace zilliz {
namespace knowhere {
IndexModelPtr GPUIVF::Train(const DatasetPtr &dataset, const Config &config) {
auto nlist = config["nlist"].as<size_t>();
gpu_id_ = config.get_with_default("gpu_id", gpu_id_);
auto metric_type = config["metric_type"].as_string() == "L2" ?
faiss::METRIC_L2 : faiss::METRIC_INNER_PRODUCT;
auto build_cfg = std::dynamic_pointer_cast<IVFCfg>(config);
if (build_cfg != nullptr) {
build_cfg->CheckValid(); // throw exception
}
gpu_id_ = build_cfg->gpu_id;
GETTENSOR(dataset)
......@@ -48,7 +49,9 @@ IndexModelPtr GPUIVF::Train(const DatasetPtr &dataset, const Config &config) {
ResScope rs(temp_resource, gpu_id_, true);
faiss::gpu::GpuIndexIVFFlatConfig idx_config;
idx_config.device = gpu_id_;
faiss::gpu::GpuIndexIVFFlat device_index(temp_resource->faiss_res.get(), dim, nlist, metric_type, idx_config);
faiss::gpu::GpuIndexIVFFlat device_index(temp_resource->faiss_res.get(), dim,
build_cfg->nlist, GetMetricType(build_cfg->metric_type),
idx_config);
device_index.train(rows, (float *) p_data);
std::shared_ptr<faiss::Index> host_index = nullptr;
......@@ -143,9 +146,10 @@ void GPUIVF::search_impl(int64_t n,
const Config &cfg) {
std::lock_guard<std::mutex> lk(mutex_);
// TODO(linxj): gpu index support GenParams
if (auto device_index = std::static_pointer_cast<faiss::gpu::GpuIndexIVF>(index_)) {
auto nprobe = cfg.get_with_default("nprobe", size_t(1));
device_index->setNumProbes(nprobe);
auto search_cfg = std::dynamic_pointer_cast<IVFCfg>(cfg);
device_index->setNumProbes(search_cfg->nprobe);
{
// TODO(linxj): allocate mem
......
......@@ -29,19 +29,19 @@ namespace zilliz {
namespace knowhere {
IndexModelPtr GPUIVFPQ::Train(const DatasetPtr &dataset, const Config &config) {
auto nlist = config["nlist"].as<size_t>();
auto M = config["M"].as<size_t>(); // number of subquantizers(subvectors)
auto nbits = config["nbits"].as<size_t>();// number of bit per subvector index
auto gpu_num = config.get_with_default("gpu_id", gpu_id_);
auto metric_type = config["metric_type"].as_string() == "L2" ?
faiss::METRIC_L2 : faiss::METRIC_L2; // IP not support.
auto build_cfg = std::dynamic_pointer_cast<IVFPQCfg>(config);
if (build_cfg != nullptr) {
build_cfg->CheckValid(); // throw exception
}
gpu_id_ = build_cfg->gpu_id;
GETTENSOR(dataset)
// TODO(linxj): set device here.
// TODO(linxj): set gpu resource here.
faiss::gpu::StandardGpuResources res;
faiss::gpu::GpuIndexIVFPQ device_index(&res, dim, nlist, M, nbits, metric_type);
faiss::gpu::GpuIndexIVFPQ device_index(&res, dim, build_cfg->nlist, build_cfg->m,
build_cfg->nbits, GetMetricType(build_cfg->metric_type)); // IP not support
device_index.train(rows, (float *) p_data);
std::shared_ptr<faiss::Index> host_index = nullptr;
......@@ -52,10 +52,11 @@ IndexModelPtr GPUIVFPQ::Train(const DatasetPtr &dataset, const Config &config) {
std::shared_ptr<faiss::IVFSearchParameters> GPUIVFPQ::GenParams(const Config &config) {
auto params = std::make_shared<faiss::IVFPQSearchParameters>();
params->nprobe = config.get_with_default("nprobe", size_t(1));
//params->scan_table_threshold = 0;
//params->polysemous_ht = 0;
//params->max_codes = 0;
auto search_cfg = std::dynamic_pointer_cast<IVFPQCfg>(config);
params->nprobe = search_cfg->nprobe;
// params->scan_table_threshold = conf->scan_table_threhold;
// params->polysemous_ht = conf->polysemous_ht;
// params->max_codes = conf->max_codes;
return params;
}
......
......@@ -28,17 +28,17 @@ namespace zilliz {
namespace knowhere {
IndexModelPtr GPUIVFSQ::Train(const DatasetPtr &dataset, const Config &config) {
auto nlist = config["nlist"].as<size_t>();
auto nbits = config["nbits"].as<size_t>(); // TODO(linxj): gpu only support SQ4 SQ8 SQ16
gpu_id_ = config.get_with_default("gpu_id", gpu_id_);
auto metric_type = config["metric_type"].as_string() == "L2" ?
faiss::METRIC_L2 : faiss::METRIC_INNER_PRODUCT;
auto build_cfg = std::dynamic_pointer_cast<IVFSQCfg>(config);
if (build_cfg != nullptr) {
build_cfg->CheckValid(); // throw exception
}
gpu_id_ = build_cfg->gpu_id;
GETTENSOR(dataset)
std::stringstream index_type;
index_type << "IVF" << nlist << "," << "SQ" << nbits;
auto build_index = faiss::index_factory(dim, index_type.str().c_str(), metric_type);
index_type << "IVF" << build_cfg->nlist << "," << "SQ" << build_cfg->nbits;
auto build_index = faiss::index_factory(dim, index_type.str().c_str(), GetMetricType(build_cfg->metric_type));
auto temp_resource = FaissGpuResourceMgr::GetInstance().GetRes(gpu_id_);
if (temp_resource != nullptr) {
......
......@@ -51,24 +51,22 @@ DatasetPtr IDMAP::Search(const DatasetPtr &dataset, const Config &config) {
KNOWHERE_THROW_MSG("index not initialize");
}
auto k = config["k"].as<size_t>();
config->CheckValid();
//auto metric_type = config["metric_type"].as_string() == "L2" ?
// faiss::METRIC_L2 : faiss::METRIC_INNER_PRODUCT;
//index_->metric_type = metric_type;
GETTENSOR(dataset)
// TODO(linxj): handle malloc exception
auto elems = rows * k;
auto elems = rows * config->k;
auto res_ids = (int64_t *) malloc(sizeof(int64_t) * elems);
auto res_dis = (float *) malloc(sizeof(float) * elems);
search_impl(rows, (float *) p_data, k, res_dis, res_ids, Config());
search_impl(rows, (float *) p_data, config->k, res_dis, res_ids, Config());
auto id_buf = MakeMutableBufferSmart((uint8_t *) res_ids, sizeof(int64_t) * elems);
auto dist_buf = MakeMutableBufferSmart((uint8_t *) res_dis, sizeof(float) * elems);
// TODO: magic
std::vector<BufferPtr> id_bufs{nullptr, id_buf};
std::vector<BufferPtr> dist_bufs{nullptr, dist_buf};
......@@ -136,11 +134,9 @@ int64_t *IDMAP::GetRawIds() {
const char* type = "IDMap,Flat";
void IDMAP::Train(const Config &config) {
auto metric_type = config["metric_type"].as_string() == "L2" ?
faiss::METRIC_L2 : faiss::METRIC_INNER_PRODUCT;
auto dim = config["dim"].as<size_t>();
config->CheckValid();
auto index = faiss::index_factory(dim, type, metric_type);
auto index = faiss::index_factory(config->d, type, GetMetricType(config->metric_type));
index_.reset(index);
}
......
......@@ -37,17 +37,20 @@ namespace knowhere {
IndexModelPtr IVF::Train(const DatasetPtr &dataset, const Config &config) {
auto nlist = config["nlist"].as<size_t>();
auto metric_type = config["metric_type"].as_string() == "L2" ?
faiss::METRIC_L2 : faiss::METRIC_INNER_PRODUCT;
auto build_cfg = std::dynamic_pointer_cast<IVFCfg>(config);
if (build_cfg != nullptr) {
build_cfg->CheckValid(); // throw exception
}
GETTENSOR(dataset)
faiss::Index *coarse_quantizer = new faiss::IndexFlatL2(dim);
auto index = std::make_shared<faiss::IndexIVFFlat>(coarse_quantizer, dim, nlist, metric_type);
auto index = std::make_shared<faiss::IndexIVFFlat>(coarse_quantizer, dim,
build_cfg->nlist,
GetMetricType(build_cfg->metric_type));
index->train(rows, (float *) p_data);
// TODO: override here. train return model or not.
// TODO(linxj): override here. train return model or not.
return std::make_shared<IVFIndexModel>(index);
}
......@@ -60,7 +63,6 @@ void IVF::Add(const DatasetPtr &dataset, const Config &config) {
std::lock_guard<std::mutex> lk(mutex_);
GETTENSOR(dataset)
// TODO: magic here.
auto array = dataset->array()[0];
auto p_ids = array->data()->GetValues<long>(1, 0);
index_->add_with_ids(rows, (float *) p_data, p_ids);
......@@ -97,28 +99,22 @@ DatasetPtr IVF::Search(const DatasetPtr &dataset, const Config &config) {
KNOWHERE_THROW_MSG("index not initialize or trained");
}
auto k = config["k"].as<size_t>();
auto search_cfg = std::dynamic_pointer_cast<IVFCfg>(config);
if (search_cfg != nullptr) {
search_cfg->CheckValid(); // throw exception
}
GETTENSOR(dataset)
// TODO(linxj): handle malloc exception
auto elems = rows * k;
auto elems = rows * search_cfg->k;
auto res_ids = (int64_t *) malloc(sizeof(int64_t) * elems);
auto res_dis = (float *) malloc(sizeof(float) * elems);
search_impl(rows, (float*) p_data, k, res_dis, res_ids, config);
//faiss::ivflib::search_with_parameters(index_.get(),
// rows,
// (float *) p_data,
// k,
// res_dis,
// res_ids,
// params.get());
search_impl(rows, (float*) p_data, search_cfg->k, res_dis, res_ids, config);
auto id_buf = MakeMutableBufferSmart((uint8_t *) res_ids, sizeof(int64_t) * elems);
auto dist_buf = MakeMutableBufferSmart((uint8_t *) res_dis, sizeof(float) * elems);
// TODO: magic
std::vector<BufferPtr> id_bufs{nullptr, id_buf};
std::vector<BufferPtr> dist_bufs{nullptr, dist_buf};
......@@ -146,7 +142,9 @@ void IVF::set_index_model(IndexModelPtr model) {
std::shared_ptr<faiss::IVFSearchParameters> IVF::GenParams(const Config &config) {
auto params = std::make_shared<faiss::IVFPQSearchParameters>();
params->nprobe = config.get_with_default("nprobe", size_t(1));
auto search_cfg = std::dynamic_pointer_cast<IVFCfg>(config);
params->nprobe = search_cfg->nprobe;
//params->max_codes = config.get_with_default("max_codes", size_t(0));
return params;
......
......@@ -27,16 +27,16 @@ namespace zilliz {
namespace knowhere {
IndexModelPtr IVFPQ::Train(const DatasetPtr &dataset, const Config &config) {
auto nlist = config["nlist"].as<size_t>();
auto M = config["M"].as<size_t>(); // number of subquantizers(subvector)
auto nbits = config["nbits"].as<size_t>();// number of bit per subvector index
auto metric_type = config["metric_type"].as_string() == "L2" ?
faiss::METRIC_L2 : faiss::METRIC_INNER_PRODUCT;
auto build_cfg = std::dynamic_pointer_cast<IVFPQCfg>(config);
if (build_cfg != nullptr) {
build_cfg->CheckValid(); // throw exception
}
GETTENSOR(dataset)
faiss::Index *coarse_quantizer = new faiss::IndexFlat(dim, metric_type);
auto index = std::make_shared<faiss::IndexIVFPQ>(coarse_quantizer, dim, nlist, M, nbits);
faiss::Index *coarse_quantizer = new faiss::IndexFlat(dim, GetMetricType(build_cfg->metric_type));
auto index = std::make_shared<faiss::IndexIVFPQ>(coarse_quantizer, dim,
build_cfg->nlist, build_cfg->m, build_cfg->nbits);
index->train(rows, (float *) p_data);
return std::make_shared<IVFIndexModel>(index);
......@@ -44,10 +44,11 @@ IndexModelPtr IVFPQ::Train(const DatasetPtr &dataset, const Config &config) {
std::shared_ptr<faiss::IVFSearchParameters> IVFPQ::GenParams(const Config &config) {
auto params = std::make_shared<faiss::IVFPQSearchParameters>();
params->nprobe = config.get_with_default("nprobe", size_t(1));
//params->scan_table_threshold = 0;
//params->polysemous_ht = 0;
//params->max_codes = 0;
auto search_cfg = std::dynamic_pointer_cast<IVFPQCfg>(config);
params->nprobe = search_cfg->nprobe;
// params->scan_table_threshold = conf->scan_table_threhold;
// params->polysemous_ht = conf->polysemous_ht;
// params->max_codes = conf->max_codes;
return params;
}
......
......@@ -29,16 +29,17 @@ namespace zilliz {
namespace knowhere {
IndexModelPtr IVFSQ::Train(const DatasetPtr &dataset, const Config &config) {
auto nlist = config["nlist"].as<size_t>();
auto nbits = config["nbits"].as<size_t>(); // TODO(linxj): only support SQ4 SQ6 SQ8 SQ16
auto metric_type = config["metric_type"].as_string() == "L2" ?
faiss::METRIC_L2 : faiss::METRIC_INNER_PRODUCT;
auto build_cfg = std::dynamic_pointer_cast<IVFSQCfg>(config);
if (build_cfg != nullptr) {
build_cfg->CheckValid(); // throw exception
}
GETTENSOR(dataset)
std::stringstream index_type;
index_type << "IVF" << nlist << "," << "SQ" << nbits;
auto build_index = faiss::index_factory(dim, index_type.str().c_str(), metric_type);
index_type << "IVF" << build_cfg->nlist << "," << "SQ" << build_cfg->nbits;
auto build_index = faiss::index_factory(dim, index_type.str().c_str(),
GetMetricType(build_cfg->metric_type));
build_index->train(rows, (float *) p_data);
std::shared_ptr<faiss::Index> ret_index;
......
......@@ -118,8 +118,8 @@ CPUKDTRNG::Add(const DatasetPtr &origin, const Config &add_config) {
void
CPUKDTRNG::SetParameters(const Config &config) {
for (auto &para : KDTParameterMgr::GetInstance().GetKDTParameters()) {
auto value = config.get_with_default(para.first, para.second);
index_ptr_->SetParameter(para.first, value);
// auto value = config.get_with_default(para.first, para.second);
index_ptr_->SetParameter(para.first, para.second);
}
}
......
......@@ -67,30 +67,29 @@ void NSG::Load(const BinarySet &index_binary) {
}
DatasetPtr NSG::Search(const DatasetPtr &dataset, const Config &config) {
auto build_cfg = std::dynamic_pointer_cast<NSGCfg>(config);
if (build_cfg != nullptr) {
build_cfg->CheckValid(); // throw exception
}
if (!index_ || !index_->is_trained) {
KNOWHERE_THROW_MSG("index not initialize or trained");
}
// Required
// if not found throw exception here.
auto k = config["k"].as<size_t>();
auto search_length = config.get_with_default("search_length", 30);
GETTENSOR(dataset)
auto elems = rows * k;
auto elems = rows * build_cfg->k;
auto res_ids = (int64_t *) malloc(sizeof(int64_t) * elems);
auto res_dis = (float *) malloc(sizeof(float) * elems);
// TODO(linxj): get from config
algo::SearchParams s_params;
s_params.search_length = search_length;
index_->Search((float *) p_data, rows, dim, k, res_dis, res_ids, s_params);
s_params.search_length = build_cfg->search_length;
index_->Search((float *) p_data, rows, dim,
build_cfg->k, res_dis, res_ids, s_params);
auto id_buf = MakeMutableBufferSmart((uint8_t *) res_ids, sizeof(int64_t) * elems);
auto dist_buf = MakeMutableBufferSmart((uint8_t *) res_dis, sizeof(float) * elems);
// TODO: magic
std::vector<BufferPtr> id_bufs{nullptr, id_buf};
std::vector<BufferPtr> dist_bufs{nullptr, dist_buf};
......@@ -108,45 +107,41 @@ DatasetPtr NSG::Search(const DatasetPtr &dataset, const Config &config) {
}
IndexModelPtr NSG::Train(const DatasetPtr &dataset, const Config &config) {
TimeRecorder rc("Interface");
auto build_cfg = std::dynamic_pointer_cast<NSGCfg>(config);
if (build_cfg != nullptr) {
build_cfg->CheckValid(); // throw exception
}
auto metric_type = config["metric_type"].as_string();
if (metric_type != "L2") { KNOWHERE_THROW_MSG("NSG not support this kind of metric type");}
if (build_cfg->metric_type != METRICTYPE::L2) {
KNOWHERE_THROW_MSG("NSG not support this kind of metric type");
}
// TODO(linxj): dev IndexFactory, support more IndexType
auto preprocess_index = std::make_shared<GPUIVF>(0);
//auto preprocess_index = std::make_shared<IVF>();
auto preprocess_index = std::make_shared<GPUIVF>(build_cfg->gpu_id);
auto model = preprocess_index->Train(dataset, config);
preprocess_index->set_index_model(model);
preprocess_index->AddWithoutIds(dataset, config);
rc.RecordSection("build ivf");
auto k = config["knng"].as<int64_t>();
Graph knng;
preprocess_index->GenGraph(k, knng, dataset, config);
rc.RecordSection("build knng");
preprocess_index->GenGraph(build_cfg->knng, knng, dataset, config);
algo::BuildParams b_params;
b_params.candidate_pool_size = build_cfg->candidate_pool_size;
b_params.out_degree = build_cfg->out_degree;
b_params.search_length = build_cfg->search_length;
GETTENSOR(dataset)
auto array = dataset->array()[0];
auto p_ids = array->data()->GetValues<long>(1, 0);
algo::BuildParams b_params;
b_params.candidate_pool_size = config["candidate_pool_size"].as<size_t>();
b_params.out_degree = config["out_degree"].as<size_t>();
b_params.search_length = config["search_length"].as<size_t>();
index_ = std::make_shared<algo::NsgIndex>(dim, rows);
index_->SetKnnGraph(knng);
index_->Build_with_ids(rows, (float *) p_data, (long *) p_ids, b_params);
rc.RecordSection("build nsg");
rc.ElapseFromBegin("total cost");
return nullptr; // TODO(linxj): support serialize
}
void NSG::Add(const DatasetPtr &dataset, const Config &config) {
// TODO(linxj): support incremental index.
//KNOWHERE_THROW_MSG("Not support yet");
// do nothing
}
int64_t NSG::Count() {
......@@ -156,6 +151,7 @@ int64_t NSG::Count() {
int64_t NSG::Dimension() {
return index_->dimension;
}
VectorIndexPtr NSG::Clone() {
KNOWHERE_THROW_MSG("not support");
}
......
......@@ -22,6 +22,7 @@
#include <memory>
#include "knowhere/common/Config.h"
#include "knowhere/index/vector_index/helpers/IndexParameter.h"
#include "knowhere/common/Dataset.h"
#include "knowhere/index/Index.h"
#include "knowhere/index/preprocessor/Preprocessor.h"
......
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#include "IndexParameter.h"
#include "knowhere/common/Exception.h"
#include <faiss/Index.h>
namespace zilliz {
namespace knowhere {
faiss::MetricType GetMetricType(METRICTYPE &type) {
if (type == METRICTYPE::L2) return faiss::METRIC_L2;
if (type == METRICTYPE::IP) return faiss::METRIC_INNER_PRODUCT;
if (type == METRICTYPE::INVALID) KNOWHERE_THROW_MSG("Metric type is invalid");
}
}
}
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#pragma once
#include "knowhere/common/Config.h"
#include <faiss/Index.h>
namespace zilliz {
namespace knowhere {
extern faiss::MetricType GetMetricType(METRICTYPE &type);
// IVF Config
constexpr int64_t DEFAULT_NLIST = INVALID_VALUE;
constexpr int64_t DEFAULT_NPROBE = INVALID_VALUE;
constexpr int64_t DEFAULT_NSUBVECTORS = INVALID_VALUE;
constexpr int64_t DEFAULT_NBITS = INVALID_VALUE;
constexpr int64_t DEFAULT_SCAN_TABLE_THREHOLD = INVALID_VALUE;
constexpr int64_t DEFAULT_POLYSEMOUS_HT = INVALID_VALUE;
constexpr int64_t DEFAULT_MAX_CODES = INVALID_VALUE;
// NSG Config
constexpr int64_t DEFAULT_SEARCH_LENGTH = INVALID_VALUE;
constexpr int64_t DEFAULT_OUT_DEGREE = INVALID_VALUE;
constexpr int64_t DEFAULT_CANDIDATE_SISE = INVALID_VALUE;
constexpr int64_t DEFAULT_NNG_K = INVALID_VALUE;
struct IVFCfg : public Cfg {
int64_t nlist = DEFAULT_NLIST;
int64_t nprobe = DEFAULT_NPROBE;
IVFCfg(const int64_t &dim,
const int64_t &k,
const int64_t &gpu_id,
const int64_t &nlist,
const int64_t &nprobe,
METRICTYPE type)
: nlist(nlist), nprobe(nprobe), Cfg(dim, k, gpu_id, type) {}
IVFCfg() = default;
bool
CheckValid() override { return true; }
};
using IVFConfig = std::shared_ptr<IVFCfg>;
struct IVFSQCfg : public IVFCfg {
// TODO(linxj): cpu only support SQ4 SQ6 SQ8 SQ16, gpu only support SQ4, SQ8, SQ16
int64_t nbits = DEFAULT_NBITS;
IVFSQCfg(const int64_t &dim,
const int64_t &k,
const int64_t &gpu_id,
const int64_t &nlist,
const int64_t &nprobe,
const int64_t &nbits,
METRICTYPE type)
: nbits(nbits), IVFCfg(dim, k, gpu_id, nlist, nprobe, type) {}
IVFSQCfg() = default;
bool
CheckValid() override { return true; }
};
using IVFSQConfig = std::shared_ptr<IVFSQCfg>;
struct IVFPQCfg : public IVFCfg {
int64_t m = DEFAULT_NSUBVECTORS; // number of subquantizers (subvectors)
int64_t nbits = DEFAULT_NBITS; // number of bits per subvector index
// TODO(linxj): not use yet
int64_t scan_table_threhold = DEFAULT_SCAN_TABLE_THREHOLD;
int64_t polysemous_ht = DEFAULT_POLYSEMOUS_HT;
int64_t max_codes = DEFAULT_MAX_CODES;
IVFPQCfg(const int64_t &dim,
const int64_t &k,
const int64_t &gpu_id,
const int64_t &nlist,
const int64_t &nprobe,
const int64_t &nbits,
const int64_t &m,
METRICTYPE type)
: nbits(nbits), m(m), IVFCfg(dim, k, gpu_id, nlist, nprobe, type) {}
IVFPQCfg() = default;
bool
CheckValid() override { return true; }
};
using IVFPQConfig = std::shared_ptr<IVFPQCfg>;
struct NSGCfg : public IVFCfg {
int64_t knng = DEFAULT_NNG_K;
int64_t search_length = DEFAULT_SEARCH_LENGTH;
int64_t out_degree = DEFAULT_OUT_DEGREE;
int64_t candidate_pool_size = DEFAULT_CANDIDATE_SISE;
NSGCfg(const int64_t &dim,
const int64_t &k,
const int64_t &gpu_id,
const int64_t &nlist,
const int64_t &nprobe,
const int64_t &knng,
const int64_t &search_length,
const int64_t &out_degree,
const int64_t &candidate_size,
METRICTYPE type)
: knng(knng), search_length(search_length), out_degree(out_degree), candidate_pool_size(candidate_size),
IVFCfg(dim, k, gpu_id, nlist, nprobe, type) {}
NSGCfg() = default;
bool
CheckValid() override { return true; }
};
using NSGConfig = std::shared_ptr<NSGCfg>;
struct KDTCfg : public Cfg {
int64_t tptnubmber = -1;
};
} // knowhere
} // zilliz
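Each index implementation is expected to downcast the shared `Cfg` to its concrete config type before use, as the `GPUIVF::Train` and `IVF::Search` hunks above do. A minimal sketch of that consumption pattern, assuming only the types declared in this header; the free function name and the throw message are illustrative.

```c++
#include <memory>
#include "knowhere/common/Exception.h"
#include "knowhere/index/vector_index/helpers/IndexParameter.h"

using namespace zilliz::knowhere;

// Hypothetical free function; the real indexes do this inside Train()/Search().
void ReadIVFSearchParams(const Config &config) {
    auto search_cfg = std::dynamic_pointer_cast<IVFCfg>(config);
    if (search_cfg == nullptr) {
        KNOWHERE_THROW_MSG("config is not an IVFCfg");  // illustrative message
    }
    search_cfg->CheckValid();              // intended to throw on invalid values (per the diff comments)
    auto k = search_cfg->k;                // replaces config["k"].as<size_t>()
    auto nprobe = search_cfg->nprobe;      // replaces config.get_with_default("nprobe", size_t(1))
    (void) k;
    (void) nprobe;
}
```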
......@@ -35,7 +35,7 @@ KDTParameterMgr::KDTParameterMgr() {
{"NumTopDimensionKDTSplit", "5"},
{"NumSamplesKDTSplitConsideration", "100"},
{"TPTNumber", "32"},
{"TPTNumber", "1"},
{"TPTLeafSize", "2000"},
{"NumTopDimensionTPTSplit", "5"},
......
include_directories(${CORE_SOURCE_DIR}/thirdparty)
include_directories(${CORE_SOURCE_DIR}/thirdparty/SPTAG/AnnService)
include_directories(${CORE_SOURCE_DIR}/knowhere)
include_directories(${CORE_SOURCE_DIR}/thirdparty/jsoncons-0.126.0/include)
include_directories(/usr/local/cuda/include)
link_directories(/usr/local/cuda/lib64)
link_directories(${CORE_SOURCE_DIR}/thirdparty/tbb)
set(unittest_libs
gtest gmock gtest_main gmock_main)
message(STATUS "arrow prefix: ${ARROW_PREFIX}")
message(STATUS "libjemalloc_pic path: ${ARROW_PREFIX}/lib/libjemalloc_pic.a")
set(depend_libs
gtest gmock gtest_main gmock_main
faiss openblas lapack
arrow "${ARROW_PREFIX}/lib/libjemalloc_pic.a"
tbb
......@@ -24,36 +22,45 @@ set(basic_libs
set(util_srcs
${MILVUS_ENGINE_SRC}/utils/easylogging++.cc
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/helpers/FaissGpuResourceMgr.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/helpers/FaissIO.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/helpers/IndexParameter.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/adapter/Structure.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/adapter/ArrowAdapter.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/common/Exception.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/common/Timer.cpp
utils.cpp
)
#<IVF-TEST>
set(ivf_srcs
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/ivf.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/gpu_ivf.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/cloner.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/idmap.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/adapter/structure.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/common/exception.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/common/timer.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/FaissGpuResourceMgr.cpp
utils.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/helpers/Cloner.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/IndexIVF.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/IndexGPUIVF.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/IndexIVFSQ.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/IndexGPUIVFSQ.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/IndexIVFPQ.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/IndexGPUIVFPQ.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/IndexIDMAP.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/FaissBaseIndex.cpp
)
if(NOT TARGET test_ivf)
add_executable(test_ivf test_ivf.cpp ${ivf_srcs} ${util_srcs})
endif()
target_link_libraries(test_ivf ${depend_libs} ${unittest_libs} ${basic_libs})
#<IDMAP-TEST>
set(idmap_srcs
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/idmap.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/ivf.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/cloner.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/gpu_ivf.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/adapter/structure.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/common/exception.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/common/timer.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/FaissGpuResourceMgr.cpp
utils.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/helpers/Cloner.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/FaissBaseIndex.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/IndexIDMAP.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/IndexGPUIVF.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/IndexIVF.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/IndexIVFPQ.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/IndexGPUIVFPQ.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/IndexIVFSQ.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/IndexGPUIVFSQ.cpp
)
if(NOT TARGET test_idmap)
add_executable(test_idmap test_idmap.cpp ${idmap_srcs} ${util_srcs})
......@@ -62,15 +69,10 @@ target_link_libraries(test_idmap ${depend_libs} ${unittest_libs} ${basic_libs})
#<KDT-TEST>
set(kdt_srcs
${CORE_SOURCE_DIR}/knowhere/knowhere/index/preprocessor/normalize.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/kdt_parameters.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/cpu_kdt_rng.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/adapter/structure.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/adapter/sptag.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/common/exception.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/adapter/arrow.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/common/timer.cpp
utils.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/adapter/SptagAdapter.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/preprocessor/Normalize.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/helpers/KDTParameterMgr.cpp
${CORE_SOURCE_DIR}/knowhere/knowhere/index/vector_index/IndexKDT.cpp
)
if(NOT TARGET test_kdt)
add_executable(test_kdt test_kdt.cpp ${kdt_srcs} ${util_srcs})
......
......@@ -19,17 +19,17 @@
#include <gtest/gtest.h>
#include <iostream>
#include <sstream>
#include "knowhere/index/vector_index/IndexIDMAP.h"
#include "knowhere/adapter/Structure.h"
#include "knowhere/index/vector_index/utils/Cloner.h"
#include "knowhere/index/vector_index/helpers/Cloner.h"
#include "knowhere/common/Exception.h"
#include "utils.h"
using namespace zilliz::knowhere;
using namespace zilliz::knowhere::cloner;
static int device_id = 0;
class IDMAPTest : public DataGen, public ::testing::Test {
......@@ -79,15 +79,19 @@ void PrintResult(const DatasetPtr &result,
TEST_F(IDMAPTest, idmap_basic) {
ASSERT_TRUE(!xb.empty());
Config Default_cfg;
index_->Train(Config::object{{"dim", dim}, {"metric_type", "L2"}});
index_->Add(base_dataset, Default_cfg);
auto conf = std::make_shared<Cfg>();
conf->d = dim;
conf->k = k;
conf->metric_type = METRICTYPE::L2;
index_->Train(conf);
index_->Add(base_dataset, conf);
EXPECT_EQ(index_->Count(), nb);
EXPECT_EQ(index_->Dimension(), dim);
ASSERT_TRUE(index_->GetRawVectors() != nullptr);
ASSERT_TRUE(index_->GetRawIds() != nullptr);
auto result = index_->Search(query_dataset, Config::object{{"k", k}});
auto result = index_->Search(query_dataset, conf);
AssertAnns(result, nq, k);
PrintResult(result, nq, k);
......@@ -95,7 +99,7 @@ TEST_F(IDMAPTest, idmap_basic) {
auto binaryset = index_->Serialize();
auto new_index = std::make_shared<IDMAP>();
new_index->Load(binaryset);
auto re_result = index_->Search(query_dataset, Config::object{{"k", k}});
auto re_result = index_->Search(query_dataset, conf);
AssertAnns(re_result, nq, k);
PrintResult(re_result, nq, k);
}
......@@ -109,11 +113,16 @@ TEST_F(IDMAPTest, idmap_serialize) {
reader(ret, bin->size);
};
auto conf = std::make_shared<Cfg>();
conf->d = dim;
conf->k = k;
conf->metric_type = METRICTYPE::L2;
{
// serialize index
index_->Train(Config::object{{"dim", dim}, {"metric_type", "L2"}});
index_->Train(conf);
index_->Add(base_dataset, Config());
auto re_result = index_->Search(query_dataset, Config::object{{"k", k}});
auto re_result = index_->Search(query_dataset, conf);
AssertAnns(re_result, nq, k);
PrintResult(re_result, nq, k);
EXPECT_EQ(index_->Count(), nb);
......@@ -133,7 +142,7 @@ TEST_F(IDMAPTest, idmap_serialize) {
index_->Load(binaryset);
EXPECT_EQ(index_->Count(), nb);
EXPECT_EQ(index_->Dimension(), dim);
auto result = index_->Search(query_dataset, Config::object{{"k", k}});
auto result = index_->Search(query_dataset, conf);
AssertAnns(result, nq, k);
PrintResult(result, nq, k);
}
......@@ -141,29 +150,33 @@ TEST_F(IDMAPTest, idmap_serialize) {
TEST_F(IDMAPTest, copy_test) {
ASSERT_TRUE(!xb.empty());
Config Default_cfg;
index_->Train(Config::object{{"dim", dim}, {"metric_type", "L2"}});
index_->Add(base_dataset, Default_cfg);
auto conf = std::make_shared<Cfg>();
conf->d = dim;
conf->k = k;
conf->metric_type = METRICTYPE::L2;
index_->Train(conf);
index_->Add(base_dataset, conf);
EXPECT_EQ(index_->Count(), nb);
EXPECT_EQ(index_->Dimension(), dim);
ASSERT_TRUE(index_->GetRawVectors() != nullptr);
ASSERT_TRUE(index_->GetRawIds() != nullptr);
auto result = index_->Search(query_dataset, Config::object{{"k", k}});
auto result = index_->Search(query_dataset, conf);
AssertAnns(result, nq, k);
//PrintResult(result, nq, k);
{
// clone
auto clone_index = index_->Clone();
auto clone_result = clone_index->Search(query_dataset, Config::object{{"k", k}});
auto clone_result = clone_index->Search(query_dataset, conf);
AssertAnns(clone_result, nq, k);
}
{
// cpu to gpu
auto clone_index = CopyCpuToGpu(index_, device_id, Config());
auto clone_result = clone_index->Search(query_dataset, Config::object{{"k", k}});
auto clone_index = CopyCpuToGpu(index_, device_id, conf);
auto clone_result = clone_index->Search(query_dataset, conf);
AssertAnns(clone_result, nq, k);
ASSERT_THROW({ std::static_pointer_cast<GPUIDMAP>(clone_index)->GetRawVectors(); },
zilliz::knowhere::KnowhereException);
......@@ -172,24 +185,24 @@ TEST_F(IDMAPTest, copy_test) {
auto binary = clone_index->Serialize();
clone_index->Load(binary);
auto new_result = clone_index->Search(query_dataset, Config::object{{"k", k}});
auto new_result = clone_index->Search(query_dataset, conf);
AssertAnns(new_result, nq, k);
auto clone_gpu_idx = clone_index->Clone();
auto clone_gpu_res = clone_gpu_idx->Search(query_dataset, Config::object{{"k", k}});
auto clone_gpu_res = clone_gpu_idx->Search(query_dataset, conf);
AssertAnns(clone_gpu_res, nq, k);
// gpu to cpu
auto host_index = CopyGpuToCpu(clone_index, Config());
auto host_result = host_index->Search(query_dataset, Config::object{{"k", k}});
auto host_index = CopyGpuToCpu(clone_index, conf);
auto host_result = host_index->Search(query_dataset, conf);
AssertAnns(host_result, nq, k);
ASSERT_TRUE(std::static_pointer_cast<IDMAP>(host_index)->GetRawVectors() != nullptr);
ASSERT_TRUE(std::static_pointer_cast<IDMAP>(host_index)->GetRawIds() != nullptr);
// gpu to gpu
auto device_index = CopyCpuToGpu(index_, device_id, Config());
auto new_device_index = std::static_pointer_cast<GPUIDMAP>(device_index)->CopyGpuToGpu(device_id, Config());
auto device_result = new_device_index->Search(query_dataset, Config::object{{"k", k}});
auto device_index = CopyCpuToGpu(index_, device_id, conf);
auto new_device_index = std::static_pointer_cast<GPUIDMAP>(device_index)->CopyGpuToGpu(device_id, conf);
auto device_result = new_device_index->Search(query_dataset, conf);
AssertAnns(device_result, nq, k);
}
}
This diff is collapsed.
......@@ -20,10 +20,10 @@
#include <iostream>
#include <sstream>
#include "knowhere/common/Exception.h"
#include "knowhere/common/Exception.h"
#include "knowhere/index/vector_index/IndexKDT.h"
#include "knowhere/index/vector_index/utils/Definitions.h"
#include "knowhere/index/vector_index/helpers/Definitions.h"
#include "knowhere/adapter/SptagAdapter.h"
#include "knowhere/adapter/Structure.h"
......@@ -38,31 +38,24 @@ using ::testing::Combine;
class KDTTest
: public DataGen, public TestWithParam<::std::tuple<Config, Config, Config, Config>> {
: public DataGen, public ::testing::Test {
protected:
void SetUp() override {
std::tie(preprocess_cfg, train_cfg, add_cfg, search_cfg) = GetParam();
index_ = std::make_shared<CPUKDTRNG>();
auto tempconf = std::make_shared<KDTCfg>();
tempconf->tptnubmber = 1;
tempconf->k = 10;
conf = tempconf;
Init_with_default();
}
protected:
Config preprocess_cfg;
Config train_cfg;
Config add_cfg;
Config search_cfg;
Config conf;
std::shared_ptr<CPUKDTRNG> index_ = nullptr;
};
INSTANTIATE_TEST_CASE_P(KDTParameters, KDTTest,
Values(
std::make_tuple(Config(),
Config::object{{"TPTNumber", 1}},
Config(),
Config::object{{"k", 10}})
)
);
void AssertAnns(const DatasetPtr &result,
const int &nq,
const int &k) {
......@@ -93,16 +86,16 @@ void PrintResult(const DatasetPtr &result,
}
// TODO(linxj): add test about count() and dimension()
TEST_P(KDTTest, kdt_basic) {
TEST_F(KDTTest, kdt_basic) {
assert(!xb.empty());
auto preprocessor = index_->BuildPreprocessor(base_dataset, preprocess_cfg);
auto preprocessor = index_->BuildPreprocessor(base_dataset, conf);
index_->set_preprocessor(preprocessor);
auto model = index_->Train(base_dataset, train_cfg);
auto model = index_->Train(base_dataset, conf);
index_->set_index_model(model);
index_->Add(base_dataset, add_cfg);
auto result = index_->Search(query_dataset, search_cfg);
index_->Add(base_dataset, conf);
auto result = index_->Search(query_dataset, conf);
AssertAnns(result, nq, k);
{
......@@ -124,18 +117,18 @@ TEST_P(KDTTest, kdt_basic) {
}
}
TEST_P(KDTTest, kdt_serialize) {
TEST_F(KDTTest, kdt_serialize) {
assert(!xb.empty());
auto preprocessor = index_->BuildPreprocessor(base_dataset, preprocess_cfg);
auto preprocessor = index_->BuildPreprocessor(base_dataset, conf);
index_->set_preprocessor(preprocessor);
auto model = index_->Train(base_dataset, train_cfg);
//index_->Add(base_dataset, add_cfg);
auto model = index_->Train(base_dataset, conf);
//index_->Add(base_dataset, conf);
auto binaryset = index_->Serialize();
auto new_index = std::make_shared<CPUKDTRNG>();
new_index->Load(binaryset);
auto result = new_index->Search(query_dataset, search_cfg);
auto result = new_index->Search(query_dataset, conf);
AssertAnns(result, nq, k);
PrintResult(result, nq, k);
ASSERT_EQ(new_index->Count(), nb);
......@@ -172,7 +165,7 @@ TEST_P(KDTTest, kdt_serialize) {
auto new_index = std::make_shared<CPUKDTRNG>();
new_index->Load(load_data_list);
auto result = new_index->Search(query_dataset, search_cfg);
auto result = new_index->Search(query_dataset, conf);
AssertAnns(result, nq, k);
PrintResult(result, nq, k);
}
......
{
"targets": {
"centos7-x86_64": {
"buildenv": "centos7-x86_64",
"builddeps": ["gcc-c++", "make", "wget"],
"environment": {
"CXXFLAGS": "-std=c++11 -Wall -g3"
},
"buildcmd": [
"uname -a",
"rpm -q centos-release",
"g++ --version",
"cd",
"wget https://github.com/Kitware/CMake/releases/download/v3.14.0/cmake-3.14.0.tar.gz",
"tar xfz cmake-3.14.0.tar.gz",
"cd cmake-3.14.0",
"./bootstrap",
"make -j8",
"cd",
"cmake-3.14.0/bin/cmake . -DBUILD_TESTS=ON",
"cmake-3.14.0/bin/cmake --build . --target test_jsoncons",
"cd tests",
"./test_jsoncons;"
]
},
"fedora24-x86_64": {
"buildenv": "fedora24-x86_64",
"builddeps": ["cmake", "make", "gcc gcc-c++"],
"environment": {
"CXXFLAGS": "-std=c++11 -Wall -g3"
},
"buildcmd": [
"uname -a",
"cat /etc/fedora-release",
"g++ --version",
"cmake . -DBUILD_TESTS=ON",
"cmake --build . --target test_jsoncons",
"cd tests",
"./test_jsoncons;"
]
}
}
}
*.suo
*.sdf
*.VC.opendb
*.VC.db
*.vcxproj.user
*.vcxproj.filters
**/x64/Debug/
**/x64/Release/
**/*.vcxproj
**/vs2017
**/vs2015
**/vs2013
**/temp
**/build
examples/build/cmake/build_llvm.sh
examples/build/cmake/build_llvm
src/jsoncons/json_structures.hpp.orig
examples/build/vs2013/
src/jsoncons/json_serializer2.hpp
examples/booklist.json
examples/booklist2.json
examples/build/vs2017/.vs/vs2017/v15/ipch/AutoPCH/2f913b3e9413499/BASICS_EXAMPLES.ipch
examples/build/vs2017/.vs/vs2017/v15/ipch/AutoPCH/8571a45cc244269f/EXAMPLES.ipch
examples/build/vs2017/vs2017.sln
language: cpp
dist: trusty
sudo: required
matrix:
include:
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-4.8
- libgmp-dev
env: COMPILER=gcc VERSION=4.8 CXXFLAGS="-std=c++11"
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-4.9
- libgmp-dev
env: COMPILER=gcc VERSION=4.9 JSONCONS_SANITIZE=1 CXXFLAGS="-std=c++11"
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-5
env: COMPILER=gcc VERSION=5 CXXFLAGS="-std=gnu++11"
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-6
env: COMPILER=gcc VERSION=6 JSONCONS_SANITIZE=1
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-7
env: COMPILER=gcc VERSION=7 JSONCONS_SANITIZE=1
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-8
env: COMPILER=gcc VERSION=8 JSONCONS_SANITIZE=1 CXXFLAGS="-std=c++17 -Werror -Wall -Wextra -Wimplicit-fallthrough -pedantic -Wcast-align -Wcast-qual"
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-8
env: COMPILER=gcc VERSION=8 JSONCONS_SANITIZE=1 CXXFLAGS="-std=c++17 -DJSONCONS_HAS_STRING_VIEW"
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-8
env: COMPILER=gcc VERSION=8 JSONCONS_SANITIZE=1 CXXFLAGS="-std=gnu++17 -Wall -Wextra -Wimplicit-fallthrough"
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- clang-3.9
- g++-4.8-aarch64-linux-gnu
- gcc-4.8-aarch64-linux-gnu
- g++-4.8-multilib
- gcc-4.8-multilib
- qemu
- qemu-system-arm
env: COMPILER=clang VERSION=3.9 CROSS_COMPILE=1 ARM_ARCH_DIR=aarch64-linux-gnu GCC_VER=4.8.4 ARM_SETTINGS="armv8-a -target aarch64-linux-gnueabi" CXXFLAGS="-std=c++11"
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
- llvm-toolchain-trusty-4.0
packages:
- clang-4.0
- g++-7
env: COMPILER=clang VERSION=4.0 CXXFLAGS="-std=c++11"
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
- llvm-toolchain-trusty-5.0
packages:
- clang-5.0
- g++-7
env: COMPILER=clang VERSION=5.0 CXXFLAGS="-std=gnu++11"
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
- llvm-toolchain-trusty-6.0
packages:
- clang-6.0
- g++-7
env: COMPILER=clang VERSION=6.0 JSONCONS_SANITIZE=1 CXXFLAGS="-std=c++11 -Wall -Wextra -Wimplicit-fallthrough"
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
- llvm-toolchain-trusty-6.0
packages:
- clang-6.0
- g++-7
env: COMPILER=clang VERSION=6.0 CXXFLAGS="-DJSONCONS_NO_DEPRECATED"
- os: osx
osx_image: xcode7.3
compiler: clang
env: CXXFLAGS="-std=c++11"
- os: osx
osx_image: xcode8
compiler: clang
env: CXXFLAGS="-std=c++11"
- os: osx
osx_image: xcode8.1
compiler: clang
env: CXXFLAGS="-std=c++11"
- os: osx
osx_image: xcode8.2
compiler: clang
env: CXXFLAGS="-std=c++11"
- os: osx
osx_image: xcode8.3
compiler: clang
env: CXXFLAGS="-std=c++11"
- os: osx
osx_image: xcode9
compiler: clang
env: CXXFLAGS="-std=c++11"
- os: osx
osx_image: xcode9.1
compiler: clang
env: CXXFLAGS="-std=c++11"
- os: osx
osx_image: xcode9.2
compiler: clang
env: CXXFLAGS="-std=c++11"
- os: osx
osx_image: xcode9.3
compiler: clang
env: CXXFLAGS="-std=c++11"
- os: osx
osx_image: xcode9.4
compiler: clang
env: CXXFLAGS="-std=c++11"
before_install:
- |
# Configure build variables
if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then
if [[ "$COMPILER" == "gcc" ]]; then
export CXX=g++-$VERSION CC=gcc-$VERSION;
fi
if [[ "$COMPILER" == "clang" ]]; then
export CXX=clang++-$VERSION CC=clang-$VERSION;
fi
elif [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
export CXX=clang++ CC=clang;
fi
install:
# get CMake (only for systems with brew - macOS)
- |
if [[ (-x $(which brew)) ]]; then
brew update
brew install cmake
brew upgrade cmake
cmake --version
fi
- if [[ "$CROSS_COMPILE" == 1 ]] ; then
if [[ "$ARM_ARCH_DIR" == "aarch64-linux-gnu" ]] ; then
mkdir $HOME/linker_bin ;
ln -s /usr/bin/aarch64-linux-gnu-ld $HOME/linker_bin/ld ;
echo "SETTING GNU LINKER DIR" ;
ls -al $HOME/linker_bin/ld ;
cmake . -DBUILD_TESTS=ON -DCROSS_COMPILE_ARM=ON -DDOWNLOAD_GTEST=ON -DARM_ARCH_DIRECTORY="$ARM_ARCH_DIR" -DARM_GCC_VER="$GCC_VER" -DTARGET_ARCH="$ARM_SETTINGS --prefix=$HOME/linker_bin/" ${CMAKE_OPTIONS};
else
cmake . -DBUILD_TESTS=ON -DCROSS_COMPILE_ARM=ON -DDOWNLOAD_GTEST=ON -DARM_ARCH_DIRECTORY="$ARM_ARCH_DIR" -DARM_GCC_VER="$GCC_VER" -DTARGET_ARCH="$ARM_SETTINGS" ${CMAKE_OPTIONS};
fi
else
cmake . -DBUILD_TESTS=ON ${CMAKE_OPTIONS};
fi
- make -j2 test_jsoncons
- cd tests
script:
- if [[ "$JSONCONS_VALGRIND" == 1 ]]; then
ctest -T memcheck;
fi
- if [[ "$CROSS_COMPILE" == 1 ]]; then
if [[ "$ARM_ARCH_DIR" == "aarch64-linux-gnu" ]]; then
qemu-aarch64 -L /usr/aarch64-linux-gnu/ ./test_jsoncons ;
else
qemu-arm -L /usr/arm-linux-gnueabi/ ./test_jsoncons ;
fi
else
./test_jsoncons;
fi
cmake_minimum_required(VERSION 3.1)
project(jsoncons)
set(JSONCONS_PROJECT_DIR ${PROJECT_SOURCE_DIR})
set(JSONCONS_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/include)
# Versioning
# ==========
file(STRINGS "${JSONCONS_INCLUDE_DIR}/jsoncons/config/version.hpp" jsoncons_version_defines
REGEX "#define JSONCONS_VERSION_(MAJOR|MINOR|PATCH)")
foreach(ver ${jsoncons_version_defines})
if(ver MATCHES "#define JSONCONS_VERSION_(MAJOR|MINOR|PATCH) +([^ ]+)$")
set(JSONCONS_VERSION_${CMAKE_MATCH_1} "${CMAKE_MATCH_2}" CACHE INTERNAL "")
endif()
endforeach()
set(${PROJECT_NAME}_VERSION
${JSONCONS_VERSION_MAJOR}.${JSONCONS_VERSION_MINOR}.${JSONCONS_VERSION_PATCH})
message(STATUS "jsoncons v${${PROJECT_NAME}_VERSION}")
# Build
# =====
file(GLOB_RECURSE JSONCONS_HEADERS ${JSONCONS_INCLUDE_DIR}/*.hpp)
add_library(jsoncons INTERFACE)
target_include_directories(jsoncons INTERFACE $<BUILD_INTERFACE:${JSONCONS_INCLUDE_DIR}>
$<INSTALL_INTERFACE:include>)
OPTION(BUILD_TESTS "jsoncons test suite" ON)
if(BUILD_TESTS)
add_subdirectory(tests)
endif()
# Installation
# ============
include(GNUInstallDirs)
include(CMakePackageConfigHelpers)
install(TARGETS jsoncons
EXPORT ${PROJECT_NAME}-targets)
# Makes the project importable from the build directory
export(EXPORT ${PROJECT_NAME}-targets
FILE "${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}Targets.cmake")
install(DIRECTORY ${JSONCONS_INCLUDE_DIR}/jsoncons
${JSONCONS_INCLUDE_DIR}/jsoncons_ext
DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})
# GNUInstallDirs "DATADIR" wrong here; CMake search path wants "share".
set(JSONCONS_CMAKECONFIG_INSTALL_DIR "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}" CACHE STRING "install path for jsonconsConfig.cmake")
configure_package_config_file(build_files/cmake/config.cmake.in
"${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}Config.cmake"
INSTALL_DESTINATION ${JSONCONS_CMAKECONFIG_INSTALL_DIR})
# jsoncons is header-only and does not depend on the architecture.
write_basic_package_version_file(${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake
VERSION ${${PROJECT_NAME}_VERSION}
COMPATIBILITY AnyNewerVersion)
install(FILES ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}Config.cmake
${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake
DESTINATION ${JSONCONS_CMAKECONFIG_INSTALL_DIR})
install(EXPORT ${PROJECT_NAME}-targets
FILE ${PROJECT_NAME}Targets.cmake
DESTINATION ${JSONCONS_CMAKECONFIG_INSTALL_DIR})
// Copyright Daniel Parker 2013 - 2017.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE or copy at
// http://www.boost.org/LICENSE_1_0.txt)
Boost Software License - Version 1.0 - August 17th, 2003
Permission is hereby granted, free of charge, to any person or organization
obtaining a copy of the software and accompanying documentation covered by
this license (the "Software") to use, reproduce, display, distribute,
execute, and transmit the Software, and to prepare derivative works of the
Software, and to permit third-parties to whom the Software is furnished to
do so, all subject to the following:
The copyright notices in the Software and this entire statement, including
the above license grant, this restriction and the following disclaimer,
must be included in all copies of the Software, in whole or in part, and
all derivative works of the Software, unless such copies or derivative
works are solely in the form of machine-executable object code generated by
a source language processor.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
# Roadmap
### For later releases
- Generalization and enhancement of encode and decode functions
At this point we'll slap a Version 1.0.0 Full Release stamp on `jsoncons`
(we've been leading up to this since 2013.)
### Post 1.0.0
- Support more error recovery and introduce optional `lenient_error_handler`.
- Implement [Concise data definition language (CDDL)](https://tools.ietf.org/html/draft-ietf-cbor-cddl-08) for schema validation in `jsoncons_ext`
A big thanks to the following individuals for contributing:
- Andrew Hutko (early code review)
- [Marc Chevrier](https://github.com/MarkaPola) (contributed clang port, build files, json is<T> and as<T> methods,
and make_array template implementation.)
- [Pedro Larroy](https://github.com/larroy) and the developers of the clearskies_core project (contributed build
system for posix systems, adding GCC to list of supported compilers, bug fixes,
Android fix)
- [Cory Fields](https://github.com/theuni) for fixing warnings about unused variables
- [Vitaliy Gusev](https://github.com/gusev-vitaliy) (reported error in json object operator[size_t i])
- [Alex Merry](https://github.com/amerry) for reporting errors with "typename" keyword experienced with gcc and providing
workaround for gcc 4.8 regex issues.
- [Ignatov Serguei](https://github.com/sergign60) (reported issues experienced with gcc for 0.95 and
0.96 candidate and helped fix them)
- [Milan Burda](https://github.com/miniak) for fix for clang build error
- [Peter Tissen](https://github.com/Bigpet), for reporting and suggesting a fix for get(name,default_val)
- [Tom Bass](https://github.com/tbass) for assistance with clang build errors
- [Andrey Alifanov](https://github.com/AndreyAlifanov) and [Amit Naik](https://github.com/amitnaik1) for failing test cases for JSON Path
- [Yuri Plaksyuk](https://github.com/yplaksyuk) for contributing an extension to JsonPath to allow filter
expressions over a single object.
- [Nikolay Amiantov](https://github.com/abbradar) for fixing compilation errors and warnings by GCC and
Clang, adding read support for std::array and, most appreciated,
adding Travis CI configuration.
- [jakalx](https://github.com/jakalx) contributed fix for operator== throws when comparing a string
against an empty object
- [Alexander](https://github.com/rog13) for contributing fix to jsonpatch::diff
- [Stefano Sinigardi](https://github.com/cenit) for contributing workaround for vs2017 platform issue
- [xezon](https://github.com/danielaparker/jsoncons/pull/140) for proposing decode_csv and encode_csv functions, the
ignore_empty_lines option, and fixes to mismatched allocator types. Also for fixes and improvements in string_view code.
- Vojtech Fried for contributing patches to JSONCONS_DEFINE_LITERAL
and to json::as_string to remove warnings
- [Joshua Pritikin](https://github.com/jpritikin), for reporting gcc ubsan runtime warnings about
load of misaligned addresses, and verifying fix
- [Tobias Hermann](https://github.com/Dobiasd), for reporting issue with `UINT_MAX` not declared
in `bignum.hpp`, and proposing fix.
- [Cebtenzzre](https://github.com/Cebtenzzre), for finding and fixing an issue with conversions on
a basic_json value leading to an infinite recursion when the
value is a bignum, and for fixing undefined behavior in the bignum
class.
- [massimo morara](https://github.com/massimomorara) for reporting numerous issues
- [Alexander B](https://github.com/bas524), for uncovering a bug in how json_parser validated
UTF-8 strings.
- [zhskyy](https://github.com/zhskyy), for contributing __FILE__ and __LINE__ macros removed
from JSONCONS_ASSERT if not defined _DEBUG.
- [soberich](https://github.com/soberich), for contributing the jsonpath sum and prod functions,
and a proposal for aggregation functions that work outside a filter.
- [patternoia](https://github.com/patternoia) for fixing the installation script
to include copying the jsoncons_ext directory into the installation place
- [mikewallis](https://github.com/mikewallis) for removing redundant macro continuation character in JSONCONS_TYPE_TRAITS_DECL
build: false
environment:
vsversion: none
arch: default
matrix:
- platform: vs
vsversion: 2015
arch: x86
FLAGS: ""
- platform: vs
vsversion: 2015
arch: x86
FLAGS: ""
- platform: vs
vsversion: 2015
arch: x86
FLAGS: "/permissive- /std:c++latest /utf-8"
- platform: vs
vsversion: 2015
arch: x64
FLAGS: ""
- platform: vs
vsversion: 2017
arch: x64
FLAGS: ""
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
- platform: vs
vsversion: 2017
arch: x64
FLAGS: "/permissive- /std:c++latest /utf-8"
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
- platform: vs
vsversion: 2017
arch: ARM
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
init:
- git config --global core.autocrlf input
before_build:
# Remove the following from the path, as it will interfere with
# the MinGW builds
- set PATH=%PATH:C:\Program Files\Git\usr\bin;=%
- if %platform%==msvc call "C:\Program Files (x86)\Microsoft Visual Studio 12.0\VC\vcvarsall.bat"
- if %platform%==msvc cmake -G "NMake Makefiles" -DCMAKE_INSTALL_PREFIX=%P%
- if %platform%==vs (
set "makecommand=Visual Studio"
)
- set "vcx=false"
- set "vcs=false"
- if %platform%==vs (
set "vcx=true"
)
- if %vsversion%==2015 (
set "makecommand=%makecommand% 14 %vsversion%"
)
- if %vsversion%==2017 (
set "makecommand=%makecommand% 15 %vsversion%"
)
- if %arch%==x64 (
set "makecommand=%makecommand% Win64"
)
- if %arch%==ARM (
set "makecommand=%makecommand% ARM"
)
- cmake -G "%makecommand%" -D BUILD_TESTS=1 .
build_script:
- cmake --build . --target test_jsoncons --config Release
- cd tests
test_script:
- set "testplatform=%platform%"
# Can not run ARM builds on x86/x64 build images
- if %arch%==ARM (
set "testplatform=none"
)
- if %testplatform%==vs .\Release\test_jsoncons
#!/bin/bash -x
INSTALL_PREFIX=$(pwd)/../build
cp -rf include/* ${INSTALL_PREFIX}/include
#
# Global Configuration for MacOS platform
#
# customize compiler flags
## Add new flags
#
# Global Configuration for linux platform
#
#
# GNU libstdc++ runtime is not supported because it is not yet C++11 compliant
#
# customize compiler flags
## Add new flags
add_definitions (-pthread)
set (CMAKE_SHARED_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -pthread")
set (CMAKE_MODULE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -pthread")
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -pthread")
#
# Global Configuration for windows platform
#
# define some preprocessor flags
add_definitions(/DWIN32_LEAN_AND_MEAN /D_UNICODE /DUNICODE /W4)
# jsoncons cmake module
# This module sets the following variables in your project::
#
# jsoncons_FOUND - true if jsoncons found on the system
# jsoncons_INCLUDE_DIRS - the directory containing jsoncons headers
# jsoncons_LIBRARY - empty
@PACKAGE_INIT@
if(NOT TARGET @PROJECT_NAME@)
include("${CMAKE_CURRENT_LIST_DIR}/@PROJECT_NAME@Targets.cmake")
get_target_property(@PROJECT_NAME@_INCLUDE_DIRS jsoncons INTERFACE_INCLUDE_DIRECTORIES)
endif()
All core jsoncons classes and functions are in namespace `jsoncons`.
#### Unpacked Representation
[json](ref/json.md)
[json_parser](ref/json_parser.md)
[json_reader](ref/json_reader.md)
[json_decoder](ref/json_decoder.md)
[ojson](ref/ojson.md)
[wjson](ref/wjson.md)
[wjson_reader](ref/wjson_reader.md)
[wojson](ref/wojson.md)
#### C++/JSON Conversion
[encode_json](ref/encode_json.md)
[decode_json](ref/decode_json.md)
#### Streaming
[json_content_handler](ref/json_content_handler.md)
[json_encoder](ref/json_encoder.md)
[json_options](ref/json_options.md)
[wjson_encoder](ref/wjson_encoder.md)
[wjson_options](ref/wjson_options.md)
[json_filter](ref/json_filter.md)
[rename_object_member_filter](ref/rename_object_member_filter.md)
[json_cursor](ref/json_cursor.md)
[staj_reader](ref/staj_reader.md)
[staj_object_iterator](ref/staj_object_iterator.md)
[staj_array_iterator](ref/staj_array_iterator.md)
### Extensions
#### [jsonpointer](ref/jsonpointer/jsonpointer.md)
#### [jsonpatch](ref/jsonpatch/jsonpatch.md)
#### [jsonpath](ref/jsonpath/jsonpath.md)
#### [cbor](ref/cbor/cbor.md)
#### [msgpack](ref/msgpack/msgpack.md)
#### [ubjson](ref/ubjson/ubjson.md)
#### [bson](ref/bson/bson.md)
### Tutorials
[Basics](Tutorials/Basics.md)
[Custom Allocators](Tutorials/Custom%20Allocators.md)
[Unicode support](Tutorials/Unicode%20support.md)
## Examples
The examples below illustrate the use of the [json](../ref/json.md) class and [json_query](../ref/jsonpath/json_query.md) function.
### json construction
```c++
#include <jsoncons/json.hpp>
// For convenience
using jsoncons::json;
// Construct a book object
json book1;
book1["category"] = "Fiction";
book1["title"] = "A Wild Sheep Chase: A Novel";
book1["author"] = "Haruki Murakami";
book1["date"] = "2002-04-09";
book1["price"] = 9.01;
book1["isbn"] = "037571894X";
// Construct another using the insert_or_assign function
json book2;
book2.insert_or_assign("category", "History");
book2.insert_or_assign("title", "Charlie Wilson's War");
book2.insert_or_assign("author", "George Crile");
book2.insert_or_assign("date", "2007-11-06");
book2.insert_or_assign("price", 10.50);
book2.insert_or_assign("isbn", "0802143415");
// Use insert_or_assign again, but more efficiently
json book3;
// Reserve memory, to avoid reallocations
book3.reserve(6);
// Insert in name alphabetical order
// Give insert_or_assign a hint where to insert the next member
auto hint = book3.insert_or_assign(book3.object_range().begin(),"author", "Haruki Murakami");
hint = book3.insert_or_assign(hint, "category", "Fiction");
hint = book3.insert_or_assign(hint, "date", "2006-01-03");
hint = book3.insert_or_assign(hint, "isbn", "1400079276");
hint = book3.insert_or_assign(hint, "price", 13.45);
hint = book3.insert_or_assign(hint, "title", "Kafka on the Shore");
// Construct a fourth from a string
json book4 = json::parse(R"(
{
"category" : "Fiction",
"title" : "Pulp",
"author" : "Charles Bukowski",
"date" : "2004-07-08",
"price" : 22.48,
"isbn" : "1852272007"
}
)");
// Construct a booklist array
json booklist = json::array();
// For efficiency, reserve memory, to avoid reallocations
booklist.reserve(4);
// For efficiency, tell jsoncons to move the contents
// of the four book objects into the array
booklist.add(std::move(book1));
booklist.add(std::move(book2));
// Add the third book to the front
auto pos = booklist.add(booklist.array_range().begin(),std::move(book3));
// and the last one immediately after
booklist.add(pos+1,std::move(book4));
// See what's left of book1, 2, 3 and 4 (expect nulls)
std::cout << book1 << "," << book2 << "," << book3 << "," << book4 << std::endl;
//Loop through the booklist elements using a range-based for loop
for (const auto& book : booklist.array_range())
{
std::cout << book["title"].as<std::string>()
<< ","
<< book["price"].as<double>() << std::endl;
}
// The second book
json& book = booklist[1];
//Loop through the book's name-value pairs using a range-based for loop
for (const auto& member : book.object_range())
{
std::cout << member.key()
<< ","
<< member.value() << std::endl;
}
auto it = book.find("author");
if (it != book.object_range().end())
{
// member "author" found
}
if (book.contains("author"))
{
// book has a member "author"
}
book.get("author", "author unknown").as<std::string>();
// Returns author if found, otherwise "author unknown"
try
{
book["ratings"].as<std::string>();
}
catch (const std::out_of_range& e)
{
// member "ratings" not found
}
// Add ratings
book["ratings"]["*****"] = 4;
book["ratings"]["*"] = 2;
// Delete one-star ratings
book["ratings"].erase("*");
```
```c++
// Serialize the booklist to a file
std::ofstream os("booklist.json");
os << pretty_print(booklist);
```
The JSON output `booklist.json`
```json
[
{
"author":"Haruki Murakami",
"category":"Fiction",
"date":"2006-01-03",
"isbn":"1400079276",
"price":13.45,
"title":"Kafka on the Shore"
},
{
"author":"Charles Bukowski",
"category":"Fiction",
"date":"2004-07-08",
"isbn":"1852272007",
"price":22.48,
"ratings":
{
"*****":4
},
"title":"Pulp"
},
{
"author":"Haruki Murakami",
"category":"Fiction",
"date":"2002-04-09",
"isbn":"037571894X",
"price":9.01,
"title":"A Wild Sheep Chase: A Novel"
},
{
"author":"George Crile",
"category":"History",
"date":"2007-11-06",
"isbn":"0802143415",
"price":10.5,
"title":"Charlie Wilson's War"
}
]
```
### json query
```c++
#include <fstream>
#include <jsoncons/json.hpp>
#include <jsoncons_ext/jsonpath/json_query.hpp>
// For convenience
using jsoncons::json;
using jsoncons::jsonpath::json_query;
// Deserialize the booklist
std::ifstream is("booklist.json");
json booklist;
is >> booklist;
// Use a JSONPath expression to find
//
// (1) The authors of books that cost less than $12
json result = json_query(booklist, "$[*][?(@.price < 12)].author");
std::cout << result << std::endl;
// (2) The number of books
result = json_query(booklist, "$.length");
std::cout << result << std::endl;
// (3) The third book
result = json_query(booklist, "$[2]");
std::cout << std::endl << pretty_print(result) << std::endl;
// (4) The authors of books that were published in 2004
result = json_query(booklist, "$[*][?(@.date =~ /2004.*?/)].author");
std::cout << result << std::endl;
// (5) The titles of all books that have ratings
result = json_query(booklist, "$[*][?(@.ratings)].title");
std::cout << result << std::endl;
// (6) All authors and titles of books
result = json_query(booklist, "$..['author','title']");
std::cout << pretty_print(result) << std::endl;
```
Result:
```json
(1) ["Haruki Murakami","George Crile"]
(2) [4]
(3)
[
{
"author":"Haruki Murakami",
"category":"Fiction",
"date":"2002-04-09",
"isbn":"037571894X",
"price":9.01,
"title":"A Wild Sheep Chase: A Novel"
}
]
(4) ["Charles Bukowski"]
(5) ["Pulp"]
(6)
[
"Haruki Murakami",
"Kafka on the Shore",
"Charles Bukowski",
"Pulp",
"Haruki Murakami",
"A Wild Sheep Chase: A Novel",
"George Crile",
"Charlie Wilson's War"
]
```
## Once again, this time with wide characters
### wjson construction
```c++
#include <jsoncons/json.hpp>
// For convenience
using jsoncons::wjson;
// Construct a book object
wjson book1;
book1[L"category"] = L"Fiction";
book1[L"title"] = L"A Wild Sheep Chase: A Novel";
book1[L"author"] = L"Haruki Murakami";
book1[L"date"] = L"2002-04-09";
book1[L"price"] = 9.01;
book1[L"isbn"] = L"037571894X";
// Construct another using the insert_or_assign function
wjson book2;
book2.insert_or_assign(L"category", L"History");
book2.insert_or_assign(L"title", L"Charlie Wilson's War");
book2.insert_or_assign(L"author", L"George Crile");
book2.insert_or_assign(L"date", L"2007-11-06");
book2.insert_or_assign(L"price", 10.50);
book2.insert_or_assign(L"isbn", L"0802143415");
// Use insert_or_assign again, but more efficiently
wjson book3;
// Reserve memory, to avoid reallocations
book3.reserve(6);
// Insert in name alphabetical order
// Give insert_or_assign a hint where to insert the next member
auto hint = book3.insert_or_assign(book3.object_range().begin(), L"author", L"Haruki Murakami");
hint = book3.insert_or_assign(hint, L"category", L"Fiction");
hint = book3.insert_or_assign(hint, L"date", L"2006-01-03");
hint = book3.insert_or_assign(hint, L"isbn", L"1400079276");
hint = book3.insert_or_assign(hint, L"price", 13.45);
hint = book3.insert_or_assign(hint, L"title", L"Kafka on the Shore");
// Construct a fourth from a string
wjson book4 = wjson::parse(LR"(
{
"category" : "Fiction",
"title" : "Pulp",
"author" : "Charles Bukowski",
"date" : "2004-07-08",
"price" : 22.48,
"isbn" : "1852272007"
}
)");
// Construct a booklist array
wjson booklist = wjson::array();
// For efficiency, reserve memory, to avoid reallocations
booklist.reserve(4);
// For efficiency, tell jsoncons to move the contents
// of the four book objects into the array
booklist.add(std::move(book1));
booklist.add(std::move(book2));
// Add the third book to the front
auto pos = booklist.add(booklist.array_range().begin(),std::move(book3));
// and the last one immediately after
booklist.add(pos+1,std::move(book4));
// See what's left of book1, 2, 3 and 4 (expect nulls)
std::wcout << book1 << L"," << book2 << L"," << book3 << L"," << book4 << std::endl;
//Loop through the booklist elements using a range-based for loop
for (const auto& book : booklist.array_range())
{
std::wcout << book[L"title"].as<std::wstring>()
<< L","
<< book[L"price"].as<double>() << std::endl;
}
// The second book
wjson& book = booklist[1];
//Loop through the book's name-value pairs using a range-based for loop
for (const auto& member : book.object_range())
{
std::wcout << member.key()
<< L","
<< member.value() << std::endl;
}
auto it = book.find(L"author");
if (it != book.object_range().end())
{
// member "author" found
}
if (book.contains(L"author"))
{
// book has a member "author"
}
book.get(L"author", L"author unknown").as<std::wstring>();
// Returns author if found, otherwise "author unknown"
try
{
book[L"ratings"].as<std::wstring>();
}
catch (const std::out_of_range& e)
{
// member "ratings" not found
}
// Add ratings
book[L"ratings"][L"*****"] = 4;
book[L"ratings"][L"*"] = 2;
// Delete one-star ratings
book[L"ratings"].erase(L"*");
```
```c++
// Serialize the booklist to a file
std::wofstream os("booklist2.json");
os << pretty_print(booklist);
```
### wjson query
```c++
// Deserialize the booklist
std::wifstream is("booklist2.json");
wjson booklist;
is >> booklist;
// Use a JSONPath expression to find
//
// (1) The authors of books that cost less than $12
wjson result = json_query(booklist, L"$[*][?(@.price < 12)].author");
std::wcout << result << std::endl;
// (2) The number of books
result = json_query(booklist, L"$.length");
std::wcout << result << std::endl;
// (3) The third book
result = json_query(booklist, L"$[2]");
std::wcout << pretty_print(result) << std::endl;
// (4) The authors of books that were published in 2004
result = json_query(booklist, L"$[*][?(@.date =~ /2004.*?/)].author");
std::wcout << result << std::endl;
// (5) The titles of all books that have ratings
result = json_query(booklist, L"$[*][?(@.ratings)].title");
std::wcout << result << std::endl;
// (6) All authors and titles of books
result = json_query(booklist, L"$..['author','title']");
std::wcout << pretty_print(result) << std::endl;
```
Result:
```json
(1) ["Haruki Murakami","George Crile"]
(2) [4]
(3)
[
{
"author":"Haruki Murakami",
"category":"Fiction",
"date":"2002-04-09",
"isbn":"037571894X",
"price":9.01,
"title":"A Wild Sheep Chase: A Novel"
}
]
(4) ["Charles Bukowski"]
(5) ["Pulp"]
(6)
[
"Haruki Murakami",
"Kafka on the Shore",
"Charles Bukowski",
"Pulp",
"Haruki Murakami",
"A Wild Sheep Chase: A Novel",
"George Crile",
"Charlie Wilson's War"
]
```
## Examples
### Using `json` with boost stateless `fast_pool_allocator`
```c++
#include <boost/pool/pool_alloc.hpp>
#include <jsoncons/json.hpp>
// basic_json takes <CharT, ImplementationPolicy, Allocator>, so the allocator
// goes in the third position (compare the Boost.Interprocess example below)
typedef jsoncons::basic_json<char, jsoncons::sorted_policy, boost::fast_pool_allocator<void>> bfp_json;
bfp_json j;
j.insert_or_assign("FirstName","Joe");
j.insert_or_assign("LastName","Smith");
```
### Using `json` with stateful Boost.Interprocess allocators
```c++
#include <boost/interprocess/managed_shared_memory.hpp>
#include <boost/interprocess/containers/vector.hpp>
#include <boost/interprocess/allocators/allocator.hpp>
#include <boost/interprocess/containers/string.hpp>
#include <cstdlib> //std::system
#include <jsoncons/json.hpp>
using namespace jsoncons;
typedef boost::interprocess::allocator<int,
boost::interprocess::managed_shared_memory::segment_manager> shmem_allocator;
struct boost_sorted_policy : public sorted_policy
{
template <class T, class Allocator>
using sequence_container_type = boost::interprocess::vector<T,Allocator>;
template <class CharT, class CharTraits, class Allocator>
using key_storage = boost::interprocess::basic_string<CharT, CharTraits, Allocator>;
template <class CharT, class CharTraits, class Allocator>
using string_storage = boost::interprocess::basic_string<CharT, CharTraits, Allocator>;
};
typedef basic_json<char,boost_sorted_policy,shmem_allocator> shm_json;
int main(int argc, char *argv[])
{
typedef std::pair<double, int> MyType;
if(argc == 1){ //Parent process
//Remove shared memory on construction and destruction
struct shm_remove
{
shm_remove() { boost::interprocess::shared_memory_object::remove("MySharedMemory"); }
~shm_remove(){ boost::interprocess::shared_memory_object::remove("MySharedMemory"); }
} remover;
//Construct managed shared memory
boost::interprocess::managed_shared_memory segment(boost::interprocess::create_only,
"MySharedMemory", 65536);
//Initialize shared memory STL-compatible allocator
const shmem_allocator allocator(segment.get_segment_manager());
// Create json value with all dynamic allocations in shared memory
shm_json* j = segment.construct<shm_json>("my json")(shm_json::array(allocator));
j->push_back(10);
shm_json o(allocator);
o.insert_or_assign("category", "reference");
o.insert_or_assign("author", "Nigel Rees");
o.insert_or_assign("title", "Sayings of the Century");
o.insert_or_assign("price", 8.95);
j->push_back(o);
shm_json a = shm_json::array(2,shm_json::object(allocator),allocator);
a[0]["first"] = 1;
j->push_back(a);
std::pair<shm_json*, boost::interprocess::managed_shared_memory::size_type> res;
res = segment.find<shm_json>("my json");
std::cout << "Parent:" << std::endl;
std::cout << pretty_print(*(res.first)) << std::endl;
//Launch child process
std::string s(argv[0]); s += " child ";
if(0 != std::system(s.c_str()))
return 1;
//Check child has destroyed all objects
if(segment.find<MyType>("my json").first)
return 1;
}
else{
//Open managed shared memory
boost::interprocess::managed_shared_memory segment(boost::interprocess::open_only,
"MySharedMemory");
std::pair<shm_json*, boost::interprocess::managed_shared_memory::size_type> res;
res = segment.find<shm_json>("my json");
if (res.first != nullptr)
{
std::cout << "Child:" << std::endl;
std::cout << pretty_print(*(res.first)) << std::endl;
}
else
{
std::cout << "Result is null" << std::endl;
}
//We're done, delete all the objects
segment.destroy<shm_json>("my json");
}
return 0;
}
```
Output:
```
Parent:
[
10,
{
"author": "Nigel Rees",
"category": "reference",
"price": 8.95,
"title": "Sayings of the Century"
},
[
{
"first": 1
},
{}
]
]
Child:
[
10,
{
"author": "Nigel Rees",
"category": "reference",
"price": 8.95,
"title": "Sayings of the Century"
},
[
{
"first": 1
},
{}
]
]
```
Start a Visual Studio command prompt for x64, then:

    mkdir build64 & pushd build64
    cmake -G "Visual Studio 14 2015 Win64" ..
    popd
    cmake --build build64 --config Debug
<mxfile modified="2019-03-29T03:25:17.280Z" host="www.draw.io" agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36" etag="yA61vU4hp_5DYHRenpc1" version="10.5.9" type="device"><diagram id="iK-0d-7Yl-5MlnfMu26I" name="Page-1">vVZdb9owFP01eexEYqD0sQXabajVOiZBn5CJbxNvjs0cA2G/fjaxSZwElUpV+1D5nuvrj3POdQjQOCseJN6kj4IAC6IeKQI0CaLoOhzp/wY4lAAa9kogkZSUUFgBc/oPLOimbSmB3JuohGCKbnwwFpxDrDwMSyn2/rRXwfxdNziBFjCPMWujC0pUWqKjQa/CvwJNUrdz2LOZDLvJFshTTMS+BqFpgMZSCFWOsmIMzHDneCnr7s9kTweTwNUlBdOnXwWfPc/TaPx3phaP2Uvx/cqussNsay+8xjmNV79zwVeaU6UXX6WYEwYyiIZM73S3NqPEjLqQ41XVwfEnxZYTMEfo6fQ+pQrmGxyb7F4bRmOpypiOwlN1/UrufCAVFDXIXvEBRAZKHvQUmx1Ztq3dQsf+vhIvdBZMa8L1LYatX5LTyhWlemBZfQfD12cYXhuGgce6YT6B2VfK2FgwIY+1CI5/Gs+VFH+glkFDdIPIx2hxkRj9zxRj1BKjRStwcmveDR3FDOdaKp9JKKhaWtLN+MWMvwxsNClqqcnBBVwffukWMEGtyoRV2TFydWc1yMVWxvC26xSWCai3+x+I9wq2Fa0pNugQzGESGFZ057+dXSraHX4Iqm92Mkzf90uEGj4or22L6k9dY52bhu9GjXVKWlrrHC11uvRFLkt2j2k0e1rin5BcLZ5nsJtBR8t/4ylIqjDXoulvIcWJxFnLerqtlG82vz254NDoZQthRhNuHKuNop8TdGealOqv2K1NZJQQs03nO+G/JB/Q+GFDgKjX0fiow0fR+xtfh9U3tFSw+iGCpv8B</diagram></mxfile>
\ No newline at end of file
### jsoncons::cbor::cbor_decode_options
An abstract class that defines accessors for CBOR decoding options.
#### Header
```c++
#include <jsoncons/cbor/cbor_options.hpp>
```
#### Implementing classes
[cbor_options](cbor_options.md)
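Before the accessor details, a minimal CBOR round-trip for orientation. This sketch uses only the default options; passing a `cbor_options` explicitly (via the overloads that accept decode options) is not shown, and the book value is invented for illustration.
```c++
#include <cstdint>
#include <iostream>
#include <vector>
#include <jsoncons/json.hpp>
#include <jsoncons_ext/cbor/cbor.hpp>

using jsoncons::json;

int main()
{
    json book = json::parse(R"({"title": "Pulp", "price": 22.48})");

    // json -> CBOR bytes
    std::vector<uint8_t> buffer;
    jsoncons::cbor::encode_cbor(book, buffer);

    // CBOR bytes -> json, using default decode options
    json decoded = jsoncons::cbor::decode_cbor<json>(buffer);
    std::cout << decoded["title"] << std::endl;
}
```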
#### Destructor
    virtual ~cbor_decode_options();
#### Accessors
<mxfile modified="2019-02-06T01:28:05.917Z" host="www.draw.io" agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.81 Safari/537.36" etag="HJRvV8Pagh8sW1Um5jfB" version="10.1.8" type="device"><diagram id="iK-0d-7Yl-5MlnfMu26I" name="Page-1">3Vffb9owEP5reGRKYmD0sYWu21CrdUyCPiE3vibeHJs5BsL++p2JQ35WbacKTfCC77Pv7Pvu43rtkUmS3Wi6jm8VA9ELPJb1yLQXBL4X+PhlkX2OkEGQA5HmzB0qgTn/A4WnQzecQVo7aJQShq/rYKikhNDUMKq12tWPPSlRv3VNI2gB85CKNrrgzMQ5Oh56Jf4ZeBQXN/ue20locdgBaUyZ2lUgct0jE62UyVdJNgFhySt4yf0+PbN7fJgGaV7jcH33I5Oz+3kcTH7PzOI2eci+9l2ULRUbl/AjTXm4+pkquUJODQZfxVQyAboXjATedPVoV5FddSGHVM2+4E+rjWRgn+Dh9i7mBuZrGtrdHSoGsdgkAi3/6F1NqXgfaANZBXIp3oBKwOg9HnG7Y8e2k5tfsL8ri+ePHBZXCjdwGHV6iY6RS0px4Vh9A8Mfn2E4fFR6lYLmVKDoT0DuExdiooTSB18Chw/iqdHqF1R2yIhcEPY+5XhVPQanrMe4VY8WrSDZpW0daIWCplitOpOQcbN0pNv1g11/GDprmlW2pvvCkPj4ZRHAGhUva5ZuB6vwe7YGqdroEF4WnqE6AvNyCwBWa4TtilYqNuwoWIFpENTwbb19dlXR3fBNcczsKJhBXS8BaeggT9s5VbtdI85FQ3fjRpycllacg6SOSb9KZdH2Ng5md0v6HaL+4n4G2xn0h2eoss5EyVmqZ9AINDqtekZnop5q7+lM9P/qPUOv0TS8f5WP39Chf1r9tGeOLzLGUcNQiT9mHMY5jTRNWqLCP+qmLqP6cCCVhMYk4SAcYiJptYgSwGGGXNkRgeMYfek2Es6YvaZzSqnPMe8wdviN9h94HWMH6VBS8PaxA81yiM8rWP4rRK7/Ag==</diagram></mxfile>
\ No newline at end of file