C++ 的 JSON 读取操作
使用的开源库是nlohmann / json。后续操作也都是基于该开源库操作。
本地json文件如下:
{"model_config":{
"model_type":"paddlex",
"model_cfg_file":"./models/yolov3/model.yml",
"model_key":"",
"model_filename":"./models/yolov3/model.pdmodel",
"model_params_filename":"./models/yolov3/model.pdiparams",
"model_use_gpu":true,
"model_gpu_id":0,
"model_use_trt":false,
"is_input_shape":true,
"model_input_shape_max":{"image":[1,3,608,608]},
"model_input_shape_min":{"image":[1,3,608,608]},
"model_input_shape_optim":{"image":[1,3,608,608]},
"input_shape_path":"config.pdmodl"},
"task_config":{
"filter_ROI":[123,123,234,234,345,345],
"filter_ai_type":["clas", "seg"],
"filter_model_id":[25,36],
"object_select_type":[["person","hat","red"],["dog","eye"]],
"conf_thr":0.5}
}
上述json文件读取操作如下:
#include <fstream>
#include <iostream>
#include <map>
#include <numeric>
#include <stdexcept>
#include <string>
#include <vector>
#include "json.hpp"
using Json = nlohmann::json;
// Deployment-side model configuration, mirroring the "model_config" object
// of the JSON config file. All scalar members carry brace initializers so a
// default-constructed instance has well-defined values (the original left
// bool/int members indeterminate).
typedef struct ModelConfig_T
{
    std::string model_type;              // e.g. "paddlex"
    std::string model_cfg_file;          // path to the model's .yml config
    // Model decryption key. Empty (the default) means load a plain model;
    // when non-empty the model is decrypted with this key before deployment.
    std::string model_key;
    std::string model_filename;          // path to the .pdmodel file
    std::string model_params_filename;   // path to the .pdiparams file
    bool model_use_gpu{false};
    int model_gpu_id{0};
    bool model_use_trt{false};
    bool model_is_input_shape{false};    // filled from JSON key "is_input_shape"
    // Input-shape ranges keyed by input tensor name, each value a list such
    // as [1,3,608,608] — presumably [N,C,H,W] for TensorRT dynamic shapes;
    // confirm against the deployment code.
    std::map<std::string, std::vector<int>> model_input_shape_max;
    std::map<std::string, std::vector<int>> model_input_shape_min;
    std::map<std::string, std::vector<int>> model_input_shape_optim;
    std::string model_input_shape_path;  // filled from JSON key "input_shape_path"
} ModelConfig;
// Per-task filtering/selection settings, mirroring the "task_config" object
// of the JSON config file.
typedef struct TaskConfig_T
{
    // Flattened ROI coordinates (e.g. [123,123,234,234,...]) — presumably
    // (x,y) point pairs; confirm with the consumer of this field.
    std::vector<int> filter_ROI;
    std::vector<std::string> filter_ai_type;   // e.g. "clas", "seg"
    std::vector<int> filter_model_id;
    // Each inner vector is one group of object/attribute labels to select,
    // e.g. {"person","hat","red"}.
    std::vector<std::vector<std::string>> object_select_type;
    // Confidence threshold, effective globally for this run. Brace-initialized
    // so a default-constructed instance is deterministic.
    double conf_thr{0.0};
} TaskConfig;
ModelConfig m_model_config;
TaskConfig m_task_config;
Json config_json;
std::ifstream(m_config_path) >> config_json;
m_model_config.model_type = config_json.at("model_config").at("model_type").get<std::string>();
m_model_config.model_cfg_file = config_json.at("model_config").at("model_cfg_file").get<std::string>();
m_model_config.model_key = config_json.at("model_config").at("model_key").get<std::string>();
m_model_config.model_filename = config_json.at("model_config").at("model_filename").get<std::string>();
m_model_config.model_params_filename = config_json.at("model_config").at("model_params_filename").get<std::string>();
m_model_config.model_use_gpu = config_json.at("model_config").at("model_use_gpu").get<bool>();
m_model_config.model_gpu_id = config_json.at("model_config").at("model_gpu_id").get<int>();
m_model_config.model_use_trt = config_json.at("model_config").at("model_use_trt").get<bool>();
m_model_config.model_is_input_shape = config_json.at("model_config").at("is_input_shape").get<bool>();
//std::cout << config_json.at("model_config").at("model_input_shape_max") << std::endl;
for (auto item : config_json.at("model_config").at("model_input_shape_max").items())
{
std::vector<int> tmp;
for (int i = 0; i < item.value().size(); i++)
{
tmp.push_back((int)item.value()[i]);
}
m_model_config.model_input_shape_max.insert(std::pair<std::string, std::vector<int>>(item.key(), item.value()));
//m_model_input_shape_max[item.key()] = { item.value()[0],item.value()[0],item.value()[0],item.value()[0]};
}
for (auto item : config_json.at("model_config").at("model_input_shape_min").items())
{
std::vector<int> tmp;
for (int i = 0; i < item.value().size(); i++)
{
tmp.push_back((int)item.value()[i]);
}
m_model_config.model_input_shape_min.insert(std::pair<std::string, std::vector<int>>(item.key(), item.value()));
//m_model_input_shape_max[item.key()] = { item.value()[0],item.value()[0],item.value()[0],item.value()[0]};
}
for (auto item : config_json.at("model_config").at("model_input_shape_optim").items())
{
std::vector<int> tmp;
for (int i = 0; i < item.value().size(); i++)
{
tmp.push_back((int)item.value()[i]);
}
m_model_config.model_input_shape_optim.insert(std::pair<std::string, std::vector<int>>(item.key(), item.value()));
//m_model_input_shape_max[item.key()] = { item.value()[0],item.value()[0],item.value()[0],item.value()[0]};
}
m_model_config.model_input_shape_path = config_json.at("model_config").at("input_shape_path").get<std::string>();
m_task_config.filter_ROI = config_json.at("task_config").at("filter_ROI").get<std::vector<int>>();
m_task_config.filter_ai_type = config_json.at("task_config").at("filter_ai_type").get<std::vector<std::string>>();
m_task_config.filter_model_id = config_json.at("task_config").at("filter_model_id").get<std::vector<int>>();
m_task_config.object_select_type = config_json.at("task_config").at("object_select_type").get<std::vector<std::vector<std::string>>>();
m_task_config.conf_thr = config_json.at("task_config").at("conf_thr").get<double>();
更多推荐
所有评论(0)