feat: use DTO for NCNN init parameters #1147

Draft · wants to merge 2 commits into base: master
2 changes: 1 addition & 1 deletion CMakeLists.txt
@@ -87,7 +87,7 @@ if (NOT EXISTS ${CMAKE_BINARY_DIR}/src)
COMMAND bash -c "mkdir ${CMAKE_BINARY_DIR}/src")
endif()

set(CMAKE_CXX_FLAGS "-g -O2 -Wall -Wextra -fopenmp -fPIC -std=c++14 -DUSE_OPENCV -DUSE_LMDB")
set(CMAKE_CXX_FLAGS "-g -O2 -Wall -Wextra -fopenmp -fPIC -std=c++14 -DUSE_OPENCV -DUSE_LMDB -fmax-errors=4")

if(WARNING)
string(APPEND CMAKE_CXX_FLAGS " -Werror")
14 changes: 14 additions & 0 deletions src/apidata.h
@@ -298,6 +298,20 @@ namespace dd
.getPtr();
}

template <typename T>
inline static APIData fromDTO(const oatpp::Object<T> &dto)
{
std::shared_ptr<oatpp::data::mapping::ObjectMapper> object_mapper
= oatpp::parser::json::mapping::ObjectMapper::createShared();

oatpp::String json = object_mapper->writeToString(dto);
APIData ad;
rapidjson::Document d;
d.Parse<rapidjson::kParseNanAndInfFlag>(json->c_str());
ad.fromRapidJson(d);
return ad;
}

public:
/**
* \brief render Mustache template based on this APIData object
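For context, a minimal usage sketch of the new `APIData::fromDTO` helper. The `DTO::MLLib` type and its fields come from `src/dto/mllib.hpp` added below; the wrapper function and the field values are illustrative, and the sketch assumes `fromDTO` is reachable from the call site (its access specifier is not visible in this hunk).

```cpp
#include "apidata.h"
#include "dto/mllib.hpp"

// Illustrative only: build an MLLib DTO, then round-trip it into an APIData
// via the new helper. fromDTO serializes the DTO to JSON with oatpp and
// re-parses that JSON into the APIData tree with rapidjson.
void example_from_dto()
{
  auto mllib = dd::DTO::MLLib::createShared();
  mllib->nclasses = 21;
  mllib->datatype = "fp32";

  dd::APIData ad = dd::APIData::fromDTO(mllib);
  int nclasses = ad.get("nclasses").get<int>(); // expected: 21
  (void)nclasses;
}
```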
64 changes: 21 additions & 43 deletions src/backends/ncnn/ncnnlib.cc
@@ -22,7 +22,6 @@
#include "outputconnectorstrategy.h"
#include <thread>
#include <algorithm>
#include "utils/utils.hpp"

// NCNN
#include "ncnnlib.h"
@@ -31,6 +30,8 @@
#include "net.h"
#include <iostream>

#include "dto/mllib.hpp"

namespace dd
{
template <class TInputConnectorStrategy, class TOutputConnectorStrategy,
@@ -53,10 +54,10 @@ namespace dd
{
this->_libname = "ncnn";
_net = new ncnn::Net();
_net->opt.num_threads = _threads;
_net->opt.num_threads = 1;
_net->opt.blob_allocator = &_blob_pool_allocator;
_net->opt.workspace_allocator = &_workspace_pool_allocator;
_net->opt.lightmode = _lightmode;
_net->opt.lightmode = true;
}

template <class TInputConnectorStrategy, class TOutputConnectorStrategy,
@@ -69,12 +70,9 @@
this->_libname = "ncnn";
_net = tl._net;
tl._net = nullptr;
_nclasses = tl._nclasses;
_threads = tl._threads;
_timeserie = tl._timeserie;
_old_height = tl._old_height;
_inputBlob = tl._inputBlob;
_outputBlob = tl._outputBlob;
_init_dto = tl._init_dto;
}

template <class TInputConnectorStrategy, class TOutputConnectorStrategy,
@@ -92,11 +90,11 @@
template <class TInputConnectorStrategy, class TOutputConnectorStrategy,
class TMLModel>
void NCNNLib<TInputConnectorStrategy, TOutputConnectorStrategy,
TMLModel>::init_mllib(const APIData &ad)
TMLModel>::init_mllib(const oatpp::Object<DTO::MLLib> &init_dto)
{
bool use_fp32 = (ad.has("datatype")
&& ad.get("datatype").get<std::string>()
== "fp32"); // default is fp16
_init_dto = init_dto;

bool use_fp32 = (_init_dto->datatype == "fp32");
_net->opt.use_fp16_packed = !use_fp32;
_net->opt.use_fp16_storage = !use_fp32;
_net->opt.use_fp16_arithmetic = !use_fp32;
@@ -124,35 +122,11 @@
_old_height = this->_inputc.height();
_net->set_input_h(_old_height);

if (ad.has("nclasses"))
_nclasses = ad.get("nclasses").get<int>();

if (ad.has("threads"))
_threads = ad.get("threads").get<int>();
else
_threads = dd_utils::my_hardware_concurrency();

_timeserie = this->_inputc._timeserie;
if (_timeserie)
this->_mltype = "timeserie";

if (ad.has("lightmode"))
{
_lightmode = ad.get("lightmode").get<bool>();
_net->opt.lightmode = _lightmode;
}

// setting the value of Input Layer
if (ad.has("inputblob"))
{
_inputBlob = ad.get("inputblob").get<std::string>();
}
// setting the final Output Layer
if (ad.has("outputblob"))
{
_outputBlob = ad.get("outputblob").get<std::string>();
}

_net->opt.lightmode = _init_dto->lightmode;
_blob_pool_allocator.set_size_compare_ratio(0.0f);
_workspace_pool_allocator.set_size_compare_ratio(0.5f);
model_type(this->_mlmodel._params, this->_mltype);
@@ -233,7 +207,10 @@ namespace dd

// Extract detection or classification
int ret = 0;
std::string out_blob = _outputBlob;
std::string out_blob;
if (_init_dto->outputBlob)
out_blob = _init_dto->outputBlob->std_str();

if (out_blob.empty())
{
if (bbox == true)
@@ -262,11 +239,11 @@
{
best = ad_output.get("best").get<int>();
}
if (best == -1 || best > _nclasses)
best = _nclasses;
if (best == -1 || best > _init_dto->nclasses)
best = _init_dto->nclasses;

// for loop around batch size
#pragma omp parallel for num_threads(_threads)
#pragma omp parallel for num_threads(*_init_dto->threads)
for (size_t b = 0; b < inputc._ids.size(); b++)
{
std::vector<double> probs;
@@ -276,8 +253,8 @@ namespace dd
APIData rad;

ncnn::Extractor ex = _net->create_extractor();
ex.set_num_threads(_threads);
ex.input(_inputBlob.c_str(), inputc._in.at(b));
ex.set_num_threads(_init_dto->threads);
ex.input(_init_dto->inputBlob->c_str(), inputc._in.at(b));

ret = ex.extract(out_blob.c_str(), inputc._out.at(b));
if (ret == -1)
@@ -423,7 +400,8 @@ namespace dd
} // end for batch_size

tout.add_results(vrad);
out.add("nclasses", this->_nclasses);
int nclasses = this->_init_dto->nclasses;
out.add("nclasses", nclasses);
if (bbox == true)
out.add("bbox", true);
out.add("roi", false);
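The oatpp wrappers stored in `_init_dto` are nullable objects rather than plain values; the sketch below collects the access patterns used in this file (`*` for `Int32`, `std_str()` for `String`, a null check for the default-less `outputBlob`). The free function and its name are illustrative only.

```cpp
#include <string>
#include "dto/mllib.hpp"

// Illustrative only: unwrap the oatpp types of DTO::MLLib into the plain
// C++ values NCNN expects, mirroring the access patterns used above.
void unwrap_mllib_options(const oatpp::Object<dd::DTO::MLLib> &init_dto)
{
  int threads = *init_dto->threads;         // oatpp::Int32 -> int
  bool lightmode = init_dto->lightmode;     // oatpp::Boolean -> bool
  std::string input_blob = init_dto->inputBlob->std_str();

  std::string output_blob;                  // outputBlob has no default,
  if (init_dto->outputBlob)                 // so check for null first
    output_blob = init_dto->outputBlob->std_str();

  (void)threads; (void)lightmode; (void)input_blob; (void)output_blob;
}
```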
16 changes: 8 additions & 8 deletions src/backends/ncnn/ncnnlib.h
@@ -22,12 +22,15 @@
#ifndef NCNNLIB_H
#define NCNNLIB_H

#include "apidata.h"
#include "utils/utils.hpp"

#include "dto/mllib.hpp"

// NCNN
#include "net.h"
#include "ncnnmodel.h"

#include "apidata.h"

namespace dd
{
template <class TInputConnectorStrategy, class TOutputConnectorStrategy,
@@ -41,7 +44,7 @@ namespace dd
~NCNNLib();

/*- from mllib -*/
void init_mllib(const APIData &ad);
void init_mllib(const oatpp::Object<DTO::MLLib> &init_dto);

void clear_mllib(const APIData &ad);

@@ -53,20 +56,17 @@

public:
ncnn::Net *_net = nullptr;
int _nclasses = 0;
bool _timeserie = false;
bool _lightmode = true;

private:
oatpp::Object<DTO::MLLib> _init_dto;
static ncnn::UnlockedPoolAllocator _blob_pool_allocator;
static ncnn::PoolAllocator _workspace_pool_allocator;

protected:
int _threads = 1;
int _old_height = -1;
std::string _inputBlob = "data";
std::string _outputBlob;
};

}

#endif
11 changes: 7 additions & 4 deletions src/backends/ncnn/ncnnmodel.h
@@ -25,6 +25,8 @@
#include "dd_spdlog.h"
#include "mlmodel.h"
#include "apidata.h"
#include "dto/model.hpp"
#include "dto/service_create.hpp"

namespace dd
{
@@ -34,12 +36,13 @@
NCNNModel() : MLModel()
{
}
NCNNModel(const APIData &ad, APIData &adg,
NCNNModel(const oatpp::Object<DTO::Model> &model_dto,
const oatpp::Object<DTO::ServiceCreate> &service_dto,
const std::shared_ptr<spdlog::logger> &logger)
: MLModel(ad, adg, logger)
: MLModel(model_dto, service_dto, logger)
{
if (ad.has("repository"))
this->_repo = ad.get("repository").get<std::string>();
if (model_dto->repository)
this->_repo = model_dto->repository->std_str();
read_from_repository(spdlog::get("api"));
read_corresp_file();
}
11 changes: 7 additions & 4 deletions src/dto/img_connector.hpp → src/dto/input_connector.hpp
@@ -19,8 +19,8 @@
* along with deepdetect. If not, see <http://www.gnu.org/licenses/>.
*/

#ifndef HTTP_DTO_IMG_CONNECTOR_HPP
#define HTTP_DTO_IMG_CONNECTOR_HPP
#ifndef DTO_INPUT_CONNECTOR_HPP
#define DTO_INPUT_CONNECTOR_HPP

#include "dd_config.h"
#include "oatpp/core/Types.hpp"
@@ -32,10 +32,13 @@ namespace dd
{
#include OATPP_CODEGEN_BEGIN(DTO)

class ImgInputConnectorParameters : public oatpp::DTO
class InputConnector : public oatpp::DTO
{
DTO_INIT(ImgInputConnectorParameters, DTO /* extends */)
DTO_INIT(InputConnector, DTO /* extends */)
// Connector type
DTO_FIELD(String, connector);

// IMG Input Connector
DTO_FIELD(Int32, width);
DTO_FIELD(Int32, height);
DTO_FIELD(Int32, crop_width);
85 changes: 85 additions & 0 deletions src/dto/mllib.hpp
@@ -0,0 +1,85 @@
/**
* DeepDetect
* Copyright (c) 2021 Jolibrain SASU
* Author: Mehdi Abaakouk <[email protected]>
*
* This file is part of deepdetect.
*
* deepdetect is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* deepdetect is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with deepdetect. If not, see <http://www.gnu.org/licenses/>.
*/

#ifndef DTO_MLLIB_H
#define DTO_MLLIB_H

#include "dd_config.h"
#include "utils/utils.hpp"
#include "oatpp/core/Types.hpp"
#include "oatpp/core/macro/codegen.hpp"

namespace dd
{
namespace DTO
{
#include OATPP_CODEGEN_BEGIN(DTO) ///< Begin DTO codegen section

class MLLib : public oatpp::DTO
{
DTO_INIT(MLLib, DTO /* extends */)

// NCNN Options
DTO_FIELD_INFO(nclasses)
{
info->description = "number of output classes (`supervised` service "
"type), classification only";
};
DTO_FIELD(Int32, nclasses) = 0;

DTO_FIELD_INFO(threads)
{
info->description = "number of threads";
};
DTO_FIELD(Int32, threads) = dd::dd_utils::my_hardware_concurrency();

DTO_FIELD_INFO(lightmode)
{
info->description = "enable light mode";
};
DTO_FIELD(Boolean, lightmode) = true;

DTO_FIELD_INFO(inputBlob)
{
info->description = "network input blob name";
};
DTO_FIELD(String, inputBlob) = "data";

DTO_FIELD_INFO(outputBlob)
{
info->description = "network output blob name (default depends on "
"network type(ie prob or "
"rnn_pred or probs or detection_out)";
};
DTO_FIELD(String, outputBlob);

DTO_FIELD_INFO(datatype)
{
info->description = "fp16 or fp32";
};

DTO_FIELD(String, datatype) = "fp16";
};
#include OATPP_CODEGEN_END(DTO) ///< End DTO codegen section

}
}
#endif
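As a rough sketch of how this DTO might be fed (not part of the patch), an `mllib` JSON fragment can be read into `DTO::MLLib` with oatpp's JSON object mapper; fields absent from the JSON keep the defaults declared above. The `parse_mllib` helper and the sample JSON are illustrative.

```cpp
#include "dto/mllib.hpp"
#include "oatpp/parser/json/mapping/ObjectMapper.hpp"

// Illustrative only: deserialize an "mllib" JSON fragment into DTO::MLLib.
// Omitted fields keep the defaults declared above (datatype = "fp16",
// inputBlob = "data", threads = hardware concurrency, ...).
oatpp::Object<dd::DTO::MLLib> parse_mllib(const oatpp::String &json)
{
  auto mapper = oatpp::parser::json::mapping::ObjectMapper::createShared();
  return mapper->readFromString<oatpp::Object<dd::DTO::MLLib>>(json);
}

// e.g. parse_mllib(R"({"nclasses": 80, "datatype": "fp32", "lightmode": false})")
```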