Skip to content

Commit

Permalink
fixed missing trt_backend option & remove unneeded data layout check i…
Browse files Browse the repository at this point in the history
…n Cast (#14)

* update .gitignore

* Added checking for cmake include dir

* fixed missing trt_backend option bug when init from trt

* remove unneeded data layout check and add pre-check for dtype

* changed RGB2BGR to BGR2RGB in ppcls model

* Update CMakeLists.txt
  • Loading branch information
DefTruth authored Jul 12, 2022
1 parent 57697f3 commit a45f189
Show file tree
Hide file tree
Showing 5 changed files with 38 additions and 15 deletions.
11 changes: 11 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1 +1,12 @@
fastdeploy/libs/lib*
build
cmake-build-debug
cmake-build-release
.vscode
FastDeploy.cmake
fastdeploy/core/config.h
build-debug.sh
*dist
fastdeploy.egg-info
.setuptools-cmake-build
fastdeploy/version.py
3 changes: 2 additions & 1 deletion fastdeploy/backends/tensorrt/trt_backend.cc
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,8 @@ std::vector<int> toVec(const nvinfer1::Dims& dim) {
return out;
}

bool TrtBackend::InitFromTrt(const std::string& trt_engine_file) {
bool TrtBackend::InitFromTrt(const std::string& trt_engine_file,
const TrtBackendOption& option) {
if (initialized_) {
FDERROR << "TrtBackend is already initlized, cannot initialize again."
<< std::endl;
Expand Down
3 changes: 2 additions & 1 deletion fastdeploy/backends/tensorrt/trt_backend.h
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,8 @@ class TrtBackend : public BaseBackend {
bool InitFromOnnx(const std::string& model_file,
const TrtBackendOption& option = TrtBackendOption(),
bool from_memory_buffer = false);
bool InitFromTrt(const std::string& trt_engine_file);
bool InitFromTrt(const std::string& trt_engine_file,
const TrtBackendOption& option = TrtBackendOption());

bool Infer(std::vector<FDTensor>& inputs, std::vector<FDTensor>* outputs);

Expand Down
34 changes: 22 additions & 12 deletions fastdeploy/vision/common/processors/cast.cc
Original file line number Diff line number Diff line change
Expand Up @@ -18,30 +18,40 @@ namespace fastdeploy {
namespace vision {

bool Cast::CpuRun(Mat* mat) {
if (mat->layout != Layout::CHW) {
FDERROR << "Cast: The input data must be Layout::HWC format!" << std::endl;
return false;
}
cv::Mat* im = mat->GetCpuMat();
int c = im->channels();
if (dtype_ == "float") {
im->convertTo(*im, CV_32FC(im->channels()));
if (im->type() != CV_32FC(c)) {
im->convertTo(*im, CV_32FC(c));
}
} else if (dtype_ == "double") {
im->convertTo(*im, CV_64FC(im->channels()));
if (im->type() != CV_64FC(c)) {
im->convertTo(*im, CV_64FC(c));
}
} else {
FDLogger() << "[WARN] Cast not support for " << dtype_
<< " now! will skip this operation."
<< std::endl;
}
return true;
}

#ifdef ENABLE_OPENCV_CUDA
bool Cast::GpuRun(Mat* mat) {
if (mat->layout != Layout::CHW) {
FDERROR << "Cast: The input data must be Layout::HWC format!" << std::endl;
return false;
}
cv::cuda::GpuMat* im = mat->GetGpuMat();
int c = im->channels();
if (dtype_ == "float") {
im->convertTo(*im, CV_32FC(im->channels()));
if (im->type() != CV_32FC(c)) {
im->convertTo(*im, CV_32FC(c));
}
} else if (dtype_ == "double") {
im->convertTo(*im, CV_64FC(im->channels()));
if (im->type() != CV_64FC(c)) {
im->convertTo(*im, CV_64FC(c));
}
} else {
FDLogger() << "[WARN] Cast not support for " << dtype_
<< " now! will skip this operation."
<< std::endl;
}
return true;
}
Expand Down
2 changes: 1 addition & 1 deletion fastdeploy/vision/ppcls/model.cc
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ bool Model::BuildPreprocessPipelineFromConfig() {
return false;
}
auto preprocess_cfg = cfg["PreProcess"]["transform_ops"];
processors_.push_back(std::make_shared<RGB2BGR>());
processors_.push_back(std::make_shared<BGR2RGB>());
for (const auto& op : preprocess_cfg) {
FDASSERT(op.IsMap(),
"Require the transform information in yaml be Map type.");
Expand Down

0 comments on commit a45f189

Please sign in to comment.