Skip to content

Commit

Permalink
Merge branch 'dev-0.7'
Browse files Browse the repository at this point in the history
  • Loading branch information
lumurillo committed Jun 3, 2020
2 parents 505b225 + 3b85749 commit 966ac2a
Show file tree
Hide file tree
Showing 21 changed files with 700 additions and 32 deletions.
36 changes: 30 additions & 6 deletions .github/workflows/tests.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,11 @@ on:
jobs:
build_tensorflow_autotools:
runs-on: ubuntu-18.04
container: ridgerun/r2inference:v0.1.4
strategy:
matrix:
container: ["ridgerun/r2inference-ubuntu-18.04:v0.1.5", "ridgerun/r2inference-ubuntu-16.04:v0.3.1"]
container:
image: ${{ matrix.container }}
env:
CXXFLAGS: "-Werror"
steps:
Expand Down Expand Up @@ -42,7 +46,11 @@ jobs:
./list-backends
build_tflite_autotools:
runs-on: ubuntu-18.04
container: ridgerun/r2inference:v0.1.4
strategy:
matrix:
container: ["ridgerun/r2inference-ubuntu-18.04:v0.1.5"]
container:
image: ${{ matrix.container }}
env:
TENSORFLOW_PATH: /root/r2inference/backends/tflite/v2.0.1/include/tensorflow
CPPFLAGS: "-I${TENSORFLOW_PATH} -I${TENSORFLOW_PATH}/tensorflow/lite/tools/make/downloads/flatbuffers/include"
Expand Down Expand Up @@ -73,7 +81,11 @@ jobs:
./list-backends
build_tensorflow_tflite_autotools:
runs-on: ubuntu-18.04
container: ridgerun/r2inference:v0.1.4
strategy:
matrix:
container: ["ridgerun/r2inference-ubuntu-18.04:v0.1.5"]
container:
image: ${{ matrix.container }}
env:
TENSORFLOW_PATH: /root/r2inference/backends/tflite/v2.0.1/include/tensorflow
CPPFLAGS: "-I${TENSORFLOW_PATH} -I${TENSORFLOW_PATH}/tensorflow/lite/tools/make/downloads/flatbuffers/include"
Expand Down Expand Up @@ -106,7 +118,11 @@ jobs:
./list-backends
build_tensorflow_meson:
runs-on: ubuntu-18.04
container: ridgerun/r2inference:v0.1.4
strategy:
matrix:
container: ["ridgerun/r2inference-ubuntu-18.04:v0.1.5", "ridgerun/r2inference-ubuntu-16.04:v0.3.1"]
container:
image: ${{ matrix.container }}
env:
CXXFLAGS: "-Werror"
steps:
Expand All @@ -132,7 +148,11 @@ jobs:
./list_backends
build_tflite_meson:
runs-on: ubuntu-18.04
container: ridgerun/r2inference:v0.1.4
strategy:
matrix:
container: ["ridgerun/r2inference-ubuntu-18.04:v0.1.5"]
container:
image: ${{ matrix.container }}
env:
TENSORFLOW_PATH: /root/r2inference/backends/tflite/v2.0.1/include/tensorflow
CPPFLAGS: "-I${TENSORFLOW_PATH} -I${TENSORFLOW_PATH}/tensorflow/lite/tools/make/downloads/flatbuffers/include"
Expand Down Expand Up @@ -160,7 +180,11 @@ jobs:
./list_backends
build_tensorflow_tflite_meson:
runs-on: ubuntu-18.04
container: ridgerun/r2inference:v0.1.4
strategy:
matrix:
container: ["ridgerun/r2inference-ubuntu-18.04:v0.1.5"]
container:
image: ${{ matrix.container }}
env:
TENSORFLOW_PATH: /root/r2inference/backends/tflite/v2.0.1/include/tensorflow
CPPFLAGS: "-I${TENSORFLOW_PATH} -I${TENSORFLOW_PATH}/tensorflow/lite/tools/make/downloads/flatbuffers/include"
Expand Down
9 changes: 8 additions & 1 deletion configure.ac
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@

# Initialize autoconf.
AC_PREREQ([2.69])
AC_INIT([RidgeRun inference library],[0.6.0],[https://github.com/RidgeRun/r2inference/issues],[r2inference])
AC_INIT([RidgeRun inference library],[0.7.0],[https://github.com/RidgeRun/r2inference/issues],[r2inference])

# Initialize our build utils
RR_INIT
Expand Down Expand Up @@ -50,12 +50,17 @@ RR_CHECK_FEATURE_LIB(TENSORFLOW, TensorFlow Installation,
tensorflow, TF_Version, tensorflow/c/c_api.h, no)

AC_LANG_PUSH([C++])
RR_CHECK_FEATURE_LIB(EDGETPU, EdgeTPU with TensorFlow lite Installation,
:libedgetpu.so.1.0, edgetpu_version, edgetpu.h, no)

RR_CHECK_FEATURE_LIB(TFLITE, TensorFlow lite Installation,
tensorflow-lite, TfLiteTypeGetName, tensorflow/lite/model.h, no)

# add specific LIBS for TFLITE
AC_SUBST([TFLITE_LIBS], ["$TFLITE_LIBS -pthread -ldl -lrt"])

AM_COND_IF([HAVE_EDGETPU], [AM_COND_IF([HAVE_TFLITE], [], [AC_MSG_ERROR(The EdgeTPU backend needs TFLITE enabled as well)])], [])

RR_CHECK_FEATURE_LIB(TENSORRT, TensorRT Installation,
nvinfer, createInferBuilder_INTERNAL, NvInfer.h, no)
AC_LANG_POP([C++])
Expand All @@ -74,11 +79,13 @@ docs/api/Makefile
docs/uml/Makefile
examples/Makefile
examples/r2i/Makefile
examples/r2i/edgetpu/Makefile
examples/r2i/ncsdk/Makefile
examples/r2i/tensorflow/Makefile
examples/r2i/tensorrt/Makefile
examples/r2i/tflite/Makefile
r2i/Makefile
r2i/edgetpu/Makefile
r2i/ncsdk/Makefile
r2i/tensorflow/Makefile
r2i/tensorrt/Makefile
Expand Down
5 changes: 5 additions & 0 deletions examples/r2i/Makefile.am
Original file line number Diff line number Diff line change
Expand Up @@ -11,13 +11,18 @@
AM_DEFAULT_SOURCE_EXT = .cc

DIST_SUBDIRS = \
edgetpu \
ncsdk \
tensorflow \
tflite
SUBDIRS =

if ENABLE_EXAMPLES

if HAVE_EDGETPU
SUBDIRS += edgetpu
endif

if HAVE_NCSDK
SUBDIRS += ncsdk
endif
Expand Down
36 changes: 36 additions & 0 deletions examples/r2i/edgetpu/Makefile.am
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# Copyright (C) 2020 RidgeRun, LLC (http://www.ridgerun.com)
# All Rights Reserved.
#
# The contents of this software are proprietary and confidential to RidgeRun,
# LLC. No part of this program may be photocopied, reproduced or translated
# into another programming language without prior written consent of
# RidgeRun, LLC. The user is free to modify the source code after obtaining
# a software license from RidgeRun. All source code changes must be provided
# back to RidgeRun without any encumbrance.

# Each example program is built from a single source file with this extension.
AM_DEFAULT_SOURCE_EXT = .cc

if ENABLE_EXAMPLES

# EdgeTPU example binaries; noinst_ keeps them out of `make install`.
noinst_PROGRAMS = \
inception

# Project-wide flags plus code-coverage instrumentation.
# NOTE(review): -I../common/ presumably provides the shared stb image
# helpers included by inception.cc — confirm against examples/r2i/common.
AM_CXXFLAGS = \
$(RR_CXXFLAGS) \
$(CODE_COVERAGE_CXXFLAGS) \
-I../common/

AM_CFLAGS = \
$(RR_CFLAGS) \
$(CODE_COVERAGE_CFLAGS)

AM_CPPFLAGS = \
$(RR_CPPFLAGS) \
$(CODE_COVERAGE_CPPFLAGS)

# Link every example against the r2inference library built in this tree.
LDADD = \
$(RR_LIBS) \
$(CODE_COVERAGE_LIBS) \
$(top_builddir)/r2i/libr2inference-@RR_PACKAGE_VERSION@.la

endif # ENABLE_EXAMPLES
202 changes: 202 additions & 0 deletions examples/r2i/edgetpu/inception.cc
Original file line number Diff line number Diff line change
@@ -0,0 +1,202 @@
/* Copyright (C) 2018-2020 RidgeRun, LLC (http://www.ridgerun.com)
* All Rights Reserved.
*
* The contents of this software are proprietary and confidential to RidgeRun,
* LLC. No part of this program may be photocopied, reproduced or translated
* into another programming language without prior written consent of
* RidgeRun, LLC. The user is free to modify the source code after obtaining
* a software license from RidgeRun. All source code changes must be provided
* back to RidgeRun without any encumbrance.
*/

#include <algorithm>
#include <fstream>
#include <getopt.h>
#include <iostream>
#include <memory>
#include <string>
#include <vector>
#include <r2i/r2i.h>

#define STB_IMAGE_IMPLEMENTATION
#include "stb_image.h"

#define STB_IMAGE_RESIZE_IMPLEMENTATION
#include "stb_image_resize.h"

/* Prints the label with the highest probability in a prediction.
 * prediction: result returned by IEngine::Predict; must be non-null. */
void PrintTopPrediction (std::shared_ptr<r2i::IPrediction> prediction) {
  r2i::RuntimeError error;
  int num_labels = prediction->GetResultSize();

  /* Fix: dereferencing max_element() of an empty range is undefined
   * behavior; bail out when the prediction carries no results. */
  if (num_labels <= 0) {
    std::cerr << "The prediction contains no results" << std::endl;
    return;
  }

  std::vector<double> results;
  results.resize(num_labels);

  for (int i = 0; i < num_labels; ++i) {
    results[i] = prediction->At(i, error);
  }

  auto it = std::max_element(results.begin(), results.end());
  std::cout << "Highest probability is label "
            << std::distance(results.begin(), it) << " (" << *it << ")"
            << std::endl;
}

/* Prints the required command line arguments of this example to stderr. */
void PrintUsage() {
  /* Fix: the previous example line showed a TensorFlow .pb model, but this
   * is the EdgeTPU/TfLite example; it also omitted the -I/-O flags that the
   * message itself lists as required. */
  std::cerr << "Required arguments: "
            << "-i [JPG input_image] "
            << "-m [Inception TfLite Model] "
            << "-s [Model Input Size] "
            << "-I [Input Node] "
            << "-O [Output Node] \n"
            << " Example: "
            << " ./inception -i cat.jpg -m graph_inceptionv2.tflite "
            << "-s 224 -I input -O output"
            << std::endl;
}

/* Scales an RGB image to reqwidth x reqheight and normalizes each channel
 * value from [0, 255] to roughly [-1, 1] as the model expects.
 * input: packed RGB pixel data of size width x height.
 * Returns a newly allocated buffer of normalized floats. */
std::unique_ptr<float[]> PreProcessImage (const unsigned char *input,
    int width, int height, int reqwidth, int reqheight) {

  const int num_channels = 3;
  const int num_values = num_channels * reqwidth * reqheight;

  std::unique_ptr<unsigned char[]> resized (new unsigned char[num_values]);
  std::unique_ptr<float[]> normalized (new float[num_values]);

  stbir_resize_uint8 (input, width, height, 0, resized.get(), reqwidth,
                      reqheight, 0, num_channels);

  /* RGB = (RGB - Mean)*StdDev, with Mean = StdDev = 127.5; the operation is
   * identical for every channel so a flat element-wise loop suffices. */
  for (int idx = 0; idx < num_values; ++idx) {
    normalized[idx] = (static_cast<float>(resized[idx]) - 127.5) / 127.5;
  }

  return normalized;
}

/* Loads the JPG image at path, forcing 3 (RGB) channels, then scales and
 * normalizes it for inference via PreProcessImage.
 * Returns nullptr if the file could not be decoded. */
std::unique_ptr<float[]> LoadImage(const std::string &path, int reqwidth,
    int reqheight) {
  int channels = 3;
  int width, height, cp;

  unsigned char *img = stbi_load(path.c_str(), &width, &height, &cp, channels);
  if (!img) {
    /* Fix: the message previously had no newline and was never flushed */
    std::cerr << "The picture " << path << " could not be loaded" << std::endl;
    return nullptr;
  }

  auto ret = PreProcessImage(img, width, height, reqwidth, reqheight);
  /* stb_image allocates with malloc by default, so free() releases it */
  free (img);

  return ret;
}

/* Parses the command line options into the output references.
 * Recognized flags: -i image path, -m model path, -p prediction index,
 * -s model input size, -I input node name, -O output node name.
 * Returns false as soon as an unknown flag is found, true otherwise. */
bool ParseArgs (int &argc, char *argv[], std::string &image_path,
    std::string &model_path, int &index, int &size,
    std::string &in_node, std::string &out_node) {

  int opt = 0;
  while ((opt = getopt (argc, argv, "i:m:p:s:I:O:")) != -1) {
    if ('i' == opt) {
      image_path = optarg;
    } else if ('m' == opt) {
      model_path = optarg;
    } else if ('p' == opt) {
      index = std::stoi (optarg);
    } else if ('s' == opt) {
      size = std::stoi (optarg);
    } else if ('I' == opt) {
      in_node = optarg;
    } else if ('O' == opt) {
      out_node = optarg;
    } else {
      return false;
    }
  }
  return true;
}

/* EdgeTPU inception example: loads a TfLite model and a JPG image, runs one
 * prediction and prints the most probable label. Returns EXIT_SUCCESS on
 * success, EXIT_FAILURE on any error. */
int main (int argc, char *argv[]) {

  r2i::RuntimeError error;
  std::string model_path;
  std::string image_path;
  std::string in_node;
  std::string out_node;
  int Index = 0;
  int size = 0;

  if (false == ParseArgs (argc, argv, image_path, model_path, Index,
                          size, in_node, out_node)) {
    PrintUsage ();
    exit (EXIT_FAILURE);
  }

  if (image_path.empty() || model_path.empty ()) {
    PrintUsage ();
    exit (EXIT_FAILURE);
  }

  /* A null factory means the EdgeTPU backend was not compiled in */
  auto factory = r2i::IFrameworkFactory::MakeFactory(
                   r2i::FrameworkCode::EDGETPU,
                   error);

  if (nullptr == factory) {
    std::cerr << "EdgeTPU backend is not built: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  std::cout << "Loading Model: " << model_path << std::endl;
  auto loader = factory->MakeLoader (error);
  std::shared_ptr<r2i::IModel> model = loader->Load (model_path, error);
  if (error.IsError ()) {
    std::cerr << "Loader error: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  std::cout << "Setting model to engine" << std::endl;
  std::shared_ptr<r2i::IEngine> engine = factory->MakeEngine (error);
  error = engine->SetModel (model);
  /* Fix: this error was previously ignored */
  if (error.IsError ()) {
    std::cerr << "Engine SetModel error: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  std::cout << "Loading image: " << image_path << std::endl;
  std::unique_ptr<float[]> image_data = LoadImage (image_path, size, size);
  /* Fix: LoadImage returns nullptr on failure; passing a null buffer to
   * Configure was previously unchecked */
  if (nullptr == image_data) {
    exit (EXIT_FAILURE);
  }

  std::cout << "Configuring frame" << std::endl;
  std::shared_ptr<r2i::IFrame> frame = factory->MakeFrame (error);

  error = frame->Configure (image_data.get(), size, size,
                            r2i::ImageFormat::Id::RGB);
  /* Fix: this error was previously ignored */
  if (error.IsError ()) {
    std::cerr << "Frame configuration error: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  std::cout << "Starting engine" << std::endl;
  error = engine->Start ();
  if (error.IsError ()) {
    std::cerr << "Engine start error: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  std::cout << "Predicting..." << std::endl;
  auto prediction = engine->Predict (frame, error);
  if (error.IsError ()) {
    std::cerr << "Engine prediction error: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  PrintTopPrediction (prediction);

  std::cout << "Stopping engine" << std::endl;
  error = engine->Stop ();
  if (error.IsError ()) {
    std::cerr << "Engine stop error: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  return EXIT_SUCCESS;
}
11 changes: 11 additions & 0 deletions examples/r2i/edgetpu/meson.build
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Compile examples
# Each entry <name> builds the program <name> from <name>.cc in this dir.
app_examples = [
'inception',
]

# Build every example against the in-tree r2inference library;
# install: false keeps the example binaries out of the install step.
foreach app : app_examples
executable(app, '@0@.cc'.format(app),
include_directories: [configinc, common_inc_dir],
dependencies : [r2inference_lib_dep],
install: false)
endforeach
Loading

0 comments on commit 966ac2a

Please sign in to comment.