Tflite #5 (Draft)

Wants to merge 13 commits into base: main
8 changes: 8 additions & 0 deletions README.md
@@ -134,6 +134,14 @@ The `inference_pkg_launch.py`, included in this package, provides an example dem
|`load_model`|`LoadModelSrv`|Service that is responsible for setting pre-processing algorithm and inference tasks for the specific type of model loaded.|
|`inference_state`|`InferenceStateSrv`|Service that is responsible for starting and stopping inference tasks.|


### Parameters

| Parameter name | Description |
| ---------------- | ----------- |
| `device` | String that is either `CPU`, `GPU` or `MYRIAD`. Default is `CPU`. `MYRIAD` targets the Intel Neural Compute Stick 2. |
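
For reference, a minimal sketch of how the node declares and reads this parameter (mirroring the `inference_node.cpp` changes in this PR); at launch time the value can be overridden like any other ROS 2 parameter:

```cpp
#include <string>
#include "rclcpp/rclcpp.hpp"

int main(int argc, char ** argv)
{
  rclcpp::init(argc, argv);
  auto node = rclcpp::Node::make_shared("inference_node");

  // Declared with a default of "CPU"; override at launch time,
  // e.g. `--ros-args -p device:=MYRIAD`, to target another OpenVINO device.
  node->declare_parameter<std::string>("device", "CPU");
  const std::string device = node->get_parameter("device").as_string();

  RCLCPP_INFO(node->get_logger(), "Inference device: %s", device.c_str());
  rclcpp::shutdown();
  return 0;
}
```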


## Resources

* [Getting started with AWS DeepRacer OpenSource](https://github.com/aws-deepracer/aws-deepracer-launcher/blob/main/getting-started.md)
23 changes: 21 additions & 2 deletions inference_pkg/CMakeLists.txt
@@ -1,5 +1,8 @@
cmake_minimum_required(VERSION 3.5)
project(inference_pkg)
include(FetchContent)

set(ABSL_PROPAGATE_CXX_STD ON)

# Default to C99
if(NOT CMAKE_C_STANDARD)
@@ -12,9 +15,20 @@ if(NOT CMAKE_CXX_STANDARD)
endif()

if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang")
add_compile_options(-Wall -Wextra -Wpedantic)
add_compile_options(-Wno-deprecated-declarations -Wno-ignored-attributes)
endif()

FetchContent_Declare(tensorflow-lite
GIT_REPOSITORY https://github.com/tensorflow/tensorflow.git
)
FetchContent_Populate(tensorflow-lite)

add_subdirectory(
${tensorflow-lite_SOURCE_DIR}/tensorflow/lite
${tensorflow-lite_BINARY_DIR}
EXCLUDE_FROM_ALL
)

# find dependencies
find_package(ament_cmake REQUIRED)
find_package(rclcpp REQUIRED)
@@ -44,6 +58,7 @@ endif()

add_executable(inference_node
src/inference_node.cpp
src/tflite_inference_eng.cpp
src/intel_inference_eng.cpp
src/image_process.cpp
)
@@ -52,12 +67,16 @@ target_include_directories(inference_node PRIVATE
include
${OpenCV_INCLUDE_DIRS}
${InferenceEngine_INCLUDE_DIRS}
${CMAKE_CURRENT_BINARY_DIR}/flatbuffers/include
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/include>
)

target_link_libraries(inference_node -lm -ldl
${OpenCV_LIBRARIES}
tensorflow-lite
${InferenceEngine_LIBRARIES}
${NGRAPH_LIBRARIES})
${NGRAPH_LIBRARIES}
)

ament_target_dependencies(inference_node rclcpp deepracer_interfaces_pkg sensor_msgs std_msgs cv_bridge image_transport OpenCV InferenceEngine ngraph)

17 changes: 9 additions & 8 deletions inference_pkg/include/inference_pkg/image_process.hpp
@@ -19,6 +19,7 @@

#include "rclcpp/rclcpp.hpp"
#include "sensor_msgs/msg/image.hpp"
#include "sensor_msgs/msg/compressed_image.hpp"
#include "cv_bridge/cv_bridge.h"
#include <unordered_map>

@@ -33,9 +34,9 @@ namespace InferTask {
/// @param frameData ROS message containing the image data.
/// @param retImg Open CV Mat object that will be used to store the post processed image
/// @param params Hash map containing relevant pre-processing parameters
virtual void processImage(const sensor_msgs::msg::Image &frameData, cv::Mat& retImg,
virtual void processImage(const sensor_msgs::msg::CompressedImage &frameData, cv::Mat& retImg,
const std::unordered_map<std::string, int> &params) = 0;
virtual void processImageVec(const std::vector<sensor_msgs::msg::Image> &frameDataArr, cv::Mat& retImg,
virtual void processImageVec(const std::vector<sensor_msgs::msg::CompressedImage> &frameDataArr, cv::Mat& retImg,
const std::unordered_map<std::string, int> &params) = 0;
/// Resets the image processing algorithms data if any.
virtual void reset() = 0;
@@ -49,9 +50,9 @@ namespace InferTask {
public:
RGB() = default;
virtual ~RGB() = default;
virtual void processImage(const sensor_msgs::msg::Image &frameData, cv::Mat& retImg,
virtual void processImage(const sensor_msgs::msg::CompressedImage &frameData, cv::Mat& retImg,
const std::unordered_map<std::string, int> &params) override;
virtual void processImageVec(const std::vector<sensor_msgs::msg::Image> &frameDataArr, cv::Mat& retImg,
virtual void processImageVec(const std::vector<sensor_msgs::msg::CompressedImage> &frameDataArr, cv::Mat& retImg,
const std::unordered_map<std::string, int> &params) override {(void)frameDataArr;(void)retImg;(void)params;}
virtual void reset() override {}
virtual const std::string getEncode() const;
@@ -67,9 +68,9 @@ namespace InferTask {
/// @param isMask True if background masking should be performed on the image.
Grey(bool isThreshold, bool isMask);
virtual ~Grey() = default;
virtual void processImage(const sensor_msgs::msg::Image &frameData, cv::Mat& retImg,
virtual void processImage(const sensor_msgs::msg::CompressedImage &frameData, cv::Mat& retImg,
const std::unordered_map<std::string, int> &params) override;
virtual void processImageVec(const std::vector<sensor_msgs::msg::Image> &frameDataArr, cv::Mat& retImg,
virtual void processImageVec(const std::vector<sensor_msgs::msg::CompressedImage> &frameDataArr, cv::Mat& retImg,
const std::unordered_map<std::string, int> &params);
virtual void reset() override;
virtual const std::string getEncode() const;
@@ -91,9 +92,9 @@ namespace InferTask {
public:
GreyDiff() = default;
virtual ~GreyDiff() = default;
virtual void processImage(const sensor_msgs::msg::Image &frameData, cv::Mat& retImg,
virtual void processImage(const sensor_msgs::msg::CompressedImage &frameData, cv::Mat& retImg,
const std::unordered_map<std::string, int> &params) override;
virtual void processImageVec(const std::vector<sensor_msgs::msg::Image> &frameDataArr, cv::Mat& retImg,
virtual void processImageVec(const std::vector<sensor_msgs::msg::CompressedImage> &frameDataArr, cv::Mat& retImg,
const std::unordered_map<std::string, int> &params) override {(void)frameDataArr;(void)retImg;(void)params;}
virtual void reset() override;
virtual const std::string getEncode() const;
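
A short usage sketch of the updated interface follows; the parameter keys `width` and `height` are hypothetical placeholders, since the real keys are defined by the callers elsewhere in the package:

```cpp
#include <string>
#include <unordered_map>
#include "inference_pkg/image_process.hpp"

// Decode and resize one compressed camera frame with the RGB pre-processor.
void preprocessFrame(const sensor_msgs::msg::CompressedImage & frame)
{
  InferTask::RGB rgb;
  cv::Mat processed;
  // Hypothetical parameter keys; the actual names come from the inference engine.
  const std::unordered_map<std::string, int> params = {{"width", 160}, {"height", 120}};
  rgb.processImage(frame, processed, params);
}
```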
4 changes: 3 additions & 1 deletion inference_pkg/include/inference_pkg/inference_base.hpp
@@ -32,8 +32,10 @@ namespace InferTask {
/// @returns True if model loaded successfully, false otherwise
/// @param artifactPath Path to the model artifact.
/// @param imgProcess Pointer to the image processing algorithm
/// @param device Reference to the compute device (CPU, GPU, MYRIAD)
virtual bool loadModel(const char* artifactPath,
std::shared_ptr<ImgProcessBase> imgProcess) = 0;
std::shared_ptr<ImgProcessBase> imgProcess,
std::string device) = 0;
/// Starts the inference task until stopped.
virtual void startInference() = 0;
/// Stops the inference task if running.
3 changes: 2 additions & 1 deletion inference_pkg/include/inference_pkg/intel_inference_eng.hpp
@@ -34,7 +34,8 @@ namespace IntelInferenceEngine {
RLInferenceModel(std::shared_ptr<rclcpp::Node> inferenceNodePtr, const std::string &sensorSubName);
virtual ~RLInferenceModel();
virtual bool loadModel(const char* artifactPath,
std::shared_ptr<InferTask::ImgProcessBase> imgProcess) override;
std::shared_ptr<InferTask::ImgProcessBase> imgProcess,
std::string device) override;
virtual void startInference() override;
virtual void stopInference() override;
/// Callback method to retrieve sensor data.
73 changes: 73 additions & 0 deletions inference_pkg/include/inference_pkg/tflite_inference_eng.hpp
@@ -0,0 +1,73 @@
///////////////////////////////////////////////////////////////////////////////////
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. //
// //
// Licensed under the Apache License, Version 2.0 (the "License"). //
// You may not use this file except in compliance with the License. //
// You may obtain a copy of the License at //
// //
// http://www.apache.org/licenses/LICENSE-2.0 //
// //
// Unless required by applicable law or agreed to in writing, software //
// distributed under the License is distributed on an "AS IS" BASIS, //
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //
// See the License for the specific language governing permissions and //
// limitations under the License. //
///////////////////////////////////////////////////////////////////////////////////

#ifndef TFLITE_INFERENCE_ENG_HPP
#define TFLITE_INFERENCE_ENG_HPP

#include "inference_pkg/inference_base.hpp"
#include "tensorflow/lite/interpreter.h"
#include "tensorflow/lite/kernels/register.h"
#include "tensorflow/lite/model.h"
#include "deepracer_interfaces_pkg/msg/evo_sensor_msg.hpp"
#include "deepracer_interfaces_pkg/msg/infer_results_array.hpp"
#include <atomic>

namespace TFLiteInferenceEngine {
class RLInferenceModel : public InferTask::InferenceBase
{
/// Concrete inference task class for running reinforcement learning models
/// through the TensorFlow Lite runtime.
public:
/// @param node_name Name of the node to be created.
/// @param subName Name of the topic to subscribe to for sensor data.
RLInferenceModel(std::shared_ptr<rclcpp::Node> inferenceNodePtr, const std::string &sensorSubName);
virtual ~RLInferenceModel();
virtual bool loadModel(const char* artifactPath,
std::shared_ptr<InferTask::ImgProcessBase> imgProcess,
std::string device) override;
virtual void startInference() override;
virtual void stopInference() override;
/// Callback method to retrieve sensor data.
/// @param msg Message returned by the ROS messaging system.
void sensorCB(const deepracer_interfaces_pkg::msg::EvoSensorMsg::SharedPtr msg);

private:
/// Inference node object
std::shared_ptr<rclcpp::Node> inferenceNode;
/// ROS subscriber object to the desired sensor topic.
rclcpp::Subscription<deepracer_interfaces_pkg::msg::EvoSensorMsg>::SharedPtr sensorSub_;
/// ROS publisher object to the desired topic.
rclcpp::Publisher<deepracer_interfaces_pkg::msg::InferResultsArray>::SharedPtr resultPub_;
/// Pointer to image processing algorithm.
std::shared_ptr<InferTask::ImgProcessBase> imgProcess_;
/// Inference state variable.
std::atomic<bool> doInference_;
/// TensorFlow Lite flatbuffer model loaded from the artifact path.
std::unique_ptr<tflite::FlatBufferModel> model_;
/// TensorFlow Lite interpreter that runs the model.
std::unique_ptr<tflite::Interpreter> interpreter_;
/// Vector of hash map that stores all relevant pre-processing parameters for each input head.
std::vector<std::unordered_map<std::string, int>> paramsArr_;
/// Vector of names of the input heads
std::vector<std::string> inputNamesArr_;
/// Name of the output layer
std::string outputName_;
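/// Dimensions of each output tensor.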
std::vector<std::vector<int>> outputDimsArr_;
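/// Cached pointers to the interpreter's output tensors.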
std::vector<TfLiteTensor const *> output_tensors_;

};
}
#endif
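
For orientation, here is a minimal, hedged sketch of how `model_` and `interpreter_` are typically initialized with the standard TFLite C++ API; this is not necessarily the exact body of this PR's `loadModel`:

```cpp
#include <memory>
#include "tensorflow/lite/interpreter.h"
#include "tensorflow/lite/kernels/register.h"
#include "tensorflow/lite/model.h"

// Build the flatbuffer model and interpreter from a .tflite artifact.
bool buildInterpreter(const char * artifactPath,
                      std::unique_ptr<tflite::FlatBufferModel> & model,
                      std::unique_ptr<tflite::Interpreter> & interpreter)
{
  model = tflite::FlatBufferModel::BuildFromFile(artifactPath);
  if (!model) {
    return false;
  }
  tflite::ops::builtin::BuiltinOpResolver resolver;
  if (tflite::InterpreterBuilder(*model, resolver)(&interpreter) != kTfLiteOk) {
    return false;
  }
  // Allocate input/output tensor buffers before invoking inference.
  return interpreter->AllocateTensors() == kTfLiteOk;
}
```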
10 changes: 5 additions & 5 deletions inference_pkg/src/image_process.cpp
@@ -29,7 +29,7 @@ namespace {
/// @param frameData ROS image message containing the image data.
/// @param retImg Reference to the CV object to be populated with the resized image.
/// @param params Hash map containing resize information.
bool cvtToCVObjResize (const sensor_msgs::msg::Image &frameData, cv::Mat &retImg,
bool cvtToCVObjResize (const sensor_msgs::msg::CompressedImage &frameData, cv::Mat &retImg,
const std::unordered_map<std::string, int> &params) {

cv_bridge::CvImagePtr cvPtr;
@@ -121,7 +121,7 @@ }
}

namespace InferTask {
void RGB::processImage(const sensor_msgs::msg::Image &frameData, cv::Mat &retImg,
void RGB::processImage(const sensor_msgs::msg::CompressedImage &frameData, cv::Mat &retImg,
const std::unordered_map<std::string, int> &params) {
cvtToCVObjResize(frameData, retImg, params);
}
@@ -137,7 +137,7 @@

}

void Grey::processImage(const sensor_msgs::msg::Image &frameData, cv::Mat &retImg,
void Grey::processImage(const sensor_msgs::msg::CompressedImage &frameData, cv::Mat &retImg,
const std::unordered_map<std::string, int> &params) {
cv::Mat currImg;
if (cvtToCVObjResize(frameData, currImg, params)) {
@@ -160,7 +160,7 @@ }
}
}

void Grey::processImageVec(const std::vector<sensor_msgs::msg::Image> &frameDataArr, cv::Mat &retImg,
void Grey::processImageVec(const std::vector<sensor_msgs::msg::CompressedImage> &frameDataArr, cv::Mat &retImg,
const std::unordered_map<std::string, int> &params) {
// The left camera image is sent as the top image and the right camera image as the second in the vector.
// The stack operation overwrites values from the beginning as we loop through, hence we loop in decreasing order
@@ -188,7 +188,7 @@ namespace InferTask {
return sensor_msgs::image_encodings::MONO8;
}

void GreyDiff::processImage(const sensor_msgs::msg::Image &frameData, cv::Mat &retImg,
void GreyDiff::processImage(const sensor_msgs::msg::CompressedImage &frameData, cv::Mat &retImg,
const std::unordered_map<std::string, int> &params) {
(void)retImg;
cv::Mat currImg;
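
Since the pipeline now receives `sensor_msgs::msg::CompressedImage`, each frame has to be decoded rather than reinterpreted. A minimal sketch of that conversion with plain OpenCV (the code above routes it through `cv_bridge` instead):

```cpp
#include "opencv2/opencv.hpp"
#include "sensor_msgs/msg/compressed_image.hpp"

// Decode a JPEG/PNG-compressed ROS image into a BGR cv::Mat.
cv::Mat decodeCompressedImage(const sensor_msgs::msg::CompressedImage & frame)
{
  return cv::imdecode(cv::Mat(frame.data), cv::IMREAD_COLOR);
}
```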
45 changes: 42 additions & 3 deletions inference_pkg/src/inference_node.cpp
@@ -15,6 +15,8 @@
///////////////////////////////////////////////////////////////////////////////////

#include "inference_pkg/intel_inference_eng.hpp"
#include "inference_pkg/tflite_inference_eng.hpp"
#include "std_msgs/msg/string.hpp"
#include "deepracer_interfaces_pkg/srv/inference_state_srv.hpp"
#include "deepracer_interfaces_pkg/srv/load_model_srv.hpp"

@@ -41,11 +43,25 @@ namespace InferTask {
/// Class that will manage the inference task. In particular it will start and stop the
/// inference tasks and feed the inference task the sensor data.
/// @param nodeName Reference to the string containing name of the node.
/// @param device Reference to the compute device (CPU, GPU, MYRIAD)
public:
const char* MODEL_ARTIFACT_TOPIC = "model_artifact";

InferenceNodeMgr(const std::string & nodeName)
: Node(nodeName)
: Node(nodeName),
deviceName_("CPU"),
inferenceEngine_("TFLITE")
{
RCLCPP_INFO(this->get_logger(), "%s started", nodeName.c_str());

this->declare_parameter<std::string>("device", deviceName_);
// Device name; OpenVINO supports CPU, GPU and MYRIAD
deviceName_ = this->get_parameter("device").as_string();

this->declare_parameter<std::string>("inference_engine", inferenceEngine_);
// Inference Engine name; TFLITE or OPENVINO
inferenceEngine_ = this->get_parameter("inference_engine").as_string();

loadModelServiceCbGrp_ = this->create_callback_group(rclcpp::callback_group::CallbackGroupType::MutuallyExclusive);
loadModelService_ = this->create_service<deepracer_interfaces_pkg::srv::LoadModelSrv>("load_model",
std::bind(&InferTask::InferenceNodeMgr::LoadModelHdl,
@@ -66,6 +82,9 @@
::rmw_qos_profile_default,
setInferenceStateServiceCbGrp_);

// Create a publisher to announce the path of each newly loaded model artifact.
modelArtifactPub_ = this->create_publisher<std_msgs::msg::String>(MODEL_ARTIFACT_TOPIC, 1);

// Add all available task and algorithms to these hash maps.
taskList_ = { {rlTask, nullptr} };
preProcessList_ = { {rgb, std::make_shared<RGB>()},
@@ -119,7 +138,12 @@
if (itInferTask != taskList_.end() && itPreProcess != preProcessList_.end()) {
switch(req->task_type) {
case rlTask:
itInferTask->second.reset(new IntelInferenceEngine::RLInferenceModel(this->shared_from_this(), "/sensor_fusion_pkg/sensor_msg"));
if (inferenceEngine_.compare("TFLITE") == 0) {
itInferTask->second.reset(new TFLiteInferenceEngine::RLInferenceModel(this->shared_from_this(), "/sensor_fusion_pkg/sensor_msg"));
} else {
itInferTask->second.reset(new IntelInferenceEngine::RLInferenceModel(this->shared_from_this(), "/sensor_fusion_pkg/sensor_msg"));
}

break;
case objDetectTask:
//! TODO add object detection when the class is implemented.
@@ -129,7 +153,14 @@
RCLCPP_ERROR(this->get_logger(), "Unknown inference task");
return;
}
itInferTask->second->loadModel(req->artifact_path.c_str(), itPreProcess->second);

itInferTask->second->loadModel(req->artifact_path.c_str(), itPreProcess->second, deviceName_);

// Publish a message to announce that a model has been loaded
std_msgs::msg::String modelArtifactMsg;
modelArtifactMsg.data = req->artifact_path;
modelArtifactPub_->publish(modelArtifactMsg);

res->error = 0;
}
}
@@ -149,6 +180,14 @@
/// List of available pre-processing algorithms.
std::unordered_map<int, std::shared_ptr<ImgProcessBase>> preProcessList_;
/// Reference to the node handler.

/// ROS publisher object to publish the name of a new model.
rclcpp::Publisher<std_msgs::msg::String>::SharedPtr modelArtifactPub_;

/// Compute device type.
std::string deviceName_;
/// Inference Engine parameter.
std::string inferenceEngine_;
};
}

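
To exercise the new path end to end, a client calls the `load_model` service with the artifact path. Below is a hedged sketch of such a client; the artifact path is hypothetical and the numeric `task_type` for `rlTask` is assumed to be 0, so verify both against the actual `LoadModelSrv` definition:

```cpp
#include <memory>
#include "rclcpp/rclcpp.hpp"
#include "deepracer_interfaces_pkg/srv/load_model_srv.hpp"

int main(int argc, char ** argv)
{
  rclcpp::init(argc, argv);
  auto node = rclcpp::Node::make_shared("load_model_client");
  auto client = node->create_client<deepracer_interfaces_pkg::srv::LoadModelSrv>("load_model");
  client->wait_for_service();

  auto req = std::make_shared<deepracer_interfaces_pkg::srv::LoadModelSrv::Request>();
  req->artifact_path = "/opt/aws/deepracer/artifacts/my_model/model.tflite";  // hypothetical path
  req->task_type = 0;  // assumed rlTask value; check the .srv definition

  auto future = client->async_send_request(req);
  // On older ROS 2 distributions this enum lives in rclcpp::executor.
  if (rclcpp::spin_until_future_complete(node, future) == rclcpp::FutureReturnCode::SUCCESS) {
    RCLCPP_INFO(node->get_logger(), "load_model returned error code %d",
                static_cast<int>(future.get()->error));
  }
  rclcpp::shutdown();
  return 0;
}
```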