Perception: use customized leakyReLU TensorRT layer (ApolloAuto#9216)
KaWaiTsoiBaidu authored and techoe committed Jul 24, 2019
1 parent 9734ab8 commit 22f3c79
Showing 2 changed files with 27 additions and 11 deletions.
34 changes: 24 additions & 10 deletions modules/perception/inference/tensorrt/rt_net.cc
@@ -21,6 +21,7 @@
 
 #include "cyber/common/log.h"
 #include "modules/perception/inference/tensorrt/plugins/argmax_plugin.h"
+#include "modules/perception/inference/tensorrt/plugins/leakyReLU_plugin.h"
 #include "modules/perception/inference/tensorrt/plugins/slice_plugin.h"
 #include "modules/perception/inference/tensorrt/plugins/softmax_plugin.h"
 
@@ -150,19 +151,31 @@ void RTNet::addDeconvLayer(const LayerParameter &layer_param,
 #endif
 }
 void RTNet::addActiveLayer(const LayerParameter &layer_param,
-                           nvinfer1::ITensor *const *inputs,
+                           nvinfer1::ITensor *const *inputs, int nbInputs,
                            nvinfer1::INetworkDefinition *net,
                            TensorMap *tensor_map,
                            TensorModifyMap *tensor_modify_map) {
-  nvinfer1::ActivationType type = nvinfer1::ActivationType::kSIGMOID;
-  auto pair = active_map.find(layer_param.type());
-  if (pair != active_map.end()) {
-    type = pair->second;
-  }
-  nvinfer1::IActivationLayer *reluLayer = net->addActivation(*inputs[0], type);
-  reluLayer->setName(layer_param.name().c_str());
+  if (layer_param.type() == "ReLU") {
+    std::shared_ptr<ReLUPlugin> relu_plugin;
+    relu_plugin.reset(new ReLUPlugin(layer_param.relu_param(),
+                                     inputs[0]->getDimensions()));
+    nvinfer1::IPluginLayer *ReLU_Layer =
+        net->addPlugin(inputs, nbInputs, *relu_plugin);
+    relu_plugins_.push_back(relu_plugin);
+    ReLU_Layer->setName(layer_param.name().c_str());
+    ConstructMap(layer_param, ReLU_Layer, tensor_map, tensor_modify_map);
+  } else {
+    nvinfer1::ActivationType type = nvinfer1::ActivationType::kSIGMOID;
+    auto pair = active_map.find(layer_param.type());
+    if (pair != active_map.end()) {
+      type = pair->second;
+    }
+    nvinfer1::IActivationLayer *reluLayer =
+        net->addActivation(*inputs[0], type);
+    reluLayer->setName(layer_param.name().c_str());
 
-  ConstructMap(layer_param, reluLayer, tensor_map, tensor_modify_map);
+    ConstructMap(layer_param, reluLayer, tensor_map, tensor_modify_map);
+  }
 }
 
 void RTNet::addConcatLayer(const LayerParameter &layer_param,
@@ -409,7 +422,8 @@ void RTNet::addLayer(const LayerParameter &layer_param,
                    tensor_modify_map);
   } else if (layer_param.type() == "Sigmoid" || layer_param.type() == "TanH" ||
              layer_param.type() == "ReLU") {
-    addActiveLayer(layer_param, inputs, net, tensor_map, tensor_modify_map);
+    addActiveLayer(layer_param, inputs, nbInputs, net,
+                   tensor_map, tensor_modify_map);
   } else if (layer_param.type() == "Concat") {
     addConcatLayer(layer_param, inputs, nbInputs, net, tensor_map,
                    tensor_modify_map);
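
For context on the rt_net.cc change above: when the Caffe layer type is "ReLU", the network now builds the custom plugin from leakyReLU_plugin.h (constructed from the layer's relu_param and the input tensor dimensions) instead of TensorRT's built-in kRELU activation. The element-wise function a leaky-ReLU layer applies is sketched below as a plain CPU reference; this is a minimal illustration only, assuming the slope is taken from Caffe's ReLUParameter negative_slope field, and LeakyReLUReference is a hypothetical helper, not the plugin's actual API.

// Reference-only sketch of the leaky-ReLU element-wise function such a
// plugin is expected to compute on the GPU:
//   f(x) = x            for x > 0
//   f(x) = slope * x    otherwise   (slope == 0 reduces to standard ReLU)
#include <cstddef>
#include <vector>

std::vector<float> LeakyReLUReference(const std::vector<float> &input,
                                      float negative_slope) {  // hypothetical helper
  std::vector<float> output(input.size());
  for (std::size_t i = 0; i < input.size(); ++i) {
    output[i] = input[i] > 0.0f ? input[i] : negative_slope * input[i];
  }
  return output;
}

For example, with negative_slope = 0.1 an input of {-2.0, 3.0} maps to {-0.2, 3.0}.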
4 changes: 3 additions & 1 deletion modules/perception/inference/tensorrt/rt_net.h
@@ -29,6 +29,7 @@ namespace apollo {
 namespace perception {
 namespace inference {
 class ArgMax1Plugin;
+class ReLUPlugin;
 class SLICEPlugin;
 class SoftmaxPlugin;
 
@@ -99,7 +100,7 @@ class RTNet : public Inference {
                       TensorModifyMap *tensor_modify_map);
 
   void addActiveLayer(const LayerParameter &layer_param,
-                      nvinfer1::ITensor *const *inputs,
+                      nvinfer1::ITensor *const *inputs, int nbInputs,
                       nvinfer1::INetworkDefinition *net, TensorMap *tensor_map,
                       TensorModifyMap *tensor_modify_map);
 
@@ -180,6 +181,7 @@ class RTNet : public Inference {
   std::vector<std::shared_ptr<ArgMax1Plugin>> argmax_plugins_;
   std::vector<std::shared_ptr<SoftmaxPlugin>> softmax_plugins_;
   std::vector<std::shared_ptr<SLICEPlugin>> slice_plugins_;
+  std::vector<std::shared_ptr<ReLUPlugin>> relu_plugins_;
   std::vector<std::string> output_names_;
   std::vector<std::string> input_names_;
   std::map<std::string, std::string> tensor_modify_map_;
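
A note on the new relu_plugins_ member: it follows the same pattern as the existing argmax_plugins_, softmax_plugins_, and slice_plugins_ vectors. The plugin object passed to net->addPlugin() is not owned by the network definition, so RTNet keeps each plugin alive in a shared_ptr vector for the duration of the network build. The sketch below illustrates that ownership pattern with stand-in types; Plugin, Network, and Builder are hypothetical placeholders, not TensorRT classes.

// Minimal ownership sketch (stand-in types, not the real TensorRT API):
// the network only borrows a raw pointer to the plugin, so the builder
// must own it until engine construction finishes.
#include <memory>
#include <vector>

struct Plugin {};  // stands in for ReLUPlugin

struct Network {   // stands in for nvinfer1::INetworkDefinition
  void addPlugin(Plugin *plugin) { borrowed.push_back(plugin); }  // non-owning
  std::vector<Plugin *> borrowed;
};

struct Builder {   // stands in for RTNet
  void AddCustomActivation(Network *net) {
    auto plugin = std::make_shared<Plugin>();
    net->addPlugin(plugin.get());
    plugins_.push_back(plugin);  // keep the plugin alive past this call
  }
  std::vector<std::shared_ptr<Plugin>> plugins_;
};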
