Skip to content

Commit bc53411

Browse files
committed Feb 2, 2021
feat(aten::leaky_relu_): Adding alias for inplace leaky relu
Signed-off-by: Naren Dasan <naren@narendasan.com>
Signed-off-by: Naren Dasan <narens@nvidia.com>
1 parent a1d1686 commit bc53411

File tree

1 file changed

+14
-0
lines changed

1 file changed

+14
-0
lines changed
 

core/conversion/converters/impl/activation.cpp

+14
Original file line number | Diff line number | Diff line change
@@ -126,6 +126,20 @@ auto acthardtanh TRTORCH_UNUSED =
126126
return true;
127127
}})
128128
.pattern({"aten::leaky_relu(Tensor self, Scalar negative_slope=0.01) -> (Tensor)",
129+
[](ConversionCtx* ctx, const torch::jit::Node* n, args& args) -> bool {
130+
auto self = args[0].ITensorOrFreeze(ctx);
131+
auto negative_slopeScalar = args[1].unwrapToScalar().to<float>();
132+
133+
auto new_layer = ctx->net->addActivation(*self, nvinfer1::ActivationType::kLEAKY_RELU);
134+
new_layer->setAlpha(negative_slopeScalar);
135+
136+
new_layer->setName(util::node_info(n).c_str());
137+
auto out_tensor = new_layer->getOutput(0);
138+
out_tensor = ctx->AssociateValueAndTensor(n->outputs()[0], out_tensor);
139+
LOG_DEBUG("Output shape: " << out_tensor->getDimensions());
140+
return true;
141+
}})
142+
.pattern({"aten::leaky_relu_(Tensor(a!) self, Scalar negative_slope=0.01) -> Tensor(a!)",
129143
[](ConversionCtx* ctx, const torch::jit::Node* n, args& args) -> bool {
130144
auto self = args[0].ITensorOrFreeze(ctx);
131145
auto negative_slopeScalar = args[1].unwrapToScalar().to<float>();

0 commit comments

Comments (0)
Please sign in to comment.