core/conversion/converters/impl — 1 file changed: +14 −0 lines
lines changed Original file line number Diff line number Diff line change @@ -126,6 +126,20 @@ auto acthardtanh TRTORCH_UNUSED =
126126 return true ;
127127 }})
128128 .pattern({" aten::leaky_relu(Tensor self, Scalar negative_slope=0.01) -> (Tensor)" ,
129+ [](ConversionCtx* ctx, const torch::jit::Node* n, args& args) -> bool {
130+ auto self = args[0 ].ITensorOrFreeze (ctx);
131+ auto negative_slopeScalar = args[1 ].unwrapToScalar ().to <float >();
132+
133+ auto new_layer = ctx->net ->addActivation (*self, nvinfer1::ActivationType::kLEAKY_RELU );
134+ new_layer->setAlpha (negative_slopeScalar);
135+
136+ new_layer->setName (util::node_info (n).c_str ());
137+ auto out_tensor = new_layer->getOutput (0 );
138+ out_tensor = ctx->AssociateValueAndTensor (n->outputs ()[0 ], out_tensor);
139+ LOG_DEBUG (" Output shape: " << out_tensor->getDimensions ());
140+ return true ;
141+ }})
142+ .pattern({" aten::leaky_relu_(Tensor(a!) self, Scalar negative_slope=0.01) -> Tensor(a!)" ,
129143 [](ConversionCtx* ctx, const torch::jit::Node* n, args& args) -> bool {
130144 auto self = args[0 ].ITensorOrFreeze (ctx);
131145 auto negative_slopeScalar = args[1 ].unwrapToScalar ().to <float >();
You can’t perform that action at this time.
0 commit comments