3 changes: 2 additions & 1 deletion src/operator/subgraph/mkldnn/mkldnn_fc-inl.h
@@ -36,7 +36,8 @@ static inline bool SupportMKLDNNFCEltwiseFusion(const std::string op_name) {
       op_name == "sqrt" ||
       op_name == "exp" ||
       op_name == "abs" ||
-      op_name == "clip") {
+      op_name == "clip" ||
+      op_name == "LeakyReLU") {
     return true;
   } else {
     return false;
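For illustration, a standalone Python sketch of the predicate this hunk extends (not MXNet source; the entries ahead of "sqrt" are inferred from the fc_post_ops_list in the tests below):

# Sketch of SupportMKLDNNFCEltwiseFusion after this change; ops before
# "sqrt" are inferred from fc_post_ops_list in test_fc_subgraph.py.
FUSIBLE_ELTWISE_OPS = {"relu", "square", "square_root", "sqrt",
                       "exp", "abs", "clip", "LeakyReLU"}

def supports_eltwise_fusion(op_name):
    return op_name in FUSIBLE_ELTWISE_OPS

assert supports_eltwise_fusion("LeakyReLU")    # newly accepted
assert not supports_eltwise_fusion("softmax")  # still rejected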
16 changes: 14 additions & 2 deletions src/operator/subgraph/mkldnn/mkldnn_fc.cc
@@ -286,8 +286,16 @@ void SgMKLDNNFCOp::Forward(const OpContext &ctx,
     if (fuse_requantize || mkldnn_param.enable_float_output) {
       float tmp_scale_ = 1.0f;
       if (fuse_requantize) {
-        tmp_scale_ =
-            GetQuantizeScale(output.dtype(), cached_min_output_, cached_max_output_) / data_scale_;
+        if (mkldnn_param.with_eltwise) {
+          tmp_scale_ = 1.0 / data_scale_;
+          full_param_.eltwise_param.scale =
+              GetQuantizeScale(output.dtype(), cached_min_output_, cached_max_output_);
+        } else {
+          tmp_scale_ =
+              GetQuantizeScale(output.dtype(),
+                               cached_min_output_,
+                               cached_max_output_) / data_scale_;
+        }
       } else {
         tmp_scale_ = 1.0 / data_scale_;
       }
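This split matters because, for a nonlinear fused activation, applying the requantize scale before the activation is not equivalent to applying it after; attaching the scale to the eltwise post-op keeps it after the activation. A small Python sketch with made-up numbers:

import math

# Why the requantize scale moves onto the eltwise post-op once an
# activation is fused: for a nonlinear f such as GELU, scaling before f
# differs from scaling after f. All values here are illustrative.
def gelu(x):
    return 0.5 * x * (1.0 + math.erf(x / math.sqrt(2.0)))

acc = 42.0         # hypothetical FC accumulator value
data_scale = 64.0  # assumed input quantization scale
q = 0.5            # assumed requantize scale, i.e. GetQuantizeScale(...)

folded = gelu(acc * (q / data_scale))       # old path: q folded before f
split = q * gelu(acc * (1.0 / data_scale))  # new path: q applied after f
assert abs(folded - split) > 1e-3           # the two disagree for GELU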
@@ -405,6 +413,10 @@ static void SgMKLDNNFCParamParser(nnvm::NodeAttrs *attrs) {
       if (op_name == "Activation") {
         const ActivationParam act_param = nnvm::get<ActivationParam>(node->attrs.parsed);
         full_param.eltwise_param.alg = GetMKLDNNActAlgo(act_param);
+      } else if (op_name == "LeakyReLU") {
+        const auto act_param = nnvm::get<LeakyReLUParam>(node->attrs.parsed);
+        full_param.eltwise_param.alpha = act_param.slope;
+        full_param.eltwise_param.alg = GetMKLDNNActAlgo(act_param);
       } else if (op_name == "clip") {
         const ClipParam clip_param = nnvm::get<ClipParam>(node->attrs.parsed);
         full_param.eltwise_param.alg = mkldnn::algorithm::eltwise_bounded_relu;
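The LeakyReLU overload of GetMKLDNNActAlgo lives elsewhere in the MKLDNN activation code; the mapping this parser branch relies on is roughly the following (a sketch only, and the exact oneDNN algorithm names may differ across versions):

# Assumed act_type -> oneDNN eltwise algorithm mapping behind
# GetMKLDNNActAlgo(LeakyReLUParam); names are not taken from this diff.
LEAKYRELU_TO_ONEDNN = {
    "leaky": "eltwise_relu",  # negative slope goes into eltwise_param.alpha
    "elu":   "eltwise_elu",
    "gelu":  "eltwise_gelu",
}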
10 changes: 10 additions & 0 deletions src/operator/subgraph/mkldnn/mkldnn_fc_property.h
@@ -102,6 +102,16 @@ class SgMKLDNNFCSelector : public SubgraphSelector {
             return true;
           }
         }
+        if (new_node.op() == Op::Get("LeakyReLU")) {
+          const LeakyReLUParam &param = nnvm::get<LeakyReLUParam>(new_node.attrs.parsed);
+          if (param.act_type == leakyrelu::kLeakyReLU ||
+              param.act_type == leakyrelu::kELU ||
+              param.act_type == leakyrelu::kGELU) {
+            matched_list_.push_back(&new_node);
+            status_ = kSuccess;
+            return true;
+          }
+        }
         if (!quantized_ && (new_node.op() == Op::Get("square") ||
                             new_node.op() == Op::Get("sqrt") ||
                             new_node.op() == Op::Get("exp"))) {
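Note that only three of LeakyReLU's modes are matched; variants such as prelu (which takes a learned slope tensor as a second input) and rrelu (randomized slope) do not extend the FC subgraph. A one-line Python sketch of the acceptance rule:

# Sketch of the selector's rule: only these LeakyReLU modes are fused;
# 'prelu' and 'rrelu' are deliberately left out of the subgraph.
def selects_leakyrelu(act_type):
    return act_type in ("leaky", "elu", "gelu")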
4 changes: 3 additions & 1 deletion tests/python/mkl/subgraphs/test_fc_subgraph.py
@@ -23,7 +23,7 @@
 from mxnet.gluon import nn
 from mxnet.test_utils import assert_almost_equal_with_err
 
-fc_post_ops_list=['relu', 'sigmoid', 'tanh', 'softrelu',
+fc_post_ops_list=['relu', 'sigmoid', 'tanh', 'softrelu', 'gelu', 'elu', 'leaky',
                   'square', 'square_root', 'abs', 'exp', 'bounded_relu']
 
 def test_float64_fallback():
@@ -71,6 +71,8 @@ def hybrid_forward(self, F, x):
         fc_out = self.fc(x)
         if self.alg in ['relu', 'sigmoid', 'tanh', 'softrelu']:
             out = F.Activation(fc_out, act_type=self.alg)
+        elif self.alg in ['gelu', 'elu', 'leaky']:
+            out = F.LeakyReLU(fc_out, act_type=self.alg)
         elif self.alg == 'square':
             out = F.square(fc_out)
         elif self.alg == 'square_root':
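A hypothetical end-to-end exercise of the new pattern outside the test harness (block and attribute names follow MXNet 1.x Gluon conventions and are assumptions, not taken from this diff):

import mxnet as mx
from mxnet.gluon import nn

# FC followed by a LeakyReLU-family activation; with MKL-DNN enabled,
# the subgraph pass above should fuse both nodes into one FC op.
net = nn.HybridSequential()
net.add(nn.Dense(16))
net.add(nn.LeakyReLU(0.25))  # 'leaky' mode; 'gelu'/'elu' go through F.LeakyReLU
net.initialize()
net.hybridize(static_alloc=True)
out = net(mx.nd.random.uniform(shape=(2, 8)))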