
Commit 65a2067

Copilot and titaiwangms committed
Update optimizer opset versions to sync with CPU kernel registrations
Update operator opset version lists in optimizer fusion patterns to include the latest versions registered in cpu_execution_provider.cc:

- Conv: {1, 11} → {1, 11, 22} in all fusion files
- HardSigmoid: {6} → {6, 22} in conv/gemm fusion files
- Activation operators in gemm_activation_fusion: add missing versions (Elu +22, LeakyRelu +16, Selu +22, Softplus +22, Softsign +22, ThresholdedRelu +22)
- Dropout: add version 22 in elimination and fusion files
- MaxPool: add version 22 in nchwc_transformer and pad_fusion
- AveragePool: add versions 19, 22 in nchwc_transformer and pad_fusion
- GlobalMaxPool/GlobalAveragePool: add version 22 in nchwc_transformer
- Transpose: add version 21 in nchwc_transformer, gemm_transpose_fusion, attention_fusion_helper
- BatchNormalization: add version 15 in nchwc_transformer
- Resize: add versions 18, 19 in nchwc_transformer
- QDQ propagation: add versions 22-25 for MaxPool, Reshape, Transpose, Squeeze, Unsqueeze

Co-authored-by: titaiwangms <18010845+titaiwangms@users.noreply.github.com>
1 parent c8fb062 commit 65a2067
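For orientation: the opset lists in these fusions are matched against a node's resolved since-version, while the kernels themselves are registered per since-version range in cpu_execution_provider.cc, so the two must be kept in sync by hand. A minimal sketch of the correspondence for Conv, assuming the standard registration macros (the 11-21 versioned range shown is illustrative, not copied from the source):

// Kernel side (sketch): one registration entry per since-version range.
class ONNX_OPERATOR_VERSIONED_KERNEL_CLASS_NAME(kCpuExecutionProvider, kOnnxDomain, 11, 21, Conv);
class ONNX_OPERATOR_KERNEL_CLASS_NAME(kCpuExecutionProvider, kOnnxDomain, 22, Conv);  // newly registered opset 22 kernel

// Optimizer side (as updated in this commit): a Conv node resolved to
// since-version 22 only matches once 22 appears in the hard-coded list.
if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Conv", {1, 11, 22})) {
  return false;  // fusion skips the node
}

Without the list updates, models imported at opset 22 or later would still execute, but these fusions would silently stop applying to them.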

13 files changed: +37 −37 lines

onnxruntime/core/optimizer/attention_fusion_helper.h

Lines changed: 1 addition & 1 deletion
@@ -1447,7 +1447,7 @@ bool FuseGptAttention(Node& layer_norm, Graph& graph, int64_t hidden_size, std::
     return false;
   }
 
-  if (graph_utils::IsSupportedOptypeVersionAndDomain(*k_concat, "Transpose", {1, 13}, kOnnxDomain)) {
+  if (graph_utils::IsSupportedOptypeVersionAndDomain(*k_concat, "Transpose", {1, 13, 21}, kOnnxDomain)) {
     transpose_optimized_pattern = true;
     DEBUG_LOG("Using transpose optimized pattern");
     opt_k_transpose = k_concat;

onnxruntime/core/optimizer/bias_dropout_fusion.cc

Lines changed: 1 addition & 1 deletion
@@ -144,7 +144,7 @@ Status BiasDropoutFusion::ApplyImpl(Graph& graph, bool& modified, int graph_leve
     }
 
     const Node& next_node = (*next_node_itr);
-    if ((!graph_utils::IsSupportedOptypeVersionAndDomain(next_node, "Dropout", {12, 13}, kOnnxDomain) &&
+    if ((!graph_utils::IsSupportedOptypeVersionAndDomain(next_node, "Dropout", {12, 13, 22}, kOnnxDomain) &&
          !graph_utils::IsSupportedOptypeVersionAndDomain(next_node, "BitmaskDropout", {1}, kMSDomain)) ||
         next_node.GetExecutionProviderType() != node.GetExecutionProviderType()) {
       continue;

onnxruntime/core/optimizer/conv_activation_fusion.cc

Lines changed: 2 additions & 2 deletions
@@ -107,7 +107,7 @@ class ConvActivationSelector : public NodeSelector {
       return std::nullopt;
     } else if (node_ep.empty() || node_ep == kCpuExecutionProvider || node_ep == kJsExecutionProvider || node_ep == kWebGpuExecutionProvider) {
       if (!is_supported_non_cuda_ep_activation(*next_node) &&
-          !graph_utils::IsSupportedOptypeVersionAndDomain(*next_node, "HardSigmoid", {6})) {
+          !graph_utils::IsSupportedOptypeVersionAndDomain(*next_node, "HardSigmoid", {6, 22})) {
         return std::nullopt;
       }
     } else {
@@ -212,7 +212,7 @@ void RegisterConvActivationFusionRules(SelectorActionRegistry& registry) {
   const std::string msDomainConv = SelectorActionRegistry::OpVersionsMapKey("NhwcConv", kMSDomain);
   auto selector = std::make_unique<selectors::ConvActivationSelector>();
 
-  registry.RegisterSelectorAndAction(name, {{"Conv", {1, 11}}, {msInternalNHWCDomainConv, {1, 11}}, {msDomainConv, {1}}},
+  registry.RegisterSelectorAndAction(name, {{"Conv", {1, 11, 22}}, {msInternalNHWCDomainConv, {1, 11, 22}}, {msDomainConv, {1}}},
                                      std::move(selector), std::move(action));
 #else
   registry.RegisterAction(name, std::move(action));

onnxruntime/core/optimizer/conv_add_act_fusion.cc

Lines changed: 2 additions & 2 deletions
@@ -113,7 +113,7 @@ class ConvAddActivationSelector : public NodeSelector {
       return true;
     }
 
-    if (graph_utils::IsSupportedOptypeVersionAndDomain(activation_node, "HardSigmoid", {6})) {
+    if (graph_utils::IsSupportedOptypeVersionAndDomain(activation_node, "HardSigmoid", {6, 22})) {
       return true;
     }
     return false;
@@ -288,7 +288,7 @@ void RegisterConvAddActivationFusionRules(SelectorActionRegistry& registry) {
   auto action = std::make_unique<actions::FuseConvAddActivationAction>();
   auto selector = std::make_unique<selectors::ConvAddActivationSelector>();
   std::string msDomainNhwcFusedConv = SelectorActionRegistry::OpVersionsMapKey("NhwcFusedConv", kMSDomain);
-  registry.RegisterSelectorAndAction("ConvAddAct", {{"Conv", {1, 11}}, {msDomainNhwcFusedConv, {1, 11}}},
+  registry.RegisterSelectorAndAction("ConvAddAct", {{"Conv", {1, 11, 22}}, {msDomainNhwcFusedConv, {1, 11, 22}}},
                                      std::move(selector), std::move(action));
 }

onnxruntime/core/optimizer/conv_add_fusion.cc

Lines changed: 1 addition & 1 deletion
@@ -107,7 +107,7 @@ Status ConvAddFusion::Apply(Graph& graph, Node& node, RewriteRuleEffect& modifie
 }
 
 bool ConvAddFusion::SatisfyCondition(const Graph& graph, const Node& node, const logging::Logger&) const {
-  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Conv", {1, 11}) ||
+  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Conv", {1, 11, 22}) ||
       node.GetOutputEdgesCount() != 1) {
     return false;
   }

onnxruntime/core/optimizer/conv_bn_fusion.cc

Lines changed: 1 addition & 1 deletion
@@ -145,7 +145,7 @@ Status ConvBNFusion::Apply(Graph& graph, Node& node, RewriteRuleEffect& rule_eff
 }
 
 bool ConvBNFusion::SatisfyCondition(const Graph& graph, const Node& node, const logging::Logger&) const {
-  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Conv", {1, 11}) ||
+  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Conv", {1, 11, 22}) ||
       node.GetOutputEdgesCount() != 1) {
     return false;
   }

onnxruntime/core/optimizer/conv_mul_fusion.cc

Lines changed: 1 addition & 1 deletion
@@ -113,7 +113,7 @@ Status ConvMulFusion::Apply(Graph& graph, Node& node, RewriteRuleEffect& rule_ef
 }
 
 bool ConvMulFusion::SatisfyCondition(const Graph& graph, const Node& node, const logging::Logger&) const {
-  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Conv", {1, 11}) ||
+  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Conv", {1, 11, 22}) ||
       node.GetOutputEdgesCount() != 1) {
     return false;
   }

onnxruntime/core/optimizer/dropout_elimination.cc

Lines changed: 2 additions & 2 deletions
@@ -22,7 +22,7 @@ Status EliminateDropout::Apply(Graph& graph, Node& node, RewriteRuleEffect& rule
 bool EliminateDropout::SatisfyCondition(const Graph& graph, const Node& node, const logging::Logger& logger) const {
   // We currently support elimination for Dropout operator v1, v6, v7, v10 and v12.
   // REVIEW(mzs): v10 implementation does not exist.
-  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Dropout", {1, 6, 7, 10, 12, 13})) {
+  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Dropout", {1, 6, 7, 10, 12, 13, 22})) {
     return false;
   }
 
@@ -32,7 +32,7 @@ bool EliminateDropout::SatisfyCondition(const Graph& graph, const Node& node, co
   // 2. ratio input is not a graph input, so it cannot be overridden
 
   // support opset 12 and above for ort training
-  if (graph_utils::MatchesOpSinceVersion(node, {12, 13}) && node.InputDefs().size() > 1) {
+  if (graph_utils::MatchesOpSinceVersion(node, {12, 13, 22}) && node.InputDefs().size() > 1) {
     if (graph_utils::IsGraphInput(graph, node.InputDefs()[1])) {
       return false;
     }

onnxruntime/core/optimizer/gemm_activation_fusion.cc

Lines changed: 7 additions & 7 deletions
@@ -21,20 +21,20 @@ bool IsSupportedOptypeVersionAndDomain(const Node& node, const std::string& op_t
 // If the op has multiple versions, here we require it must have a single implementation that can work across all the
 // versions. Because in the fusion, we discarded the op version information.
 bool IsFusableActivation(const Node& node) {
-  return IsSupportedOptypeVersionAndDomain(node, "Elu", {6}, kOnnxDomain) ||
-         IsSupportedOptypeVersionAndDomain(node, "HardSigmoid", {6}, kOnnxDomain) ||
-         IsSupportedOptypeVersionAndDomain(node, "LeakyRelu", {6}, kOnnxDomain) ||
+  return IsSupportedOptypeVersionAndDomain(node, "Elu", {6, 22}, kOnnxDomain) ||
+         IsSupportedOptypeVersionAndDomain(node, "HardSigmoid", {6, 22}, kOnnxDomain) ||
+         IsSupportedOptypeVersionAndDomain(node, "LeakyRelu", {6, 16}, kOnnxDomain) ||
          IsSupportedOptypeVersionAndDomain(node, "Relu", {6, 13, 14}, kOnnxDomain) ||
-         IsSupportedOptypeVersionAndDomain(node, "Selu", {6}, kOnnxDomain) ||
+         IsSupportedOptypeVersionAndDomain(node, "Selu", {6, 22}, kOnnxDomain) ||
          IsSupportedOptypeVersionAndDomain(node, "Sigmoid", {6, 13}, kOnnxDomain) ||
-         IsSupportedOptypeVersionAndDomain(node, "Softplus", {1}, kOnnxDomain) ||
-         IsSupportedOptypeVersionAndDomain(node, "Softsign", {1}, kOnnxDomain) ||
+         IsSupportedOptypeVersionAndDomain(node, "Softplus", {1, 22}, kOnnxDomain) ||
+         IsSupportedOptypeVersionAndDomain(node, "Softsign", {1, 22}, kOnnxDomain) ||
          IsSupportedOptypeVersionAndDomain(node, "Tanh", {6, 13}, kOnnxDomain) ||
 #ifndef DISABLE_CONTRIB_OPS
          IsSupportedOptypeVersionAndDomain(node, "ScaledTanh", {1}, kOnnxDomain) ||
          IsSupportedOptypeVersionAndDomain(node, "ParametricSoftplus", {1}, kOnnxDomain) ||
 #endif
-         IsSupportedOptypeVersionAndDomain(node, "ThresholdedRelu", {1, 10}, kOnnxDomain);
+         IsSupportedOptypeVersionAndDomain(node, "ThresholdedRelu", {1, 10, 22}, kOnnxDomain);
 }
 } // namespace
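The comment in this hunk states the invariant that makes extending these lists safe: the fusion discards the activation's opset version, so every version in a list must share a single kernel implementation. A minimal sketch of what that looks like on the registration side, with an assumed version split, builder, and kernel class name (none of these details are copied from cpu_execution_provider.cc):

// Both entries bind the same LeakyRelu<float> class, so a fused node can
// ignore whether the original activation was since-version 6 or 16.
ONNX_CPU_OPERATOR_VERSIONED_KERNEL(
    LeakyRelu, 6, 15,
    KernelDefBuilder().TypeConstraint("T", DataTypeImpl::GetTensorType<float>()),
    LeakyRelu<float>);
ONNX_CPU_OPERATOR_KERNEL(
    LeakyRelu, 16,
    KernelDefBuilder().TypeConstraint("T", DataTypeImpl::GetTensorType<float>()),
    LeakyRelu<float>);

Conversely, if a newer opset ever changed an activation's semantics, adding that version to IsFusableActivation would be incorrect even though a kernel for it exists.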

onnxruntime/core/optimizer/gemm_transpose_fusion.cc

Lines changed: 2 additions & 2 deletions
@@ -104,7 +104,7 @@ bool GemmTransposeFusion::SatisfyCondition(const Graph& graph, const Node& node,
 
   // Fusion can be applied if there is a transpose at either of the inputs
   for (auto node_it = node.InputNodesBegin(); node_it != node.InputNodesEnd(); ++node_it) {
-    if (graph_utils::IsSupportedOptypeVersionAndDomain(*node_it, "Transpose", {1, 13}) &&
+    if (graph_utils::IsSupportedOptypeVersionAndDomain(*node_it, "Transpose", {1, 13, 21}) &&
         !graph.NodeProducesGraphOutput(*node_it) &&
         // Make sure the two nodes do not span execution providers.
         node_it->GetExecutionProviderType() == node.GetExecutionProviderType()) {
@@ -128,7 +128,7 @@ bool GemmTransposeFusion::SatisfyCondition(const Graph& graph, const Node& node,
 
   const auto next_node_it = node.OutputNodesBegin();
   if (next_node_it != node.OutputNodesEnd() &&
-      graph_utils::IsSupportedOptypeVersionAndDomain(*next_node_it, "Transpose", {1, 13}) &&
+      graph_utils::IsSupportedOptypeVersionAndDomain(*next_node_it, "Transpose", {1, 13, 21}) &&
       next_node_it->GetInputEdgesCount() == 1 &&
       // Make sure the two nodes do not span execution providers.
      next_node_it->GetExecutionProviderType() == node.GetExecutionProviderType()) {
