diff --git a/onnxruntime/core/optimizer/attention_fusion_helper.h b/onnxruntime/core/optimizer/attention_fusion_helper.h
index 33fff5e1e1a16..a328a6d451a89 100644
--- a/onnxruntime/core/optimizer/attention_fusion_helper.h
+++ b/onnxruntime/core/optimizer/attention_fusion_helper.h
@@ -1447,7 +1447,7 @@ bool FuseGptAttention(Node& layer_norm, Graph& graph, int64_t hidden_size, std::
     return false;
   }
 
-  if (graph_utils::IsSupportedOptypeVersionAndDomain(*k_concat, "Transpose", {1, 13}, kOnnxDomain)) {
+  if (graph_utils::IsSupportedOptypeVersionAndDomain(*k_concat, "Transpose", {1, 13, 21}, kOnnxDomain)) {
     transpose_optimized_pattern = true;
     DEBUG_LOG("Using transpose optimized pattern");
     opt_k_transpose = k_concat;
diff --git a/onnxruntime/core/optimizer/bias_dropout_fusion.cc b/onnxruntime/core/optimizer/bias_dropout_fusion.cc
index 1d4ca9de1d9ff..b93bfedeb1a02 100644
--- a/onnxruntime/core/optimizer/bias_dropout_fusion.cc
+++ b/onnxruntime/core/optimizer/bias_dropout_fusion.cc
@@ -144,7 +144,7 @@ Status BiasDropoutFusion::ApplyImpl(Graph& graph, bool& modified, int graph_leve
     }
     const Node& next_node = (*next_node_itr);
 
-    if ((!graph_utils::IsSupportedOptypeVersionAndDomain(next_node, "Dropout", {12, 13}, kOnnxDomain) &&
+    if ((!graph_utils::IsSupportedOptypeVersionAndDomain(next_node, "Dropout", {12, 13, 22}, kOnnxDomain) &&
          !graph_utils::IsSupportedOptypeVersionAndDomain(next_node, "BitmaskDropout", {1}, kMSDomain)) ||
         next_node.GetExecutionProviderType() != node.GetExecutionProviderType()) {
       continue;
diff --git a/onnxruntime/core/optimizer/compute_optimizer/upstream_reshape.cc b/onnxruntime/core/optimizer/compute_optimizer/upstream_reshape.cc
index 716988e93312c..51a4b38a125b2 100644
--- a/onnxruntime/core/optimizer/compute_optimizer/upstream_reshape.cc
+++ b/onnxruntime/core/optimizer/compute_optimizer/upstream_reshape.cc
@@ -225,7 +225,7 @@ Status UpStreamReshapeGraphTransformer::RemoveOriginalReshapeNode(
 
 std::optional UpStreamReshapeGraphTransformer::IsSupportedForUpstream(
     Graph& graph, Node& node, const logging::Logger& logger) const {
-  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Reshape", {1, 5, 13, 14}, kOnnxDomain)) {
+  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Reshape", {1, 5, 13, 14, 19, 21, 23, 24, 25}, kOnnxDomain)) {
     return std::nullopt;
   }
diff --git a/onnxruntime/core/optimizer/conv_activation_fusion.cc b/onnxruntime/core/optimizer/conv_activation_fusion.cc
index b7f5af5888be0..44a20428a09e0 100644
--- a/onnxruntime/core/optimizer/conv_activation_fusion.cc
+++ b/onnxruntime/core/optimizer/conv_activation_fusion.cc
@@ -107,7 +107,7 @@ class ConvActivationSelector : public NodeSelector {
       return std::nullopt;
     } else if (node_ep.empty() || node_ep == kCpuExecutionProvider || node_ep == kJsExecutionProvider || node_ep == kWebGpuExecutionProvider) {
       if (!is_supported_non_cuda_ep_activation(*next_node) &&
-          !graph_utils::IsSupportedOptypeVersionAndDomain(*next_node, "HardSigmoid", {6})) {
+          !graph_utils::IsSupportedOptypeVersionAndDomain(*next_node, "HardSigmoid", {6, 22})) {
         return std::nullopt;
       }
     } else {
@@ -212,7 +212,7 @@ void RegisterConvActivationFusionRules(SelectorActionRegistry& registry) {
   const std::string msDomainConv = SelectorActionRegistry::OpVersionsMapKey("NhwcConv", kMSDomain);
   auto selector = std::make_unique();
 
-  registry.RegisterSelectorAndAction(name, {{"Conv", {1, 11}}, {msInternalNHWCDomainConv, {1, 11}}, {msDomainConv, {1}}},
+  registry.RegisterSelectorAndAction(name, {{"Conv", {1, 11, 22}}, {msInternalNHWCDomainConv, {1, 11, 22}}, {msDomainConv, {1}}},
                                      std::move(selector), std::move(action));
 #else
   registry.RegisterAction(name, std::move(action));
diff --git a/onnxruntime/core/optimizer/conv_add_act_fusion.cc b/onnxruntime/core/optimizer/conv_add_act_fusion.cc
index 6f90eaf07ef4d..45441d20a4112 100644
--- a/onnxruntime/core/optimizer/conv_add_act_fusion.cc
+++ b/onnxruntime/core/optimizer/conv_add_act_fusion.cc
@@ -113,7 +113,7 @@ class ConvAddActivationSelector : public NodeSelector {
       return true;
     }
 
-    if (graph_utils::IsSupportedOptypeVersionAndDomain(activation_node, "HardSigmoid", {6})) {
+    if (graph_utils::IsSupportedOptypeVersionAndDomain(activation_node, "HardSigmoid", {6, 22})) {
       return true;
     }
     return false;
@@ -288,7 +288,7 @@ void RegisterConvAddActivationFusionRules(SelectorActionRegistry& registry) {
   auto action = std::make_unique();
   auto selector = std::make_unique();
   std::string msDomainNhwcFusedConv = SelectorActionRegistry::OpVersionsMapKey("NhwcFusedConv", kMSDomain);
-  registry.RegisterSelectorAndAction("ConvAddAct", {{"Conv", {1, 11}}, {msDomainNhwcFusedConv, {1, 11}}},
+  registry.RegisterSelectorAndAction("ConvAddAct", {{"Conv", {1, 11, 22}}, {msDomainNhwcFusedConv, {1, 11, 22}}},
                                      std::move(selector), std::move(action));
 }
diff --git a/onnxruntime/core/optimizer/conv_add_fusion.cc b/onnxruntime/core/optimizer/conv_add_fusion.cc
index e1fd199bfa943..dd57334666e9f 100644
--- a/onnxruntime/core/optimizer/conv_add_fusion.cc
+++ b/onnxruntime/core/optimizer/conv_add_fusion.cc
@@ -107,7 +107,7 @@ Status ConvAddFusion::Apply(Graph& graph, Node& node, RewriteRuleEffect& modifie
 }
 
 bool ConvAddFusion::SatisfyCondition(const Graph& graph, const Node& node, const logging::Logger&) const {
-  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Conv", {1, 11}) ||
+  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Conv", {1, 11, 22}) ||
       node.GetOutputEdgesCount() != 1) {
     return false;
   }
diff --git a/onnxruntime/core/optimizer/conv_bn_fusion.cc b/onnxruntime/core/optimizer/conv_bn_fusion.cc
index 4c493f45a2b61..dff11f2276097 100644
--- a/onnxruntime/core/optimizer/conv_bn_fusion.cc
+++ b/onnxruntime/core/optimizer/conv_bn_fusion.cc
@@ -145,7 +145,7 @@ Status ConvBNFusion::Apply(Graph& graph, Node& node, RewriteRuleEffect& rule_eff
 }
 
 bool ConvBNFusion::SatisfyCondition(const Graph& graph, const Node& node, const logging::Logger&) const {
-  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Conv", {1, 11}) ||
+  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Conv", {1, 11, 22}) ||
       node.GetOutputEdgesCount() != 1) {
     return false;
   }
diff --git a/onnxruntime/core/optimizer/conv_mul_fusion.cc b/onnxruntime/core/optimizer/conv_mul_fusion.cc
index 9563415ad56b6..4dd10edbbfb89 100644
--- a/onnxruntime/core/optimizer/conv_mul_fusion.cc
+++ b/onnxruntime/core/optimizer/conv_mul_fusion.cc
@@ -113,7 +113,7 @@ Status ConvMulFusion::Apply(Graph& graph, Node& node, RewriteRuleEffect& rule_ef
 }
 
 bool ConvMulFusion::SatisfyCondition(const Graph& graph, const Node& node, const logging::Logger&) const {
-  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Conv", {1, 11}) ||
+  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Conv", {1, 11, 22}) ||
       node.GetOutputEdgesCount() != 1) {
     return false;
   }
diff --git a/onnxruntime/core/optimizer/dropout_elimination.cc b/onnxruntime/core/optimizer/dropout_elimination.cc
index d989c4dd80532..b76f6289d5fc6 100644
--- a/onnxruntime/core/optimizer/dropout_elimination.cc
+++ b/onnxruntime/core/optimizer/dropout_elimination.cc
@@ -22,7 +22,7 @@ Status EliminateDropout::Apply(Graph& graph, Node& node, RewriteRuleEffect& rule
 bool EliminateDropout::SatisfyCondition(const Graph& graph, const Node& node, const logging::Logger& logger) const {
   // We currently support elimination for Dropout operator v1, v6, v7, v10 and v12.
   // REVIEW(mzs): v10 implementation does not exist.
-  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Dropout", {1, 6, 7, 10, 12, 13})) {
+  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Dropout", {1, 6, 7, 10, 12, 13, 22})) {
     return false;
   }
@@ -32,7 +32,7 @@ bool EliminateDropout::SatisfyCondition(const Graph& graph, const Node& node, co
   // 2. ratio input is not a graph input, so it cannot be overridden
 
   // support opset 12 and above for ort training
-  if (graph_utils::MatchesOpSinceVersion(node, {12, 13}) && node.InputDefs().size() > 1) {
+  if (graph_utils::MatchesOpSinceVersion(node, {12, 13, 22}) && node.InputDefs().size() > 1) {
     if (graph_utils::IsGraphInput(graph, node.InputDefs()[1])) {
       return false;
     }
diff --git a/onnxruntime/core/optimizer/fast_gelu_fusion.cc b/onnxruntime/core/optimizer/fast_gelu_fusion.cc
index a38a0fc06fbb2..3d8e04b243197 100644
--- a/onnxruntime/core/optimizer/fast_gelu_fusion.cc
+++ b/onnxruntime/core/optimizer/fast_gelu_fusion.cc
@@ -151,7 +151,7 @@ MatchResult FastGeluFusion::CheckSecondFormula(Graph& graph, Node& pow1_node,
   if (p_cast1_node != nullptr) {
     Node& cast1_node = *graph.GetNode(p_cast1_node->Index());
     // this is fused Cast node, so expect 2 output edges
-    if (!(graph_utils::IsSupportedOptypeVersionAndDomain(cast1_node, "Cast", {9, 13, 19}) &&
+    if (!(graph_utils::IsSupportedOptypeVersionAndDomain(cast1_node, "Cast", {9, 13, 19, 21, 23, 24, 25}) &&
           CheckNode(graph, cast1_node, pow1_node.GetExecutionProviderType(), false)) ||
         cast1_node.GetOutputEdgesCount() != 2) {
       return matchResult;
@@ -262,7 +262,7 @@ Status FastGeluFusion::ApplyImpl(Graph& graph, bool& modified, int graph_level,
     if (p_cast3_node == nullptr) continue;
 
     Node& cast3_node = *graph.GetNode(p_cast3_node->Index());
-    if (!(graph_utils::IsSupportedOptypeVersionAndDomain(cast3_node, "Cast", {9, 13, 19}) &&
+    if (!(graph_utils::IsSupportedOptypeVersionAndDomain(cast3_node, "Cast", {9, 13, 19, 21, 23, 24, 25}) &&
           CheckNode(graph, cast3_node, node.GetExecutionProviderType(), true))) {
       continue;
     }
diff --git a/onnxruntime/core/optimizer/gemm_activation_fusion.cc b/onnxruntime/core/optimizer/gemm_activation_fusion.cc
index 50be2cbd48f7b..7465607ac7124 100644
--- a/onnxruntime/core/optimizer/gemm_activation_fusion.cc
+++ b/onnxruntime/core/optimizer/gemm_activation_fusion.cc
@@ -21,20 +21,20 @@ bool IsSupportedOptypeVersionAndDomain(const Node& node, const std::string& op_t
 // If the op has multiple versions, here we require it must have a single implementation that can work across all the
 // versions. Because in the fusion, we discarded the op version information.
 bool IsFusableActivation(const Node& node) {
-  return IsSupportedOptypeVersionAndDomain(node, "Elu", {6}, kOnnxDomain) ||
-         IsSupportedOptypeVersionAndDomain(node, "HardSigmoid", {6}, kOnnxDomain) ||
-         IsSupportedOptypeVersionAndDomain(node, "LeakyRelu", {6}, kOnnxDomain) ||
+  return IsSupportedOptypeVersionAndDomain(node, "Elu", {6, 22}, kOnnxDomain) ||
+         IsSupportedOptypeVersionAndDomain(node, "HardSigmoid", {6, 22}, kOnnxDomain) ||
+         IsSupportedOptypeVersionAndDomain(node, "LeakyRelu", {6, 16}, kOnnxDomain) ||
          IsSupportedOptypeVersionAndDomain(node, "Relu", {6, 13, 14}, kOnnxDomain) ||
-         IsSupportedOptypeVersionAndDomain(node, "Selu", {6}, kOnnxDomain) ||
+         IsSupportedOptypeVersionAndDomain(node, "Selu", {6, 22}, kOnnxDomain) ||
          IsSupportedOptypeVersionAndDomain(node, "Sigmoid", {6, 13}, kOnnxDomain) ||
-         IsSupportedOptypeVersionAndDomain(node, "Softplus", {1}, kOnnxDomain) ||
-         IsSupportedOptypeVersionAndDomain(node, "Softsign", {1}, kOnnxDomain) ||
+         IsSupportedOptypeVersionAndDomain(node, "Softplus", {1, 22}, kOnnxDomain) ||
+         IsSupportedOptypeVersionAndDomain(node, "Softsign", {1, 22}, kOnnxDomain) ||
          IsSupportedOptypeVersionAndDomain(node, "Tanh", {6, 13}, kOnnxDomain) ||
 #ifndef DISABLE_CONTRIB_OPS
          IsSupportedOptypeVersionAndDomain(node, "ScaledTanh", {1}, kOnnxDomain) ||
          IsSupportedOptypeVersionAndDomain(node, "ParametricSoftplus", {1}, kOnnxDomain) ||
 #endif
-         IsSupportedOptypeVersionAndDomain(node, "ThresholdedRelu", {1, 10}, kOnnxDomain);
+         IsSupportedOptypeVersionAndDomain(node, "ThresholdedRelu", {1, 10, 22}, kOnnxDomain);
 }
 
 }  // namespace
diff --git a/onnxruntime/core/optimizer/gemm_transpose_fusion.cc b/onnxruntime/core/optimizer/gemm_transpose_fusion.cc
index d1f862460dae7..da454b67aecf4 100644
--- a/onnxruntime/core/optimizer/gemm_transpose_fusion.cc
+++ b/onnxruntime/core/optimizer/gemm_transpose_fusion.cc
@@ -104,7 +104,7 @@ bool GemmTransposeFusion::SatisfyCondition(const Graph& graph, const Node& node,
 
   // Fusion can be applied if there is a transpose at either of the inputs
   for (auto node_it = node.InputNodesBegin(); node_it != node.InputNodesEnd(); ++node_it) {
-    if (graph_utils::IsSupportedOptypeVersionAndDomain(*node_it, "Transpose", {1, 13}) &&
+    if (graph_utils::IsSupportedOptypeVersionAndDomain(*node_it, "Transpose", {1, 13, 21, 23, 24, 25}) &&
        !graph.NodeProducesGraphOutput(*node_it) &&
        // Make sure the two nodes do not span execution providers.
        node_it->GetExecutionProviderType() == node.GetExecutionProviderType()) {
@@ -128,7 +128,7 @@ bool GemmTransposeFusion::SatisfyCondition(const Graph& graph, const Node& node,
 
   const auto next_node_it = node.OutputNodesBegin();
   if (next_node_it != node.OutputNodesEnd() &&
-      graph_utils::IsSupportedOptypeVersionAndDomain(*next_node_it, "Transpose", {1, 13}) &&
+      graph_utils::IsSupportedOptypeVersionAndDomain(*next_node_it, "Transpose", {1, 13, 21, 23, 24, 25}) &&
       next_node_it->GetInputEdgesCount() == 1 &&
      // Make sure the two nodes do not span execution providers.
      next_node_it->GetExecutionProviderType() == node.GetExecutionProviderType()) {
diff --git a/onnxruntime/core/optimizer/isinf_reducesum_fusion.cc b/onnxruntime/core/optimizer/isinf_reducesum_fusion.cc
index 7d249ea715e8d..3f963e86acb0d 100644
--- a/onnxruntime/core/optimizer/isinf_reducesum_fusion.cc
+++ b/onnxruntime/core/optimizer/isinf_reducesum_fusion.cc
@@ -33,7 +33,7 @@ Status IsInfReduceSumFusion::ApplyImpl(Graph& graph, bool& modified, int graph_l
 
     ORT_RETURN_IF_ERROR(Recurse(isinf_node, modified, graph_level, logger));
 
-    if (!graph_utils::IsSupportedOptypeVersionAndDomain(isinf_node, "IsInf", {10}) ||
+    if (!graph_utils::IsSupportedOptypeVersionAndDomain(isinf_node, "IsInf", {10, 20}) ||
        isinf_node.GetOutputEdgesCount() != 1 ||
        graph.NodeProducesGraphOutput(isinf_node)) {
       continue;
@@ -45,7 +45,7 @@ Status IsInfReduceSumFusion::ApplyImpl(Graph& graph, bool& modified, int graph_l
     // This Cast can be skipped as we are replacing the subgraph with IsAllFinite, which supports FP16
     auto cast1_node_iter = isinf_node.InputNodesBegin();
     if (cast1_node_iter != isinf_node.InputNodesEnd() &&
-        graph_utils::IsSupportedOptypeVersionAndDomain(*cast1_node_iter, "Cast", {9, 13, 19}) &&
+        graph_utils::IsSupportedOptypeVersionAndDomain(*cast1_node_iter, "Cast", {9, 13, 19, 21, 23, 24, 25}) &&
        cast1_node_iter->GetOutputEdgesCount() == 1) {
       // check input type of cast node
       Node& cast1_node = *graph.GetNode(cast1_node_iter->Index());
@@ -65,7 +65,7 @@ Status IsInfReduceSumFusion::ApplyImpl(Graph& graph, bool& modified, int graph_l
     }
 
     Node& cast2_node = *graph.GetNode(cast2_node_itr->Index());
-    if (!graph_utils::IsSupportedOptypeVersionAndDomain(cast2_node, "Cast", {9, 13, 19}) ||
+    if (!graph_utils::IsSupportedOptypeVersionAndDomain(cast2_node, "Cast", {9, 13, 19, 21, 23, 24, 25}) ||
        cast2_node.GetOutputEdgesCount() != 1 ||
        graph.NodeProducesGraphOutput(cast2_node)) {
       continue;
diff --git a/onnxruntime/core/optimizer/layer_norm_fusion.cc b/onnxruntime/core/optimizer/layer_norm_fusion.cc
index 8a7f83e871768..3ade3864255ea 100644
--- a/onnxruntime/core/optimizer/layer_norm_fusion.cc
+++ b/onnxruntime/core/optimizer/layer_norm_fusion.cc
@@ -241,7 +241,7 @@ Status LayerNormFusion::ApplyImpl(Graph& graph, bool& modified, int graph_level,
     if (p_reduce_mean_input_node) {
       Node& reduce_mean_input_node = *graph.GetNode(p_reduce_mean_input_node->Index());
       // If input to the 1st ReduceMean is a Cast, and the Cast has same consumer count as subCnt + 1
-      if (graph_utils::IsSupportedOptypeVersionAndDomain(reduce_mean_input_node, "Cast", {9, 13, 19}) &&
+      if (graph_utils::IsSupportedOptypeVersionAndDomain(reduce_mean_input_node, "Cast", {9, 13, 19, 21, 23, 24, 25}) &&
          reduce_mean_input_node.GetExecutionProviderType() == reduce_mean_node.GetExecutionProviderType() &&
          optimizer_utils::CheckOutputEdges(graph, reduce_mean_input_node, static_cast(subCnt) + 1)) {
         nodes_to_remove.insert(nodes_to_remove.begin(), reduce_mean_input_node);
@@ -254,7 +254,7 @@ Status LayerNormFusion::ApplyImpl(Graph& graph, bool& modified, int graph_level,
     const Node* p_cast1 = nullptr;
     if (!p_sub_node_dup && sub_node.GetOutputEdgesCount() == 1) {
       Node& cast_node = *graph.GetNode(sub_node.OutputNodesBegin()->Index());
-      if (graph_utils::IsSupportedOptypeVersionAndDomain(cast_node, "Cast", {9, 13, 19}) &&
+      if (graph_utils::IsSupportedOptypeVersionAndDomain(cast_node, "Cast", {9, 13, 19, 21, 23, 24, 25}) &&
          cast_node.GetExecutionProviderType() == reduce_mean_node.GetExecutionProviderType() &&
          optimizer_utils::CheckOutputEdges(graph, cast_node, 2u) && IsSupportedDataType(cast_node)) {
         p_cast1 = &cast_node;
@@ -353,7 +353,7 @@ Status LayerNormFusion::ApplyImpl(Graph& graph, bool& modified, int graph_level,
     const Node* p_cast2 = graph_utils::FirstParentByType(pow_node, "Cast");
     if (p_cast2 != nullptr && p_cast2 != p_cast1) {
       Node& cast_node = *graph.GetNode(p_cast2->Index());
-      if (!graph_utils::IsSupportedOptypeVersionAndDomain(cast_node, "Cast", {9, 13, 19}) ||
+      if (!graph_utils::IsSupportedOptypeVersionAndDomain(cast_node, "Cast", {9, 13, 19, 21, 23, 24, 25}) ||
          cast_node.GetExecutionProviderType() != reduce_mean_node.GetExecutionProviderType() ||
          !optimizer_utils::CheckOutputEdges(graph, cast_node, 1)) {
         continue;
@@ -371,7 +371,7 @@ Status LayerNormFusion::ApplyImpl(Graph& graph, bool& modified, int graph_level,
     // can be removed. This is one possible place a Cast Op can exist, that is between Div and Mul nodes.
     // div --> mul or div --> cast --> mul
     Node* next_node = graph.GetNode(div_node.OutputNodesBegin()->Index());
-    if (graph_utils::IsSupportedOptypeVersionAndDomain(*next_node, "Cast", {9, 13, 19}) &&
+    if (graph_utils::IsSupportedOptypeVersionAndDomain(*next_node, "Cast", {9, 13, 19, 21, 23, 24, 25}) &&
        optimizer_utils::CheckOutputEdges(graph, *next_node, 1)) {
       nodes_to_remove.push_back(*next_node);
       next_node = graph.GetNode(next_node->OutputNodesBegin()->Index());
@@ -637,7 +637,7 @@ Status SimplifiedLayerNormFusion::ApplyImpl(Graph& graph, bool& modified, int gr
     if (is_gpu_ep && p_pow_input_node) {
       Node& pow_input_node = *graph.GetNode(p_pow_input_node->Index());
       // If input to Pow is a Cast, and the Cast has 2 consumers only (Pow, Div)
-      if (graph_utils::IsSupportedOptypeVersionAndDomain(pow_input_node, "Cast", {9, 13, 19}) &&
+      if (graph_utils::IsSupportedOptypeVersionAndDomain(pow_input_node, "Cast", {9, 13, 19, 21, 23, 24, 25}) &&
          pow_input_node.GetExecutionProviderType() == pow_node.GetExecutionProviderType() &&
          optimizer_utils::CheckOutputEdges(graph, pow_input_node, 2)) {
         nodes_to_remove.insert(nodes_to_remove.begin(), pow_input_node);
@@ -647,7 +647,7 @@ Status SimplifiedLayerNormFusion::ApplyImpl(Graph& graph, bool& modified, int gr
 
     // div --> mul or div --> cast --> mul
     Node* next_node = graph.GetNode(div_node.OutputNodesBegin()->Index());
-    if (graph_utils::IsSupportedOptypeVersionAndDomain(*next_node, "Cast", {9, 13, 19}) &&
+    if (graph_utils::IsSupportedOptypeVersionAndDomain(*next_node, "Cast", {9, 13, 19, 21, 23, 24, 25}) &&
        optimizer_utils::CheckOutputEdges(graph, *next_node, 1)) {
       if (!is_gpu_ep) continue;
       nodes_to_remove.push_back(*next_node);
diff --git a/onnxruntime/core/optimizer/matmul_bn_fusion.cc b/onnxruntime/core/optimizer/matmul_bn_fusion.cc
index 63a15ad630ce9..871571ea64881 100644
--- a/onnxruntime/core/optimizer/matmul_bn_fusion.cc
+++ b/onnxruntime/core/optimizer/matmul_bn_fusion.cc
@@ -10,8 +10,8 @@ namespace onnxruntime {
 namespace {
 const std::vector>> ignorable_nodes{
-    {"Reshape", {1, 5, 13, 14, 19}},
-    {"Transpose", {1, 13}}};
+    {"Reshape", {1, 5, 13, 14, 19, 21, 23, 24, 25}},
+    {"Transpose", {1, 13, 21, 23, 24, 25}}};
 const std::pair> dest = {"BatchNormalization", {1, 6, 7, 9, 14, 15}};
 }  // namespace
@@ -244,4 +244,4 @@ Status MatmulBNFusion::Apply(Graph& graph, Node& matmul_node, RewriteRuleEffect&
   rule_effect = RewriteRuleEffect::kRemovedCurrentNode;
   return Status::OK();
 }
-}  // namespace onnxruntime
\ No newline at end of file
+}  // namespace onnxruntime
diff --git a/onnxruntime/core/optimizer/nchwc_transformer.cc b/onnxruntime/core/optimizer/nchwc_transformer.cc
index bff9d2990118a..e8c3bf24a612f 100644
--- a/onnxruntime/core/optimizer/nchwc_transformer.cc
+++ b/onnxruntime/core/optimizer/nchwc_transformer.cc
@@ -1149,15 +1149,15 @@ void NchwcTransformerImpl::TrackTransposeFromNhwc(Node& node) {
 }
 
 void NchwcTransformerImpl::Transform(Node& node) {
-  if (graph_utils::IsSupportedOptypeVersionAndDomain(node, "Transpose", {1, 13})) {
+  if (graph_utils::IsSupportedOptypeVersionAndDomain(node, "Transpose", {1, 13, 21, 23, 24, 25})) {
     TrackTransposeFromNhwc(node);
   }
 
-  if (graph_utils::IsSupportedOptypeVersionAndDomain(node, "Conv", {1, 11}) ||
+  if (graph_utils::IsSupportedOptypeVersionAndDomain(node, "Conv", {1, 11, 22}) ||
       graph_utils::IsSupportedOptypeVersionAndDomain(node, "FusedConv", {1}, kMSDomain)) {
     TransformConv(node);
-  } else if (graph_utils::IsSupportedOptypeVersionAndDomain(node, "MaxPool", {1, 8, 10, 11, 12}) ||
-             graph_utils::IsSupportedOptypeVersionAndDomain(node, "AveragePool", {1, 7, 10, 11})) {
+  } else if (graph_utils::IsSupportedOptypeVersionAndDomain(node, "MaxPool", {1, 8, 10, 11, 12, 22}) ||
+             graph_utils::IsSupportedOptypeVersionAndDomain(node, "AveragePool", {1, 7, 10, 11, 19, 22})) {
     TransformPool(node);
   } else if (node.GetInputEdgesCount() == 0 && node.InputDefs().size() != 0) {
     // The following transforms only run when the input edge count has already
@@ -1176,15 +1176,15 @@ void NchwcTransformerImpl::Transform(Node& node) {
              graph_utils::IsSupportedOptypeVersionAndDomain(node, "Sigmoid", {6, 13}) ||
              graph_utils::IsSupportedOptypeVersionAndDomain(node, "Tanh", {6, 13})) {
     TransformActivation(node);
-  } else if (graph_utils::IsSupportedOptypeVersionAndDomain(node, "BatchNormalization", {7, 9, 14})) {
+  } else if (graph_utils::IsSupportedOptypeVersionAndDomain(node, "BatchNormalization", {7, 9, 14, 15})) {
     TransformBatchNormalization(node);
-  } else if (graph_utils::IsSupportedOptypeVersionAndDomain(node, "Transpose", {1, 13})) {
+  } else if (graph_utils::IsSupportedOptypeVersionAndDomain(node, "Transpose", {1, 13, 21, 23, 24, 25})) {
     TransformTransposeToNhwc(node);
   } else if (graph_utils::IsSupportedOptypeVersionAndDomain(node, "Upsample", {9, 13}) ||
-             graph_utils::IsSupportedOptypeVersionAndDomain(node, "Resize", {10, 11, 13})) {
+             graph_utils::IsSupportedOptypeVersionAndDomain(node, "Resize", {10, 11, 13, 18, 19})) {
     TransformResize(node);
-  } else if (graph_utils::IsSupportedOptypeVersionAndDomain(node, "GlobalMaxPool", {1}) ||
-             graph_utils::IsSupportedOptypeVersionAndDomain(node, "GlobalAveragePool", {1})) {
+  } else if (graph_utils::IsSupportedOptypeVersionAndDomain(node, "GlobalMaxPool", {1, 22}) ||
+             graph_utils::IsSupportedOptypeVersionAndDomain(node, "GlobalAveragePool", {1, 22})) {
     // Convert these pooling types only if the input is already in NCHWc format.
     TransformPool(node);
   }
diff --git a/onnxruntime/core/optimizer/not_where_fusion.cc b/onnxruntime/core/optimizer/not_where_fusion.cc
index 862b192a10cce..618374f8e5347 100644
--- a/onnxruntime/core/optimizer/not_where_fusion.cc
+++ b/onnxruntime/core/optimizer/not_where_fusion.cc
@@ -39,7 +39,7 @@ Condition -> Where -> v0----|
 */
 bool NotWhereFusion::SatisfyCondition(const Graph& graph, const Node& node, const logging::Logger& logger) const {
-  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Where", {9})) {
+  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Where", {9, 16})) {
     return false;
   }
 
@@ -54,7 +54,7 @@ bool NotWhereFusion::SatisfyCondition(const Graph& graph, const Node& node, cons
   if (p_not_node->GetOutputEdgesCount() > 1) {
     // all consumers of not must be where
     for (auto it = p_not_node->OutputNodesBegin(); it != p_not_node->OutputNodesEnd(); ++it) {
-      if (!graph_utils::IsSupportedOptypeVersionAndDomain(*it, "Where", {9})) {
+      if (!graph_utils::IsSupportedOptypeVersionAndDomain(*it, "Where", {9, 16})) {
         return false;
       }
     }
diff --git a/onnxruntime/core/optimizer/pad_fusion.cc b/onnxruntime/core/optimizer/pad_fusion.cc
index d0b6d42fd46c9..b364b770a425c 100644
--- a/onnxruntime/core/optimizer/pad_fusion.cc
+++ b/onnxruntime/core/optimizer/pad_fusion.cc
@@ -9,9 +9,9 @@ namespace onnxruntime {
 
 bool VerifyNotCastChild(const Node& child_node) {
-  if (!graph_utils::IsSupportedOptypeVersionAndDomain(child_node, "Conv", {1, 11}) &&
-      !graph_utils::IsSupportedOptypeVersionAndDomain(child_node, "AveragePool", {7, 10, 11, 19}) &&
-      !graph_utils::IsSupportedOptypeVersionAndDomain(child_node, "MaxPool", {1, 8, 10, 11, 12})) {
+  if (!graph_utils::IsSupportedOptypeVersionAndDomain(child_node, "Conv", {1, 11, 22}) &&
+      !graph_utils::IsSupportedOptypeVersionAndDomain(child_node, "AveragePool", {7, 10, 11, 19, 22}) &&
+      !graph_utils::IsSupportedOptypeVersionAndDomain(child_node, "MaxPool", {1, 8, 10, 11, 12, 22})) {
     return false;
   }
 
@@ -90,7 +90,7 @@ void UpdatePaddingAttribute(Node& child_node, const std::vector& pads_v
  */
 bool PadFusion::SatisfyCondition(const Graph& graph, const Node& node, const logging::Logger&) const {
   // if Pad has input axis, don't fuse it.
-  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Pad", {1, 2, 11, 13, 18, 19}) ||
+  if (!graph_utils::IsSupportedOptypeVersionAndDomain(node, "Pad", {1, 2, 11, 13, 18, 19, 21, 23, 24, 25}) ||
       node.GetOutputEdgesCount() != 1 ||
       node.InputDefs().size() > 3) {
     return false;
@@ -130,7 +130,7 @@ bool PadFusion::SatisfyCondition(const Graph& graph, const Node& node, const log
   }
 
   const Node& child_node = *node.OutputNodesBegin();
-  if (graph_utils::IsSupportedOptypeVersionAndDomain(child_node, "Cast", {1, 6, 9, 13})) {
+  if (graph_utils::IsSupportedOptypeVersionAndDomain(child_node, "Cast", {1, 6, 9, 13, 19, 21, 23, 24, 25})) {
     if (child_node.GetOutputEdgesCount() != 1) {
       return false;
     }
diff --git a/onnxruntime/core/optimizer/pre_shape_node_elimination.cc b/onnxruntime/core/optimizer/pre_shape_node_elimination.cc
index 8f50ef7c09c95..f64ee1d4ec8f5 100644
--- a/onnxruntime/core/optimizer/pre_shape_node_elimination.cc
+++ b/onnxruntime/core/optimizer/pre_shape_node_elimination.cc
@@ -48,7 +48,7 @@ bool PreShapeNodeElimination::SatisfyCondition(const Graph& graph, const Node& n
 
   for (const Node* next_node : output_nodes) {
     // Check if the next node is not of type "Shape"
-    if (!next_node || !graph_utils::IsSupportedOptypeVersionAndDomain(*next_node, "Shape", {13, 15, 19}, kOnnxDomain)) {
+    if (!next_node || !graph_utils::IsSupportedOptypeVersionAndDomain(*next_node, "Shape", {13, 15, 19, 21, 23, 24, 25}, kOnnxDomain)) {
       return false;
     }
   }
diff --git a/onnxruntime/core/optimizer/qdq_transformer/qdq_propagation.cc b/onnxruntime/core/optimizer/qdq_transformer/qdq_propagation.cc
index 7b518947138a5..b8252bc7a75b4 100644
--- a/onnxruntime/core/optimizer/qdq_transformer/qdq_propagation.cc
+++ b/onnxruntime/core/optimizer/qdq_transformer/qdq_propagation.cc
@@ -21,11 +21,11 @@ using onnxruntime::graph_utils::ExtendedGraphEdge;
 
 namespace onnxruntime {
 namespace {
 bool CanNodePropagate(const Node& node) {
-  return graph_utils::IsSupportedOptypeVersionAndDomain(node, "MaxPool", {12}) ||
-         graph_utils::IsSupportedOptypeVersionAndDomain(node, "Reshape", {5, 13, 14, 19, 21}) ||
-         graph_utils::IsSupportedOptypeVersionAndDomain(node, "Transpose", {1, 13, 21}) ||
-         graph_utils::IsSupportedOptypeVersionAndDomain(node, "Squeeze", {1, 11, 13, 21}) ||
-         graph_utils::IsSupportedOptypeVersionAndDomain(node, "Unsqueeze", {1, 11, 13, 21}) ||
+  return graph_utils::IsSupportedOptypeVersionAndDomain(node, "MaxPool", {12, 22}) ||
+         graph_utils::IsSupportedOptypeVersionAndDomain(node, "Reshape", {5, 13, 14, 19, 21, 23, 24, 25}) ||
+         graph_utils::IsSupportedOptypeVersionAndDomain(node, "Transpose", {1, 13, 21, 23, 24, 25}) ||
+         graph_utils::IsSupportedOptypeVersionAndDomain(node, "Squeeze", {1, 11, 13, 21, 23, 24, 25}) ||
+         graph_utils::IsSupportedOptypeVersionAndDomain(node, "Unsqueeze", {1, 11, 13, 21, 23, 24, 25}) ||
          graph_utils::IsSupportedOptypeVersionAndDomain(node, "Slice", {1, 10, 11, 13});
 }
diff --git a/onnxruntime/core/optimizer/reshape_fusion.cc b/onnxruntime/core/optimizer/reshape_fusion.cc
index daab9bba278aa..6a2b4295093d8 100644
--- a/onnxruntime/core/optimizer/reshape_fusion.cc
+++ b/onnxruntime/core/optimizer/reshape_fusion.cc
@@ -34,7 +34,7 @@ Status ReshapeFusion::ApplyImpl(Graph& graph, bool& modified, int graph_level, c
     Node& reshape = *p_reshape;
     ORT_RETURN_IF_ERROR(Recurse(reshape, modified, graph_level, logger));
 
-    if (!graph_utils::IsSupportedOptypeVersionAndDomain(reshape, "Reshape", {5, 13, 14}) ||
+    if (!graph_utils::IsSupportedOptypeVersionAndDomain(reshape, "Reshape", {5, 13, 14, 19, 21, 23, 24, 25}) ||
        !graph_utils::IsSupportedProvider(reshape, GetCompatibleExecutionProviders())) {
       continue;
     }
@@ -92,8 +92,8 @@ static bool Match_Linear_Subgraph_1(Graph& graph, const Node& concat, const Node
   const Node& reshape = *reshape_itr;
 
   std::vector linear_path{
-      {0, 0, "Add", {7}, kOnnxDomain},
-      {0, 0, "MatMul", {1, 9}, kOnnxDomain}};
+      {0, 0, "Add", {7, 13, 14}, kOnnxDomain},
+      {0, 0, "MatMul", {1, 9, 13}, kOnnxDomain}};
   std::vector edges;
   if (!graph_utils::FindPath(reshape, true, linear_path, edges, logger)) {
     return false;
   }
@@ -158,9 +158,9 @@ bool ReshapeFusion::Match_One_Element_Output_Subgraph_1(Graph& graph, const Node
                                                         int index, gsl::span shape_value, bool checkOneElementOnly,
                                                         const logging::Logger& logger) {
   std::vector parent_path{
-      {0, index, "Unsqueeze", {1, 11, 13}, kOnnxDomain},
+      {0, index, "Unsqueeze", {1, 11, 13, 21, 23, 24, 25}, kOnnxDomain},
       {0, 0, "Gather", {1, 11, 13}, kOnnxDomain},
-      {0, 0, "Shape", {1, 13, 15}, kOnnxDomain}};
+      {0, 0, "Shape", {1, 13, 15, 19, 21, 23, 24, 25}, kOnnxDomain}};
   std::vector edges;
   if (graph_utils::FindPath(concat, true, parent_path, edges, logger)) {
     const Node& unsqueeze = edges[0]->GetNode();
@@ -204,9 +204,9 @@ bool ReshapeFusion::Match_One_Element_Output_Subgraph_1(Graph& graph, const Node
 bool ReshapeFusion::Match_One_Element_Output_Subgraph_2(Graph& graph, const NodeArg& root_input, const Node& cur_node,
                                                         int index, const logging::Logger& logger) {
   std::vector parent_path{
-      {0, index, "Squeeze", {1, 11, 13}, kOnnxDomain},
+      {0, index, "Squeeze", {1, 11, 13, 21, 23, 24, 25}, kOnnxDomain},
       {0, 0, "Slice", {1, 11, 13}, kOnnxDomain},
-      {0, 0, "Shape", {1, 13}, kOnnxDomain}};
+      {0, 0, "Shape", {1, 13, 15, 19, 21, 23, 24, 25}, kOnnxDomain}};
   std::vector edges;
   if (graph_utils::FindPath(cur_node, true, parent_path, edges, logger)) {
     const Node& slice = edges[1]->GetNode();
@@ -282,15 +282,15 @@ bool ReshapeFusion::Is_One_Element_Output_Subgraph(Graph& graph, const NodeArg&
   }
 
   std::vector div_path{
-      {0, index, "Unsqueeze", {1, 11, 13}, kOnnxDomain},
+      {0, index, "Unsqueeze", {1, 11, 13, 21, 23, 24, 25}, kOnnxDomain},
       {0, 0, "Div", {7, 13, 14}, kOnnxDomain}};
 
   std::vector mul_path{
-      {0, index, "Unsqueeze", {1, 11, 13}, kOnnxDomain},
+      {0, index, "Unsqueeze", {1, 11, 13, 21, 23, 24, 25}, kOnnxDomain},
       {0, 0, "Mul", {7, 13, 14}, kOnnxDomain}};
 
   std::vector unsqueeze_path{
-      {0, index, "Unsqueeze", {1, 11, 13}, kOnnxDomain}};
+      {0, index, "Unsqueeze", {1, 11, 13, 21, 23, 24, 25}, kOnnxDomain}};
 
   std::vector edges;
   if (graph_utils::FindPath(concat, true, div_path, edges, logger) ||
diff --git a/onnxruntime/test/optimizer/conv_add_act_test.cc b/onnxruntime/test/optimizer/conv_add_act_test.cc
index f61f9b29d9cce..bb409a2bbb82e 100644
--- a/onnxruntime/test/optimizer/conv_add_act_test.cc
+++ b/onnxruntime/test/optimizer/conv_add_act_test.cc
@@ -36,8 +36,8 @@ void TestConvPath(const std::vector& input_shape, const std::vector
-  const std::vector opsets{12, 13, 14};  // Clip support int64_t since opset 12
+  const std::vector opsets{12, 13, 14, 23};  // Clip support int64_t since opset 12
   for (auto& opset_version : opsets) {
     std::unique_ptr transformer = std::make_unique();
     ASSERT_STATUS_OK(TestGraphTransformer(build_test_case, opset_version, *logger_, std::move(transformer), TransformerLevel::Level1,
@@ -7156,7 +7156,7 @@ TEST_F(GraphTransformationTests, ConstantSharing_ShareFloatOrHalfTypedInitialize
     return Status::OK();
   };
 
-  const std::vector opsets{12, 13, 14};  // Clip support int64_t since opset 12
+  const std::vector opsets{12, 13, 14, 23};  // Clip support int64_t since opset 12
 
   // Float data type tests.
   auto build_test_case_float = [&](ModelTestBuilder& builder) {
@@ -7280,7 +7280,7 @@ TEST_F(GraphTransformationTests, ConstantSharing_Share2DFloatOrHalfTypedInitiali
     return Status::OK();
   };
 
-  const std::vector opsets{12, 13, 14};  // Clip support int64_t since opset 12
+  const std::vector opsets{12, 13, 14, 23};  // Clip support int64_t since opset 12
 
   // Float data type tests.
   auto build_test_case_float = [&](ModelTestBuilder& builder) {
@@ -7386,7 +7386,7 @@ TEST_F(GraphTransformationTests, ConstantSharing_ShareFloatAndHalfTypedInitializ
     return Status::OK();
   };
 
-  const std::vector opsets{12, 13, 14};
+  const std::vector opsets{12, 13, 14, 23};
 
   auto build_test_case_float = [&](ModelTestBuilder& builder) {
     auto* input0_arg = builder.MakeInput({{1, 1, 256, 256}});
@@ -7525,7 +7525,7 @@ TEST_F(GraphTransformationTests, ConstantSharing_Share2DFloatAndHalfTypedInitial
     return Status::OK();
   };
 
-  const std::vector opsets{12, 13, 14};
+  const std::vector opsets{12, 13, 14, 23};
 
   std::vector values{0.0f, 1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f};
   std::vector values_float16;
@@ -7651,7 +7651,7 @@ TEST_F(GraphTransformationTests, ConstantSharing_ShareIntMaxOrFloatInfinityIniti
     return Status::OK();
   };
 
-  const std::vector opsets{12, 13, 14};
+  const std::vector opsets{12, 13, 14, 23};
 
   // Float data type tests.
   auto build_test_case_float = [&](ModelTestBuilder& builder) {
diff --git a/onnxruntime/test/optimizer/nchwc_optimizer_test.cc b/onnxruntime/test/optimizer/nchwc_optimizer_test.cc
index fc0ba86c7f1f6..7dfa5c7812f6e 100644
--- a/onnxruntime/test/optimizer/nchwc_optimizer_test.cc
+++ b/onnxruntime/test/optimizer/nchwc_optimizer_test.cc
@@ -553,7 +553,7 @@ TEST(NchwcOptimizerTests, ConvAddFusion) {
   // Verify that Add or Sum can be fused into a preceding NCHWc Conv node,
   // with an optional Relu node following.
   std::vector op_types{"Add", "Sum"};
-  static const int opset_versions[] = {7, 10, 11, 12};
+  static const int opset_versions[] = {7, 10, 11, 12, 14, 22};
   for (auto& op_type : op_types) {
     for (auto opset_version : opset_versions) {
       test_case(op_type, opset_version, false);
@@ -1271,7 +1271,7 @@ TEST(NchwcOptimizerTests, UpsampleNearest) {
 
   // Verify that upsample nodes can be converted to the NCHWc format for
   // various versions of the operator.
-  static const int opset_versions[] = {9, 10, 11, 13};
+  static const int opset_versions[] = {9, 10, 11, 13, 18, 19};
   for (auto opset_version : opset_versions) {
     test_case(opset_version, 1.f, 1.f, false);
     test_case(opset_version, 2.f, 2.f, false);
@@ -1327,7 +1327,7 @@ TEST(NchwcOptimizerTests, UpsampleLinear) {
   // various versions of the operator.
   std::vector transformation_modes{"asymmetric", "align_corners", "half_pixel"};
   for (auto& transformation_mode : transformation_modes) {
-    static const int opset_versions[] = {9, 10, 11, 13};
+    static const int opset_versions[] = {9, 10, 11, 13, 18, 19};
     for (auto opset_version : opset_versions) {
       // Older versions of the operator do not support transformation modes.
      if (opset_version < 11 && transformation_mode == "asymmetric") {
diff --git a/onnxruntime/test/optimizer/qdq_transformer_test.cc b/onnxruntime/test/optimizer/qdq_transformer_test.cc
index 7fe68a38d23a0..37da2d4247e34 100644
--- a/onnxruntime/test/optimizer/qdq_transformer_test.cc
+++ b/onnxruntime/test/optimizer/qdq_transformer_test.cc
@@ -3584,6 +3584,11 @@ TEST(QDQTransformerTests, QDQPropagation_QBackward) {
                       check_graph,
                       TransformerLevel::Default,
                       TransformerLevel::Level1);
+    TransformerTester(build_test_case,
+                      check_graph,
+                      TransformerLevel::Default,
+                      TransformerLevel::Level1,
+                      21);
   };
 
   test_case({1, 13, 13, 23}, 4, {0, 3, 1, 2}, false, false, false /*use_contrib_qdq*/);
@@ -3723,7 +3728,12 @@ TEST(QDQTransformerTests, QDQPropagation_DQForward) {
                       TransformerLevel::Level1,
                       18, 0.0, 0.0, nullptr, {},  // defaults that we're not overriding
                       {"TransposeOptimizer"});    // disable TransposeOptimizer for simplicity
-    // TODO: fix opset 19
+    TransformerTester(build_test_case,
+                      check_graph,
+                      TransformerLevel::Default,
+                      TransformerLevel::Level1,
+                      21, 0.0, 0.0, nullptr, {},  // defaults that we're not overriding
+                      {"TransposeOptimizer"});    // disable TransposeOptimizer for simplicity
   };
 
   test_case({1, 13, 13, 23}, 4, {0, 3, 1, 2}, false, false, false /*use_contrib_qdq*/);
diff --git a/onnxruntime/test/optimizer/transpose_optimizer_test.cc b/onnxruntime/test/optimizer/transpose_optimizer_test.cc
index 3cc7eb8f3675f..94383c5de99ba 100644
--- a/onnxruntime/test/optimizer/transpose_optimizer_test.cc
+++ b/onnxruntime/test/optimizer/transpose_optimizer_test.cc
@@ -123,7 +123,7 @@ TEST(TransposeOptimizerTests, TestSplit) {
                     check_optimized_graph_1,
                     TransformerLevel::Default,
                     TransformerLevel::Level1,
-                    /*opset_version*/ {15, 18});
+                    /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestSplitDefaultAxis) {
@@ -156,7 +156,7 @@ TEST(TransposeOptimizerTests, TestSplitDefaultAxis) {
                     check_optimized_graph_1,
                     TransformerLevel::Default,
                     TransformerLevel::Level1,
-                    /*opset_version*/ {15, 18});
+                    /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestSplitNegativeAxis) {
@@ -190,7 +190,7 @@ TEST(TransposeOptimizerTests, TestSplitNegativeAxis) {
                     check_optimized_graph_1,
                     TransformerLevel::Default,
                     TransformerLevel::Level1,
-                    /*opset_version*/ {15, 18});
+                    /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestConcat) {
@@ -221,7 +221,7 @@ TEST(TransposeOptimizerTests, TestConcat) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestPad) {
@@ -257,7 +257,7 @@ TEST(TransposeOptimizerTests, TestPad) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {10, 18});
+                   /*opset_version*/ {10, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestPadOpset15) {
@@ -286,7 +286,7 @@ TEST(TransposeOptimizerTests, TestPadOpset15) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestPadNonconst) {
@@ -318,7 +318,7 @@ TEST(TransposeOptimizerTests, TestPadNonconst) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {11, 18});
+                   /*opset_version*/ {11, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestResize) {
@@ -382,7 +382,7 @@ TEST(TransposeOptimizerTests, TestResizeOpset11) {
                    // need the level 2 TransposeOptimizer as pushing a Transpose through a Resize requires it to be
                    // assigned to the CPU EP first
                    TransformerLevel::Level2,
-                   /*opset_version*/ {11, 18});
+                   /*opset_version*/ {11, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestResizeOpset15) {
@@ -412,7 +412,7 @@ TEST(TransposeOptimizerTests, TestResizeOpset15) {
                    // need the level 2 TransposeOptimizer as pushing a Transpose through a Resize requires it to be
                    // assigned to the CPU EP first
                    TransformerLevel::Level2,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestResizeSizeRoi) {
@@ -444,7 +444,7 @@ TEST(TransposeOptimizerTests, TestResizeSizeRoi) {
                    // need the level 2 TransposeOptimizer as pushing a Transpose through a Resize requires it to be
                    // assigned to the CPU EP first
                    TransformerLevel::Level2,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestResizeRoiScalesZeroRank0) {
@@ -511,7 +511,7 @@ TEST(TransposeOptimizerTests, TestResizeNonconst) {
                    // need the level 2 TransposeOptimizer as pushing a Transpose through a Resize requires it to be
                    // assigned to the CPU EP first
                    TransformerLevel::Level2,
-                   /*opset_version*/ {11, 18});
+                   /*opset_version*/ {11, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestResizeNonconstOpset13) {
@@ -542,7 +542,7 @@ TEST(TransposeOptimizerTests, TestResizeNonconstOpset13) {
                    // need the level 2 TransposeOptimizer as pushing a Transpose through a Resize requires it to be
                    // assigned to the CPU EP first
                    TransformerLevel::Level2,
-                   /*opset_version*/ {13, 18});
+                   /*opset_version*/ {13, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestAdd) {
@@ -569,7 +569,7 @@ TEST(TransposeOptimizerTests, TestAdd) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestShape) {
@@ -615,7 +615,7 @@ TEST(TransposeOptimizerTests, TestShapeOpset15) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestShapeSliceNoStart) {
@@ -639,7 +639,7 @@ TEST(TransposeOptimizerTests, TestShapeSliceNoStart) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestShapeSliceNegativeEnd) {
@@ -663,7 +663,7 @@ TEST(TransposeOptimizerTests, TestShapeSliceNegativeEnd) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestShapeSliceNegativeStartNoEnd) {
@@ -687,7 +687,7 @@ TEST(TransposeOptimizerTests, TestShapeSliceNegativeStartNoEnd) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestShapeSliceStartAndEnd) {
@@ -712,7 +712,7 @@ TEST(TransposeOptimizerTests, TestShapeSliceStartAndEnd) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestShapeSliceEmptyResult) {
@@ -737,7 +737,7 @@ TEST(TransposeOptimizerTests, TestShapeSliceEmptyResult) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestReduceSumKeepdimsTrue) {
@@ -771,7 +771,7 @@ TEST(TransposeOptimizerTests, TestReduceSumKeepdimsTrue) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {7, 18},
+                   /*opset_version*/ {7, 18, 23},
                    /*per_sample_tolerance*/ 1e-07,
                    /*relative_per_sample_tolerance*/ 1e-06);
 }
@@ -797,7 +797,7 @@ TEST(TransposeOptimizerTests, TestReduceSumEmptyAxesKeepdimsTrue) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {7, 18},
+                   /*opset_version*/ {7, 18, 23},
                    /*per_sample_tolerance*/ 1e-07,
                    /*relative_per_sample_tolerance*/ 1e-06);
 }
@@ -833,7 +833,7 @@ TEST(TransposeOptimizerTests, TestReduceSumKeepdimsFalse) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {7, 18},
+                   /*opset_version*/ {7, 18, 23},
                    /*per_sample_tolerance*/ 1e-07,
                    /*relative_per_sample_tolerance*/ 1e-06);
 }
@@ -859,7 +859,7 @@ TEST(TransposeOptimizerTests, TestReduceSumEmptyAxesKeepdimsFalse) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {7, 18},
+                   /*opset_version*/ {7, 18, 23},
                    /*per_sample_tolerance*/ 1e-07,
                    /*relative_per_sample_tolerance*/ 1e-06);
 }
@@ -1235,7 +1235,7 @@ TEST(TransposeOptimizerTests, TestReduceMaxKeepdimsTrue) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestReduceMaxKeepdimsTrueDefaultAxes) {
@@ -1259,7 +1259,7 @@ TEST(TransposeOptimizerTests, TestReduceMaxKeepdimsTrueDefaultAxes) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestReduceMaxKeepdimsFalse) {
@@ -1293,7 +1293,7 @@ TEST(TransposeOptimizerTests, TestReduceMaxKeepdimsFalse) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestReduceMaxKeepdimsFalseDefaultAxes) {
@@ -1317,7 +1317,7 @@ TEST(TransposeOptimizerTests, TestReduceMaxKeepdimsFalseDefaultAxes) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestReduceMax) {
@@ -1349,7 +1349,7 @@ TEST(TransposeOptimizerTests, TestReduceMax) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestReduceMaxDefaultAxes) {
@@ -1372,7 +1372,7 @@ TEST(TransposeOptimizerTests, TestReduceMaxDefaultAxes) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestReduceOpsReduceLogSum) {
@@ -1406,7 +1406,7 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceLogSum) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestReduceOpsReduceLogSumExp) {
@@ -1440,7 +1440,7 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceLogSumExp) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestReduceOpsReduceMax) {
@@ -1474,7 +1474,7 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceMax) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestReduceOpsReduceMean) {
@@ -1508,7 +1508,7 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceMean) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestReduceOpsReduceMin) {
@@ -1542,7 +1542,7 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceMin) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestReduceOpsReduceProd) {
@@ -1576,7 +1576,7 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceProd) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestReduceOpsReduceSumSquare) {
@@ -1610,7 +1610,7 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceSumSquare) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestReduceOpsReduceL1) {
@@ -1644,7 +1644,7 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceL1) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestReduceOpsReduceL2) {
@@ -1678,7 +1678,7 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceL2) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestSqueezeOpset7) {
@@ -1781,7 +1781,7 @@ TEST(TransposeOptimizerTests, TestSqueezeEmptyNoOpt) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {7, 18});
+                   /*opset_version*/ {7, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestSqueezeEmptyNoOptOpset15) {
@@ -1826,7 +1826,7 @@ TEST(TransposeOptimizerTests, TestSqueezeNonconstNoOpt) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestUnsqueezeOpset7) {
@@ -1933,7 +1933,7 @@ TEST(TransposeOptimizerTests, TestUnsqueezeNonconstNoOpt) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ 14);
+                   /*opset_version*/ {14, 23});
 }
 
 TEST(TransposeOptimizerTests, TestSlice) {
@@ -2019,7 +2019,7 @@ TEST(TransposeOptimizerTests, TestSliceOpset15) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestSliceNoAxesOpset15) {
@@ -2047,7 +2047,7 @@ TEST(TransposeOptimizerTests, TestSliceNoAxesOpset15) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestSliceNegativeAxesInt32) {
@@ -2076,7 +2076,7 @@ TEST(TransposeOptimizerTests, TestSliceNegativeAxesInt32) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestSliceStepsInt32) {
@@ -2106,7 +2106,7 @@ TEST(TransposeOptimizerTests, TestSliceStepsInt32) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestSliceNegativeAxes) {
@@ -2135,7 +2135,7 @@ TEST(TransposeOptimizerTests, TestSliceNegativeAxes) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestSliceSteps) {
@@ -2165,7 +2165,7 @@ TEST(TransposeOptimizerTests, TestSliceSteps) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestSliceNonconstNoOpt) {
@@ -2193,7 +2193,7 @@ TEST(TransposeOptimizerTests, TestSliceNonconstNoOpt) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestSliceNonconstInt32NoOpt) {
@@ -2221,7 +2221,7 @@ TEST(TransposeOptimizerTests, TestSliceNonconstInt32NoOpt) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestSliceDefaultAxesNonconstStarts) {
@@ -2249,7 +2249,7 @@ TEST(TransposeOptimizerTests, TestSliceDefaultAxesNonconstStarts) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestSliceDefaultAxesNonconstStartsUnknownLengthNoOpt) {
@@ -2276,7 +2276,7 @@ TEST(TransposeOptimizerTests, TestSliceDefaultAxesNonconstStartsUnknownLengthNoO
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestSliceDefaultAxesNonconstStartsInt32) {
@@ -2304,7 +2304,7 @@ TEST(TransposeOptimizerTests, TestSliceDefaultAxesNonconstStartsInt32) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestSliceDefaultAxesNonconstStartsUnknownLengthInt32NoOpt) {
@@ -2331,7 +2331,7 @@ TEST(TransposeOptimizerTests, TestSliceDefaultAxesNonconstStartsUnknownLengthInt
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestTile) {
@@ -2358,7 +2358,7 @@ TEST(TransposeOptimizerTests, TestTile) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestTileNonconstReps) {
@@ -2385,7 +2385,7 @@ TEST(TransposeOptimizerTests, TestTileNonconstReps) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestArgMinNoAxisKeepdimsTrue) {
@@ -2412,7 +2412,7 @@ TEST(TransposeOptimizerTests, TestArgMinNoAxisKeepdimsTrue) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestArgMinNoAxisKeepdimsFalse) {
@@ -2439,7 +2439,7 @@ TEST(TransposeOptimizerTests, TestArgMinNoAxisKeepdimsFalse) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestArgMinNoAxis) {
@@ -2465,7 +2465,7 @@ TEST(TransposeOptimizerTests, TestArgMinNoAxis) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestArgMinKeepdimsTrue) {
@@ -2493,7 +2493,7 @@ TEST(TransposeOptimizerTests, TestArgMinKeepdimsTrue) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestArgMinKeepdimsFalse) {
@@ -2521,7 +2521,7 @@ TEST(TransposeOptimizerTests, TestArgMinKeepdimsFalse) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestArgMin) {
@@ -2548,7 +2548,7 @@ TEST(TransposeOptimizerTests, TestArgMin) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestArgMax) {
@@ -2576,7 +2576,7 @@ TEST(TransposeOptimizerTests, TestArgMax) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestSoftmax) {
@@ -2603,7 +2603,7 @@ TEST(TransposeOptimizerTests, TestSoftmax) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ 12,
+                   /*opset_version*/ {12, 23},
                    /*per_sample_tolerance*/ 1e-07,
                    /*relative_per_sample_tolerance*/ 1e-06);
 }
@@ -2631,7 +2631,7 @@ TEST(TransposeOptimizerTests, TestSoftmaxNoAxis) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ 12,
+                   /*opset_version*/ {12, 23},
                    /*per_sample_tolerance*/ 1e-07,
                    /*relative_per_sample_tolerance*/ 1e-06);
 }
@@ -2660,7 +2660,7 @@ TEST(TransposeOptimizerTests, TestSoftmax_2) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ 12,
+                   /*opset_version*/ {12, 23},
                    /*per_sample_tolerance*/ 1e-07,
                    /*relative_per_sample_tolerance*/ 1e-06);
 }
@@ -2688,7 +2688,7 @@ TEST(TransposeOptimizerTests, TestSoftmaxNoOptimization) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ 12,
+                   /*opset_version*/ {12, 23},
                    /*per_sample_tolerance*/ 1e-07,
                    /*relative_per_sample_tolerance*/ 1e-06);
 }
@@ -2716,7 +2716,7 @@ TEST(TransposeOptimizerTests, TestSoftmaxNoOptimization_2) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ 12,
+                   /*opset_version*/ {12, 23},
                    /*per_sample_tolerance*/ 1e-07,
                    /*relative_per_sample_tolerance*/ 1e-06);
 }
@@ -2744,7 +2744,7 @@ TEST(TransposeOptimizerTests, TestSoftmaxNoOptimization_3) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ 12,
+                   /*opset_version*/ {12, 23},
                    /*per_sample_tolerance*/ 1e-07,
                    /*relative_per_sample_tolerance*/ 1e-06);
 }
@@ -2829,7 +2829,7 @@ TEST(TransposeOptimizerTests, TestHardmaxAndLogSoftmaxNoAxis) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ 15,
+                   /*opset_version*/ {15, 23},
                    /*per_sample_tolerance*/ 1e-07,
                    /*relative_per_sample_tolerance*/ 1e-06);
 }
@@ -2857,7 +2857,7 @@ TEST(TransposeOptimizerTests, TestHardmaxAndLogSoftmaxNoAxis_2) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ 15,
+                   /*opset_version*/ {15, 23},
                    /*per_sample_tolerance*/ 1e-07,
                    /*relative_per_sample_tolerance*/ 1e-06);
 }
@@ -2889,7 +2889,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsAdd) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestBroadcastOpsMul) {
@@ -2919,7 +2919,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsMul) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestBroadcastOpsSub) {
@@ -2949,7 +2949,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsSub) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestBroadcastOpsDiv) {
@@ -2982,7 +2982,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsDiv) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18},
+                   /*opset_version*/ {15, 18, 23},
                    /*per_sample_tolerance*/ 1e-07,
                    /*relative_per_sample_tolerance*/ 1e-06);
 #else
@@ -2990,7 +2990,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsDiv) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 #endif  // defined(_M_ARM64) && _MSC_VER >= 1930
 }
 
TEST(TransposeOptimizerTests, TestBroadcastOpsPRelu) {
@@ -3021,7 +3021,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsPRelu) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestBroadcastOpsGreater) {
@@ -3051,7 +3051,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsGreater) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestBroadcastOpsLess) {
@@ -3081,7 +3081,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsLess) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestBroadcastOpsPow) {
@@ -3111,7 +3111,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsPow) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestBroadcastOpsMax) {
@@ -3141,7 +3141,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsMax) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestBroadcastOpsMin) {
@@ -3171,7 +3171,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsMin) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestBroadcastOpsMean) {
@@ -3201,7 +3201,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsMean) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestBroadcastOpsSum) {
@@ -3231,7 +3231,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsSum) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestBroadcastOpsGreaterOrEqual) {
@@ -3261,7 +3261,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsGreaterOrEqual) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestBroadcastOpsLessOrEqual) {
@@ -3291,7 +3291,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsLessOrEqual) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestBroadcastOpsEqual) {
@@ -3321,7 +3321,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsEqual) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestBroadcastOpsAnd) {
@@ -3351,7 +3351,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsAnd) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestBroadcastOpsOr) {
@@ -3381,7 +3381,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsOr) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestBroadcastOpsXor) {
@@ -3411,7 +3411,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsXor) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestBroadcastOpsMod) {
@@ -3442,7 +3442,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsMod) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestBroadcastOpsBitShift) {
@@ -3473,7 +3473,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsBitShift) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestWhere) {
@@ -3504,7 +3504,7 @@ TEST(TransposeOptimizerTests, TestWhere) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 // Utility function that runs TransformerTester for the graph Transpose -> QuantizeLinear -> Transpose.
@@ -3797,7 +3797,7 @@ TEST(TransposeOptimizerTests, TestCast) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestQLinearSoftmax) {
@@ -3827,17 +3827,19 @@ TEST(TransposeOptimizerTests, TestQLinearSoftmax) {
     EXPECT_EQ(transpose_cost, 0);
   };
 
-  TransformerTester(build_test_case_1,
-                    check_optimized_graph_1,
-                    TransformerLevel::Level2,
-                    TransformerLevel::Level3,
-                    /*opset_version*/ 13,
-                    /*per_sample_tolerance*/ 0.0,
-                    /*relative_per_sample_tolerance*/ 0.0,
-                    /*transformer*/ nullptr,
-                    /*add_session_options*/ {},
-                    /*disabled_optimizers*/ {},
-                    /*ep*/ DefaultCpuExecutionProvider());
+  for (int opset_version : {13, 23}) {
+    TransformerTester(build_test_case_1,
+                      check_optimized_graph_1,
+                      TransformerLevel::Level2,
+                      TransformerLevel::Level3,
+                      /*opset_version*/ opset_version,
+                      /*per_sample_tolerance*/ 0.0,
+                      /*relative_per_sample_tolerance*/ 0.0,
+                      /*transformer*/ nullptr,
+                      /*add_session_options*/ {},
+                      /*disabled_optimizers*/ {},
+                      /*ep*/ DefaultCpuExecutionProvider());
+  }
 }
 
 TEST(TransposeOptimizerTests, TestBroadcastReusedInputs) {
@@ -3868,7 +3870,7 @@ TEST(TransposeOptimizerTests, TestBroadcastReusedInputs) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestTransposeGraphOutput) {
@@ -3896,7 +3898,7 @@ TEST(TransposeOptimizerTests, TestTransposeGraphOutput) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestSimpleReshapeAsTranspose) {
@@ -3929,7 +3931,7 @@ TEST(TransposeOptimizerTests, TestSimpleReshapeAsTranspose) {
                    check_optimized_graph,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestReshapeAsTransposeGraphOutput) {
@@ -3960,7 +3962,7 @@ TEST(TransposeOptimizerTests, TestReshapeAsTransposeGraphOutput) {
                    check_optimized_graph,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 enum class TransposeReshapeResult {
@@ -4029,7 +4031,7 @@ static void TestTransposeReshape(const std::vector& input_shape, //
                    check_optimized_graph,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ 15);
+                   /*opset_version*/ {15, 23});
 }
 
 // Transpose -> Reshape can be merged if the Reshape could also be expressed as a Transpose due to not changing the
@@ -4180,7 +4182,7 @@ TEST(TransposeOptimizerTests, TestCancelingNodesGraphOutputs) {
                    check_optimized_graph,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestNonCancelingReshapeDueToNonConstShape) {
@@ -4216,7 +4218,7 @@ TEST(TransposeOptimizerTests, TestNonCancelingReshapeDueToNonConstShape) {
                    check_optimized_graph,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests, TestPushBroadcastUnsqueezeTranspose) {
@@ -4251,7 +4253,7 @@ TEST(TransposeOptimizerTests, TestPushBroadcastUnsqueezeTranspose) {
                    check_optimized_graph_1,
                    TransformerLevel::Default,
                    TransformerLevel::Level1,
-                   /*opset_version*/ {15, 18});
+                   /*opset_version*/ {15, 18, 23});
 }
 
 TEST(TransposeOptimizerTests,
TestOptimizeTowardsTranspose) { @@ -4281,7 +4283,7 @@ TEST(TransposeOptimizerTests, TestOptimizeTowardsTranspose) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ {15, 18}); + /*opset_version*/ {15, 18, 23}); } TEST(TransposeOptimizerTests, TestOnlyOptimizeTowardsTranspose) { @@ -4308,7 +4310,7 @@ TEST(TransposeOptimizerTests, TestOnlyOptimizeTowardsTranspose) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ {15, 18}); + /*opset_version*/ {15, 18, 23}); } TEST(TransposeOptimizerTests, TestDontOptimizeWrongInput) { @@ -4334,7 +4336,7 @@ TEST(TransposeOptimizerTests, TestDontOptimizeWrongInput) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ {15, 18}); + /*opset_version*/ {15, 18, 23}); } TEST(TransposeOptimizerTests, TestOptimizeBothInputs) { @@ -4362,7 +4364,7 @@ TEST(TransposeOptimizerTests, TestOptimizeBothInputs) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ {15, 18}); + /*opset_version*/ {15, 18, 23}); } TEST(TransposeOptimizerTests, TestOmitIdentityTranspose) { @@ -4394,7 +4396,7 @@ TEST(TransposeOptimizerTests, TestOmitIdentityTranspose) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ {15, 18}); + /*opset_version*/ {15, 18, 23}); } // regression test for a model where the transpose optimizations were not completed in a single pass in level 1.