diff --git a/onnxruntime/core/optimizer/bias_softmax_fusion.cc b/onnxruntime/core/optimizer/bias_softmax_fusion.cc index 80603cdbd3270..7c34449d583cc 100755 --- a/onnxruntime/core/optimizer/bias_softmax_fusion.cc +++ b/onnxruntime/core/optimizer/bias_softmax_fusion.cc @@ -135,6 +135,7 @@ bool TrySelectInputAndBiasWithAlignment(Node& add_node, Node& softmax_node, Node new_axis = (int)HandleNegativeAxis(axis, rank); // The axis attribute for Softmax in OpSet-11 and OpSet-13 are different. + // Details in function documentation. if (is_since_opset_13 && new_axis != rank - 1) return false; int singlebatch_rank = rank - new_axis; diff --git a/onnxruntime/core/optimizer/layer_norm_fusion.cc b/onnxruntime/core/optimizer/layer_norm_fusion.cc index 9895918dd2653..25feb5b8d702c 100644 --- a/onnxruntime/core/optimizer/layer_norm_fusion.cc +++ b/onnxruntime/core/optimizer/layer_norm_fusion.cc @@ -4,6 +4,7 @@ #include "core/optimizer/layer_norm_fusion.h" #include "core/graph/graph_utils.h" #include "core/optimizer/utils.h" +#include "core/optimizer/transpose_optimizer/optimizer_api.h" #include "float.h" #include @@ -16,12 +17,17 @@ static constexpr std::array supported_data_types{"tensor(fl // Default epsilon static constexpr float DEFAULT_LAYERNORM_EPSILON = 1e-5f; -static bool IsSupportedDataType(const Node& node) { +static bool IsSupportedDataType(const Node& node, int first_n_inputs=-1) { + int input_index = 0; for (const auto& input_arg : node.InputDefs()) { + if (first_n_inputs != -1 && input_index >= first_n_inputs) { + return true; + } if (std::find(supported_data_types.begin(), supported_data_types.end(), *(input_arg->Type())) == supported_data_types.end()) { return false; } + ++input_index; } return true; } @@ -99,11 +105,11 @@ Status LayerNormFusion::ApplyImpl(Graph& graph, bool& modified, int graph_level, Node& reduce_mean_node = *p_reduce_mean; ORT_RETURN_IF_ERROR(Recurse(reduce_mean_node, modified, graph_level, logger)); - if 
(!graph_utils::IsSupportedOptypeVersionAndDomain(reduce_mean_node, "ReduceMean", {1, 11, 13}) || + if (!graph_utils::IsSupportedOptypeVersionAndDomain(reduce_mean_node, "ReduceMean", {1, 11, 13, 18}) || !graph_utils::IsSupportedProvider(reduce_mean_node, GetCompatibleExecutionProviders()) || (reduce_mean_node.GetOutputEdgesCount() != 1 && reduce_mean_node.GetOutputEdgesCount() != 2) || graph.NodeProducesGraphOutput(reduce_mean_node) || - !IsSupportedDataType(reduce_mean_node)) { + !IsSupportedDataType(reduce_mean_node, 1)) { continue; } nodes_to_remove.push_back(reduce_mean_node); @@ -263,10 +269,10 @@ Status LayerNormFusion::ApplyImpl(Graph& graph, bool& modified, int graph_level, continue; } Node& reduce_mean2_node = *graph.GetNode(p_reduce_mean2->Index()); - if (!graph_utils::IsSupportedOptypeVersionAndDomain(reduce_mean2_node, "ReduceMean", {1, 11, 13}) || + if (!graph_utils::IsSupportedOptypeVersionAndDomain(reduce_mean2_node, "ReduceMean", {1, 11, 13, 18}) || reduce_mean2_node.GetExecutionProviderType() != reduce_mean_node.GetExecutionProviderType() || !optimizer_utils::CheckOutputEdges(graph, reduce_mean2_node, 1) || - !IsSupportedDataType(reduce_mean2_node) || + !IsSupportedDataType(reduce_mean2_node, 1) || reduce_mean2_node.GetInputEdgesCount() == 0) { continue; } @@ -333,8 +339,16 @@ Status LayerNormFusion::ApplyImpl(Graph& graph, bool& modified, int graph_level, // get axes attributes const onnxruntime::NodeAttributes& attributes = reduce_mean_node.GetAttributes(); std::vector axes_values; + // TODO: modify this code when opset >= 18 (axes is an input). 
if (attributes.find("axes") != attributes.end()) { axes_values = RetrieveValues(attributes.at("axes")); + } else if (reduce_mean_node.InputDefs().size() == 2) { + auto axes = reduce_mean_node.InputDefs()[1]; + auto axes_const = graph.GetConstantInitializer(axes->Name(), true); + if (axes_const != nullptr) { + Initializer initializer{*axes_const, graph.ModelPath()}; + axes_values.insert(axes_values.end(), initializer.DataAsSpan().begin(), initializer.DataAsSpan().end()); + } } // Get the inputs for the new LayerNormalization node. @@ -485,9 +499,9 @@ Status SimplifiedLayerNormFusion::ApplyImpl(Graph& graph, bool& modified, int gr continue; } Node& reduce_mean_node = *graph.GetNode(p_reduce_mean->Index()); - if (!graph_utils::IsSupportedOptypeVersionAndDomain(reduce_mean_node, "ReduceMean", {1, 11, 13}) || + if (!graph_utils::IsSupportedOptypeVersionAndDomain(reduce_mean_node, "ReduceMean", {1, 11, 13, 18}) || reduce_mean_node.GetExecutionProviderType() != pow_node.GetExecutionProviderType() || - !optimizer_utils::CheckOutputEdges(graph, reduce_mean_node, 1) || !IsSupportedDataType(reduce_mean_node) || + !optimizer_utils::CheckOutputEdges(graph, reduce_mean_node, 1) || !IsSupportedDataType(reduce_mean_node, 1) || reduce_mean_node.GetInputEdgesCount() == 0) { continue; } @@ -585,6 +599,13 @@ Status SimplifiedLayerNormFusion::ApplyImpl(Graph& graph, bool& modified, int gr std::vector axes_values; if (attributes.find("axes") != attributes.end()) { axes_values = RetrieveValues(attributes.at("axes")); + } else if (reduce_mean_node.InputDefs().size() == 2) { + auto axes = reduce_mean_node.InputDefs()[1]; + auto axes_const = graph.GetConstantInitializer(axes->Name(), true); + if (axes_const != nullptr && axes_const->data_type() == ONNX_NAMESPACE::TensorProto_DataType_INT64) { + Initializer initializer{*axes_const, graph.ModelPath()}; + axes_values.insert(axes_values.end(), initializer.DataAsSpan().begin(), initializer.DataAsSpan().end()); + } } // Get the inputs for the new 
LayerNormalization node. diff --git a/onnxruntime/core/optimizer/transpose_optimizer/transpose_optimizer.cc b/onnxruntime/core/optimizer/transpose_optimizer/transpose_optimizer.cc index 0ac7cbb8fa058..700c91ab85974 100644 --- a/onnxruntime/core/optimizer/transpose_optimizer/transpose_optimizer.cc +++ b/onnxruntime/core/optimizer/transpose_optimizer/transpose_optimizer.cc @@ -1040,7 +1040,7 @@ static bool HandlePad(HandlerArgs& args) { constexpr HandlerInfo pad_handler = {&FirstInput, &HandlePad}; -static bool HandleReduceOp(HandlerArgs& args) { +static bool HandleReduceOpWithArg(HandlerArgs& args) { int64_t keepdims = args.node.GetAttributeIntDefault("keepdims", 1); std::optional> axes = args.node.GetAttributeInts("axes"); @@ -1078,11 +1078,11 @@ static bool HandleReduceOp(HandlerArgs& args) { return true; } -constexpr HandlerInfo reduce_op_handler = {&FirstInput, &HandleReduceOp}; - -static bool HandleReduceSum(HandlerArgs& args) { - if (args.ctx.opset < 13) { - return HandleReduceOp(args); +static bool HandleReduceOps(HandlerArgs& args) { + if ((args.node.OpType() == "ReduceSum" && args.ctx.opset < 13) || + // or all other reduce operators since opset 18 + (args.node.OpType() != "ReduceSum" && args.ctx.opset < 18)) { + return HandleReduceOpWithArg(args); } bool keepdims = args.node.GetAttributeIntDefault("keepdims", 1) != 0; @@ -1147,7 +1147,7 @@ static bool HandleReduceSum(HandlerArgs& args) { return true; } -constexpr HandlerInfo reduce_sum_handler = {&FirstInput, &HandleReduceSum}; +constexpr HandlerInfo reduce_op_handler = {&FirstInput, &HandleReduceOps}; static bool HandleSqueeze(HandlerArgs& args) { std::vector new_axes; @@ -1709,7 +1709,7 @@ static const std::unordered_map handler_ma #if !defined(USE_CUDA) && !defined(USE_ROCM) {"Resize", resize_handler}, #endif - {"ReduceSum", reduce_sum_handler}, + {"ReduceSum", reduce_op_handler}, {"ReduceLogSum", reduce_op_handler}, {"ReduceLogSumExp", reduce_op_handler}, diff --git 
a/onnxruntime/test/optimizer/graph_transform_test.cc b/onnxruntime/test/optimizer/graph_transform_test.cc index fde8392d943cd..9df487726ed8b 100755 --- a/onnxruntime/test/optimizer/graph_transform_test.cc +++ b/onnxruntime/test/optimizer/graph_transform_test.cc @@ -95,7 +95,6 @@ namespace onnxruntime { namespace test { #define MODEL_FOLDER ORT_TSTR("testdata/transform/") - TEST_F(GraphTransformationTests, IdentityElimination) { constexpr const ORTCHAR_T* model_uri = MODEL_FOLDER "abs-id-max.onnx"; std::shared_ptr model; @@ -4390,11 +4389,12 @@ TEST_F(GraphTransformationTests, ReshapeFusionOpsetTest) { return Status::OK(); }; - const std::vector opsets{11, 12, 13, 14, 15, 15}; + const std::vector opsets{11, 12, 13, 14, 15, 18}; bool shape_test_for_opset15 = false; - for (auto& opset_version : opsets) { + for (auto& opset : opsets) { auto build_test_case = [&](ModelTestBuilder& builder) { + auto opset_version = builder.DomainToVersionMap().find(kOnnxDomain)->second; auto* input_arg0 = builder.MakeInput({{batch_size, seq_lenth, hidden_size}}); auto* input_arg1 = builder.MakeInput({{hidden_size}}); auto* scalar_int_0 = builder.MakeInitializer({}, {0}); @@ -4414,7 +4414,7 @@ TEST_F(GraphTransformationTests, ReshapeFusionOpsetTest) { auto* out = builder.MakeOutput(); builder.AddNode("Add", {input_arg0, input_arg1}, {add_out}); - if (opset_version == 15) { + if (opset_version >= 15) { if (shape_test_for_opset15) { auto& shape_1 = builder.AddNode("Shape", {add_out}, {shape_out}); shape_1.AddAttribute("start", (int64_t)1); @@ -4442,11 +4442,11 @@ TEST_F(GraphTransformationTests, ReshapeFusionOpsetTest) { }; std::unique_ptr transformer = std::make_unique(); - if (opset_version == 15 && shape_test_for_opset15) { - ASSERT_STATUS_OK(TestGraphTransformer(build_test_case, opset_version, *logger_, std::move(transformer), TransformerLevel::Level1, 1, + if (opset >= 15 && shape_test_for_opset15) { + ASSERT_STATUS_OK(TestGraphTransformer(build_test_case, opset, *logger_, 
std::move(transformer), TransformerLevel::Level1, 1, pre_graph_checker, pre_graph_checker)); } else { - ASSERT_STATUS_OK(TestGraphTransformer(build_test_case, opset_version, *logger_, std::move(transformer), TransformerLevel::Level1, 1, + ASSERT_STATUS_OK(TestGraphTransformer(build_test_case, opset, *logger_, std::move(transformer), TransformerLevel::Level1, 1, pre_graph_checker, post_graph_checker)); } } @@ -4610,13 +4610,24 @@ TEST_F(GraphTransformationTests, LayerNormWithCastFusionTest_5) { auto* cast_out_2 = builder.MakeIntermediate(); auto* mul_out = builder.MakeIntermediate(); auto* add_out_2 = builder.MakeOutput(); + auto opset = builder.DomainToVersionMap().find(kOnnxDomain)->second; + onnxruntime::NodeArg* axes = nullptr; - builder.AddNode("ReduceMean", {data_arg}, {reduce_mean_out_1}).AddAttribute("axes", std::vector{-1}); + if (opset >= 18) { + axes = builder.MakeInitializer({1}, {-1}); + builder.AddNode("ReduceMean", {data_arg, axes}, {reduce_mean_out_1}); + } else { + builder.AddNode("ReduceMean", {data_arg}, {reduce_mean_out_1}).AddAttribute("axes", std::vector{-1}); + } builder.AddNode("Sub", {data_arg, reduce_mean_out_1}, {sub_out}); builder.AddNode("Cast", {sub_out}, {cast_out_1}) .AddAttribute("to", static_cast(ONNX_NAMESPACE::TensorProto_DataType_FLOAT)); builder.AddNode("Pow", {cast_out_1, pow_initializer}, {pow_out}); - builder.AddNode("ReduceMean", {pow_out}, {reduce_mean_out_2}).AddAttribute("axes", std::vector{-1}); + if (opset >= 18) { + builder.AddNode("ReduceMean", {pow_out, axes}, {reduce_mean_out_2}); + } else { + builder.AddNode("ReduceMean", {pow_out}, {reduce_mean_out_2}).AddAttribute("axes", std::vector{-1}); + } builder.AddNode("Add", {reduce_mean_out_2, add_initializer}, {add_out_1}); builder.AddNode("Sqrt", {add_out_1}, {sqrt_out}); builder.AddNode("Div", {cast_out_1, sqrt_out}, {div_out}); @@ -4652,7 +4663,7 @@ TEST_F(GraphTransformationTests, LayerNormWithCastFusionTest_5) { }; std::unique_ptr transformer = std::make_unique(); 
- ASSERT_STATUS_OK(TestGraphTransformer(build_test_case, 14, *logger_, std::move(transformer), TransformerLevel::Level1, + ASSERT_STATUS_OK(TestGraphTransformer(build_test_case, {14, 18}, *logger_, std::move(transformer), TransformerLevel::Level1, 1, pre_graph_checker, post_graph_checker)); } diff --git a/onnxruntime/test/optimizer/graph_transform_test_builder.cc b/onnxruntime/test/optimizer/graph_transform_test_builder.cc index 274b9184e037a..80f17fdda3936 100644 --- a/onnxruntime/test/optimizer/graph_transform_test_builder.cc +++ b/onnxruntime/test/optimizer/graph_transform_test_builder.cc @@ -17,6 +17,31 @@ namespace onnxruntime { namespace test { +void TransformerTester(const std::function& build_test_case, + const std::function& check_transformed_graph, + TransformerLevel baseline_level, + TransformerLevel target_level, + const std::vector& opset_versions, + double per_sample_tolerance, + double relative_per_sample_tolerance, + std::unique_ptr transformer, + const std::function& add_session_options, + const InlinedHashSet& disabled_optimizers) { + ASSERT_TRUE(transformer == nullptr); + for (auto opset_version : opset_versions) { + TransformerTester(build_test_case, + check_transformed_graph, + baseline_level, + target_level, + opset_version, + per_sample_tolerance, + relative_per_sample_tolerance, + nullptr, + add_session_options, + disabled_optimizers); + } +} + void TransformerTester(const std::function& build_test_case, const std::function& check_transformed_graph, TransformerLevel baseline_level, @@ -101,22 +126,36 @@ Status TestGraphTransformer(const std::function& const logging::Logger& logger, std::unique_ptr transformer, TransformerLevel level, unsigned steps, const std::function& pre_graph_checker, const std::function& post_graph_checker) { - // Build the model for this test. 
- std::unordered_map domain_to_version; - domain_to_version[kOnnxDomain] = opset_version; - domain_to_version[kMSDomain] = 1; - Model model("TransformerTester", false, ModelMetaData(), PathString(), IOnnxRuntimeOpSchemaRegistryList(), - domain_to_version, {}, logger); - Graph& graph = model.MainGraph(); - ModelTestBuilder helper(graph); - build_test_case(helper); - helper.SetGraphOutputs(); - ORT_RETURN_IF_ERROR(graph.Resolve()); - ORT_RETURN_IF_ERROR(pre_graph_checker(graph)); + const std::vector opset_versions{opset_version}; + return TestGraphTransformer(build_test_case, opset_versions, logger, std::move(transformer), + level, steps, pre_graph_checker, post_graph_checker); +} + +Status TestGraphTransformer(const std::function& build_test_case, + const std::vector& opset_versions, + const logging::Logger& logger, std::unique_ptr transformer, + TransformerLevel level, unsigned steps, const std::function& pre_graph_checker, + const std::function& post_graph_checker) { onnxruntime::GraphTransformerManager graph_transformation_mgr{steps}; ORT_RETURN_IF_ERROR(graph_transformation_mgr.Register(std::move(transformer), level)); - ORT_RETURN_IF_ERROR(graph_transformation_mgr.ApplyTransformers(graph, level, logger)); - ORT_RETURN_IF_ERROR(post_graph_checker(graph)); + + for (auto opset : opset_versions) { + // Build the model for this test. 
+ std::unordered_map domain_to_version; + domain_to_version[kOnnxDomain] = opset; + domain_to_version[kMSDomain] = 1; + Model model("TransformerTester", false, ModelMetaData(), PathString(), IOnnxRuntimeOpSchemaRegistryList(), + domain_to_version, {}, logger); + Graph& graph = model.MainGraph(); + ModelTestBuilder helper(graph); + build_test_case(helper); + helper.SetGraphOutputs(); + ORT_RETURN_IF_ERROR(graph.Resolve()); + ORT_RETURN_IF_ERROR(pre_graph_checker(graph)); + ORT_RETURN_IF_ERROR(graph_transformation_mgr.ApplyTransformers(graph, level, logger)); + ORT_RETURN_IF_ERROR(post_graph_checker(graph)); + } + return Status::OK(); } diff --git a/onnxruntime/test/optimizer/graph_transform_test_builder.h b/onnxruntime/test/optimizer/graph_transform_test_builder.h index 199f86e056bcb..14c73b2b558af 100644 --- a/onnxruntime/test/optimizer/graph_transform_test_builder.h +++ b/onnxruntime/test/optimizer/graph_transform_test_builder.h @@ -50,6 +50,10 @@ class ModelTestBuilder { ModelTestBuilder(Graph& graph) : graph_(graph) { } + const std::unordered_map& DomainToVersionMap() const noexcept { + return graph_.DomainToVersionMap(); + } + template NodeArg* MakeInput(const std::vector& shape, const std::vector& data) { ONNX_NAMESPACE::TypeProto type_proto; @@ -356,6 +360,17 @@ void TransformerTester(const std::function& buil const std::function& add_session_options = {}, const InlinedHashSet& disabled_optimizers = {}); +void TransformerTester(const std::function& build_test_case, + const std::function& check_transformed_graph, + TransformerLevel baseline_level, + TransformerLevel target_level, + const std::vector& opset_versions, + double per_sample_tolerance = 0.0, + double relative_per_sample_tolerance = 0.0, + std::unique_ptr transformer = nullptr, // must be null in this case. 
+ const std::function& add_session_options = {}, + const InlinedHashSet& disabled_optimizers = {}); + /** * @brief Apply a GraphTransformer to a graph, and run graph checkers before and after applying the transformer. * @@ -372,5 +387,23 @@ Status TestGraphTransformer(const std::function& const logging::Logger& logger, std::unique_ptr transformer, TransformerLevel level, unsigned steps, const std::function& pre_graph_checker, const std::function& post_graph_checker); + +/** + * @brief Apply a GraphTransformer to a graph, and run graph checkers before and after applying the transformer. + * + * @param build_test_case The function to build a graph for testing + * @param opset_versions A graph is created and tested for every opset in this set + * @param logger The logger + * @param transformer The GraphTransformer to be applied + * @param level The transformer level on which the transformer will be applied + * @param steps The step count of the GraphTransformerManager + * @param pre_graph_checker The graph checker function before applying the transformer + * @param post_graph_checker The graph checker function after applying the transformer + */ +Status TestGraphTransformer(const std::function& build_test_case, + const std::vector& opset_versions, + const logging::Logger& logger, std::unique_ptr transformer, + TransformerLevel level, unsigned steps, const std::function& pre_graph_checker, + const std::function& post_graph_checker); } // namespace test } // namespace onnxruntime diff --git a/onnxruntime/test/optimizer/nhwc_transformer_test.cc b/onnxruntime/test/optimizer/nhwc_transformer_test.cc index cbb4de74bfa15..99e94cff6275d 100644 --- a/onnxruntime/test/optimizer/nhwc_transformer_test.cc +++ b/onnxruntime/test/optimizer/nhwc_transformer_test.cc @@ -278,6 +278,9 @@ TEST(NhwcTransformerTests, ConvSplit) { conv_output_arg, .37f, 131); conv_node.AddAttribute("pads", std::vector{1, 1, 1, 1}); Node& split_node = builder.AddNode("Split", {conv_output_arg}, 
{split_output1_arg, split_output2_arg}); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + split_node.AddAttribute("num_outputs", static_cast(2)); + } split_node.AddAttribute("axis", static_cast(axis)); builder.AddQLinearBinaryNode("QLinearAdd", split_output1_arg, .37f, 131, @@ -302,6 +305,11 @@ TEST(NhwcTransformerTests, ConvSplit) { check_nhwc_graph, TransformerLevel::Level2, TransformerLevel::Level3); + TransformerTester(build_test_case, + check_nhwc_graph, + TransformerLevel::Level2, + TransformerLevel::Level3, + 18); } } @@ -323,6 +331,9 @@ TEST(NhwcTransformerTests, ConvSplitQLinearConcat) { conv_node.AddAttribute("pads", std::vector{1, 1, 1, 1}); Node& split_node = builder.AddNode("Split", {conv_output_arg}, {split_output1_arg, split_output2_arg}); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + split_node.AddAttribute("num_outputs", static_cast(2)); + } split_node.AddAttribute("axis", static_cast(axis)); Node& qlconcat_node = builder.AddQLinearConcatLike( @@ -346,6 +357,11 @@ TEST(NhwcTransformerTests, ConvSplitQLinearConcat) { check_nhwc_graph, TransformerLevel::Level2, TransformerLevel::Level3); + TransformerTester(build_test_case, + check_nhwc_graph, + TransformerLevel::Level2, + TransformerLevel::Level3, + 18); } } diff --git a/onnxruntime/test/optimizer/qdq_test_utils.h b/onnxruntime/test/optimizer/qdq_test_utils.h index cb19a1e69e8f8..0ba991a4d22e1 100644 --- a/onnxruntime/test/optimizer/qdq_test_utils.h +++ b/onnxruntime/test/optimizer/qdq_test_utils.h @@ -378,6 +378,9 @@ GetQDQTestCaseFn BuildConsolidationTestCase( auto* split_output_3 = builder.MakeIntermediate(); Node& split_node = builder.AddNode("Split", {upper_dq_output}, {split_output_1, split_output_2, split_output_3}); split_node.AddAttribute("axis", axis); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + split_node.AddAttribute("num_outputs", static_cast(3)); + } // add Q auto* lower_q_output_1 = 
builder.MakeIntermediate(); @@ -456,6 +459,9 @@ GetQDQTestCaseFn BuildQDQSplitTestCase( auto* split_output_3 = builder.MakeIntermediate(); Node& split_node = builder.AddNode("Split", {dq_output}, {split_output_1, split_output_2, split_output_3}); split_node.AddAttribute("axis", axis); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + split_node.AddAttribute("num_outputs", static_cast(3)); + } // add Q auto* q_split_output_1 = builder.MakeOutput(); diff --git a/onnxruntime/test/optimizer/qdq_transformer_test.cc b/onnxruntime/test/optimizer/qdq_transformer_test.cc index b253273c5bbc2..e2dcc7fac29ca 100644 --- a/onnxruntime/test/optimizer/qdq_transformer_test.cc +++ b/onnxruntime/test/optimizer/qdq_transformer_test.cc @@ -67,6 +67,14 @@ void QDQTransformerConvTests() { 0.01 /*per_sample_tolerance*/, 0.01 /*relative_per_sample_tolerance*/, std::make_unique(QDQIsInt8Allowed())); + TransformerTester(BuildQDQConvTestCase(input_shape, weights_shape), + check_graph, + TransformerLevel::Level1, + TransformerLevel::Level2, + 18 /*opset_version*/, + 0.01 /*per_sample_tolerance*/, + 0.01 /*relative_per_sample_tolerance*/, + std::make_unique(QDQIsInt8Allowed())); }; test_case({1, 12, 37}, {32, 12, 5}); @@ -157,10 +165,13 @@ TEST(QDQTransformerTests, ConvMaxPoolReshape_UInt8) { test_case({1, 12, 37}, {32, 12, 5}, 11); test_case({1, 12, 37}, {32, 12, 5}, 12); + test_case({1, 12, 37}, {32, 12, 5}, 18); test_case({1, 23, 13, 13}, {30, 23, 3, 3}, 11); test_case({1, 23, 13, 13}, {30, 23, 3, 3}, 12); + test_case({1, 23, 13, 13}, {30, 23, 3, 3}, 18); test_case({1, 22, 11, 13, 15}, {30, 22, 5, 3, 3}, 11); test_case({1, 22, 11, 13, 15}, {30, 22, 5, 3, 3}, 12); + test_case({1, 22, 11, 13, 15}, {30, 22, 5, 3, 3}, 18); } TEST(QDQTransformerTests, ConvMaxPoolReshape_Int8) { @@ -292,6 +303,14 @@ void QDQTransformerAveragePoolTests() { 0.01 /*per_sample_tolerance*/, 0.01 /*relative_per_sample_tolerance*/, std::make_unique(QDQIsInt8Allowed())); + 
TransformerTester(BuildQDQAveragePoolTestCase(input_shape), + check_graph, + TransformerLevel::Level1, + TransformerLevel::Level2, + 18 /*opset_version*/, + 0.01 /*per_sample_tolerance*/, + 0.01 /*relative_per_sample_tolerance*/, + std::make_unique(QDQIsInt8Allowed())); }; test_case({1, 12, 37}); @@ -341,6 +360,14 @@ void QDQTransformerGlobalAveragePoolTests() { 0.01 /*per_sample_tolerance*/, 0.01 /*relative_per_sample_tolerance*/, std::make_unique(QDQIsInt8Allowed())); + TransformerTester(BuildQDQGlobalAveragePoolTestCase(input_shape), + check_graph, + TransformerLevel::Level1, + TransformerLevel::Level2, + 18 /*opset_version*/, + 0.01 /*per_sample_tolerance*/, + 0.01 /*relative_per_sample_tolerance*/, + std::make_unique(QDQIsInt8Allowed())); }; test_case({1, 12, 37}); @@ -391,6 +418,14 @@ void QDQTransformerBinaryOpTests(const std::string& op_type) { 0.01 /*per_sample_tolerance*/, 0.01 /*relative_per_sample_tolerance*/, std::make_unique(QDQIsInt8Allowed())); + TransformerTester(BuildBinaryOpTestCase(input_shape, op_type), + check_graph, + TransformerLevel::Level1, + TransformerLevel::Level2, + 18 /*opset_version*/, + 0.01 /*per_sample_tolerance*/, + 0.01 /*relative_per_sample_tolerance*/, + std::make_unique(QDQIsInt8Allowed())); }; test_case({1, 12, 37}); @@ -522,6 +557,14 @@ void QDQTransformerMatMulTests(bool has_output_q) { 0.01 /*per_sample_tolerance*/, 0.01 /*relative_per_sample_tolerance*/, std::make_unique(QDQIsInt8Allowed())); + TransformerTester(build_test_case, + check_graph, + TransformerLevel::Level1, + TransformerLevel::Level2, + 18 /*opset_version*/, + 0.01 /*per_sample_tolerance*/, + 0.01 /*relative_per_sample_tolerance*/, + std::make_unique(QDQIsInt8Allowed())); }; test_case({1, 2, 2}, {1, 2, 4}); @@ -677,6 +720,14 @@ void QDQTransformerGemmTests(bool has_output_q, bool has_bias, bool beta_not_one 0.01 /*per_sample_tolerance*/, 0.01 /*relative_per_sample_tolerance*/, std::make_unique(QDQIsInt8Allowed())); + TransformerTester(build_test_case, + 
check_binary_op_graph, + TransformerLevel::Level1, + TransformerLevel::Level2, + 18 /*opset_version*/, + 0.01 /*per_sample_tolerance*/, + 0.01 /*relative_per_sample_tolerance*/, + std::make_unique(QDQIsInt8Allowed())); }; test_case({2, 2}, {2, 4}); @@ -813,6 +864,14 @@ TEST(QDQTransformerTests, DoubleQDQ) { 12, (scale_1 + scale_3) / 2, 0.01); + TransformerTester( + BuildDoubleQDQTestCases(zp_1, zp_2, zp_3, zp_4, scale_1, scale_2, scale_3, scale_4), + succeed ? expect_succeed : expect_fail, + TransformerLevel::Default, + TransformerLevel::Level1, + 18, + (scale_1 + scale_3) / 2, + 0.01); }; auto test_case_2u8_2s8_failed = [&](uint8_t zp_1, uint8_t zp_2, int8_t zp_3, int8_t zp_4, @@ -870,7 +929,8 @@ TEST(QDQTransformerTests, Split) { TransformerTester(BuildQDQSplitTestCase(input_shape, axis), check_graph, TransformerLevel::Level1, - TransformerLevel::Level2); + TransformerLevel::Level2, + {12, 18}); }; test_case({6, 18, 54}, 0); } @@ -887,7 +947,7 @@ TEST(QDQTransformerTests, Split_without_IdenticalChildrenConsolidation) { TransformerTester(BuildConsolidationTestCase(input_shape, axis), check_graph, TransformerLevel::Level1, - TransformerLevel::Level2, 12, {}, {}, nullptr, {}, + TransformerLevel::Level2, {12, 18}, {}, {}, nullptr, {}, {"IdenticalChildrenConsolidation"}); }; test_case({6, 18, 54}, 0); @@ -904,7 +964,8 @@ TEST(QDQTransformerTests, Split_with_IdenticalChildrenConsolidation) { TransformerTester(BuildConsolidationTestCase(input_shape, axis), check_graph, TransformerLevel::Level1, - TransformerLevel::Level2); + TransformerLevel::Level2, + {12, 18}); }; test_case({6, 18, 54}, 0); } @@ -1509,7 +1570,7 @@ TEST(QDQTransformerTests, ConvAveragePoolReshape_Int8_Fail) { check_graph, TransformerLevel::Level1, TransformerLevel::Level2, - 12 /*opset_version*/, + {12, 18} /*opset_version*/, 0.01f /*per_sample_tolerance*/, 0.01f /*relative_per_sample_tolerance*/); }; @@ -1566,6 +1627,14 @@ void QDQTransformerLeakyReluTests() { 0.01 /*per_sample_tolerance*/, 0.01 
/*relative_per_sample_tolerance*/, std::make_unique(QDQIsInt8Allowed())); + TransformerTester(build_test_case, + check_graph, + TransformerLevel::Level1, + TransformerLevel::Level2, + 18 /*opset_version*/, + 0.01 /*per_sample_tolerance*/, + 0.01 /*relative_per_sample_tolerance*/, + std::make_unique(QDQIsInt8Allowed())); }; test_case({1, 12, 37}); @@ -1635,6 +1704,14 @@ void QDQTransformerSigmoidTests() { 0.01 /*per_sample_tolerance*/, 0.01 /*relative_per_sample_tolerance*/, std::make_unique(QDQIsInt8Allowed())); + TransformerTester(build_test_case, + check_graph, + TransformerLevel::Level1, + TransformerLevel::Level2, + 18 /*opset_version*/, + 0.01 /*per_sample_tolerance*/, + 0.01 /*relative_per_sample_tolerance*/, + std::make_unique(QDQIsInt8Allowed())); }; test_case({1, 12, 37}); @@ -1907,7 +1984,7 @@ TEST(QDQTransformerTests, DQForward_MutilpleSteps) { TEST(QDQTransformerTests, Clip) { constexpr float epsilon = std::numeric_limits::epsilon(); - auto test_case = [&](float scale, auto zero_point, int clip_count, int opset_version = 12) { + auto test_case = [&](float scale, auto zero_point, int clip_count, int opset_version) { auto build_test_case = [&](ModelTestBuilder& builder) { auto* input_arg = builder.MakeInput({1, 32, 112, 112}, std::numeric_limits::min(), @@ -1922,7 +1999,9 @@ TEST(QDQTransformerTests, Clip) { auto* clip_output = builder.MakeIntermediate(); constexpr float min = .0f; constexpr float max = 6.0f; - if (opset_version >= 11) { + auto opset = builder.DomainToVersionMap().find(kOnnxDomain)->second; + EXPECT_EQ(opset_version, opset); + if (opset >= 11) { auto* min_initializer = builder.MakeScalarInitializer(min); auto* max_initializer = builder.MakeScalarInitializer(max); builder.AddNode("Clip", {dq_output, min_initializer, max_initializer}, {clip_output}); @@ -1953,18 +2032,21 @@ TEST(QDQTransformerTests, Clip) { epsilon); }; - test_case(.0235294122248888f, static_cast(-128), 0); // [0, 6] - test_case(.02f, static_cast(-128), 0); // [0, 5.1] - 
test_case(.03f, static_cast(-128), 1); // [0, 7.65] - test_case(.02f, static_cast(127), 1); // [-5.1 , 0] - test_case(.02f, static_cast(0), 1); // [-2.56, 2.54] - test_case(.04f, static_cast(-97), 1); // [-1.24, 8.96] - test_case(.02352941176f, static_cast(0), 0); // [0, 6] - test_case(.02f, static_cast(0), 0); // [0, 5.1] - test_case(.03f, static_cast(0), 1); // [0, 7.65] - test_case(.02f, static_cast(255), 1); // [-5.1, 0] - test_case(.02f, static_cast(128), 1); // [-2.56, 2.54] - test_case(.04f, static_cast(31), 1); // [-1.24, 8.96] + std::vector opsets{12, 18}; + for (auto opset : opsets) { + test_case(.0235294122248888f, static_cast(-128), 0, opset); // [0, 6] + test_case(.02f, static_cast(-128), 0, opset); // [0, 5.1] + test_case(.03f, static_cast(-128), 1, opset); // [0, 7.65] + test_case(.02f, static_cast(127), 1, opset); // [-5.1 , 0] + test_case(.02f, static_cast(0), 1, opset); // [-2.56, 2.54] + test_case(.04f, static_cast(-97), 1, opset); // [-1.24, 8.96] + test_case(.02352941176f, static_cast(0), 0, opset); // [0, 6] + test_case(.02f, static_cast(0), 0, opset); // [0, 5.1] + test_case(.03f, static_cast(0), 1, opset); // [0, 7.65] + test_case(.02f, static_cast(255), 1, opset); // [-5.1, 0] + test_case(.02f, static_cast(128), 1, opset); // [-2.56, 2.54] + test_case(.04f, static_cast(31), 1, opset); // [-1.24, 8.96] + } // opset_version = 10 test_case(.02f, static_cast(-128), 0, 10); // [0, 5.1] @@ -1973,10 +2055,12 @@ TEST(QDQTransformerTests, Clip) { test_case(.03f, static_cast(0), 1, 10); // [0, 7.65] // difference between lower/upper and min/max are within epsilon - test_case(epsilon, static_cast(-127), 0); // [-epsilon, x] (x <= 6 + epsilon) - test_case((6 + epsilon) / 255, static_cast(-128), 0); // [0, 6 + epsilon] - test_case(epsilon, static_cast(1), 0); // [-epsilon, x] (x <= 6 + epsilon) - test_case((6 + epsilon) / 255, static_cast(0), 0); // [0, 6 + epsilon] + for (auto opset : opsets) { + test_case(epsilon, static_cast(-127), 0, opset); // 
[-epsilon, x] (x <= 6 + epsilon) + test_case((6 + epsilon) / 255, static_cast(-128), 0, opset); // [0, 6 + epsilon] + test_case(epsilon, static_cast(1), 0, opset); // [-epsilon, x] (x <= 6 + epsilon) + test_case((6 + epsilon) / 255, static_cast(0), 0, opset); // [0, 6 + epsilon] + } } TEST(QDQTransformerTests, Concat) { @@ -2536,7 +2620,7 @@ TEST(QDQTransformerTests, QDQ_Selector_Test) { // regression test to validate TransposeOptimizer and QDQ Propagation don't loop // see https://github.com/microsoft/onnxruntime/issues/11605 -TEST(QDQTransformerTests, QDQPropagation_GH11605) { +TEST(QDQTransformerTests, QDQPropagation_GH11605_Opset12) { auto test_case = [&]() { auto build_test_case = [&](ModelTestBuilder& builder) { auto* input_arg = builder.MakeInput({1, 4, 4}, @@ -2585,7 +2669,61 @@ TEST(QDQTransformerTests, QDQPropagation_GH11605) { TransformerTester(build_test_case, check_graph, TransformerLevel::Default, - TransformerLevel::Level2); + TransformerLevel::Level2, + 12); + }; + + test_case(); +} + +TEST(QDQTransformerTests, QDQPropagation_GH11605_Opset13) { + auto test_case = [&]() { + auto build_test_case = [&](ModelTestBuilder& builder) { + auto* input_arg = builder.MakeInput({1, 4, 4}, + std::numeric_limits::min(), + std::numeric_limits::max()); + // add DQ + auto* dq_output = builder.MakeIntermediate(); + builder.AddDequantizeLinearNode(input_arg, 0.123f, uint8_t(0), dq_output); + + // add Transpose 0, 2, 1 + const std::vector& perms{0, 2, 1}; + auto* transpose_output = builder.MakeIntermediate(); + Node& transpose_node = builder.AddNode("Transpose", {dq_output}, {transpose_output}); + transpose_node.AddAttribute("perm", perms); + + // add Softmax with axis=2 (to block the Transpose moving past it due to the transpose perms) + auto* softmax_output = builder.MakeIntermediate(); + Node& softmax_node = builder.AddNode("Softmax", {transpose_output}, {softmax_output}); + softmax_node.AddAttribute("axis", int64_t(2)); + + // add second Transpose. 
this is so the check in TransposeOptimizer::ProcessTranspose for outputs leading to + // a Transpose is satisfied, allowing the first Transpose to move past the Q/DQ inserted by QDQ Propagation + Node& transpose_node2 = builder.AddNode("Transpose", {softmax_output}, {builder.MakeOutput()}); + transpose_node2.AddAttribute("perm", perms); + }; + + // check that an edge case where transpose optimization gets blocked is handled gracefully. + // Original: DQ -> Tr -> SoftM -> Tr + // QDQ Prop inserts a Q/DQ pair to create a QDQ node group for the Transpose: DQ -> Tr -> Q -> DQ -> SoftM -> Tr + // Transpose opt phase 1 moves the Tr down until it blocks on the SoftMax: DQ -> Q -> DQ -> Tr -> SoftM -> Tr + // Transpose opt phase 2 flips the Tr to prior to the DQ as it's not part of a QDQ node group at that point, as + // running the transpose on 8-bit data should be cheaper: DQ -> Q -> Tr -> DQ -> SoftM -> Tr + // QDQ cleanup in Level2 removes the unnecessary DQ/Q pair at the start: Tr -> DQ -> SoftM -> Tr + // this is the optimal result as the Transpose is using 8-bit data and we have no surplus Q/DQ pairs + auto check_graph = [&](InferenceSessionWrapper& session) { + std::vector expected_op_types_in_order{ + "DequantizeLinear", + "Softmax"}; + const auto op_types_in_order = GetNodeOpTypesInTopologicalOrder(session.GetGraph()); + EXPECT_EQ(op_types_in_order, expected_op_types_in_order); + }; + + TransformerTester(build_test_case, + check_graph, + TransformerLevel::Default, + TransformerLevel::Level2, + 13); }; test_case(); diff --git a/onnxruntime/test/optimizer/transpose_optimizer_test.cc b/onnxruntime/test/optimizer/transpose_optimizer_test.cc index 980ac01b9d1f2..1fab4e3502bad 100644 --- a/onnxruntime/test/optimizer/transpose_optimizer_test.cc +++ b/onnxruntime/test/optimizer/transpose_optimizer_test.cc @@ -94,6 +94,9 @@ TEST(TransposeOptimizerTests, TestSplit) { transpose_1.AddAttribute("perm", std::vector{1, 2, 0}); auto& split_1 = builder.AddNode("Split", 
{transpose_1_out_0}, {split_1_out_0, split_1_out_1}); split_1.AddAttribute("axis", (int64_t)1); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + split_1.AddAttribute("num_outputs", static_cast(2)); + } auto& transpose_2 = builder.AddNode("Transpose", {split_1_out_0}, {transpose_2_out_0}); transpose_2.AddAttribute("perm", std::vector{2, 0, 1}); auto& transpose_3 = builder.AddNode("Transpose", {split_1_out_1}, {transpose_3_out_0}); @@ -109,7 +112,7 @@ TEST(TransposeOptimizerTests, TestSplit) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestSplitDefaultAxis) { @@ -123,7 +126,10 @@ TEST(TransposeOptimizerTests, TestSplitDefaultAxis) { auto& transpose_1 = builder.AddNode("Transpose", {input0_arg}, {transpose_1_out_0}); transpose_1.AddAttribute("perm", std::vector{1, 2, 0}); - builder.AddNode("Split", {transpose_1_out_0}, {split_1_out_0, split_1_out_1}); + auto& split_1 = builder.AddNode("Split", {transpose_1_out_0}, {split_1_out_0, split_1_out_1}); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + split_1.AddAttribute("num_outputs", static_cast(2)); + } auto& transpose_2 = builder.AddNode("Transpose", {split_1_out_0}, {transpose_2_out_0}); transpose_2.AddAttribute("perm", std::vector{2, 0, 1}); auto& transpose_3 = builder.AddNode("Transpose", {split_1_out_1}, {transpose_3_out_0}); @@ -139,7 +145,7 @@ TEST(TransposeOptimizerTests, TestSplitDefaultAxis) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestSplitNegativeAxis) { @@ -155,6 +161,9 @@ TEST(TransposeOptimizerTests, TestSplitNegativeAxis) { transpose_1.AddAttribute("perm", std::vector{1, 2, 0}); auto& split_1 = builder.AddNode("Split", {transpose_1_out_0}, {split_1_out_0, split_1_out_1}); split_1.AddAttribute("axis", (int64_t)1); + 
if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + split_1.AddAttribute("num_outputs", static_cast(2)); + } auto& transpose_2 = builder.AddNode("Transpose", {split_1_out_0}, {transpose_2_out_0}); transpose_2.AddAttribute("perm", std::vector{2, 0, 1}); auto& transpose_3 = builder.AddNode("Transpose", {split_1_out_1}, {transpose_3_out_0}); @@ -170,7 +179,7 @@ TEST(TransposeOptimizerTests, TestSplitNegativeAxis) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestConcat) { @@ -201,7 +210,7 @@ TEST(TransposeOptimizerTests, TestConcat) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestPad) { @@ -213,10 +222,17 @@ TEST(TransposeOptimizerTests, TestPad) { auto& transpose_1 = builder.AddNode("Transpose", {input0_arg}, {transpose_1_out_0}); transpose_1.AddAttribute("perm", std::vector{0, 3, 1, 2}); - auto& pad_1 = builder.AddNode("Pad", {transpose_1_out_0}, {pad_1_out_0}); - pad_1.AddAttribute("mode", "constant"); - pad_1.AddAttribute("value", (float)2.3); - pad_1.AddAttribute("pads", std::vector{1, -2, 3, 4, 5, 6, 7, 8}); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + auto* value = builder.MakeInitializer({1}, {(float)2.3}); + auto* pads = builder.MakeInitializer({8}, {1, -2, 3, 4, 5, 6, 7, 8}); + auto& pad_1 = builder.AddNode("Pad", {transpose_1_out_0, pads, value}, {pad_1_out_0}); + pad_1.AddAttribute("mode", "constant"); + } else { + auto& pad_1 = builder.AddNode("Pad", {transpose_1_out_0}, {pad_1_out_0}); + pad_1.AddAttribute("mode", "constant"); + pad_1.AddAttribute("value", (float)2.3); + pad_1.AddAttribute("pads", std::vector{1, -2, 3, 4, 5, 6, 7, 8}); + } auto& transpose_2 = builder.AddNode("Transpose", {pad_1_out_0}, {transpose_2_out_0}); transpose_2.AddAttribute("perm", 
std::vector{0, 2, 3, 1}); }; @@ -230,7 +246,7 @@ TEST(TransposeOptimizerTests, TestPad) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 10); + /*opset_version*/ {10, 18}); } TEST(TransposeOptimizerTests, TestPadOpset15) { @@ -259,7 +275,7 @@ TEST(TransposeOptimizerTests, TestPadOpset15) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestPadNonconst) { @@ -291,7 +307,7 @@ TEST(TransposeOptimizerTests, TestPadNonconst) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 11); + /*opset_version*/ {11, 18}); } // The CUDA Resize kernel assumes that the input is NCHW and @@ -312,10 +328,15 @@ TEST(TransposeOptimizerTests, TestResize) { auto* transpose_1_out_0 = builder.MakeIntermediate(); auto* resize_1_out_0 = builder.MakeIntermediate(); auto* transpose_2_out_0 = builder.MakeOutput(); + auto empty_arg = NodeArg("", nullptr); auto& transpose_1 = builder.AddNode("Transpose", {input0_arg}, {transpose_1_out_0}); transpose_1.AddAttribute("perm", std::vector{0, 3, 1, 2}); - builder.AddNode("Resize", {transpose_1_out_0, const_1}, {resize_1_out_0}); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 11) { + builder.AddNode("Resize", {transpose_1_out_0, &empty_arg, const_1}, {resize_1_out_0}); + } else { + builder.AddNode("Resize", {transpose_1_out_0, const_1}, {resize_1_out_0}); + } auto& transpose_2 = builder.AddNode("Transpose", {resize_1_out_0}, {transpose_2_out_0}); transpose_2.AddAttribute("perm", std::vector{0, 2, 3, 1}); }; @@ -329,7 +350,7 @@ TEST(TransposeOptimizerTests, TestResize) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 10); + /*opset_version*/ {10, 18}); } TEST(TransposeOptimizerTests, TestResizeOpset11) { @@ -357,7 +378,7 @@ TEST(TransposeOptimizerTests, TestResizeOpset11) { 
check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 11); + /*opset_version*/ {11, 18}); } TEST(TransposeOptimizerTests, TestResizeOpset15) { @@ -385,7 +406,7 @@ TEST(TransposeOptimizerTests, TestResizeOpset15) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestResizeSizeRoi) { @@ -415,7 +436,7 @@ TEST(TransposeOptimizerTests, TestResizeSizeRoi) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestResizeRoiScalesZeroRank0) { @@ -448,7 +469,8 @@ TEST(TransposeOptimizerTests, TestResizeRoiScalesZeroRank0) { TransformerTester(build_test_case_1, check_optimized_graph_1, TransformerLevel::Default, - TransformerLevel::Level1); + TransformerLevel::Level1, + {12, 18}); } TEST(TransposeOptimizerTests, TestResizeNonconst) { @@ -477,7 +499,7 @@ TEST(TransposeOptimizerTests, TestResizeNonconst) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 11); + /*opset_version*/ {11, 18}); } TEST(TransposeOptimizerTests, TestResizeNonconstOpset13) { @@ -506,7 +528,7 @@ TEST(TransposeOptimizerTests, TestResizeNonconstOpset13) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 13); + /*opset_version*/ {13, 18}); } #endif @@ -534,7 +556,7 @@ TEST(TransposeOptimizerTests, TestAdd) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestShape) { @@ -557,7 +579,7 @@ TEST(TransposeOptimizerTests, TestShape) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 7); + /*opset_version*/ {7, 18}); } TEST(TransposeOptimizerTests, TestShapeOpset15) { @@ -580,7 
+602,7 @@ TEST(TransposeOptimizerTests, TestShapeOpset15) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestShapeSliceNoStart) { @@ -604,7 +626,7 @@ TEST(TransposeOptimizerTests, TestShapeSliceNoStart) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestShapeSliceNegativeEnd) { @@ -628,7 +650,7 @@ TEST(TransposeOptimizerTests, TestShapeSliceNegativeEnd) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestShapeSliceNegativeStartNoEnd) { @@ -652,7 +674,7 @@ TEST(TransposeOptimizerTests, TestShapeSliceNegativeStartNoEnd) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestShapeSliceStartAndEnd) { @@ -677,7 +699,7 @@ TEST(TransposeOptimizerTests, TestShapeSliceStartAndEnd) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestShapeSliceEmptyResult) { @@ -702,7 +724,7 @@ TEST(TransposeOptimizerTests, TestShapeSliceEmptyResult) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestReduceSumKeepdimsTrue) { @@ -714,9 +736,15 @@ TEST(TransposeOptimizerTests, TestReduceSumKeepdimsTrue) { auto& transpose_1 = builder.AddNode("Transpose", {input0_arg}, {transpose_1_out_0}); transpose_1.AddAttribute("perm", std::vector{0, 3, 1, 2}); - auto& reducesum_1 = builder.AddNode("ReduceSum", {transpose_1_out_0}, {reducesum_1_out_0}); - reducesum_1.AddAttribute("axes", std::vector{0, 
-2}); - reducesum_1.AddAttribute("keepdims", (int64_t)1); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + auto* init = builder.MakeInitializer({2}, {0, -2}); + auto& reducesum_1 = builder.AddNode("ReduceSum", {transpose_1_out_0, init}, {reducesum_1_out_0}); + reducesum_1.AddAttribute("keepdims", (int64_t)1); + } else { + auto& reducesum_1 = builder.AddNode("ReduceSum", {transpose_1_out_0}, {reducesum_1_out_0}); + reducesum_1.AddAttribute("axes", std::vector{0, -2}); + reducesum_1.AddAttribute("keepdims", (int64_t)1); + } auto& transpose_2 = builder.AddNode("Transpose", {reducesum_1_out_0}, {transpose_2_out_0}); transpose_2.AddAttribute("perm", std::vector{0, 2, 3, 1}); }; @@ -730,7 +758,7 @@ TEST(TransposeOptimizerTests, TestReduceSumKeepdimsTrue) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 7, + /*opset_version*/ {7, 18}, /*per_sample_tolerance*/ 1e-07, /*relative_per_sample_tolerance*/ 1e-06); } @@ -756,7 +784,7 @@ TEST(TransposeOptimizerTests, TestReduceSumEmptyAxesKeepdimsTrue) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 7, + /*opset_version*/ {7, 18}, /*per_sample_tolerance*/ 1e-07, /*relative_per_sample_tolerance*/ 1e-06); } @@ -770,9 +798,15 @@ TEST(TransposeOptimizerTests, TestReduceSumKeepdimsFalse) { auto& transpose_1 = builder.AddNode("Transpose", {input0_arg}, {transpose_1_out_0}); transpose_1.AddAttribute("perm", std::vector{0, 3, 1, 2}); - auto& reducesum_1 = builder.AddNode("ReduceSum", {transpose_1_out_0}, {reducesum_1_out_0}); - reducesum_1.AddAttribute("axes", std::vector{0, -2}); - reducesum_1.AddAttribute("keepdims", (int64_t)0); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + auto* init = builder.MakeInitializer({2}, {0, -2}); + auto& reducesum_1 = builder.AddNode("ReduceSum", {transpose_1_out_0, init}, {reducesum_1_out_0}); + reducesum_1.AddAttribute("keepdims", (int64_t)0); + } else { + 
auto& reducesum_1 = builder.AddNode("ReduceSum", {transpose_1_out_0}, {reducesum_1_out_0}); + reducesum_1.AddAttribute("axes", std::vector{0, -2}); + reducesum_1.AddAttribute("keepdims", (int64_t)0); + } auto& transpose_2 = builder.AddNode("Transpose", {reducesum_1_out_0}, {transpose_2_out_0}); transpose_2.AddAttribute("perm", std::vector{1, 0}); }; @@ -786,7 +820,7 @@ TEST(TransposeOptimizerTests, TestReduceSumKeepdimsFalse) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 7, + /*opset_version*/ {7, 18}, /*per_sample_tolerance*/ 1e-07, /*relative_per_sample_tolerance*/ 1e-06); } @@ -812,7 +846,7 @@ TEST(TransposeOptimizerTests, TestReduceSumEmptyAxesKeepdimsFalse) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 7, + /*opset_version*/ {7, 18}, /*per_sample_tolerance*/ 1e-07, /*relative_per_sample_tolerance*/ 1e-06); } @@ -874,7 +908,7 @@ TEST(TransposeOptimizerTests, TestReduceSumEmptyAxesKeepdimsTrueOpset15) { /*relative_per_sample_tolerance*/ 1e-06); } -TEST(TransposeOptimizerTests, TestReduceSumEmptyAxesKeepdimsTrueNoopEmptyTrue) { +TEST(TransposeOptimizerTests, TestReduceSumEmptyAxesKeepdimsTrueNoopEmptyTrueOpset15) { auto build_test_case_1 = [&](ModelTestBuilder& builder) { auto* input0_arg = MakeInput(builder, {{-1, 4, -1, 5}}, {2, 4, 6, 5}, 0.0, 1.0); auto* const_1 = builder.MakeInitializer({0}, {}); @@ -905,7 +939,7 @@ TEST(TransposeOptimizerTests, TestReduceSumEmptyAxesKeepdimsTrueNoopEmptyTrue) { /*relative_per_sample_tolerance*/ 1e-06); } -TEST(TransposeOptimizerTests, TestReduceSumEmptyAxesKeepdimsTrueNoopEmptyFalse) { +TEST(TransposeOptimizerTests, TestReduceSumEmptyAxesKeepdimsTrueNoopEmptyFalseOpset15) { auto build_test_case_1 = [&](ModelTestBuilder& builder) { auto* input0_arg = MakeInput(builder, {{-1, 4, -1, 5}}, {2, 4, 6, 5}, 0.0, 1.0); auto* const_1 = builder.MakeInitializer({0}, {}); @@ -933,7 +967,7 @@ TEST(TransposeOptimizerTests, 
TestReduceSumEmptyAxesKeepdimsTrueNoopEmptyFalse) /*relative_per_sample_tolerance*/ 1e-06); } -TEST(TransposeOptimizerTests, TestReduceSumNoAxesInput) { +TEST(TransposeOptimizerTests, TestReduceSumNoAxesInputOpset15) { auto build_test_case_1 = [&](ModelTestBuilder& builder) { auto* input0_arg = MakeInput(builder, {{-1, 4, -1, 5}}, {2, 4, 6, 5}, 0.0, 1.0); auto* transpose_1_out_0 = builder.MakeIntermediate(); @@ -1017,7 +1051,7 @@ TEST(TransposeOptimizerTests, TestReduceSumEmptyAxesKeepdimsFalseOpset15) { /*relative_per_sample_tolerance*/ 1e-06); } -TEST(TransposeOptimizerTests, TestReduceSumEmptyAxesKeepdimsFalseNoopEmptyTrue) { +TEST(TransposeOptimizerTests, TestReduceSumEmptyAxesKeepdimsFalseNoopEmptyTrueOpset15) { auto build_test_case_1 = [&](ModelTestBuilder& builder) { auto* input0_arg = MakeInput(builder, {{-1, 4, -1, 5}}, {2, 4, 6, 5}, 0.0, 1.0); auto* const_1 = builder.MakeInitializer({0}, {}); @@ -1048,7 +1082,7 @@ TEST(TransposeOptimizerTests, TestReduceSumEmptyAxesKeepdimsFalseNoopEmptyTrue) /*relative_per_sample_tolerance*/ 1e-06); } -TEST(TransposeOptimizerTests, TestReduceSumEmptyAxesKeepdimsFalseNoopEmptyFalse) { +TEST(TransposeOptimizerTests, TestReduceSumEmptyAxesKeepdimsFalseNoopEmptyFalseOpset15) { auto build_test_case_1 = [&](ModelTestBuilder& builder) { auto* input0_arg = MakeInput(builder, {{-1, 4, -1, 5}}, {2, 4, 6, 5}, 0.0, 1.0); auto* const_1 = builder.MakeInitializer({0}, {}); @@ -1076,7 +1110,7 @@ TEST(TransposeOptimizerTests, TestReduceSumEmptyAxesKeepdimsFalseNoopEmptyFalse) /*relative_per_sample_tolerance*/ 1e-06); } -TEST(TransposeOptimizerTests, TestReduceSumNoAxesInput_2) { +TEST(TransposeOptimizerTests, TestReduceSumNoAxesInput_2Opset15) { auto build_test_case_1 = [&](ModelTestBuilder& builder) { auto* input0_arg = MakeInput(builder, {{-1, 4, -1, 5}}, {2, 4, 6, 5}, 0.0, 1.0); auto* transpose_1_out_0 = builder.MakeIntermediate(); @@ -1103,7 +1137,7 @@ TEST(TransposeOptimizerTests, TestReduceSumNoAxesInput_2) { 
/*relative_per_sample_tolerance*/ 1e-06); } -TEST(TransposeOptimizerTests, TestReduceSumNonconstKeepdimsTrueNoOpt) { +TEST(TransposeOptimizerTests, TestReduceSumNonconstKeepdimsTrueNoOptOpset13) { auto build_test_case_1 = [&](ModelTestBuilder& builder) { auto* input0_arg = MakeInput(builder, {{-1, 4, -1, 5}}, {2, 4, 6, 5}, 0.0, 1.0); auto* input1_arg = MakeInput(builder, {std::vector{}}, std::vector{}, {-1}); @@ -1130,7 +1164,7 @@ TEST(TransposeOptimizerTests, TestReduceSumNonconstKeepdimsTrueNoOpt) { /*opset_version*/ 13); } -TEST(TransposeOptimizerTests, TestReduceSumNonconstKeepdimsFalseNoOpt) { +TEST(TransposeOptimizerTests, TestReduceSumNonconstKeepdimsFalseNoOptOpset13) { auto build_test_case_1 = [&](ModelTestBuilder& builder) { auto* input0_arg = MakeInput(builder, {{-1, 4, -1, 5}}, {2, 4, 6, 5}, 0.0, 1.0); auto* input1_arg = MakeInput(builder, {std::vector{}}, std::vector{}, {-1}); @@ -1166,9 +1200,15 @@ TEST(TransposeOptimizerTests, TestReduceMaxKeepdimsTrue) { auto& transpose_1 = builder.AddNode("Transpose", {input0_arg}, {transpose_1_out_0}); transpose_1.AddAttribute("perm", std::vector{0, 3, 1, 2}); - auto& reducemax_1 = builder.AddNode("ReduceMax", {transpose_1_out_0}, {reducemax_1_out_0}); - reducemax_1.AddAttribute("axes", std::vector{0, -2}); - reducemax_1.AddAttribute("keepdims", (int64_t)1); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + auto* axes = builder.MakeInitializer({2}, {0, -2}); + auto& reducemax_1 = builder.AddNode("ReduceMax", {transpose_1_out_0, axes}, {reducemax_1_out_0}); + reducemax_1.AddAttribute("keepdims", (int64_t)1); + } else { + auto& reducemax_1 = builder.AddNode("ReduceMax", {transpose_1_out_0}, {reducemax_1_out_0}); + reducemax_1.AddAttribute("axes", std::vector{0, -2}); + reducemax_1.AddAttribute("keepdims", (int64_t)1); + } auto& transpose_2 = builder.AddNode("Transpose", {reducemax_1_out_0}, {transpose_2_out_0}); transpose_2.AddAttribute("perm", std::vector{0, 2, 3, 1}); }; @@ -1182,7 +1222,7 @@ 
TEST(TransposeOptimizerTests, TestReduceMaxKeepdimsTrue) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestReduceMaxKeepdimsTrueDefaultAxes) { @@ -1206,7 +1246,7 @@ TEST(TransposeOptimizerTests, TestReduceMaxKeepdimsTrueDefaultAxes) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestReduceMaxKeepdimsFalse) { @@ -1218,13 +1258,19 @@ TEST(TransposeOptimizerTests, TestReduceMaxKeepdimsFalse) { auto& transpose_1 = builder.AddNode("Transpose", {input0_arg}, {transpose_1_out_0}); transpose_1.AddAttribute("perm", std::vector{0, 3, 1, 2}); - auto& reducemax_1 = builder.AddNode("ReduceMax", {transpose_1_out_0}, {reducemax_1_out_0}); - reducemax_1.AddAttribute("axes", std::vector{0, -2}); - reducemax_1.AddAttribute("keepdims", (int64_t)0); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + auto* axes = builder.MakeInitializer({2}, {0, -2}); + auto& reducemax_1 = builder.AddNode("ReduceMax", {transpose_1_out_0, axes}, {reducemax_1_out_0}); + reducemax_1.AddAttribute("keepdims", (int64_t)0); + } else { + auto& reducemax_1 = builder.AddNode("ReduceMax", {transpose_1_out_0}, {reducemax_1_out_0}); + reducemax_1.AddAttribute("axes", std::vector{0, -2}); + reducemax_1.AddAttribute("keepdims", (int64_t)0); + } auto& transpose_2 = builder.AddNode("Transpose", {reducemax_1_out_0}, {transpose_2_out_0}); transpose_2.AddAttribute("perm", std::vector{1, 0}); }; - + auto check_optimized_graph_1 = [&](InferenceSessionWrapper& session) { int transpose_cost = EstimateTransposeCost(session.GetGraph()); EXPECT_EQ(transpose_cost, 0); @@ -1234,7 +1280,7 @@ TEST(TransposeOptimizerTests, TestReduceMaxKeepdimsFalse) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ 
{15, 18}); } TEST(TransposeOptimizerTests, TestReduceMaxKeepdimsFalseDefaultAxes) { @@ -1258,7 +1304,7 @@ TEST(TransposeOptimizerTests, TestReduceMaxKeepdimsFalseDefaultAxes) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestReduceMax) { @@ -1270,8 +1316,13 @@ TEST(TransposeOptimizerTests, TestReduceMax) { auto& transpose_1 = builder.AddNode("Transpose", {input0_arg}, {transpose_1_out_0}); transpose_1.AddAttribute("perm", std::vector{0, 3, 1, 2}); - auto& reducemax_1 = builder.AddNode("ReduceMax", {transpose_1_out_0}, {reducemax_1_out_0}); - reducemax_1.AddAttribute("axes", std::vector{0, -2}); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + auto* axes = builder.MakeInitializer({2}, {0, -2}); + builder.AddNode("ReduceMax", {transpose_1_out_0, axes}, {reducemax_1_out_0}); + } else { + auto& reducemax_1 = builder.AddNode("ReduceMax", {transpose_1_out_0}, {reducemax_1_out_0}); + reducemax_1.AddAttribute("axes", std::vector{0, -2}); + } auto& transpose_2 = builder.AddNode("Transpose", {reducemax_1_out_0}, {transpose_2_out_0}); transpose_2.AddAttribute("perm", std::vector{0, 2, 3, 1}); }; @@ -1285,7 +1336,7 @@ TEST(TransposeOptimizerTests, TestReduceMax) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestReduceMaxDefaultAxes) { @@ -1308,7 +1359,7 @@ TEST(TransposeOptimizerTests, TestReduceMaxDefaultAxes) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestReduceOpsReduceLogSum) { @@ -1320,9 +1371,15 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceLogSum) { auto& transpose_1 = builder.AddNode("Transpose", {input0_arg}, {transpose_1_out_0}); transpose_1.AddAttribute("perm", std::vector{0, 
3, 1, 2}); - auto& reducelogsum_1 = builder.AddNode("ReduceLogSum", {transpose_1_out_0}, {reducelogsum_1_out_0}); - reducelogsum_1.AddAttribute("axes", std::vector{0, -2}); - reducelogsum_1.AddAttribute("keepdims", (int64_t)0); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + auto* axes = builder.MakeInitializer({2}, {0, -2}); + auto& reducelogsum_1 = builder.AddNode("ReduceLogSum", {transpose_1_out_0, axes}, {reducelogsum_1_out_0}); + reducelogsum_1.AddAttribute("keepdims", (int64_t)0); + } else { + auto& reducelogsum_1 = builder.AddNode("ReduceLogSum", {transpose_1_out_0}, {reducelogsum_1_out_0}); + reducelogsum_1.AddAttribute("axes", std::vector{0, -2}); + reducelogsum_1.AddAttribute("keepdims", (int64_t)0); + } auto& transpose_2 = builder.AddNode("Transpose", {reducelogsum_1_out_0}, {transpose_2_out_0}); transpose_2.AddAttribute("perm", std::vector{1, 0}); }; @@ -1336,7 +1393,7 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceLogSum) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestReduceOpsReduceLogSumExp) { @@ -1348,9 +1405,15 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceLogSumExp) { auto& transpose_1 = builder.AddNode("Transpose", {input0_arg}, {transpose_1_out_0}); transpose_1.AddAttribute("perm", std::vector{0, 3, 1, 2}); - auto& reducelogsumexp_1 = builder.AddNode("ReduceLogSumExp", {transpose_1_out_0}, {reducelogsumexp_1_out_0}); - reducelogsumexp_1.AddAttribute("axes", std::vector{0, -2}); - reducelogsumexp_1.AddAttribute("keepdims", (int64_t)0); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + auto* axes = builder.MakeInitializer({2}, {0, -2}); + auto& reducelogsumexp_1 = builder.AddNode("ReduceLogSumExp", {transpose_1_out_0, axes}, {reducelogsumexp_1_out_0}); + reducelogsumexp_1.AddAttribute("keepdims", (int64_t)0); + } else { + auto& reducelogsumexp_1 = 
builder.AddNode("ReduceLogSumExp", {transpose_1_out_0}, {reducelogsumexp_1_out_0}); + reducelogsumexp_1.AddAttribute("axes", std::vector{0, -2}); + reducelogsumexp_1.AddAttribute("keepdims", (int64_t)0); + } auto& transpose_2 = builder.AddNode("Transpose", {reducelogsumexp_1_out_0}, {transpose_2_out_0}); transpose_2.AddAttribute("perm", std::vector{1, 0}); }; @@ -1364,7 +1427,7 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceLogSumExp) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestReduceOpsReduceMax) { @@ -1376,9 +1439,15 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceMax) { auto& transpose_1 = builder.AddNode("Transpose", {input0_arg}, {transpose_1_out_0}); transpose_1.AddAttribute("perm", std::vector{0, 3, 1, 2}); - auto& reducemax_1 = builder.AddNode("ReduceMax", {transpose_1_out_0}, {reducemax_1_out_0}); - reducemax_1.AddAttribute("axes", std::vector{0, -2}); - reducemax_1.AddAttribute("keepdims", (int64_t)0); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + auto* axes = builder.MakeInitializer({2}, {0, -2}); + auto& reducemax_1 = builder.AddNode("ReduceMax", {transpose_1_out_0, axes}, {reducemax_1_out_0}); + reducemax_1.AddAttribute("keepdims", (int64_t)0); + } else { + auto& reducemax_1 = builder.AddNode("ReduceMax", {transpose_1_out_0}, {reducemax_1_out_0}); + reducemax_1.AddAttribute("axes", std::vector{0, -2}); + reducemax_1.AddAttribute("keepdims", (int64_t)0); + } auto& transpose_2 = builder.AddNode("Transpose", {reducemax_1_out_0}, {transpose_2_out_0}); transpose_2.AddAttribute("perm", std::vector{1, 0}); }; @@ -1392,7 +1461,7 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceMax) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestReduceOpsReduceMean) { @@ -1404,9 +1473,15 @@ 
TEST(TransposeOptimizerTests, TestReduceOpsReduceMean) { auto& transpose_1 = builder.AddNode("Transpose", {input0_arg}, {transpose_1_out_0}); transpose_1.AddAttribute("perm", std::vector{0, 3, 1, 2}); - auto& reducemean_1 = builder.AddNode("ReduceMean", {transpose_1_out_0}, {reducemean_1_out_0}); - reducemean_1.AddAttribute("axes", std::vector{0, -2}); - reducemean_1.AddAttribute("keepdims", (int64_t)0); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + auto* axes = builder.MakeInitializer({2}, {0, -2}); + auto& reducemean_1 = builder.AddNode("ReduceMean", {transpose_1_out_0, axes}, {reducemean_1_out_0}); + reducemean_1.AddAttribute("keepdims", (int64_t)0); + } else { + auto& reducemean_1 = builder.AddNode("ReduceMean", {transpose_1_out_0}, {reducemean_1_out_0}); + reducemean_1.AddAttribute("axes", std::vector{0, -2}); + reducemean_1.AddAttribute("keepdims", (int64_t)0); + } auto& transpose_2 = builder.AddNode("Transpose", {reducemean_1_out_0}, {transpose_2_out_0}); transpose_2.AddAttribute("perm", std::vector{1, 0}); }; @@ -1420,7 +1495,7 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceMean) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestReduceOpsReduceMin) { @@ -1432,9 +1507,15 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceMin) { auto& transpose_1 = builder.AddNode("Transpose", {input0_arg}, {transpose_1_out_0}); transpose_1.AddAttribute("perm", std::vector{0, 3, 1, 2}); - auto& reducemin_1 = builder.AddNode("ReduceMin", {transpose_1_out_0}, {reducemin_1_out_0}); - reducemin_1.AddAttribute("axes", std::vector{0, -2}); - reducemin_1.AddAttribute("keepdims", (int64_t)0); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + auto* axes = builder.MakeInitializer({2}, {0, -2}); + auto& reducemin_1 = builder.AddNode("ReduceMin", {transpose_1_out_0, axes}, {reducemin_1_out_0}); + 
reducemin_1.AddAttribute("keepdims", (int64_t)0); + } else { + auto& reducemin_1 = builder.AddNode("ReduceMin", {transpose_1_out_0}, {reducemin_1_out_0}); + reducemin_1.AddAttribute("axes", std::vector{0, -2}); + reducemin_1.AddAttribute("keepdims", (int64_t)0); + } auto& transpose_2 = builder.AddNode("Transpose", {reducemin_1_out_0}, {transpose_2_out_0}); transpose_2.AddAttribute("perm", std::vector{1, 0}); }; @@ -1448,7 +1529,7 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceMin) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestReduceOpsReduceProd) { @@ -1460,9 +1541,15 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceProd) { auto& transpose_1 = builder.AddNode("Transpose", {input0_arg}, {transpose_1_out_0}); transpose_1.AddAttribute("perm", std::vector{0, 3, 1, 2}); - auto& reduceprod_1 = builder.AddNode("ReduceProd", {transpose_1_out_0}, {reduceprod_1_out_0}); - reduceprod_1.AddAttribute("axes", std::vector{0, -2}); - reduceprod_1.AddAttribute("keepdims", (int64_t)0); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + auto* axes = builder.MakeInitializer({2}, {0, -2}); + auto& reduceprod_1 = builder.AddNode("ReduceProd", {transpose_1_out_0, axes}, {reduceprod_1_out_0}); + reduceprod_1.AddAttribute("keepdims", (int64_t)0); + } else { + auto& reduceprod_1 = builder.AddNode("ReduceProd", {transpose_1_out_0}, {reduceprod_1_out_0}); + reduceprod_1.AddAttribute("axes", std::vector{0, -2}); + reduceprod_1.AddAttribute("keepdims", (int64_t)0); + } auto& transpose_2 = builder.AddNode("Transpose", {reduceprod_1_out_0}, {transpose_2_out_0}); transpose_2.AddAttribute("perm", std::vector{1, 0}); }; @@ -1476,7 +1563,7 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceProd) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } 
TEST(TransposeOptimizerTests, TestReduceOpsReduceSumSquare) { @@ -1488,9 +1575,15 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceSumSquare) { auto& transpose_1 = builder.AddNode("Transpose", {input0_arg}, {transpose_1_out_0}); transpose_1.AddAttribute("perm", std::vector{0, 3, 1, 2}); - auto& reducesumsquare_1 = builder.AddNode("ReduceSumSquare", {transpose_1_out_0}, {reducesumsquare_1_out_0}); - reducesumsquare_1.AddAttribute("axes", std::vector{0, -2}); - reducesumsquare_1.AddAttribute("keepdims", (int64_t)0); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + auto* init = builder.MakeInitializer({2}, {0, -2}); + auto& reducesumsquare_1 = builder.AddNode("ReduceSumSquare", {transpose_1_out_0, init}, {reducesumsquare_1_out_0}); + reducesumsquare_1.AddAttribute("keepdims", (int64_t)0); + } else { + auto& reducesumsquare_1 = builder.AddNode("ReduceSumSquare", {transpose_1_out_0}, {reducesumsquare_1_out_0}); + reducesumsquare_1.AddAttribute("axes", std::vector{0, -2}); + reducesumsquare_1.AddAttribute("keepdims", (int64_t)0); + } auto& transpose_2 = builder.AddNode("Transpose", {reducesumsquare_1_out_0}, {transpose_2_out_0}); transpose_2.AddAttribute("perm", std::vector{1, 0}); }; @@ -1504,7 +1597,7 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceSumSquare) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestReduceOpsReduceL1) { @@ -1516,9 +1609,15 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceL1) { auto& transpose_1 = builder.AddNode("Transpose", {input0_arg}, {transpose_1_out_0}); transpose_1.AddAttribute("perm", std::vector{0, 3, 1, 2}); - auto& reducel1_1 = builder.AddNode("ReduceL1", {transpose_1_out_0}, {reducel1_1_out_0}); - reducel1_1.AddAttribute("axes", std::vector{0, -2}); - reducel1_1.AddAttribute("keepdims", (int64_t)0); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + auto* 
axes = builder.MakeInitializer({2}, {0, -2}); + auto& reducel1_1 = builder.AddNode("ReduceL1", {transpose_1_out_0, axes}, {reducel1_1_out_0}); + reducel1_1.AddAttribute("keepdims", (int64_t)0); + } else { + auto& reducel1_1 = builder.AddNode("ReduceL1", {transpose_1_out_0}, {reducel1_1_out_0}); + reducel1_1.AddAttribute("axes", std::vector{0, -2}); + reducel1_1.AddAttribute("keepdims", (int64_t)0); + } auto& transpose_2 = builder.AddNode("Transpose", {reducel1_1_out_0}, {transpose_2_out_0}); transpose_2.AddAttribute("perm", std::vector{1, 0}); }; @@ -1532,7 +1631,7 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceL1) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestReduceOpsReduceL2) { @@ -1544,9 +1643,15 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceL2) { auto& transpose_1 = builder.AddNode("Transpose", {input0_arg}, {transpose_1_out_0}); transpose_1.AddAttribute("perm", std::vector{0, 3, 1, 2}); - auto& reducel2_1 = builder.AddNode("ReduceL2", {transpose_1_out_0}, {reducel2_1_out_0}); - reducel2_1.AddAttribute("axes", std::vector{0, -2}); - reducel2_1.AddAttribute("keepdims", (int64_t)0); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + auto* axes = builder.MakeInitializer({2}, {0, -2}); + auto& reducel2_1 = builder.AddNode("ReduceL2", {transpose_1_out_0, axes}, {reducel2_1_out_0}); + reducel2_1.AddAttribute("keepdims", (int64_t)0); + } else { + auto& reducel2_1 = builder.AddNode("ReduceL2", {transpose_1_out_0}, {reducel2_1_out_0}); + reducel2_1.AddAttribute("axes", std::vector{0, -2}); + reducel2_1.AddAttribute("keepdims", (int64_t)0); + } auto& transpose_2 = builder.AddNode("Transpose", {reducel2_1_out_0}, {transpose_2_out_0}); transpose_2.AddAttribute("perm", std::vector{1, 0}); }; @@ -1560,10 +1665,10 @@ TEST(TransposeOptimizerTests, TestReduceOpsReduceL2) { check_optimized_graph_1, 
TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } -TEST(TransposeOptimizerTests, TestSqueeze) { +TEST(TransposeOptimizerTests, TestSqueezeOpset7) { auto build_test_case_1 = [&](ModelTestBuilder& builder) { auto* input0_arg = MakeInput(builder, {{1, -1, 1, 2}}, {1, 4, 1, 2}, 0.0, 1.0); auto* transpose_1_out_0 = builder.MakeIntermediate(); @@ -1663,7 +1768,7 @@ TEST(TransposeOptimizerTests, TestSqueezeEmptyNoOpt) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 7); + /*opset_version*/ {7, 18}); } TEST(TransposeOptimizerTests, TestSqueezeEmptyNoOptOpset15) { @@ -1708,10 +1813,10 @@ TEST(TransposeOptimizerTests, TestSqueezeNonconstNoOpt) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } -TEST(TransposeOptimizerTests, TestUnsqueeze) { +TEST(TransposeOptimizerTests, TestUnsqueezeOpset7) { auto build_test_case_1 = [&](ModelTestBuilder& builder) { auto* input0_arg = MakeInput(builder, {{2, -1, 6, 5}}, {2, 4, 6, 5}, 0.0, 1.0); auto* transpose_1_out_0 = builder.MakeIntermediate(); @@ -1901,7 +2006,7 @@ TEST(TransposeOptimizerTests, TestSliceOpset15) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestSliceNoAxesOpset15) { @@ -1929,7 +2034,7 @@ TEST(TransposeOptimizerTests, TestSliceNoAxesOpset15) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestSliceNegativeAxesInt32) { @@ -1958,7 +2063,7 @@ TEST(TransposeOptimizerTests, TestSliceNegativeAxesInt32) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestSliceStepsInt32) { @@ 
-1988,7 +2093,7 @@ TEST(TransposeOptimizerTests, TestSliceStepsInt32) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestSliceNegativeAxes) { @@ -2017,7 +2122,7 @@ TEST(TransposeOptimizerTests, TestSliceNegativeAxes) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestSliceSteps) { @@ -2047,7 +2152,7 @@ TEST(TransposeOptimizerTests, TestSliceSteps) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestSliceNonconstNoOpt) { @@ -2075,7 +2180,7 @@ TEST(TransposeOptimizerTests, TestSliceNonconstNoOpt) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestSliceNonconstInt32NoOpt) { @@ -2103,7 +2208,7 @@ TEST(TransposeOptimizerTests, TestSliceNonconstInt32NoOpt) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestSliceDefaultAxesNonconstStarts) { @@ -2131,7 +2236,7 @@ TEST(TransposeOptimizerTests, TestSliceDefaultAxesNonconstStarts) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestSliceDefaultAxesNonconstStartsUnknownLengthNoOpt) { @@ -2158,7 +2263,7 @@ TEST(TransposeOptimizerTests, TestSliceDefaultAxesNonconstStartsUnknownLengthNoO check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestSliceDefaultAxesNonconstStartsInt32) { @@ -2186,7 +2291,7 @@ 
TEST(TransposeOptimizerTests, TestSliceDefaultAxesNonconstStartsInt32) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestSliceDefaultAxesNonconstStartsUnknownLengthInt32NoOpt) { @@ -2213,7 +2318,7 @@ TEST(TransposeOptimizerTests, TestSliceDefaultAxesNonconstStartsUnknownLengthInt check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestTile) { @@ -2240,7 +2345,7 @@ TEST(TransposeOptimizerTests, TestTile) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestTileNonconstReps) { @@ -2267,7 +2372,7 @@ TEST(TransposeOptimizerTests, TestTileNonconstReps) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestArgMinNoAxisKeepdimsTrue) { @@ -2294,7 +2399,7 @@ TEST(TransposeOptimizerTests, TestArgMinNoAxisKeepdimsTrue) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestArgMinNoAxisKeepdimsFalse) { @@ -2321,7 +2426,7 @@ TEST(TransposeOptimizerTests, TestArgMinNoAxisKeepdimsFalse) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestArgMinNoAxis) { @@ -2347,7 +2452,7 @@ TEST(TransposeOptimizerTests, TestArgMinNoAxis) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestArgMinKeepdimsTrue) { @@ -2375,7 +2480,7 @@ TEST(TransposeOptimizerTests, 
TestArgMinKeepdimsTrue) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestArgMinKeepdimsFalse) { @@ -2403,7 +2508,7 @@ TEST(TransposeOptimizerTests, TestArgMinKeepdimsFalse) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestArgMin) { @@ -2430,7 +2535,7 @@ TEST(TransposeOptimizerTests, TestArgMin) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestArgMax) { @@ -2458,7 +2563,7 @@ TEST(TransposeOptimizerTests, TestArgMax) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestSoftmax) { @@ -2771,7 +2876,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsAdd) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestBroadcastOpsMul) { @@ -2801,7 +2906,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsMul) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestBroadcastOpsSub) { @@ -2831,7 +2936,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsSub) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestBroadcastOpsDiv) { @@ -2861,7 +2966,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsDiv) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } 
TEST(TransposeOptimizerTests, TestBroadcastOpsPRelu) { @@ -2891,7 +2996,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsPRelu) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestBroadcastOpsGreater) { @@ -2921,7 +3026,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsGreater) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestBroadcastOpsLess) { @@ -2951,7 +3056,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsLess) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestBroadcastOpsPow) { @@ -2981,7 +3086,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsPow) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestBroadcastOpsMax) { @@ -3011,7 +3116,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsMax) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestBroadcastOpsMin) { @@ -3041,7 +3146,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsMin) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestBroadcastOpsMean) { @@ -3071,7 +3176,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsMean) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestBroadcastOpsSum) { @@ -3101,7 +3206,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsSum) { 
check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestBroadcastOpsGreaterOrEqual) { @@ -3131,7 +3236,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsGreaterOrEqual) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestBroadcastOpsLessOrEqual) { @@ -3161,7 +3266,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsLessOrEqual) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestBroadcastOpsEqual) { @@ -3191,7 +3296,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsEqual) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestBroadcastOpsAnd) { @@ -3221,7 +3326,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsAnd) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestBroadcastOpsOr) { @@ -3251,7 +3356,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsOr) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestBroadcastOpsXor) { @@ -3281,7 +3386,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsXor) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestBroadcastOpsMod) { @@ -3312,7 +3417,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsMod) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + 
/*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestBroadcastOpsBitShift) { @@ -3343,7 +3448,7 @@ TEST(TransposeOptimizerTests, TestBroadcastOpsBitShift) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestWhere) { @@ -3374,7 +3479,7 @@ TEST(TransposeOptimizerTests, TestWhere) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestQuantizeLinearScalar) { @@ -3402,7 +3507,7 @@ TEST(TransposeOptimizerTests, TestQuantizeLinearScalar) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestQuantizeLinearScalarIgnoreAxis) { @@ -3431,7 +3536,7 @@ TEST(TransposeOptimizerTests, TestQuantizeLinearScalarIgnoreAxis) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestQuantizeLinearVector) { @@ -3460,7 +3565,7 @@ TEST(TransposeOptimizerTests, TestQuantizeLinearVector) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestQuantizeLinearVectorUnknownRank) { @@ -3489,7 +3594,7 @@ TEST(TransposeOptimizerTests, TestQuantizeLinearVectorUnknownRank) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestQuantizeLinearScalarOpset10) { @@ -3546,7 +3651,7 @@ TEST(TransposeOptimizerTests, TestDequantizeLinearScalarIgnoreAxis) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } 
TEST(TransposeOptimizerTests, TestDequantizeLinearVector) { @@ -3575,7 +3680,7 @@ TEST(TransposeOptimizerTests, TestDequantizeLinearVector) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestDequantizeLinearNoAxis) { @@ -3665,7 +3770,7 @@ TEST(TransposeOptimizerTests, TestCast) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestBroadcastReusedInputs) { @@ -3696,7 +3801,7 @@ TEST(TransposeOptimizerTests, TestBroadcastReusedInputs) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestTransposeGraphOutput) { @@ -3724,7 +3829,7 @@ TEST(TransposeOptimizerTests, TestTransposeGraphOutput) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestSimpleReshapeAsTranspose) { @@ -3757,7 +3862,7 @@ TEST(TransposeOptimizerTests, TestSimpleReshapeAsTranspose) { check_optimized_graph, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestReshapeAsTransposeGraphOutput) { @@ -3788,7 +3893,7 @@ TEST(TransposeOptimizerTests, TestReshapeAsTransposeGraphOutput) { check_optimized_graph, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestCancelingNodesGraphOutputs) { @@ -3819,7 +3924,7 @@ TEST(TransposeOptimizerTests, TestCancelingNodesGraphOutputs) { check_optimized_graph, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestNonCancelingReshape) { 
@@ -3855,7 +3960,7 @@ TEST(TransposeOptimizerTests, TestNonCancelingReshape) { check_optimized_graph, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestPushBroadcastUnsqueezeTranspose) { @@ -3890,7 +3995,7 @@ TEST(TransposeOptimizerTests, TestPushBroadcastUnsqueezeTranspose) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestOptimizeTowardsTranspose) { @@ -3920,7 +4025,7 @@ TEST(TransposeOptimizerTests, TestOptimizeTowardsTranspose) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestOnlyOptimizeTowardsTranspose) { @@ -3947,7 +4052,7 @@ TEST(TransposeOptimizerTests, TestOnlyOptimizeTowardsTranspose) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestDontOptimizeWrongInput) { @@ -3973,7 +4078,7 @@ TEST(TransposeOptimizerTests, TestDontOptimizeWrongInput) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestOptimizeBothInputs) { @@ -4001,7 +4106,7 @@ TEST(TransposeOptimizerTests, TestOptimizeBothInputs) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } TEST(TransposeOptimizerTests, TestOmitIdentityTranspose) { @@ -4012,9 +4117,16 @@ TEST(TransposeOptimizerTests, TestOmitIdentityTranspose) { auto& transpose_1 = builder.AddNode("Transpose", {input0_arg}, {transpose_1_out_0}); transpose_1.AddAttribute("perm", std::vector{0, 3, 1, 2}); - auto& reducemax_1 = builder.AddNode("ReduceMax", {transpose_1_out_0}, 
{reducemax_1_out_0}); - reducemax_1.AddAttribute("axes", std::vector{1}); - reducemax_1.AddAttribute("keepdims", (int64_t)0); + if (builder.DomainToVersionMap().find(kOnnxDomain)->second >= 18) { + auto* init = builder.MakeInitializer({1}, {1}); + auto& reducemax_1 = builder.AddNode("ReduceMax", {transpose_1_out_0, init}, {reducemax_1_out_0}); + reducemax_1.AddAttribute("keepdims", (int64_t)0); + } + else { + auto& reducemax_1 = builder.AddNode("ReduceMax", {transpose_1_out_0}, {reducemax_1_out_0}); + reducemax_1.AddAttribute("axes", std::vector{1}); + reducemax_1.AddAttribute("keepdims", (int64_t)0); + } }; auto check_optimized_graph_1 = [&](InferenceSessionWrapper& session) { @@ -4027,7 +4139,7 @@ TEST(TransposeOptimizerTests, TestOmitIdentityTranspose) { check_optimized_graph_1, TransformerLevel::Default, TransformerLevel::Level1, - /*opset_version*/ 15); + /*opset_version*/ {15, 18}); } // regression test for a model where the transpose optimizations were not completed in a single pass in level 1.