From 247514df6e843ce2768ac5d1a9877f9fe4cfd482 Mon Sep 17 00:00:00 2001
From: Wanming Lin <wanming.lin@intel.com>
Date: Fri, 18 Apr 2025 13:33:51 +0800
Subject: [PATCH] [WebNN] Support AveragePool with count_include_pad == 1

WebNN doesn't support AveragePool with count_include_pad == 1, but it can
be emulated by inserting a pad operator.
---
 js/web/docs/webnn-operators.md                     |  2 +-
 .../webnn/builders/impl/pool_op_builder.cc         | 34 ++++++++++++-------
 2 files changed, 23 insertions(+), 13 deletions(-)

diff --git a/js/web/docs/webnn-operators.md b/js/web/docs/webnn-operators.md
index 33906e291d423..452b91bcaee90 100644
--- a/js/web/docs/webnn-operators.md
+++ b/js/web/docs/webnn-operators.md
@@ -18,7 +18,7 @@ platforms. Check the [WebNN status](https://webmachinelearning.github.io/webnn-s
 | And | ai.onnx(7+) | logicalAnd | |
 | ArgMax | ai.onnx(7-10, 11, 12, 13+) | argMax | |
 | ArgMin | ai.onnx(7-10, 11, 12, 13+) | argMin | |
-| AveragePool | ai.onnx(7-9, 10, 11, 12-18, 19+) | averagePool2d | Only supports 4-D input, 2-D 'kernel_shape', 'count_include_pad' value is 0 |
+| AveragePool | ai.onnx(7-9, 10, 11, 12-18, 19+) | averagePool2d | Only supports 4-D input, 2-D 'kernel_shape' |
 | BatchNormalization | ai.onnx(7-8, 9-13, 14, 15+) | batchNormalization | Only supports 'training_mode' value is 0, one output |
 | Cast | ai.onnx(7-8, 9-12, 13-18, 19-20, 21+) | cast | |
 | Ceil | ai.onnx(7-12, 13+) | ceil | |
diff --git a/onnxruntime/core/providers/webnn/builders/impl/pool_op_builder.cc b/onnxruntime/core/providers/webnn/builders/impl/pool_op_builder.cc
index 2d263c1ec1f9f..f2a3f08b73148 100644
--- a/onnxruntime/core/providers/webnn/builders/impl/pool_op_builder.cc
+++ b/onnxruntime/core/providers/webnn/builders/impl/pool_op_builder.cc
@@ -60,16 +60,17 @@ Status PoolOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder,
   emscripten::val options = emscripten::val::object();
   options.set("label", node.Name());
   NodeAttrHelper helper(node);
+  const bool is_nhwc = model_builder.GetPreferredLayout() == DataLayout::NHWC;
-  const auto kernel_shape = helper.Get("kernel_shape", std::vector<uint32_t>{0, 0});
+  const auto onnx_kernel_shape = helper.Get("kernel_shape", std::vector<uint32_t>{0, 0});
   if (!is_global_pooling) {
-    options.set("windowDimensions", emscripten::val::array(kernel_shape));
+    options.set("windowDimensions", emscripten::val::array(onnx_kernel_shape));
   }
   const auto strides = helper.Get("strides", std::vector<uint32_t>{1, 1});
   options.set("strides", emscripten::val::array(strides));
   const auto dilations = helper.Get("dilations", std::vector<uint32_t>{1, 1});
   options.set("dilations", emscripten::val::array(dilations));
-  if (model_builder.GetPreferredLayout() == DataLayout::NHWC) {
+  if (is_nhwc) {
     options.set("layout", emscripten::val("nhwc"));
   } else {
     options.set("layout", emscripten::val("nchw"));
   }
@@ -78,7 +79,6 @@ Status PoolOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder,
   // Add Padding.
   // Usually using autopadding is more efficient than using explicit padding.
   // Try to see if we can map explicit padding to auto padding.
-  const auto onnx_kernel_shape = helper.Get("kernel_shape", std::vector<int64_t>{0, 0});
   const auto onnx_strides = helper.Get("strides", std::vector<int64_t>{1, 1});
   const auto onnx_pads = helper.Get("pads", std::vector<int64_t>{0, 0, 0, 0});
   auto pads = helper.Get("pads", std::vector<uint32_t>{0, 0, 0, 0});
@@ -93,7 +93,7 @@ Status PoolOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder,
                                       helper.Get("dilations", std::vector<int64_t>{1, 1}),
                                       auto_pad_type,
                                       pads_out,
-                                      model_builder.GetPreferredLayout() == DataLayout::NCHW));
+                                      !is_nhwc));
     pads = GetNarrowedIntfromInt64<uint32_t>(pads_out);
   }
   // Permute the ONNX's pads, which is [beginning_height, beginning_width, ending_height, ending_width],
@@ -105,6 +105,23 @@ Status PoolOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder,
   options.set("roundingType", ceil_mode == 0 ? emscripten::val("floor")
                                              : emscripten::val("ceil"));
 
+  // WebNN doesn't support AveragePool with count_include_pad == 1, emulate it by pad + averagePool2d.
+  if (op_type == "AveragePool" && helper.Get("count_include_pad", 0) == 1) {
+    std::vector<uint32_t> beginning_padding{0, 0, pads[0], pads[1]};
+    std::vector<uint32_t> ending_padding{0, 0, pads[2], pads[3]};
+    // Unset padding option, because we will use pad op instead.
+    options.set("padding", emscripten::val::array(std::vector<uint32_t>{0, 0, 0, 0}));
+    if (is_nhwc) {
+      beginning_padding = {0, pads[0], pads[1], 0};
+      ending_padding = {0, pads[2], pads[3], 0};
+    }
+
+    emscripten::val pad_options = emscripten::val::object();
+    pad_options.set("label", node.Name() + "_pad");
+    input = model_builder.GetBuilder().call<emscripten::val>("pad", input, emscripten::val::array(beginning_padding),
+                                                             emscripten::val::array(ending_padding), pad_options);
+  }
+
   emscripten::val output = model_builder.GetBuilder().call<emscripten::val>(webnn_op_name.c_str(), input, options);
   model_builder.AddOperand(node.OutputDefs()[0]->Name(), std::move(output));
   return Status::OK();
@@ -138,13 +155,6 @@ bool PoolOpBuilder::IsOpSupportedImpl(const GraphViewer&,
     }
   }
 
-  if (op_type == "AveragePool") {
-    if (helper.Get("count_include_pad", 0) != 0) {
-      LOGS(logger, VERBOSE) << "AveragePool only supports count_include_pad == 0";
-      return false;
-    }
-  }
-
   if (op_type == "MaxPool") {
     if (helper.Get("storage_order", 0) == 1) {
       LOGS(logger, VERBOSE) << "MaxPool storage_order == 1 is not supported";