|
1 |
| -# Copyright 2024 NXP |
| 1 | +# Copyright 2024-2025 NXP |
2 | 2 | #
|
3 | 3 | # This source code is licensed under the BSD-style license found in the
|
4 | 4 | # LICENSE file in the root directory of this source tree.
|
|
13 | 13 | )
|
14 | 14 | from executorch.backends.nxp.tests.executors import (
|
15 | 15 | convert_run_compare,
|
| 16 | + graph_contains_any_of_ops, |
16 | 17 | ToNCHWPreprocess,
|
17 | 18 | ToNHWCPreprocess,
|
18 | 19 | )
|
19 | 20 | from executorch.backends.nxp.tests.models import (
|
20 | 21 | ConstantPadNDConvModule,
|
21 | 22 | ConstantPadNDModule,
|
22 | 23 | )
|
| 24 | +from executorch.exir.dialects._ops import ops as exir_ops |
23 | 25 |
|
24 | 26 |
|
25 | 27 | @pytest.fixture(autouse=True)
|
@@ -121,3 +123,51 @@ def test_constant_pad_nd__unsupported_paddings(input_shape, paddings):
|
121 | 123 | nodes = list(exec_program.graph.nodes)
|
122 | 124 | # There is at least one non-delegated Pad node
|
123 | 125 | assert any(node.name == "aten_constant_pad_nd_default" for node in nodes)
|
| 126 | + |
| 127 | + |
def test_constant_pad_nd__delegation__formatless__supported_padding():
    """A formatless input whose last dim gets zero padding must be fully delegated."""
    # Formatless tensor -> the last dim (8) is the one whose padding matters.
    input_shape = (2, 4, 6, 8)
    # paddings[0:2] apply to the last dim; both are zero here.
    paddings = [0, 0, 1, 2, 3, 4]

    module = ConstantPadNDModule(paddings)
    exec_program = to_quantized_edge_program(module, input_shape).exported_program()

    # The `pad` must have been delegated: no edge `constant_pad_nd` op remains.
    pad_ops = [exir_ops.edge.aten.constant_pad_nd.default]
    assert not graph_contains_any_of_ops(exec_program.graph, pad_ops)
| 138 | + |
| 139 | + |
def test_constant_pad_nd__delegation__formatless__unsupported_padding():
    """A formatless input whose last dim gets non-zero padding must NOT be delegated."""
    # Formatless tensor -> the last dim (8) is the one whose padding matters.
    input_shape = (2, 4, 6, 8)
    # paddings[0:2] apply to the last dim; the non-zero value makes this unsupported.
    paddings = [0, 1]

    module = ConstantPadNDModule(paddings)
    exec_program = to_quantized_edge_program(module, input_shape).exported_program()

    # The `pad` must have stayed in the graph: an edge `constant_pad_nd` op is present.
    pad_ops = [exir_ops.edge.aten.constant_pad_nd.default]
    assert graph_contains_any_of_ops(exec_program.graph, pad_ops)
| 150 | + |
| 151 | + |
def test_constant_pad_nd__delegation__channels_first__supported_padding():
    """A channels-first input whose channel dim gets zero padding must be fully delegated."""
    # Channels-first tensor -> the second dim (4, channels) is the one whose padding matters.
    input_shape = (2, 4, 6, 8)
    # paddings[4:6] apply to the channel dim; both are zero here.
    paddings = [1, 2, 3, 4, 0, 0]

    module = ConstantPadNDConvModule(paddings)
    exec_program = to_quantized_edge_program(module, input_shape).exported_program()

    # The `pad` must have been delegated: no edge `constant_pad_nd` op remains.
    pad_ops = [exir_ops.edge.aten.constant_pad_nd.default]
    assert not graph_contains_any_of_ops(exec_program.graph, pad_ops)
| 162 | + |
| 163 | + |
def test_constant_pad_nd__delegation__channels_first__unsupported_padding():
    """A channels-first input whose channel dim gets non-zero padding must NOT be delegated."""
    # Channels-first tensor -> the second dim (3, channels) is the one whose padding matters.
    input_shape = (2, 3, 6, 8)
    # paddings[4:6] apply to the channel dim; the non-zero value makes this unsupported.
    paddings = [0, 0, 0, 0, 1, 0]

    module = ConstantPadNDConvModule(paddings)
    exec_program = to_quantized_edge_program(module, input_shape).exported_program()

    # The `pad` must have stayed in the graph: an edge `constant_pad_nd` op is present.
    pad_ops = [exir_ops.edge.aten.constant_pad_nd.default]
    assert graph_contains_any_of_ops(exec_program.graph, pad_ops)
0 commit comments