From ceb26125e49fc8d154aabbd6713feed5f2c2084f Mon Sep 17 00:00:00 2001 From: Chaitanya Lakhchaura <90706514+ZenithFlux@users.noreply.github.com> Date: Thu, 14 Mar 2024 18:28:26 +0530 Subject: [PATCH] fix: fixed frontend test `test_torch_unfold` for all backends Added an assumption so that bfloat16 arrays are not passed to the paddle backend. --- .../test_nn/test_functional/test_convolution_functions.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/ivy_tests/test_ivy/test_frontends/test_torch/test_nn/test_functional/test_convolution_functions.py b/ivy_tests/test_ivy/test_frontends/test_torch/test_nn/test_functional/test_convolution_functions.py index de8d285f5d7d3..6c69aa5b0dddd 100644 --- a/ivy_tests/test_ivy/test_frontends/test_torch/test_nn/test_functional/test_convolution_functions.py +++ b/ivy_tests/test_ivy/test_frontends/test_torch/test_nn/test_functional/test_convolution_functions.py @@ -523,6 +523,9 @@ def test_torch_unfold( backend_fw, ): dtype, vals, kernel_shape, dilations, strides, padding = dtype_vals + # TODO add bfloat16 to unsupported dtypes of the tested function + if backend_fw == "paddle": + assume("bfloat16" not in dtype[0]) helpers.test_frontend_function( input_dtypes=dtype, backend_to_test=backend_fw,