Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refactor torch frontend max pooling functions #21814

Merged
merged 9 commits into from
Aug 18, 2023
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
49 changes: 20 additions & 29 deletions ivy/functional/frontends/torch/nn/functional/pooling_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,27 +103,26 @@ def avg_pool2d(


@to_ivy_arrays_and_back
def max_pool1d(input, kernel_size, stride=None, padding=0):
kernel_size = _broadcast_pooling_helper(kernel_size, "1d", name="kernel_size")
stride = _broadcast_pooling_helper(stride, "1d", name="stride")
padding = _broadcast_pooling_helper(padding, "1d", name="padding")
kernel_pads = list(zip(kernel_size, padding))

def max_pool1d(
input,
kernel_size,
stride=None,
padding=0,
ceil_mode=False,
dilation=1,
return_indices=False,
):
if stride is None:
stride = kernel_size
data_format = "NCW"

if not all([pad <= kernel / 2 for kernel, pad in kernel_pads]):
raise ValueError(
"pad should be smaller than or equal to half of kernel size, "
f"but got padding={padding}, kernel_size={kernel_size}. "
)
# figure out whether to apply padding
if all([pad == ivy.ceil((kernel - 1) / 2) for kernel, pad in kernel_pads]):
padding_str = "SAME"
else:
padding_str = "VALID"

return ivy.max_pool1d(
input, kernel_size, stride, padding_str, data_format=data_format
input,
kernel_size,
stride,
padding,
data_format=data_format,
dilation=dilation,
ceil_mode=ceil_mode,
)


Expand All @@ -138,14 +137,9 @@ def max_pool2d(
ceil_mode=False,
return_indices=False,
):
# ToDo: Add return_indices once superset is implemented
dim_check = False
if input.ndim == 3:
input = input.expand_dims()
dim_check = True
if not stride:
if stride is None:
stride = kernel_size
ret = ivy.max_pool2d(
return ivy.max_pool2d(
input,
kernel_size,
stride,
Expand All @@ -154,9 +148,6 @@ def max_pool2d(
dilation=dilation,
ceil_mode=ceil_mode,
)
if dim_check:
return ret.squeeze(0)
return ret


@to_ivy_arrays_and_back
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ def calculate_same_padding(kernel_size, stride, shape):
if all([kernel_size[i] / 2 >= padding[i] for i in range(len(kernel_size))]):
if is_same_padding(padding, stride, kernel_size, shape):
return padding
return (0, 0)
return [0] * len(shape)


# avg_pool1d
Expand Down Expand Up @@ -200,8 +200,8 @@ def test_torch_avg_pool3d(
max_dims=3,
min_side=1,
max_side=3,
explicit_or_str_padding=False,
only_explicit_padding=True,
data_format="channel_first",
),
test_with_out=st.just(False),
)
Expand All @@ -215,23 +215,7 @@ def test_torch_max_pool1d(
on_device,
):
input_dtype, x, kernel_size, stride, padding = dtype_x_k_s

# Torch ground truth func expects input to be consistent
# with a channels first format i.e. NCW
x[0] = x[0].reshape((x[0].shape[0], x[0].shape[-1], x[0].shape[1]))
x_shape = [x[0].shape[2]]

# Torch ground truth func also takes padding input as an integer
# or a tuple of integers, not a string
padding = tuple(
[
ivy.functional.layers._handle_padding(
x_shape[i], stride[0], kernel_size[i], padding
)
for i in range(len(x_shape))
]
)

padding = (padding[0][0], )
AnnaTz marked this conversation as resolved.
Show resolved Hide resolved
helpers.test_frontend_function(
input_dtypes=input_dtype,
backend_to_test=backend_fw,
Expand All @@ -254,9 +238,10 @@ def test_torch_max_pool1d(
max_dims=4,
min_side=1,
max_side=4,
explicit_or_str_padding=True,
only_explicit_padding=True,
return_dilation=True,
).filter(lambda x: x[4] != "VALID" and x[4] != "SAME"),
data_format="channel_first",
),
test_with_out=st.just(False),
ceil_mode=st.just(True),
)
Expand All @@ -271,9 +256,6 @@ def test_torch_max_pool2d(
on_device,
):
dtype, x, kernel, stride, pad, dilation = x_k_s_p
# Torch ground truth func expects input to be consistent
# with a channels first format i.e. NCHW
x[0] = x[0].reshape((x[0].shape[0], x[0].shape[-1], *x[0].shape[1:-1]))
pad = (pad[0][0], pad[1][0])
AnnaTz marked this conversation as resolved.
Show resolved Hide resolved

helpers.test_frontend_function(
Expand Down
Loading