
Commit

lcm_ (#22396)
NiteshK84 authored Sep 11, 2023
1 parent b1a4da3 commit b26bdac
Showing 2 changed files with 66 additions and 0 deletions.
21 changes: 21 additions & 0 deletions ivy/functional/frontends/torch/tensor.py
@@ -1833,6 +1833,27 @@ def char(self):
    def lcm(self, other, *, out=None):
        return torch_frontend.lcm(self, other, out=out)

    @with_unsupported_dtypes(
        {
            "2.0.1 and below": (
                "float16",
                "bfloat16",
                "float32",
                "float64",
                "complex",
                "uint8",
                "uint16",
                "uint32",
                "uint64",
                "int8",
            )
        },
        "torch",
    )
    def lcm_(self, other, *, out=None):
        self.ivy_array = self.lcm(other, out=out).ivy_array
        return self

    @with_unsupported_dtypes(
        {
            "2.0.1 and below": (
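The new `lcm_` follows PyTorch's in-place convention: it computes the element-wise least common multiple with `other`, writes the result back into the calling tensor's `ivy_array`, and returns `self`. A minimal usage sketch, assuming the numpy backend is available and using the frontend's `tensor` constructor (not part of this diff):

import ivy
import ivy.functional.frontends.torch as torch_frontend

ivy.set_backend("numpy")  # any supported backend works; numpy assumed here

a = torch_frontend.tensor([4, 6, 9])
b = torch_frontend.tensor([6, 4, 6])

a.lcm_(b)   # in place: the element-wise lcm overwrites `a`, and `self` is returned
print(a)    # a now holds [12, 12, 18]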
45 changes: 45 additions & 0 deletions ivy_tests/test_ivy/test_frontends/test_torch/test_tensor.py
@@ -7851,6 +7851,51 @@ def test_torch_tensor_lcm(
    )


# lcm_
@handle_frontend_method(
    class_tree=CLASS_TREE,
    init_tree="torch.tensor",
    method_name="lcm_",
    dtype_and_x=helpers.dtype_and_values(
        available_dtypes=helpers.get_dtypes("integer"),
        num_arrays=2,
        min_value=-100,
        max_value=100,
        min_num_dims=1,
        max_num_dims=3,
        min_dim_size=1,
        max_dim_size=3,
        shared_dtype=True,
    ),
)
def test_torch_tensor_lcm_(
    dtype_and_x,
    frontend,
    frontend_method_data,
    init_flags,
    method_flags,
    on_device,
    backend_fw,
):
    input_dtype, x = dtype_and_x
    helpers.test_frontend_method(
        init_input_dtypes=input_dtype,
        backend_to_test=backend_fw,
        init_all_as_kwargs_np={
            "data": x[0],
        },
        method_input_dtypes=input_dtype,
        method_all_as_kwargs_np={
            "other": x[1],
        },
        frontend=frontend,
        frontend_method_data=frontend_method_data,
        init_flags=init_flags,
        method_flags=method_flags,
        on_device=on_device,
    )


# less
@handle_frontend_method(
    class_tree=CLASS_TREE,
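For reference, the hypothesis strategy above draws two integer arrays of the same dtype, and `helpers.test_frontend_method` checks the frontend's `lcm_` against the ground-truth framework (native torch). The native behaviour being matched, using the same operands as the sketch above:

import torch

a = torch.tensor([4, 6, 9])
b = torch.tensor([6, 4, 6])

a.lcm_(b)   # in-place element-wise least common multiple
print(a)    # tensor([12, 12, 18])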
