fix: Replace use of default mutable arguments in a function. (#26746)
Co-authored-by: vedpatwardhan <[email protected]>
Sai-Suraj-27 and vedpatwardhan authored Oct 13, 2023
1 parent 6d3a57c commit 78f9ea3
Showing 8 changed files with 17 additions and 17 deletions.
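For context before the per-file diffs: Python evaluates a default argument once, at function definition time, so a mutable default such as `[[0]]` is a single list object shared by every call, and any in-place mutation leaks across calls. An immutable tuple like `((0,),)` removes that hazard while still satisfying the `Sequence[Sequence[...]]` annotations below. A minimal sketch of the pitfall (function and argument names are illustrative, not from this commit):

```python
def risky(idxs=[[0]]):
    # The default list is created once; mutating it contaminates later calls.
    idxs.append(["oops"])
    return idxs

print(risky())  # [[0], ['oops']]
print(risky())  # [[0], ['oops'], ['oops']]  <- state leaked between calls


def safe(idxs=((0,),)):
    # Tuples are immutable; a caller needing a mutable copy converts explicitly.
    return list(idxs) + [["ok"]]

print(safe())  # [(0,), ['ok']]
print(safe())  # [(0,), ['ok']]  <- fresh result on every call
```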
6 changes: 3 additions & 3 deletions ivy/functional/backends/jax/gradients.py
@@ -49,7 +49,7 @@ def _forward_fn(
                 ivy.index_nest(xs, grad_idx), ivy.is_array
             )
             for idx in xs_grad_arr_idx:
-                xs_grad_arr_idxs.append(grad_idx + idx)
+                xs_grad_arr_idxs.append(list(grad_idx) + idx)
         ivy.set_nest_at_indices(xs, xs_grad_arr_idxs, x_arr_values)
     elif ivy.is_array(xs):
         xs = x
@@ -75,8 +75,8 @@ def execute_with_gradients(
     /,
     *,
     retain_grads: bool = False,
-    xs_grad_idxs: Optional[Sequence[Sequence[Union[str, int]]]] = [[0]],
-    ret_grad_idxs: Optional[Sequence[Sequence[Union[str, int]]]] = [[0]],
+    xs_grad_idxs: Sequence[Sequence[Union[str, int]]] = ((0,),),
+    ret_grad_idxs: Sequence[Sequence[Union[str, int]]] = ((0,),),
 ):
     # Conversion of required arrays to float variables and duplicate index chains
     (
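One subtlety in the JAX hunk above: with the new tuple-of-tuples default, each `grad_idx` can be a tuple, and Python will not concatenate a tuple with a list, which is evidently the form the `idx` chains take; hence the added `list(grad_idx)` conversion. A minimal reproduction (values illustrative):

```python
grad_idx = (0,)   # an entry of the new ((0,),) default
idx = [1, 2]      # index chains built as lists elsewhere in the function
# grad_idx + idx  # would raise: TypeError: can only concatenate tuple (not "list") to tuple
print(list(grad_idx) + idx)  # [0, 1, 2]
```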
6 changes: 3 additions & 3 deletions ivy/functional/backends/mxnet/gradients.py
@@ -2,7 +2,7 @@
 signature."""
 
 # global
-from typing import Optional, Sequence, Union
+from typing import Sequence, Union
 import mxnet as mx
 
 # local
@@ -27,8 +27,8 @@ def execute_with_gradients(
     /,
     *,
     retain_grads: bool = False,
-    xs_grad_idxs: Optional[Sequence[Sequence[Union[str, int]]]] = [[0]],
-    ret_grad_idxs: Optional[Sequence[Sequence[Union[str, int]]]] = [[0]],
+    xs_grad_idxs: Sequence[Sequence[Union[str, int]]] = ((0,),),
+    ret_grad_idxs: Sequence[Sequence[Union[str, int]]] = ((0,),),
 ):
     raise IvyNotImplementedException()
 
6 changes: 3 additions & 3 deletions ivy/functional/backends/numpy/gradients.py
@@ -3,7 +3,7 @@
 
 # global
 import logging
-from typing import Optional, Sequence, Union
+from typing import Sequence, Union
 import ivy
 
 
@@ -31,8 +31,8 @@ def execute_with_gradients(
     /,
     *,
     retain_grads: bool = False,
-    xs_grad_idxs: Optional[Sequence[Sequence[Union[str, int]]]] = [[0]],
-    ret_grad_idxs: Optional[Sequence[Sequence[Union[str, int]]]] = [[0]],
+    xs_grad_idxs: Sequence[Sequence[Union[str, int]]] = ((0,),),
+    ret_grad_idxs: Sequence[Sequence[Union[str, int]]] = ((0,),),
 ):
     logging.warning(
         "NumPy does not support autograd, "
2 changes: 1 addition & 1 deletion ivy/functional/backends/paddle/gradients.py
@@ -108,7 +108,7 @@ def grad_(x):
     {"2.5.1 and below": {"cpu": ("float16",)}}, backend_version
 )
 def execute_with_gradients(
-    func, xs, /, *, retain_grads=False, xs_grad_idxs=[[0]], ret_grad_idxs=[[0]]
+    func, xs, /, *, retain_grads=False, xs_grad_idxs=((0,),), ret_grad_idxs=((0,),)
 ):
     # Conversion of required arrays to float variables and duplicate index chains
     xs, xs_grad_idxs, xs1, required_duplicate_index_chains, _ = (
4 changes: 2 additions & 2 deletions ivy/functional/backends/tensorflow/gradients.py
@@ -68,8 +68,8 @@ def execute_with_gradients(
     /,
     *,
     retain_grads: bool = False,
-    xs_grad_idxs: Optional[Sequence[Sequence[Union[str, int]]]] = [[0]],
-    ret_grad_idxs: Optional[Sequence[Sequence[Union[str, int]]]] = [[0]],
+    xs_grad_idxs: Sequence[Sequence[Union[str, int]]] = ((0,),),
+    ret_grad_idxs: Sequence[Sequence[Union[str, int]]] = ((0,),),
 ):
     # Conversion of required arrays to float variables and duplicate index chains
     xs, xs_grad_idxs, xs_required, required_duplicate_index_chains, _ = (
4 changes: 2 additions & 2 deletions ivy/functional/backends/torch/gradients.py
@@ -99,8 +99,8 @@ def execute_with_gradients(
     /,
     *,
     retain_grads: bool = False,
-    xs_grad_idxs: Optional[Sequence[Sequence[Union[str, int]]]] = [[0]],
-    ret_grad_idxs: Optional[Sequence[Sequence[Union[str, int]]]] = [[0]],
+    xs_grad_idxs: Sequence[Sequence[Union[str, int]]] = ((0,),),
+    ret_grad_idxs: Sequence[Sequence[Union[str, int]]] = ((0,),),
 ):
     # Conversion of required arrays to float variables and duplicate index chains
     xs, xs_grad_idxs, xs1, required_duplicate_index_chains, _ = (
4 changes: 2 additions & 2 deletions ivy/functional/ivy/gradients.py
@@ -406,8 +406,8 @@ def execute_with_gradients(
     /,
     *,
     retain_grads: bool = False,
-    xs_grad_idxs: Optional[Sequence[Sequence[Union[str, int]]]] = [[0]],
-    ret_grad_idxs: Optional[Sequence[Sequence[Union[str, int]]]] = [[0]],
+    xs_grad_idxs: Sequence[Sequence[Union[str, int]]] = ((0,),),
+    ret_grad_idxs: Sequence[Sequence[Union[str, int]]] = ((0,),),
 ) -> Tuple[ivy.Array, ivy.Array]:
     """Call function func with input of xs variables, and return the function
     result func_ret and the gradients of each output variable w.r.t each input
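Note that the public API is unchanged for callers: the annotation is `Sequence[Sequence[Union[str, int]]]`, which lists and tuples both satisfy, so only the default object differs. A hedged usage sketch (the loss function, input, and backend choice are placeholders, not from this commit):

```python
import ivy

ivy.set_backend("torch")  # any autograd-capable backend

def loss(w):
    return (w ** 2).sum()

w = ivy.array([1.0, 2.0, 3.0])
# Equivalent calls: an explicit list-of-lists, or the new tuple-of-tuples default.
ret, grads = ivy.execute_with_gradients(loss, w, xs_grad_idxs=[[0]])
ret, grads = ivy.execute_with_gradients(loss, w, xs_grad_idxs=((0,),))
```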
2 changes: 1 addition & 1 deletion ivy/utils/assertions.py
@@ -136,7 +136,7 @@ def check_all_or_any_fn(
     *args,
     fn,
     type="all",
-    limit=[0],
+    limit=(0,),
     message="args must exist according to type and limit given",
     as_array=True,
 ):
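The same reasoning applies to the assertion helper: `limit` is presumably only read (membership- and length-style checks), so the tuple `(0,)` is a drop-in replacement for the old list default. An illustrative check, not the helper's actual internals:

```python
# Read-only operations behave identically on the list and tuple forms.
for limit in ([0], (0,)):
    print(0 in limit, len(limit), limit[0])  # True 1 0 for both
```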
