
Commit

Update compiler.py f76f151
Co-authored-by: ivy-dev-bot <[email protected]>
Sam-Armstrong and ivy-dev-bot authored Sep 10, 2024
1 parent bd31086 commit 71af269
Showing 2 changed files with 26 additions and 20 deletions.
3 changes: 2 additions & 1 deletion binaries.json
@@ -96,6 +96,7 @@
"V",
"VC",
"VCC",
"VCD",
"VCI",
"VCL",
"VCV",
@@ -146,4 +147,4 @@
}
]
}
-}
+}
43 changes: 24 additions & 19 deletions ivy/compiler/compiler.py
@@ -2,15 +2,15 @@


def clear_graph_cache():
"""Clears the graph cache which gets populated if `graph_caching` is set to
`True` in `ivy.trace_graph`, `ivy.transpile` or `ivy.unify`. Use this to
"""Clears the graph cache which gets populated if `graph_caching` is set
to `True` in `ivy.trace_graph`, `ivy.transpile` or `ivy.unify`. Use this to
reset or clear the graph cache if needed.
Examples
--------
>>> import ivy
->>> ivy.clear_graph_cache()
-"""
+>>> ivy.clear_graph_cache()"""

from ._compiler import clear_graph_cache as _clear_graph_cache

return _clear_graph_cache()
@@ -55,8 +55,8 @@ def graph_transpile(
Returns
-------
-Either a transpiled Graph or a non-initialized LazyGraph.
-"""
+Either a transpiled Graph or a non-initialized LazyGraph."""

from ._compiler import graph_transpile as _graph_transpile

return _graph_transpile(
@@ -93,16 +93,14 @@ def source_to_source(
e.g. (source="torch_frontend", target="ivy") or (source="torch_frontend", target="tensorflow") etc.
Args:
----
object: The object (class/function) to be translated.
source (str, optional): The source framework. Defaults to 'torch'.
target (str, optional): The target framework. Defaults to 'tensorflow'.
profiling: Whether to add performance profiling.
Returns:
-------
-The translated object.
-"""
+The translated object."""

from ._compiler import source_to_source as _source_to_source

return _source_to_source(
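
For reference, a minimal usage sketch of source_to_source based on the arguments documented above; treating ivy.source_to_source as the public entry point and the small torch function below are illustrative assumptions, not taken from this diff:

import ivy
import torch

# Hypothetical torch function to translate (for illustration only).
def scale_and_sum(x):
    return (x * 2).sum()

# Translate the torch function into TensorFlow source code.
# The defaults match the docstring above: source='torch', target='tensorflow'.
tf_scale_and_sum = ivy.source_to_source(
    scale_and_sum,
    source="torch",
    target="tensorflow",
)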
@@ -133,8 +131,7 @@ def trace_graph(
params_v=None,
v=None
):
"""Takes `fn` and traces it into a more efficient composition of backend
operations.
"""Takes `fn` and traces it into a more efficient composition of backend operations.
Parameters
----------
@@ -204,8 +201,8 @@ def trace_graph(
>>> start = time.time()
>>> graph(x)
>>> print(time.time() - start)
-0.0001785755157470703
-"""
+0.0001785755157470703"""

from ._compiler import trace_graph as _trace_graph

return _trace_graph(
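
For reference, a sketch of the timing pattern the docstring example above illustrates; the args keyword, the torch inputs, and the sample function are assumptions drawn from the surrounding doctest rather than from this diff:

import time
import ivy
import torch

def fn(x):
    # An arbitrary composition of operations to trace (illustrative).
    return torch.mean(x) + torch.std(x)

x = torch.rand(1000)

# Trace fn into a graph of backend operations (keyword name assumed).
graph = ivy.trace_graph(fn, args=(x,))

# Compare eager vs. traced execution time, as the docstring example does.
start = time.time()
fn(x)
print("eager :", time.time() - start)

start = time.time()
graph(x)
print("traced:", time.time() - start)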
@@ -231,7 +228,11 @@ def trace_graph(


def transpile(
-object, source: str = "torch", target: str = "tensorflow", profiling: bool = False
+object,
+source: str = "torch",
+target: str = "tensorflow",
+profiling: bool = False,
+reuse_existing: bool = True,
):
"""Converts a given object (class/function) from one framework to another.
@@ -242,23 +243,27 @@
e.g. (source="torch_frontend", target="ivy") or (source="torch_frontend", target="tensorflow") etc.
Args:
----
object: The object (class/function) to be translated.
source (str, optional): The source framework. Defaults to 'torch'.
target (str, optional): The target framework. Defaults to 'tensorflow'.
profiling: Whether to add performance profiling.
+reuse_existing (bool, optional): If True, the function will check if `object`
+already exists in the translated directory and reuse it.
+If False, it will re-translate `object`,
+even if it already exists in the directory, and overwrite
+the old implementation. Defaults to 'True'.
Returns:
-------
-The translated object.
-"""
+The translated object."""

from ._compiler import transpile as _transpile

return _transpile(
object=object,
source=source,
target=target,
profiling=profiling,
+reuse_existing=reuse_existing,
)
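
For reference, a minimal sketch of the reuse_existing behaviour documented above; the torch module below and the exact translation targets are illustrative assumptions, not taken from this diff:

import ivy
import torch

class Net(torch.nn.Module):
    # A tiny illustrative model to translate.
    def __init__(self):
        super().__init__()
        self.lin = torch.nn.Linear(4, 2)

    def forward(self, x):
        return self.lin(x)

# First call translates Net and writes the generated code to the translated directory.
TfNet = ivy.transpile(Net, source="torch", target="tensorflow")

# reuse_existing=True (the default) reuses that existing translation on later calls.
TfNet = ivy.transpile(Net, source="torch", target="tensorflow", reuse_existing=True)

# reuse_existing=False forces a re-translation and overwrites the old implementation.
TfNet = ivy.transpile(Net, source="torch", target="tensorflow", reuse_existing=False)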



0 comments on commit 71af269
