Skip to content

Commit

Permalink
WIP: replace the cached `value`/`_subgraph`/`_graph` properties with `lru_cache`-backed getter methods; remaining `.value` call sites still need to be migrated to `get_value()`.
Browse files Browse the repository at this point in the history
  • Loading branch information
yifanwu committed Apr 19, 2022
1 parent 4d2de19 commit 532539d
Show file tree
Hide file tree
Showing 2 changed files with 41 additions and 10 deletions.
25 changes: 15 additions & 10 deletions lineapy/graph_reader/apis.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
GetVersionEvent,
track,
)
from lineapy.utils.deprecation_utils import lru_cache

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -61,8 +62,8 @@ def version(self) -> str:
track(GetVersionEvent(""))
return self._version

@property
def value(self) -> object:
@lru_cache(maxsize=None)
def get_value(self) -> object:
"""
Get and return the value of the artifact
"""
Expand Down Expand Up @@ -95,13 +96,14 @@ def _get_value_path(
raise ValueError("No value saved for this node")
return value.value

@property
def _subgraph(self) -> Graph:
@lru_cache(maxsize=None)
def _get_subgraph(self) -> Graph:
"""
Return the slice subgraph for the artifact
"""
return get_slice_graph(self._graph, [self._node_id])
return get_slice_graph(self._get_graph(), [self._node_id])

@lru_cache(maxsize=None)
def get_code(self, use_lineapy_serialization=True) -> str:
"""
Return the slices code for the artifact
Expand All @@ -111,10 +113,11 @@ def get_code(self, use_lineapy_serialization=True) -> str:
GetCodeEvent(use_lineapy_serialization=True, is_session_code=False)
)
return self._de_linealize_code(
get_source_code_from_graph(self._subgraph),
get_source_code_from_graph(self._get_subgraph()),
use_lineapy_serialization,
)

@lru_cache(maxsize=None)
def get_session_code(self, use_lineapy_serialization=True) -> str:
"""
Return the raw session code for the artifact. This will include any
Expand Down Expand Up @@ -176,8 +179,8 @@ def replace_fun(match):

return swapped

@property
def _graph(self) -> Graph:
@lru_cache(maxsize=None)
def _get_graph(self) -> Graph:
session_context = self.db.get_session_context(self._session_id)
# FIXME: copied cover from tracer, we might want to refactor
nodes = self.db.get_nodes_for_session(self._session_id)
Expand All @@ -193,7 +196,9 @@ def visualize(self, path: Optional[str] = None) -> None:
# This way we can import lineapy without having graphviz installed.
from lineapy.visualizer import Visualizer

visualizer = Visualizer.for_public_node(self._graph, self._node_id)
visualizer = Visualizer.for_public_node(
self._get_graph(), self._node_id
)
if path:
visualizer.render_pdf_file(path)
else:
Expand All @@ -205,7 +210,7 @@ def execute(self) -> object:
"""
slice_exec = Executor(self.db, globals())
slice_exec.execute_graph(self._subgraph)
slice_exec.execute_graph(self._get_subgraph())
return slice_exec.get_value(self._node_id)


Expand Down
26 changes: 26 additions & 0 deletions lineapy/utils/deprecation_utils.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,7 @@
from functools import singledispatch, update_wrapper
import functools
import weakref



# Descriptor version
Expand Down Expand Up @@ -117,3 +120,26 @@ def get_source_segment(source, node, padded=False):
lines.insert(0, first)
lines.append(last)
return "".join(lines)



# Reference:
# Ideally we would use functools.cached_property, but it was only added in
# Python 3.8 and we still support Python 3.7, hence this weakref-based recipe:
# https://stackoverflow.com/questions/33672412/python-functools-lru-cache-with-instance-methods-release-object/33672499#33672499

def lru_cache(*lru_args, **lru_kwargs):
    """Instance-method-safe variant of ``functools.lru_cache``.

    Applying ``functools.lru_cache`` directly to a method stores ``self`` as
    part of every cache key, which keeps a strong reference to the instance
    and prevents it from ever being garbage collected.  This decorator avoids
    that by holding the instance only through a weak reference and by storing
    the memoized callable on the instance itself, so each instance owns its
    own cache and the cache dies with the instance.

    Accepts the same arguments as ``functools.lru_cache`` (e.g. ``maxsize``).
    """

    def decorator(func):
        @functools.wraps(func)
        def wrapped_func(self, *args, **kwargs):
            # Refer to the instance weakly: a strong reference captured in
            # the closure below would pin the instance alive forever.
            weak_self = weakref.ref(self)

            @functools.wraps(func)
            @functools.lru_cache(*lru_args, **lru_kwargs)
            def cached_method(*inner_args, **inner_kwargs):
                return func(weak_self(), *inner_args, **inner_kwargs)

            # Shadow the class-level attribute with this per-instance cached
            # callable so subsequent lookups bypass wrapped_func entirely.
            setattr(self, func.__name__, cached_method)
            return cached_method(*args, **kwargs)

        return wrapped_func

    return decorator

0 comments on commit 532539d

Please sign in to comment.