Skip to content

Commit

Permalink
Use more concise names for the array and store conversion functions.
Browse files Browse the repository at this point in the history
  • Loading branch information
csadorf committed Apr 17, 2023
1 parent 55d9d65 commit 98c5ece
Show file tree
Hide file tree
Showing 3 changed files with 17 additions and 17 deletions.
6 changes: 3 additions & 3 deletions legate/raft/__init__.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
from .array_api import add, exp, fill, log, negative, subtract, sum_over_axis
from .core import array_to_store, convert, store_to_array
from .core import as_array, as_store, convert
from .multiarray import bincount, categorize, matmul, multiply

__all__ = [
"add",
"array_to_store",
"as_array",
"as_store",
"bincount",
"categorize",
"convert",
Expand All @@ -14,7 +15,6 @@
"matmul",
"multiply",
"negative",
"store_to_array",
"subtract",
"sum_over_axis",
]
4 changes: 2 additions & 2 deletions legate/raft/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ def __array_interface__(self):
}


def array_to_store(array: np.ndarray) -> Store:
def as_store(array: np.ndarray) -> Store:
store = context.create_store(
pa.from_numpy_dtype(array.dtype),
shape=array.shape,
Expand All @@ -59,7 +59,7 @@ def array_to_store(array: np.ndarray) -> Store:
return store


def store_to_array(store: Store) -> np.ndarray:
def as_array(store: Store) -> np.ndarray:
if store.kind is Future:
dtype = store.get_dtype()
buf = store.storage.get_buffer(dtype.size)
Expand Down
24 changes: 12 additions & 12 deletions naive_bayes/multinomial.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,13 +11,13 @@

from legate.raft import (
add,
array_to_store,
as_array,
as_store,
convert,
exp,
fill,
log,
matmul,
store_to_array,
subtract,
sum_over_axis,
)
Expand All @@ -34,12 +34,12 @@ def __init__(self, alpha: float = 1.0, fit_prior=True, class_prior=None):

def fit(self, X, y):
        # Convert to legate stores
X = array_to_store(X)
X = as_store(X)
_, n_features = X.shape
self.n_features_in_ = n_features

labelbin = LabelBinarizer()
Y = array_to_store(labelbin.fit_transform(y))
Y = as_store(labelbin.fit_transform(y))

self.classes_ = labelbin.classes_
assert Y.shape[1] != 1
Expand Down Expand Up @@ -69,12 +69,12 @@ def _update_feature_log_proba(self, alpha, dtype=pa.float64()):
@property
def feature_log_prob_(self):
if self._feature_log_prob_ is not None:
return store_to_array(self._feature_log_prob_)
return as_array(self._feature_log_prob_)

@property
def class_log_prior_(self):
if self._class_log_prior_ is not None:
return store_to_array(self._class_log_prior_)
return as_array(self._class_log_prior_)

def _update_class_log_prior(self, class_prior=None):
n_classes = len(self.classes_)
Expand Down Expand Up @@ -104,17 +104,17 @@ def _predict_log_proba(self, X):
return subtract(x1.transpose((1, 0)), x2).transpose((1, 0))

def predict_log_proba(self, X):
X = array_to_store(X)
X = as_store(X)
ret = self._predict_log_proba(X)
return store_to_array(ret)
return as_array(ret)

def predict_proba(self, X):
X = array_to_store(X)
X = as_store(X)
ret = exp(self._predict_log_proba(X))
return store_to_array(ret)
return as_array(ret)

def predict(self, X):
X = array_to_store(X)
jll = store_to_array(self._joint_log_likelihood(X))
X = as_store(X)
jll = as_array(self._joint_log_likelihood(X))
ret = self.classes_[np.argmax(jll, axis=1)]
return ret

0 comments on commit 98c5ece

Please sign in to comment.