Release 0.0.71 (#78)
* Bump version and add mxnet to changelog

* Fix variable in test

* Try 0.0.7

* 0.0.7 already exists, boo

* Bump makefile to 0.0.71

* Skip downloads for hosted storage

* Fix bug in pickle version

* Add missing data load

* Fix pytorch example

* Fix pytorch-lightning example

* Capture model type for pytorch lightning models

* Fix error message

* Update pytorch-lightning example with right kwargs

* Freeze libomp version, version 12 breaks xgboost
nlathia authored Sep 25, 2021
1 parent 5d78c52 commit 844f81b
Showing 14 changed files with 55 additions and 27 deletions.
4 changes: 2 additions & 2 deletions CHANGELOG.md
@@ -1,14 +1,14 @@
# Change log

-## Unreleased
+## modelstore 0.0.71

🆕 Load models straight into memory! Model Store previously had `modelstore.download()` to download an artifact archive to a local path; it now also has `modelstore.load()` to load a model straight into memory. This currently works with `scikit-learn` models only.
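As a rough sketch of the difference (placeholder store, domain, and model ID; the `from_file_system()` constructor and the `download()` keyword names are assumptions based on the library's README rather than anything in this diff):

```python
from modelstore import ModelStore

# Assumption: a local file-system store; any supported backend would behave the same
model_store = ModelStore.from_file_system(root_directory="/tmp/modelstore")

# Before 0.0.71: download the artifact archive to a local path
model_store.download(
    local_path="downloaded-model",
    domain="example-domain",
    model_id="example-model-id",
)

# New: load the model straight into memory (scikit-learn models only, for now)
clf = model_store.load("example-domain", "example-model-id")
```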

🆕 Upload models from frameworks that are not (yet) supported by modelstore! The `modelstore.upload()` function now works if you give it a `model=` kwarg that is a path to a file.
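A minimal sketch of the path-based upload (the store setup, domain, and file path are placeholders; the release note only confirms that `model=` may now be a path to a file):

```python
from modelstore import ModelStore

model_store = ModelStore.from_file_system(root_directory="/tmp/modelstore")  # placeholder store

# The file could come from any framework that modelstore does not (yet) support natively
meta_data = model_store.upload("example-domain", model="path/to/my-model.bin")
print(meta_data["model"]["model_id"])
```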

🆕 Read a specific model's metadata with `modelstore.get_model_info()`.
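For example (a sketch with placeholder values; the two-argument call mirrors how `load()` calls `get_model_info(domain, model_id)` in the `model_store.py` diff below):

```python
from modelstore import ModelStore

model_store = ModelStore.from_file_system(root_directory="/tmp/modelstore")  # placeholder store

meta_data = model_store.get_model_info("example-domain", "example-model-id")
print(meta_data["model"]["model_id"])  # the same structure that main.py reads in this diff
```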

-🆕 Added [Annoy](https://github.com/spotify/annoy) and [ONNX](https://github.com/onnx/onnx) support.
+🆕 Added [Annoy](https://github.com/spotify/annoy), [ONNX](https://github.com/onnx/onnx), and [MXNet](https://mxnet.apache.org) (hybrid models) support.

## modelstore 0.0.65

6 changes: 5 additions & 1 deletion bin/_setup_brew
@@ -5,6 +5,10 @@ brew update

# To use xgboost
# https://xgboost.readthedocs.io/en/latest/build.html#building-on-osx
-brew install libomp
+
+# Note: it looks like there's trouble with libomp 12
+# https://github.com/dmlc/xgboost/issues/7039
+
+brew install libomp@11.1.0

echo "\n ✅ Done."
2 changes: 1 addition & 1 deletion examples/Makefile-example
@@ -15,7 +15,7 @@ pyenv-local: pyenv-uninstall pyenv

pyenv-test: pyenv-uninstall pyenv
pip uninstall -y modelstore
-pip install -i https://test.pypi.org/simple/ modelstore==0.0.7
+pip install -i https://test.pypi.org/simple/ modelstore==0.0.71

pyenv-prod: pyenv-uninstall pyenv
pip uninstall -y modelstore
1 change: 1 addition & 0 deletions examples/examples-by-ml-library/libraries/keras_example.py
@@ -39,5 +39,6 @@ def load_and_test(modelstore: ModelStore, model_id: str):
model = modelstore.load(DIABETES_DOMAIN, model_id)

# Run some test predictions
+_, X_test, _, y_test = load_diabetes_dataset()
results = mean_squared_error(y_test, model.predict(X_test))
print(f"🔍 Loaded model MSE={results}.")
8 changes: 5 additions & 3 deletions examples/examples-by-ml-library/libraries/pytorch_example.py
@@ -36,16 +36,18 @@ def _train_example_model() -> ExampleNet:

results = mean_squared_error(y_test, model(X_test).detach().numpy())
print(f"🔍 Fit model MSE={results}.")
-return model
+return model, optimizer


def train_and_upload(modelstore: ModelStore) -> dict:
# Train a PyTorch model
-model = _train_example_model()
+model, optimizer = _train_example_model()

# Upload the model to the model store
print(f'⤴️ Uploading the pytorch model to the "{DIABETES_DOMAIN}" domain.')
-meta_data = modelstore.upload(DIABETES_DOMAIN, model=model)
+meta_data = modelstore.upload(
+DIABETES_DOMAIN, model=model, optimizer=optimizer
+)
return meta_data


@@ -59,18 +59,18 @@ def _train_example_model() -> ExampleLightningNet:

results = mean_squared_error(y_test, model(X_test).detach().numpy())
print(f"🔍 Fit model MSE={results}.")
-return model
+return model, trainer


def train_and_upload(modelstore: ModelStore) -> dict:
# Train a PyTorch model
-model = _train_example_model()
+model, trainer = _train_example_model()

# Upload the model to the model store
print(
f'⤴️ Uploading the pytorch lightning model to the "{DIABETES_DOMAIN}" domain.'
)
-meta_data = modelstore.upload(DIABETES_DOMAIN, model=model)
+meta_data = modelstore.upload(DIABETES_DOMAIN, model=model, trainer=trainer)
return meta_data


8 changes: 5 additions & 3 deletions examples/examples-by-ml-library/main.py
@@ -66,9 +66,11 @@ def main(modelstore_in, ml_framework):
example = EXAMPLES[ml_framework]
meta_data = example.train_and_upload(modelstore)

-# Run the example: download and load a model
-model_id = meta_data["model"]["model_id"]
-example.load_and_test(modelstore, model_id)
+if modelstore_in != "hosted":
+# Run the example: download and load a model
+# Currently unimplemented in the hosted storage
+model_id = meta_data["model"]["model_id"]
+example.load_and_test(modelstore, model_id)

# The upload returns meta-data about the model that was uploaded
# In this example, we just print it out to the terminal
13 changes: 11 additions & 2 deletions modelstore/meta/dependencies.py
@@ -12,14 +12,23 @@

def _get_version(modname: str) -> str:
try:
+if modname == "pickle":
+# pylint: disable=import-outside-toplevel
+import pickle
+
+return pickle.format_version
if modname in sys.modules:
mod = sys.modules[modname]
else:
mod = importlib.import_module(modname)
return mod.__version__
except AttributeError:
-# Annoy does not have a __version__
-return pkg_resources.get_distribution(modname).version
+try:
+# Annoy does not have a __version__
+return pkg_resources.get_distribution(modname).version
+except:
+logger.debug("Unable to get %s's version", modname)
+return None
except ImportError:
logger.debug("%s is not installed.", modname)
return None
4 changes: 3 additions & 1 deletion modelstore/model_store.py
@@ -123,7 +123,9 @@ def upload(self, domain: str, **kwargs) -> dict:
for manager in self._managers:
if manager.matches_with(**kwargs):
return manager.upload(domain, **kwargs)
-raise ValueError("unable to upload: could not find matching manager")
+raise ValueError(
+"unable to upload: could not find matching manager (did you add all of the required kwargs?)"
+)

def load(self, domain: str, model_id: str):
meta_data = self.get_model_info(domain, model_id)
14 changes: 9 additions & 5 deletions modelstore/models/pytorch_lightning.py
@@ -28,7 +28,7 @@ class PyTorchLightningManager(ModelManager):

"""
Model persistence for PyTorch Lightning models:
-https://pytorch-lightning.readthedocs.io/en/stable/weights_loading.html#checkpoint-saving
+https://pytorch-lightning.readthedocs.io/en/latest/common/weights_loading.html#manual-saving
// @TODO: export as for onnx & torchscript
https://pytorch-lightning.readthedocs.io/en/latest/new-project.html#predict-or-deploy
@@ -49,13 +49,15 @@ def optional_dependencies(cls) -> list:
return deps + ["torch", "torchvision"]

def _required_kwargs(self):
return ["trainer"]
return ["trainer", "model"]

def matches_with(self, **kwargs) -> bool:
# pylint: disable=import-outside-toplevel
-from pytorch_lightning import Trainer
+from pytorch_lightning import LightningModule, Trainer

-return isinstance(kwargs.get("trainer"), Trainer)
+return isinstance(kwargs.get("trainer"), Trainer) and isinstance(
+kwargs.get("model"), LightningModule
+)

def _get_functions(self, **kwargs) -> list:
if not self.matches_with(**kwargs):
@@ -105,7 +107,9 @@ def _model_file_path(parent_dir: str) -> str:
return os.path.join(parent_dir, MODEL_CHECKPOINT)


-def _save_lightning_model(tmp_dir: str, trainer: "Trainer") -> str:
+def _save_lightning_model(
+tmp_dir: str, trainer: "pytorch_lightning.Trainer"
+) -> str:
file_path = _model_file_path(tmp_dir)
trainer.save_checkpoint(file_path)
return file_path
4 changes: 2 additions & 2 deletions setup.py
@@ -8,10 +8,10 @@

setup(
name="modelstore",
version="0.0.7",
version="0.0.71",
packages=find_packages(exclude=["tests", "examples", "docs"]),
include_package_data=True,
description="modelstore is a library for versioning, exporting, and storing machine learning models",
description="modelstore is a library for versioning, exporting, storing, and loading machine learning models",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/operatorai/modelstore",
2 changes: 2 additions & 0 deletions tests/meta/test_dependencies.py
@@ -39,6 +39,7 @@ def test_get_dependency_versions():
"flake8",
"isort",
"a-missing-dependency",
"pickle",
]
expected = {
"annoy": "1.17.0",
@@ -48,6 +49,7 @@
"flake8": "3.8.4",
"isort": "5.6.4",
"a-missing-dependency": None,
"pickle": "4.0",
}
result = dependencies.get_dependency_versions(test_deps)
assert result == expected
2 changes: 1 addition & 1 deletion tests/models/test_mxnet.py
@@ -124,7 +124,7 @@ def test_load_model(tmp_path, mxnet_manager, mxnet_model):
}
},
)
-y_loaded_pred = loaded_model(y).asnumpy()
+y_loaded_pred = loaded_model(x).asnumpy()

# Expect the two to be the same
assert isinstance(loaded_model, nn.SymbolBlock)
8 changes: 5 additions & 3 deletions tests/models/test_pytorch_lightning.py
@@ -122,11 +122,13 @@ def test_model_data(lightning_manager, lightning_model):


def test_required_kwargs(lightning_manager):
-assert lightning_manager._required_kwargs() == ["trainer"]
+assert lightning_manager._required_kwargs() == ["trainer", "model"]


-def test_matches_with(lightning_manager, lightning_trainer):
-assert lightning_manager.matches_with(trainer=lightning_trainer)
+def test_matches_with(lightning_manager, lightning_trainer, lightning_model):
+assert lightning_manager.matches_with(
+trainer=lightning_trainer, model=lightning_model
+)
assert not lightning_manager.matches_with(model="a-string-value")
assert not lightning_manager.matches_with(classifier=lightning_trainer)
