Commit 593e06b

some changes

Cedric Kulbach committed Feb 14, 2023
1 parent 8f44b6d commit 593e06b
Showing 4 changed files with 145 additions and 3 deletions.
4 changes: 3 additions & 1 deletion deep_river/anomaly/__init__.py
@@ -6,7 +6,9 @@
AnomalyMinMaxScaler,
AnomalyStandardScaler,
)

"""
This module contains the anomaly detection algorithms for the deep_river package.
"""
__all__ = [
"Autoencoder",
"RollingAutoencoder",
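With the module docstring and exports in place, the anomaly detectors can be imported straight from the subpackage; a quick illustrative snippet (not part of the commit):

from deep_river.anomaly import Autoencoder, RollingAutoencoder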
8 changes: 7 additions & 1 deletion deep_river/classification/__init__.py
@@ -1,7 +1,13 @@
"""
This module contains the classifiers for the deep_river package.
"""
from deep_river.classification.classifier import Classifier
from deep_river.classification.rolling_classifier import RollingClassifier
from deep_river.classification.zoo import (
    LogisticRegression,
    MultiLayerPerceptron,
)
__all__ = [
"Classifier",
"RollingClassifier",
"MultiLayerPerceptron",
"LogisticRegression"
]
130 changes: 130 additions & 0 deletions deep_river/classification/zoo.py
@@ -0,0 +1,130 @@
from typing import Callable, Union

from torch import nn

from deep_river.classification import Classifier


class LogisticRegression(Classifier):
    """
    Incremental logistic regression classifier: a single linear layer
    trained online through the deep_river ``Classifier`` interface.
    """

    class LRModule(nn.Module):
        def __init__(self, n_features):
            super().__init__()
            self.dense0 = nn.Linear(n_features, 1)

        def forward(self, X, **kwargs):
            # Return raw logits: the default loss
            # (binary_cross_entropy_with_logits) and output_is_logit=True
            # both expect unnormalized outputs, so no softmax here.
            return self.dense0(X)

    def __init__(
        self,
        loss_fn: Union[str, Callable] = "binary_cross_entropy_with_logits",
        optimizer_fn: Union[str, Callable] = "sgd",
        lr: float = 1e-3,
        output_is_logit: bool = True,
        is_class_incremental: bool = False,
        device: str = "cpu",
        seed: int = 42,
        **kwargs,
    ):
        super().__init__(
            module=LogisticRegression.LRModule,
            loss_fn=loss_fn,
            output_is_logit=output_is_logit,
            is_class_incremental=is_class_incremental,
            optimizer_fn=optimizer_fn,
            device=device,
            lr=lr,
            seed=seed,
            **kwargs,
        )

    @classmethod
    def _unit_test_params(cls):
        """
        Yields dictionaries of parameters to be used for unit testing
        the respective class.

        Yields
        ------
        dict
            Dictionary of parameters to be used for unit testing the
            respective class.
        """
        yield {
            "loss_fn": "binary_cross_entropy_with_logits",
            "optimizer_fn": "sgd",
        }


class MultiLayerPerceptron(Classifier):
    """
    Incremental multi-layer perceptron classifier with ``n_depth`` hidden
    layers of ``n_width`` units each, trained online through the
    deep_river ``Classifier`` interface.
    """

    class MLPModule(nn.Module):
        def __init__(self, n_width, n_depth, n_features):
            super().__init__()
            self.dense0 = nn.Linear(n_features, n_width)
            # nn.ModuleList registers the hidden layers as submodules so
            # their parameters are visible to the optimizer; a plain Python
            # list would silently exclude them from training.
            self.block = nn.ModuleList(
                [nn.Linear(n_width, n_width) for _ in range(n_depth)]
            )
            self.denselast = nn.Linear(n_width, 1)
            self.activation = nn.ReLU()

        def forward(self, X, **kwargs):
            # A nonlinearity between layers; stacked linear layers without
            # one would collapse into a single linear map.
            X = self.activation(self.dense0(X))
            for layer in self.block:
                X = self.activation(layer(X))
            # Return raw logits for binary_cross_entropy_with_logits.
            return self.denselast(X)

    def __init__(
        self,
        n_width: int = 5,
        n_depth: int = 5,
        loss_fn: Union[str, Callable] = "binary_cross_entropy_with_logits",
        optimizer_fn: Union[str, Callable] = "sgd",
        lr: float = 1e-3,
        output_is_logit: bool = True,
        is_class_incremental: bool = False,
        device: str = "cpu",
        seed: int = 42,
        **kwargs,
    ):
        self.n_width = n_width
        self.n_depth = n_depth
        # Forward the architecture hyperparameters to MLPModule.__init__.
        kwargs["n_width"] = n_width
        kwargs["n_depth"] = n_depth
        super().__init__(
            module=MultiLayerPerceptron.MLPModule,
            loss_fn=loss_fn,
            output_is_logit=output_is_logit,
            is_class_incremental=is_class_incremental,
            optimizer_fn=optimizer_fn,
            device=device,
            lr=lr,
            seed=seed,
            **kwargs,
        )

    @classmethod
    def _unit_test_params(cls):
        """
        Yields dictionaries of parameters to be used for unit testing
        the respective class.

        Yields
        ------
        dict
            Dictionary of parameters to be used for unit testing the
            respective class.
        """
        yield {
            "loss_fn": "binary_cross_entropy_with_logits",
            "optimizer_fn": "sgd",
        }
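The zoo models follow river's streaming API through the Classifier base class. A minimal progressive-validation sketch (assuming river is installed; datasets.Phishing is river's built-in binary classification stream):

from river import datasets, metrics

from deep_river.classification import MultiLayerPerceptron

model = MultiLayerPerceptron(n_width=5, n_depth=2, lr=1e-3)
metric = metrics.Accuracy()

# Test-then-train: predict on each sample before learning from it.
for x, y in datasets.Phishing():
    y_pred = model.predict_one(x)
    metric.update(y, y_pred)
    model.learn_one(x, y)

print(metric)

The same loop works with LogisticRegression, which takes no architecture arguments.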
6 changes: 5 additions & 1 deletion deep_river/regression/__init__.py
@@ -1,7 +1,11 @@
"""
This module contains the regressors for the deep_river package.
"""
from deep_river.regression.regressor import Regressor
from deep_river.regression.rolling_regressor import RollingRegressor
from deep_river.regression.multioutput import MultiTargetRegressor
__all__ = [
"Regressor",
"RollingRegressor",
"MultiTargetRegressor"
]
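As with the other subpackages, the updated exports make the public regression API importable in one line; illustrative snippet (not part of the commit):

from deep_river.regression import MultiTargetRegressor, Regressor, RollingRegressor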
