Cedric Kulbach committed on Feb 14, 2023 · 1 parent 8f44b6d · commit 593e06b · Showing 4 changed files with 145 additions and 3 deletions.
@@ -1,7 +1,13 @@
from deep_river.classification.classifier import Classifier
from deep_river.classification.rolling_classifier import RollingClassifier

from deep_river.classification.zoo import LogisticRegression, \
    MultiLayerPerceptron
"""
This module contains the classifiers for the deep_river package.
"""
__all__ = [
    "Classifier",
    "RollingClassifier",
    "MultiLayerPerceptron",
    "LogisticRegression"
]
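Assuming the hunk above edits deep_river/classification/__init__.py, the zoo models can now be imported directly from the classification subpackage. A minimal sketch (not part of the commit; hyperparameters shown are illustrative defaults):

from deep_river.classification import LogisticRegression, MultiLayerPerceptron

# Both estimators wrap a torch module behind deep_river's incremental Classifier API.
log_reg = LogisticRegression(optimizer_fn="sgd", lr=1e-3)
mlp = MultiLayerPerceptron(n_width=5, n_depth=5)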
@@ -0,0 +1,130 @@
from typing import Type, Union, Callable

from torch import nn

from deep_river.classification import Classifier


class LogisticRegression(Classifier):
    """
    Incremental logistic regression classifier built from a single linear layer.
    """

    class LRModule(nn.Module):
        def __init__(self, n_features):
            super(LogisticRegression.LRModule, self).__init__()
            self.dense0 = nn.Linear(n_features, 1)
            self.softmax = nn.Softmax(dim=-1)

        def forward(self, X, **kwargs):
            X = self.dense0(X)
            return self.softmax(X)

    def __init__(
        self,
        loss_fn: Union[str, Callable] = "binary_cross_entropy_with_logits",
        optimizer_fn: Union[str, Callable] = "sgd",
        lr: float = 1e-3,
        output_is_logit: bool = True,
        is_class_incremental: bool = False,
        device: str = "cpu",
        seed: int = 42,
        **kwargs,
    ):
        super().__init__(
            module=LogisticRegression.LRModule,
            loss_fn=loss_fn,
            output_is_logit=output_is_logit,
            is_class_incremental=is_class_incremental,
            optimizer_fn=optimizer_fn,
            device=device,
            lr=lr,
            seed=seed,
            **kwargs,
        )

    @classmethod
    def _unit_test_params(cls):
        """
        Yields a dictionary of parameters to be used for unit testing the
        respective class.

        Yields
        -------
        dict
            Dictionary of parameters to be used for unit testing the
            respective class.
        """
        yield {
            "loss_fn": "binary_cross_entropy_with_logits",
            "optimizer_fn": "sgd",
        }


class MultiLayerPerceptron(Classifier):
    """
    Incremental multi-layer perceptron classifier with configurable width and depth.
    """

    class MLPModule(nn.Module):
        def __init__(self, n_width, n_depth, n_features):
            super(MultiLayerPerceptron.MLPModule, self).__init__()
            self.dense0 = nn.Linear(n_features, n_width)
            # nn.ModuleList registers the hidden layers as submodules, so their
            # parameters are tracked by the optimizer (a plain Python list is not).
            self.block = nn.ModuleList(
                [nn.Linear(n_width, n_width) for _ in range(n_depth)]
            )
            self.denselast = nn.Linear(n_width, 1)
            self.softmax = nn.Softmax(dim=-1)

        def forward(self, X, **kwargs):
            X = self.dense0(X)
            for layer in self.block:
                X = layer(X)
            X = self.denselast(X)
            return self.softmax(X)

    def __init__(
        self,
        n_width: int = 5,
        n_depth: int = 5,
        loss_fn: Union[str, Callable] = "binary_cross_entropy_with_logits",
        optimizer_fn: Union[str, Callable] = "sgd",
        lr: float = 1e-3,
        output_is_logit: bool = True,
        is_class_incremental: bool = False,
        device: str = "cpu",
        seed: int = 42,
        **kwargs,
    ):
        self.n_width = n_width
        self.n_depth = n_depth
        kwargs['n_width'] = n_width
        kwargs['n_depth'] = n_depth
        super().__init__(
            module=MultiLayerPerceptron.MLPModule,
            loss_fn=loss_fn,
            output_is_logit=output_is_logit,
            is_class_incremental=is_class_incremental,
            optimizer_fn=optimizer_fn,
            device=device,
            lr=lr,
            seed=seed,
            **kwargs,
        )

    @classmethod
    def _unit_test_params(cls):
        """
        Yields a dictionary of parameters to be used for unit testing the
        respective class.

        Yields
        -------
        dict
            Dictionary of parameters to be used for unit testing the
            respective class.
        """
        yield {
            "loss_fn": "binary_cross_entropy_with_logits",
            "optimizer_fn": "sgd",
        }
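For context, a minimal usage sketch of the new zoo classifiers (not part of the commit). It assumes the river-style streaming API (learn_one / predict_one) that deep_river estimators expose; the dataset, metric, and pipeline are illustrative choices only:

from river import compose, datasets, metrics, preprocessing
from deep_river.classification import MultiLayerPerceptron

# Scale incoming features, then feed them to the incremental MLP.
model = compose.Pipeline(
    preprocessing.StandardScaler(),
    MultiLayerPerceptron(n_width=5, n_depth=2, lr=1e-3),
)
metric = metrics.Accuracy()

for x, y in datasets.Phishing():
    y_pred = model.predict_one(x)  # predict before learning (prequential evaluation)
    metric.update(y, y_pred)
    model.learn_one(x, y)          # single-sample incremental update

print(metric)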
@@ -1,7 +1,11 @@
from deep_river.regression.regressor import Regressor
from deep_river.regression.rolling_regressor import RollingRegressor

from deep_river.regression.multioutput import MultiTargetRegressor
"""
This module contains the regressors for the deep_river package.
"""
__all__ = [
    "Regressor",
    "RollingRegressor",
    "MultiTargetRegressor"
]
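Likewise, assuming the hunk above edits deep_river/regression/__init__.py, MultiTargetRegressor becomes importable directly from the regression subpackage (not part of the commit, shown for illustration):

from deep_river.regression import MultiTargetRegressor, Regressor, RollingRegressor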