Commit: format
Cedric Kulbach committed Feb 14, 2023
1 parent f66b2e4 commit 2d0737c
Showing 3 changed files with 16 additions and 15 deletions.
1 change: 1 addition & 0 deletions deep_river/anomaly/__init__.py
@@ -6,6 +6,7 @@
     AnomalyMinMaxScaler,
     AnomalyStandardScaler,
 )
+
 """
 This module contains the anomaly detection algorithms for the deep_river package.
 """
7 changes: 5 additions & 2 deletions deep_river/classification/__init__.py
@@ -1,7 +1,10 @@
 from deep_river.classification.classifier import Classifier
 from deep_river.classification.rolling_classifier import RollingClassifier
-from deep_river.classification.zoo import LogisticRegression, \
-    MultiLayerPerceptron
+from deep_river.classification.zoo import (
+    LogisticRegression,
+    MultiLayerPerceptron,
+)
+
 """
 This module contains the classifiers for the deep_river package.
 """
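For reference, this change only swaps backslash continuation for parenthesized imports; the package's public surface is unchanged. A quick smoke test (illustrative, assuming deep_river and its torch dependency are installed):

from deep_river.classification import (
    Classifier,
    LogisticRegression,
    MultiLayerPerceptron,
    RollingClassifier,
)

# All four names resolve from the package root, same as before the reformat.
print(Classifier, RollingClassifier, LogisticRegression, MultiLayerPerceptron)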
23 changes: 10 additions & 13 deletions deep_river/classification/zoo.py
@@ -1,25 +1,23 @@
-from typing import Type, Union, Callable
+from typing import Callable, Type, Union
 
 from torch import nn
 
 from deep_river.classification import Classifier
 
 
-
 class LogisticRegression(Classifier):
-    """
-    """
+    """ """
 
     class LRModule(nn.Module):
         def __init__(self, n_features):
-            super(LogisticRegression.LRModule, self).__init__()
+            super().__init__()
             self.dense0 = nn.Linear(n_features, 1)
             self.softmax = nn.Softmax(dim=-1)
 
         def forward(self, X, **kwargs):
             X = self.dense0(X)
             return self.softmax(X)
 
     def __init__(
         self,
         loss_fn: Union[str, Callable] = "binary_cross_entropy_with_logits",
@@ -61,14 +59,13 @@ def _unit_test_params(cls):
             "optimizer_fn": "sgd",
         }
 
-class MultiLayerPerceptron(Classifier):
-    """
-
-    """
+
+class MultiLayerPerceptron(Classifier):
+    """ """
 
     class MLPModule(nn.Module):
         def __init__(self, n_width, n_depth, n_features):
-            super(MultiLayerPerceptron.MLPModule, self).__init__()
+            super().__init__()
             self.dense0 = nn.Linear(n_features, n_width)
             self.block = [nn.Linear(n_width, n_width) for _ in range(n_depth)]
             self.denselast = nn.Linear(n_width, 1)
@@ -96,8 +93,8 @@ def __init__(
     ):
         self.n_width = n_width
         self.n_depth = n_depth
-        kwargs['n_width'] = n_width
-        kwargs['n_depth'] = n_depth
+        kwargs["n_width"] = n_width
+        kwargs["n_depth"] = n_depth
         super().__init__(
             module=MultiLayerPerceptron.MLPModule,
             loss_fn=loss_fn,
@@ -126,4 +123,4 @@ def _unit_test_params(cls):
         yield {
             "loss_fn": "binary_cross_entropy_with_logits",
             "optimizer_fn": "sgd",
-        }
\ No newline at end of file
+        }
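For context on what the reformatted zoo classes do, here is a minimal usage sketch. It assumes deep_river's Classifier exposes river's learn_one/predict_one streaming interface and accepts the constructor keywords shown in _unit_test_params above; the Phishing dataset and Accuracy metric are illustrative choices, not part of this commit.

from river import datasets, metrics

from deep_river.classification import LogisticRegression

# Constructor keywords mirror the values yielded by _unit_test_params.
model = LogisticRegression(
    loss_fn="binary_cross_entropy_with_logits",
    optimizer_fn="sgd",
)

metric = metrics.Accuracy()
for x, y in datasets.Phishing():   # binary classification stream
    y_pred = model.predict_one(x)  # assumed river-style API
    metric.update(y, y_pred)       # score before learning (progressive validation)
    model.learn_one(x, y)

print(metric)

One design note on the code above: MLPModule keeps its hidden layers in a plain Python list (self.block = [...]), which PyTorch does not register as submodules; nn.ModuleList is the usual container when those layers' parameters should be tracked. This commit only reformats and does not change that behavior.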
