
Commit

add constrained SVD model
williamgilpin committed Nov 28, 2020
1 parent d78924e commit 80c349e
Showing 9 changed files with 65 additions and 24 deletions.
Binary file added datasets/accelerometer_subject3_2.csv.gz
Binary file added datasets/dwelling_worm1_2.csv.gz
Binary file added datasets/dwelling_worm2_2.csv.gz
Binary file added datasets/roaming_worm2_2.csv.gz
21 changes: 5 additions & 16 deletions demos.ipynb

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion exploratory.ipynb
@@ -455,7 +455,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.9"
"version": "3.8.5"
}
},
"nbformat": 4,
14 changes: 14 additions & 0 deletions fnn/models.py
@@ -510,6 +510,20 @@ def transform(self, X, y=None):
# **kwargs
# )

class ETDConstrainedEmbedding(NeuralNetworkEmbedding):
    def __init__(
        self,
        *args,
        **kwargs
    ):
        super().__init__(*args, **kwargs)
        # drop time_window from kwargs (if present) so it is not passed to ETDConstrained twice
        kwargs.pop("time_window", None)
        self.model = ETDConstrained(
            self.n_latent,
            self.time_window,
            **kwargs
        )

class MLPEmbedding(NeuralNetworkEmbedding):
    def __init__(
        self,
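For orientation, here is a minimal usage sketch of the new embedding class. It assumes ETDConstrainedEmbedding exposes the same fit_transform interface as the other NeuralNetworkEmbedding subclasses in fnn/models.py (e.g. MLPEmbedding); the signal below is synthetic and purely illustrative.

import numpy as np
from fnn.models import ETDConstrainedEmbedding

# synthetic univariate time series, for illustration only
t = np.linspace(0, 100, 5000)
signal = np.sin(t) + 0.1 * np.random.randn(t.size)

# assumed interface: n_latent first, time_window as a keyword (mirroring MLPEmbedding)
model = ETDConstrainedEmbedding(3, time_window=10)
embedding = model.fit_transform(signal)  # expected shape: (n_windows, 3)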
45 changes: 44 additions & 1 deletion fnn/networks.py
@@ -547,4 +547,47 @@ def __init__(
    def call(self, inputs, training=False):
        outputs = self.decoder(self.encoder(inputs))
        return outputs



class ETDConstrained(tf.keras.Model):
    """
    A batchwise fully-connected eigen-time-delay embedding (linear autoencoder)
    """
    def __init__(
        self,
        n_latent,
        time_window,
        n_features=1,
        latent_regularizer=None,
        rnn_opts=dict(),
        random_state=None,
        **kwargs
    ):
        super(ETDConstrained, self).__init__()
        self.n_latent = n_latent
        self.time_window = time_window
        self.n_features = n_features

        # Seed TensorFlow's global random state for reproducibility
        tf.random.set_seed(random_state)

        # Encoder: flatten each time-delay window and project it linearly onto n_latent coordinates
        self.encoder = tf.keras.Sequential()
        self.encoder.add(tf.keras.layers.InputLayer(input_shape=(time_window, n_features)))
        self.encoder.add(tf.keras.layers.Flatten())
        self.encoder.add(tf.keras.layers.Dense(n_latent, activation=None, **rnn_opts))
        self.encoder.add(
            tf.keras.layers.Reshape(
                (n_latent,),
                activity_regularizer=latent_regularizer
            )
        )

        # Decoder: linearly map the latent coordinates back to a full window
        self.decoder = tf.keras.Sequential()
        self.decoder.add(tf.keras.layers.Flatten())
        self.decoder.add(tf.keras.layers.Dense(time_window * n_features, activation=None, **rnn_opts))
        self.decoder.add(tf.keras.layers.Reshape((time_window, n_features)))

    def call(self, inputs, training=False):
        outputs = self.decoder(self.encoder(inputs))
        return outputs
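For reference, a short sketch of how the new network might be exercised directly as a standalone Keras model on a batch of time-delay windows; the data, optimizer, and training settings here are assumptions for illustration and are not part of the commit.

import numpy as np
from fnn.networks import ETDConstrained

# batch of time-delay windows with shape (batch, time_window, n_features); random data for illustration
X = np.random.randn(256, 10, 1).astype("float32")

model = ETDConstrained(n_latent=3, time_window=10)
model.compile(optimizer="adam", loss="mse")
model.fit(X, X, epochs=5, batch_size=32, verbose=0)  # autoencode: reconstruct each window

latent = model.encoder.predict(X)  # (256, 3) latent coordinates
recon = model(X)                   # reconstructed windows, shape (256, 10, 1)

Because both the encoder and the decoder are single linear Dense layers trained with a reconstruction loss, the latent coordinates should span roughly the same subspace as the leading singular vectors of the signal's Hankel (time-delay) matrix, i.e. an eigen-time-delay embedding, which appears to be the sense in which this is a "constrained SVD" model.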
7 changes: 1 addition & 6 deletions fnn/utils.py
@@ -1,12 +1,7 @@
import numpy as np
import warnings

-import scipy
-from scipy.linalg import hankel, orthogonal_procrustes
-from scipy.signal import periodogram, argrelextrema, savgol_filter
-from scipy.spatial.distance import pdist, squareform, directed_hausdorff
-from scipy.spatial import procrustes
-
+from scipy.linalg import hankel
import matplotlib.pyplot as plt

###------------------------------------###
