refactor eigen decomposition
haifengl committed Jul 19, 2017
1 parent 33a3d66 commit 0809c17
Showing 31 changed files with 2,187 additions and 1,673 deletions.
8 changes: 5 additions & 3 deletions core/src/main/java/smile/classification/FLD.java
@@ -21,7 +21,7 @@
import smile.math.Math;
import smile.math.matrix.Matrix;
import smile.math.matrix.DenseMatrix;
import smile.math.matrix.EigenValueDecomposition;
import smile.math.matrix.EVD;
import smile.projection.Projection;

/**
@@ -272,7 +272,8 @@ public FLD(double[][] x, int[] y, int L, double tol) {
}
}

EigenValueDecomposition eigen = new EigenValueDecomposition(T, true);
T.setSymmetric(true);
EVD eigen = T.eigen();

tol = tol * tol;
double[] s = eigen.getEigenValues();
@@ -294,7 +295,8 @@ public FLD(double[][] x, int[] y, int L, double tol) {
}

B = U.abmm(UB);
eigen = new EigenValueDecomposition(B, true);
B.setSymmetric(true);
eigen = B.eigen();

U = eigen.getEigenVectors();
scaling = Matrix.zeros(p, L);
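The FLD change above is the pattern repeated throughout this commit: instead of constructing an EigenValueDecomposition directly, the caller flags the dense matrix as symmetric and asks it for its own decomposition. A minimal sketch of the new call pattern, using only methods that appear in this diff (Matrix.zeros, set, setSymmetric, eigen, getEigenValues, getEigenVectors); the class name and the 2x2 values are made up for illustration:

import smile.math.matrix.DenseMatrix;
import smile.math.matrix.EVD;
import smile.math.matrix.Matrix;

public class SymmetricEigenSketch {
    public static void main(String[] args) {
        // Build a small symmetric matrix; the entries are arbitrary.
        DenseMatrix T = Matrix.zeros(2, 2);
        T.set(0, 0, 4.0);
        T.set(0, 1, 1.0);
        T.set(1, 0, 1.0);
        T.set(1, 1, 3.0);

        // New API per this commit: flag the matrix as symmetric, then decompose.
        // Previously: EigenValueDecomposition eigen = new EigenValueDecomposition(T, true);
        T.setSymmetric(true);
        EVD eigen = T.eigen();

        double[] values = eigen.getEigenValues();       // eigenvalues
        DenseMatrix vectors = eigen.getEigenVectors();  // eigenvector matrix
        System.out.println(values[0] + ", " + values[1]);
    }
}

Per the new DenseMatrix javadoc later in this commit, eigen() overwrites the matrix, so decompose a copy with eigen(false) if the original values are still needed.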
5 changes: 3 additions & 2 deletions core/src/main/java/smile/classification/LDA.java
@@ -21,7 +21,7 @@
import smile.math.Math;
import smile.math.matrix.Matrix;
import smile.math.matrix.DenseMatrix;
import smile.math.matrix.EigenValueDecomposition;
import smile.math.matrix.EVD;

/**
* Linear discriminant analysis. LDA is based on the Bayes decision theory
@@ -293,7 +293,8 @@ public LDA(double[][] x, int[] y, double[] priori, double tol) {
}
}

EigenValueDecomposition evd = new EigenValueDecomposition(C, true);
C.setSymmetric(true);
EVD evd = C.eigen();

for (double s : evd.getEigenValues()) {
if (s < tol) {
5 changes: 3 additions & 2 deletions core/src/main/java/smile/classification/QDA.java
@@ -21,7 +21,7 @@
import smile.math.Math;
import smile.math.matrix.Matrix;
import smile.math.matrix.DenseMatrix;
import smile.math.matrix.EigenValueDecomposition;
import smile.math.matrix.EVD;

/**
* Quadratic discriminant analysis. QDA is closely related to linear discriminant
@@ -285,7 +285,8 @@ public QDA(double[][] x, int[] y, double[] priori, double tol) {
}
}

EigenValueDecomposition eigen = new EigenValueDecomposition(cov[i], true);
cov[i].setSymmetric(true);
EVD eigen = cov[i].eigen();

for (double s : eigen.getEigenValues()) {
if (s < tol) {
5 changes: 3 additions & 2 deletions core/src/main/java/smile/classification/RDA.java
@@ -21,7 +21,7 @@
import smile.math.Math;
import smile.math.matrix.Matrix;
import smile.math.matrix.DenseMatrix;
import smile.math.matrix.EigenValueDecomposition;
import smile.math.matrix.EVD;

/**
* Regularized discriminant analysis. RDA is a compromise between LDA and QDA,
@@ -308,7 +308,8 @@ public RDA(double[][] x, int[] y, double[] priori, double alpha, double tol) {
}
}

EigenValueDecomposition eigen = new EigenValueDecomposition(cov[i], true);
cov[i].setSymmetric(true);
EVD eigen = cov[i].eigen();

for (double s : eigen.getEigenValues()) {
if (s < tol) {
8 changes: 4 additions & 4 deletions core/src/main/java/smile/clustering/SpectralClustering.java
@@ -21,7 +21,7 @@
import smile.math.Math;
import smile.math.matrix.Matrix;
import smile.math.matrix.DenseMatrix;
import smile.math.matrix.EigenValueDecomposition;
import smile.math.matrix.EVD;
import smile.math.matrix.Lanczos;

/**
@@ -124,7 +124,7 @@ public SpectralClustering(double[][] W, int k) {
}
}

EigenValueDecomposition eigen = Lanczos.eigen(L, k);
EVD eigen = Lanczos.eigen(L, k);
double[][] Y = eigen.getEigenVectors().array();
for (int i = 0; i < n; i++) {
Math.unitize2(Y[i]);
@@ -191,7 +191,7 @@ public SpectralClustering(double[][] data, int k, double sigma) {
}
}

EigenValueDecomposition eigen = Lanczos.eigen(L, k);
EVD eigen = Lanczos.eigen(L, k);
double[][] Y = eigen.getEigenVectors().array();
for (int i = 0; i < n; i++) {
Math.unitize2(Y[i]);
@@ -273,7 +273,7 @@ public SpectralClustering(double[][] data, int k, int l, double sigma) {
}
}

EigenValueDecomposition eigen = Lanczos.eigen(W, k);
EVD eigen = Lanczos.eigen(W, k);
double[] e = eigen.getEigenValues();
double scale = Math.sqrt((double)l / n);
for (int i = 0; i < k; i++) {
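The spectral clustering hunks (and the IsoMap, LLE, LaplacianEigenmap, MDS, KPCA and SOM hunks below) keep the truncated Lanczos path and only change the result type to EVD. A small sketch of that usage, assuming the Lanczos.eigen(matrix, k) call shown in these hunks, which returns the k largest eigenpairs of a symmetric matrix; the matrix contents and class name are invented:

import smile.math.matrix.DenseMatrix;
import smile.math.matrix.EVD;
import smile.math.matrix.Lanczos;
import smile.math.matrix.Matrix;

public class LanczosSketch {
    public static void main(String[] args) {
        int n = 5, k = 2;
        // A symmetric affinity-like matrix with arbitrary entries.
        DenseMatrix W = Matrix.zeros(n, n);
        for (int i = 0; i < n; i++) {
            for (int j = 0; j <= i; j++) {
                double w = 1.0 / (1 + Math.abs(i - j));
                W.set(i, j, w);
                W.set(j, i, w);
            }
        }

        // Truncated decomposition: only the k largest eigenpairs are computed.
        EVD eigen = Lanczos.eigen(W, k);
        double[] e = eigen.getEigenValues();             // k eigenvalues
        double[][] Y = eigen.getEigenVectors().array();  // n-by-k eigenvector matrix
        System.out.println(e.length + " eigenvalues, " + Y.length + " rows");
    }
}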
4 changes: 2 additions & 2 deletions core/src/main/java/smile/manifold/IsoMap.java
@@ -24,7 +24,7 @@
import smile.math.Math;
import smile.math.matrix.Matrix;
import smile.math.matrix.DenseMatrix;
import smile.math.matrix.EigenValueDecomposition;
import smile.math.matrix.EVD;
import smile.math.matrix.Lanczos;
import smile.neighbor.CoverTree;
import smile.neighbor.KDTree;
@@ -180,7 +180,7 @@ public IsoMap(double[][] data, int d, int k, boolean CIsomap) {
}
}

EigenValueDecomposition eigen = Lanczos.eigen(B, d);
EVD eigen = Lanczos.eigen(B, d);
coordinates = new double[n][d];
for (int j = 0; j < d; j++) {
if (eigen.getEigenValues()[j] < 0) {
4 changes: 2 additions & 2 deletions core/src/main/java/smile/manifold/LLE.java
@@ -27,7 +27,7 @@
import smile.math.matrix.DenseMatrix;
import smile.math.matrix.SparseMatrix;
import smile.math.matrix.LU;
import smile.math.matrix.EigenValueDecomposition;
import smile.math.matrix.EVD;
import smile.math.matrix.Lanczos;
import smile.neighbor.CoverTree;
import smile.neighbor.KDTree;
@@ -205,7 +205,7 @@ public int compare(Neighbor<double[], double[]> o1, Neighbor<double[], double[]>
SparseMatrix W = new SparseMatrix(n, n, w, rowIndex, colIndex);
SparseMatrix M = W.aat();

EigenValueDecomposition eigen = Lanczos.eigen(M, n);
EVD eigen = Lanczos.eigen(M, n);

coordinates = new double[n][d];
for (int j = 0; j < d; j++) {
4 changes: 2 additions & 2 deletions core/src/main/java/smile/manifold/LaplacianEigenmap.java
@@ -25,7 +25,7 @@
import smile.math.Math;
import smile.math.SparseArray;
import smile.math.distance.EuclideanDistance;
import smile.math.matrix.EigenValueDecomposition;
import smile.math.matrix.EVD;
import smile.math.matrix.Lanczos;
import smile.math.matrix.PowerIteration;
import smile.math.matrix.SparseMatrix;
@@ -199,7 +199,7 @@ public LaplacianEigenmap(double[][] data, int d, int k, double t) {
}

L = W.toSparseMatrix();
EigenValueDecomposition eigen = Lanczos.eigen(L, d + 1);
EVD eigen = Lanczos.eigen(L, d + 1);

coordinates = new double[n][d];
for (int j = 0; j < d; j++) {
6 changes: 3 additions & 3 deletions core/src/main/java/smile/mds/MDS.java
@@ -19,7 +19,7 @@
import smile.math.matrix.Lanczos;
import smile.math.matrix.Matrix;
import smile.math.matrix.DenseMatrix;
import smile.math.matrix.EigenValueDecomposition;
import smile.math.matrix.EVD;

/**
* Classical multidimensional scaling, also known as principal coordinates
@@ -167,7 +167,7 @@ public MDS(double[][] proximity, int k, boolean add) {
}
}

EigenValueDecomposition eigen = new EigenValueDecomposition(Z, false, true);
EVD eigen = Lanczos.eigen(Z, 1);
double c = Math.max(eigen.getEigenValues());

for (int i = 0; i < n; i++) {
@@ -180,7 +180,7 @@ public MDS(double[][] proximity, int k, boolean add) {
}
}

EigenValueDecomposition eigen = Lanczos.eigen(B, k);
EVD eigen = Lanczos.eigen(B, k);

coordinates = new double[n][k];
for (int j = 0; j < k; j++) {
4 changes: 2 additions & 2 deletions core/src/main/java/smile/projection/KPCA.java
@@ -20,7 +20,7 @@
import smile.math.kernel.MercerKernel;
import smile.math.matrix.Matrix;
import smile.math.matrix.DenseMatrix;
import smile.math.matrix.EigenValueDecomposition;
import smile.math.matrix.EVD;
import smile.math.matrix.Lanczos;

/**
@@ -154,7 +154,7 @@ public KPCA(T[] data, MercerKernel<T> kernel, int k, double threshold) {
}
}

EigenValueDecomposition eigen = Lanczos.eigen(K, k);
EVD eigen = Lanczos.eigen(K, k);

p = 0;
for (int i = 0; i < k; i++) {
5 changes: 3 additions & 2 deletions core/src/main/java/smile/projection/PCA.java
@@ -19,7 +19,7 @@
import smile.math.Math;
import smile.math.matrix.Matrix;
import smile.math.matrix.DenseMatrix;
import smile.math.matrix.EigenValueDecomposition;
import smile.math.matrix.EVD;
import smile.math.matrix.SVD;

/**
@@ -169,7 +169,8 @@ public PCA(double[][] data, boolean cor) {
}
}

EigenValueDecomposition eigen = new EigenValueDecomposition(cov, true);
cov.setSymmetric(true);
EVD eigen = cov.eigen();

DenseMatrix loadings = eigen.getEigenVectors();
if (cor) {
6 changes: 3 additions & 3 deletions core/src/main/java/smile/projection/PPCA.java
@@ -20,7 +20,7 @@
import smile.math.matrix.Matrix;
import smile.math.matrix.DenseMatrix;
import smile.math.matrix.Cholesky;
import smile.math.matrix.EigenValueDecomposition;
import smile.math.matrix.EVD;

/**
* Probabilistic principal component analysis. PPCA is a simplified factor analysis
@@ -149,8 +149,8 @@ public PPCA(double[][] data, int k) {
}
}


EigenValueDecomposition eigen = new EigenValueDecomposition(cov);
cov.setSymmetric(true);
EVD eigen = cov.eigen();
double[] evalues = eigen.getEigenValues();
DenseMatrix evectors = eigen.getEigenVectors();

core/src/main/java/smile/regression/GaussianProcessRegression.java
@@ -23,7 +23,7 @@
import smile.math.matrix.DenseMatrix;
import smile.math.matrix.Cholesky;
import smile.math.matrix.LU;
import smile.math.matrix.EigenValueDecomposition;
import smile.math.matrix.EVD;

/**
* Gaussian Process for Regression. A Gaussian process is a stochastic process
@@ -264,7 +264,8 @@ public GaussianProcessRegression(T[] x, double[] y, T[] t, MercerKernel<T> kerne
}
}

EigenValueDecomposition eigen = new EigenValueDecomposition(W, true);
W.setSymmetric(true);
EVD eigen = W.eigen();
DenseMatrix U = eigen.getEigenVectors();
DenseMatrix D = eigen.getD();
for (int i = 0; i < m; i++) {
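The Gaussian process hunk above also reads back the factor matrices of the decomposition: U from getEigenVectors() and the diagonal eigenvalue matrix D from getD(). A compact sketch of that access pattern; the matrix values and class name are illustrative only:

import smile.math.matrix.DenseMatrix;
import smile.math.matrix.EVD;
import smile.math.matrix.Matrix;

public class EvdFactorsSketch {
    public static void main(String[] args) {
        int m = 3;
        DenseMatrix W = Matrix.zeros(m, m);
        // Arbitrary symmetric test values.
        double[][] a = {{2, 1, 0}, {1, 2, 1}, {0, 1, 2}};
        for (int i = 0; i < m; i++) {
            for (int j = 0; j < m; j++) {
                W.set(i, j, a[i][j]);
            }
        }

        W.setSymmetric(true);
        EVD eigen = W.eigen();
        DenseMatrix U = eigen.getEigenVectors();
        DenseMatrix D = eigen.getD();   // eigenvalues on the diagonal
        for (int i = 0; i < m; i++) {
            // i-th eigenvalue and the first component of the matching eigenvector column
            System.out.println(D.get(i, i) + " : " + U.get(0, i));
        }
    }
}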
4 changes: 2 additions & 2 deletions core/src/main/java/smile/vq/SOM.java
@@ -27,7 +27,7 @@
import smile.math.matrix.Lanczos;
import smile.math.matrix.Matrix;
import smile.math.matrix.DenseMatrix;
import smile.math.matrix.EigenValueDecomposition;
import smile.math.matrix.EVD;

/**
* Self-Organizing Map. An SOM is an unsupervised learning method to produce
@@ -238,7 +238,7 @@ public SOM(double[][] data, int width, int height) {
}
}

EigenValueDecomposition eigen = Lanczos.eigen(V, 2);
EVD eigen = Lanczos.eigen(V, 2);
double[] v1 = new double[d];
double[] v2 = new double[d];
for (int i = 0; i < d; i++) {
10 changes: 0 additions & 10 deletions math/src/main/java/smile/math/matrix/BandMatrix.java
@@ -175,16 +175,6 @@ public BandMatrix aat() {
throw new UnsupportedOperationException();
}

/**
* Returns the k largest eigen pairs. Only works for symmetric matrix.
*/
public EigenValueDecomposition eigen(int k) {
if (m1 != m2) {
throw new UnsupportedOperationException("The matrix is not square.");
}
return Lanczos.eigen(this, k);
}

/**
* Returns the matrix determinant.
*/
43 changes: 43 additions & 0 deletions math/src/main/java/smile/math/matrix/DenseMatrix.java
@@ -43,6 +43,16 @@ public interface DenseMatrix extends Matrix, MatrixMultiplication<DenseMatrix, D
*/
public int ld();

/** Returns true if the matrix is symmetric. */
public boolean isSymmetric();

/**
* Sets whether the matrix is symmetric. It is the caller's responsibility to
* ensure that the matrix is actually symmetric. Note that this property is not
* updated automatically when the matrix values change.
*/
public void setSymmetric(boolean symmetric);

/**
* Set the entry value at row i and column j.
*/
@@ -117,6 +127,39 @@ public default SVD svd(boolean inPlace) {
return a.svd();
}

/**
* Returns the eigenvalue decomposition. Note that the input matrix
* will be overwritten on output.
*/
public EVD eigen();

/**
* Returns the eigenvalue decomposition.
* @param inPlace if true, this matrix will be overwritten by the decomposition on output.
*/
public default EVD eigen(boolean inPlace) {
DenseMatrix a = inPlace ? this : copy();
return a.eigen();
}

/**
* Returns the eigenvalues in an array of size 2N. The first half and second half
* of the returned array contain the real and imaginary parts, respectively, of the
* computed eigenvalues.
*/
public double[] eig();

/**
* Returns the eigenvalues in an array of size 2N. The first half and second half
* of the returned array contain the real and imaginary parts, respectively, of the
* computed eigenvalues.
* @param inPlace if true, this matrix will be overwritten by the decomposition on output.
*/
public default double[] eig(boolean inPlace) {
DenseMatrix a = inPlace ? this : copy();
return a.eig();
}

/**
* Returns the matrix transpose.
*/
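The new DenseMatrix methods above separate the full decomposition (eigen) from an eigenvalues-only computation (eig), each with an in-place and a copying variant. A sketch of reading back the 2N-length result that the eig javadoc describes, on a deliberately non-symmetric matrix so the imaginary half is non-trivial; the values and class name are made up:

import smile.math.matrix.DenseMatrix;
import smile.math.matrix.Matrix;

public class EigValuesSketch {
    public static void main(String[] args) {
        int n = 3;
        DenseMatrix A = Matrix.zeros(n, n);
        // A rotation-like 2x2 block plus one real eigenvalue; entries are arbitrary.
        A.set(0, 1, 1.0);
        A.set(1, 0, -1.0);
        A.set(2, 2, 2.0);

        // eig(false) decomposes a copy, leaving A untouched.
        double[] ev = A.eig(false);
        for (int i = 0; i < n; i++) {
            double re = ev[i];      // real parts in the first half
            double im = ev[n + i];  // imaginary parts in the second half
            System.out.println(re + " + " + im + "i");
        }
    }
}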