Created April 21, 2011 12:16
locally linear embedding - swiss roll example
# -*- coding: utf-8 -*-
"""
===================================
Swiss Roll reduction with LLE
===================================

An illustration of Swiss Roll reduction
with locally linear embedding
"""
import numpy as np
import pylab as pl

from scipy.sparse import linalg, eye
from pyamg import smoothed_aggregation_solver
from scikits.learn import neighbors

################################################################################
# locally linear embedding function
def locally_linear_embedding(X, n_neighbors, out_dim, tol=1e-6, max_iter=200):
    # barycenter weights: each sample is expressed as a weighted
    # combination of its k nearest neighbors
    W = neighbors.kneighbors_graph(
        X, n_neighbors=n_neighbors, mode='barycenter')

    # M = (I-W)' (I-W)
    A = eye(*W.shape, format=W.format) - W
    A = (A.T).dot(A).tocsr()

    # initial approximation to the eigenvectors
    X = np.random.rand(W.shape[0], out_dim)

    # algebraic multigrid preconditioner to speed up convergence
    ml = smoothed_aggregation_solver(A, symmetry='symmetric')
    prec = ml.aspreconditioner()

    # compute the smallest eigenvalues and eigenvectors with LOBPCG
    eigen_values, eigen_vectors = linalg.lobpcg(
        A, X, M=prec, largest=False, tol=tol, maxiter=max_iter)

    index = np.argsort(eigen_values)
    return eigen_vectors[:, index], np.sum(eigen_values)

################################################################################
# generate the swiss roll
n_samples, n_features = 2000, 3
n_turns, radius = 1.2, 1.0
rng = np.random.RandomState(0)
t = rng.uniform(low=0, high=1, size=n_samples)
data = np.zeros((n_samples, n_features))

# generate the 2D spiral data driven by a 1d parameter t
max_rot = n_turns * 2 * np.pi
data[:, 0] = radius * t * np.cos(t * max_rot)
data[:, 1] = radius * t * np.sin(t * max_rot)
data[:, 2] = rng.uniform(-1, 1.0, n_samples)

manifold = np.vstack((t * 2 - 1, data[:, 2])).T.copy()
colors = manifold[:, 0]

# rotate and plot original data
sp = pl.subplot(211)
U = np.dot(data, [[-.79, -.59, -.13],
                  [ .29, -.57,  .75],
                  [-.53,  .56,  .63]])
sp.scatter(U[:, 1], U[:, 2], c=colors)
sp.set_title("Original data")
print "Computing LLE embedding" | |
n_neighbors, out_dim = 12, 2 | |
X_r, cost = locally_linear_embedding(data, n_neighbors, out_dim) | |
sp = pl.subplot(212) | |
sp.scatter(X_r[:,0], X_r[:,1], c=colors) | |
sp.set_title("LLE embedding") | |
pl.show() |
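For reference, pyamg + LOBPCG is only one way to extract the smallest eigenvectors of M. A minimal sketch of an alternative that drops the pyamg dependency, using SciPy's eigsh in shift-invert mode (the strategy scikit-learn's own LLE uses for its 'arpack' solver; the helper name smallest_eigenvectors is made up here):

# alternative eigensolver sketch: scipy.sparse.linalg.eigsh in shift-invert
# mode; sigma=0.0 targets the eigenvalues closest to zero
import numpy as np
from scipy.sparse.linalg import eigsh

def smallest_eigenvectors(A, out_dim, tol=1e-6):
    # A is the sparse matrix M = (I-W)' (I-W) built as above; note that
    # factorizing the (singular) M for shift-invert can occasionally fail
    eigen_values, eigen_vectors = eigsh(A, k=out_dim, sigma=0.0, tol=tol)
    index = np.argsort(eigen_values)
    return eigen_vectors[:, index], np.sum(eigen_values)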
I should have credited you for the swiss roll code :-)
BTW, the code is referenced from here: http://fseoane.net/blog/2011/locally-linear-embedding-and-sparse-eigensolvers/
Updates
For scikit-learn v0.22, change
"from scikits.learn import neighbors"
to
"from sklearn.neighbors import kneighbors_graph"
The code in the example is based on v0.8.
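For context, the renamed call looks like this on recent versions (a sketch, not a drop-in fix, since the 'barycenter' mode used above no longer exists; see the next comment):

# scikit-learn >= 0.22: the graph builder is imported directly
from sklearn.neighbors import kneighbors_graph

# only 'connectivity' and 'distance' modes remain in modern releases; the
# v0.8 'barycenter' mode (LLE reconstruction weights) was removed
W = kneighbors_graph(data, n_neighbors=12, mode='distance')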
Hello, I'm trying to run your code with Python 3.6, but I have a problem with line 21: it asked me to change 'barycenter' to 'connectivity', and I got a weird result. Could you please give me a suggestion? I'm new to coding.
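For what it's worth, that substitution is exactly the problem: mode='connectivity' builds a 0/1 adjacency matrix, not the barycenter reconstruction weights LLE needs, so the embedding comes out distorted. On Python 3 with a recent scikit-learn, a minimal workaround sketch is to call the library's built-in LLE instead of the hand-rolled function:

# sketch for Python 3 + recent scikit-learn: let sklearn.manifold build the
# barycenter weights and solve the eigenproblem internally
from sklearn.manifold import locally_linear_embedding

X_r, cost = locally_linear_embedding(
    data, n_neighbors=12, n_components=2, tol=1e-6, max_iter=200)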
Nice! Not as good as the Hessian LLE from mdp, but still a good start. Also, dig a hole in your swiss roll to make the topology more interesting :)
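A quick sketch of the hole idea: mask out a rectangle in the (t, z) parameter plane before the embedding step (the bounds below are arbitrary):

# punch a rectangular hole in the (t, z) parameter plane; bounds are arbitrary
hole = (t > 0.4) & (t < 0.6) & (data[:, 2] > -0.3) & (data[:, 2] < 0.3)
data, t, colors = data[~hole], t[~hole], colors[~hole]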