from sklearn.feature_selection import VarianceThreshold
# Feature selection: VarianceThreshold drops low-variance features (features that barely vary across samples)
var = VarianceThreshold(threshold=1.0)  # drop features whose variance does not exceed 1.0; the default threshold=0.0 removes only constant features
data = var.fit_transform([[0, 2, 0, 3], [0, 1, 4, 3], [0, 1, 1, 3]])
print(data)
'''
[[0]
[4]
[1]]
'''
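To see which columns survived, inspect the fitted selector; variances_ and get_support() are standard attributes of sklearn's feature selectors:

# Per-column (population) variances and the boolean keep-mask
print(var.variances_)    # approximately [0. 0.2222 2.8889 0.] -> only column 2 exceeds 1.0
print(var.get_support()) # [False False  True False]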
# Laplacian Eigenmaps on a swiss roll -- imports for the rest of the script
import math
import numpy as np
from numpy import shape, mat, zeros, array, tile, argsort
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # registers the '3d' projection
from sklearn.datasets import make_swiss_roll

def laplaEigen(dataMat, k, t):
    # Build the heat-kernel weight matrix W over the k-nearest-neighbor graph,
    # the degree matrix D, and solve the eigenproblem of D^-1 L with L = D - W.
    m, n = shape(dataMat)
    W = mat(zeros([m, m]))
    D = mat(zeros([m, m]))
    for i in range(m):
        k_index = knn(dataMat[i, :], dataMat, k)
        for j in range(k):
            sqDiffVector = dataMat[i, :] - dataMat[k_index[j], :]
            sqDiffVector = array(sqDiffVector) ** 2
            sqDistances = sqDiffVector.sum()
            W[i, k_index[j]] = math.exp(-sqDistances / t)  # heat-kernel weight
            D[i, i] += W[i, k_index[j]]                    # accumulate degree
    L = D - W
    Dinv = np.linalg.inv(D)  # D is diagonal with positive entries, so invertible
    X = np.dot(Dinv, L)      # reuse the precomputed inverse (the original computed Dinv but then called D.I)
    lamda, f = np.linalg.eig(X)
    return lamda, f
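Because the kNN graph is directed, W above comes out slightly asymmetric and np.linalg.eig can return complex eigenpairs. A common variant (not what the original code does) symmetrizes W and solves the generalized problem L f = lamda D f directly; a minimal sketch assuming SciPy is available, with laplaEigenSym a hypothetical helper name:

import scipy.linalg

def laplaEigenSym(W):
    # Hypothetical variant: symmetrize the weights, rebuild D and L, then
    # solve L f = lamda D f; scipy.linalg.eigh returns real eigenvalues
    # already sorted in ascending order.
    W = array((W + W.T) / 2.0)
    D = np.diag(W.sum(axis=1))
    L = D - W
    lamda, f = scipy.linalg.eigh(L, D)
    return lamda, f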
def knn(inX, dataSet, k):
    # Brute-force k-nearest neighbors: indices of the k points in dataSet
    # closest to inX in Euclidean distance (inX itself is included).
    dataSetSize = dataSet.shape[0]
    diffMat = tile(inX, (dataSetSize, 1)) - dataSet
    sqDiffMat = array(diffMat) ** 2
    sqDistances = sqDiffMat.sum(axis=1)
    distances = sqDistances ** 0.5
    sortedDistIndicies = distances.argsort()
    return sortedDistIndicies[0:k]
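The brute-force knn above recomputes all pairwise distances once per query point. As a sketch, the same neighbor indices (up to tie-breaking) can be obtained in one call with scikit-learn's NearestNeighbors; kneighbors includes each point as its own nearest neighbor, matching knn, and knn_all is a hypothetical helper name:

from sklearn.neighbors import NearestNeighbors

def knn_all(dataSet, k):
    # Row i of idx holds the k nearest neighbors of dataSet[i],
    # so idx[i] corresponds to knn(dataSet[i, :], dataSet, k).
    nbrs = NearestNeighbors(n_neighbors=k).fit(dataSet)
    dist, idx = nbrs.kneighbors(dataSet)  # both of shape (m, k)
    return idx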
# Embed a swiss-roll point cloud and plot the original vs. the 2D embedding.
dataMat, color = make_swiss_roll(n_samples=2000)
lamda, f = laplaEigen(dataMat, 11, 5.0)
fm, fn = shape(f)
print('fm,fn:', fm, fn)
lamdaIndicies = argsort(lamda)
first = 0
second = 0
print(lamdaIndicies[0], lamdaIndicies[1])
# Skip the trivial near-zero eigenvalues; keep the first two coordinates
# whose eigenvalues exceed the tolerance.
for i in range(fm):
    if lamda[lamdaIndicies[i]].real > 1e-5:
        print(lamda[lamdaIndicies[i]])
        first = lamdaIndicies[i]
        second = lamdaIndicies[i + 1]
        break
print(first, second)
redEigVects = f[:, lamdaIndicies]  # eigenvectors sorted by eigenvalue (unused below)
fig = plt.figure('origin')
ax1 = fig.add_subplot(111, projection='3d')
ax1.scatter(dataMat[:, 0], dataMat[:, 1], dataMat[:, 2], c=color, cmap=plt.cm.Spectral)
fig = plt.figure('lowdata')
ax2 = fig.add_subplot(111)
# eig can return a complex dtype; the imaginary parts are ~0, so plot the real part
ax2.scatter(f[:, first].real, f[:, second].real, c=color, cmap=plt.cm.Spectral)
plt.show()
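As a cross-check, scikit-learn ships Laplacian Eigenmaps as sklearn.manifold.SpectralEmbedding; a minimal sketch, with n_neighbors playing the role of k above:

from sklearn.manifold import SpectralEmbedding

embed = SpectralEmbedding(n_components=2, n_neighbors=11)
lowd = embed.fit_transform(dataMat)  # shape (2000, 2)
ax = plt.figure('sklearn').add_subplot(111)
ax.scatter(lowd[:, 0], lowd[:, 1], c=color, cmap=plt.cm.Spectral)
plt.show()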
This concludes this article summarizing feature dimensionality reduction in Python. For more on understanding feature dimensionality reduction in Python, search 腳本之家's earlier articles or browse the related articles below, and we hope you will keep supporting 腳本之家!