03 Clustering Algorithms - K-means Clustering
04 Clustering Algorithms - Code Example 1 - K-means Clustering
05 Clustering Algorithms - Bisecting K-Means, K-Means++, K-Means||, Canopy, Mini Batch K-Means
Standard imports and setup:
import time
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
from sklearn.cluster import MiniBatchKMeans, KMeans
from sklearn.metrics.pairwise import pairwise_distances_argmin
from sklearn.datasets import make_blobs  # sklearn.datasets.samples_generator was removed in newer scikit-learn versions
## Font settings so that Chinese characters and minus signs render correctly in plots
mpl.rcParams['font.sans-serif'] = [u'SimHei']
mpl.rcParams['axes.unicode_minus'] = False
1. Initialize three cluster centers
centers = [[1, 1], [-1, -1], [1, -1]]
clusters = len(centers)  # number of clusters = 3
Generate 3000 two-dimensional samples around the three centers above, with a standard deviation of 0.7:
X, Y = make_blobs(n_samples=3000, centers=centers, cluster_std=0.7, random_state=28)
2. Build the K-Means model
k_means = KMeans(init='k-means++', n_clusters=clusters, random_state=28)
t0 = time.time()  # start time
k_means.fit(X)  # fit the model
km_batch = time.time() - t0  # time spent training K-Means
print("K-Means training time: %.4fs" % km_batch)
K-Means training time: 0.1861s
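Under the hood, `fit` repeats two steps until the centers stop moving: assign every sample to its nearest center, then recompute each center as the mean of its assigned samples. A minimal NumPy sketch of one such iteration, reusing `X`, `clusters`, and `pairwise_distances_argmin` from above (the names `centers_est` and `labels_est` are mine; the real estimator also uses k-means++ initialization, multiple restarts, and convergence checks):
# one illustrative K-Means iteration
rng = np.random.RandomState(28)
centers_est = X[rng.choice(len(X), clusters, replace=False)]   # random initial centers
labels_est = pairwise_distances_argmin(X, centers_est)         # assignment step: nearest center
centers_est = np.array([X[labels_est == k].mean(axis=0)        # update step: mean of each cluster
                        for k in range(clusters)])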
3. Build the MiniBatchKMeans model
batch_size = 100
mbk = MiniBatchKMeans(init='k-means++', n_clusters=clusters,
                      batch_size=batch_size, random_state=28)
t0 = time.time()
mbk.fit(X)
mbk_batch = time.time() - t0
print ("Mini Batch K-Means算法模型訓(xùn)練消耗時(shí)間:%.4fs" % mbk_batch)
Mini Batch K-Means training time: 0.1511s
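Mini Batch K-Means is faster because each update uses only a small random batch of samples instead of the full dataset. If the data arrived in chunks, the same estimator could also be trained incrementally with `partial_fit`; a rough sketch under that assumption (the chunk loop below is illustrative and not part of the original example):
mbk_stream = MiniBatchKMeans(init='k-means++', n_clusters=clusters,
                             batch_size=batch_size, random_state=28)
for start in range(0, X.shape[0], batch_size):
    mbk_stream.partial_fit(X[start:start + batch_size])  # update the centers from one mini-batch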
4. Prediction results
km_y_hat = k_means.predict(X)
mbkm_y_hat = mbk.predict(X)
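`predict` assigns each sample to the nearest learned center under Euclidean distance, so it can be reproduced with `pairwise_distances_argmin`; a quick sanity check along those lines (my own addition, not part of the original code):
manual_labels = pairwise_distances_argmin(X, k_means.cluster_centers_)
print("fraction agreeing with k_means.predict:", np.mean(manual_labels == km_y_hat))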
5. Get the cluster centers and align them across the two models (to make the plots comparable)
# K-Means cluster centers
k_means_cluster_centers = k_means.cluster_centers_
# Mini Batch K-Means cluster centers
mbk_means_cluster_centers = mbk.cluster_centers_
print("K-Means cluster centers:\ncenter=", k_means_cluster_centers)
print("Mini Batch K-Means cluster centers:\ncenter=", mbk_means_cluster_centers)
# order[k] is the index of the Mini Batch center closest to K-Means center k
order = pairwise_distances_argmin(k_means_cluster_centers,
                                  mbk_means_cluster_centers)
K-Means cluster centers:
center= [[-1.0600799 -1.05662982]
 [ 1.02975208 -1.07435837]
 [ 1.01491055 1.02216649]]
Mini Batch K-Means cluster centers:
center= [[ 0.99602094 1.10688195]
 [-1.00828286 -1.05983915]
 [ 1.07892315 -0.94286826]]
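The two models can number the same physical clusters differently, so `order[k]` records which Mini Batch center corresponds to K-Means center `k`. A small illustrative printout of that mapping (my own addition):
for k in range(clusters):
    print("K-Means center %d  <->  Mini Batch K-Means center %d" % (k, order[k]))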
6. Plotting
plt.figure(figsize=(12, 6), facecolor='w')
plt.subplots_adjust(left=0.05, right=0.95, bottom=0.05, top=0.9)
cm = mpl.colors.ListedColormap(['#FFC2CC', '#C2FFCC', '#CCC2FF'])
cm2 = mpl.colors.ListedColormap(['#FF0000', '#00FF00', '#0000FF'])
1. Original data
plt.subplot(221)
plt.scatter(X[:, 0], X[:, 1], c=Y, s=6, cmap=cm, edgecolors='none')
plt.title('Original data distribution')
plt.xticks(())
plt.yticks(())
plt.grid(True)
2. K-Means clustering result
plt.subplot(222)
plt.scatter(X[:, 0], X[:, 1], c=km_y_hat, s=6, cmap=cm, edgecolors='none')
plt.scatter(k_means_cluster_centers[:, 0], k_means_cluster_centers[:, 1],
            c=range(clusters), s=60, cmap=cm2, edgecolors='none')
plt.title('K-Means clustering result')
plt.xticks(())
plt.yticks(())
plt.text(-3.8, 3, 'train time: %.2fms' % (km_batch*1000))
plt.grid(True)
3. Mini Batch K-Means clustering result
plt.subplot(223)
plt.scatter(X[:, 0], X[:, 1], c=mbkm_y_hat, s=6, cmap=cm, edgecolors='none')
plt.scatter(mbk_means_cluster_centers[:, 0], mbk_means_cluster_centers[:, 1],
            c=range(clusters), s=60, cmap=cm2, edgecolors='none')
plt.title('Mini Batch K-Means clustering result')
plt.xticks(())
plt.yticks(())
plt.text(-3.8, 3, 'train time: %.2fms' % (mbk_batch*1000))
plt.grid(True)
# Mark samples where the two models disagree (after mapping labels via order);
# using a boolean array so |= accumulates element-wise, rather than extending a list
different = np.zeros(X.shape[0], dtype=bool)
for k in range(clusters):
    different |= ((km_y_hat == k) != (mbkm_y_hat == order[k]))
identic = np.logical_not(different)
different_nodes = int(np.sum(different))
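Assuming `order` is a permutation of the cluster indices (it is for this data), the same count can be obtained by first relabeling the Mini Batch predictions into the K-Means numbering; a compact alternative sketch (`inverse_order` and `mbkm_aligned` are names I introduce here):
inverse_order = np.empty_like(order)
inverse_order[order] = np.arange(clusters)      # maps Mini Batch labels back to K-Means numbering
mbkm_aligned = inverse_order[mbkm_y_hat]
print("different nodes (alternative count):", int(np.sum(mbkm_aligned != km_y_hat)))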
4. Points where Mini Batch K-Means and K-Means predictions differ
plt.subplot(224)
plt.plot(X[identic, 0], X[identic, 1], 'w', markerfacecolor='#bbbbbb', marker='.')
plt.plot(X[different, 0], X[different, 1], 'w', markerfacecolor='m', marker='.')
plt.title('Points where Mini Batch K-Means and K-Means predictions differ')
plt.xticks(())
plt.yticks(())
plt.text(-3.8, 2, 'different nodes: %d' % (different_nodes))
plt.show()