1. KNN Classification Model Demo
class KdNode(object):
    def __init__(self, dom_elt, split, left, right):
        self.dom_elt = dom_elt   # the k-dimensional point stored at this node
        self.split = split       # the dimension used to split at this node
        self.left = left         # subtree of points on the lower side of the splitting hyperplane
        self.right = right       # subtree of points on the upper side of the splitting hyperplane


class KdTree(object):
    def __init__(self, data):
        k = len(data[0])  # dimensionality of the data

        def CreateNode(split, data_set):
            # Recursively build the subtree for data_set, splitting on dimension `split`.
            if not data_set:
                return None
            # Sort on the split dimension and take the median point as the subtree root.
            data_set.sort(key=lambda x: x[split])
            split_pos = len(data_set) // 2
            median = data_set[split_pos]
            split_next = (split + 1) % k  # cycle to the next dimension
            return KdNode(median, split,
                          CreateNode(split_next, data_set[:split_pos]),
                          CreateNode(split_next, data_set[split_pos + 1:]))

        self.root = CreateNode(0, data)


def preorder(root):
    # Pre-order traversal: print the node, then its left and right subtrees.
    print(root.dom_elt)
    if root.left:
        preorder(root.left)
    if root.right:
        preorder(root.right)


if __name__ == "__main__":
    data = [[2, 3], [5, 4], [9, 6], [4, 7], [8, 1], [7, 2]]
    kd = KdTree(data)
    preorder(kd.root)
Output:
[7, 2]
[5, 4]
[2, 3]
[4, 7]
[9, 6]
[8, 1]
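
The listing above only builds the tree and prints a pre-order traversal. Searching it for the nearest neighbour of a query point works by descending first into the side of each splitting hyperplane that contains the query, then backtracking and crossing a hyperplane only if it lies closer than the best match found so far. The find_nearest helper below is a minimal sketch of that idea written against the KdTree class above; it is an illustrative addition, not part of the original listing.

from math import sqrt

def find_nearest(tree, point):
    """Minimal nearest-neighbour search over the KdTree above (illustrative helper)."""
    best = {'point': None, 'dist': float('inf')}

    def travel(node):
        if node is None:
            return
        axis = node.split
        # Visit the side of the splitting hyperplane that contains the query first.
        if point[axis] <= node.dom_elt[axis]:
            nearer, further = node.left, node.right
        else:
            nearer, further = node.right, node.left
        travel(nearer)

        # Check the point stored at this node.
        d = sqrt(sum((p - q) ** 2 for p, q in zip(point, node.dom_elt)))
        if d < best['dist']:
            best['point'], best['dist'] = node.dom_elt, d

        # Only cross the hyperplane if it is closer than the best distance so far.
        if abs(point[axis] - node.dom_elt[axis]) < best['dist']:
            travel(further)

    travel(tree.root)
    return best['point'], best['dist']

# Example query against the tree built above; for this data the nearest
# neighbour of [3, 4.5] is [2, 3].
# print(find_nearest(kd, [3, 4.5]))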
1.2 Nearest Neighbors classification with scikit-learn
"""
================================
Nearest Neighbors Classification
================================
Sample usage of Nearest Neighbors classification.
It will plot the decision boundaries for each class.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
from sklearn import neighbors, datasets
n_neighbors = 15  # number of neighbours used in the vote
iris = datasets.load_iris()
# Use only the first two features so the decision regions can be plotted in 2-D.
X = iris.data[:, :2]
y = iris.target
h = .02  # step size in the mesh
# Colour maps for the decision regions (light) and the training points (bold).
cmap_light = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAFF'])
cmap_bold = ListedColormap(['#FF0000', '#00FF00', '#0000FF'])
for weights in ['uniform', 'distance']:
    # Fit a k-nearest-neighbours classifier with the given vote weighting.
    clf = neighbors.KNeighborsClassifier(n_neighbors, weights=weights)
    clf.fit(X, y)

    # Predict the class for every point of a dense mesh over the feature space
    # to visualise the decision boundary.
    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                         np.arange(y_min, y_max, h))
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)

    plt.figure()
    plt.pcolormesh(xx, yy, Z, cmap=cmap_light)

    # Plot the training points on top of the decision regions.
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold,
                edgecolor='k', s=20)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.title("3-Class classification (k = %i, weights = '%s')"
              % (n_neighbors, weights))

plt.show()
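
The plots above only visualise decision regions on the training data. A minimal sketch of quantifying accuracy on held-out data follows; the 70/30 split and random_state=0 are arbitrary choices for illustration, not part of the original example.

from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
from sklearn import neighbors, datasets

iris = datasets.load_iris()
X, y = iris.data[:, :2], iris.target   # same two features as the plots above

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.3, random_state=0)

for weights in ['uniform', 'distance']:
    clf = neighbors.KNeighborsClassifier(n_neighbors=15, weights=weights)
    clf.fit(X_train, y_train)
    print(weights, accuracy_score(y_test, clf.predict(X_test)))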
2. KNN Regression Model Demo
import numpy as np
import matplotlib.pyplot as plt
from sklearn import neighbors
np.random.seed(0)
X = np.sort(5 * np.random.rand(40, 1), axis=0)   # 40 random points in [0, 5]
T = np.linspace(0, 5, 500)[:, np.newaxis]        # dense grid used for prediction
y = np.sin(X).ravel()
y[::5] += 1 * (0.5 - np.random.rand(8))          # add noise to every 5th target

n_neighbors = 5

plt.figure(figsize=(8, 6))
for i, weights in enumerate(['uniform', 'distance']):
    # Fit the regressor on the noisy samples and predict on the dense grid.
    knn = neighbors.KNeighborsRegressor(n_neighbors, weights=weights)
    y_ = knn.fit(X, y).predict(T)

    plt.subplot(2, 1, i + 1)
    plt.scatter(X, y, c='k', label='data')
    plt.plot(T, y_, c='g', label='prediction')
    plt.axis('tight')
    plt.legend()
    plt.title("KNeighborsRegressor (k = %i, weights = '%s')"
              % (n_neighbors, weights))

plt.tight_layout()
plt.show()
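
The value k = 5 above is fixed by hand. Below is a hedged sketch of selecting n_neighbors and the weighting scheme by cross-validation instead; the candidate grid and the 5-fold setting are assumptions chosen only for illustration.

import numpy as np
from sklearn import neighbors
from sklearn.model_selection import GridSearchCV

np.random.seed(0)
X = np.sort(5 * np.random.rand(40, 1), axis=0)
y = np.sin(X).ravel()
y[::5] += 1 * (0.5 - np.random.rand(8))

param_grid = {'n_neighbors': list(range(1, 16)),
              'weights': ['uniform', 'distance']}
search = GridSearchCV(neighbors.KNeighborsRegressor(), param_grid, cv=5)
search.fit(X, y)
print(search.best_params_)   # best combination found on this toy data
print(search.best_score_)    # corresponding mean cross-validated R^2 score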