
Commit 1c80dba

Author: chihchunchen
fixed indentation issue in knnplots.py
1 parent 85b3d9f · commit 1c80dba

File tree: 1 file changed, +40 −49 lines changed

knnplots.py (+40 −49)
@@ -4,58 +4,49 @@
import numpy as np

def plotvector(XTrain, yTrain, XTest, yTest, weights, upperLim = 310):
    results = []
    for n in range(1, upperLim, 4):
        clf = neighbors.KNeighborsClassifier(n_neighbors = n, weights = weights)
        clf = clf.fit(XTrain, yTrain)
        preds = clf.predict(XTest)
        accuracy = clf.score(XTest, yTest)
        results.append([n, accuracy])
    results = np.array(results)
    return(results)

def plotaccuracy(XTrain, yTrain, XTest, yTest, upperLim):
    pltvector1 = plotvector(XTrain, yTrain, XTest, yTest, weights = "uniform")
    pltvector2 = plotvector(XTrain, yTrain, XTest, yTest, weights = "distance")
    line1 = plt.plot(pltvector1[:,0], pltvector1[:,1], label = "uniform")
    line2 = plt.plot(pltvector2[:,0], pltvector2[:,1], label = "distance")
    plt.legend(loc=3)
    plt.ylim(0.5, 1)
    plt.title("Accuracy with Increasing K")
    plt.show()



def decisionplot(XTrain, yTrain, n_neighbors, weights):
    h = .02  # step size in the mesh
    Xtrain = XTrain[:, :2]  # we only take the first two features.
    # Create color maps
    cmap_light = ListedColormap(["#FFAAAA", "#AAFFAA", "#AAAAFF"])
    cmap_bold = ListedColormap(["#FF0000", "#00FF00", "#0000FF"])
    clf = neighbors.KNeighborsClassifier(n_neighbors, weights=weights)
    clf.fit(Xtrain, yTrain)
    # Plot the decision boundary. For that, we will assign a color to each
    # point in the mesh [x_min, x_max]x[y_min, y_max].
    x_min, x_max = Xtrain[:, 0].min() - 1, Xtrain[:, 0].max() + 1
    y_min, y_max = Xtrain[:, 1].min() - 1, Xtrain[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
    # Put the result into a color plot
    Z = Z.reshape(xx.shape)
    plt.figure()
    plt.pcolormesh(xx, yy, Z, cmap = cmap_light)
    # Plot also the training points
    plt.scatter(Xtrain[:, 0], Xtrain[:, 1], c = yTrain, cmap = cmap_bold)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.title("2-Class classification (k = %i, weights = '%s')" % (n_neighbors, weights))
    plt.show()
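
For anyone trying the module after this fix, here is a minimal usage sketch. It is not part of the commit: the iris dataset, the 70/30 train/test split, and the upperLim value are assumptions, and knnplots.py is assumed to import neighbors, pyplot, and ListedColormap above this hunk. plotvector is called directly rather than through plotaccuracy because plotaccuracy does not forward its upperLim argument, and plotvector's default of 310 neighbors exceeds the iris training-set size.

# Usage sketch -- assumptions: iris data, a 70/30 split, upperLim = 50.
from sklearn import datasets
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
from knnplots import plotvector, decisionplot

iris = datasets.load_iris()
XTrain, XTest, yTrain, yTest = train_test_split(
    iris.data, iris.target, test_size=0.3, random_state=0)

# Accuracy as k grows (k = 1, 5, ..., 49) for both weighting schemes.
for w in ("uniform", "distance"):
    acc = plotvector(XTrain, yTrain, XTest, yTest, weights=w, upperLim=50)
    plt.plot(acc[:, 0], acc[:, 1], label=w)
plt.legend(loc=3)
plt.title("Accuracy with Increasing K")
plt.show()

# Decision regions on the first two iris features for k = 15.
decisionplot(XTrain, yTrain, n_neighbors=15, weights="uniform")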
