Skip to content

Commit 1ea9209

Browse files
committed
SVM
1 parent 2aaf3e4 commit 1ea9209

File tree

10 files changed

+204
-1
lines changed

10 files changed

+204
-1
lines changed

SVM/SVM_scikit-learn.py

Lines changed: 71 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,71 @@
1+
import numpy as np
2+
from scipy import io as spio
3+
from matplotlib import pyplot as plt
4+
from sklearn import svm
5+
6+
def SVM():
    """Demonstrate SVM classification with scikit-learn on two datasets.

    data1.mat -- linearly separable data, fitted with a linear kernel.
    data2.mat -- non-linearly separable data, fitted with an RBF kernel.

    Loads the .mat files from the current working directory and shows
    the data plots and decision boundaries (blocking matplotlib windows).
    """
    # --- data1: linear classification ---
    data1 = spio.loadmat('data1.mat')
    X = data1['X']
    y = data1['y']
    y = np.ravel(y)  # SVC expects a 1-D label vector, not (m, 1)
    plot_data(X, y)

    model = svm.SVC(C=1.0, kernel='linear').fit(X, y)  # linear kernel
    plot_decisionBoundary(X, y, model)  # draw the linear decision boundary

    # --- data2: non-linear classification ---
    data2 = spio.loadmat('data2.mat')
    X = data2['X']
    y = data2['y']
    y = np.ravel(y)
    # NOTE: do not rebind the module-level `plt` here (the original did
    # `plt = plot_data(...)`, shadowing the import with the same object).
    plot_data(X, y)
    plt.show()

    # gamma is the RBF-kernel coefficient; larger values fit more tightly
    model = svm.SVC(gamma=100).fit(X, y)
    plot_decisionBoundary(X, y, model, class_='notLinear')
26+
27+
28+
29+
# 作图
30+
def plot_data(X, y):
    """Scatter-plot a two-class dataset and return the pyplot module.

    Samples with label 1 are drawn as red circles, samples with label 0
    as green triangles; axes are labeled X1/X2 and a legend is added.
    """
    plt.figure(figsize=(10, 8))

    positives = np.where(y == 1)  # indices of the y == 1 samples
    negatives = np.where(y == 0)  # indices of the y == 0 samples

    handle_pos, = plt.plot(np.ravel(X[positives, 0]),
                           np.ravel(X[positives, 1]),
                           'ro', markersize=8)
    handle_neg, = plt.plot(np.ravel(X[negatives, 0]),
                           np.ravel(X[negatives, 1]),
                           'g^', markersize=8)

    plt.xlabel("X1")
    plt.ylabel("X2")
    plt.legend([handle_pos, handle_neg], ["y==1", "y==0"])
    return plt
40+
41+
# 画决策边界
42+
def plot_decisionBoundary(X, y, model, class_='linear'):
    """Plot the data together with the fitted model's decision boundary.

    Args:
        X: (m, 2) feature matrix.
        y: label vector of 0/1.
        model: a fitted sklearn ``svm.SVC``.
        class_: ``'linear'`` derives the separating line analytically
            from ``model.coef_``/``model.intercept_``; any other value
            draws the boundary as a contour of predictions over a grid.
    """
    plt = plot_data(X, y)

    if class_ == 'linear':
        # Linear boundary: w0*x1 + w1*x2 + b = 0  =>  x2 = -(w0*x1 + b)/w1
        w = model.coef_
        b = model.intercept_
        # BUG FIX: the upper bound of the x-range must come from column 0
        # (the x-axis feature); the original used np.max(X[:, 1]).
        xp = np.linspace(np.min(X[:, 0]), np.max(X[:, 0]), 100)
        yp = -(w[0, 0] * xp + b) / w[0, 1]
        plt.plot(xp, yp, 'b-', linewidth=2.0)
        plt.show()
    else:  # non-linear boundary: contour the predictions over a 100x100 grid
        x_1 = np.transpose(np.linspace(np.min(X[:, 0]), np.max(X[:, 0]), 100).reshape(1, -1))
        x_2 = np.transpose(np.linspace(np.min(X[:, 1]), np.max(X[:, 1]), 100).reshape(1, -1))
        X1, X2 = np.meshgrid(x_1, x_2)
        vals = np.zeros(X1.shape)
        # Predict one grid column at a time to keep memory use small.
        for i in range(X1.shape[1]):
            this_X = np.hstack((X1[:, i].reshape(-1, 1), X2[:, i].reshape(-1, 1)))
            vals[:, i] = model.predict(this_X)

        # BUG FIX: Axes.contour takes ``colors``, not ``color``; the
        # original kwarg is rejected by matplotlib.
        plt.contour(X1, X2, vals, [0, 1], colors='blue')
        plt.show()
64+
65+
66+
67+
# Run the demo only when executed as a script (not on import).
if __name__ == "__main__":
    SVM()
69+
70+
71+

SVM/data.txt

Lines changed: 118 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,118 @@
1+
0.051267,0.69956,1
2+
-0.092742,0.68494,1
3+
-0.21371,0.69225,1
4+
-0.375,0.50219,1
5+
-0.51325,0.46564,1
6+
-0.52477,0.2098,1
7+
-0.39804,0.034357,1
8+
-0.30588,-0.19225,1
9+
0.016705,-0.40424,1
10+
0.13191,-0.51389,1
11+
0.38537,-0.56506,1
12+
0.52938,-0.5212,1
13+
0.63882,-0.24342,1
14+
0.73675,-0.18494,1
15+
0.54666,0.48757,1
16+
0.322,0.5826,1
17+
0.16647,0.53874,1
18+
-0.046659,0.81652,1
19+
-0.17339,0.69956,1
20+
-0.47869,0.63377,1
21+
-0.60541,0.59722,1
22+
-0.62846,0.33406,1
23+
-0.59389,0.005117,1
24+
-0.42108,-0.27266,1
25+
-0.11578,-0.39693,1
26+
0.20104,-0.60161,1
27+
0.46601,-0.53582,1
28+
0.67339,-0.53582,1
29+
-0.13882,0.54605,1
30+
-0.29435,0.77997,1
31+
-0.26555,0.96272,1
32+
-0.16187,0.8019,1
33+
-0.17339,0.64839,1
34+
-0.28283,0.47295,1
35+
-0.36348,0.31213,1
36+
-0.30012,0.027047,1
37+
-0.23675,-0.21418,1
38+
-0.06394,-0.18494,1
39+
0.062788,-0.16301,1
40+
0.22984,-0.41155,1
41+
0.2932,-0.2288,1
42+
0.48329,-0.18494,1
43+
0.64459,-0.14108,1
44+
0.46025,0.012427,1
45+
0.6273,0.15863,1
46+
0.57546,0.26827,1
47+
0.72523,0.44371,1
48+
0.22408,0.52412,1
49+
0.44297,0.67032,1
50+
0.322,0.69225,1
51+
0.13767,0.57529,1
52+
-0.0063364,0.39985,1
53+
-0.092742,0.55336,1
54+
-0.20795,0.35599,1
55+
-0.20795,0.17325,1
56+
-0.43836,0.21711,1
57+
-0.21947,-0.016813,1
58+
-0.13882,-0.27266,1
59+
0.18376,0.93348,0
60+
0.22408,0.77997,0
61+
0.29896,0.61915,0
62+
0.50634,0.75804,0
63+
0.61578,0.7288,0
64+
0.60426,0.59722,0
65+
0.76555,0.50219,0
66+
0.92684,0.3633,0
67+
0.82316,0.27558,0
68+
0.96141,0.085526,0
69+
0.93836,0.012427,0
70+
0.86348,-0.082602,0
71+
0.89804,-0.20687,0
72+
0.85196,-0.36769,0
73+
0.82892,-0.5212,0
74+
0.79435,-0.55775,0
75+
0.59274,-0.7405,0
76+
0.51786,-0.5943,0
77+
0.46601,-0.41886,0
78+
0.35081,-0.57968,0
79+
0.28744,-0.76974,0
80+
0.085829,-0.75512,0
81+
0.14919,-0.57968,0
82+
-0.13306,-0.4481,0
83+
-0.40956,-0.41155,0
84+
-0.39228,-0.25804,0
85+
-0.74366,-0.25804,0
86+
-0.69758,0.041667,0
87+
-0.75518,0.2902,0
88+
-0.69758,0.68494,0
89+
-0.4038,0.70687,0
90+
-0.38076,0.91886,0
91+
-0.50749,0.90424,0
92+
-0.54781,0.70687,0
93+
0.10311,0.77997,0
94+
0.057028,0.91886,0
95+
-0.10426,0.99196,0
96+
-0.081221,1.1089,0
97+
0.28744,1.087,0
98+
0.39689,0.82383,0
99+
0.63882,0.88962,0
100+
0.82316,0.66301,0
101+
0.67339,0.64108,0
102+
1.0709,0.10015,0
103+
-0.046659,-0.57968,0
104+
-0.23675,-0.63816,0
105+
-0.15035,-0.36769,0
106+
-0.49021,-0.3019,0
107+
-0.46717,-0.13377,0
108+
-0.28859,-0.060673,0
109+
-0.61118,-0.067982,0
110+
-0.66302,-0.21418,0
111+
-0.59965,-0.41886,0
112+
-0.72638,-0.082602,0
113+
-0.83007,0.31213,0
114+
-0.72062,0.53874,0
115+
-0.59389,0.49488,0
116+
-0.48445,0.99927,0
117+
-0.0063364,0.99927,0
118+
0.63265,-0.030612,0

SVM/data1.mat

981 Bytes
Binary file not shown.

SVM/data2.mat

7.43 KB
Binary file not shown.

SVM/data3.mat

5.9 KB
Binary file not shown.

formula/LogisticRegression_01.wmf

310 Bytes
Binary file not shown.

formula/SVM.wmf

3.97 KB
Binary file not shown.

images/SVM_01.png

10.9 KB
Loading

images/SVM_02.png

12.4 KB
Loading

readme.md

Lines changed: 15 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -636,6 +636,18 @@ def predict(Theta1,Theta2,X):
636636
- 归一化后训练集预测准确度
637637
![enter description here][23]
638638

639+
--------------------
640+
641+
## 四、SVM支持向量机
642+
643+
### 1、代价函数
644+
- 在逻辑回归中,我们的代价为:![\cos t({h_\theta }(x),y) = \left\{ {\begin{array}{c} { - \log ({h_\theta }(x))} \\ { - \log (1 - {h_\theta }(x))} \end{array} \begin{array}{c} {y = 1} \\ {y = 0} \end{array} } \right.](http://chart.apis.google.com/chart?cht=tx&chs=1x0&chf=bg,s,FFFFFF00&chco=000000&chl=%5Ccos%20t%28%7Bh_%5Ctheta%20%7D%28x%29%2Cy%29%20%3D%20%5Cleft%5C%7B%20%7B%5Cbegin%7Barray%7D%7Bc%7D%20%20%20%20%7B%20-%20%5Clog%20%28%7Bh_%5Ctheta%20%7D%28x%29%29%7D%20%5C%5C%20%20%20%20%7B%20-%20%5Clog%20%281%20-%20%7Bh_%5Ctheta%20%7D%28x%29%29%7D%20%20%5Cend%7Barray%7D%20%5Cbegin%7Barray%7D%7Bc%7D%20%20%20%20%7By%20%3D%201%7D%20%5C%5C%20%20%20%20%7By%20%3D%200%7D%20%20%5Cend%7Barray%7D%20%7D%20%5Cright.),其中:![{h_\theta }({\text{z}}) = \frac{1}{{1 + {e^{ - z}}}}](http://chart.apis.google.com/chart?cht=tx&chs=1x0&chf=bg,s,FFFFFF00&chco=000000&chl=%7Bh_%5Ctheta%20%7D%28%7B%5Ctext%7Bz%7D%7D%29%20%3D%20%5Cfrac%7B1%7D%7B%7B1%20%2B%20%7Be%5E%7B%20-%20z%7D%7D%7D%7D),![{h_\theta }({\text{z}}) = \frac{1}{{1 + {e^{ - z}}}}](http://chart.apis.google.com/chart?cht=tx&chs=1x0&chf=bg,s,FFFFFF00&chco=000000&chl=%7Bh_%5Ctheta%20%7D%28%7B%5Ctext%7Bz%7D%7D%29%20%3D%20%5Cfrac%7B1%7D%7B%7B1%20%2B%20%7Be%5E%7B%20-%20z%7D%7D%7D%7D)
645+
- 如图所示,如果`y=1`,`cost`代价函数如图所示
646+
![enter description here][24]
647+
我们想让![{\theta ^T}x > > 0](http://chart.apis.google.com/chart?cht=tx&chs=1x0&chf=bg,s,FFFFFF00&chco=000000&chl=%7B%5Ctheta%20%5ET%7Dx%20%3E%20%20%3E%200),即`z>>0`,这样的话`cost`代价函数才会趋于最小(这是我们想要的),所以用图中**红色**的函数![\cos {t_1}(z)](http://chart.apis.google.com/chart?cht=tx&chs=1x0&chf=bg,s,FFFFFF00&chco=000000&chl=%5Ccos%20%7Bt_1%7D%28z%29)代替逻辑回归中的cost
648+
- 当`y=0`时同样,用![\cos {t_0}(z)](http://chart.apis.google.com/chart?cht=tx&chs=1x0&chf=bg,s,FFFFFF00&chco=000000&chl=%5Ccos%20%7Bt_0%7D%28z%29)代替
649+
![enter description here][25]
650+
639651

640652
[1]: ./images/LinearRegression_01.png "LinearRegression_01.png"
641653
[2]: ./images/LogisticRegression_01.png "LogisticRegression_01.png"
@@ -659,4 +671,6 @@ def predict(Theta1,Theta2,X):
659671
[20]: ./images/NeuralNetwork_06.png "NeuralNetwork_06.png"
660672
[21]: ./images/NeuralNetwork_07.png "NeuralNetwork_07.png"
661673
[22]: ./images/NeuralNetwork_08.png "NeuralNetwork_08.png"
662-
[23]: ./images/NeuralNetwork_09.png "NeuralNetwork_09.png"
674+
[23]: ./images/NeuralNetwork_09.png "NeuralNetwork_09.png"
675+
[24]: ./images/SVM_01.png "SVM_01.png"
676+
[25]: ./images/SVM_02.png "SVM_02.png"

0 commit comments

Comments
 (0)