- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train an SVM classifier on 2-D multivariate data and visualize results.

Loads labelled 2-D points from ``data_multivar.txt`` via the project-local
``utilities`` module, plots the raw classes, fits an SVC (RBF kernel by
default), draws the decision boundary for the train and test splits, and
prints classification reports for both.
"""

import numpy as np
import matplotlib.pyplot as plt
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# model_selection is the maintained home of train_test_split.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities  # project-local helpers: load_data, plot_classifier

# Load input data (feature matrix X and label vector y).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels from the original example, kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance on both splits.
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")