import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"
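The script imports a local utilities module from the Cookbook's Chapter03 folder, which is not included in the post. Below is a minimal sketch of the two helpers it calls; the comma-separated file layout (feature columns followed by a label) and the plotting details are assumptions inferred from how the script uses the helpers, not taken from the post itself.

# utilities.py -- assumed sketch, not the actual file from the repository
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    """Read comma-separated rows of the form x1,x2,...,label."""
    X, y = [], []
    with open(input_file, 'r') as f:
        for line in f:
            values = [float(v) for v in line.strip().split(',')]
            X.append(values[:-1])
            y.append(values[-1])
    return np.array(X), np.array(y)

def plot_classifier(classifier, X, y, title=''):
    """Plot the decision regions of a fitted 2-D classifier."""
    # Plotting range with a small margin around the data
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0

    # Evaluate the classifier on a dense grid
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    # Draw the decision regions and overlay the data points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

Saving a file like this as utilities.py next to the script (with the real data_multivar.txt path) should be enough to run the listing end to end.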
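As posted, the listing targets Python 2 and an older scikit-learn: it uses print statements and sklearn.cross_validation, which was deprecated in scikit-learn 0.18 and removed in 0.20. On a current Python 3 / scikit-learn setup, roughly the following substitutions should be needed (only the changed lines are shown, as a sketch rather than a verified port); the post also stops before any plt.show() call, so add one at the end if the figures do not appear.

# Adjustments for Python 3 and scikit-learn >= 0.20 (only the changed lines)
from sklearn.model_selection import train_test_split

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()  # display the figures when running as a plain script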