import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')
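# Aside (not in the original post): if load_data returns y as a NumPy array,
# boolean masking does the same class split more concisely:
#     class_0 = X[y == 0]
#     class_1 = X[y == 1]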
###############################################
# Train/test split and SVM training
# (train_test_split is imported from sklearn.model_selection; the old
# sklearn.cross_validation import no longer works on current scikit-learn)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Pick a kernel by switching the params dict
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
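# Optional aside (not in the original script): compare the three kernel
# choices above on the same split. SVC.score() returns mean accuracy.
for kernel_params in [{'kernel': 'linear'},
                      {'kernel': 'poly', 'degree': 3},
                      {'kernel': 'rbf'}]:
    candidate = SVC(**kernel_params)
    candidate.fit(X_train, y_train)
    print(kernel_params, '-> test accuracy:', round(candidate.score(X_test, y_test), 3))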
###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display the figures created above
plt.show()
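The script imports a companion utilities module that is not included in the post. Below is a minimal stand-in sketch: the load_data and plot_classifier names and signatures match the calls above (read a comma-separated file whose last column is the label, and draw the decision regions of a classifier fitted on 2-D data), but the internals here are an assumption, not the book's original helper code.

# utilities.py -- minimal stand-in for the cookbook's helper module (assumed internals)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    """Load a comma-separated file whose last column is the class label."""
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    """Plot the decision regions of a classifier fitted on 2-D data."""
    # Build a dense grid covering the data range
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))

    # Predict the class of every grid point and reshape back to the grid
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())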