import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"
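
A note for anyone running this today: the listing is written for Python 2 and an older scikit-learn, so the print statements need parentheses under Python 3 and the sklearn.cross_validation module was removed in newer releases (0.20+) in favour of sklearn.model_selection. A minimal sketch of the same train/evaluate flow on a current stack (still assuming the book's utilities.load_data helper and the same data file, with the path adjusted to your machine) could look like this:

import numpy as np
from sklearn.model_selection import train_test_split  # replaces sklearn.cross_validation
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities  # helper module from the book's repository

# Same data file as in the listing above; adjust the path to your local copy
X, y = utilities.load_data('data_multivar.txt')

# Same 25% hold-out split and random seed as the original script
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# RBF-kernel SVM, matching the uncommented params line above
classifier = SVC(kernel='rbf')
classifier.fit(X_train, y_train)

y_test_pred = classifier.predict(X_test)
target_names = ['Class-' + str(int(i)) for i in set(y)]
print(classification_report(y_test, y_test_pred, target_names=target_names))

Swap kernel='rbf' for 'linear' or 'poly' (with degree=3) to reproduce the commented-out variants in the original listing.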
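
The script also depends on the utilities.py module that ships with the book's code (load_data and plot_classifier). If you only copied the snippet and do not have that file, the rough stand-in below should be close enough to run the example. It assumes the data file is comma-separated with the class label in the last column, and it draws the decision regions by evaluating the classifier over a mesh grid, which is roughly what the book's helper does:

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes comma-separated rows: feature, feature, ..., label
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the classifier on a mesh grid covering the data range
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    # Shade the predicted regions and overlay the training points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

Save these two functions as utilities.py next to the script, and add plt.show() at the end of the main listing so the figures actually appear when it is run as a plain script.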