import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"

# Display the input-data, training and test figures
plt.show()
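A quick note for anyone copying this listing today: it targets Python 2 and an older scikit-learn, so the bare print statements and sklearn.cross_validation (removed in scikit-learn 0.20 in favour of sklearn.model_selection) will fail on a current install. Below is a minimal sketch of only the lines that change under Python 3 and a recent scikit-learn, assuming the book's utilities module is otherwise compatible; the rest of the script stays as above.

# Sketch of the Python 3 / newer scikit-learn changes only (assumption:
# utilities.load_data and utilities.plot_classifier still work unchanged).
from sklearn.model_selection import train_test_split  # replaces sklearn.cross_validation
from sklearn.svm import SVC
from sklearn.metrics import classification_report

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Note: in newer scikit-learn releases the default gamma for SVC is 'scale'
# rather than 'auto', so results may differ slightly from the book's output.
classifier = SVC(kernel='rbf')
classifier.fit(X_train, y_train)
y_test_pred = classifier.predict(X_test)

# print is a function in Python 3
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")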