import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
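
# ---------------------------------------------------------------------------
# utilities.py is the helper module that ships with the book's Chapter03 code
# and is not reproduced in this post. The function below is only a sketch of
# what load_data is assumed to do (parse a comma-separated text file whose
# last column is the class label); it is not called anywhere in this script.
def _load_data_sketch(path):
    data = np.loadtxt(path, delimiter=',')
    return data[:, :-1], data[:, -1]
# ---------------------------------------------------------------------------
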
###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])
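
# Note: since load_data presumably returns NumPy arrays, the same split can be
# written with boolean masks, e.g. class_0 = X[y == 0] and class_1 = X[y == 1].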

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split  # sklearn.cross_validation was removed; model_selection is its replacement
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
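
# ---------------------------------------------------------------------------
# utilities.plot_classifier is also part of the book's helper module. The
# function below is only a sketch of what such a decision-boundary plot might
# look like for a fitted 2-D classifier (it assumes X is a NumPy array); it is
# not called anywhere in this script.
def _plot_classifier_sketch(clf, X, y, title):
    # Build a dense grid covering the data range
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    # Classify every grid point and shade the resulting regions
    zz = clf.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.pcolormesh(xx, yy, zz, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.title(title)
# ---------------------------------------------------------------------------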

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

plt.show()  # display the three figures created above
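
# ---------------------------------------------------------------------------
# The two commented-out params dicts above are the other kernels this recipe
# experiments with. A quick way to compare all three on this data (a sketch,
# not part of the book's code) is plain 3-fold cross-validation:
from sklearn.model_selection import cross_val_score

for kernel_params in [{'kernel': 'linear'},
                      {'kernel': 'poly', 'degree': 3},
                      {'kernel': 'rbf'}]:
    scores = cross_val_score(SVC(**kernel_params), X, y, cv=3)
    print(kernel_params, '-> mean accuracy:', round(scores.mean(), 3))
# ---------------------------------------------------------------------------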