import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# Note: sklearn.cross_validation was removed in newer scikit-learn versions;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Pick one kernel configuration; the RBF kernel is used here.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
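The script depends on a helper module named utilities that ships with the book's example code but is not included in this post. As a rough, hypothetical stand-in so the script can be run on its own, the sketch below assumes data_multivar.txt is a comma-separated file whose last column is the class label, and that plot_classifier should shade the decision regions over a 2D mesh; the function names and signatures are inferred from how they are called above, not copied from the book.

# utilities.py -- minimal, hypothetical stand-in for the book's helper module
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    """Load comma-separated rows; all columns but the last are features, the last is the label."""
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    """Plot the classifier's decision regions for a 2D feature space."""
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    # Predict a label for every mesh point and reshape back to the grid
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With a stand-in like this saved as utilities.py next to the script, the training and test plots appear when plt.show() is called at the end of the main script.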