- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到
- 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""SVM classification demo (Python Machine Learning Cookbook, Ch. 3).

Loads a two-feature, two-class dataset from ``data_multivar.txt``,
plots the raw points, trains an ``SVC`` with an RBF kernel on a 75/25
train/test split, and prints classification reports for both splits.
Plot helpers come from the project-local ``utilities`` module.
"""
import numpy as np
import matplotlib.pyplot as plt
# NOTE(review): `sklearn.cross_validation` was removed in scikit-learn 0.20;
# `model_selection` is the supported home of train_test_split.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data.
# NOTE(review): hard-coded absolute Windows path — adjust to your checkout.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for
# class 1 ('None' facecolor draws only the marker outline).
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with (as in the book):
#   params = {'kernel': 'linear'}
#   params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")