import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
# sklearn.cross_validation was removed in newer scikit-learn; use model_selection instead
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)
7 q" z' K9 n: C" G
#params = {'kernel': 'linear'}9 G+ `, H& K% s( K, U( D$ U+ m) `: r
#params = {'kernel': 'poly', 'degree': 3}
9 |4 d: ~- ~: s' F' `; D2 A" Fparams = {'kernel': 'rbf'}- h* ?0 H3 ^( X) `0 t( Z6 U( Y1 h
classifier = SVC(**params)- ^, L5 g) s4 @# a8 I. [& ?
classifier.fit(X_train, y_train)5 |2 F' T2 C0 \% r/ O& V/ o
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')$ X) q! r3 w( m' t
" k* ?4 |5 f/ S4 b- \
y_test_pred = classifier.predict(X_test)
$ N) H9 I) ?2 r/ X& \* u. [utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')" z, J3 T1 |3 n4 M8 s7 y$ d
s8 v7 R1 A0 {+ n# _8 ]; w
###############################################- ^) R$ {, [4 l5 y Y9 L
# Evaluate classifier performance
" A! L! f0 R& v/ i7 [5 y4 D3 t- }: k8 F/ M. P5 [% H8 a
from sklearn.metrics import classification_report
& J5 w5 T6 s% N$ [" f$ t3 Y, [8 F/ k
# sorted() keeps the class names aligned with the sorted label order used by classification_report
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")
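The commented-out params lines show that the same recipe can be run with a linear or degree-3 polynomial kernel instead of the RBF kernel. As a minimal sketch (not part of the original recipe, assuming the same X and y loaded by utilities.load_data above), the three kernel choices could be compared with scikit-learn's cross_val_score before picking one:

from sklearn.model_selection import cross_val_score
from sklearn.svm import SVC

# Candidate kernels taken from the params dictionaries above
kernel_params = [
    {'kernel': 'linear'},
    {'kernel': 'poly', 'degree': 3},
    {'kernel': 'rbf'},
]

for params in kernel_params:
    # 5-fold cross-validated accuracy on the full dataset
    scores = cross_val_score(SVC(**params), X, y, cv=5)
    print(params, '-> mean accuracy: %.3f' % scores.mean())

The original script keeps the RBF kernel, so this comparison is only a sanity check on that choice.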