- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
# SVM classification demo (two-class, 2-D data).
#
# Loads a labelled dataset via the project-local `utilities` helper,
# visualizes the two classes, trains an RBF-kernel SVM on a 75/25
# train/test split, plots the decision boundary on both splits, and
# prints a classification report for each.

import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data.
# NOTE(review): hard-coded absolute Windows path — presumably each user
# points this at their local copy of data_multivar.txt; confirm before reuse.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# sklearn.cross_validation was removed in scikit-learn 0.20;
# model_selection.train_test_split has an identical signature.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
# params = {'kernel': 'linear'}
# params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance.
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")