- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
# SVM classification example (Python Machine Learning Cookbook, Chapter 3).
#
# Loads a two-class 2-D dataset, plots it, trains an SVM classifier on a
# 75/25 train/test split, visualizes the decision boundary on both splits,
# and prints per-class precision/recall/F1 reports.
import numpy as np
import matplotlib.pyplot as plt
# NOTE: the original used `sklearn.cross_validation`, which was removed in
# scikit-learn 0.20; `model_selection` is the supported replacement.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities  # project-local helpers: load_data, plot_classifier

# Load input data (feature matrix X and label vector y).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y' — boolean-mask equivalent of
# the original per-row list-comprehension filter.
X_arr = np.asarray(X)
y_arr = np.asarray(y)
class_0 = X_arr[y_arr == 0]
class_1 = X_arr[y_arr == 1]

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training (fixed seed for reproducibility).
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Kernel choice — uncomment an alternative to compare decision boundaries.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance on both splits.

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")