import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Display the three figures
plt.show()
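The script relies on the `utilities` helper module that ships with the book's repository (it must sit next to the script or be on the Python path). If you don't have it, a minimal stand-in for the two functions used here might look like the sketch below. The comma-separated file layout (label in the last column), the grid step, and the plotting style are my assumptions, not the book's exact implementation.

# utilities.py -- hypothetical minimal stand-in for the book's helper module
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes comma-separated rows with the class label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title=''):
    # Predict over a dense grid to draw the decision regions, then overlay the points
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, Z, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())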