- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
"""SVM classification demo: load 2-D multivariate data, visualize the two
classes, train an RBF-kernel SVC on a 75/25 split, and print classification
reports for the training and test sets.

Depends on a project-local ``utilities`` module providing ``load_data``
(returns features X and labels y) and ``plot_classifier`` (draws a decision
boundary for a fitted classifier).
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels from the original recipe, kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all created figures (no-op if utilities.plot_classifier already
# shows them — TODO confirm against the utilities module).
plt.show()