- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an RBF-kernel SVM on 2-D multivariate data.

Loads labeled 2-D points via the project-local ``utilities`` module,
visualizes the two classes, fits an SVC, plots the decision boundary on
the train/test splits, and prints classification reports.

NOTE(review): reconstructed from a forum paste that interleaved line-noise
into the code; the original logic (book: "Python Machine Learning Cookbook",
Ch.3) is preserved.  Modernized: Python 3 ``print()`` calls, and
``sklearn.model_selection.train_test_split`` replaces the long-removed
``sklearn.cross_validation`` module.
"""

import numpy as np
import matplotlib.pyplot as plt
from sklearn.svm import SVC
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report

import utilities

# Load input data (X: 2-D feature rows, y: 0/1 labels).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels left for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")