- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
# SVM classification demo (Python Machine Learning Cookbook, Chapter 3):
# load a 2-D two-class dataset, plot it, train an SVC, and print
# classification reports for the training and test splits.
import numpy as np
import matplotlib.pyplot as plt

import utilities  # project-local helper: load_data() and plot_classifier()

# Load input data (two feature columns + class label per row).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# model_selection.train_test_split is the supported replacement.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Kernel choices from the book; uncomment one to experiment.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# sorted() pins the label order so target_names line up with the
# numerically-sorted class labels classification_report uses.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")