- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split  # cross_validation was removed in sklearn 0.20
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities  # project-local helper: load_data() and plot_classifier()

# Load the feature matrix X and label vector y from the cookbook data file.
# NOTE(review): hard-coded absolute Windows path — adjust for your machine.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
# (list comprehension rather than boolean indexing because load_data's
# return type is not guaranteed to be an ndarray — TODO confirm)
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the raw input data: filled squares = class 0, hollow squares = class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
# params = {'kernel': 'linear'}
# params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
# One display name per distinct label value, e.g. 'Class-0', 'Class-1'.
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|