- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 558
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""SVM classification example (Python Machine Learning Cookbook, Ch. 3).

Loads a 2-D two-class dataset, plots it, trains an RBF-kernel SVM on a
75/25 train/test split, visualizes the decision boundary, and prints
classification reports for both splits.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities  # project-local helper: load_data / plot_classifier

# Load input data (rows of feature columns plus an integer class label).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares = class 0, hollow squares = class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black',
            edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None',
            edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was deprecated in 0.18 and removed in
# 0.20; train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels from the book, kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# One display name per distinct label value (e.g. 'Class-0', 'Class-1').
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred,
                            target_names=target_names))
print("#" * 30 + "\n")

# Render all figures (without this, a plain `python script.py` run shows
# nothing).
plt.show()
|
|