import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
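# Note: utilities.py is the helper module bundled with the book's code
# (Python-Machine-Learning-Cookbook, Chapter03) and is not reproduced in this
# post. load_data() is assumed to parse comma-separated rows from the text
# file and return the feature columns as X and the last column as the label
# vector y, both as NumPy arrays.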

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')
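
# The scatter plot drawn above shows class-0 samples as filled black squares
# and class-1 samples as hollow black squares (facecolors='None'), so the two
# classes can be told apart in the figure.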

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
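
# The two commented-out 'params' lines above switch the model to a linear or
# a degree-3 polynomial kernel in place of the RBF kernel used here.
# Note: plot_classifier() also comes from the book's utilities module (not
# shown in this post); it is assumed to evaluate the trained classifier on a
# mesh grid covering the data range and plot the resulting decision regions
# with the data points overlaid, using the last argument as the figure title.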

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"
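
The listing above targets Python 2 and an old scikit-learn release: print is used as a statement, and train_test_split is imported from sklearn.cross_validation, a module that was removed in scikit-learn 0.20. Below is a minimal sketch of the same split/train/evaluate steps for Python 3 and a current scikit-learn. It assumes the same data file and the book's utilities helpers are available, and the data file path is shortened to a placeholder that you should adjust to your own copy.

import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split  # replaces sklearn.cross_validation
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities  # helper module from the book's repository (assumed available)

# Adjust this path to your local copy of the Chapter03 data file
input_file = 'data_multivar.txt'
X, y = utilities.load_data(input_file)

# Same 75/25 split with the same random seed as in the listing above
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# RBF-kernel SVM, as in the original recipe
classifier = SVC(kernel='rbf')
classifier.fit(X_train, y_train)
y_test_pred = classifier.predict(X_test)

utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

# print() is a function in Python 3
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))

plt.show()  # needed to display the figures when running as a script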