- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
# SVM classification demo (adapted from "Python Machine Learning Cookbook",
# Chapter 3): load 2-D labelled points, visualize the two classes, train an
# SVM with an RBF kernel, plot the decision boundaries, and print per-class
# precision/recall/F1 reports for both the training and the test split.
import numpy as np
import matplotlib.pyplot as plt

import utilities  # project-local helper: load_data() and plot_classifier()

# Load input data.
# NOTE(review): hard-coded absolute Windows path — consider making this a
# command-line argument or a path relative to the script.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y' (binary labels 0/1).
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the raw input data: class 0 as filled black squares, class 1 as
# hollow squares, so the two classes are distinguishable in grayscale.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

# Fixed random_state keeps the split reproducible across runs.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with (uncomment one at a time):
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance on both splits.
from sklearn.metrics import classification_report

# One display name per distinct label, e.g. 'Class-0', 'Class-1'.
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|