- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads a two-class dataset, visualizes it, fits an SVC (RBF kernel by
default), plots the decision boundary on the train and test splits, and
prints classification reports for both.

Requires the project-local `utilities` module for `load_data` and
`plot_classifier` (assumed API: load_data(path) -> (X, y);
plot_classifier(clf, X, y, title) — TODO confirm against utilities.py).
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# NOTE: sklearn.cross_validation was deprecated in 0.18 and removed in
# 0.20; train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels from the book — uncomment one to experiment:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")