import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"
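
Note that the snippet above targets Python 2 and an old scikit-learn release: the sklearn.cross_validation module was deprecated in 0.18 and removed in 0.20 (its functions now live in sklearn.model_selection), and the print statements need parentheses under Python 3. Below is a minimal updated sketch of the split/train/evaluate steps with the same parameters, assuming X, y and the figures have already been created as above; it is an equivalent rewrite, not the book's code.

from sklearn.model_selection import train_test_split   # replaces sklearn.cross_validation
from sklearn.svm import SVC
from sklearn.metrics import classification_report
import matplotlib.pyplot as plt

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Train an RBF-kernel SVM, same as params = {'kernel': 'rbf'} above
classifier = SVC(kernel='rbf')
classifier.fit(X_train, y_train)
y_test_pred = classifier.predict(X_test)

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30)

plt.show()   # the original snippet never calls this, so its figures will not appear in a plain script run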
" d3 l9 S/ D0 v2 R" a" S: e- \% O
- v/ _7 B/ Z, X, m |
|
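For anyone running this without the book's repository: utilities is the helper module shipped with the Chapter 3 code and provides load_data and plot_classifier. A rough, hypothetical stand-in is sketched below; the comma delimiter, grid step and colour maps are assumptions, not the book's actual implementation.

# Hypothetical stand-in for the book's utilities module
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes data_multivar.txt holds comma-separated rows, last column = class label
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title):
    # Build a mesh grid covering the data range with a small margin
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    step = 0.01 * max(x_max - x_min, y_max - y_min)
    xx, yy = np.meshgrid(np.arange(x_min, x_max, step),
                         np.arange(y_min, y_max, step))

    # Predict a class for every grid point and shade the decision regions
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.contourf(xx, yy, mesh_output, cmap=plt.cm.gray, alpha=0.5)
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired, edgecolors='black')
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())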