- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
![Rank: 6](static/image/common/star_level3.gif) ![Rank: 6](static/image/common/star_level2.gif)
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data: a text file of 2-D points with a class label per row,
# parsed by the project-local helper (presumably returns features X and labels y
# — confirm against utilities.load_data).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled black squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training.
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

# Fixed random_state so the 75/25 split (and hence the reported metrics)
# is reproducible across runs.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels from the book's exercise — uncomment one to compare:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# One human-readable name per distinct label value (e.g. 'Class-0', 'Class-1').
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|