import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# Note: sklearn.cross_validation has been removed; train_test_split now lives in sklearn.model_selection
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Display the figures
plt.show()
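The script imports a utilities module that ships with the book's code repository but is not included in this post. For anyone running the snippet without that file, below is a minimal, hypothetical stand-in written from what the script appears to expect: load_data is assumed to read comma-separated rows with the class label in the last column, and plot_classifier is assumed to shade the decision regions of a fitted 2D classifier over a mesh grid. The real utilities.py in the repository may differ in its details.

# Hypothetical stand-in for the book's utilities module (save as utilities.py).
# Assumption: data_multivar.txt stores comma-separated features with the
# class label in the last column.
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    """Read comma-separated rows; all columns but the last are features."""
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    """Shade the decision regions of a fitted classifier over 2D data X."""
    # Define the plotting range with a small margin around the data
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0

    # Evaluate the classifier on a dense grid of points
    step = 0.01
    xx, yy = np.meshgrid(np.arange(x_min, x_max, step),
                         np.arange(y_min, y_max, step))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    # Colour the decision regions and overlay the data points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With a helper like this in place, you can toggle the commented-out params lines between the 'linear', 'poly' (degree 3), and 'rbf' kernels and compare how each kernel shapes the decision boundary in the training and test plots.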