import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (sklearn.model_selection replaces the removed sklearn.cross_validation module)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
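The script depends on the utilities module (utilities.py) that ships with the book's Chapter03 code, providing load_data and plot_classifier. If you don't have that file, a minimal stand-in along the following lines should be enough to run the example; it is a sketch of the assumed behaviour (comma-separated features with the label in the last column, plus a decision-region plot), not the book's exact implementation:

# utilities.py -- minimal stand-in, an assumption-based sketch
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    """Load comma-separated rows; last column is the label, the rest are features."""
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    """Plot the decision regions of a fitted 2-D classifier together with the points."""
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.02),
                         np.arange(y_min, y_max, 0.02))
    Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.contourf(xx, yy, Z, cmap=plt.cm.gray, alpha=0.8)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())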