import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
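
# NOTE: utilities is a helper module shipped with the book's example code and is not
# included in this post. Assuming data_multivar.txt is a comma-separated file with the
# features in the leading columns and the class label in the last column, an
# equivalent (hypothetical) loader would look roughly like:
#
#     data = np.loadtxt(input_file, delimiter=',')
#     X, y = data[:, :-1], data[:, -1]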

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# NOTE: the book uses the old sklearn.cross_validation module, which has been removed
# from scikit-learn; train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
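
# NOTE: utilities.plot_classifier() is another helper from the book's repository and
# is not reproduced in this post. It is assumed to evaluate the classifier on a dense
# mesh grid over the feature range and draw the decision regions with the points on
# top; a sketch of that idea (not the book's exact implementation) would be:
#
#     x_min, x_max = X_train[:, 0].min() - 1.0, X_train[:, 0].max() + 1.0
#     y_min, y_max = X_train[:, 1].min() - 1.0, X_train[:, 1].max() + 1.0
#     xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01), np.arange(y_min, y_max, 0.01))
#     Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
#     plt.figure()
#     plt.contourf(xx, yy, Z, alpha=0.3)
#     plt.scatter(X_train[:, 0], X_train[:, 1], c=y_train, edgecolors='black')
#     plt.title('Training dataset')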

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
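
###############################################
# Additional check (not shown in the listing above): a single-number accuracy summary
# for the test set via sklearn.metrics.accuracy_score, followed by plt.show() so the
# figures created above are actually displayed when this is run as a plain script.
from sklearn.metrics import accuracy_score
print("Test set accuracy: {:.3f}".format(accuracy_score(y_test, y_test_pred)))

plt.show()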