import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn.model_selection import train_test_split  # sklearn.cross_validation was removed in newer scikit-learn
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")
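The script imports a `utilities` module that ships alongside the Chapter03 code in the book's repository and provides `load_data` and `plot_classifier`. If you only copied the script above, the minimal stand-in below should be enough to run it. This is a sketch under assumptions, not the book's actual helper: it assumes `data_multivar.txt` is comma-separated with the class label in the last column, and it shades the decision regions over a simple mesh grid.

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumption: one sample per line, comma-separated, label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Predict over a dense mesh covering the data range and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.show()

Save these two functions as utilities.py next to the script (or adjust the import) and the script above will run end to end, producing the input-data plot, the training/test decision-boundary plots, and the two classification reports.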