- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads a two-feature dataset via the project-local ``utilities`` helper,
visualises the two classes, fits an ``SVC`` (RBF kernel by default), plots
the decision boundary on the train and test splits, and prints a
classification report for each split.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (feature matrix X and 0/1 class labels y).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: the original used `from sklearn import cross_validation`, a module
# removed in scikit-learn 0.20; train_test_split now lives in model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance on both splits.
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")