- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 555
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""SVM classification example (RBF kernel) on a 2-D two-class dataset.

Loads the data, visualizes the two classes, trains an SVC on a 75/25
train/test split, plots the decision boundary for both splits, and
prints classification reports.

NOTE(review): depends on a project-local ``utilities`` module providing
``load_data`` and ``plot_classifier`` -- not visible here; confirm API.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data.
# Assumes the file holds feature columns plus a trailing label column,
# as utilities.load_data expects -- TODO confirm against that helper.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

# Coerce to ndarray so boolean masking below works whether load_data
# returns lists or arrays.
X = np.asarray(X)
y = np.asarray(y)

###############################################
# Separate the data into classes based on 'y'.
class_0 = X[y == 0]
class_1 = X[y == 1]

# Plot the input data: filled squares = class 0, hollow squares = class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# sklearn.cross_validation was removed in scikit-learn 0.20; its
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance.
from sklearn.metrics import classification_report

# classification_report orders its rows by sorted label value, so build
# the display names from sorted(set(y)) to keep names aligned with rows
# (a bare set() has no guaranteed iteration order).
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures; the original script created plots but never showed them.
plt.show()
|
|