import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Pick one kernel; uncomment the variant you want to try
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()
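The script imports the cookbook's utilities module for load_data and plot_classifier. If you don't have that file next to the script, a minimal sketch along the following lines works; it is an approximation written for this post, not the repository's exact code:

# utilities.py -- minimal sketch of the two helpers used above (approximate, not the cookbook's exact code)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Each line is comma-separated: all columns but the last are features, the last is the label
    X, y = [], []
    with open(input_file, 'r') as f:
        for line in f:
            data = [float(x) for x in line.split(',')]
            X.append(data[:-1])
            y.append(data[-1])
    return np.array(X), np.array(y)

def plot_classifier(classifier, X, y, title=''):
    # Plot the decision regions of a fitted classifier over a 2D feature space
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0

    # Evaluate the classifier on a dense grid covering the data range
    step = 0.01
    xx, yy = np.meshgrid(np.arange(x_min, x_max, step), np.arange(y_min, y_max, step))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black', linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())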
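The commented-out params lines suggest also trying the linear and polynomial kernels. To compare all three on the same split without editing the script each time, a small loop like this (reusing X_train, X_test, target_names and the imports defined above) prints a test-set report per kernel:

# Compare the three kernel choices on the same train/test split
for params in [{'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}]:
    clf = SVC(**params)
    clf.fit(X_train, y_train)
    print("\nKernel:", params['kernel'])
    print(classification_report(y_test, clf.predict(X_test), target_names=target_names))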