- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on two-class 2-D data.

Loads a multivariate dataset, plots the two classes, fits an SVC with an
RBF kernel on a 75/25 train/test split, plots the decision boundary on
both splits, and prints classification reports.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities  # project-local helper: load_data() and plot_classifier()

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# model_selection provides the same train_test_split API.
from sklearn import model_selection
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = model_selection.train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# One display label per distinct class value in y
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")