import numpy as np
import matplotlib.pyplot as plt

# train_test_split lives in sklearn.model_selection
# (the book's sklearn.cross_validation module has since been removed)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
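The two commented-out `params` lines show the other kernels the recipe tries (linear and a degree-3 polynomial). As a rough way to compare all three on this dataset, a short cross-validation loop such as the one below could be appended; `cross_val_score` and the kernel names are standard scikit-learn, while the five-fold setup and accuracy scoring are just illustrative choices.

from sklearn.model_selection import cross_val_score
from sklearn.svm import SVC

# Compare the three kernel choices from the commented-out 'params' lines
for params in [{'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}]:
    scores = cross_val_score(SVC(**params), X, y, cv=5, scoring='accuracy')
    print(params, '-> mean accuracy: %.3f' % scores.mean())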