- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (rows of feature pairs plus a 0/1 class label,
# parsed by the project-local utilities.load_data helper).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
# model_selection is the supported home of train_test_split.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# Sort the labels: classification_report orders classes by sorted label value,
# so target_names must be built in the same order (plain set(y) iteration
# order is not guaranteed to match).
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Render all figures created above; without this a plain (non-interactive)
# script run produces no visible plots.
plt.show()