- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train an SVM on 2-D two-class data and report its performance.

Loads a two-column dataset via the project ``utilities`` module,
visualizes the two classes, fits an ``SVC`` (RBF kernel by default),
plots decision boundaries for the train and test splits, and prints
classification reports for both.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (feature rows X and class labels y).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares = class 0, hollow squares = class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# model_selection provides the same train_test_split.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance.
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Without an explicit show(), a script run displays no figures.
plt.show()