import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (train_test_split now lives in sklearn.model_selection; the old sklearn.cross_validation module was removed)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
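If you don't have the book's utilities.py next to this script, the two helpers it calls (load_data and plot_classifier) can be approximated with the minimal sketch below. This is only an assumption about what they do (read a comma-separated feature/label file, and shade the classifier's decision regions over a mesh grid), not the Cookbook's actual implementation; the function names and the call signatures match the code above, everything inside is illustrative.

# Minimal stand-in for the Cookbook's utilities module (assumed behaviour, not the original code)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes each line looks like: feature1,feature2,...,label
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Evaluate the trained classifier on a dense mesh and colour the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.show()

With these in place, the run above with kernel='rbf' draws smooth, non-linear decision boundaries; the linear and polynomial settings are left commented out in the script as alternatives to try.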