- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads labelled points from a text file, visualises the two classes,
fits an RBF-kernel SVC on a 75/25 train/test split, plots the decision
boundaries, and prints classification reports for both splits.

Requires the book's companion `utilities` module (load_data,
plot_classifier) on the import path.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data: each row of the file is "x1,x2,label".
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# classification_report lists classes in sorted label order, so the
# names must be built from sorted labels to pair up correctly.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures created above.
plt.show()