- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data: utilities.load_data is expected to return the feature
# matrix X and label vector y parsed from the text file.
# NOTE(review): hard-coded absolute Windows path — adjust for your machine.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
###############################################
# Separate the data into two classes based on the label vector 'y'
# (assumes binary labels 0 and 1, as used by the rest of the script).
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')
###############################################
# Train/test split and SVM training.
# NOTE(review): the original used `sklearn.cross_validation`, which was
# removed in scikit-learn 0.20 — `model_selection` is the supported module.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the original example:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
###############################################
# Evaluate classifier performance on both splits.

from sklearn.metrics import classification_report

# Build human-readable class names in deterministic label order.
# NOTE(review): the original iterated an unsorted set(y); sorting guarantees
# the names line up with classification_report's label ordering.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")