import numpy as np
import matplotlib.pyplot as plt
import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Display the figures created above when running as a script
plt.show()
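The script imports a local utilities module that ships with the book's repository; it is not an installed package, so the file has to sit next to the script. If you don't have it, the sketch below is a minimal stand-in (not the book's actual utilities.py): it assumes data_multivar.txt is comma-separated with the class label in the last column, and it draws the decision regions by predicting over a mesh grid.

# utilities.py -- minimal stand-in for the book's helper module (sketch, not the original)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumption: comma-separated rows, features first, class label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Cover the data with a small margin and evaluate the classifier on a grid
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.02),
                         np.arange(y_min, y_max, 0.02))
    Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    # Shade the predicted regions and overlay the actual points
    plt.figure()
    plt.title(title)
    plt.contourf(xx, yy, Z, alpha=0.3, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired, edgecolors='black')
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())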
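The two commented-out params lines suggest trying the linear and degree-3 polynomial kernels as well. A quick way to compare all three on the same split is sketched below; it is not part of the book's script and simply reuses X_train, X_test, y_train and y_test from the listing above.

from sklearn.svm import SVC

# The three kernel settings from the listing (two of them commented out there)
for params in ({'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}):
    clf = SVC(**params)
    clf.fit(X_train, y_train)
    # score() returns the mean accuracy on the held-out test set
    print(params, '-> test accuracy: %.3f' % clf.score(X_test, y_test))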