- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import classification_report
# NOTE(fix): sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

import utilities

# Load input data: utilities.load_data is expected to return the feature
# matrix X and the label vector y parsed from the text file.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# random_state is fixed so the split (and hence the reported metrics)
# is reproducible across runs.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance on both splits.
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Render all figures created above (input data + the two decision-boundary
# plots produced by utilities.plot_classifier).
plt.show()
|
|