import numpy as np
import matplotlib.pyplot as plt
import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

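# Note: 'utilities' is the helper module bundled with the book's example code.
# If it is not available, a minimal stand-in for load_data (assuming the data
# file is comma-separated with the class label in the last column) could be:
#
#     def load_data(input_file):
#         data = np.loadtxt(input_file, delimiter=',')
#         return data[:, :-1], data[:, -1]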
###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Display all figures
plt.show()
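For anyone who does not have the book's companion utilities.py, the decision-boundary plotting that plot_classifier performs can be approximated with the minimal sketch below. It is a sketch under assumptions, not the book's exact helper: the grid step, colour maps, and marker size are guesses; only the fitted classifier, the 2-D feature matrix, the labels, and a title are needed.

import numpy as np
import matplotlib.pyplot as plt

def plot_classifier(classifier, X, y, title):
    # Build a grid that covers the data with a small margin
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))

    # Predict a class for every grid point and reshape back onto the grid
    zz = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    # Shade the decision regions and overlay the data points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, zz, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, s=60, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With a stand-in like this in utilities.py, the calls in the script above should produce one shaded decision-region plot for the training set and one for the test set once plt.show() runs.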