- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
# SVM classification example (Python Machine Learning Cookbook, Chapter 3).
#
# Loads a 2-D two-class dataset from disk, visualizes the raw points,
# trains an RBF-kernel SVM on a 75/25 train/test split, plots the decision
# boundaries via the project-local `utilities` helpers, and prints
# classification reports for both splits.

import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split  # cross_validation was removed in sklearn 0.20
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities  # project-local helper: load_data / plot_classifier

# Load input data (two feature columns X, one label column y — per utilities.load_data;
# TODO confirm against the helper's actual return contract).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
# matplotlib expects the lowercase string 'none' for a transparent face;
# 'None' (capitalized) is interpreted as a named-color lookup.
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance.
# classification_report orders rows by sorted label value, so sort the
# label set here to keep target_names aligned with the right rows.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|