import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')
###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()
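The script imports a local utilities module that is not included in the post (it ships with the book's code repository). As a rough sketch of what the two helpers it uses typically do, assuming the data file is comma-separated with the class label in the last column and that the decision boundary is drawn over a 2-D mesh grid (this is not the book's exact implementation), something like this would work:

# utilities.py -- minimal sketch of the two helpers used above
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes comma-separated rows with the class label in the last column
    X, y = [], []
    with open(input_file, 'r') as f:
        for line in f:
            values = [float(v) for v in line.strip().split(',')]
            X.append(values[:-1])
            y.append(values[-1])
    return np.array(X), np.array(y)

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Evaluate the classifier on a mesh grid covering the data range
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    # Shade the predicted regions and overlay the data points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())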
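The commented-out params dictionaries are there so you can switch between the linear, polynomial, and RBF kernels. If you want to compare them on the same split, a quick follow-on snippet (the loop and the use of accuracy as the score are just an illustration, not part of the original script) looks like this:

from sklearn.metrics import accuracy_score

kernel_params = [
    {'kernel': 'linear'},
    {'kernel': 'poly', 'degree': 3},
    {'kernel': 'rbf'},
]

for params in kernel_params:
    clf = SVC(**params)
    clf.fit(X_train, y_train)
    acc = accuracy_score(y_test, clf.predict(X_test))
    print(params, '-> test accuracy:', round(acc, 3))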