import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Kernel choice: uncomment one of the alternatives to compare kernels
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
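The script depends on the book's helper module `utilities`, which is not included in the post. If you don't have it, a minimal sketch of `load_data` would look like the following, assuming data_multivar.txt is a comma-separated file with the feature columns first and the class label as the last column on each row:

import numpy as np

def load_data(input_file):
    # Read comma-separated rows; every column except the last is a feature,
    # the last column is the class label (assumed file layout).
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y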
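The same goes for `plot_classifier`. A minimal sketch, assuming the helper shades the classifier's decision regions over a mesh grid spanning the two features and overlays the data points:

import numpy as np
import matplotlib.pyplot as plt

def plot_classifier(classifier, X, y, title):
    # Mesh grid covering the two features with a small margin (assumed behaviour).
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))

    # Predict a class for every grid point and shade the decision regions.
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)

    # Overlay the actual points, coloured by their true label.
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With these two helpers saved as utilities.py next to the script (and input_file pointed at your own copy of data_multivar.txt), the listing above runs end to end and prints the training and test classification reports before showing the plots.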