import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# Note: sklearn.cross_validation was removed in newer scikit-learn versions;
# train_test_split now lives in sklearn.model_selection
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# Sort the labels so the report names line up with the classes
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
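
The script depends on the book's local utilities module (Chapter03/utilities.py), which is not included in the post. Below is a minimal sketch of what the two helpers used here could look like, assuming data_multivar.txt is comma-separated with the class label in the last column; it is an approximation for reference, not the book's exact code.

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes comma-separated rows: feature columns first, class label last
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Evaluate the classifier on a dense grid and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.pcolormesh(xx, yy, Z, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, s=40, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.title(title)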