- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
![Rank: 6](static/image/common/star_level3.gif) ![Rank: 6](static/image/common/star_level2.gif)
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""SVM classification demo (Python Machine Learning Cookbook, Ch. 3).

Loads a 2-D two-class dataset, plots the raw classes, trains an SVC
(RBF kernel by default), visualizes the decision boundary on the train
and test splits, and prints classification reports for both.

Requires the book's companion `utilities` module (load_data /
plot_classifier) and the Chapter03 data file on the local path below.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data: utilities.load_data returns the feature matrix X and
# the label vector y parsed from the comma-separated text file.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled black squares, class 1 as
# hollow squares, so the two clusters are distinguishable in grayscale.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
# NOTE: sklearn.cross_validation was deprecated in 0.18 and removed in
# 0.20; model_selection.train_test_split is the supported equivalent.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

# Fixed random_state keeps the 75/25 split reproducible between runs.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the book:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# One display name per distinct label value (e.g. 'Class-0', 'Class-1').
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|