import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# Note: sklearn.cross_validation was removed in newer scikit-learn versions;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
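The script above depends on the `utilities` helper module that ships with the Cookbook's Chapter03 code. If you don't have that module at hand, a minimal stand-in like the one below should be enough to run the script. The function names and call signatures (`load_data`, `plot_classifier`) are taken from the calls above; the bodies are my own assumptions (comma-separated rows with the class label in the last column, and a simple mesh-grid decision-region plot), not the book's original implementation.

# utilities.py -- minimal stand-in for the Cookbook's helper module (assumed implementation)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    """Load comma-separated rows; the last column is assumed to be the class label."""
    X, y = [], []
    with open(input_file, 'r') as f:
        for line in f:
            data = [float(x) for x in line.split(',')]
            X.append(data[:-1])
            y.append(data[-1])
    return np.array(X), np.array(y)

def plot_classifier(classifier, X, y, title=''):
    """Plot the decision regions of a fitted classifier on 2D data."""
    # Plotting range with a small margin around the data
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0

    # Evaluate the classifier on a dense grid of points
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    # Draw the predicted regions and overlay the actual data points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())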