import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (train_test_split moved from sklearn.cross_validation to sklearn.model_selection
# in scikit-learn 0.18+; the import below works with current versions)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Try different kernels by switching the params dict
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
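The script imports a local utilities module that isn't included in the post. If you don't have the book's own utilities.py, the sketch below shows the minimum those two helpers need to do, based only on how they are called above. It assumes data_multivar.txt is comma-separated with the class label in the last column, and the decision-boundary plotting is one common meshgrid approach, not necessarily the book's exact implementation.

# utilities.py -- minimal stand-in for the book's helper module (assumptions noted in comments)
import numpy as np
import matplotlib.pyplot as plt


def load_data(input_file):
    """Load comma-separated rows; assumes the last column holds the class label."""
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y


def plot_classifier(classifier, X, y, title):
    """Plot the decision regions of a fitted 2-D classifier along with the data points."""
    # Pad the plotting range slightly beyond the data
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0

    # Evaluate the classifier on a dense grid covering that range
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    # Shade the predicted regions and overlay the labelled points
    plt.figure()
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.title(title)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

Save this as utilities.py next to the script only as a fallback; if you have the Python-Machine-Learning-Cookbook repository, use its own utilities.py instead.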