- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 558
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""SVM classification demo: load 2-D data, visualize the two classes,
train an SVC, and report train/test performance.

Depends on the project-local `utilities` module for `load_data` (returns
feature matrix X and label vector y) and `plot_classifier` (draws the
decision boundary over a dataset).
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (two feature columns + one 0/1 label column per row).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn import model_selection
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = model_selection.train_test_split(
    X, y, test_size=0.25, random_state=5)

# Kernel choices from the original exercise; RBF is the active one.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance on both splits.
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")