import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (sklearn.cross_validation has been removed from newer scikit-learn; use model_selection)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Choose the kernel: uncomment one of the alternatives to compare decision boundaries
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
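The script also imports a helper module named utilities, which comes from the book's repository and is not reproduced in this post. Below is a minimal sketch of the two helpers it calls, assuming data_multivar.txt holds comma-separated feature values with the class label in the last column; the actual utilities.py in the Python-Machine-Learning-Cookbook repository may differ in details such as grid step size and colour maps.

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Read comma-separated rows; the last column is treated as the class label
    X, y = [], []
    with open(input_file, 'r') as f:
        for line in f:
            values = [float(v) for v in line.split(',')]
            X.append(values[:-1])
            y.append(values[-1])
    return np.array(X), np.array(y)

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the classifier on a dense grid to visualise its decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

Save this sketch as utilities.py next to the main script so that the import resolves, or use the original file from the book's repository if available.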