- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""SVM classification example (Python Machine Learning Cookbook, Ch. 3).

Loads a two-class 2-D dataset from disk, plots the raw points, trains an
RBF-kernel SVM on a 75/25 train/test split, visualizes the decision
boundary on both splits, and prints classification reports.
"""
import numpy as np
import matplotlib.pyplot as plt

# `sklearn.cross_validation` was deprecated in 0.18 and removed in 0.20;
# `model_selection` provides the same train_test_split.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data (utilities.load_data returns features X and labels y).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black',
            edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None',
            edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")