import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split  # replaces the removed sklearn.cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
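The script imports a local utilities module that is not included in the post. Below is a minimal sketch of what that helper would need to provide, assuming the data file is comma-separated with the label in the last column and that plot_classifier draws the decision boundary on a mesh grid; the actual file in the book's repository may differ.

# utilities.py -- minimal sketch of the helper module the script expects.
# This is an assumption of what the book's helpers look like, not the original file.
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Each line: feature1,feature2,...,label (comma-separated)
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the classifier on a mesh covering the data range
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    # Shade the predicted regions and overlay the data points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())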
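The commented-out params lines invite a kernel comparison. A minimal sketch, assuming X_train, X_test, y_train, and y_test already exist from the split above, that trains each configuration and prints its test accuracy:

# Sketch: compare the three kernel configurations from the script above.
# Assumes the train/test split variables are already defined.
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score

kernel_configs = [
    {'kernel': 'linear'},
    {'kernel': 'poly', 'degree': 3},
    {'kernel': 'rbf'},
]

for params in kernel_configs:
    clf = SVC(**params)
    clf.fit(X_train, y_train)
    acc = accuracy_score(y_test, clf.predict(X_test))
    print(params, '-> test accuracy: {:.3f}'.format(acc))

Switching which params line is uncommented in the original script does the same thing one kernel at a time; comparing the resulting classification reports shows how the kernel choice changes the decision boundary.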