- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads labeled points from a text file via the project-local `utilities`
module, visualizes the two classes, fits an SVC (RBF kernel by default),
plots the decision boundary on the train and test splits, and prints
classification reports for both.

Fixes applied during review:
- Stripped forum-paste garbage characters that made the file a SyntaxError.
- `sklearn.cross_validation` was removed in scikit-learn 0.20; replaced
  with `sklearn.model_selection.train_test_split`.
- Python 2 `print` statements converted to `print()` calls.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (two feature columns + one label column per row).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

# Fixed random_state keeps the split reproducible across runs.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the original:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# One display name per distinct label, e.g. 'Class-0', 'Class-1'.
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# NOTE(review): no plt.show() here — presumably utilities.plot_classifier
# displays the figures itself; confirm against the utilities module.
|
|