import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split  # replaces the removed sklearn.cross_validation module
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Uncomment one of the alternatives below to try a different kernel
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# Label names in the same (sorted) order that classification_report uses
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()