- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
# SVM classification example (reconstructed from a garbled forum paste).
# Loads 2-D labelled data, visualizes the two classes, trains an RBF-kernel
# SVC, and prints classification reports for the train and test splits.
import numpy as np
import matplotlib.pyplot as plt

# NOTE: the original used `sklearn.cross_validation`, which was removed in
# scikit-learn 0.20; `model_selection` is the current home of train_test_split.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities  # project-local helper: load_data() and plot_classifier()

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation (as in the original):
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|