- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
# SVM classification example (Python Machine Learning Cookbook, Ch. 3):
# load 2-D labelled data, visualize the two classes, train an SVC, and
# print classification reports for the train and test splits.
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# NOTE: the original `sklearn.cross_validation` module was removed in
# scikit-learn 0.20; `train_test_split` now lives in `model_selection`.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the book:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# Labels are assumed to be castable to int (0/1 here) — TODO confirm
# against utilities.load_data.
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|