- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到

- 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads a two-class dataset, plots the raw points, fits an SVC (RBF kernel
by default), visualizes the decision boundary on the train and test
splits via the project's `utilities` helpers, and prints classification
reports for both splits.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (two feature columns X, integer class labels y).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
# params = {'kernel': 'linear'}
# params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# sorted() keeps target_names aligned with classification_report's
# sorted label order (a bare set() iterates in arbitrary order).
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")