import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# Note: the book uses sklearn.cross_validation, which has since been removed;
# model_selection is the current module.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()
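
The script imports a helper module called utilities (it ships alongside the book's Chapter03 code), but the post does not include it. For readers who only have this paste, here is a rough sketch of what the two helpers might look like, inferred purely from how they are called above; the file format and plotting details are my assumptions, not the book's exact code.

# utilities.py -- hypothetical sketch, not the book's original module
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumption: comma-separated rows, last column is the class label
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Predict over a mesh grid covering the data and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())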
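
The commented-out params lines show the three kernels the recipe tries: linear, degree-3 polynomial, and RBF. If you want a quick way to decide between them on this dataset, one option (not part of the original recipe, and assuming the same X_train/y_train split as above) is 5-fold cross-validation:

# Illustrative kernel comparison -- not from the book
from sklearn.model_selection import cross_val_score
from sklearn.svm import SVC

for params in [{'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}]:
    # Mean cross-validated accuracy for each kernel choice
    scores = cross_val_score(SVC(**params), X_train, y_train, cv=5)
    print(params, 'accuracy: %.3f' % scores.mean())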