- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads labelled 2-D points from a text file, plots the two classes,
fits an RBF-kernel SVC on a 75/25 train/test split, and prints
classification reports for both splits.

Depends on the project-local `utilities` module for `load_data` and
`plot_classifier` (not shown in this file).
"""
import numpy as np
import matplotlib.pyplot as plt

# NOTE: `sklearn.cross_validation` was removed in scikit-learn 0.20;
# `model_selection` is the maintained location of train_test_split.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data (feature matrix X and 0/1 label vector y).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares = class 0, hollow squares = class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
# sorted() gives a deterministic label -> name ordering across runs.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures created above (no-op in non-interactive batch use).
plt.show()