- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data: each row is two features plus a 0/1 class label,
# parsed by the project-local utilities.load_data helper.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black',
            edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None',
            edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

# Fixed random_state keeps the split reproducible across runs.
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures created above (no-op if a GUI backend is unavailable).
plt.show()
|
|