import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
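The script imports utilities, a helper module that ships with the book's Chapter03 code and is not included in this post. As a rough sketch of what load_data is assumed to do (comma-separated rows, features first, class label in the last column), it could look like this:

# Rough sketch of utilities.load_data (assumption: comma-separated rows with
# the features in the leading columns and the class label in the last column).
import numpy as np

def load_data(input_file):
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y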
###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')
###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
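utilities.plot_classifier also comes from the book's helper module. A minimal sketch, assuming it predicts the class over a mesh grid to shade the decision regions and then overlays the data points:

# Minimal sketch of utilities.plot_classifier (assumption: mesh-grid decision
# regions with the labelled points overlaid).
import numpy as np
import matplotlib.pyplot as plt

def plot_classifier(classifier, X, y, title):
    # Plot range with a small margin around the data
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0

    # Evaluate the classifier on a dense grid covering that range
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    # Draw the decision regions and overlay the data points
    plt.figure()
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.title(title)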
; E. u0 w$ h! N3 j+ B, g' B9 M############################################### V+ W J7 O2 Z ~( `
# Evaluate classifier performance& I( Z, f5 g" c2 J* ]7 N1 X
5 u. q' i' F0 O# f' Y* jfrom sklearn.metrics import classification_report
- h3 T) J$ p: A9 m1 L: U, _( D; T0 W/ N$ N! ~
target_names = ['Class-' + str(int(i)) for i in set(y)]( ^% O5 N2 Q" W2 Z
print "\n" + "#"*30
0 r4 S7 O$ y) r" iprint "\nClassifier performance on training dataset\n"
+ N5 |$ \2 r, f/ s7 R, a+ n! sprint classification_report(y_train, classifier.predict(X_train), target_names=target_names)
8 l9 T( t' F% T( y# sprint "#"*30 + "\n"
' {7 ^4 ^# h4 V
2 b9 [7 O; d" Y$ o6 |print "#"*302 w( r9 V1 _' }" n2 g# o% x
print "\nClassification report on test dataset\n"
! D( i* e2 d) wprint classification_report(y_test, y_test_pred, target_names=target_names)
* O. k$ C6 E! j$ s2 ^* I, S' w8 i8 h/ Uprint "#"*30 + "\n"
& x* p8 m" V, `8 J' r" S3 U2 h1 n! b5 w7 O- u
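Note that the listing targets Python 2 and an old scikit-learn release: the sklearn.cross_validation module was removed in scikit-learn 0.20, and the print statements use Python 2 syntax. Under Python 3 with a current scikit-learn, the equivalent pieces (replacing the corresponding lines above) look roughly like this:

# Python 3 / current scikit-learn equivalents of the deprecated pieces above
from sklearn.model_selection import train_test_split

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")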