- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads a two-class dataset, visualizes it, fits an SVC (RBF kernel by
default), plots the decision boundary on the train/test splits, and
prints classification reports for both.

Requires the sibling `utilities` module (load_data, plot_classifier)
from the Python-Machine-Learning-Cookbook Chapter 3 code.
"""

import numpy as np
import matplotlib.pyplot as plt
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data.
# NOTE(review): hard-coded absolute Windows path — adjust for your machine.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Kernel choices from the book — uncomment one to experiment:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance on both splits.
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Render all figures created above (original script never displayed them).
plt.show()