- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""SVM classification demo (Python Machine Learning Cookbook, Ch. 3).

Loads 2-D multivariate data from a text file, plots the two classes,
trains an RBF-kernel SVM on a 75/25 train/test split, and prints
classification reports for both splits.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (two feature columns + a 0/1 class label per row).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black',
            edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None',
            edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# model_selection.train_test_split is the drop-in replacement.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the book:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# One display name per distinct label value, e.g. 'Class-0', 'Class-1'.
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()