- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
# SVM classification demo: load 2-D labelled data, visualise the two classes,
# train an SVC, and print classification reports for train and test splits.
#
# NOTE(review): this script was reconstructed from a copy/paste garbled with
# forum markup; statement order follows the original code.

import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (feature matrix X and label vector y).
# NOTE(review): hard-coded Windows path — adjust for your environment.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# FIX: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]

# FIX: Python 2 print statements converted to print() calls.
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# FIX: without plt.show() the figures are never displayed when run as a script.
plt.show()