- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
# SVM classification demo (Python Machine Learning Cookbook, Ch. 3):
# load 2-D labeled data, visualize the two classes, train an SVC with a
# chosen kernel, and report train/test classification performance.
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (assumes each row is two features plus a 0/1 label —
# confirm against utilities.load_data).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares = class 0, hollow squares = class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black',
            edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None',
            edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was deprecated in 0.18 and removed in
# 0.20; train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with:
# params = {'kernel': 'linear'}
# params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred,
                            target_names=target_names))
print("#" * 30 + "\n")