import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
# (sklearn.cross_validation was removed in scikit-learn 0.20;
#  train_test_split now lives in sklearn.model_selection)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Pick one kernel: linear, polynomial (degree 3), or RBF
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Display all figures
plt.show()
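
`utilities.plot_classifier` is not shown either. Below is a rough sketch of what such a helper typically does for 2-D data: predict over a dense mesh grid, shade the resulting decision regions, and overlay the data points. The function name and signature simply mirror how it is called above, and the colormap choices are arbitrary:

import numpy as np
import matplotlib.pyplot as plt

# Hypothetical replacement for utilities.plot_classifier: draw the decision
# regions of a trained 2-D classifier and overlay the data points.
def plot_classifier(classifier, X, y, title=''):
    # Plotting range with a small margin around the data
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0

    # Predict the class of every point on a dense grid
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    # Shade the decision regions, then overlay the points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())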