import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# train_test_split lives in sklearn.model_selection
# (the old sklearn.cross_validation module has been removed)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Uncomment one of the alternatives below to try a different kernel
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

plt.show()
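
The script imports a utilities module that ships with the book's Chapter03 code and is not reproduced in this post. Below is a minimal sketch of what the two helpers it calls could look like, assuming data_multivar.txt is comma-separated with the class label in the last column and that plot_classifier simply shades decision regions over a mesh grid; the repository's actual implementation may differ.

# Hypothetical stand-in for the book's 'utilities' module
# (assumed behaviour, not the repository's exact code)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes each line holds 'x1,x2,label' separated by commas
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    # Predict over a dense mesh grid and shade the resulting decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired, edgecolors='black', linewidth=1)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())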