- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt
# sklearn.cross_validation was removed in scikit-learn 0.20;
# model_selection provides the identical train_test_split.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data: feature matrix X and label vector y
# (utilities.load_data is project-local; presumably returns two sequences — confirm).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares = class 0, hollow squares = class 1
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
# params = {'kernel': 'linear'}
# params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance.
# classification_report lists classes in sorted label order, so the
# display names must be built from sorted labels to line up correctly.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Render the figures created above (scatter plot and plot_classifier output);
# without this a non-interactive run produces no visible plots.
plt.show()
|
|