- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 559
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
# SVM classification demo: load a 2-D two-class dataset, visualize it,
# train an SVC, and print classification reports for train/test splits.
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Uncomment one of the alternatives below to try a different kernel.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# sorted() makes the label order deterministic (set iteration order is not)
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Render all figures (scatter plot + the two plot_classifier figures)
plt.show()