import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# Note: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Pick one kernel: uncomment the params line you want to try.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
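The script depends on the utilities module that ships with the Cookbook's Chapter 3 code. If you don't have that file next to the script, the sketch below is a minimal stand-in that matches how load_data and plot_classifier are called above; it is not the book's original file, and the mesh step size and colour maps are my own assumptions.

# utilities.py -- minimal stand-in, not the Cookbook's original file
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes a comma-separated text file whose last column is the class label
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Build a mesh over the data range, predict on every grid point,
    # and shade the regions by predicted class, then overlay the points.
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())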