- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 558
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
# SVM classification example (Python Machine Learning Cookbook, Chapter 3).
# Loads a 2-D, two-class dataset, visualizes it, trains an RBF-kernel SVM,
# and prints classification reports for the training and test splits.
import numpy as np
import matplotlib.pyplot as plt

# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities  # project-local helpers: load_data() and plot_classifier()

# Load the input data.
# Raw string avoids backslash-escape pitfalls in the Windows path.
# TODO(review): hard-coded absolute path — consider making this configurable.
input_file = r'D:\1.Modeling material\Py_Study\2.code_model\Python-Machine-Learning-Cookbook\Python-Machine-Learning-Cookbook-master\Chapter03\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with:
# params = {'kernel': 'linear'}
# params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
# sorted() guarantees a stable label order (Class-0 before Class-1);
# bare set() iteration order is an implementation detail.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures (blocks until the windows are closed).
plt.show()
|