本文比较四种 scikit-learn 分类器(均使用默认参数):
- KNN算法:knn = KNeighborsClassifier()
- 朴素贝叶斯:gnb = GaussianNB()
- 决策树:dtc = DecisionTreeClassifier()
- SVM算法:svm = SVC()
完整代码:
# Compare four scikit-learn classifiers (KNN, Gaussian Naive Bayes,
# Decision Tree, SVM) on two features of the wine dataset and plot
# each model's decision regions in a 2x2 figure.
import numpy as np
import matplotlib.pyplot as plt
from itertools import product
from sklearn import datasets
from sklearn.neighbors import KNeighborsClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.tree import DecisionTreeClassifier
from sklearn.svm import SVC

# Load the wine dataset; keep only features 0 and 2 so the decision
# regions can be drawn in 2-D.
wine = datasets.load_wine()
x_train = wine.data[:, [0, 2]]
y_train = wine.target

# Instantiate the four classifiers with default hyper-parameters.
knn = KNeighborsClassifier()
gnb = GaussianNB()
dtc = DecisionTreeClassifier()
svm = SVC()

# Fit every classifier on the same 2-feature training data.
for clf in (knn, gnb, dtc, svm):
    clf.fit(x_train, y_train)

# Report accuracy. NOTE: these scores are computed on the *training*
# data (there is no train/test split), so they overstate how well each
# model generalizes.
print('KNN:', knn.score(x_train, y_train))
print('GaussianNB:', gnb.score(x_train, y_train))
print('Decision Tree:', dtc.score(x_train, y_train))
print('Support Vector Machine:', svm.score(x_train, y_train))

# Build a dense grid covering the data range (padded by 1 unit on each
# side) at which the decision surfaces will be evaluated.
x_min, x_max = x_train[:, 0].min() - 1, x_train[:, 0].max() + 1
y_min, y_max = x_train[:, 1].min() - 1, x_train[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.1),
                     np.arange(y_min, y_max, 0.1))

# One subplot per classifier; axes shared per row/column for alignment.
f, axe = plt.subplots(2, 2, sharex='col', sharey='row', figsize=(10, 8))

# Predict the class of every grid point, draw the resulting decision
# regions, and overlay the training samples colored by true label.
# (The original listing had lost the loop-body indentation — a syntax
# error as published; restored here.)
for idx, clf, tt in zip(product([0, 1], [0, 1]),
                        [knn, gnb, dtc, svm],
                        ['KNN', 'GaussianNB', 'Decision Tree',
                         'Support Vector Machine']):
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    axe[idx[0], idx[1]].contourf(xx, yy, Z, alpha=0.4)
    axe[idx[0], idx[1]].scatter(x_train[:, 0], x_train[:, 1],
                                c=y_train, s=20, edgecolor='k')
    axe[idx[0], idx[1]].set_title(tt)
plt.show()
结果:
版权声明:本文内容由互联网用户自发贡献,该文观点仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 举报,一经查实,本站将立刻删除。
文章由极客之音整理,本文链接:https://www.bmabk.com/index.php/post/147438.html