
The k-Nearest Neighbors (kNN) Algorithm


A friend told me machine learning was a big deal, so I bought a copy of Machine Learning in Action to get a feel for the field and to pick up Python along the way.

The first algorithm in the book is kNN. It is easy to understand and simple to use, but its storage and computational costs are high (every training sample is kept, and each query computes a distance to all of them), and it offers no insight into the underlying structure of the data.

The two worked examples in the book convinced me that machine learning is genuinely practical. Below is the Python code I transcribed and tidied up from Machine Learning in Action.

Example 1: using kNN to improve matches on a dating site

from numpy import *

import operator

# kNN classifier: return the majority label among the k training
# samples closest (by Euclidean distance) to the input vector inX
def classify0(inX, dataSet, labels, k):
    dataSetSize = dataSet.shape[0]
    # difference between inX and every training sample
    diffMat = tile(inX, (dataSetSize, 1)) - dataSet
    sqDiffMat = diffMat ** 2
    sqDistances = sqDiffMat.sum(axis=1)
    distances = sqDistances ** 0.5
    # indices of the training samples, sorted by increasing distance
    sortedDistIndicies = distances.argsort()
    classCount = {}

    for i in range(k):
        voteIlabel = labels[sortedDistIndicies[i]]
        classCount[voteIlabel] = classCount.get(voteIlabel, 0) + 1
    # sort candidate labels by vote count, descending
    sortedClassCount = sorted(classCount.items(), key=operator.itemgetter(1), reverse=True)
    return sortedClassCount[0][0]
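
A quick sanity check of classify0 on a toy dataset (the four points and their labels are made up for illustration, in the spirit of the book's createDataSet):

group = array([[1.0, 1.1], [1.0, 1.0], [0.0, 0.0], [0.0, 0.1]])
labels = ['A', 'A', 'B', 'B']
print(classify0([0.1, 0.2], group, labels, 3))   # prints 'B': the query point sits next to the 'B' cluster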

# Read the dating data file into a feature matrix and a label vector
def fileTomatrix(filename):
    fr = open(filename)
    arrayLines = fr.readlines()
    numberOfLines = len(arrayLines)
    returnMat = zeros((numberOfLines, 3))
    classLabelVector = []
    index = 0
    for line in arrayLines:
        line = line.strip()
        listFromLine = line.split('\t')
        returnMat[index, :] = listFromLine[0:3]          # first three columns are the features
        classLabelVector.append(int(listFromLine[-1]))   # last column is the class label
        index += 1
    return returnMat, classLabelVector
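
fileTomatrix assumes each line of the data file carries three tab-separated numeric features followed by an integer class label (1 to 3). The numbers below are made up, just to show the shape of the file:

40920	8.3270	0.9539	3
14488	7.1534	1.6739	2
26052	1.4418	0.8712	1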

# Normalize the data: scale every feature to [0, 1] via
# newValue = (oldValue - min) / (max - min)
def autoNorm(dataSet):
    minVals = dataSet.min(0)   # column-wise minimum
    maxVals = dataSet.max(0)   # column-wise maximum
    ranges = maxVals - minVals
    m = dataSet.shape[0]

    normDataSet = dataSet - tile(minVals, (m, 1))
    normDataSet = normDataSet / tile(ranges, (m, 1))
    return normDataSet, ranges, minVals
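
A quick check of autoNorm on made-up numbers; after scaling, every column lies in [0, 1], and the original minima and ranges are returned so a query point can be scaled the same way:

testMat = array([[10.0, 20000.0], [20.0, 50000.0], [30.0, 80000.0]])
normMat, ranges, minVals = autoNorm(testMat)
print(normMat)    # [[0. 0.], [0.5 0.5], [1. 1.]]
print(ranges)     # [20. 60000.]
print(minVals)    # [10. 20000.]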

# Interactive classifier: ask for the three feature values and predict the match
def classifyPerson():
    resultList = ['not at all', 'in small doses', 'in large doses']
    percentTats = float(input("percentage of time spent playing video games?"))
    ffMiles = float(input("frequent flier miles earned per year?"))
    iceCream = float(input("liters of ice cream consumed per year?"))

    inArr = array([ffMiles, percentTats, iceCream])

    datingDataMat, datingLabels = fileTomatrix('dataSet.txt')
    normMat, ranges, minVals = autoNorm(datingDataMat)
    # scale the query point with the training set's ranges before classifying
    classifierResult = classify0((inArr - minVals) / ranges, normMat, datingLabels, 3)

    print("You will probably like this person:", resultList[classifierResult - 1])
    

Example 2: using kNN to recognize handwritten digits

from numpy import *

import operator
from os import listdir

# classify0 is the same kNN classifier as in Example 1 above


# Flatten a 32x32 text image of '0'/'1' characters into a 1x1024 vector
def image2vector(filename):
    returnResult = zeros((1, 1024))
    fr = open(filename)
    for i in range(32):
        lineStr = fr.readline()
        for j in range(32):
            returnResult[0, 32 * i + j] = int(lineStr[j])
    return returnResult
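
image2vector expects each digit to be stored as 32 lines of 32 '0'/'1' characters, one text file per image. The fragment below is illustrative, not real data:

00000000000001111000000000000000
00000000000011111110000000000000
00000000001111111111000000000000
... (29 more lines of the same width)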


def handwritingClassTest():
    # load the training set: one 32x32 text file per digit image,
    # named like "9_45.txt" (digit 9, sample 45)
    trainingFileList = listdir('trainingDigits')
    m = len(trainingFileList)
    trainingMat = zeros((m, 1024))
    hwLabels = []
    for i in range(m):
        fileNameStr = trainingFileList[i]
        fileStr = fileNameStr.split('.')[0]
        classNumStr = int(fileStr.split('_')[0])   # the true digit is encoded in the file name
        hwLabels.append(classNumStr)
        trainingMat[i, :] = image2vector('trainingDigits/%s' % fileNameStr)
    # classify every file in the test set and count the mistakes
    testFileList = listdir('testDigits')
    errorCount = 0
    mTest = len(testFileList)
    for i in range(mTest):
        fileNameStr = testFileList[i]
        fileStr = fileNameStr.split('.')[0]
        classNumStr = int(fileStr.split('_')[0])
        vectorUnderTest = image2vector('testDigits/%s' % fileNameStr)
        classifierResult = classify0(vectorUnderTest, trainingMat, hwLabels, 5)
        print("the classifier came back with: %d, the real number is: %d" % (classifierResult, classNumStr))
        if classifierResult != classNumStr:
            errorCount += 1
    print("\nthe total number of errors is: %d" % errorCount)
    print("\nthe total error rate is %f" % (errorCount / mTest))
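
Assuming trainingDigits/ and testDigits/ directories full of such text files sit next to the script, running the test is a single call:

handwritingClassTest()
# prints one prediction per test file, then the error count and the overall error rate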

 


Original post: http://www.cnblogs.com/wxquare/p/5232450.html
