AdaBoost
(1)收集数据:可以使用任何方法;
(2)准备数据:依赖于所使用的弱分类器类型;
(3)分析数据:可以使用任意方法
(4)训练算法:AdaBoost的大部分时间都用在训练上,分类器将多次在同一数据集上训练弱分类器;
(5)测试算法:计算分类的错误率;
(6)使用算法:同SVM一样,AdaBoost预测两个类别中的一个;如果想要把它应用到多分类的场合,那么就像多类SVM中的做法一样对AdaBoost进行修改。
AdaBoost
优点: 泛化错误率低,易编码,可以应用在大部分分类器上,无需参数调整
缺点: 对离群点敏感
注:分类器一般采用弱可学习分类器,通过集成,组合为强可学习分类器。
集成方法:Bagging和Boosting
from numpy import *
def loadSimpData():
    """Return a tiny hand-built data set for exercising AdaBoost.

    Returns:
        (datMat, classLabels): a 5x2 numpy matrix of features and the
        matching list of +1/-1 class labels.
    """
    # Fix: numpy's matrix() takes ONE nested list; the original passed each
    # row as a separate positional argument, which raises a TypeError
    # (the second argument is interpreted as dtype).
    datMat = matrix([[1, 2],
                     [2, 1],
                     [1, 1],
                     [1, 1],
                     [2, 1]])
    classLabels = [1, 1, -1, -1, -1]
    return datMat, classLabels
def loadDataSet(fileName):
    """Load a tab-separated data file into feature rows and labels.

    Each row holds numFeat-1 feature columns followed by one label column.

    Parameters:
        fileName: path to a tab-delimited text file.

    Returns:
        (dataMat, labelMat): list of feature rows (floats) and the
        parallel list of labels (floats).
    """
    # Probe the first line to infer the column count, so files of any
    # width work. `with` fixes the original's leaked file handles.
    with open(fileName) as fr:
        numFeat = len(fr.readline().split('\t'))
    dataMat = []
    labelMat = []
    # Reopen so the record consumed by the probe above is not skipped.
    with open(fileName) as fr:
        for line in fr:
            curLine = line.strip().split('\t')
            dataMat.append([float(curLine[i]) for i in range(numFeat - 1)])
            labelMat.append(float(curLine[-1]))
    return dataMat, labelMat
def stumpClassify(dataMatrix, dimen, threshVal, threshIneq):
    """Classify every sample with a single decision stump.

    Samples falling on the `threshIneq` side of `threshVal` in feature
    column `dimen` are labeled -1.0; all others keep the default +1.0.
    Returns an (m, 1) array of +1/-1 predictions.
    """
    predictions = ones((shape(dataMatrix)[0], 1))
    column = dataMatrix[:, dimen]
    if threshIneq == 'lt':
        predictions[column <= threshVal] = -1.0
    else:
        predictions[column > threshVal] = -1.0
    return predictions
def buildStump(dataArr, classLabels, D):
    """Find the best single decision stump under sample weights D.

    Scans every feature, a grid of candidate thresholds per feature, and
    both inequality directions, keeping the stump with the lowest
    D-weighted classification error.

    Parameters:
        dataArr: feature matrix-like of shape (m, n).
        classLabels: sequence of +1/-1 labels, length m.
        D: (m, 1) column matrix of sample weights.

    Returns:
        (bestStump, minError, bestClasEst): stump description dict with
        keys 'dim'/'thresh'/'ineq', its weighted error, and its (m, 1)
        class-estimate column.
    """
    dataMatrix = mat(dataArr)
    labelMat = mat(classLabels).T
    m, n = shape(dataMatrix)
    numSteps = 10.0
    bestStump = {}
    bestClasEst = mat(zeros((m, 1)))
    minError = inf  # best weighted error seen so far
    for dim in range(n):
        lo = dataMatrix[:, dim].min()
        hi = dataMatrix[:, dim].max()
        stepSize = (hi - lo) / numSteps
        # j = -1 and j = numSteps place thresholds just outside the range,
        # so "classify everything one way" is also considered.
        for j in range(-1, int(numSteps) + 1):
            thresh = lo + float(j) * stepSize
            for inequal in ('lt', 'gt'):
                predicted = stumpClassify(dataMatrix, dim, thresh, inequal)
                errArr = mat(ones((m, 1)))
                errArr[predicted == labelMat] = 0
                weightedError = D.T * errArr
                if weightedError < minError:
                    minError = weightedError
                    bestClasEst = predicted.copy()
                    bestStump = {'dim': dim, 'thresh': thresh, 'ineq': inequal}
    return bestStump, minError, bestClasEst
def adaBoostTrainDS(dataArr, classLabels, numIt=40):
    """Train an AdaBoost ensemble of decision stumps.

    Parameters:
        dataArr: feature matrix-like of shape (m, n).
        classLabels: sequence of +1/-1 labels, length m.
        numIt: maximum number of boosting rounds.

    Returns:
        (weakClassArr, aggClassEst): the list of weak-stump dicts (each
        carrying its 'alpha' vote weight) and the (m, 1) aggregated
        margin column over the training set.
    """
    m = shape(dataArr)[0]
    weakClassArr = []
    D = mat(ones((m, 1)) / m)          # start with uniform sample weights
    aggClassEst = mat(zeros((m, 1)))   # running alpha-weighted vote
    labelCol = mat(classLabels).T
    for _ in range(numIt):
        stump, error, classEst = buildStump(dataArr, classLabels, D)
        # max(error, 1e-16) guards against division by zero for a perfect stump.
        alpha = float(0.5 * log((1.0 - error) / max(error, 1e-16)))
        stump['alpha'] = alpha
        weakClassArr.append(stump)
        # Reweight samples: misclassified ones gain weight, correct ones lose it.
        expon = multiply(-1 * alpha * labelCol, classEst)
        D = multiply(D, exp(expon))
        D = D / D.sum()
        aggClassEst += alpha * classEst
        aggErrors = multiply(sign(aggClassEst) != labelCol, ones((m, 1)))
        errorRate = aggErrors.sum() / m
        print("total error :", errorRate)
        if errorRate == 0.0:
            break
    return weakClassArr, aggClassEst
def adaClassify(datToClass, classifierArr):
    """Classify samples with a trained AdaBoost stump ensemble.

    Parameters:
        datToClass: matrix-like of samples to classify.
        classifierArr: list of stump dicts from adaBoostTrainDS.

    Returns:
        (m, 1) column matrix of +1/-1 predictions (sign of the
        alpha-weighted vote).
    """
    dataMatrix = mat(datToClass)
    m = shape(dataMatrix)[0]
    aggClassEst = mat(zeros((m, 1)))
    for stump in classifierArr:
        classEst = stumpClassify(dataMatrix, stump['dim'],
                                 stump['thresh'], stump['ineq'])
        aggClassEst += stump['alpha'] * classEst
        print(aggClassEst)  # trace the running vote, as in the original
    return sign(aggClassEst)
# Script entry point: train AdaBoost on the horse-colic training set, then
# classify the held-out test set. Guarded so importing this module as a
# library produces no side effects.
if __name__ == "__main__":
    # Forward-slash relative paths work on every platform; the original
    # '.\...' form is Windows-only and fragile against backslash escapes.
    trainData, trainLabels = loadDataSet('./horseColicTraining2.txt')
    testData, testLabels = loadDataSet('./horseColicTest2.txt')
    weakClassArr, aggClassEst = adaBoostTrainDS(trainData, trainLabels)
    print(weakClassArr)
    predictions = adaClassify(testData, weakClassArr)
    print(predictions)
来源:CSDN
作者:weixin_45170671
链接:https://blog.csdn.net/weixin_45170671/article/details/103534964