[Machine Learning Algorithms - Python Implementation] Implementing Logistic Regression (LogicalRegression)
(When reposting, please credit the source: http://blog.csdn.net/buptgshengod)
First we load the training set:

from numpy import *

def loadDataSet():
    # each line of testSet.txt holds two features and a class label
    dataMat = []; labelMat = []
    fr = open('/Users/hakuri/Desktop/testSet.txt')
    for line in fr.readlines():
        lineArr = line.strip().split()
        # prepend x0 = 1.0 so the bias term is learned as weights[0]
        dataMat.append([1.0, float(lineArr[0]), float(lineArr[1])])
        labelMat.append(int(lineArr[2]))
    return dataMat, labelMat
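For reference, loadDataSet assumes each line of testSet.txt contains two floating-point features followed by a 0/1 class label, separated by whitespace. Illustrative lines (sample values, not necessarily from the actual file) might look like:

    -0.017612   14.053064   0
    -1.395634    4.662541   1
     0.406704    7.067335   1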
Next come the sigmoid function and the batch gradient-ascent trainer:

def sigmoid(inX):
    # logistic function, maps any real input into (0, 1)
    return 1.0/(1+exp(-inX))

def gradAscent(dataMatIn, classLabels):
    dataMatrix = mat(dataMatIn)              # convert to NumPy matrix, shape (m, n)
    labelMat = mat(classLabels).transpose()  # column vector of labels, shape (m, 1)
    m, n = shape(dataMatrix)
    alpha = 0.001                            # learning rate
    maxCycles = 500                          # number of gradient-ascent iterations
    weights = ones((n, 1))
    for k in range(maxCycles):               # heavy on matrix operations
        h = sigmoid(dataMatrix*weights)      # predicted probabilities, matrix mult
        error = (labelMat - h)               # vector subtraction
        weights = weights + alpha * dataMatrix.transpose() * error  # matrix mult
    return weights
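Each pass of the loop performs the batch update weights = weights + alpha * X^T * (y - h), which is gradient ascent on the log-likelihood of the logistic model; (y - h) is the prediction error over all training examples at once. A minimal driver (a sketch, assuming testSet.txt sits at the path hard-coded in loadDataSet) would be:

if __name__ == '__main__':
    # load the data, fit the weights, and inspect the result
    dataArr, labelMat = loadDataSet()
    weights = gradAscent(dataArr, labelMat)
    print(weights)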
Running the code above produces the following weight vector:

[[ 4.12414349]
 [ 0.48007329]
 [-0.6168482 ]]
From these weights we can read off the relationship between x1 and x2 on the decision boundary (setting x0 = 1): 0 = 4.12414349 + 0.48007329*x1 - 0.6168482*x2.

Original article: http://blog.csdn.net/buptgshengod/article/details/24715035
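To see this boundary against the data, one option is a small plotting helper. The sketch below is an assumption on my part (the helper name plotBestFit, the matplotlib dependency, and the x1 range of -3 to 3 are all choices for illustration, not part of the original post); it reuses loadDataSet from above and solves the boundary equation for x2:

import matplotlib.pyplot as plt
from numpy import arange, array, asarray

def plotBestFit(weights):
    # accept either the (n,1) matrix returned by gradAscent or a plain array
    w = asarray(weights).flatten()
    dataMat, labelMat = loadDataSet()
    dataArr = array(dataMat)
    xcord1 = []; ycord1 = []            # coordinates of class-1 points
    xcord0 = []; ycord0 = []            # coordinates of class-0 points
    for i in range(dataArr.shape[0]):
        if int(labelMat[i]) == 1:
            xcord1.append(dataArr[i, 1]); ycord1.append(dataArr[i, 2])
        else:
            xcord0.append(dataArr[i, 1]); ycord0.append(dataArr[i, 2])
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.scatter(xcord1, ycord1, s=30, c='red', marker='s')
    ax.scatter(xcord0, ycord0, s=30, c='green')
    # on the boundary w0 + w1*x1 + w2*x2 = 0, so solve for x2 in terms of x1
    x = arange(-3.0, 3.0, 0.1)
    y = (-w[0] - w[1] * x) / w[2]
    ax.plot(x, y)
    plt.xlabel('X1'); plt.ylabel('X2')
    plt.show()

Calling plotBestFit(weights) after gradAscent should draw the two classes as separate scatter groups with the fitted line between them.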