#! /usr/bin/env python3
#
def logistic_regression ( x, y, alpha, kmax ):
    """Estimate logistic regression coefficients by gradient descent.

    Fits the model

        y(x) = 1 / ( 1 + exp ( - ( w[0]*x[0] + w[1]*x[1] + ... + w[n-1]*x[n-1] ) ) )

    Usually x[:,0] is identically 1 (an intercept column), but this is not
    required.

    Input:

      real x[m,n]: m sets of n-dimensional input variables.

      real y[m]: the m output variables, between 0 and 1.

      real alpha: a learning rate.  Set alpha to 1 initially, but if the
      iteration is not converging, try repeatedly halving alpha.

      integer kmax: the number of gradient descent iterations to take.
      If kmax is 0, the zero vector is returned.

    Output:

      real w[n]: the estimated logistic regression coefficients.
    """
    import numpy as np

    m, n = x.shape

    w = np.zeros ( n )

    for k in range ( 0, kmax ):
        #
        #  Current model predictions: the sigmoid of x @ w.
        #
        y2 = 1.0 / ( 1.0 + np.exp ( - np.matmul ( x, w ) ) )
        #
        #  Vectorized gradient step.  Because y2 is fixed for this iteration,
        #  this is identical to updating each w[j] separately with
        #  w[j] -= (alpha/m) * dot(y2-y, x[:,j]).
        #
        w = w - ( alpha / m ) * np.matmul ( x.T, y2 - y )
        #
        #  NOTE: the original code also evaluated the cross-entropy cost J[k]
        #  here, but never used or returned it, so that dead computation
        #  (which could emit log(0) warnings when the sigmoid saturates)
        #  has been removed.
        #

    return w