Mar-20-2020, 05:20 PM
Hello.
I want to fit a logistic curve to some data I have prepared.
The curve has a form of:
![f(x) = L / (1 + e^(-k(x - x0)))](https://wikimedia.org/api/rest_v1/media/math/render/svg/6f42e36c949c94189976ae00853af9a1b618e099)
But I have also added a bias term to the function (+b) at the end.
I have calculated the derivatives with wolfram alpha, and hardcoded them.
At first, everything seems to work fine, but after circa 200 iterations, I see an increase in error value.
Please look at those screenshots:
Now, what could cause such a strange behaviour?
I will post the code, but I wasn't able to get a more "minimal" example, so it's quite long:
I want to fit a logistic curve to some data I have prepared.
The curve has a form of:
But I have also added a bias term to the function (+b) at the end.
I have calculated the derivatives with wolfram alpha, and hardcoded them.
At first, everything seems to work fine, but after circa 200 iterations, I see an increase in error value.
Please look at those screenshots:
Now, what could cause such a strange behaviour?
I will post the code, but I wasn't able to get a more "minimal" example, so it's quite long:
#!/usr/bin/python3
"""Fit a logistic curve f(x) = L / (1 + exp(-k*(x - x0))) + b to data.

Parameters [L, k, x0, b] are found by batch gradient descent on the
half-mean-squared error.  When run as a script, an animation shows the
fitted curve and the error value per iteration.
"""
import numpy as np

# The dd* functions below are the hard-coded partial derivatives of the
# per-sample half-squared error 0.5*(f(x) - y)**2 with respect to each
# model parameter.  They are plain elementwise numpy expressions, so
# they accept scalars or arrays for x/y directly; the np.vectorize
# wrappers from the original are unnecessary and have been dropped.


def ddk(x, y, L, k, x0, b):
    """Partial derivative of 0.5*(f(x) - y)**2 with respect to k."""
    e = np.exp(k * x0 - k * x)
    return -L * (x0 - x) * e * (L / (e + 1) + b - y) / (e + 1) ** 2


def ddl(x, y, L, k, x0, b):
    """Partial derivative of 0.5*(f(x) - y)**2 with respect to L."""
    e = np.exp(k * x0 - k * x)
    return (L / (e + 1) + b - y) / (e + 1)


def ddx0(x, y, L, k, x0, b):
    """Partial derivative of 0.5*(f(x) - y)**2 with respect to x0."""
    e = np.exp(k * x0 - k * x)
    return -k * L * e * (L / (e + 1) + b - y) / (e + 1) ** 2


def ddb(x, y, L, k, x0, b):
    """Partial derivative of 0.5*(f(x) - y)**2 with respect to b."""
    return L / (np.exp(k * x0 - k * x) + 1) + b - y


def funcValue(x, L, k, x0, b):
    """Evaluate the logistic model L/(1 + exp(-k*(x - x0))) + b at x."""
    return L / (1 + np.exp(-k * (x - x0))) + b


def errValue(x, y, L, k, x0, b):
    """Squared residual (f(x) - y)**2 for each sample."""
    return (funcValue(x, L, k, x0, b) - y) ** 2


def standardize(X):
    """Return X shifted and scaled to zero mean and unit std deviation."""
    X = np.asarray(X, dtype=float)
    return (X - np.average(X)) / np.std(X)


def update_params(X, y, param_vector, learningRate=1):
    """Perform one batch gradient-descent step, mutating param_vector.

    param_vector is the list [L, k, x0, b].

    BUG FIX: the original code applied the k-gradient (ddk) to L and the
    L-gradient (ddl) to k -- the two updates were swapped.  Descending
    along the wrong partials is what made the error start climbing after
    ~200 iterations.  Each parameter now uses its own derivative.
    """
    count = np.size(X, axis=0)
    L, k, x0, b = param_vector
    param_vector[0] -= learningRate / count * np.sum(ddl(X, y, L, k, x0, b))
    param_vector[1] -= learningRate / count * np.sum(ddk(X, y, L, k, x0, b))
    param_vector[2] -= learningRate / count * np.sum(ddx0(X, y, L, k, x0, b))
    param_vector[3] -= learningRate / count * np.sum(ddb(X, y, L, k, x0, b))


def main():
    # matplotlib is only needed for the animation; importing it lazily
    # keeps the numerical functions above importable without a display.
    import matplotlib.pyplot as plt
    import matplotlib.animation as animation

    X = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
    y = [1, 1.2, 1.3, 4, 7, 9, 11, 12, 14, 14.5, 16, 16, 16]

    # ---- the regression part -----------------------------------------
    # Column vectors via plain ndarrays (np.matrix is deprecated).
    X = standardize(X).reshape(-1, 1)
    y = np.asarray(y, dtype=float).reshape(-1, 1)
    param_vector = [16.0, 4.0, 1.0, 1.0]  # L, k, x0, b
    params = []
    errors = []
    learning_iterations = 500
    for _ in range(learning_iterations):
        params.append(param_vector[:])  # snapshot for the animation
        update_params(X, y, param_vector, 0.02)
        errors.append(1 / (2 * np.size(X, axis=0))
                      * np.sum(errValue(X, y, *param_vector)))

    # ---- the plotting part -------------------------------------------
    figure = plt.figure()
    ax = figure.add_subplot(211)
    erax = figure.add_subplot(212)
    ln, = ax.plot([], [])
    erln, = erax.plot([], [])
    # bottom/top & left/right: the ymin/xmin kwargs were removed from
    # matplotlib in 3.5.
    erax.set_ylim(bottom=0, top=1)
    erax.set_xlim(left=0, right=learning_iterations)
    ax.scatter(np.ravel(X), np.ravel(y))

    def animate(frame):
        # Redraw the fitted curve and the error trace up to this frame.
        _x = np.linspace(X[0, 0] - 1, X[-1, 0] + 1, num=100)
        _y = [funcValue(m, *params[frame]) for m in _x]
        ln.set_data(_x, _y)
        _ex = list(range(frame + 1))
        _ey = [errors[m] for m in _ex]
        erln.set_data(_ex, _ey)
        erax.set_xlabel("Error: " + str(errors[frame]))
        return ln

    # Keep a reference so the animation is not garbage-collected.
    anim = animation.FuncAnimation(figure, animate,
                                   frames=learning_iterations, interval=50,
                                   repeat=False, repeat_delay=1000)
    plt.show()


if __name__ == "__main__":
    main()