My code is as follows:
import numpy as np
import scipy.optimize as opt
import matplotlib.pyplot as plt
# %matplotlib inline  (Jupyter magic — must start with %, not #*, in a notebook)


def f(x, a, b, c, d):
    """Logistic-shaped model: a / (1 + exp(-c * (x - d))) + b.

    a = amplitude, b = vertical offset, c = steepness, d = horizontal shift.
    """
    return a / (1 + np.exp(-c * (x - d))) + b


# Random ground-truth parameters: a, c > 0 (exponential), b, d ~ N(0, 1).
a, c = np.random.exponential(size=2)
b, d = np.random.randn(2)

n = 100
x = np.linspace(-10, 10, n)
y_model = f(x, a, b, c, d)
# Noisy observations: uniform noise scaled to 20% of the amplitude a.
y = y_model + a * 0.2 * np.random.random(n)

# Plot the true curve against the noisy samples.
fig, ax = plt.subplots(1, 1, figsize=(6, 4))
ax.plot(x, y_model, '--k')
ax.plot(x, y, 'o')

# Recover the parameters from the noisy data by nonlinear least squares.
(a_, b_, c_, d_), _ = opt.curve_fit(f, x, y)
y_fit = f(x, a_, b_, c_, d_)

fig, ax = plt.subplots(1, 1, figsize=(6, 4))
ax.plot(x, y_model, '--k')
ax.plot(x, y, 'o')
ax.plot(x, y_fit, '-')


# FIX: the original code called sigmoid() / sigmoid_derivative() without ever
# defining them — these two functions are the missing definitions.
def sigmoid(z):
    """Element-wise logistic sigmoid: 1 / (1 + exp(-z))."""
    return 1.0 / (1.0 + np.exp(-z))


def sigmoid_derivative(s):
    """Derivative of the sigmoid, expressed in terms of its OUTPUT s = sigmoid(z).

    d/dz sigmoid(z) = sigmoid(z) * (1 - sigmoid(z)) = s * (1 - s), which is why
    backprop below passes the already-activated layer values, not the pre-activations.
    """
    return s * (1.0 - s)


class NeuralNetwork:
    """Minimal 2-layer fully connected network trained with plain backprop.

    Architecture: input -> 4 hidden sigmoid units -> 1 sigmoid output.
    Loss: sum of squared errors, sum((y - output) ** 2).
    """

    def __init__(self, x, y):
        self.input = x
        # Hidden layer: (n_features, 4); output layer: (4, 1).
        self.weights1 = np.random.rand(self.input.shape[1], 4)
        self.weights2 = np.random.rand(4, 1)
        self.y = y
        self.output = np.zeros(self.y.shape)

    def feedforward(self):
        """Run one forward pass, caching layer1 for use by backprop."""
        self.layer1 = sigmoid(np.dot(self.input, self.weights1))
        # BUG FIX: the original assigned to `self.outout` (typo), so
        # self.output was never updated and backprop trained against zeros.
        self.output = sigmoid(np.dot(self.layer1, self.weights2))

    def backprop(self):
        """One gradient step on both weight matrices via the chain rule."""
        # dLoss/dW2 through the output sigmoid.
        d_weights2 = np.dot(
            self.layer1.T,
            2 * (self.y - self.output) * sigmoid_derivative(self.output),
        )
        # dLoss/dW1, propagating the output error back through weights2.
        # BUG FIX: the original called undefined `sigmoid_dervative` (typo).
        d_weights1 = np.dot(
            self.input.T,
            np.dot(
                2 * (self.y - self.output) * sigmoid_derivative(self.output),
                self.weights2.T,
            ) * sigmoid_derivative(self.layer1),
        )
        # += is correct descent here: the factor (y - output) already carries
        # the sign flip from differentiating (y - output) ** 2.
        self.weights1 += d_weights1
        self.weights2 += d_weights2