Academic Integrity: tutoring, explanations, and feedback — we don’t complete graded work or submit on a student’s behalf.

Machine Learning: Design a Multi-Layer Perceptron neural network and have it learn the function y = x^2 + 1


Question

Machine Learning: Design a Multi-Layer Perceptron neural network and have it learn the function y = x^2 + 1. Use the interval -1 to 1 in increments of 0.1 (this gives you 21 total data points). Use the generalized delta rule (backpropagation) weight update algorithm. Run the simulation twice: once with 2 hidden neurons and once with 5 hidden neurons. Generate a learning curve (MSE vs iterations) for each case. Create a learning graph as well. Here is what I have in Python so far.

import random
import math
# naming: a "li" suffix means layer l, neuron i

inputs = [-1.0, -0.9, -0.8, -0.7, -0.6, -0.5, -0.4, -0.3, -0.2, -0.1, 0,
          0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0]  # 21 points

def f(x):       # activation function (identity for now)
    return x

def fderiv(x):  # derivative of the identity activation
    return 1

def forward_pass(x, w):
    y = [x]  # y[0] is the input vector; y[l + 1] holds layer l's outputs
    for l in range(len(w)):
        yl = []
        n_inputs = len(w[l])      # rows: neurons feeding this layer
        n_outputs = len(w[l][0])  # cols: neurons in this layer
        for i in range(n_outputs):     # one net value per outgoing neuron
            netli = 0
            for j in range(n_inputs):  # sum over the incoming layer
                netli += w[l][j][i] * y[-1][j]
            yli = f(netli)
            yl.append(yli)
        y.append(yl)
    return y

def create_weights(structure):
    w = []
    n_layers = len(structure)
    for l in range(n_layers - 1):
        n_rows = structure[l]      # neurons in layer l
        n_cols = structure[l + 1]  # neurons in layer l + 1
        matrix = []
        for i in range(n_rows):
            # small random values: all-zero weights can never break symmetry
            matrix.append([random.uniform(-0.5, 0.5) for _ in range(n_cols)])
        w.append(matrix)
    return w

def sigmoidfunction(x):
    # needs math.exp (or **), not ^, which is bitwise XOR in Python
    return 1 / (1 + math.exp(-x))

print(" ".join(map(str, create_weights([5, 6, 7]))))
x = [1, 2, 3, 4, 5]
w = create_weights([5, 6, 7])

def error(d, y):
    return d - y  # error of one output neuron: desired minus actual

def back_pass(d, y, net, w):
    sigma = []
    # output layer: local error = (d - y) * f'(net)
    sigmal = []
    for i in range(len(y[-1])):
        ei = d[i] - y[-1][i]
        sigmali = ei * fderiv(net[-1][i])
        sigmal.append(sigmali)
    sigma.append(sigmal)
    # hidden layers, walking backwards -- this is where I'm stuck:
    # what combines f'(net) with the next layer's sigmas and weights?
    for l in range(len(y) - 2, 0, -1):
        sigmal = []  # local error of a layer
        for i in range(len(y[l])):
            sigmali = 0  # TODO: should use fderiv(net[...]) and sigma of layer l + 1
            sigmal.append(sigmali)
        sigma.append(sigmal)
    return sigma

# placeholders for the parts I still need:
def weightupdate():
    pass

def hiddenlayer():
    pass

def outputlayer():
    pass

def deltarule():
    pass

print(forward_pass(x, w))
  

I need help with the weight update for the hidden layer, backpropagation, and my graphs.
  
  
  
  

Explanation / Answer

The code already has the right skeleton: create_weights builds one weight matrix per pair of adjacent layers, forward_pass propagates activity through those matrices (one net sum per outgoing neuron), and back_pass computes the output-layer deltas. The three missing pieces are exactly the ones asked about: the hidden-layer delta, the weight update itself, and the two plots.
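Here is the generalized delta rule written in the code's own convention, where w[l][i][k] is the weight from neuron i in layer l to neuron k in layer l + 1, f' is the derivative of the activation function, d is the desired output, and eta is the learning rate:

    output layer L:   sigma_k(L) = (d_k - y_k(L)) * f'(net_k(L))
    hidden layer l:   sigma_i(l) = f'(net_i(l)) * sum_k( sigma_k(l+1) * w[l][i][k] )
    weight update:    w[l][i][k] += eta * sigma_k(l+1) * y_i(l)

The hidden-layer line is the part back_pass is missing: each hidden neuron's delta is its own f'(net) multiplied by the weighted sum of the deltas one layer closer to the output. Reusing the output-layer error ei inside the hidden loop, as the earlier draft did, is what made it wrong.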

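Below is a minimal, self-contained training sketch in the same plain-Python style (standard library only). It is a sketch under some assumptions rather than a drop-in completion of the code above: it fixes the network at 1 input, one hidden layer, and 1 output, adds an explicit bias input, and uses sigmoid hidden units with a linear output neuron (the targets run from 1 to 2, outside a sigmoid's 0..1 range). The names make_weights, forward, and train are mine. The backward pass is exactly the three delta-rule lines above, specialized to this shape:

import math
import random

random.seed(0)  # reproducible runs

def sigmoid(x):
    return 1.0 / (1.0 + math.exp(-x))

def make_weights(n_hidden):
    # input -> hidden: rows are [x, bias], columns are hidden neurons
    w_hidden = [[random.uniform(-0.5, 0.5) for _ in range(n_hidden)] for _ in range(2)]
    # hidden -> output: one weight per hidden neuron plus one bias weight
    w_out = [random.uniform(-0.5, 0.5) for _ in range(n_hidden + 1)]
    return w_hidden, w_out

def forward(x, w_hidden, w_out):
    inp = [x, 1.0]  # the input plus a constant bias
    net_h = [inp[0] * w_hidden[0][i] + inp[1] * w_hidden[1][i]
             for i in range(len(w_hidden[0]))]
    y_h = [sigmoid(n) for n in net_h] + [1.0]  # hidden outputs plus bias
    y_out = sum(y_h[i] * w_out[i] for i in range(len(w_out)))  # linear output
    return inp, y_h, y_out

def train(n_hidden, epochs=5000, eta=0.1):
    xs = [round(-1.0 + 0.1 * k, 1) for k in range(21)]  # -1.0 .. 1.0, step 0.1
    ds = [x * x + 1 for x in xs]                        # desired outputs
    w_hidden, w_out = make_weights(n_hidden)
    mse_history = []
    for _ in range(epochs):
        sq_err = 0.0
        for x, d in zip(xs, ds):
            inp, y_h, y_out = forward(x, w_hidden, w_out)
            e = d - y_out
            sq_err += e * e
            sigma_out = e  # linear output neuron, so f'(net) = 1
            # hidden deltas: f'(net_i) * sigma_out * w_out[i],
            # where f'(net) = s * (1 - s) for the sigmoid
            sigma_h = [y_h[i] * (1.0 - y_h[i]) * sigma_out * w_out[i]
                       for i in range(n_hidden)]
            # generalized delta rule weight updates
            for i in range(len(w_out)):
                w_out[i] += eta * sigma_out * y_h[i]
            for j in range(2):
                for i in range(n_hidden):
                    w_hidden[j][i] += eta * sigma_h[i] * inp[j]
        mse_history.append(sq_err / len(xs))  # MSE after this pass
    return w_hidden, w_out, mse_history

This does per-pattern (online) updates, which is the usual reading of the generalized delta rule for an assignment like this; accumulating the updates over all 21 points before applying them (batch mode) would also be acceptable.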
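For the graphs, matplotlib is the standard choice (it is the one extra install; everything above uses only the standard library). Reusing train and forward from the sketch above, this draws the two learning curves on one figure and a second figure, the learning graph, comparing each trained network's output with the target:

import matplotlib.pyplot as plt

w_h2, w_o2, mse2 = train(n_hidden=2)
w_h5, w_o5, mse5 = train(n_hidden=5)

# learning curves: MSE vs training iterations
plt.figure()
plt.plot(mse2, label="2 hidden neurons")
plt.plot(mse5, label="5 hidden neurons")
plt.xlabel("iteration (epoch)")
plt.ylabel("MSE")
plt.legend()
plt.title("Learning curves")

# learning graph: trained network outputs vs the target function
xs = [round(-1.0 + 0.1 * k, 1) for k in range(21)]
plt.figure()
plt.plot(xs, [x * x + 1 for x in xs], "k-", label="target: x^2 + 1")
plt.plot(xs, [forward(x, w_h2, w_o2)[2] for x in xs], "o--", label="MLP, 2 hidden")
plt.plot(xs, [forward(x, w_h5, w_o5)[2] for x in xs], "s--", label="MLP, 5 hidden")
plt.xlabel("x")
plt.ylabel("y")
plt.legend()
plt.title("Learned function vs target")
plt.show()

Expect the 5-neuron curve to drop faster and settle lower: the extra sigmoids give the network more basis functions to bend into the parabola, while 2 hidden units can only approximate it coarsely.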
  
