我的第一个深度神经网络及代码实现

【概念】

神经网络包含:输入层,隐含层和输出层

神经网络在初始化阶段需要完成各层连接权重的随机初始化;

之后通过比对预测结果与目标值得到误差,再利用反向传播算法(BP)更新权重;

我的第一个深度神经网络及代码实现

【代码实现】

#coding:utf-8
#neural network class definition
import numpy
import scipy.spatial
import scipy.special
class neuralNetwork:
    """A simple 3-layer (input / hidden / output) feed-forward neural
    network, trained with backpropagation and a sigmoid activation.
    """

    def __init__(self, inputnodes, hiddennodes, outputnodes, learninggate):
        """Initialise the network.

        inputnodes   -- number of input-layer nodes
        hiddennodes  -- number of hidden-layer nodes
        outputnodes  -- number of output-layer nodes
        learninggate -- learning rate used in the weight updates
        """
        # Node counts for each layer.
        self.inodes = inputnodes
        self.hnodes = hiddennodes
        self.onodes = outputnodes

        # Link weight matrices: wih (input -> hidden) and who
        # (hidden -> output). Weights are drawn from a normal distribution
        # centred on 0.0 with standard deviation 1/sqrt(incoming links),
        # a common initialisation that keeps early signals well-scaled.
        self.wih = numpy.random.normal(0.0, pow(self.hnodes, -0.5),
                                       (self.hnodes, self.inodes))
        self.who = numpy.random.normal(0.0, pow(self.onodes, -0.5),
                                       (self.onodes, self.hnodes))

        # Learning rate.
        self.lr = learninggate

        # Activation function: the logistic sigmoid.
        self.activation_function = lambda x: scipy.special.expit(x)

    def train(self, inputs_list, targets_list):
        """Run one forward pass and one backpropagation weight update."""
        # Convert input and target lists into column vectors.
        inputs = numpy.array(inputs_list, ndmin=2).T
        targets = numpy.array(targets_list, ndmin=2).T

        # Forward pass: input -> hidden -> output.
        hidden_inputs = numpy.dot(self.wih, inputs)
        hidden_outputs = self.activation_function(hidden_inputs)
        final_inputs = numpy.dot(self.who, hidden_outputs)
        final_outputs = self.activation_function(final_inputs)

        # Output-layer error is (target - actual); hidden-layer error is the
        # output error split by the weights, recombined at the hidden nodes.
        output_errors = targets - final_outputs
        hidden_errors = numpy.dot(self.who.T, output_errors)

        # Gradient-descent updates; sigmoid derivative is y * (1 - y).
        self.who += self.lr * numpy.dot(
            output_errors * final_outputs * (1.0 - final_outputs),
            numpy.transpose(hidden_outputs))
        self.wih += self.lr * numpy.dot(
            hidden_errors * hidden_outputs * (1.0 - hidden_outputs),
            numpy.transpose(inputs))

    def query(self, inputs_list):
        """Forward-propagate inputs_list and return the output-layer
        activations as a column vector of shape (onodes, 1)."""
        inputs = numpy.array(inputs_list, ndmin=2).T

        # Forward pass: input -> hidden -> output.
        hidden_inputs = numpy.dot(self.wih, inputs)
        hidden_outputs = self.activation_function(hidden_inputs)
        final_inputs = numpy.dot(self.who, hidden_outputs)
        final_outputs = self.activation_function(final_inputs)

        return final_outputs