# -*- coding: UTF-8 -*-
from sklearn.datasets import load_iris
import numpy as np


# Load the iris dataset: 150 samples, 4 features, 3 classes
iris = load_iris()
data = iris.data
label = iris.target
label = label.reshape(150, 1)           # turn the labels into a column vector
data_set = np.hstack((label, data))     # prepend the label column to the feature matrix

# Network architecture: 4 input features, three hidden layers, 3 output classes
num_DataLayer = 4
num_HiddenLayer1 = 5
num_HiddenLayer2 = 5
num_HiddenLayer3 = 4
num_OutputLayer = 3

# Initialize the pre-activation (net) value of every unit in each layer
net_DataLayer = np.zeros(num_DataLayer)
net_HiddenLayer1 = np.zeros(num_HiddenLayer1)
net_HiddenLayer2 = np.zeros(num_HiddenLayer2)
net_HiddenLayer3 = np.zeros(num_HiddenLayer3)
net_OutputLayer = np.zeros(num_OutputLayer)

# Initialize the activation value of every unit in each layer
active_DataLayer = np.zeros(num_DataLayer)
active_HiddenLayer1 = np.zeros(num_HiddenLayer1)
active_HiddenLayer2 = np.zeros(num_HiddenLayer2)
active_HiddenLayer3 = np.zeros(num_HiddenLayer3)
active_OutputLayer = np.zeros(num_OutputLayer)


# Randomly initialize the weight matrix between each pair of adjacent layers
WDH1 = np.random.random((num_HiddenLayer1, num_DataLayer))
WH1H2 = np.random.random((num_HiddenLayer2, num_HiddenLayer1))
WH2H3 = np.random.random((num_HiddenLayer3, num_HiddenLayer2))
WH3O = np.random.random((num_OutputLayer, num_HiddenLayer3))

# Bias terms added at each layer (a single scalar, broadcast over the layer)
biasDH1 = 1
biasH1H2 = 1
biasH2H3 = 1
bias3O = 1


##############################################
# Sigmoid activation function
def sigmoid(x):
    return 1 / (1 + np.exp(-x))
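
# Note (added sketch, not in the original listing): the backpropagation code
# below needs the derivative of the sigmoid. Because d(sigmoid(x))/dx equals
# sigmoid(x) * (1 - sigmoid(x)), it can be computed directly from an already
# activated value. The helper name `sigmoid_prime` is an assumption here.
def sigmoid_prime(activated):
    return activated * (1 - activated)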


##############################################
# Forward pass: propagate one input sample through all layers of the network
def forward(DataLayer):
    global net_DataLayer
    global net_HiddenLayer1
    global net_HiddenLayer2
    global net_HiddenLayer3
    global net_OutputLayer
    
    global active_DataLayer
    global active_HiddenLayer1
    global active_HiddenLayer2
    global active_HiddenLayer3
    global active_OutputLayer

    net_HiddenLayer1 = np.dot(WDH1, DataLayer) + biasDH1
    active_HiddenLayer1 = sigmoid(net_HiddenLayer1)
    
    net_HiddenLayer2 = np.dot(WH1H2, active_HiddenLayer1) + biasH1H2
    active_HiddenLayer2 = sigmoid(net_HiddenLayer2)
    
    net_HiddenLayer3 = np.dot(WH2H3, active_HiddenLayer2) + biasH2H3
    active_HiddenLayer3 = sigmoid(net_HiddenLayer3)
    
    net_OutputLayer = np.dot(WH3O, active_HiddenLayer3) + bias3O
    active_OutputLayer = sigmoid(net_OutputLayer)
    
    return active_OutputLayer


##############################################
# Squared-error loss against the one-hot encoding of the true class label
def loss(label_true, label_predict):
    Y = np.zeros(num_OutputLayer)
    Y[label_true] = 1                    # one-hot target vector
    err = label_predict - Y
    err = (err ** 2) / 2                 # per-output squared error
    total_loss = np.sum(err)
    return total_loss, err

# Compute the partial derivative of the loss with respect to each weight
##############################################
def cal_delta_WH3O(label_true, label_predict):
    global active_OutputLayer
    global net_OutputLayer
    global active_HiddenLayer3

    print("cal_delta_WH3O")
    # One-hot encode the true class so it can be compared with the prediction
    Y = np.zeros(num_OutputLayer)
    Y[label_true] = 1
    # Chain rule: dLoss/dW = (dLoss/d a_out) * (d a_out/d net_out) * (d net_out/dW)
    loss_active = label_predict - Y                              # dLoss/d a_out
    active_net = active_OutputLayer * (1 - active_OutputLayer)   # d a_out/d net_out
    # The outer product with the layer-3 activations gives a (3 x 4) gradient,
    # matching the shape of WH3O
    delta_WH3O = np.outer(loss_active * active_net, active_HiddenLayer3)
    return delta_WH3O
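
# Usage note (assumption, not in the original listing): after forward() has run
# on sample s, the output-layer gradient could be obtained with
#   grad_WH3O = cal_delta_WH3O(int(label[s, 0]), active_OutputLayer)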

##############################################
def cal_delta_H2H3():
    print("cal_delta_WH3O")

##############################################
def cal_delta_H1H2():
    print("cal_delta_WH3O")

##############################################
def cal_delta_DH1():
    print("cal_delta_WH3O")


##############################################
def update_weights():
    print("update_weights")

##############################################
def backward():
    print("backward")



# Run one sample through the network and compute its loss
s = 1
DataLayer = data[s, :]
print(DataLayer)

prediction = forward(DataLayer)
loss_total, loss_OutputLayer = loss(int(label[s, 0]), prediction)
print(loss_OutputLayer)


# Quick scratch test of the `global` keyword (unrelated to the network above)
ttt = 2
def f():
    global ttt
    ttt = 5

f()
print(ttt)