Tensorflow拟合函数 sin(x) + cos(x)
程序员文章站
2022-07-14 23:03:09
...
介绍
拟合函数 sin(x) + cos(x)，其原理与《神经网络的简单实例》一文类似：都是全连接网络，对每一条连接的参数进行更新，共训练 5000 次。
代码
import numpy as np
import matplotlib.pyplot as plt
import math
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
# Training data: 1000 evenly spaced x values on [-pi, pi], shaped as a
# column vector, with targets y = sin(x) + cos(x).
train_X = np.linspace(-math.pi, math.pi, 1000).reshape(-1, 1)
train_Y = np.sin(train_X) + np.cos(train_X)
# ---- Model definition -------------------------------------------------
# Graph inputs: a column of x values and the matching targets.
# NOTE(review): `input` shadows the builtin; kept because the training
# section below refers to it by this name.
input = {'X': tf.placeholder("float", [None, 1]),
         'Y': tf.placeholder("float", [None, 1])}

# Trainable weights and biases of a 1 -> 10 -> 6 -> 1 fully connected net.
parameter = {
    'W1': tf.Variable(tf.random_normal([1, 10])),
    'b1': tf.Variable(tf.zeros([1, 10])),
    'W2': tf.Variable(tf.random_normal([10, 6])),
    'b2': tf.Variable(tf.zeros([1, 6])),
    'W3': tf.Variable(tf.random_normal([6, 1])),
    'b3': tf.Variable(tf.zeros([1])),
}

# Forward pass: two ReLU hidden layers followed by a linear output layer.
hidden1 = tf.nn.relu(tf.matmul(input['X'], parameter['W1']) + parameter['b1'])
hidden2 = tf.nn.relu(tf.matmul(hidden1, parameter['W2']) + parameter['b2'])
z5 = tf.matmul(hidden2, parameter['W3']) + parameter['b3']

# Loss and training op: mean squared error minimized by plain
# gradient descent.
cost = tf.reduce_mean(tf.square(input['Y'] - z5))
learning_rate = 0.01
optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)

init = tf.global_variables_initializer()
plotdata = {"count": [], "loss": []}  # samples of the training curve
# ---- Training loop ----------------------------------------------------
# Runs 5001 full-batch gradient-descent steps, sampling the loss every
# other step for the training-curve plot, then shows the fit and the
# loss history.  (Indentation of the session body, lost in the original
# paste, is restored here.)
with tf.Session() as sess:
    sess.run(init)
    # The feed dict is loop-invariant: build it once.
    feed = {input['X']: train_X, input['Y']: train_Y}
    for i in range(5000 + 1):
        sess.run(optimizer, feed_dict=feed)
        if i % 2 == 0:
            loss = sess.run(cost, feed_dict=feed)
            # Skip diverged (NaN) losses.  The original guard
            # `not (loss == "NA")` compared a float to a string and
            # was always true, so it never filtered anything.
            if not np.isnan(loss):
                plotdata["count"].append(i)
                plotdata["loss"].append(loss)
    print(" Finish")

    # Top plot: training data (red dots) vs. the fitted curve.
    # Bottom plot: loss over training iterations.
    plt.subplot(211)
    plt.plot(train_X, train_Y, 'ro')
    plt.plot(train_X, sess.run(z5, feed_dict={input['X']: train_X}))
    plt.subplot(212)
    plt.plot(plotdata["count"], plotdata["loss"], 'b--')
    plt.show()
模拟图
上一篇: Dating with girls(2)
下一篇: 扫描线填充算法