Comparing sigmoid and ReLU loss functions

Submitted by 霸气de小男生 on 2019-12-29 19:27:26

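The script below builds two single-unit models of the form activation(a * x + b), one with a sigmoid activation and one with ReLU. Feeding batches drawn from N(2, 0.1), each model is trained by gradient descent to push its output toward a constant target of 0.75, i.e. to minimize mean((activation(a * x + b) - 0.75)^2). The loss and the mean activation are recorded at every step so the two activation functions can be plotted against each other.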

import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf

# Create a session and fix the random seeds for reproducibility
sess = tf.Session()
tf.set_random_seed(5)
np.random.seed(42)

batch_size = 50

# Slope (a) and intercept (b) variables for the sigmoid model ...
a1 = tf.Variable(tf.random_normal(shape=[1, 1]))
b1 = tf.Variable(tf.random_uniform(shape=[1, 1]))

# ... and for the ReLU model
a2 = tf.Variable(tf.random_normal(shape=[1, 1]))
b2 = tf.Variable(tf.random_uniform(shape=[1, 1]))

# 500 input samples from N(2, 0.1), fed in batches through a placeholder
x = np.random.normal(2, 0.1, 500)
x_data = tf.placeholder(shape=[None, 1], dtype=tf.float32)

# One single-unit model per activation: activation(x * a + b)
sigmoid_activation = tf.sigmoid(tf.add(tf.matmul(x_data, a1), b1))
relu_activation = tf.nn.relu(tf.add(tf.matmul(x_data, a2), b2))

# L2 loss against a constant target of 0.75
loss1 = tf.reduce_mean(tf.square(tf.subtract(sigmoid_activation, 0.75)))
loss2 = tf.reduce_mean(tf.square(tf.subtract(relu_activation, 0.75)))

# Same optimizer and learning rate for both models
my_opt = tf.train.GradientDescentOptimizer(0.01)
train_sigmoid_step = my_opt.minimize(loss1)
train_relu_step = my_opt.minimize(loss2)
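Why the two loss curves should differ: the sigmoid's derivative is s(z) * (1 - s(z)), which never exceeds 0.25, while ReLU's derivative is exactly 1 for any positive input, so under the same learning rate the sigmoid model tends to adapt more slowly. A quick standalone NumPy check of those derivatives; the sigmoid() helper below is just for illustration:

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

z = np.linspace(-4, 4, 9)
print(sigmoid(z) * (1 - sigmoid(z)))  # sigmoid'(z): peaks at 0.25 at z = 0
print((z > 0).astype(float))          # relu'(z): 1 where z > 0, else 0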

init = tf.global_variables_initializer()
sess.run(init)

loss_vec_sigmoid = []
loss_vec_relu = []

activation_sigmoid = []
activation_relu = []

for i in range(750):
    # Sample a random batch and reshape it into a column vector
    rand_index = np.random.choice(len(x), size=batch_size)
    x_vals = np.transpose([x[rand_index]])

    # One gradient-descent step per model on the same batch
    sess.run(train_sigmoid_step, feed_dict={x_data: x_vals})
    sess.run(train_relu_step, feed_dict={x_data: x_vals})

    # Record the loss and the mean activation output for plotting
    loss_vec_sigmoid.append(sess.run(loss1, feed_dict={x_data: x_vals}))
    loss_vec_relu.append(sess.run(loss2, feed_dict={x_data: x_vals}))
    activation_sigmoid.append(np.mean(sess.run(sigmoid_activation, feed_dict={x_data: x_vals})))
    activation_relu.append(np.mean(sess.run(relu_activation, feed_dict={x_data: x_vals})))

# Plot the loss curves, then the mean activation outputs
plt.plot(loss_vec_relu, 'k-', label='relu loss')
plt.plot(loss_vec_sigmoid, 'r--', label='sigmoid loss')
plt.ylim([0, 1.0])
plt.legend(loc='upper right')
plt.show()

plt.plot(activation_sigmoid, 'k-', label='sigmoid activation')
plt.plot(activation_relu, 'r--', label='relu activation')
plt.legend(loc='lower right')
plt.show()
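Note that this code targets the TensorFlow 1.x graph API (tf.Session, tf.placeholder) and will not run as-is on TensorFlow 2.x. A minimal workaround sketch, assuming a TF 2.x install, is to route it through the v1 compatibility layer:

import tensorflow.compat.v1 as tf  # replaces `import tensorflow as tf`
tf.disable_eager_execution()       # turn graph-mode execution back on
# ...the rest of the script is unchanged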
