Tags: lse function var and put return one inpu bsp
import tensorflow as tf


def add_layer(inputs, in_size, out_size, activation_function=None):
    """Append one fully-connected layer to the graph and return its output.

    Computes ``activation_function(inputs @ Weights + biases)`` (or the raw
    affine result when no activation is given).

    Args:
        inputs: 2-D tensor of shape ``[batch, in_size]``.
        in_size: Number of input features.
        out_size: Number of output units.
        activation_function: Optional callable applied element-wise to the
            affine output; ``None`` means a linear layer.

    Returns:
        Tensor of shape ``[batch, out_size]``.
    """
    # Random initial weights train much better than an all-zero start.
    Weight = tf.Variable(tf.random_normal([in_size, out_size]))
    # FIX: tf.zeros takes a shape *list*; the original `tf.zeros(1, out_size)`
    # passed out_size as the dtype argument and fails at runtime. The `+ 0.1`
    # belongs inside tf.Variable so the trainable bias itself starts at 0.1
    # (a small positive value, recommended over zero).
    biases = tf.Variable(tf.zeros([1, out_size]) + 0.1)
    Wx_plus_b = tf.matmul(inputs, Weight) + biases
    if activation_function is None:
        out_put = Wx_plus_b
    else:
        out_put = activation_function(Wx_plus_b)
    return out_put
标签:lse function var and put return one inpu bsp
Original article: https://www.cnblogs.com/francischeng/p/9690966.html