How to extract and reuse weights from tf.layers abstractions?
I want to use TensorFlow abstractions like tf.layers.dense() wherever possible, instead of hand-written weight and bias variables, their initialization, relu() and matmul(). A rough equivalence sketch follows below.
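For context, a single tf.layers.dense() call bundles together what the manual version spells out. A minimal sketch (TF 1.x; x, n_in and n_out are illustrative placeholder names, not from my code):

# Hand-written pattern: explicit weight/bias variables plus matmul and relu
W = tf.Variable(tf.truncated_normal([n_in, n_out], stddev=0.2))
b = tf.Variable(tf.ones([n_out]))
out = tf.nn.relu(tf.matmul(x, W) + b)

# The same layer expressed through the abstraction
out = tf.layers.dense(x, n_out, activation=tf.nn.relu,
                      kernel_initializer=tf.truncated_normal_initializer(stddev=0.2),
                      bias_initializer=tf.ones_initializer())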
I am rewriting a model that starts with hundreds of lines of weight-and-bias definitions, after which the order of the layers is set in a function:
def model(data, infer=False):
    # Dropout is disabled at inference time
    keep_prob = 1.0 if infer else 0.5
    L2_regularizer = tf.contrib.layers.l2_regularizer(scale=1e-4)
    TNI = tf.truncated_normal_initializer(stddev=0.2, seed=seed)

    # Layers (layer1_*/layer2_* and seed come from the old hand-written code)
    conv = tf.nn.conv2d(data, layer1_weights, [1, 1, 1, 1], padding='SAME')
    hidden = tf.nn.relu(conv + layer1_biases)
    maxpool = tf.nn.max_pool(hidden, [1, 2, 2, 1], [1, 2, 2, 1], 'SAME')
    conv = tf.nn.conv2d(maxpool, layer2_weights, [1, 1, 1, 1], padding='SAME')
    hidden = tf.nn.relu(conv + layer2_biases)
    maxpool = tf.nn.max_pool(hidden, [1, 3, 3, 1], [1, 2, 2, 1], 'SAME')

    # Flatten and feed into the tf.layers abstractions
    shape = maxpool.get_shape().as_list()
    reshape = tf.reshape(maxpool, [shape[0], shape[1] * shape[2] * shape[3]])
    dense = tf.layers.dense(inputs=reshape,
                            units=64,
                            activation=tf.nn.relu,
                            kernel_initializer=TNI,
                            bias_initializer=tf.ones_initializer(),
                            kernel_regularizer=L2_regularizer,
                            name="Dense_1")
    dropout = tf.nn.dropout(dense, keep_prob=keep_prob, seed=seed)
    dense = tf.layers.dense(inputs=dropout,
                            units=10,
                            activation=tf.nn.relu,
                            kernel_initializer=TNI,
                            bias_initializer=tf.ones_initializer(),
                            kernel_regularizer=L2_regularizer,
                            name="Dense_2")
    return dense
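Since tf.layers.dense(..., name="Dense_1") registers its variables in the graph as "Dense_1/kernel" and "Dense_1/bias", one way to get at them is a minimal sketch like this (TF 1.x, assuming the layer names used above):

# Option 1: reopen the layer's variable scope with reuse=True
with tf.variable_scope("Dense_1", reuse=True):
    kernel = tf.get_variable("kernel")  # weight matrix, shape (flattened_dim, 64)
    bias = tf.get_variable("bias")      # bias vector, shape (64,)

# Option 2: filter the trainable-variables collection by name prefix
dense1_vars = [v for v in tf.trainable_variables()
               if v.name.startswith("Dense_1/")]

To reuse the same weights in a second call (e.g. a separate inference graph), tf.layers.dense also accepts reuse=True together with the same name (other_input here is a placeholder name):

logits = tf.layers.dense(inputs=other_input, units=64,
                         activation=tf.nn.relu,
                         name="Dense_1", reuse=True)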