A TensorFlow Cheat Sheet
Commonly used functions
- tf.saved_model.loader.load(sess, tags, export_dir)
- tf.get_default_graph() # Return the default Graph being used in the current thread.
- graph.get_tensor_by_name(tensor_name)
- tf.reshape(
tensor, shape, name=None
)
Define Layers
- tf.layers.conv2d(
    inputs,
    filters,
    kernel_size,
    strides=(1, 1),
    padding='valid',
    data_format='channels_last',
    dilation_rate=(1, 1),
    activation=None,
    use_bias=True,
    kernel_initializer=None,
    bias_initializer=tf.zeros_initializer(),
    kernel_regularizer=None,
    bias_regularizer=None,
    activity_regularizer=None,
    kernel_constraint=None,
    bias_constraint=None,
    trainable=True,
    name=None,
    reuse=None
  ) # convolution
- tf.layers.conv2d_transpose(
    inputs,
    filters,
    kernel_size,
    strides=(1, 1),
    padding='valid',
    data_format='channels_last',
    activation=None,
    use_bias=True,
    kernel_initializer=None,
    bias_initializer=tf.zeros_initializer(),
    kernel_regularizer=None,
    bias_regularizer=None,
    activity_regularizer=None,
    kernel_constraint=None,
    bias_constraint=None,
    trainable=True,
    name=None,
    reuse=None
  ) # upsample or deconvolution
- tf.add(x, y, name=None) # skip connection (element-wise addition)
Optimizers
- tf.train.AdamOptimizer(
    learning_rate=0.001,
    beta1=0.9,
    beta2=0.999,
    epsilon=1e-08,
    use_locking=False,
    name='Adam'
  )
Loss Function
- tf.nn.softmax_cross_entropy_with_logits(
    _sentinel=None,
    labels=None,
    logits=None,
    dim=-1,
    name=None
  )
Regularizer
- tf.contrib.layers.l2_regularizer(
    scale,
    scope=None
  )
Initializer
- tf.random_normal_initializer(
    mean=0.0,
    stddev=1.0,
    seed=None,
    dtype=tf.float32
  )