#! /usr/bin/python
# -*- coding: utf8 -*-



import tensorflow as tf

def identity(x, name=None):
    """The identity activation function, Shortcut is ``linear``.

    Parameters
    ----------
    x : a tensor input
        input(s)


    Returns
    --------
    A `Tensor` with the same type as `x`.
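
    Examples
    ---------
    >>> # minimal usage sketch (assumes an existing TensorLayer ``network``)
    >>> network = tl.layers.DenseLayer(network, n_units=100, name='dense_linear',
    ...                 act=tl.act.identity)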
    """
    return x

# Shortcut
linear = identity

def ramp(x=None, v_min=0, v_max=1, name=None):
    """The ramp activation function.

    Parameters
    ----------
    x : a tensor input
        input(s)
    v_min : float
        Inputs smaller than ``v_min`` are clipped to ``v_min``.
    v_max : float
        Inputs greater than ``v_max`` are clipped to ``v_max``.
    name : a string or None
        An optional name to attach to this activation function.


    Returns
    --------
    A `Tensor` with the same type as `x`.
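
    Examples
    ---------
    >>> # minimal usage sketch: clip activations into [0, 1]
    >>> # (assumes an existing TensorLayer ``network``)
    >>> network = tl.layers.DenseLayer(network, n_units=100, name='dense_ramp',
    ...                 act=lambda x : tl.act.ramp(x, v_min=0, v_max=1))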
    """
    return tf.clip_by_value(x, clip_value_min=v_min, clip_value_max=v_max, name=name)

def leaky_relu(x=None, alpha=0.1, name="LeakyReLU"):
    """The LeakyReLU, Shortcut is ``lrelu``.

    Modified version of ReLU, introducing a nonzero gradient for negative
    input.
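
    The returned value is computed as ``max(x, alpha * x)``, i.e. ``x`` for
    ``x >= 0`` and ``alpha * x`` otherwise (assuming ``0 <= alpha < 1``).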

    Parameters
    ----------
    x : A `Tensor` with type `float`, `double`, `int32`, `int64`, `uint8`,
        `int16`, or `int8`.
    alpha : `float`
        Slope of the function for negative inputs.
    name : a string or None
        An optional name to attach to this activation function.

    Examples
    ---------
    >>> network = tl.layers.DenseLayer(network, n_units=100, name = 'dense_lrelu',
    ...                 act= lambda x : tl.act.lrelu(x, 0.2))

    References
    ------------
    - `Rectifier Nonlinearities Improve Neural Network Acoustic Models, Maas et al. (2013) <http://web.stanford.edu/~awni/papers/relu_hybrid_icml2013_final.pdf>`_
    """
    with tf.name_scope(name) as scope:
        # x = tf.nn.relu(x)
        # m_x = tf.nn.relu(-x)
        # x -= alpha * m_x
        x = tf.maximum(x, alpha * x)
    return x

# Shortcut
lrelu = leaky_relu

def pixel_wise_softmax(output, name='pixel_wise_softmax'):
    """Return the softmax outputs of images, every pixels have multiple label, the sum of a pixel is 1.
    Usually be used for image segmentation.

    Parameters
    ------------
    output : tensor
        - For 2d image, 4D tensor [batch_size, height, width, channel], channel >= 2.
        - For 3d image, 5D tensor [batch_size, depth, height, width, channel], channel >= 2.

    Examples
    ---------
    >>> outputs = pixel_wise_softmax(network.outputs)
    >>> dice_loss = 1 - dice_coe(outputs, y_, epsilon=1e-5)
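    >>> # sanity-check sketch: the softmax is taken over the last (channel)
    >>> # dimension, so the channel values of every pixel sum to 1
    >>> channel_sum = tf.reduce_sum(outputs, -1)    # ~1.0 everywhere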

    References
    -----------
    - `tf.reverse <https://www.tensorflow.org/versions/master/api_docs/python/array_ops.html#reverse>`_
    """
    with tf.name_scope(name) as scope:
        return tf.nn.softmax(output)
        ## old implementation
        # exp_map = tf.exp(output)
        # if output.get_shape().ndims == 4:   # 2d image
        #     evidence = tf.add(exp_map, tf.reverse(exp_map, [False, False, False, True]))
        # elif output.get_shape().ndims == 5: # 3d image
        #     evidence = tf.add(exp_map, tf.reverse(exp_map, [False, False, False, False, True]))
        # else:
        #     raise Exception("output parameters should be 2d or 3d image, not %s" % str(output._shape))
        # return tf.div(exp_map, evidence)