Commit d842262

Update get_start_advance.rst
1 parent 9a23c2d commit d842262


docs/user/get_start_advance.rst

Lines changed: 2 additions & 3 deletions
@@ -31,12 +31,11 @@ The fully-connected layer is `a = f(x*W+b)`, the most simple implementation is a
     def __init__(
         self,
         n_units,    # the number of units/channels of this layer
-        act=None,   # None: no activation, tf.nn.relu: ReLU ...
+        act=None,   # None: no activation, tf.nn.relu or 'relu': ReLU ...
         name=None,  # the name of this layer (optional)
     ):
-        super(Dense, self).__init__(name)  # auto naming, dense_1, dense_2 ...
+        super(Dense, self).__init__(name, act=act)  # auto naming, dense_1, dense_2 ...
         self.n_units = n_units
-        self.act = act

     def build(self, inputs_shape):  # initialize the model weights here
         shape = [inputs_shape[1], self.n_units]
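
For context, the hunk header above refers to the fully-connected computation `a = f(x*W+b)` that the documented Dense layer implements. Below is a minimal sketch of that computation in plain TensorFlow 2, not the TensorLayer layer API shown in the diff; the shapes and variable names are illustrative assumptions only:

    import tensorflow as tf

    # Illustrative shapes: a batch of 4 inputs with 3 features, projected to 5 units.
    x = tf.random.normal([4, 3])                                # input x
    W = tf.Variable(tf.random.normal([3, 5]), name="weights")   # weight matrix W
    b = tf.Variable(tf.zeros([5]), name="biases")               # bias b

    a = tf.nn.relu(tf.matmul(x, W) + b)   # a = f(x*W + b), here with f = ReLU (act=tf.nn.relu)
    print(a.shape)                        # (4, 5)

Passing `act` to the parent `__init__` (the new line 37) presumably lets the base layer apply `f` itself, which is why the explicit `self.act = act` assignment is dropped.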
