5 examples of 'tf.keras.layers.Dense' in Python

Every line of the 'tf.keras.layers.Dense' code snippets below is scanned for vulnerabilities by our powerful machine learning engine, which combs millions of open source libraries to help keep your Python code secure.

All examples are scanned by Snyk Code

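Before the scanned snippets, here is a minimal sketch of the current tf.keras.layers.Dense API for orientation (the input size, layer widths, and variable names below are illustrative assumptions, not taken from the snippets):

import tensorflow as tf

# A tiny fully connected model built with tf.keras.layers.Dense.
inputs = tf.keras.Input(shape=(32,))
hidden = tf.keras.layers.Dense(64, activation='relu')(inputs)
outputs = tf.keras.layers.Dense(10, activation='softmax')(hidden)
model = tf.keras.Model(inputs, outputs)
model.summary()
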
# Example 1: thin wrapper around the TF1-style tf.layers.dense (a TF2
# counterpart using tf.keras.layers.Dense is sketched after this example).
def dense(inputs, units):
    return tf.layers.dense(inputs, units,
                           kernel_initializer=tf.random_normal_initializer(stddev=0.02),
                           bias_initializer=tf.constant_initializer(0.0))

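A rough tf.keras counterpart of the wrapper above, as a sketch (the function name dense_keras is illustrative; the initializer classes are the standard TF2 equivalents of the TF1 initializers used in the snippet):

def dense_keras(inputs, units):
    # Builds and applies a Dense layer, mirroring the TF1 wrapper above.
    layer = tf.keras.layers.Dense(
        units,
        kernel_initializer=tf.keras.initializers.RandomNormal(stddev=0.02),
        bias_initializer=tf.keras.initializers.Constant(0.0))
    return layer(inputs)
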
# Example 2: adds a single-unit Dense output layer using a legacy Keras
# signature (positional input/output dimensions and the init= argument).
def add_dense(self):
    # self.model.add(MaxoutDense(output_size,
    #                            output_size,
    #                            W_regularizer=l2(.01),
    #                            init=self.conf['--initialization']))
    self.model.add(Dense(int(self.conf['--input_dim']),
                         1,
                         init=self.conf['--initialization'],
                         activation=self.conf['--activation']))
    # self.model.add(Activation('softmax'))
    # model.add(Activation(conf['--activation']))

    return self.model

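If this were ported to current tf.keras, the call would most likely look like the sketch below (an assumption: the legacy init= argument maps to kernel_initializer, and the input dimension is inferred from the previous layer rather than passed positionally; the function name add_dense_modern is illustrative):

from tensorflow.keras.layers import Dense

def add_dense_modern(model, conf):
    # Hypothetical tf.keras port of add_dense() above: a single output unit,
    # with kernel_initializer replacing the legacy init= argument.
    model.add(Dense(1,
                    kernel_initializer=conf['--initialization'],
                    activation=conf['--activation']))
    return model
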
# Example 3: convolutional feature extractor that ends in a 4096-unit Dense
# layer (W_init, b_init, and l2 are defined elsewhere in the source file).
def model(inputs):
    conv_1 = Conv2D(64, (10, 10), activation='relu', kernel_initializer=W_init,
                    kernel_regularizer=l2(2e-4))(inputs)
    # conv_1 = Conv2D(64, (10, 10), activation='relu')(inputs)
    pool_1 = MaxPooling2D()(conv_1)

    conv_2 = Conv2D(128, (7, 7), activation='relu', kernel_regularizer=l2(2e-4),
                    kernel_initializer=W_init, bias_initializer=b_init)(pool_1)
    # conv_2 = Conv2D(128, (7, 7), activation='relu')(pool_1)
    pool_2 = MaxPooling2D()(conv_2)

    conv_3 = Conv2D(128, (4, 4), activation='relu', kernel_initializer=W_init,
                    kernel_regularizer=l2(2e-4), bias_initializer=b_init)(pool_2)
    pool_3 = MaxPooling2D()(conv_3)

    conv_4 = Conv2D(256, (4, 4), activation='relu', kernel_initializer=W_init,
                    kernel_regularizer=l2(2e-4), bias_initializer=b_init)(pool_3)

    f = Flatten()(conv_4)

    # Fix from the original snippet: apply the Dense layer to the flattened
    # features so the function returns a tensor rather than an unapplied layer.
    dense_1 = Dense(4096, activation="sigmoid", kernel_regularizer=l2(1e-3),
                    kernel_initializer=W_init, bias_initializer=b_init)(f)
    return dense_1

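The example above references W_init and b_init without defining them. In similar Siamese-network code they are typically small random-normal initializers; a plausible definition follows (an assumption, the actual values in the source file may differ):

from tensorflow.keras.initializers import RandomNormal

# Assumed initializers for the example above; not taken from the source file.
W_init = RandomNormal(mean=0.0, stddev=0.01)
b_init = RandomNormal(mean=0.5, stddev=0.01)
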
# Example 4: DenseNet-style network built from custom helpers (conv_layer,
# Max_Pooling, dense_block, transition_layer, Relu, Global_Average_Pooling,
# and Linear are defined elsewhere in the source file).
def Dense_net(self, input_x):
    x = conv_layer(input_x, filter=2 * self.filters, kernel=[7, 7], layer_name='conv0')
    x = Max_Pooling(x, pool_size=3, stride=2)

    """
    for i in range(self.nb_blocks):
        # 6 -> 12 -> 32
        x = self.dense_block(input_x=x, nb_layers=4, layer_name='dense_' + str(i))
        x = self.transition_layer(x, scope='trans_' + str(i))
    """

    x = self.dense_block(input_x=x, nb_layers=6, layer_name='dense_1')
    x = self.transition_layer(x, scope='trans_1')

    x = self.dense_block(input_x=x, nb_layers=12, layer_name='dense_2')
    x = self.transition_layer(x, scope='trans_2')

    x = self.dense_block(input_x=x, nb_layers=32, layer_name='dense_3')
    x = self.transition_layer(x, scope='trans_3')

    x = self.dense_block(input_x=x, nb_layers=32, layer_name='dense_final')  # in paper, nb_layers = 32

    x = Relu(x)
    x = Global_Average_Pooling(x)
    x = Linear(x)

    x = tf.reshape(x, [-1, 10])
    return x

# Example 5: call() of a custom dense layer that optionally applies an
# equalized learning-rate scale to the kernel at runtime.
def call(self, x):
    if self.equalized_lr:
        kernel = self.kernel * self.wscale
    else:
        kernel = self.kernel
    x = tf.matmul(x, kernel)
    if self.use_bias:
        x = tf.nn.bias_add(x, self.bias)
    return x
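Only call() is shown above; the attributes it uses (kernel, wscale, bias, equalized_lr, use_bias) must be created elsewhere in the layer. Below is a sketch of one possible surrounding layer, assuming the equalized-learning-rate scheme used in progressive-GAN-style code; the __init__ and build() details are assumptions, only call() comes from the snippet:

import numpy as np
import tensorflow as tf

class EqualizedDense(tf.keras.layers.Layer):
    # Hypothetical surrounding class for the call() shown above.
    def __init__(self, units, equalized_lr=True, use_bias=True, **kwargs):
        super().__init__(**kwargs)
        self.units = units
        self.equalized_lr = equalized_lr
        self.use_bias = use_bias

    def build(self, input_shape):
        fan_in = int(input_shape[-1])
        # Equalized LR (assumed): weights are stored at unit variance and
        # rescaled by a He-style constant on every forward pass.
        self.wscale = np.sqrt(2.0 / fan_in)
        self.kernel = self.add_weight(
            name='kernel', shape=(fan_in, self.units),
            initializer=tf.random_normal_initializer(stddev=1.0), trainable=True)
        if self.use_bias:
            self.bias = self.add_weight(
                name='bias', shape=(self.units,),
                initializer='zeros', trainable=True)

    def call(self, x):
        kernel = self.kernel * self.wscale if self.equalized_lr else self.kernel
        x = tf.matmul(x, kernel)
        if self.use_bias:
            x = tf.nn.bias_add(x, self.bias)
        return x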
