Advertisement
Not a member of Pastebin yet?
Sign up —
it unlocks many cool features!
def build_subnetwork(self,
                     features,
                     logits_dimension,
                     training,
                     iteration_step,
                     summary,
                     previous_ensemble=None):
  """Builds a fully-connected DNN candidate for an AdaNet iteration.

  Stacks `self._num_layers` ReLU-activated dense layers of width
  `self._layer_size` on top of the float-cast input features, then adds a
  linear projection to `logits_dimension` outputs.

  Args:
    features: dict of input tensors; only `features[FEATURES_KEY]` is read.
    logits_dimension: width of the final linear (logits) layer.
    training: unused here; part of the `adanet.subnetwork.Builder` contract.
    iteration_step: unused here; part of the builder contract.
    summary: unused here; part of the builder contract.
    previous_ensemble: unused here; part of the builder contract.

  Returns:
    An `adanet.Subnetwork` carrying the last hidden layer, the logits, a
    complexity measure from `self._measure_complexity()`, and the layer
    count persisted for later iterations.
  """
  # Seeded initializer so repeated runs build identical candidates.
  init = tf.glorot_uniform_initializer(seed=self._seed)
  hidden = tf.to_float(features[FEATURES_KEY])
  for _ in range(self._num_layers):
    hidden = tf.layers.dense(
        hidden,
        units=self._layer_size,
        activation=tf.nn.relu,
        kernel_initializer=init)
  logits = tf.layers.dense(
      hidden,
      units=logits_dimension,
      kernel_initializer=init)
  return adanet.Subnetwork(
      last_layer=hidden,
      logits=logits,
      complexity=self._measure_complexity(),
      persisted_tensors={_NUM_LAYERS_KEY: tf.constant(self._num_layers)})
def build_subnetwork(self,
                     features,
                     logits_dimension,
                     training,
                     iteration_step,
                     summary,
                     previous_ensemble=None):
  """Builds a fully-connected Keras DNN candidate for an AdaNet iteration.

  Stacks `self._num_layers` ReLU-activated `tf.keras.layers.Dense` layers on
  top of the float-cast input features, then adds a linear projection to
  `logits_dimension` outputs.

  Args:
    features: dict of input tensors; only `features[FEATURES_KEY]` is read.
    logits_dimension: width of the final linear (logits) layer.
    training: unused here; part of the `adanet.subnetwork.Builder` contract.
    iteration_step: unused here; part of the builder contract.
    summary: unused here; part of the builder contract.
    previous_ensemble: unused here; part of the builder contract.

  Returns:
    An `adanet.Subnetwork` carrying the last hidden layer, the logits, a
    complexity measure from `self._measure_complexity()`, and the layer
    count persisted for later iterations.
  """
  input_layer = tf.to_float(features[FEATURES_KEY])
  # Seeded initializer so repeated runs build identical candidates.
  kernel_initializer = tf.glorot_uniform_initializer(seed=self._seed)
  last_layer = input_layer
  for _ in range(self._num_layers):
    last_layer = tf.keras.layers.Dense(
        # BUGFIX: was hard-coded units=64; use the configured layer width
        # to match the sibling tf.layers-based builder. Assumes this class
        # defines self._layer_size like its sibling — TODO confirm.
        units=self._layer_size,
        activation="relu",
        kernel_initializer=kernel_initializer,
        use_bias=True,
        bias_initializer=kernel_initializer)(last_layer)
  logits = tf.keras.layers.Dense(
      # BUGFIX: was hard-coded units=2, which silently ignored the
      # logits_dimension parameter and broke any task with != 2 classes.
      units=logits_dimension,
      activation=None,
      kernel_initializer=kernel_initializer,
      use_bias=True,
      bias_initializer=kernel_initializer)(last_layer)
  persisted_tensors = {_NUM_LAYERS_KEY: tf.constant(self._num_layers)}
  return adanet.Subnetwork(
      last_layer=last_layer,
      logits=logits,
      complexity=self._measure_complexity(),
      persisted_tensors=persisted_tensors)
Advertisement
Add Comment
Please sign in to add a comment
Advertisement