import tensorflow as tf
from tensorflow.keras import Model, layers

img_w = 5
img_h = 3
img_c = 2

class TestModel(Model):
    # Set layers.
    def __init__(self):
        super(TestModel, self).__init__()
        self.single_file_pixels = img_w * img_h * img_c
        self.input_reshape = layers.Reshape((img_h, img_w, img_c))
        # Convolution layer; applied twice in the forward pass, so both
        # applications share the same weights.
        self.conv2d = layers.Conv2D(2, kernel_size=2, activation=tf.nn.relu, padding='same')
        self.main_max_pool = layers.MaxPool2D(2, strides=2, padding='same')
        # Flatten the data to a 1-D vector for the fully connected layer.
        self.flatten = layers.Flatten()
        # Fully connected layer.
        self.dense_layer = layers.Dense(20)
        # Apply Dropout (active only when training is True).
        self.dropout = layers.Dropout(rate=0.2)
        # Output layer, class prediction.
        self.dense_out = layers.Dense(2)

    # Set forward pass. Keras only routes the training flag automatically
    # when the argument is named `training`, so use that name here.
    def call(self, x, training=False, mask=None):
        raw_items = self.input_reshape(x)
        conv1 = self.conv2d(raw_items)
        conv2 = self.conv2d(conv1)
        pool = self.main_max_pool(conv2)
        fc1 = self.flatten(pool)
        fc2 = self.dense_layer(fc1)
        do1 = self.dropout(fc2, training=training)
        out = self.dense_out(do1)
        y = out
        if not training:
            # tf cross-entropy losses expect raw logits (no softmax), so
            # only apply softmax when not training.
            y = tf.nn.softmax(y)
        return y

with tf.device("/CPU:0"):
    test_model = TestModel()
    test_model.build(input_shape=(None, test_model.single_file_pixels))
    test_model.summary()
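
# Quick usage check (illustrative addition, not part of the original paste):
# feed a random batch through the model in both modes. Names like
# dummy_batch are hypothetical; shapes follow from the definitions above,
# i.e. input (batch, 30) and output (batch, 2).
dummy_batch = tf.random.normal((4, test_model.single_file_pixels))
train_logits = test_model(dummy_batch, training=True)    # raw logits
infer_probs = test_model(dummy_batch, training=False)    # softmax probabilities
print(train_logits.shape, infer_probs.shape)             # (4, 2) (4, 2)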