Advertisement
Not a member of Pastebin yet?
Sign up —
it unlocks many cool features!
# Side length (pixels) every training image is resized to.
IMG = 128

# Root directory that holds the pre-exported .npy training shards
# (Google Drive mounted inside Colab).
a = "/content/gdrive/My Drive/Colab Notebooks/"
def preprocess_train(img, label):
    """Normalize an image to [-0.5, 0.5] and resize it to (IMG, IMG).

    The label passes through untouched; intended for Dataset.map, so the
    (img, label) pair shape is preserved.
    """
    scaled = tf.cast(img, tf.float32) / 255 - 0.5
    resized = tf.image.resize(scaled, (IMG, IMG))
    return resized, label
batch_size = 100

# Load the four pre-split training shards and join them into one array.
# Collecting every shard first and calling np.concatenate once avoids the
# O(n^2) re-copying of the original loop, which concatenated into the
# growing array on every iteration. The redundant "./" that the original
# prepended to shards 2-4 is dropped so all four paths are built the same way.
# NOTE(review): allow_pickle=True will execute arbitrary pickle payloads on
# load — only safe because these .npy files are the author's own.
data_train = np.concatenate(
    [np.load(a + f"train-{i}.npy", allow_pickle=True) for i in range(1, 5)]
)
def train_gen():
    """Yield (image, class-id) pairs for the training split of data_train.

    Starts after the first ``int(len(data_train) * val_size)`` records —
    presumably that leading slice is held out for validation; confirm
    against the validation generator elsewhere in the file.
    """
    start = int(len(data_train) * val_size)
    for img, label in data_train[start:]:
        # Append a trailing channel axis so each sample is (h, w, 1).
        yield img[..., None], char_to_id[label]
# Build the training input pipeline. Fixes the original operator order,
# which chained ``.prefetch(-1).shuffle(1024).batch(...)``: a prefetch
# buffer placed *before* shuffle overlaps nothing useful, and tf.data's
# documented convention is shuffle -> batch -> (repeat) -> prefetch-last so
# the prefetch overlaps host-side input prep with device training steps.
# ``tf.data.AUTOTUNE`` equals -1; the name makes the intent explicit.
ds_train = (
    tf.data.Dataset.from_generator(
        train_gen,
        output_types=(tf.float32, tf.int32),
        # Variable spatial size, single channel; scalar label.
        output_shapes=((None, None, 1), ()),
    )
    .map(preprocess_train, num_parallel_calls=tf.data.AUTOTUNE)
    .shuffle(1024)
    .batch(batch_size)
    .repeat()
    .prefetch(tf.data.AUTOTUNE)
)
Advertisement
Add Comment
Please sign in to add a comment
Advertisement