Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
AUTOTUNE = tf.data.experimental.AUTOTUNE


def __get__tag__data():
    """Build a labeled ``tf.data.Dataset`` from a COCO annotation file.

    Reads ``instances_train2017.json`` from the working directory, pairs the
    first 10 image file names with the category id of the first annotation
    found for each image, and maps ``process_img_label`` (defined elsewhere
    in this project) over the resulting dataset.

    Returns:
        None. Prints intermediate results for debugging.

    Raises:
        FileNotFoundError: if ``instances_train2017.json`` is not present.
    """
    with open("instances_train2017.json") as f:
        coco = json.load(f)
    images = coco["images"]
    annotations = coco["annotations"]

    # Index annotations by image_id in one O(M) pass instead of rescanning
    # the whole annotation list for every image (the original O(N*M) loop).
    # setdefault keeps only the FIRST annotation per image, matching the
    # original break-on-first-match behavior.
    first_cat_by_image = {}
    for ann in annotations:
        first_cat_by_image.setdefault(ann["image_id"], ann["category_id"])

    file_names = []  # image file names, paired index-for-index with cat_ids
    cat_ids = []     # category id of each image's first annotation
    # Try and load 10 images for now.
    for img in images[:10]:
        if img["id"] not in first_cat_by_image:
            # Bug fix: the original appended the file name even when no
            # annotation matched, which could leave the two lists with
            # different lengths and break from_tensor_slices below.
            continue
        file_names.append(img["file_name"])
        cat_ids.append(first_cat_by_image[img["id"]])

    print(file_names)  # sanity check: does the list look right?
    print(cat_ids)

    # Zip the two equal-length lists into a (filename, label) dataset.
    list_ds = tf.data.Dataset.from_tensor_slices((file_names, cat_ids))

    # Print a few elements to confirm the tensors are fed in correctly.
    for sample in list_ds.take(5):
        print(sample)

    # Set `num_parallel_calls` so multiple images are loaded/processed in parallel.
    labeled_ds = list_ds.map(process_img_label, num_parallel_calls=AUTOTUNE)
    print(labeled_ds)
    thing = labeled_ds.take(1)
    print(thing)
    for image, label in labeled_ds.take(1):  # author reported the error fired here
        print("Image shape: ", image.numpy().shape)
        print("Label: ", label.numpy())
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement