Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
"""Minimal keras-dgl GraphCNN demo: build toy graph data.

Constructs a symmetric weighted adjacency matrix ``A``, a node-feature
matrix ``X``, and one-hot class labels ``Y`` for a tiny 3-node graph,
to be fed into a keras-dgl ``GraphCNN`` model further down the script.
"""
import os
import sys

# Make the keras-deep-graph-learning submodule (and its examples folder)
# importable from the current working directory.
sys.path.append(os.path.join(os.getcwd(), "keras-deep-graph-learning"))
sys.path.append(os.path.join(os.getcwd(), "keras-deep-graph-learning", "examples"))

import numpy as np
from keras.layers import Dense, Activation, Dropout
from keras.models import Model, Sequential
from keras.regularizers import l2
from keras.optimizers import Adam
from keras_dgl.layers import GraphCNN
import keras.backend as K
from keras.utils import to_categorical

print("Creating our simple sample data...")

# Symmetric adjacency matrix of a 3-node weighted graph
# (node 0 <-> node 1 with weight 1, node 0 <-> node 2 with weight 5).
A = np.array([[0, 1, 5], [1, 0, 0], [5, 0, 0]])
print(A)

# Node features: one row per node, whatever we have there...
X = np.array([[1, 2, 10], [4, 3, 10], [0, 2, 11]])

# Notice, if we set A = identity matrix, then we'd effectively assume no
# edges and just do a basic MLP on the features.
# We could do the same by setting the graph_conv_filter below to Id.
# We could also set X to Id, and thus effectively assume no features, and
# in this way do an "edge" embedding, so effectively try to understand
# what's connected to what. We could then use that as feature in any way
# we like...

# Per-node integer class labels, converted to one-hot categorical form
# (labels, whatever we wanna classify things into).
Y_o_dim = np.array([1, 2, 1])
Y = to_categorical(Y_o_dim)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement