import torch
import torch.nn as nn
import dgl
from dgl.nn.pytorch import GraphConv

class GCN(nn.Module):
    """Graph convolutional network: a GraphConv input layer, (n_layers - 1)
    hidden GraphConv layers, and a sigmoid GraphConv output layer."""

    def __init__(self,
                 in_feats,
                 n_hidden,
                 n_classes,
                 n_layers,
                 activation,
                 dropout):
        super(GCN, self).__init__()
        self.layers = nn.ModuleList()
        # input layer
        self.layers.append(GraphConv(in_feats, n_hidden, activation=activation))
        # hidden layers
        for i in range(n_layers - 1):
            self.layers.append(GraphConv(n_hidden, n_hidden, activation=activation))
        # output layer
        self.layers.append(GraphConv(n_hidden, n_classes, activation=torch.sigmoid))
        self.dropout = nn.Dropout(p=dropout)

    def forward(self, g, features):
        h = features
        for i, layer in enumerate(self.layers):
            # apply dropout between layers, but not to the raw input features
            if i != 0:
                h = self.dropout(h)
            h = layer(g, h)
        return h


def hindsight_loss(output, labels):
    """Hindsight (min-over-heads) loss: score every output column against the
    labels with binary cross-entropy and keep only the smallest, so just the
    closest prediction head is penalized."""
    if labels.dim() == 1:
        # reshape labels from [0, 0, 1, 0] to [[0], [0], [1], [0]]
        labels = labels.unsqueeze(1)

    labels = labels.to(torch.float32)

    ce_func = nn.BCELoss()  # TODO: remove sigmoid layer and change to BCEWithLogitsLoss
    loss = ce_func(output[:, 0, None], labels)
    for res in range(1, output.shape[1]):
        new_loss = ce_func(output[:, res, None], labels)
        loss = torch.min(loss, new_loss)

    return loss

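# --- Minimal usage sketch (not from the original paste) ---------------------
# A hedged example of how the pieces above might fit together: the toy graph,
# feature width, labels, and hyperparameters below are assumptions made for
# illustration only.
if __name__ == "__main__":
    # toy directed graph on 4 nodes; self-loops keep GraphConv from seeing
    # zero-in-degree nodes
    g = dgl.graph((torch.tensor([0, 1, 2]), torch.tensor([1, 2, 3])), num_nodes=4)
    g = dgl.add_self_loop(g)

    features = torch.randn(4, 8)           # 8-dim input feature per node
    labels = torch.tensor([0, 0, 1, 0])    # one binary label per node

    model = GCN(in_feats=8,
                n_hidden=16,
                n_classes=2,               # two sigmoid heads per node
                n_layers=2,
                activation=torch.relu,
                dropout=0.5)

    output = model(g, features)            # shape [4, 2], values in (0, 1)
    loss = hindsight_loss(output, labels)  # min BCE over the two heads
    print(loss.item())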