Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
def hidden_init(layer):
    """Return symmetric uniform-initialization bounds for a hidden layer.

    Implements the DDPG weight-init scheme: hidden-layer weights are drawn
    from a uniform distribution on (-1/sqrt(fan_in), 1/sqrt(fan_in)).

    Args:
        layer: a module with a 2-D ``weight`` tensor, e.g. ``nn.Linear``
            (assumed Linear-like — TODO confirm no Conv layers are passed in).

    Returns:
        Tuple ``(-lim, lim)`` with ``lim = 1/sqrt(fan_in)``.
    """
    # nn.Linear stores weight as (out_features, in_features); fan_in is the
    # number of inputs, i.e. dim 1. (The common copy-pasted version uses
    # size()[0], which is actually the fan-out for Linear layers.)
    fan_in = layer.weight.data.size()[1]
    lim = 1. / np.sqrt(fan_in)
    return (-lim, lim)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement