Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
class GPT2Model(nn.Module):
    """GPT-2 transformer backbone.

    Holds token + learned positional embeddings, embedding dropout, a stack
    of ``config.n_layer`` decoder ``Block``s, and a final ``LayerNorm``.

    NOTE(review): ``init_weights`` and ``forward`` are unimplemented stubs in
    this source; only the module construction is functional.
    """

    def __init__(self, config):
        # nn.Module.__init__ takes no extra arguments; the original passed
        # `config` to it (a leftover from a GPT2PreTrainedModel base class),
        # which raises TypeError at construction time.
        super().__init__()
        # Token and learned absolute-position embedding tables.
        self.wte = nn.Embedding(config.vocab_size, config.n_embd)
        self.wpe = nn.Embedding(config.n_positions, config.n_embd)
        self.drop = nn.Dropout(config.embd_pdrop)
        # Transformer decoder stack; Block is defined elsewhere in the project.
        self.h = nn.ModuleList(
            [Block(config.n_ctx, config, scale=True) for _ in range(config.n_layer)]
        )
        self.ln_f = LayerNorm(config.n_embd, eps=config.layer_norm_epsilon)
        self.apply(self.init_weights)

    def init_weights(self, module):
        # nn.Module.apply calls fn(module) on every submodule, so this hook
        # must accept the module argument; the original zero-argument
        # signature would have raised TypeError before reaching the stub.
        raise NotImplementedError

    def forward(self, input_ids, past=None):
        # Stub: forward pass not implemented in this snippet.
        raise NotImplementedError
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement