Mirror of https://github.com/vllm-project/vllm.git
[Bugfix] Correct LayerNorm epsilon parameter in modernbert.py (#27008)
Signed-off-by: bogdanm <152898065+bogdan01m@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
@@ -39,9 +39,12 @@ class ModernBertEmbeddings(nn.Module):
         self.tok_embeddings = VocabParallelEmbedding(
             config.vocab_size, config.hidden_size
         )
-        self.norm = nn.LayerNorm(
-            config.hidden_size, eps=config.layer_norm_eps, bias=config.norm_bias
+        eps = (
+            getattr(config, "norm_eps", None)
+            or getattr(config, "layer_norm_eps", None)
+            or 1e-5
         )
+        self.norm = nn.LayerNorm(config.hidden_size, eps=eps, bias=config.norm_bias)
 
     def get_input_embeddings(self, input_ids: torch.Tensor) -> torch.Tensor:
         return self.tok_embeddings(input_ids)
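For context, the HuggingFace ModernBERT config exposes its epsilon as norm_eps, while the previous code read layer_norm_eps unconditionally; the patch resolves whichever attribute is present and defaults to 1e-5 otherwise. Below is a minimal, self-contained sketch of the same fallback logic, not vLLM's actual code: SimpleNamespace stands in for the real config object, and the bias= keyword on nn.LayerNorm assumes PyTorch 2.1 or newer.

from types import SimpleNamespace

import torch.nn as nn


def resolve_layer_norm_eps(config) -> float:
    # Mirrors the patched lookup: prefer norm_eps, fall back to
    # layer_norm_eps, then default to 1e-5. Because the chain uses
    # `or`, a value of None (or 0) falls through to the next candidate.
    return (
        getattr(config, "norm_eps", None)
        or getattr(config, "layer_norm_eps", None)
        or 1e-5
    )


# ModernBERT-style config: only norm_eps is defined.
cfg_modern = SimpleNamespace(hidden_size=768, norm_eps=1e-6, norm_bias=False)
# Legacy BERT-style config: only layer_norm_eps is defined.
cfg_legacy = SimpleNamespace(hidden_size=768, layer_norm_eps=1e-12, norm_bias=True)
# Neither attribute present: the 1e-5 default applies.
cfg_bare = SimpleNamespace(hidden_size=768, norm_bias=True)

for cfg in (cfg_modern, cfg_legacy, cfg_bare):
    eps = resolve_layer_norm_eps(cfg)
    norm = nn.LayerNorm(cfg.hidden_size, eps=eps, bias=cfg.norm_bias)
    print(norm.eps)  # 1e-06, then 1e-12, then 1e-05

One consequence of the `or` chain worth noting: an explicit epsilon of 0 is treated as unset and falls through to the next candidate, which is acceptable here since a zero epsilon would defeat the numerical-stability purpose of the parameter.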