Update configuration_roformer.py
JunnYu authored May 26, 2021
1 parent 93da27c · commit 61fcfc2
Showing 1 changed file with 3 additions and 3 deletions.
src/roformer/configuration_roformer.py (3 additions, 3 deletions)

@@ -78,7 +78,7 @@ class RoFormerConfig(PretrainedConfig):
         gradient_checkpointing (:obj:`bool`, `optional`, defaults to :obj:`False`):
             If :obj:`True`, use gradient checkpointing to save memory at the expense of slower backward pass.
-    Example::
+    Example::
     >>> from transformers import RoFormerModel, RoFormerConfig

@@ -96,7 +96,7 @@ class RoFormerConfig(PretrainedConfig):
     def __init__(
         self,
         vocab_size=50000,
-        embedding_size=768,
+        embedding_size=None,
         hidden_size=768,
         num_hidden_layers=12,
         num_attention_heads=12,

@@ -117,7 +117,7 @@ def __init__(
         super().__init__(pad_token_id=pad_token_id, **kwargs)

         self.vocab_size = vocab_size
-        self.embedding_size = embedding_size
+        self.embedding_size = hidden_size if embedding_size is None else embedding_size
         self.hidden_size = hidden_size
         self.num_hidden_layers = num_hidden_layers
         self.num_attention_heads = num_attention_heads
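
For reference, a minimal sketch of the behavior this change introduces: leaving embedding_size unset (the new None default) makes the config fall back to hidden_size, while an explicit value is kept as given. The import path follows the docstring example above; this snippet is illustrative and not part of the commit.

    from transformers import RoFormerConfig

    # embedding_size omitted: the new None default falls back to hidden_size.
    config = RoFormerConfig()
    assert config.embedding_size == config.hidden_size == 768

    # An explicit embedding_size is kept as given.
    config = RoFormerConfig(embedding_size=384)
    assert config.embedding_size == 384

This lets models that untie the embedding width from the hidden width (an ALBERT-style factorized embedding) opt in explicitly, while existing configs keep the old behavior of a 768-wide embedding by default.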
