Skip to content

Commit

Permalink
[fix] remove useless code
Browse files Browse the repository at this point in the history
  • Loading branch information
nnnyt committed Mar 19, 2024
1 parent 9572d78 commit aefcc3b
Showing 1 changed file with 0 additions and 17 deletions.
17 changes: 0 additions & 17 deletions EduNLP/ModelZoo/jiuzhang/modeling.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,18 +28,12 @@
BaseModelOutput,
BaseModelOutputWithPastAndCrossAttentions,
Seq2SeqModelOutput,
Seq2SeqLMOutput,
Seq2SeqSequenceClassifierOutput,
)
from transformers.modeling_utils import PreTrainedModel
from transformers.utils import logging
from transformers import BartConfig as CPTConfig
from transformers import BertModel, BertConfig


logger = logging.get_logger(__name__)


def shift_tokens_right(input_ids: torch.Tensor, pad_token_id: int, decoder_start_token_id: int):
"""
Shift input ids one token to the right.
Expand Down Expand Up @@ -84,17 +78,6 @@ def _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int]
return inverted_mask.masked_fill(inverted_mask.bool(), torch.finfo(dtype).min)


def attention_mask_func(attention_scores, attention_mask):
    """Apply an additive attention mask to raw attention scores.

    The mask is expected to already be in additive form (e.g. 0 for
    positions to keep and a large negative value for positions to drop),
    so masking is a plain element-wise addition.
    """
    masked_scores = attention_scores + attention_mask
    return masked_scores


def init_method(std):
    """Return an in-place weight initializer drawing from N(0, std**2).

    The returned callable fills its tensor argument in place via
    ``torch.nn.init.normal_`` with mean 0.0 and the captured ``std``,
    and returns the same tensor.
    """

    def _normal_init(tensor):
        return torch.nn.init.normal_(tensor, mean=0.0, std=std)

    return _normal_init


class CPTLearnedPositionalEmbedding(nn.Embedding):
"""
This module learns positional embeddings up to a fixed maximum size.
Expand Down

0 comments on commit aefcc3b

Please sign in to comment.