Update modeling_ltgbert.py
modeling_ltgbert.py (+2, -0)
@@ -452,6 +452,7 @@ class LtgBertModel(LtgBertPreTrainedModel):
         output_hidden_states: Optional[bool] = None,
         output_attentions: Optional[bool] = None,
         return_dict: Optional[bool] = None,
+        token_type_ids = None
     ) -> Union[Tuple[torch.Tensor], BaseModelOutput]:

         output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions

@@ -498,6 +499,7 @@ class LtgBertForMaskedLM(LtgBertModel):
         output_attentions: Optional[bool] = None,
         return_dict: Optional[bool] = None,
         labels: Optional[torch.LongTensor] = None,
+        token_type_ids = None
     ) -> Union[Tuple[torch.Tensor], MaskedLMOutput]:
         r"""
         labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
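For context, the two added `token_type_ids = None` parameters let `forward()` accept (and simply ignore) the `token_type_ids` tensor that BERT-style tokenizers commonly return, so the model can be called with the unpacked tokenizer output. A minimal usage sketch, assuming a placeholder Hub repo id and that the checkpoint is loaded with `trust_remote_code=True`:

import torch
from transformers import AutoModelForMaskedLM, AutoTokenizer

# "babylm/ltgbert-100m" is a placeholder repo id; substitute the actual checkpoint.
repo_id = "babylm/ltgbert-100m"
tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForMaskedLM.from_pretrained(repo_id, trust_remote_code=True)

# If the tokenizer output includes token_type_ids, unpacking it with **encoded
# previously raised: TypeError: forward() got an unexpected keyword argument
# 'token_type_ids'. After this change the keyword is accepted and ignored.
encoded = tokenizer("Paris is the [MASK] of France.", return_tensors="pt")
with torch.no_grad():
    outputs = model(**encoded)
print(outputs.logits.shape)  # (batch_size, sequence_length, vocab_size)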