Upload modeling_flash_llama.py
modeling_flash_llama.py CHANGED (+1, -1)
@@ -782,7 +782,7 @@ class LlamaForCausalLM(LlamaPreTrainedModel):
         output_hidden_states: Optional[bool] = None,
         return_dict: Optional[bool] = None,
         only_last_logit: Optional[bool] = None,
-        xentropy: Optional[bool] =
+        xentropy: Optional[bool] = True,
         is_padded_inputs: Optional[bool] = None,
     ) -> Union[Tuple, CausalLMOutputWithPast]:
         r"""
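For context, the changed line sits in the signature of LlamaForCausalLM.forward in a custom flash-attention Llama implementation, and the commit flips the default of the xentropy argument to True. Below is a minimal sketch of how a caller might override that default at call time; the repository id is hypothetical, and the role of xentropy (presumably selecting a fused cross-entropy loss path when labels are provided) is an assumption rather than something this commit confirms.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Hypothetical repo id for a checkpoint that ships this modeling_flash_llama.py.
repo_id = "your-org/flash-llama-7b"

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    trust_remote_code=True,    # load the custom modeling_flash_llama.py from the repo
    torch_dtype=torch.float16,
)

batch = tokenizer("Hello, world", return_tensors="pt")
# With this commit, xentropy defaults to True; pass False to opt out
# (assumption: xentropy toggles a fused cross-entropy loss kernel).
outputs = model(**batch, labels=batch["input_ids"], xentropy=False)
print(outputs.loss)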