Skip to content

Commit

Permalink
Allow text generation for ProphetNetForCausalLM (huggingface#9707)
Browse files Browse the repository at this point in the history
* Moved ProphetNetForCausalLM's parent initialization after config update

* Added unit tests for generation for ProphetNetForCausalLM
Loading branch information
guillaume-be authored Jan 21, 2021
1 parent 910aa89 commit fb36c27
Show file tree
Hide file tree
Showing 2 changed files with 23 additions and 1 deletion.
2 changes: 1 addition & 1 deletion src/transformers/models/prophetnet/modeling_prophetnet.py
Original file line number Diff line number Diff line change
Expand Up @@ -1883,11 +1883,11 @@ def get_decoder(self):
)
class ProphetNetForCausalLM(ProphetNetPreTrainedModel):
def __init__(self, config):
super().__init__(config)
# set config for CLM
config = copy.deepcopy(config)
config.is_decoder = True
config.is_encoder_decoder = False
super().__init__(config)
self.prophetnet = ProphetNetDecoderWrapper(config)

self.padding_idx = config.pad_token_id
Expand Down
22 changes: 22 additions & 0 deletions tests/test_modeling_prophetnet.py
Original file line number Diff line number Diff line change
Expand Up @@ -302,6 +302,24 @@ def create_and_check_generate_with_past_key_value_states(
output_with_past_cache = model.generate(input_ids[:1], num_beams=2, max_length=5, do_sample=True)
self.parent.assertTrue(torch.all(output_with_past_cache == output_without_past_cache))

def create_and_check_decoder_generate_with_past_key_value_states(
    self,
    config,
    input_ids,
    decoder_input_ids,
    attention_mask,
    decoder_attention_mask,
    lm_labels,
):
    """Verify ProphetNetForCausalLM sampling is identical with and without the past-key-value cache."""
    model = ProphetNetForCausalLM(config=config).to(torch_device).eval()
    prompt = input_ids[:1]
    # do_sample=True makes generation stochastic, so reseed the RNG before
    # each call so the cached and uncached runs draw the same samples.
    torch.manual_seed(0)
    no_cache_output = model.generate(prompt, num_beams=2, max_length=10, do_sample=True, use_cache=False)
    torch.manual_seed(0)
    cache_output = model.generate(prompt, num_beams=2, max_length=10, do_sample=True)
    self.parent.assertTrue(torch.all(cache_output == no_cache_output))

def create_and_check_model_fp16_forward(
self,
config,
Expand Down Expand Up @@ -911,6 +929,10 @@ def test_decoder_model_generate(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_generate_with_past_key_value_states(*config_and_inputs)

def test_encoder_decoder_model_generate(self):
    # NOTE(review): despite the "encoder_decoder" name, this runs the
    # decoder-only ProphetNetForCausalLM generation check — presumably a
    # copy-paste misnomer from the test above; consider renaming.
    config_and_inputs = self.model_tester.prepare_config_and_inputs()
    self.model_tester.create_and_check_decoder_generate_with_past_key_value_states(*config_and_inputs)

def test_attn_mask_model(self):
    """Run the attention-mask consistency check on a freshly prepared config and input set."""
    self.model_tester.check_model_with_attn_mask(*self.model_tester.prepare_config_and_inputs())
Expand Down

0 comments on commit fb36c27

Please sign in to comment.