Commit dd0a91cd authored by DeepLearning VM
parents e8b32c57 139c8d03
@@ -84,11 +84,11 @@ def sample_sequence(model, length, context, num_samples=1, temperature=1,
     generated = context
     prev = context
     past = None
-    import pdb;pdb.set_trace()
     with torch.no_grad():
         for i in trange(length):
             # inputs = {'input_ids': generated, 'past': None, 'key_word': key_word, 'use_keyword':use_keyword}
-            inputs = {'input_ids': generated, 'past': None, 'attention_mask':attention_mask}
+            current_length = generated.shape[-1]
+            inputs = {'input_ids': generated, 'past': None, 'attention_mask':attention_mask[:,:,:current_length,:current_length]}
             logits, past = model(**inputs)
             next_token_logits = logits[0, -1, :] / (temperature if temperature>0 else 1.)
             filtered_logits = top_k_top_p_filtering(next_token_logits, top_k=top_k, top_p=top_p)
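
This hunk also drops a leftover pdb breakpoint and, more substantively, slices the precomputed attention mask to the number of tokens generated so far instead of passing it at full size. A minimal sketch of that idea, assuming a GPT-2-style model whose forward pass accepts a 4D mask of shape (batch, heads, seq_len, seq_len); sample_with_sliced_mask and its arguments are hypothetical stand-ins for the script's own objects, not the repository's API:

import torch

def sample_with_sliced_mask(model, context, attention_mask, length, temperature=1.0):
    # context: (1, context_len) token ids
    # attention_mask: (1, 1, max_len, max_len), precomputed once per sample
    generated = context
    with torch.no_grad():
        for _ in range(length):
            current_length = generated.shape[-1]
            # Slice the full-size mask down to the tokens seen so far;
            # the unsliced mask would not match the current sequence length.
            mask = attention_mask[:, :, :current_length, :current_length]
            logits, _ = model(input_ids=generated, past=None, attention_mask=mask)
            next_token_logits = logits[0, -1, :] / max(temperature, 1e-8)
            probs = torch.softmax(next_token_logits, dim=-1)
            next_token = torch.multinomial(probs, num_samples=1)  # shape (1,)
            generated = torch.cat([generated, next_token.unsqueeze(0)], dim=1)
    return generated
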
@@ -138,7 +138,6 @@ def run_model(args, model, tokenizer, test_loader):
         # else:
         #     x, type_x, pos_x, lm_x, x_len, meta = sample
         #     keyword_x = None
-        import pdb;pdb.set_trace()
         x, type_x, pos_x, lm_x, x_len, attention_mask = sample
         input_len = x_len[0]  # The number of tokens of the context utterances
         context_tokens = x[0][:input_len+1]  # at evaluation stage, the input is without the ground truth
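
For context, one plausible way to precompute the full-size mask that sample_sequence slices per step is a lower-triangular causal mask of shape (1, 1, max_len, max_len). This is an assumption about the mask's layout inferred from the slicing above, not the repository's actual construction; build_causal_mask and max_len are hypothetical names:

import torch

def build_causal_mask(max_len):
    # Lower-triangular matrix: position i may attend to positions <= i.
    mask = torch.tril(torch.ones(max_len, max_len))
    # Reshape to the (batch, heads, seq, seq) form sliced during sampling.
    return mask.view(1, 1, max_len, max_len)
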
@@ -20,8 +20,8 @@ pwd
 # python gpt_sample.py --model_dir mi_tuned_both --output_dir mi_tuned_both --num_turns 10 --keyword --augment --top_p 0.95
 mkdir -p ../models/mi_tuned_kbert_mask
-python gpt_tuning.py --output_dir mi_tuned_kbert_mask --num_train_epochs 10 --num_turns 5 --kbert
-python gpt_sample.py --model_dir mi_tuned_kbert_mask --output_dir mi_tuned_kbert --num_turns 5 --top_p 0.95
+python gpt_tuning.py --output_dir mi_tuned_kbert_mask --num_train_epochs 1 --num_turns 5 --kbert
+python gpt_sample.py --model_dir mi_tuned_kbert_mask --output_dir mi_tuned_kbert_mask --num_turns 5 --top_p 0.95 --kbert
 echo "Finished."