---
annotations_creators:
- machine-generated
language:
- en
language_creators:
- machine-generated
- expert-generated
license:
- mit
multilinguality:
- monolingual
pretty_name: MultiPL-E
size_categories:
- 1K<n<10K
---

The snippet below generates a completion for every problem in the dataset's test split, truncates it at the problem's stop tokens, and writes the result together with the problem's tests to one file per problem:

```python
def stop_at_stop_token(decoded_string, problem):
    """
    Truncates the generated text at the first stop token that appears
    after the prompt (the prompt itself may also contain stop tokens).
    """
    min_stop_index = len(decoded_string)
    for stop_token in problem["stop_tokens"]:
        stop_index = decoded_string.find(stop_token)
        if stop_index != -1 and stop_index > len(problem["prompt"]) and stop_index < min_stop_index:
            min_stop_index = stop_index
    return decoded_string[:min_stop_index]


for problem in problems["test"]:
    input_ids = tokenizer(
        problem["prompt"],
        return_tensors="pt",
    ).input_ids.cuda()
    generated_ids = model.generate(
        input_ids, max_length=256, pad_token_id=tokenizer.eos_token_id + 2
    )
    truncated_string = stop_at_stop_token(tokenizer.decode(generated_ids[0]), problem)
    # Write the truncated completion followed by the problem's tests to a
    # per-problem file with the target language's file extension.
    filename = problem["name"] + "." + LANG
    with open(filename, "w") as f:
        print(f"Created {filename}")
        f.write(truncated_string)
        f.write("\n")
        f.write(problem["tests"])
```
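The loop above expects `problems`, `tokenizer`, `model`, and `LANG` to already be in scope. A minimal sketch of that setup is shown below; it assumes a CUDA device, uses `Salesforce/codegen-350M-multi` and Lua purely as example choices, and assumes the dataset's configurations follow the `humaneval-<lang>` naming:

```python
import datasets
from transformers import AutoTokenizer, AutoModelForCausalLM

LANG = "lua"  # example target language; placeholder, any supported language works
MODEL_NAME = "Salesforce/codegen-350M-multi"  # example checkpoint, not prescribed above

# Load the problems for one source benchmark / target language pair
# (configuration name assumed to be "humaneval-<lang>").
problems = datasets.load_dataset("nuprl/MultiPL-E", f"humaneval-{LANG}")

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME).cuda()
```

With this setup, each generated `<name>.<LANG>` file contains one candidate solution followed by its tests and can be executed with the target language's toolchain.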