Spaces:
Runtime error
Runtime error
File size: 2,361 Bytes
8655a4b |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 |
"""
Do optional replace of bos/eos/pad/unk.
Usage:
python3 -m fastchat.data.optional_replace --in-file input.json --out-file output.json --model-name-or-path <your_token_path>
Requirement:
pip3 install transformers tqdm
"""
import argparse
import json
import traceback
import transformers
from tqdm import tqdm
def replace_special_tokens(
tokenizer: transformers.PreTrainedTokenizer, text: str
) -> str:
if not text:
return text
def _insert_vline(token: str) -> str:
if len(token) < 2:
return " "
elif len(token) == 2:
return f"{token[0]}|{token[1]}"
else:
return f"{token[:1]}|{token[1:-1]}|{token[-1:]}"
if tokenizer.bos_token:
text = text.replace(tokenizer.bos_token, _insert_vline(tokenizer.bos_token))
if tokenizer.eos_token:
text = text.replace(tokenizer.eos_token, _insert_vline(tokenizer.eos_token))
if tokenizer.pad_token:
text = text.replace(tokenizer.pad_token, _insert_vline(tokenizer.pad_token))
if tokenizer.unk_token:
text = text.replace(tokenizer.unk_token, _insert_vline(tokenizer.unk_token))
return text
def replace(conv, tokenizer):
    """Rewrite special tokens in every message of one conversation, in place.

    No-op when *tokenizer* is falsy. Errors while processing a conversation
    are printed and swallowed (best-effort), so one bad record does not stop
    the whole run.
    """
    if not tokenizer:
        return
    try:
        for message in conv["conversations"]:
            message["value"] = replace_special_tokens(tokenizer, message["value"])
    except Exception:
        traceback.print_exc()
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--in-file", type=str, required=True)
    parser.add_argument("--out-file", type=str)
    parser.add_argument(
        "--model-name-or-path",
        type=str,
        help="The directory or address where the model token is stored.",
    )
    args = parser.parse_args()

    in_file = args.in_file
    out_file = args.out_file
    # Default output name is derived from the input name.
    if out_file is None:
        out_file = f"{in_file}_replace.json"

    # Only load a tokenizer when a model path is given; otherwise `replace`
    # is a no-op and the data is copied through unchanged.
    tokenizer = None
    if args.model_name_or_path:
        tokenizer = transformers.AutoTokenizer.from_pretrained(
            args.model_name_or_path,
            trust_remote_code=True,
            use_fast=False,
        )

    # Context managers close the handles even on error (the original left
    # both files open), and an explicit UTF-8 encoding avoids depending on
    # the platform's locale default.
    with open(in_file, "r", encoding="utf-8") as fin:
        content = json.load(fin)
    for conv in tqdm(content):
        replace(conv, tokenizer)
    with open(out_file, "w", encoding="utf-8") as fout:
        json.dump(content, fout, indent=2, ensure_ascii=False)
|