import gradio as gr
from anthropic import Anthropic
import tiktoken


def tokenize(text: str) -> tuple[int, int]:
    # Anthropic tokenizer (client-level count_tokens helper):
    # https://github.com/anthropics/anthropic-sdk-python
    anthropic = Anthropic()
    anthropic_count = anthropic.count_tokens(text)

    # OpenAI tokenizer (tiktoken with the cl100k_base encoding used by GPT-3.5):
    # https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb
    gpt_35_encoder = tiktoken.get_encoding("cl100k_base")
    tokens = gpt_35_encoder.encode(text)

    return anthropic_count, len(tokens)


counter = gr.Interface(
    title="LLM tokenizer",
    description="Enter text in the box on the left to see how many tokens it becomes when split by the Anthropic and OpenAI tokenizers.",
    fn=tokenize,
    inputs=gr.Textbox(lines=10, placeholder="Text Here..."),
    outputs=[
        gr.Number(value=0, label="Anthropic tokens", interactive=False),
        gr.Number(value=0, label="OpenAI GPT-3.5 tokens", interactive=False),
    ],
    live=True,
    allow_flagging="never",
)

counter.launch()
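# --- Optional: quick check of both tokenizers without the Gradio UI ---
# A minimal standalone sketch (run it as its own script or in a REPL, since
# counter.launch() above blocks). It assumes the same environment as the app,
# in particular an anthropic SDK release that still exposes the client-level
# count_tokens() helper; the sample string is only an illustration.
#
#     from anthropic import Anthropic
#     import tiktoken
#
#     sample = "Hello, tokenizer!"
#     print(Anthropic().count_tokens(sample))                          # Anthropic count
#     print(len(tiktoken.get_encoding("cl100k_base").encode(sample)))  # OpenAI GPT-3.5 count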