# token-counter / app.py
from transformers import AutoTokenizer
import gradio as gr

# Load the GPT-2 tokenizer once at startup so every request reuses it.
tokenizer = AutoTokenizer.from_pretrained("gpt2")


def tokenize(input_text):
    # Encode the text and report how many token IDs it produces.
    tokens = tokenizer(input_text)["input_ids"]
    return f"Number of tokens: {len(tokens)}"


# gr.inputs.Textbox was removed in newer Gradio releases; gr.Textbox is the current API.
iface = gr.Interface(fn=tokenize, inputs=gr.Textbox(lines=7), outputs="text")
iface.launch()
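
# A minimal sketch of running this Space locally, assuming the `transformers`
# and `gradio` packages are installed:
#
#   pip install transformers gradio
#   python app.py
#
# By default, Gradio serves the interface at http://127.0.0.1:7860.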