File size: 341 Bytes
31ef55f
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
from sentencepiece import SentencePieceProcessor
import gradio as gr

# Load the SentencePiece tokenizer model once at startup.
# NOTE(review): assumes "tokenizer.model" sits in the working directory —
# the constructor raises at import time if the file is missing; confirm deployment layout.
sp = SentencePieceProcessor(model_file="tokenizer.model")

def tokenize(input_text):
    """Encode *input_text* with the module-level SentencePiece model and
    return a human-readable token count string."""
    token_ids = sp.EncodeAsIds(input_text)
    token_count = len(token_ids)
    return f"Number of tokens: {token_count}"

# Build and launch the Gradio UI: a multi-line textbox in, token-count text out.
# Fix: the gr.inputs.* namespace was deprecated in Gradio 2.x and removed in 3.x;
# components are now top-level classes, so gr.inputs.Textbox -> gr.Textbox.
iface = gr.Interface(fn=tokenize, inputs=gr.Textbox(lines=7), outputs="text")
iface.launch()