import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

model_checkpoint = "anyantudre/flan-t5-ft-en2fr"

# Load the tokenizer and model once at startup instead of on every request;
# load_in_8bit=False keeps the weights in full precision.
tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)
model = AutoModelForSeq2SeqLM.from_pretrained(model_checkpoint, load_in_8bit=False)


def func(source_text):
    # Tokenize the input, generate the translation, and decode it back to text.
    inputs = tokenizer([source_text], return_tensors="pt", padding="longest")
    outputs = model.generate(**inputs)
    translation = tokenizer.batch_decode(outputs, skip_special_tokens=True)
    return translation[0]


title = "Fine-tuned FLAN-T5 | Traduction fr-en"
demo_status = "Demo is running on CPU"
description = "Details: https://www.kaggle.com/code/waalbannyantudre/fine-tuning-flan-t5-traduction-fr-en/notebook"
examples = [
    ["Hi. nice to meet you"],
]

demo = gr.Interface(
    fn=func,
    inputs=gr.Textbox(lines=5, label="Input text"),
    outputs=gr.Textbox(label="Translation"),
    title=title,
    description=description,
    examples=examples,
)

demo.launch()
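
# Optional smoke test (a minimal sketch, not required by the app): calling func()
# directly confirms the checkpoint downloads and produces a French translation
# without opening the UI. If enabled, run it before demo.launch(), since the
# blocking server call would otherwise keep it from executing.
#
#     print(func("Hi. nice to meet you"))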