import torch
from transformers import pipeline, AutoModelForSeq2SeqLM, AutoTokenizer

max_length = 512
# Use the first GPU if available, otherwise fall back to the CPU.
device = 0 if torch.cuda.is_available() else "cpu"

model_id = "ArissBandoss/nllb-200-distilled-600M-finetuned-fr-to-mos-V1"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSeq2SeqLM.from_pretrained(model_id)


def goai_traduction(text, src_lang, tgt_lang):
    # Build a translation pipeline for the requested language pair and
    # return the translated text of the first (and only) result.
    trans_pipe = pipeline(
        "translation",
        model=model,
        tokenizer=tokenizer,
        src_lang=src_lang,
        tgt_lang=tgt_lang,
        max_length=max_length,
        device=device,
    )
    return trans_pipe(text)[0]["translation_text"]
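

# Minimal usage sketch (illustrative, not from the original): NLLB-200 models
# take FLORES-200 language codes, so French is "fra_Latn" and Mooré (Mossi) is
# "mos_Latn". The sample sentence below is purely an example.
if __name__ == "__main__":
    sample = "Bonjour, comment allez-vous ?"
    print(goai_traduction(sample, src_lang="fra_Latn", tgt_lang="mos_Latn"))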