from transformers import RobertaTokenizer, T5ForConditionalGeneration


def query(lang, user_prompt):
    # Load the CodeT5 tokenizer and the multilingual code-summarization checkpoint.
    tokenizer = RobertaTokenizer.from_pretrained('Salesforce/codet5-base-multi-sum')
    model = T5ForConditionalGeneration.from_pretrained('Salesforce/codet5-base-multi-sum')

    # Wrap the user-supplied function in the prompt template.
    prompt = f"""<|system|>As an expert in {lang}, summarize the following function.\n<|user|>\n {user_prompt}\n<|assistant|>"""

    # Tokenize the prompt and generate a short summary (at most 20 tokens).
    input_ids = tokenizer(prompt, return_tensors="pt").input_ids
    generated_ids = model.generate(input_ids, max_length=20)
    return tokenizer.decode(generated_ids[0], skip_special_tokens=True)
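

# Minimal usage sketch: the sample function below is illustrative only,
# not part of the original; any source snippet can be passed as user_prompt.
if __name__ == "__main__":
    sample_function = """def add(a, b):
    return a + b"""
    print(query("Python", sample_function))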