import gradio
from os import system

# Install runtime dependencies at startup (useful when no requirements file is provided).
system("pip3 install torch")
system("pip3 install transformers")

from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the OpenELM-270M checkpoint; trust_remote_code=True is required because the
# model class lives in the repository rather than in transformers itself.
# Note: OpenELM uses the Llama tokenizer, so if this repo does not ship tokenizer
# files, the model card points to meta-llama/Llama-2-7b-hf instead.
tokenizer = AutoTokenizer.from_pretrained("apple/OpenELM-270M")
model = AutoModelForCausalLM.from_pretrained("apple/OpenELM-270M", trust_remote_code=True)


def work(inp_text):
    # Tokenize the prompt, sample a short continuation, and decode it back to text.
    input_ids = tokenizer.encode(inp_text, return_tensors="pt")
    output_ids = model.generate(
        input_ids,
        max_new_tokens=20,
        do_sample=True,
        temperature=0.3,
    )
    return tokenizer.decode(output_ids[0])


# Simple text-in / text-out web UI around the generation function.
demo = gradio.Interface(
    fn=work,
    inputs=["text"],
    outputs=["text"],
)
demo.launch()
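
# A minimal usage sketch for calling the running app programmatically (assumptions:
# the app is reachable at Gradio's default local URL, http://127.0.0.1:7860, and the
# `gradio_client` package is installed; "/predict" is the default endpoint name that
# gradio.Interface exposes):
#
#     from gradio_client import Client
#
#     client = Client("http://127.0.0.1:7860")
#     print(client.predict("Once upon a time", api_name="/predict"))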