import gradio as gr
from typing import List


def prepare_input(question: str):
    # `prompt` and `tokenizer` come from the earlier fine-tuning steps.
    inputs = f"{prompt} {question}"
    input_ids = tokenizer(inputs, max_length=700, return_tensors="pt").input_ids
    return input_ids


def inference(question: str) -> str:
    input_data = prepare_input(question=question)
    input_data = input_data.to(model.device)
    outputs = model.generate(inputs=input_data, num_beams=10, top_k=10, max_length=1024)
    result = tokenizer.decode(token_ids=outputs[0], skip_special_tokens=True)
    # print("question:", question, "answer:", result)
    return result


# Spot-check against a sample from the fine-tuning dataset:
# test_id = 1160
# # "거실조명1 꺼주세요" ("Turn off living-room light 1") -> "Home:거실,NickName:조명1,ActionID:1061,Value:없음"
# print("model result:", inference(dataset_finetuning["train"][test_id]["Q"]))
# print("real result:", dataset_finetuning["train"][test_id]["C"]))

print(inference("거실 조명1 꺼주세요"))  # "Turn off living-room light 1"

# Launch a Gradio demo that loads the published checkpoint from the Hub.
gr.load("models/yeye776/t5-OndeviceAI-HomeIoT").launch()

# iface = gr.Interface(fn=pipe_home, inputs="text", outputs="text")
# iface.launch()
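# The model's answer is a flat "key:value" string such as
# "Home:거실,NickName:조명1,ActionID:1061,Value:없음" (see the commented sample above).
# A minimal sketch of turning that string into a dict for downstream IoT control;
# the `parse_answer` helper is not part of the original tutorial, just an illustration.
def parse_answer(answer: str) -> dict:
    """Split 'Home:거실,NickName:조명1,...' into {'Home': '거실', ...}."""
    fields = {}
    for pair in answer.split(","):
        key, _, value = pair.partition(":")
        fields[key.strip()] = value.strip()
    return fields

# e.g. parse_answer("Home:거실,NickName:조명1,ActionID:1061,Value:없음")["ActionID"] == "1061"
# parse_answer(inference("거실 조명1 꺼주세요")) would give the parsed fields for a live query.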