import subprocess
import sys

# Install dependencies at runtime (pip.main() was removed in pip 10+, so invoke pip through a subprocess instead)
subprocess.check_call([sys.executable, "-m", "pip", "install", "torch", "transformers"])

import re

import torch
import gradio as gr
from transformers import AutoTokenizer, AutoModelForSequenceClassification


def load_model(model_name):
    # model
    model = AutoModelForSequenceClassification.from_pretrained(model_name)
    # tokenizer
    tokenizer = AutoTokenizer.from_pretrained(model_name)

    return model, tokenizer


def inference(prompt_inputs):
    model_name = "Unggi/feedback_prize_kor"

    model, tokenizer = load_model(
        model_name=model_name
    )

    # preprocessing
    prompt_inputs = prompt_inputs.replace('\n', ' ')

    # split the prompt at sentence-ending punctuation and drop empty pieces
    prompt_list = [p.strip() for p in re.split('[.?!]+', prompt_inputs) if p.strip()]

    class_id_list = []

    for prompt in prompt_list:
        inputs = tokenizer(
            prompt,
            return_tensors="pt"
        )

        with torch.no_grad():
            logits = model(**inputs).logits

        # pick the highest-scoring class and map it to its label
        predicted_class_id = logits.argmax().item()
        class_id = model.config.id2label[predicted_class_id]

        class_id_list.append(class_id)

    # one "sentence<TAB>label" line per split sentence
    outputs = []
    for p, c_id in zip(prompt_list, class_id_list):
        outputs.append(p + '\t' + c_id)

    outputs = '\n'.join(outputs)

    return outputs


demo = gr.Interface(
    fn=inference,
    inputs="text",
    outputs="text",  # return value
    examples=[
        "민주주의 국가에서 국민은 주인이다."
    ]
)

# setting launch(share=True) generates a link that can be accessed externally
demo.launch()