Spaces:
Running
Running
File size: 2,931 Bytes
6ade039 829f512 6ade039 bd5680b 6ade039 6d38642 bd5680b 829f512 bd5680b 6ade039 4ed057b 6ade039 da2515f 6ade039 bd5680b 6ade039 4ed057b 6ade039 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 |
# Install runtime dependencies at startup (Hugging Face Spaces style).
# NOTE: `pip.main` is an unsupported internal API (dropped from pip's public
# surface in pip 10) — per pip's own guidance, invoke pip as a subprocess of
# the current interpreter instead.
import subprocess
import sys

subprocess.check_call([sys.executable, "-m", "pip", "install", "torch"])
subprocess.check_call([sys.executable, "-m", "pip", "install", "transformers"])
import functools
import re

import torch
import gradio as gr
import transformers
from transformers import AutoTokenizer, AutoModelForSequenceClassification
@functools.lru_cache(maxsize=2)
def load_model(model_name):
    """Load (and cache) a sequence-classification model and its tokenizer.

    Memoized per model name: `inference()` calls this on every request, and
    without caching the large pretrained model would be re-instantiated each
    time. `lru_cache` keeps the first load and reuses it.

    Args:
        model_name: Hugging Face Hub model identifier.

    Returns:
        Tuple of ``(model, tokenizer)``.
    """
    # model
    model = AutoModelForSequenceClassification.from_pretrained(model_name)
    # tokenizer
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    return model, tokenizer
def inference(prompt_inputs):
    """Classify each sentence-like segment of the input text.

    Splits the text on runs of ``.``, ``?``, ``!``, classifies each segment
    with the feedback-prize model, and returns one line per segment of the
    form ``<segment>\\t=>\\t<LABEL>``.

    Args:
        prompt_inputs: Raw user text (may contain newlines).

    Returns:
        A newline-joined string of "segment => <label>" lines.
    """
    model_name = "Unggi/feedback_prize_kor"
    model, tokenizer = load_model(
        model_name = model_name
    )
    # preprocessing: collapse newlines so splitting sees one stream of text
    prompt_inputs = prompt_inputs.replace('\n', ' ')
    # split the prompt into sentence-like units; drop empty/whitespace-only
    # fragments — re.split leaves a trailing '' after final punctuation,
    # which would otherwise be tokenized and "classified" meaninglessly
    prompt_list = [p for p in re.split('[.?!]+', prompt_inputs) if p.strip()]
    class_id_list = []
    for prompt in prompt_list:
        inputs = tokenizer(
            prompt,
            return_tensors="pt"
        )
        with torch.no_grad():  # inference only — no gradients needed
            logits = model(**inputs).logits
        predicted_class_id = logits.argmax().item()
        class_id = model.config.id2label[predicted_class_id]
        class_id_list.append(class_id)
    outputs = []
    for p, c_id in zip(prompt_list, class_id_list):
        outputs.append(p + '\t' + '=>' + '\t' + '<' + c_id + '>')
    outputs = '\n'.join(outputs)
    return outputs
# Build the Gradio UI: one text box in, classified segments out.
demo = gr.Interface(
    fn=inference,
    inputs="text",
    outputs="text",  # the return value of inference()
    examples=[
        "μμ λΆν° μ¬μ±μ μ¬νμ°Έμ¬μ΄λμ΄λ μμλ₯Ό μ§μμ μ΄λ©° ν μλ νλμΌλ‘ νλν΄λκ°μΌλ©° νμ¬κΉμ§ κ³μ μμ±νλ±μ μ€μμ±μ΄ μ£Όλͺ©λ°κ³ μλ€. μμμ λ§νλ―μ΄ μμ΄ μ΄λ¦μ μ±μ μλ²μ§μ μ±μΌλ‘ μ§λ κ²μ μ΄μ λΆλͺ¨ λͺ¨λλ₯Ό λ°νμΌλ‘ μ΄λ¦μ μ§λ μκ³μ λλ₯Ό λμνκΈ°λ νκ³ μ§μ₯μμμ λ¨λλͺ¨λ μΉμ§κ³Ό μκΈ λ±μ λλ±νκ² λΆλ°°νλ©° λ¨λκ° μ ν΄μ Έ μλ μ§μλ€μ΄ μ±μ°¨λ³μ κ²½κ³κ° 무λμ§κ³Ό λμμ λꡬλ ν μ μλλ‘ νλ κ² μ΄μΈμ λ§μ μμ±νλ±μ μλ€μ΄ λλλκ³ μλ€. μμ½ν΄μ μ±μ°¨λ³μ΄ κ°κ°μΈμ ꡬμνκ³ μ§λ¨μ κ±Έλ¦Όλμ΄ λμ΄ μ¬λλ§λ€ μμ μ μ£Όμ₯κ³Ό νλμ λν μ νμ΄ λΆκ°νΌνλ©° μ¬νμ λ°μ μ μ ν΄νλ κ²λΏλ§ μλλΌ μ μ§κ΅ λλ λ°λμ§ν κ΅κ°λ‘ λμκ°λλ° λ°©ν΄κ° λλ€λ κ²μΈλ° μ΄λ₯Ό 극볡νκΈ° μν΄μλ μ±μ°¨λ³μ λν μκ°μ λ¨μ³λ²λ¦¬κ³ μμ±νλ±μ΄λ μ μ§μ μλ―Όμμμ κ°μΆ€μΌλ‘ μ μ§κ΅μ μν΄ κ΅κ°μ λΈλ ₯λ νμνμ§λ§, 무μλ³΄λ€ κ°μΈμ΄ μ±μ°¨λ³μ λν κ΄λμ μκ³ μμ±νλ±μ μν μ κ·Ήμ μΈ λΈλ ₯μ΄ μꡬλμ΄μΌ νλ€. κ·Έλ‘ μΈν΄ κ΅κ°μ κ°μΈμ μν λ°λμ§ν μ¬νκ° νμ±λλ€λ κ²μ΄λ€."
    ],
)

# NOTE(review): the original chained `.launch()` onto the Interface, binding
# `demo` to launch()'s return value, and then called `demo.launch()` again —
# a second launch on a non-Interface object. Launch exactly once here.
# Pass launch(share=True) to get an externally reachable link.
demo.launch()