Spaces:
Sleeping
Sleeping
added tokenizer definition
Browse files
app.py
CHANGED
@@ -3,6 +3,14 @@ import pickle
|
|
3 |
import gradio as gr
|
4 |
from openai import OpenAI
|
5 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
6 |
|
7 |
client = OpenAI(
|
8 |
base_url="https://yo4x63mj3sbmgpwc.us-east-1.aws.endpoints.huggingface.cloud/v1/",
|
|
|
3 |
import gradio as gr
|
4 |
from openai import OpenAI
|
5 |
|
6 |
+
from kiwipiepy import Kiwi

# Single shared Kiwi morphological analyzer instance; constructed once at
# import time and reused by every tokenizer() call.
tagger = Kiwi()


def tokenizer(t):
    """Split the input text into tokens using the Kiwi Korean analyzer.

    Args:
        t: Raw text to tokenize.

    Returns:
        A list of surface forms (one string per morpheme) as produced by
        ``tagger.tokenize``.
    """
    tokens = tagger.tokenize(t)
    return [token.form for token in tokens]
|
13 |
+
|
14 |
|
15 |
client = OpenAI(
|
16 |
base_url="https://yo4x63mj3sbmgpwc.us-east-1.aws.endpoints.huggingface.cloud/v1/",
|