xxxxxx committed
Commit 73ef696 · 1 Parent(s): eb36871
Files changed (2)
  1. app.py +8 -2
  2. requirements.txt +1 -0
app.py CHANGED
@@ -3,6 +3,7 @@ from transformers import pipeline, AutoTokenizer
 import json
 from onnxruntime import InferenceSession
 import numpy as np
+from huggingface_hub import hf_hub_download
 
 # Page configuration
 st.set_page_config(page_title="中文垃圾信息分类器", page_icon="🚫", layout="wide")
@@ -11,11 +12,16 @@ st.set_page_config(page_title="中文垃圾信息分类器", page_icon="🚫", layout="wide")
 @st.cache_resource
 def load_classifiers():
     hf_classifier = pipeline("text-classification", model="app-x/chinese_spam_classifier")
-    onnx_session = InferenceSession("app-x/chinese_spam_classifier_onnx/model_optimized.onnx")
+
+    # Download the ONNX model from the Hugging Face Hub
+    onnx_path = hf_hub_download("app-x/chinese_spam_classifier_onnx", "model_optimized.onnx")
+    onnx_session = InferenceSession(onnx_path)
+
     tokenizer = AutoTokenizer.from_pretrained("app-x/chinese_spam_classifier_onnx")
 
     # Load the label configuration file
-    with open("app-x/chinese_spam_classifier_onnx/config.json", "r") as f:
+    config_path = hf_hub_download("app-x/chinese_spam_classifier_onnx", "config.json")
+    with open(config_path, "r") as f:
         config = json.load(f)
 
     id2label = config["id2label"]
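
For context, here is a minimal standalone sketch of how the downloaded ONNX session, tokenizer, and id2label map could be wired into an inference call; the commit itself only changes how the files are loaded. The graph's input/output layout and the sample text are assumptions for illustration, so verify them against session.get_inputs() / get_outputs() for the exported model.

# Minimal usage sketch (not part of the commit): input/output names and the
# sample text are assumptions; verify them against the exported ONNX graph.
import json
import numpy as np
from huggingface_hub import hf_hub_download
from onnxruntime import InferenceSession
from transformers import AutoTokenizer

repo_id = "app-x/chinese_spam_classifier_onnx"
tokenizer = AutoTokenizer.from_pretrained(repo_id)
session = InferenceSession(hf_hub_download(repo_id, "model_optimized.onnx"))
with open(hf_hub_download(repo_id, "config.json"), "r") as f:
    id2label = json.load(f)["id2label"]  # JSON keys are strings ("0", "1", ...)

def classify(text):
    # Tokenize straight to NumPy so the arrays can be fed to ONNX Runtime.
    enc = tokenizer(text, return_tensors="np", truncation=True, padding=True)
    # Pass only the tensors the exported graph actually declares as inputs.
    feeds = {i.name: enc[i.name] for i in session.get_inputs() if i.name in enc}
    logits = session.run(None, feeds)[0]
    # Softmax over the class axis (shifted by the max for numerical stability).
    exp = np.exp(logits - logits.max(axis=-1, keepdims=True))
    probs = exp / exp.sum(axis=-1, keepdims=True)
    idx = int(probs[0].argmax())
    return {"label": id2label[str(idx)], "score": float(probs[0][idx])}

print(classify("恭喜您中奖了，请点击链接领取"))  # illustrative spam-like input

In the Streamlit app this logic would live alongside the hf_classifier pipeline returned by load_classifiers(); the sketch just isolates the ONNX path.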
requirements.txt CHANGED
@@ -2,6 +2,7 @@ streamlit
 transformers
 torch
 onnxruntime
+huggingface_hub
 
 
 