Remove env variables subprocess call
#4
by pdufour - opened

app.py CHANGED
@@ -1,7 +1,12 @@
-import streamlit as st
-from huggingface_hub import HfApi
+import sys
 import os
+import urllib.request
 import subprocess
+import tarfile
+import tempfile
+
+import streamlit as st
+from huggingface_hub import HfApi
 
 HF_TOKEN = st.secrets.get("HF_TOKEN") or os.environ.get("HF_TOKEN")
 HF_USERNAME = (
@@ -9,18 +14,29 @@ HF_USERNAME = (
     or os.environ.get("HF_USERNAME")
     or os.environ.get("SPACE_AUTHOR_NAME")
 )
-
-
+
+TRANSFORMERS_BASE_URL = "https://github.com/xenova/transformers.js/archive/refs"
+TRANSFORMERS_REPOSITORY_REVISION = "3.0.0"
+TRANSFORMERS_REF_TYPE = "tags" if urllib.request.urlopen(f"{TRANSFORMERS_BASE_URL}/tags/{TRANSFORMERS_REPOSITORY_REVISION}.tar.gz").getcode() == 200 else "heads"
+TRANSFORMERS_REPOSITORY_URL = f"{TRANSFORMERS_BASE_URL}/{TRANSFORMERS_REF_TYPE}/{TRANSFORMERS_REPOSITORY_REVISION}.tar.gz"
 TRANSFORMERS_REPOSITORY_PATH = "./transformers.js"
+ARCHIVE_PATH = f"./transformers_{TRANSFORMERS_REPOSITORY_REVISION}.tar.gz"
 HF_BASE_URL = "https://huggingface.co"
 
 if not os.path.exists(TRANSFORMERS_REPOSITORY_PATH):
-
-
-
-
-    )
-
+    urllib.request.urlretrieve(TRANSFORMERS_REPOSITORY_URL, ARCHIVE_PATH)
+
+    with tempfile.TemporaryDirectory() as tmp_dir:
+        with tarfile.open(ARCHIVE_PATH, "r:gz") as tar:
+            tar.extractall(tmp_dir)
+
+        extracted_folder = os.path.join(tmp_dir, os.listdir(tmp_dir)[0])
+
+        os.rename(extracted_folder, TRANSFORMERS_REPOSITORY_PATH)
+
+    os.remove(ARCHIVE_PATH)
+    print("Repository downloaded and extracted successfully.")
+
 st.write("## Convert a HuggingFace model to ONNX")
 
 input_model_id = st.text_input(
@@ -51,7 +67,7 @@ if input_model_id:
     with st.spinner("Converting model..."):
         output = subprocess.run(
             [
-
+                sys.executable,
                 "-m",
                 "scripts.convert",
                 "--quantize",
@@ -61,7 +77,17 @@
             cwd=TRANSFORMERS_REPOSITORY_PATH,
            capture_output=True,
             text=True,
+            env={},
         )
+
+        # Log the script output
+        print("### Script Output ###")
+        print(output.stdout)
+
+        # Log any errors
+        if output.stderr:
+            print("### Script Errors ###")
+            print(output.stderr)
 
         model_folder_path = (
             f"{TRANSFORMERS_REPOSITORY_PATH}/models/{input_model_id}"
@@ -103,4 +129,4 @@ if input_model_id:
     st.write("You can now go and view the model on HuggingFace!")
     st.link_button(
         f"Go to {output_model_id}", output_model_url, type="primary"
-)
+    )
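
The headline change is the env={} argument added to the subprocess.run call, which keeps the Space's environment variables (including HF_TOKEN) out of the conversion subprocess. A minimal sketch of that behaviour, separate from app.py and using a dummy token value:

import os
import subprocess
import sys

# Dummy secret in the parent process; the name mirrors HF_TOKEN from app.py.
os.environ["HF_TOKEN"] = "dummy-secret"

child_code = "import os; print(os.environ.get('HF_TOKEN'))"

# Default behaviour: the child inherits the parent's environment and sees the token.
inherited = subprocess.run(
    [sys.executable, "-c", child_code], capture_output=True, text=True
)
print(inherited.stdout.strip())  # dummy-secret

# With env={} the child starts from an empty environment; the token is not visible.
isolated = subprocess.run(
    [sys.executable, "-c", child_code], capture_output=True, text=True, env={}
)
print(isolated.stdout.strip())  # None

An empty environment also means the child has no PATH, which is one reason an absolute interpreter path such as sys.executable (used in the diff) is convenient here.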
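For reference, the TRANSFORMERS_REF_TYPE line probes whether the pinned revision exists as a GitHub tag before falling back to a branch. Below is a hedged sketch of the same probe as a helper (github_ref_type is an illustrative name, not from the PR); unlike the one-liner in the diff it catches urllib.error.HTTPError, since urlopen raises on a missing tag rather than returning a non-200 status, and that error handling is an assumption about intent rather than part of the change:

import urllib.error
import urllib.request

def github_ref_type(base_url: str, revision: str) -> str:
    # Try the tag archive first; fall back to a branch ("heads") if it is missing.
    try:
        with urllib.request.urlopen(f"{base_url}/tags/{revision}.tar.gz") as response:
            if response.getcode() == 200:
                return "tags"
    except urllib.error.HTTPError:
        pass
    return "heads"

# Example with the values from app.py:
# github_ref_type("https://github.com/xenova/transformers.js/archive/refs", "3.0.0")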