rmayormartins committed · Commit d77b896 · 1 Parent(s): 27e2c21

Adding app.py and updating requirements.txt

Files changed:
- README.md: +12 -2
- app.py: +58 -0
- requirements.txt: +2 -0
README.md CHANGED

@@ -1,6 +1,6 @@
 ---
-title: Onnx
-emoji:
+title: Onnx-Scope
+emoji: π
 colorFrom: purple
 colorTo: yellow
 sdk: gradio
@@ -10,4 +10,14 @@ pinned: false
 license: ecl-2.0
 ---

+## Information
+ONNX is an open-source format designed for representing machine learning models in a platform-independent manner. Developed collaboratively by Microsoft and Facebook (Meta), ONNX aims to foster interoperability in the AI ecosystem. It allows developers to move models between frameworks such as PyTorch and TensorFlow.
+
+## Additional Information
+- Developed by Ramon Mayor Martins (2023)
+- E-mail: [rmayormartins@gmail.com](mailto:rmayormartins@gmail.com)
+- Homepage: [https://rmayormartins.github.io/](https://rmayormartins.github.io/)
+- Twitter: [@rmayormartins](https://twitter.com/rmayormartins)
+- GitHub: [https://github.com/rmayormartins](https://github.com/rmayormartins)
+
 Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
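The Information section above notes that ONNX lets developers move models between frameworks such as PyTorch and TensorFlow. As a minimal sketch of that workflow (assuming PyTorch is installed locally; torch is not one of this Space's requirements, and TinyNet is a made-up example module), a model can be exported to a .onnx file that this Space can then inspect:

```python
# Hypothetical example: export a small PyTorch model to ONNX so it can be
# uploaded to the ONNX Model Scope Space. torch is assumed to be installed
# locally; it is not listed in this Space's requirements.txt.
import torch
import torch.nn as nn

class TinyNet(nn.Module):
    def __init__(self):
        super().__init__()
        self.fc = nn.Linear(4, 2)

    def forward(self, x):
        return torch.relu(self.fc(x))

model = TinyNet().eval()
dummy_input = torch.randn(1, 4)

# torch.onnx.export writes a framework-independent .onnx file.
torch.onnx.export(model, dummy_input, "tiny_net.onnx")
```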
app.py ADDED

@@ -0,0 +1,58 @@
+import gradio as gr
+import onnx
+from collections import Counter
+import zipfile
+import os
+
+def process_onnx(uploaded_file):
+
+    if zipfile.is_zipfile(uploaded_file.name):
+        with zipfile.ZipFile(uploaded_file.name, 'r') as zip_ref:
+            zip_ref.extractall("/tmp")
+            onnx_file = zip_ref.namelist()[0]
+            file_path = os.path.join("/tmp", onnx_file)
+    else:
+        file_path = uploaded_file.name
+
+
+    model = onnx.load(file_path)
+
+
+    info = {
+        "Model Name": model.graph.name,
+        "Number of Nodes": len(model.graph.node),
+        "Architecture Summary": Counter(),
+        "Nodes": []
+    }
+
+
+    for node in model.graph.node:
+        node_info = {
+            "Name": node.name,
+            "Type": node.op_type,
+            "Inputs": node.input,
+            "Outputs": node.output
+        }
+        info["Nodes"].append(node_info)
+        info["Architecture Summary"][node.op_type] += 1
+
+
+    output = []
+    for key, value in info.items():
+        if key != "Nodes":
+            output.append(f"{key}: {value}")
+    output.append("\nComplete Nodes:")
+    for node in info["Nodes"]:
+        output.append(str(node))
+
+    return '\n'.join(output)
+
+iface = gr.Interface(
+    fn=process_onnx,
+    inputs=gr.File(label="Upload .ONNX or .ZIP (with .onnx) File"),
+    outputs="text",
+    title="ONNX Model Scope",
+    description="Upload an ONNX file or a ZIP (containing an .onnx file) to extract and display its detailed information. This process can take some time depending on the size of the ONNX model."
+)
+
+iface.launch(debug=True)
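As a rough illustration of what process_onnx reports, the sketch below builds a tiny two-node graph with onnx.helper and applies the same Counter-based op-type summary, without the Gradio UI. The graph, node names, and expected output are invented for illustration and are not part of this commit:

```python
# Sketch: summarize a small in-memory ONNX graph the way process_onnx does.
# The two-node graph below is a made-up example.
from collections import Counter

import onnx
from onnx import TensorProto, helper

inp = helper.make_tensor_value_info("x", TensorProto.FLOAT, [1, 4])
out = helper.make_tensor_value_info("y", TensorProto.FLOAT, [1, 4])

nodes = [
    helper.make_node("Relu", ["x"], ["h"], name="relu_0"),
    helper.make_node("Identity", ["h"], ["y"], name="identity_0"),
]
graph = helper.make_graph(nodes, "tiny_graph", [inp], [out])
model = helper.make_model(graph)
onnx.checker.check_model(model)

# Same walk over model.graph.node that process_onnx performs.
summary = Counter(node.op_type for node in model.graph.node)
print("Model Name:", model.graph.name)            # tiny_graph
print("Number of Nodes:", len(model.graph.node))  # 2
print("Architecture Summary:", summary)           # Counter({'Relu': 1, 'Identity': 1})
```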
requirements.txt ADDED

@@ -0,0 +1,2 @@
+gradio
+onnx