NimaBoscarino committed 82adb55 (parent: 349e976): UX changes, search for models with API

app.py CHANGED
@@ -1,5 +1,5 @@
 import gradio as gr
-from huggingface_hub import ModelCard
+from huggingface_hub import ModelCard, HfApi

 from compliance_checks import (
     ComplianceSuite,
@@ -9,6 +9,8 @@ from compliance_checks import (
     ComputationalRequirementsCheck,
 )

+hf_api = HfApi()
+
 checks = [
     IntendedPurposeCheck(),
     GeneralLimitationsCheck(),
@@ -21,9 +23,20 @@ def status_emoji(status: bool):
     return "✅" if status else "🛑"


-def …
+def search_for_models(query: str):
+    if query.strip() == "":
+        return examples, ",".join([e[0] for e in examples])
+    models = [m.id for m in list(iter(hf_api.list_models(search=query, limit=10)))]
+    model_samples = [[m] for m in models]
+    models_text = ",".join(models)
+    return model_samples, models_text
+
+
+def load_model_card(index, options_string: str):
+    options = options_string.split(",")
+    model_id = options[index]
     card = ModelCard.load(repo_id_or_path=model_id).content
-    return card
+    return card


 def run_compliance_check(model_card: str):
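For reference, the two huggingface_hub calls this hunk introduces can be exercised on their own. The following is a minimal standalone sketch, assuming a huggingface_hub version where ModelInfo exposes .id (as the diff itself does); the search query and the printed slice are arbitrary illustrative choices:

from huggingface_hub import HfApi, ModelCard

hf_api = HfApi()

# Search the Hub for up to 10 models matching a free-text query,
# mirroring hf_api.list_models(search=query, limit=10) in search_for_models.
model_ids = [m.id for m in hf_api.list_models(search="distilbert", limit=10)]
print(model_ids)

# Load one result's model card and grab its Markdown body,
# mirroring ModelCard.load(repo_id_or_path=model_id).content in load_model_card.
if model_ids:
    card_text = ModelCard.load(repo_id_or_path=model_ids[0]).content
    print(card_text[:300])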
@@ -47,23 +60,29 @@ def compliance_result(compliance_check: ComplianceCheck):
     return accordion, description


-def …
+def read_file(file_obj):
     with open(file_obj.name) as f:
         model_card = f.read()
-    return model_card
+    return model_card


 model_card_box = gr.TextArea(label="Model Card")

 # Have to destructure everything since I need to delay rendering.
 col = gr.Column()
-submit_markdown = gr.Button(value="Run validation checks")
 tab = gr.Tab(label="Results")
 col2 = gr.Column()
 compliance_results = [compliance_result(c) for c in suite.checks]
 compliance_accordions = [c[0] for c in compliance_results]
 compliance_descriptions = [c[1] for c in compliance_results]

+examples = [
+    ["bigscience/bloom"],
+    ["roberta-base"],
+    ["openai/clip-vit-base-patch32"],
+    ["distilbert-base-cased-distilled-squad"],
+]
+
 with gr.Blocks(css="""\
 #file-upload .boundedheight {
     max-height: 100px;
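The read_file helper added here only needs an object whose .name attribute points at a readable path, which is what Gradio's UploadButton passes to its upload callback. A small self-contained sketch, using a made-up file and a stand-in object in place of a real upload:

from pathlib import Path
from types import SimpleNamespace

def read_file(file_obj):
    # Same logic as the helper added above: open the temporary file behind
    # the upload and return its text for the "Model Card" text area.
    with open(file_obj.name) as f:
        return f.read()

# Hypothetical stand-in for the object an UploadButton hands to its callback.
sample_path = Path("sample_card.md")
sample_path.write_text("# Demo model\n\nIntended purpose: illustration only.\n")
print(read_file(SimpleNamespace(name=str(sample_path))))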
@@ -82,7 +101,7 @@ code {
 provision of information to users”. **(DISCLAIMER: this is NOT a commercial or legal advice-related product)**

 To check a model card, first load it by doing any one of the following:
-- If the model is on the Hugging Face Hub, …
+- If the model is on the Hugging Face Hub, search for a model and select it from the results.
 - If you have the model card on your computer as a Markdown file, select the "Upload your own card" tab and click \
 "Upload a Markdown file".
 - Paste your model card's text directly into the "Model Card" text area.
@@ -93,48 +112,48 @@ code {
     with gr.Row():
         with gr.Column():
             with gr.Tab(label="Load a card from the 🤗 Hugging Face Hub"):
-…
-…
-…
-…
-…
-…
-…
-…
-…
-                    inputs=[model_id_search],
-                    outputs=[model_card_box]
-                    # cache_examples=True, # TODO: Why does this break the app?
+                with gr.Row():
+                    model_id_search = gr.Text(label="Model ID")
+
+                search_results_text = gr.Text(visible=False, value=",".join([e[0] for e in examples]))
+                search_results_index = gr.Dataset(
+                    label="Search Results",
+                    components=[model_id_search],
+                    samples=examples,
+                    type="index",
                 )

-…
+                model_id_search.change(
+                    fn=search_for_models,
+                    inputs=[model_id_search],
+                    outputs=[search_results_index, search_results_text]
+                )

             with gr.Tab(label="Upload your own card"):
                 file = gr.UploadButton(label="Upload a Markdown file", elem_id="file-upload")
                 # TODO: Bug – uploading more than once doesn't trigger the function? Gradio bug?
                 file.upload(
-                    fn=…
+                    fn=read_file,
                     inputs=[file],
-                    outputs=[model_card_box…
+                    outputs=[model_card_box]
                 )

         model_card_box.render()

         with col.render():
-            submit_markdown.render()
             with tab.render():
                 with col2.render():
                     for a, d in compliance_results:
                         with a.render():
                             d.render()

-…
-    fn=…
-    inputs=[…
-    outputs=[model_card_box…
+    search_results_index.click(
+        fn=load_model_card,
+        inputs=[search_results_index, search_results_text],
+        outputs=[model_card_box]
     )

-…
+    model_card_box.change(
         fn=run_compliance_check,
         inputs=[model_card_box],
         outputs=[*compliance_accordions, *compliance_descriptions]
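The search UX wired up in this hunk relies on a pattern worth spelling out: a gr.Dataset with type="index" only reports which row was clicked, so a hidden gr.Text keeps a comma-joined copy of the current options for the click handler to index into. Below is a rough standalone sketch of that pattern, assuming Gradio 3.x as used by the Space; the component names and the sample data are invented for illustration, and updating the Dataset by returning a plain list of samples simply mirrors what search_for_models does above:

import gradio as gr

OPTIONS = ["apple", "apricot", "banana", "blueberry", "cherry"]

def search(query: str):
    # Return both the Dataset samples and the comma-joined options string,
    # the same two outputs search_for_models produces.
    hits = [o for o in OPTIONS if query.lower() in o] or OPTIONS
    return [[h] for h in hits], ",".join(hits)

def pick(index: int, options_string: str):
    # The Dataset only gives back a row index; resolve it through the hidden
    # Text, the same way load_model_card recovers the model ID.
    return options_string.split(",")[index]

with gr.Blocks() as demo:
    query_box = gr.Text(label="Search")
    options_text = gr.Text(visible=False, value=",".join(OPTIONS))
    results = gr.Dataset(
        label="Results",
        components=[query_box],
        samples=[[o] for o in OPTIONS],
        type="index",
    )
    chosen = gr.Text(label="Chosen option")

    query_box.change(fn=search, inputs=[query_box], outputs=[results, options_text])
    results.click(fn=pick, inputs=[results, options_text], outputs=[chosen])

demo.launch()

Keeping the options in a hidden Text, rather than relying on the Dataset contents alone, is what lets the click handler turn a bare index back into a full model ID.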