NimaBoscarino committed · Commit 490bc75
Parent(s): e814211

UI/UX Overhaul
- app.py +46 -31
- tests/conftest.py +1 -1
app.py CHANGED
@@ -4,7 +4,6 @@ from huggingface_hub import ModelCard
 from compliance_checks import (
     ComplianceSuite,
     ComplianceCheck,
-    ModelProviderIdentityCheck,
     IntendedPurposeCheck,
     GeneralLimitationsCheck,
     ComputationalRequirementsCheck,
@@ -13,7 +12,6 @@ from compliance_checks import (
 from bloom_card import bloom_card
 
 checks = [
-    ModelProviderIdentityCheck(),
     IntendedPurposeCheck(),
     GeneralLimitationsCheck(),
     ComputationalRequirementsCheck(),
@@ -47,51 +45,68 @@ def compliance_result(compliance_check: ComplianceCheck):
     return accordion, description
 
 
-
+def read_file(file_obj):
+    with open(file_obj.name) as f:
+        return f.read()
+
+
+model_card_box = gr.TextArea(label="Model Card")
+
+with gr.Blocks(css="#reverse-row { flex-direction: row-reverse;} #file-upload .boundedheight {max-height: 100px;}") as demo:
     gr.Markdown("""\
-    #
-
+    # RegCheck AI
+    This Space uses model cards’ information as a source of regulatory compliance with some provisions of the proposed [EU AI Act](https://eur-lex.europa.eu/legal-content/EN/TXT/?uri=celex%3A52021PC0206). For the moment being, the demo is a **prototype** limited to specific provisions of Article 13 of the AI Act, related to “Transparency and provision of information to users”. Choose a model card and check whether it has some useful info to comply with the EU AI Act! **(DISCLAIMER: this is NOT a commercial or legal advice-related product)**
+
     """)
 
     with gr.Row(elem_id="reverse-row"):
-        with gr.
-
-
-
-
+        with gr.Column():
+            submit_markdown = gr.Button(value="Run validation checks")
+            with gr.Tab(label="Results"):
+                with gr.Column():
+                    compliance_results = [compliance_result(c) for c in suite.checks]
+                    compliance_accordions = [c[0] for c in compliance_results]
+                    compliance_descriptions = [c[1] for c in compliance_results]
 
         with gr.Column():
-            with gr.Tab(label="
-
-                populate_sample_card = gr.Button(value="Populate Sample")
-                submit_markdown = gr.Button()
-            with gr.Tab(label="Search for Model"):
-                model_id_search = gr.Text()
-                submit_model_search = gr.Button()
+            with gr.Tab(label="Load a card from the 🤗 Hugging Face Hub"):
+                model_id_search = gr.Text(label="Model ID")
                 gr.Examples(
-                    examples=[
+                    examples=[
+                        "society-ethics/model-card-webhook-test",
+                        "bigscience/bloom",
+                        "roberta-base",
+                        "openai/clip-vit-base-patch32",
+                        "distilbert-base-cased-distilled-squad",
+                    ],
+                    fn=lambda x: ModelCard.load(repo_id_or_path=x).content,
                     inputs=[model_id_search],
-                    outputs=[
-                    fn=fetch_and_run_compliance_check,
+                    outputs=[model_card_box]
                     # cache_examples=True, # TODO: Why does this break the app?
                 )
 
-
-
-
-
-
+                submit_model_search = gr.Button(value="Load model card")
+
+            with gr.Tab(label="Upload your own card"):
+                file = gr.UploadButton(label="Upload a Markdown file", elem_id="file-upload")
+                file.upload(
+                    fn=read_file,
+                    inputs=[file],
+                    outputs=[model_card_box]
+                )
+
+            model_card_box.render()
 
     submit_model_search.click(
-        fn=
+        fn=lambda x: ModelCard.load(repo_id_or_path=x).content,
         inputs=[model_id_search],
-        outputs=[
+        outputs=[model_card_box]
     )
 
-
-        fn=
-        inputs=[],
-        outputs=[
+    submit_markdown.click(
+        fn=run_compliance_check,
+        inputs=[model_card_box],
+        outputs=[*compliance_accordions, *compliance_descriptions]
     )
 
 demo.launch()
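For reference, below is a minimal, self-contained sketch (not part of the commit) of the card-loading flow the new UI wires up: a model card is either fetched from the Hub with ModelCard.load or read from an uploaded Markdown file, and both paths write into a shared text area. The compliance-check plumbing is omitted, and the component names here only mirror the diff; the snippet is illustrative rather than the app itself.

# Illustrative sketch (not the committed app): the two ways a model card
# reaches the shared text area in the new UI.
import gradio as gr
from huggingface_hub import ModelCard


def fetch_card(model_id: str) -> str:
    # ModelCard.load pulls the repo's README.md; .content is its raw Markdown text.
    return ModelCard.load(repo_id_or_path=model_id).content


def read_file(file_obj) -> str:
    # Same helper as in the diff: gr.UploadButton hands over a temp file object.
    with open(file_obj.name) as f:
        return f.read()


with gr.Blocks() as demo:
    model_card_box = gr.TextArea(label="Model Card")
    model_id_search = gr.Text(label="Model ID")
    load_card = gr.Button(value="Load model card")
    upload = gr.UploadButton(label="Upload a Markdown file")

    # Either event writes the card's Markdown into the shared text area.
    load_card.click(fn=fetch_card, inputs=[model_id_search], outputs=[model_card_box])
    upload.upload(fn=read_file, inputs=[upload], outputs=[model_card_box])

demo.launch()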
tests/conftest.py CHANGED
@@ -27,7 +27,7 @@ expected_check_results = {
     "openai___clip-vit-large-patch14": [True, True, False],
     "philschmid___bart-large-cnn-samsum": [False, False, False],
     "prajjwal1___bert-tiny": [False, False, False],
-    "roberta-base": [True, True, True],
+    "roberta-base": [True, True, True],  # For the computational requirements, sort of?
     "roberta-large": [True, True, True],
     "runwayml___stable-diffusion-v1-5": [True, True, True],
     "sentence-transformers___all-MiniLM-L6-v2": [True, False, False],
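For context, a hypothetical sketch of how a table like expected_check_results can drive a parametrized test; the test name and assertions below are illustrative assumptions, not the repository's actual test harness.

# Hypothetical illustration only: feeding a results table into pytest.mark.parametrize.
# The repo's real fixtures and tests may differ.
import pytest

expected_check_results = {
    # card name -> [IntendedPurpose, GeneralLimitations, ComputationalRequirements]
    "prajjwal1___bert-tiny": [False, False, False],
    "roberta-base": [True, True, True],
    "roberta-large": [True, True, True],
}


@pytest.mark.parametrize("card_name,expected", expected_check_results.items())
def test_result_shape(card_name, expected):
    # Each entry carries exactly one verdict per remaining check (three after this commit,
    # since ModelProviderIdentityCheck was removed).
    assert len(expected) == 3
    assert all(isinstance(verdict, bool) for verdict in expected)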