|
import gradio as gr |
|
from resources import * |
|
from transformers import pipeline |
|
|
|
|
|
|
|
# Zero-shot text classifier used by the "Bellamy Bowie" tab: scores a message
# against a set of persona labels via the facebook/bart-large-mnli NLI model.
bellamy_bowie_classifier = pipeline("zero-shot-classification", model="facebook/bart-large-mnli")


# Full set of persona labels offered in the CheckboxGroup.
bellamy_bowie_classifier_candidate_labels = ["manager", "engineer", "technician", "politician", "scientist", "student", "journalist", "marketeer", "spokesperson", "other"]

# Labels ticked by default when the tab loads (subset of the full list above).
bellamy_bowie_classifier_candidate_labels_preselection = ["manager", "engineer", "technician", "politician", "scientist", "student", "journalist"]
|
|
|
|
|
def bellamy_bowie_predict(candidate_labels_selected, sequence):
    """Score *sequence* against the selected persona labels.

    Args:
        candidate_labels_selected: list of labels ticked in the CheckboxGroup.
        sequence: the message text to classify.

    Returns:
        dict mapping each label to its zero-shot matching score (suitable for
        a ``gr.Label`` output). Empty dict when no labels are selected or the
        text is empty — the pipeline rejects an empty candidate list, and this
        guard prevents a crash when the user submits without input.
    """
    if not candidate_labels_selected or not sequence:
        return {}
    outputs = bellamy_bowie_classifier(sequence, candidate_labels_selected)
    # Pipeline returns parallel 'labels' / 'scores' lists; zip them into a dict.
    return dict(zip(outputs['labels'], outputs['scores']))
|
|
|
|
|
|
|
|
|
# Image-captioning pipeline used by the "Ellis Cappy" tab; max_new_tokens=40
# caps the length of the generated caption.
ellis_cappy_captioner = pipeline("image-to-text", model="Salesforce/blip-image-captioning-base", max_new_tokens=40)
|
|
|
|
|
def ellis_cappy_captionizer(img):
    """Return a suggested caption for *img*.

    Args:
        img: PIL image from the ``gr.Image`` input, or ``None`` when the user
            clicks Submit without uploading an image.

    Returns:
        The generated caption string (first candidate returned by the
        captioning pipeline), or ``""`` when no image was provided — guarding
        against the crash the pipeline would raise on ``None``.
    """
    if img is None:
        return ""
    captions = ellis_cappy_captioner(img)
    return captions[0]["generated_text"]
|
|
|
|
|
# ---------------------------------------------------------------------------
# UI layout: one Blocks app with a classifier tab and a captioning tab.
# ---------------------------------------------------------------------------
with gr.Blocks() as aidademo:

    # Tab 1: zero-shot persona classifier.
    with gr.Tab("Bellamy Bowie"):
        with gr.Row():
            with gr.Column(scale=3):
                gr.HTML(bellamy_bowie_description)
            with gr.Column(scale=1):
                gr.Image(bellamy_bowie_hero)
        with gr.Row():
            with gr.Column(scale=1):
                # BUG FIX: the original assignment ended with a stray trailing
                # comma, which made this name a 1-tuple (CheckboxGroup,) rather
                # than the component itself and broke the click() inputs wiring
                # below.
                bellamy_bowie_checkbox_input = gr.CheckboxGroup(
                    choices=bellamy_bowie_classifier_candidate_labels,
                    value=bellamy_bowie_classifier_candidate_labels_preselection,
                    label="Target personas of your message",
                    info="Recommendation: Don't change the preselection for your first analysis.",
                )
                bellamy_bowie_textbox_input = gr.Textbox(lines=10, placeholder="Your text goes here", label="Write or paste your message to classify")
                bellamy_bowie_submit_button = gr.Button("Submit")
            with gr.Column(scale=1):
                bellamy_bowie_outputs = gr.Label(label="Matching scores by personas")
                gr.HTML(bellamy_bowie_note_quality)
        with gr.Row():
            with gr.Column(scale=1):
                gr.Examples(bellamy_bowie_examples, inputs=[bellamy_bowie_textbox_input])
        gr.HTML(bellamy_bowie_article)

    # NOTE(review): two tabs share the label "Ellis Cappy" — this first one
    # looks like an intro/teaser tab (description + hero image only). Confirm
    # whether it should carry a distinct label or be removed.
    with gr.Tab("Ellis Cappy"):
        gr.HTML(ellis_cappy_description)
        gr.Image("https://images.nightcafe.studio/jobs/1tLpG6zZANbrgG4ds8wF/1tLpG6zZANbrgG4ds8wF--4--andnn.jpg",
                 min_width=548)

    # Tab 2: image-captioning demo.
    with gr.Tab("Ellis Cappy"):
        with gr.Row():
            with gr.Column(scale=3):
                gr.HTML(ellis_cappy_description)
            with gr.Column(scale=1):
                gr.Image(ellis_cappy_hero)
        with gr.Row():
            with gr.Column(scale=1):
                ellis_cappy_image_input = gr.Image(type="pil", label=None)
                ellis_cappy_submit_button = gr.Button("Submit")
            with gr.Column(scale=1):
                ellis_cappy_textbox_output = gr.Textbox(label="Suggested caption", lines=2)
                gr.HTML(ellis_cappy_note_quality)
        with gr.Row():
            with gr.Column(scale=1):
                gr.Examples(ellis_cappy_examples, inputs=[ellis_cappy_image_input])
        gr.HTML(ellis_cappy_article)

    # Event wiring: each Submit button calls its model function defined above.
    ellis_cappy_submit_button.click(fn=ellis_cappy_captionizer,
                                    inputs=ellis_cappy_image_input,
                                    outputs=ellis_cappy_textbox_output,
                                    api_name="captionizer")
    bellamy_bowie_submit_button.click(fn=bellamy_bowie_predict,
                                      inputs=[bellamy_bowie_checkbox_input, bellamy_bowie_textbox_input],
                                      outputs=bellamy_bowie_outputs)

aidademo.launch()
|
|