use standard generate
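This commit replaces the token-streaming call to the Text Generation Inference endpoint with a single blocking `generate` call. A minimal before/after sketch, assuming the `text_generation` Python client the app already uses; the endpoint URL, prompt, and decoding parameters below are placeholders for illustration, since the real app builds them from its own helpers (`prompt_list_to_tgi_input` and the decoding-strategy settings):

from text_generation import Client

# Placeholder values for illustration only; the app derives these from its UI state.
client = Client(base_url="http://localhost:8080", timeout=30)
query = "User: Write a meme caption for this image.<end_of_utterance>\nAssistant:"
generation_args = {"max_new_tokens": 256, "temperature": 1.0}

# Before: stream tokens and accumulate them one by one.
acc_text = ""
for response in client.generate_stream(prompt=query, **generation_args):
    if not response.token.special:
        acc_text += response.token.text

# After ("use standard generate"): one call returns the full caption.
text = client.generate(prompt=query, **generation_args).generated_text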
app_dialogue.py +61 -71
app_dialogue.py
CHANGED
@@ -355,6 +355,7 @@ def resize_with_ratio(image: PIL.Image.Image, fixed_width: int) -> PIL.Image.Image:
 
     return resized_img
 
+
 def make_new_lines(draw, image, font, text_is_too_long, lines, num_lines, num_loops):
     max_len_increment = 0
     while text_is_too_long and max_len_increment < 10:
@@ -389,6 +390,7 @@ def make_new_lines(draw, image, font, text_is_too_long, lines, num_lines, num_loops):
         lines = new_lines
     return lines, text_width, text_is_too_long
 
+
 def test_font_size(
     draw,
     image,
@@ -546,9 +548,11 @@ def format_user_prompt_with_im_history_and_system_conditioning(
 
     return resulting_list, current_user_prompt_list
 
+
 def expand_layout():
     return gr.Column(scale=2), gr.Gallery(height=682)
 
+
 textbox = gr.Textbox(
     placeholder="Upload an image and ask the AI to create a meme!",
     show_label=False,
@@ -565,17 +569,17 @@ chatbot = gr.Chatbot(
     visible=False,
     avatar_images=[None, BOT_AVATAR],
 )
-css=
+css = """
 .gradio-container{max-width: 1000px!important}
 h1{display: flex;align-items: center;justify-content: center;gap: .25em}
 *{transition: width 0.5s ease, flex-grow 0.5s ease}
-
+"""
 with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as demo:
     with gr.Row(scale=0.5):
         gr.HTML(
             """<h1 align="center">AI Meme Generator <span style="font-size: 13px;">powered by <a href="https://huggingface.co/blog/idefics">IDEFICS</a></span><img width=40 height=40 src="https://cdn-uploads.huggingface.co/production/uploads/624bebf604abc7ebb01789af/v770xGti5vH1SYLBgyOO_.png" /></h1>"""
         )
-
+
     with gr.Row(elem_id="model_selector_row"):
         model_selector = gr.Dropdown(
             choices=MODELS,
@@ -587,7 +591,7 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as demo:
             visible=False,
         )
     with gr.Row(equal_height=True):
-        #scale=2 when expanded
+        # scale=2 when expanded
         with gr.Column(scale=4, min_width=250) as upload_area:
             imagebox = gr.Image(
                 type="filepath", label="Image to meme", height=272, visible=True
@@ -596,13 +600,20 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as demo:
             with gr.Row():
                 textbox.render()
             with gr.Row():
-                submit_btn = gr.Button(
+                submit_btn = gr.Button(
+                    value="▶️ Submit", visible=True, min_width=120
+                )
                 clear_btn = gr.ClearButton(
                     [textbox, imagebox, chatbot], value="🧹 Clear", min_width=120
                 )
-                regenerate_btn = gr.Button(
+                regenerate_btn = gr.Button(
+                    value="🔄 Regenerate", visible=True, min_width=120
+                )
                 upload_btn = gr.UploadButton(
-                    "📁 Upload image",
+                    "📁 Upload image",
+                    file_types=["image"],
+                    visible=False,
+                    min_width=120,
                 )
             with gr.Accordion(
                 "Advanced settings", open=False, visible=True
@@ -717,8 +728,9 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as demo:
                 show_download_button=True,
                 show_share_button=True,
                 columns=[2],
-                object_fit="contain",
-
+                object_fit="contain",
+                height=428,
+            )  # height 600 when expanded
             with gr.Row(equal_height=True):
                 with gr.Box(elem_id="gallery_box"):
                     gallery_type_choice = gr.Radio(
@@ -799,7 +811,7 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as demo:
         client = Client(
             base_url=client_endpoint,
             headers={"x-use-cache": "0", "Authorization": f"Bearer {API_TOKEN}"},
-            timeout=30
+            timeout=30,
         )
 
         # Common parameters to all decoding strategies
@@ -838,61 +850,29 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as demo:
         query = prompt_list_to_tgi_input(formated_prompt_list)
        all_meme_images = []
        for i in range(4):
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                    text_at_the_top=text_at_the_top,
-                )
-                meme_image = pil_to_temp_file(meme_image)
-                all_meme_images.append(meme_image)
-                yield user_prompt_str, all_meme_images, chat_history
-                if i == 3:
-                    return
-
-            if text_token in STOP_SUSPECT_LIST:
-                acc_text += text_token
-                continue
-
-            if idx == 0 and text_token.startswith(" "):
-                text_token = text_token.lstrip()
-
-            acc_text += text_token
-            # Commented to not have a chatbot history that could confuse user
-
-            # last_turn = chat_history.pop(-1)
-            # last_turn[-1] += acc_text
-            # if last_turn[-1].endswith("\nUser"):
-            #     # Safeguard: sometimes (rarely), the model won't generate the token `<end_of_utterance>` and will go directly to generating `\nUser:`
-            #     # It will thus stop the generation on `\nUser:`. But when it exits, it will have already generated `\nUser`
-            #     # This post-processing ensures that we don't have an additional `\nUser` wandering around.
-            #     last_turn[-1] = last_turn[-1][:-5]
-            # chat_history.append(last_turn)
-            # yield "", None, chat_history
-            full_text += acc_text
-            acc_text = ""
+            text = client.generate(prompt=query, **generation_args).generated_text
+            if image is not None and text != "":
+                meme_image = make_meme_image(
+                    image=image,
+                    text=text,
+                    font_meme_text=font_meme_text,
+                    all_caps_meme_text=all_caps_meme_text,
+                    text_at_the_top=text_at_the_top,
+                )
+                meme_image = pil_to_temp_file(meme_image)
+                all_meme_images.append(meme_image)
+                yield user_prompt_str, all_meme_images, chat_history
+                if i == 3:
+                    return
+
     gr.on(
-        triggers=[
-            textbox.submit,
-            imagebox.upload,
-            submit_btn.click
-        ],
+        triggers=[textbox.submit, imagebox.upload, submit_btn.click],
         fn=expand_layout,
         outputs=[upload_area, generated_memes_gallery],
-        queue=False
+        queue=False,
+    ).then(
+        fn=lambda: "", inputs=[], outputs=[generated_memes_gallery], queue=False
     ).then(
-        fn=lambda: "", inputs=[], outputs=[generated_memes_gallery], queue=False).then(
         fn=model_inference,
         inputs=[
            model_selector,
@@ -926,7 +906,7 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as demo:
         fn=remove_last_turn,
         inputs=chatbot,
         outputs=[chatbot, textbox, generated_memes_gallery],
-        queue=False
+        queue=False,
     ).then(
         fn=model_inference,
         inputs=[
@@ -953,13 +933,22 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as demo:
 
     upload_btn.upload(add_file, [upload_btn], [imagebox, upload_btn], queue=False)
     submit_btn.click(
-        lambda: gr.update(label="📁 Upload image", interactive=True),
+        lambda: gr.update(label="📁 Upload image", interactive=True),
+        [],
+        upload_btn,
+        queue=False,
     )
     textbox.submit(
-        lambda: gr.update(label="📁 Upload image", interactive=True),
+        lambda: gr.update(label="📁 Upload image", interactive=True),
+        [],
+        upload_btn,
+        queue=False,
    )
     clear_btn.click(
-        lambda: gr.update(label="📁 Upload image", interactive=True),
+        lambda: gr.update(label="📁 Upload image", interactive=True),
+        [],
+        upload_btn,
+        queue=False,
    )
     gallery_type_choice.change(
         fn=choose_gallery,
@@ -971,11 +960,9 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as demo:
         fn=add_file_gallery,
         inputs=[template_gallery],
         outputs=[textbox, imagebox, generated_memes_gallery],
-        queue=False
+        queue=False,
    ).success(
-        fn=expand_layout,
-        outputs=[upload_area, generated_memes_gallery],
-        queue=False
+        fn=expand_layout, outputs=[upload_area, generated_memes_gallery], queue=False
    ).success(
         fn=model_inference,
         inputs=[
@@ -1000,7 +987,10 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as demo:
         ],
     )
     demo.load(
-        fn=choose_gallery,
+        fn=choose_gallery,
+        inputs=[gallery_type_choice],
+        outputs=[template_gallery],
+        queue=False,
    )
 demo.queue(concurrency_count=4, max_size=40)
-demo.launch(max_threads=400)
+demo.launch(max_threads=400)