# -*- coding: utf-8 -*-
# ===================================================
#
# Author : Fan Zhang
# Email : zhangfan@baai.ac.cn
# Institute : Beijing Academy of Artificial Intelligence (BAAI)
# Create On : 2023-12-11 15:34
# Last Modified : 2023-12-20 12:45
# File Name : app.py
# Description :
#
# ===================================================
import argparse
import gradio as gr
from demo.generation_frontend import build_generation
from demo.chat_frontend import build_chat
def _build_arg_parser() -> argparse.ArgumentParser:
    """Create the command-line parser for the demo web app."""
    p = argparse.ArgumentParser()
    # Page / server options.
    p.add_argument("--title", type=str, default="Emu")
    p.add_argument("--host", type=str, default="0.0.0.0")
    p.add_argument("--port", type=int, default=9002)
    p.add_argument("--share", action="store_true")
    # Backend controller endpoint serving the model workers.
    p.add_argument("--controller-url", type=str, default="http://218.91.113.230:9003")
    p.add_argument("--concurrency-count", type=int, default=2)
    # Flags to hide either demo tab.
    p.add_argument("--disable-chat", action="store_true")
    p.add_argument("--disable-generate", action="store_true")
    return p


# Parsed at import time so both tabs' builders can read the options.
parser = _build_arg_parser()
args = parser.parse_args()
if __name__ == "__main__":
    # NOTE: the backslash continuations below are inside the string literal,
    # so the leading indentation of each continued line is part of the title
    # HTML — kept verbatim.
    title = "Emu2: Generative Multimodal Models are In-Context Learners<br> \
        <div align='center'> \
            <h2>|<a href='https://arxiv.org/abs/2307.05222' target='_blank' rel='noopener'>paper</a>| \
            |<a href='https://github.com/baaivision/Emu' target='_blank' rel='noopener'>code</a>|</h2> \
        </div> \
        <div align='center'> \
            <h2>|<a href='https://jwolpxeehx.feishu.cn/docx/KskPdU99FomufKx4G9hcQMeQnHv' target='_blank' rel='noopener'>使用说明</a>| \
            |<a href='https://jwolpxeehx.feishu.cn/docx/RYHNd1tvEo8k8Mx9HeMcvvxWnvZ' target='_blank' rel='noopener'>User Guide</a>|</h2> \
        <div align='left'> \
    "

    # Assemble one tab per enabled demo; generation comes first when both
    # are active.
    panels = []
    labels = []
    if not args.disable_generate:
        panels.append(build_generation(args))
        labels.append("Multi-modal Generation")
    if not args.disable_chat:
        panels.append(build_chat(args))
        labels.append("Multi-modal Chat")

    tabbed_demo = gr.TabbedInterface(
        interface_list=panels,
        tab_names=labels,
        title=title,
        theme=gr.themes.Default(primary_hue="blue", secondary_hue="blue"),
    )

    # Queue requests so concurrent users are rate-limited, then start the
    # web server.
    queued = tabbed_demo.queue(
        max_size=20,
        status_update_rate=3,
        api_open=False,
        default_concurrency_limit=args.concurrency_count,
    )
    queued.launch(share=args.share)