import argparse

import gradio as gr

from ui import chat


def main(args):
    # Build a chat UI around the `chat` callback defined in ui.py.
    demo = gr.ChatInterface(
        fn=chat,
        examples=["hello", "how are you?", "What is a Large Language Model?"],
        title="Gradio 🤝 TGI",
        description="This space is a template that you can fork/duplicate for your own usage. "
        "This space lets you build LLM-powered ideas on top of [Gradio](https://www.gradio.app/) "
        "and an open LLM served locally by [TGI (Text Generation Inference)](https://huggingface.co/docs/text-generation-inference/en/index). "
        "To use this space, [duplicate]() this space, set which model you want to use (e.g. mistralai/Mistral-7B-Instruct-v0.2), then "
        "you are all good to go. Just focus on the implementation of your idea 💡. For your convenience, this space also provides "
        "some handy [utility functions](https://huggingface.co/spaces/chansung/gradio_together_tgi/blob/main/app/gen/openllm.py) to asynchronously generate text by interacting with the locally served LLM.",
        multimodal=False,
    )

    # Enable request queuing and expose the app on all network interfaces.
    demo.queue().launch(server_name="0.0.0.0", server_port=args.port)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="This is my Gradio app's description")
    parser.add_argument("--port", type=int, default=7860, help="Port to expose Gradio app")
    args = parser.parse_args()

    main(args)
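# For reference, `chat` is imported from ui.py and must follow Gradio's
# ChatInterface contract: fn(message, history) returning (or yielding) a string.
# Below is a minimal, hypothetical sketch of such a function, assuming TGI is
# serving on http://0.0.0.0:8080 and using huggingface_hub's InferenceClient;
# the actual implementation in ui.py / app/gen/openllm.py may differ.
#
#   from huggingface_hub import InferenceClient
#
#   client = InferenceClient("http://0.0.0.0:8080")  # assumed local TGI endpoint
#
#   def chat(message, history):
#       partial = ""
#       # Stream tokens from the locally served LLM and yield growing partial
#       # responses so the Gradio UI updates incrementally.
#       for token in client.text_generation(message, max_new_tokens=512, stream=True):
#           partial += token
#           yield partial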