File size: 1,470 Bytes
016174f
0d32d53
d32a13b
d87df45
016174f
 
f9aa63c
016174f
ce67b54
 
 
 
 
 
 
016174f
 
 
 
0d32d53
016174f
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
import argparse
import gradio as gr
from ui import chat

def main(args):
    """Build the Gradio ChatInterface and launch it as a blocking web server.

    Args:
        args: Parsed CLI namespace; only ``args.port`` (int) is read, used as
            the TCP port the Gradio server binds to.

    Side effects:
        Starts a Gradio HTTP server on 0.0.0.0:<args.port>; blocks until the
        server is shut down.
    """
    demo = gr.ChatInterface(
        fn=chat, 
        examples=["hello", "how are you?", "What is Large Language Model?"], 
        title="Gradio 🀝 TGI",
        # NOTE(review): the "[duplicate]()" markdown link below has an empty
        # URL target — confirm the intended destination and fill it in.
        description="This space is a template that you can fork/duplicate for your own usage. "
                    "This space lets you build LLM powered idea on top of [Gradio](https://www.gradio.app/) " 
                    "and open LLM served locally by [TGI(Text Generation Inference)](https://huggingface.co/docs/text-generation-inference/en/index). "
                    "To use this space, [duplicate]() this space, set which model you want to use (i.e. mistralai/Mistral-7B-Instruct-v0.2), then "
                    "you are all good to go. Just focus on the implementation of your idea πŸ’‘. For your convenience, this space also provides "
                    "some handy [utility functions](https://huggingface.co/spaces/chansung/gradio_together_tgi/blob/main/app/gen/openllm.py) to asynchronously generate text by interacting with the locally served LLM.",
        multimodal=False
    )
    
    # Bind to all interfaces so the app is reachable inside a container.
    demo.queue().launch(server_name="0.0.0.0", server_port=args.port)

if __name__ == "__main__":
    # CLI entry point: read the serving port from the command line, then
    # hand control to main(), which launches the Gradio server.
    parser = argparse.ArgumentParser(description="This is my Gradio app's description")
    parser.add_argument("--port", type=int, default=7860, help="Port to expose Gradio app")
    main(parser.parse_args())