sam2ai committed
Commit 1c1883d • 1 Parent(s): 377ca50

Update app.py

Files changed (1)
  1. app.py +57 -11
app.py CHANGED
@@ -3,13 +3,16 @@ import random
 import time
 import requests
 import json
+import os
 
 
-def http_yield(prompt):
-    print(prompt)
-    bot_message = random.choice(["How are you?", "I love you", "I'm very hungry"])
-    for character in bot_message:
-        yield character
+
+
+# def http_yield(prompt):
+#     print(prompt)
+#     bot_message = random.choice(["How are you?", "I love you", "I'm very hungry"])
+#     for character in bot_message:
+#         yield character
 
 def http_bot_yield(prompt):
     headers = {"User-Agent": "vLLM Client"}
@@ -35,13 +38,53 @@ def http_bot_yield(prompt):
         # print("output --->", output)
         yield output
 
+
+def vote(data: gr.LikeData):
+    if data.liked:
+        print("You upvoted this response: " + data.value)
+        return
+    else:
+        print("You downvoted this response: " + data.value)
+
+
+
+title_markdown = ("""
+<h1 align="center"><a href="https://www.odiagenai.org/"><img src="//custom-images.strikinglycdn.com/res/hrscywv4p/image/upload/c_limit,fl_lossy,h_9000,w_1200,f_auto,q_auto/11567154/889956_627486.png", alt="mPLUG-Owl" border="0" style="margin: 0 auto; height: 100px;" /></a> </h1>
+<h2 align="center"> 🐒 Olive: OdiaGPT Model built by the OdiaGenAI Team </h2>
+""")
+
 with gr.Blocks() as demo:
-    chatbot = gr.Chatbot()
-    msg = gr.Textbox()
+    with gr.Row():
+        gr.Markdown(title_markdown)
+
+
+    chatbot = gr.Chatbot(
+        [],
+        elem_id="chatbot",
+        bubble_full_width=False,
+        avatar_images=(None, (os.path.join(os.path.dirname(__file__), "olive_final_logo.png"))),
+    )
+    msg = gr.Textbox(scale=4,
+        show_label=False,
+        placeholder="Enter text and press enter",
+        container=False
+    )
+    submit_btn = gr.Button(value="Submit")
     clear = gr.Button("Clear")
+
+    gr.Examples(examples=[
+        ["Explain why this meme is funny."],
+        ['Can you write me a master rap song that rhymes very well based on this image?'],
+        ['What happened at the end of this movie?'],
+        ['What is funny about this image? Describe it panel by panel.'],
+        ['We design new mugs shown in the image. Can you help us write an advertisement?'],
+        ['Why this happens and how to fix it?'],
+        ["What do you think about the person's behaviour?"],
+        ['Do you know who drew this painting?'],
+    ], inputs=[msg])
 
     def user(user_message, history):
-        print("", history + [[user_message, None]])
+        # print("", history + [[user_message, None]])
         return "", history + [[user_message, None]]
 
     def bot(history):
@@ -57,12 +100,15 @@ with gr.Blocks() as demo:
            # print("yield --- > ", b_text)
            time.sleep(0.05)
            yield history
+        # push_to_comet(prompt, history[-1][1])
 
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
+    submit_btn.click(user, [msg, chatbot], [msg, chatbot], queue=False).then(
+        bot, chatbot, chatbot)
    clear.click(lambda: None, None, chatbot, queue=False)
-
-demo.title = "🐒 Olive: OdiaGPT Model built by the OdiaGenAI Team"
+    # chatbot.like(vote, None, None)
+
 demo.queue()
-demo.launch()
+demo.launch(debug=True)
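
For reference, a minimal sketch of how the new vote callback could be hooked up once the commented-out chatbot.like(...) line is enabled. The vote function and chatbot component mirror the definitions added above; the standalone scaffolding, the str(...) guard, and the assumption of a Gradio release whose Chatbot exposes a .like event are illustrative, not part of this commit.

import gradio as gr

def vote(data: gr.LikeData):
    # data.liked is True for a thumbs-up, False for a thumbs-down;
    # data.value carries the content of the rated message.
    if data.liked:
        print("You upvoted this response: " + str(data.value))
    else:
        print("You downvoted this response: " + str(data.value))

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    # Enabling the equivalent line in app.py attaches the callback to like/dislike clicks.
    chatbot.like(vote, None, None)

demo.launch()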