# Gradio streaming-chat demo (HuggingFace Spaces app).
# NOTE: scraper residue (Spaces page header, commit hashes, line-number gutter)
# removed — it was not part of the original program.
import gradio as gr
import random
import time
import requests
import json
import os
# def http_yield(prompt):
# print(prompt)
# bot_message = random.choice(["How are you?", "I love you", "I'm very hungry"])
# for character in bot_message:
# yield character
def http_bot_yield(prompt, api_url='http://164.52.200.104/generate',
                   max_tokens=1024, temperature=0.1):
    """Stream a model completion for *prompt* from a vLLM HTTP endpoint.

    Sends a Mistral-style ``[INST]`` chat request with ``stream=True`` and
    yields the response text each time a new chunk arrives.  The server
    streams NUL-delimited JSON documents, each carrying the full text
    generated so far, so every yield replaces (not extends) the previous one.

    Args:
        prompt: User message to send to the model.
        api_url: vLLM ``/generate`` endpoint (default keeps the original
            hard-coded host).
        max_tokens: Cap on generated tokens.
        temperature: Sampling temperature.

    Yields:
        str: Response text generated so far, with the ``[/INST]`` prompt
        prefix stripped.

    Raises:
        requests.HTTPError: If the server answers with a 4xx/5xx status.
    """
    headers = {"User-Agent": "vLLM Client"}
    payload = {
        "prompt": f"<s>[INST] {prompt} [/INST] ",
        "stream": True,
        "max_tokens": max_tokens,
        "temperature": temperature,
    }
    response = requests.post(api_url, headers=headers, json=payload,
                             stream=True)
    # Fail fast on an error status instead of trying to JSON-decode an
    # HTML error page chunk by chunk.
    response.raise_for_status()
    # vLLM delimits streamed JSON documents with NUL bytes.
    for chunk in response.iter_lines(chunk_size=8192,
                                     decode_unicode=False,
                                     delimiter=b"\0"):
        if chunk:
            data = json.loads(chunk.decode("utf-8"))
            # data["text"][0] contains prompt + completion; keep only the
            # text after the closing instruction tag.
            yield data["text"][0].split('[/INST] ')[-1]
def vote(data: gr.LikeData):
    """Log whether the user up-voted or down-voted a chatbot response."""
    if data.liked:
        print("You upvoted this response: " + data.value)
    else:
        print("You downvoted this response: " + data.value)
# HTML banner rendered at the top of the demo: OdiaGenAI logo + app title.
title_markdown = ("""
<h1 align="center"><a href="https://www.odiagenai.org/"><img src="//custom-images.strikinglycdn.com/res/hrscywv4p/image/upload/c_limit,fl_lossy,h_9000,w_1200,f_auto,q_auto/11567154/889956_627486.png", alt="mPLUG-Owl" border="0" style="margin: 0 auto; height: 100px;" /></a> </h1>
<h2 align="center"> 🐢 Olive: OdiaGPT Model built by the OdiaGenAI Team </h2>
""")
# UI layout: components are placed in the order they are created inside
# this context manager.
with gr.Blocks() as demo:
    with gr.Row():
        gr.Markdown(title_markdown)
    # Chat history widget; the bot side shows the Olive logo as avatar.
    chatbot = gr.Chatbot(
        [],
        elem_id="chatbot",
        bubble_full_width=False,
        avatar_images=(None, (os.path.join(os.path.dirname(__file__), "olive_final_logo.png"))),
    )
    # Free-text input; submitting it (Enter) triggers the chat handlers below.
    msg = gr.Textbox(scale=4,
                     show_label=False,
                     placeholder="Enter text and press enter",
                     container=False
                     )
    submit_btn = gr.Button(value="Submit")
    clear = gr.Button("Clear")
    # Clickable example prompts (English + Odia) that fill the textbox.
    gr.Examples(examples=[
        ["What is the result of dividing 16 by 4"],
        ['Give an example of Palindrome'],
        ['What is 2+2 ?'],
        ['What is Generative AI.'],
        ['ଓଡିଶା ବିଷୟରେ ଏକ ଓଡିଆ କବିତା ଲେଖ |'],
        # ['Why this happens and how to fix it?'],
        # ["What do you think about the person's behaviour?"],
        # ['Do you know who drew this painting?'],
    ], inputs=[msg])
    # Hindi example set, currently disabled.
    # gr.Examples(examples=[
    #     ["16 को 4 से विभाजित करने पर क्या परिणाम आता है?"],
    #     ['पैलिंड्रोम का एक उदाहरण दीजिए'],
    #     ['2+2 क्या है?'],
    #     ['जेनरेटिव एआई क्या है?'],
    #     ['ओडिशा के बारे में एक कविता लिखें'],
    #     # ['Why this happens and how to fix it?'],
    #     # ["What do you think about the person's behaviour?"],
    #     # ['Do you know who drew this painting?'],
    # ], inputs=[msg])
def user(user_message, history):
    """Record the user's turn and clear the textbox.

    Returns an empty string (new textbox value) and the history with the
    new turn appended; the bot's half of the pair is None until `bot`
    streams it in.
    """
    updated_history = history + [[user_message, None]]
    return "", updated_history
def bot(history):
    """Stream the model's answer into the last chat turn.

    Generator handler for gr.Chatbot: yields the mutated history after
    every chunk so the UI re-renders incrementally.
    """
    prompt = history[-1][0]
    print(prompt)
    history[-1][1] = ""
    print(history)
    for partial_text in http_bot_yield(prompt):
        # Each yield carries the full text so far — overwrite, don't append.
        history[-1][1] = partial_text
        # Small delay smooths the typewriter effect in the UI.
        time.sleep(0.05)
        yield history
# push_to_comet(prompt, history[-1][1])
# Event wiring: Enter in the textbox or clicking Submit first records the
# user turn (queue=False so the textbox clears immediately), then chains
# into the streaming bot handler.
msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
    bot, chatbot, chatbot
)
submit_btn.click(user, [msg, chatbot], [msg, chatbot], queue=False).then(
    bot, chatbot, chatbot)
# Clear button resets the chat history to empty.
clear.click(lambda: None, None, chatbot, queue=False)
# chatbot.like(vote, None, None)
# Queuing is required for generator (streaming) event handlers.
demo.queue()
demo.launch(debug=True)