import gradio as gr
import requests
import os
import json  # needed for the json.JSONDecodeError handling below

# Hugging Face token from the Space secrets (read here, but not attached to the request below).
ACCESS_TOKEN = os.getenv("HF_TOKEN")

def respond(message, max_tokens=512, temperature=0.7, top_p=0.95):
    # Build the generation request for the backend API.
    data = {
        "model": "command-r-plus:104b-fp16",
        "prompt": message,
        "max_tokens": max_tokens,
        "temperature": temperature,
        "top_p": top_p
    }
    response = requests.post("http://hugpu.ai:7877/api/generate", json=data)
    try:
        # Extract the generated text; fall back to an empty string if the key is missing.
        generated_text = response.json().get('generated_text', '')
    except json.JSONDecodeError:
        print("Failed to decode JSON from response:", response.text)
        generated_text = "An error occurred while processing your request."
    return generated_text

demo = gr.Interface(
    fn=respond,
    inputs=[
        gr.Textbox(label="Your Message", placeholder="Type your message here..."),
        gr.Slider(minimum=1, maximum=2048, value=512, label="Max Tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, label="Top-P")
    ],
    outputs=gr.Textbox(),
    title="Advanced AI Chatbot",
    description="Enter your message and receive a response generated by an advanced AI model.",
    theme="Nymbo/Nymbo_Theme"
)
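
# Optional sanity check (a sketch, not part of the app's UI flow): uncomment to hit the
# backend once from the command line before launching the interface. Assumes the endpoint
# used in respond() is reachable and returns a JSON body with a 'generated_text' field;
# the prompt and max_tokens value here are purely illustrative.
# print(respond("Hello!", max_tokens=64))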

if __name__ == "__main__":
    demo.launch()