import os
import json
import random

import requests
import gradio as gr

# Hugging Face Inference API credentials and endpoint (BLOOM)
API_TOKEN = os.getenv("API_TOKEN")
headers = {"Authorization": f"Bearer {API_TOKEN}"}
API_URL = "https://api-inference.huggingface.co/models/bigscience/bloom"

# Few-shot prompt: each example maps a business type to five futuristic bigrams,
# with "#####" separating the examples.
prompt = """What would this look like in 2050? Give 5 optimally describing bigrams.
Input:cake shop
Output:Sustainable ingredients,Virtual reality experience,Customizable flavor options,3D printed designs,Robotic assistants
#####
Input:consulting firm
Output:Virtual meetings,AI integration,Sustainability focus,Virtual reality simulations,Blockchain technology
#####
Input:biscuit brand
Output:Sustainable packaging,Plant-based ingredients,Digital marketing,Virtual reality,Smart packaging
#####
"""


def query(text):
    print(text)
    # Append the user's input to the few-shot prompt.
    text = prompt + "\nInput:" + text + "\nOutput:"
    payload = {
        "inputs": text,
        "parameters": {
            "max_length": 250,
            "temperature": 0.9,
            "seed": random.randint(0, 100),
        },
    }
    data = json.dumps(payload)
    response = requests.request("POST", API_URL, headers=headers, data=data)
    generated_text = json.loads(response.content.decode("utf-8"))[0]["generated_text"]
    # Keep only the completion for the new input: drop the echoed prompt, the
    # "Output:" label, and anything generated after the "###" separator or a
    # spurious new "Input:" line.
    result = generated_text.replace(text, "").strip()
    result = result.replace("Output:", "")
    parts = result.split("###")
    topic = parts[0].strip()
    topic = topic.split("Input:")[0]
    print(topic)
    return topic


with gr.Blocks() as demo:
    gr.Markdown("