import gradio as gr
import tensorflow as tf
from transformers import BertTokenizer, TFBertModel
import numpy as np

# Load your model
model = tf.keras.models.load_model('models/model_files')

# Load tokenizer
tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')

def preprocess_text(text):
    # Tokenize into input IDs and attention mask tensors for BERT
    inputs = tokenizer(text, return_tensors='tf', padding=True, truncation=True, max_length=512)
    return inputs

def predict(text, image, structured):
    text_inputs = preprocess_text(text)
    # Resize and normalize the image for ResNet50
    image = tf.image.resize(image, (224, 224))
    image = tf.keras.applications.resnet50.preprocess_input(image)
    # Standardize the structured features
    structured = (structured - structured.mean()) / structured.std()
    prediction = model.predict([text_inputs['input_ids'], text_inputs['attention_mask'], image, structured])
    return prediction[0][0]

# Define the chat function
def chat_response(user_input):
    return f"Model response to: {user_input}"

# Define the code execution function
def execute_code(code):
    # Executes arbitrary Python; the submitted code is expected to store its result
    # in a variable named "output"
    exec_globals = {}
    exec(code, exec_globals)
    return exec_globals.get("output", "No output")

with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            chat_input = gr.Textbox(lines=2, placeholder="Enter your message here...")
            chat_output = gr.Textbox(lines=5, placeholder="Model response will appear here...")
            chat_button = gr.Button("Send")
        with gr.Column():
            code_input = gr.Textbox(lines=10, placeholder="Enter your code here...")
            code_output = gr.Textbox(lines=5, placeholder="Code output will appear here...")
            code_button = gr.Button("Run Code")

    chat_button.click(chat_response, inputs=chat_input, outputs=chat_output)
    code_button.click(execute_code, inputs=code_input, outputs=code_output)

demo.launch()
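
# Note: predict() above is defined but never wired into the Blocks UI. The sketch below
# is one possible way to expose it as its own Gradio app; the component choices
# (a numpy image input, comma-separated structured features) and the added batch
# dimension are assumptions about the model's expected inputs, not part of the original.
def run_predict(text, image, structured_csv):
    # Parse e.g. "0.5, 1.2, 3.0" into a (1, n_features) float array (assumed feature format)
    features = np.array([[float(x) for x in structured_csv.split(",")]])
    # Add a batch dimension to the image, assuming the model expects (1, 224, 224, 3)
    return float(predict(text, image[np.newaxis, ...], features))

with gr.Blocks() as predict_demo:
    text_in = gr.Textbox(lines=2, placeholder="Enter text here...")
    image_in = gr.Image(type="numpy")
    structured_in = gr.Textbox(placeholder="Comma-separated structured features, e.g. 0.5, 1.2, 3.0")
    predict_out = gr.Number(label="Prediction")
    predict_button = gr.Button("Predict")
    predict_button.click(run_predict, inputs=[text_in, image_in, structured_in], outputs=predict_out)

# predict_demo.launch()  # launch this app instead of demo above, or merge these
#                        # components into the Blocks layout as a third column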