from transformers import GPT2LMHeadModel, GPT2Tokenizer
import gradio as gr
import torch
import json

title = "AI ChatBot"
description = "A large-scale pretrained response generation model (GPT-2)"
examples = [["How are you?"]]

# Load the GPT-2 tokenizer and model, and move the model to the GPU if available
tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
model = GPT2LMHeadModel.from_pretrained("gpt2")
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)

# Load courses data from JSON file
with open("uts_courses.json", "r") as f:
    courses_data = json.load(f)

# Define the predict function used by the Gradio interface below
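# NOTE: The original file references `predict` but never defines it; the
# version below is a minimal sketch, assuming a DialoGPT-style chat loop on
# top of the vanilla GPT-2 checkpoint loaded above (which is not fine-tuned
# for dialogue, so replies will be generic).
def predict(user_input, history=None):
    if history is None:
        history = []
    # Encode the new user turn, terminated with the EOS token
    new_input_ids = tokenizer.encode(
        user_input + tokenizer.eos_token, return_tensors="pt"
    ).to(device)
    # Append the new turn to the running token history
    if history:
        bot_input_ids = torch.cat(
            [torch.LongTensor(history).to(device), new_input_ids], dim=-1
        )
    else:
        bot_input_ids = new_input_ids
    # Generate a continuation and keep the full token sequence as the new state
    history = model.generate(
        bot_input_ids,
        max_length=1000,
        pad_token_id=tokenizer.eos_token_id,
    ).tolist()
    # Decode only the newly generated tokens as the bot's reply
    response = tokenizer.decode(
        history[0][bot_input_ids.shape[-1]:], skip_special_tokens=True
    )
    return response, history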

def main():
    # Load courses data from JSON file and print it for inspection
    with open("uts_courses.json", "r") as f:
        courses_data = json.load(f)
    print("Contents of uts_courses.json:")
    print(courses_data)
    print()

if __name__ == "__main__":
    main()

# Build and launch the Gradio interface; "state" carries the chat history
# between turns, so it must appear in both inputs and outputs
gr.Interface(
    fn=predict,
    title=title,
    description=description,
    examples=examples,
    inputs=["text", "state"],
    outputs=["text", "state"],
    theme="finlaymacklon/boxy_violet",
).launch()