from transformers import GPT2LMHeadModel, GPT2Tokenizer
import gradio as gr
import torch
import json
title = "AI ChatBot"
description = "A State-of-the-Art Large-scale Pretrained Response generation model (GEMMA)"
examples = [["How are you?"]]
tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
model = GPT2LMHeadModel.from_pretrained("gpt2")
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)
# Load courses data from JSON file
with open("uts_courses.json", "r") as f:
courses_data = json.load(f)
# Define the predict function as before
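# NOTE: the predict function referenced by gr.Interface below was missing from
# this file. What follows is a minimal sketch of a DialoGPT-style stateful chat
# loop using the GPT-2 model loaded above; max_length and decoding settings are
# assumptions rather than the original author's values, and the courses_data
# loaded above is not yet used here because the original lookup logic is unknown.
def predict(user_input, history=None):
    history = history or []
    # Encode the new user message, appending the EOS token as a turn separator
    new_input_ids = tokenizer.encode(
        user_input + tokenizer.eos_token, return_tensors="pt"
    ).to(device)
    # Prepend the running conversation history, if any
    if history:
        bot_input_ids = torch.cat(
            [torch.LongTensor([history]).to(device), new_input_ids], dim=-1
        )
    else:
        bot_input_ids = new_input_ids
    # Generate a continuation and keep the full token sequence as the new state
    output_ids = model.generate(
        bot_input_ids,
        max_length=1024,
        pad_token_id=tokenizer.eos_token_id,
    )
    history = output_ids[0].tolist()
    # Decode only the newly generated tokens as the bot's reply
    response = tokenizer.decode(
        output_ids[0][bot_input_ids.shape[-1]:], skip_special_tokens=True
    )
    return response, history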
def main():
    # Load courses data from JSON file
    with open("uts_courses.json", "r") as f:
        courses_data = json.load(f)
    print("Contents of uts_courses.json:")
    print(courses_data)
    print()

if __name__ == "__main__":
    main()
gr.Interface(
    fn=predict,
    title=title,
    description=description,
    examples=examples,
    inputs=["text", "state"],
    outputs=["text", "state"],
    theme="finlaymacklon/boxy_violet",
).launch()