File size: 1,216 Bytes
e23f935
be70822
e23f935
 
 
 
39c7f29
e23f935
 
 
 
9ec55d9
 
be70822
e23f935
be70822
 
 
 
 
 
 
e23f935
 
 
 
 
 
 
 
be70822
e23f935
be70822
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
import os
import streamlit as st
from transformers import T5ForConditionalGeneration, T5Tokenizer
import torch

# --- Model loading --------------------------------------------------------
# NOTE(review): from_pretrained() expects a model *directory* (containing
# config.json + weights) or a Hub repo id — NOT the pytorch_model.bin file
# itself, which the original path pointed at and which always fails to load.
MODEL_PATH = "Cegil/code_generation"  # update to your model directory or Hub repo id


@st.cache_resource  # load once per server process, not on every Streamlit rerun
def _load_model_and_tokenizer(path: str):
    """Load the T5 model and tokenizer from *path*.

    *path* may be a local directory or a HuggingFace Hub repo id; an
    os.path.exists() pre-check is deliberately omitted because it would
    wrongly reject valid Hub ids — from_pretrained() raises its own
    informative error for missing local paths.
    """
    model = T5ForConditionalGeneration.from_pretrained(path)
    tok = T5Tokenizer.from_pretrained(path)
    model.eval()  # inference only: disable dropout
    return model, tok


code_gen_model, tokenizer = _load_model_and_tokenizer(MODEL_PATH)

# --- Streamlit UI ---------------------------------------------------------
st.title("Code Generation Interface")

# Input prompt for code generation
prompt = st.text_input("Enter your code generation prompt:", "Example prompt")

# Button to generate code
if st.button("Generate Code"):
    # Tokenize the input prompt
    inputs = tokenizer(prompt, return_tensors="pt")

    # Generate code. Pass **inputs so the attention_mask accompanies
    # input_ids (the original passed input_ids alone), and raise the output
    # budget — T5's default max_length (~20 tokens) truncates generated code.
    with torch.no_grad():  # inference: no autograd bookkeeping needed
        output = code_gen_model.generate(**inputs, max_new_tokens=256)

    # Decode the output to get the generated code
    generated_code = tokenizer.decode(output[0], skip_special_tokens=True)

    # Display the generated code in a formatted way
    st.write("Generated Code:")
    st.code(generated_code)