import streamlit as st
from langchain_community.llms import CTransformers

st.title("Generating Responses with Hugging Face Models")
st.markdown("## Model: `marella/gpt-2-ggml`")


def get_response(question: str) -> str:
    """Return the LLM's response to a user question.

    Args:
        question (str): The user input question.

    Returns:
        str: The response from the LLM model.
    """
    # Load the GGML build of GPT-2 and run the prompt through it;
    # see the cached variant below for avoiding a reload on every call.
    llm = CTransformers(model="marella/gpt-2-ggml")
    response = llm.invoke(question)
    return response
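

# get_response() above reloads the model on every call. A minimal sketch of a
# cached alternative, assuming Streamlit >= 1.18 for st.cache_resource; the
# names `load_llm` and `get_response_cached` are illustrative, not part of
# the original app.
@st.cache_resource
def load_llm() -> CTransformers:
    # Loaded once per process; Streamlit reuses the instance across reruns.
    # Generation settings could also be passed via the `config` dict that
    # CTransformers accepts, e.g. config={"max_new_tokens": 256}.
    return CTransformers(model="marella/gpt-2-ggml")


def get_response_cached(question: str) -> str:
    return load_llm().invoke(question)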

# Simple UI: a text area for the prompt and a button to trigger generation.
user_input = st.text_area("Enter your query here...")

if st.button("Get Response") and user_input:
    with st.spinner("Generating Response..."):
        answer = get_response(user_input)
    # get_response() returns a string, so an emptiness check suffices here.
    if answer:
        st.success("Great! Response generated successfully")
        st.write(answer)
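
# To try the app (assuming this file is saved as app.py and the streamlit,
# langchain-community, and ctransformers packages are installed):
#
#   streamlit run app.py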