import streamlit as st
from llama_cpp import Llama

# Load the model from the Hugging Face Hub.
# Cached so Streamlit does not reload the large GGUF model on every rerun.
@st.cache_resource
def load_model():
    return Llama.from_pretrained(
        repo_id="DavidAU/Command-R-01-200xq-Ultra-NEO-V1-35B-IMATRIX-GGUF",
        filename="CommandR-35B-NEO-V1-D_AU-IQ3_XS-0200xq-imat13.gguf",
    )

llm = load_model()

# Generate a story from the user's prompt using the model.
def generate_story(user_input):
    response = llm.create_chat_completion(
        messages=[
            {
                "role": "user",
                "content": user_input
            }
        ]
    )
    return response['choices'][0]['message']['content']

# Build the Streamlit UI and start storytelling.
def start_storytelling():
    st.title("Welcome to the Storytelling bot.")
    story_prompt = st.text_input("What would you like your story to be about?")
    if story_prompt:
        story_part = generate_story(story_prompt)
        st.write("Here's the beginning of your story:")
        st.write(story_part)

# Start the app.
start_storytelling()
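To try it locally, save the script as, for example, app.py and launch it with streamlit run app.py. This assumes streamlit and llama-cpp-python are installed, along with huggingface_hub, which Llama.from_pretrained uses to download the GGUF file from the Hub on first run.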